Dataset columns (name, type, length or value range):

  entities              list    lengths 1 to 44.6k
  max_stars_repo_path   string  lengths 6 to 160
  max_stars_repo_name   string  lengths 6 to 66
  max_stars_count       int64   0 to 47.9k
  content               string  lengths 18 to 1.04M
  id                    string  lengths 1 to 6
  new_content           string  lengths 18 to 1.04M
  modified              bool    1 class
  references            string  lengths 32 to 1.52M

The sample record below lists one row's values in the same column order.
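The entities column stores character spans (start, end, tag, value) into content, and new_content is the same text with those spans replaced by a placeholder such as <NAME>. The following is a minimal sketch of how a row like the one below could be loaded and the spans re-applied, assuming the Hugging Face datasets library and a hypothetical dataset identifier ("my-org/code-name-redaction"); the apply_entities helper is illustrative and not part of the dataset itself.

from datasets import load_dataset

# Hypothetical dataset identifier; substitute the real one.
ds = load_dataset("my-org/code-name-redaction", split="train")
row = ds[0]

def apply_entities(content, entities):
    """Replace each tagged span (character offsets start/end into content)
    with a <TAG> placeholder. Spans are applied right to left so earlier
    offsets stay valid after each substitution."""
    redacted = content
    for ent in sorted(entities, key=lambda e: e["start"], reverse=True):
        redacted = redacted[:ent["start"]] + "<" + ent["tag"] + ">" + redacted[ent["end"]:]
    return redacted

# If the dataset was built this way, the result should line up with the
# new_content column (e.g. "Ragnar Svensson" -> "<NAME>") whenever
# modified is true.
print(apply_entities(row["content"], row["entities"])[:300])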
[ { "context": ";; Copyright 2014-2020 King\n;; Copyright 2009-2014 Ragnar Svensson, Christian Murray\n;; Licensed under the Defold Li", "end": 111, "score": 0.9998077750205994, "start": 96, "tag": "NAME", "value": "Ragnar Svensson" }, { "context": "-2020 King\n;; Copyright 2009-2014 Ragnar Svensson, Christian Murray\n;; Licensed under the Defold License version 1.0 ", "end": 129, "score": 0.9998217821121216, "start": 113, "tag": "NAME", "value": "Christian Murray" } ]
max_stars_repo_path: editor/test/editor/fs_test.clj
max_stars_repo_name: cmarincia/defold
max_stars_count: 0
;; Copyright 2020-2022 The Defold Foundation ;; Copyright 2014-2020 King ;; Copyright 2009-2014 Ragnar Svensson, Christian Murray ;; Licensed under the Defold License version 1.0 (the "License"); you may not use ;; this file except in compliance with the License. ;; ;; You may obtain a copy of the License, together with FAQs at ;; https://www.defold.com/license ;; ;; Unless required by applicable law or agreed to in writing, software distributed ;; under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR ;; CONDITIONS OF ANY KIND, either express or implied. See the License for the ;; specific language governing permissions and limitations under the License. (ns editor.fs-test (:require [clojure.java.io :as io] [clojure.test :refer :all] [editor.fs :as fs] [integration.test-util :as test-util]) (:import [java.io File] [java.nio.file NoSuchFileException FileAlreadyExistsException])) (def ^:private file-test-tree [{"directory" ["dir.txt" {"subdirectory" ["sub.txt" {"subsubdirectory" ["subsub.txt"]}]}]} "root.txt"]) (def ^:private no-root-file-test-tree (subvec file-test-tree 0 1)) (def ^:private no-subdirectory-file-test-tree (assoc-in file-test-tree [0 "directory"] ["dir.txt"])) (defn- setup-delete-file-test [^File dir] (test-util/make-file-tree! dir file-test-tree)) (deftest delete-test (testing "Delete file" (test-util/with-temp-dir! dir (setup-delete-file-test dir) (is (= (io/file dir "root.txt") (fs/delete-file! (io/file dir "root.txt")))) (is (= no-root-file-test-tree (test-util/file-tree dir))))) (testing "Delete missing file" (test-util/with-temp-dir! dir (setup-delete-file-test dir) (is (= (io/file dir "non-existing") (fs/delete-file! (io/file dir "non-existing")))) (is (= file-test-tree (test-util/file-tree dir))))) (testing "Delete missing file failing" (test-util/with-temp-dir! dir (setup-delete-file-test dir) (is (thrown? NoSuchFileException (fs/delete-file! (io/file dir "non-existing") {:missing :fail}))) (is (= file-test-tree (test-util/file-tree dir))))) (testing "Delete missing file failing silently" (test-util/with-temp-dir! dir (setup-delete-file-test dir) (is (= nil (fs/delete-file! (io/file dir "non-existing") {:missing :fail :fail :silently}))) (is (= file-test-tree (test-util/file-tree dir))))) (testing "Delete dir" (test-util/with-temp-dir! dir (setup-delete-file-test dir) (let [target (io/file dir "directory" "subdirectory")] (is (= target (fs/delete-directory! target))) (is (= no-subdirectory-file-test-tree (test-util/file-tree dir)))))) (testing "Delete missing dir" (test-util/with-temp-dir! dir (setup-delete-file-test dir) (let [target (io/file dir "directory" "non-existing-subdirectory")] (is (= target (fs/delete-directory! target))) (is (= file-test-tree (test-util/file-tree dir)))))) (testing "Delete missing dir failing" (test-util/with-temp-dir! dir (setup-delete-file-test dir) (is (thrown? NoSuchFileException (fs/delete-directory! (io/file dir "non-existing-subdirectory") {:missing :fail}))) (is (= file-test-tree (test-util/file-tree dir))))) (testing "Delete missing dir failing silently" (test-util/with-temp-dir! dir (setup-delete-file-test dir) (is (= nil (fs/delete-directory! (io/file dir "non-existing-subdirectory") {:missing :fail :fail :silently})))))) (def ^:private silly-tree [{"a" [{"b" [{"b" [{"b" ["file.txt" {"k" ["also.txt"]}]}]} "file.txt"]}]}]) (deftest move-rename-test (testing "Rename file" (test-util/with-temp-dir! dir (test-util/make-file-tree! 
dir ["old-name.txt"]) (let [src (io/file dir "old-name.txt") tgt (io/file dir "new-name.txt")] (is (= [[src tgt]] (fs/move-file! src tgt))) (is (= ["new-name.txt"] (test-util/file-tree dir)))))) (testing "Rename file no-change" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir ["name.txt"]) (let [src (io/file dir "name.txt") tgt (io/file dir "name.txt")] (is (= [[src tgt]] (fs/move-file! src tgt))) (is (= ["name.txt"] (test-util/file-tree dir)))))) (testing "Rename file caps only" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir ["name.txt"]) (let [src (io/file dir "name.txt") tgt (io/file dir "Name.txt")] (is (= [[src tgt]] (fs/move-file! src tgt))) (is (= ["Name.txt"] (test-util/file-tree dir)))))) (testing "Rename file replace" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir ["also.txt" "name.txt"]) (let [src (io/file dir "name.txt") tgt (io/file dir "also.txt")] (is (= [[src tgt]] (fs/move-file! src tgt))) (is (= ["also.txt"] (test-util/file-tree dir)))))) (testing "Rename file replace dir" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir [{"cake" ["tosca.txt"]} "name.txt"]) (let [src (io/file dir "name.txt") tgt (io/file dir "cake")] (is (= [[src tgt]] (fs/move-file! src tgt))) (is (= ["cake"] (test-util/file-tree dir)))))) (testing "Rename file failing" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir ["also.txt" "name.txt"]) (let [src (io/file dir "name.txt") tgt (io/file dir "also.txt")] (is (thrown? FileAlreadyExistsException (fs/move-file! src tgt {:target :keep}))) (is (= ["also.txt" "name.txt"] (test-util/file-tree dir)))))) (testing "Rename file failing silently" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir ["also.txt" "name.txt"]) (let [src (io/file dir "name.txt") tgt (io/file dir "also.txt")] (is (= [] (fs/move-file! src tgt {:target :keep :fail :silently}))) (is (= ["also.txt" "name.txt"] (test-util/file-tree dir)))))) (testing "Rename missing file failing" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir ["also.txt"]) ; no name.txt (let [src (io/file dir "name.txt") tgt (io/file dir "also.txt")] (is (thrown? NoSuchFileException (fs/move-file! src tgt {:target :keep}))) (is (= ["also.txt"] (test-util/file-tree dir)))))) (testing "Rename missing file failing silently" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir ["also.txt"]) ; no name.txt (let [src (io/file dir "name.txt") tgt (io/file dir "also.txt")] (is (= [] (fs/move-file! src tgt {:target :keep :fail :silently}))) (is (= ["also.txt"] (test-util/file-tree dir)))))) (testing "Rename dir" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir [{"directory" ["name.txt"]}]) (let [src (io/file dir "directory") tgt (io/file dir "another")] (is (= [[src tgt]] (fs/move-directory! src tgt))) (is (= [{"another" ["name.txt"]}] (test-util/file-tree dir)))))) (testing "Rename dir no-change" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir [{"directory" ["name.txt"]}]) (let [src (io/file dir "directory") tgt (io/file dir "directory")] (is (= [[src tgt]] (fs/move-directory! src tgt))) (is (= [{"directory" ["name.txt"]}] (test-util/file-tree dir)))))) (testing "Rename dir caps only" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir [{"directory" ["name.txt"]}]) (let [src (io/file dir "directory") tgt (io/file dir "Directory")] (is (= [[src tgt]] (fs/move-directory! src tgt))) (is (= [{"Directory" ["name.txt"]}] (test-util/file-tree dir)))))) (testing "Rename dir replace" (test-util/with-temp-dir! 
dir (test-util/make-file-tree! dir [{"directory" ["name.txt"]} {"directory2" ["name2.txt"]}]) (let [src (io/file dir "directory") tgt (io/file dir "directory2")] (is (= [[src tgt]] (fs/move-directory! src tgt))) (is (= [{"directory2" ["name.txt"]}] (test-util/file-tree dir)))))) (testing "Rename dir replace file" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir [{"directory" ["file.txt"]} "cake"]) (let [src (io/file dir "directory") tgt (io/file dir "cake")] (is (= [[src tgt]] (fs/move-directory! src tgt))) (is (= [{"cake" ["file.txt"]}] (test-util/file-tree dir)))))) (testing "Rename dir failing" (test-util/with-temp-dir! dir (let [tree [{"another" ["file2.txt"]} {"directory" ["file.txt"]}]] (test-util/make-file-tree! dir tree) (let [src (io/file dir "directory") tgt (io/file dir "another")] (is (thrown? FileAlreadyExistsException (fs/move-directory! src tgt {:target :keep}))) (is (= tree (test-util/file-tree dir))))))) (testing "Rename dir failing silently" (test-util/with-temp-dir! dir (let [tree [{"another" ["file2.txt"]} {"directory" ["file.txt"]}]] (test-util/make-file-tree! dir tree) (let [src (io/file dir "directory") tgt (io/file dir "another")] (is (= [] (fs/move-directory! src tgt {:target :keep :fail :silently}))) (is (= tree (test-util/file-tree dir))))))) (testing "Rename missing dir failing" (test-util/with-temp-dir! dir (let [tree [{"another" ["file2.txt"]}]] ; no {"directory" [...]} (test-util/make-file-tree! dir tree) (let [src (io/file dir "directory") tgt (io/file dir "another")] (is (thrown? NoSuchFileException (fs/move-directory! src tgt))) (is (= tree (test-util/file-tree dir))))))) (testing "Rename missing dir failing silently" (test-util/with-temp-dir! dir (let [tree [{"another" ["file2.txt"]}]] ; no {"directory" [...]} (test-util/make-file-tree! dir tree) (let [src (io/file dir "directory") tgt (io/file dir "another")] (is (= [] (fs/move-directory! src tgt {:fail :silently}))) (is (= tree (test-util/file-tree dir)))))))) (deftest move-file-test (testing "Move file" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir [{"directory" ["also.txt"]} "name.txt"]) (let [src (io/file dir "name.txt") tgt (io/file dir "directory" "name.txt")] (is (= [[src tgt]] (fs/move-file! src tgt))) (is (= [{"directory" ["also.txt" "name.txt"]}] (test-util/file-tree dir)))))) (testing "Move file renaming" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir [{"directory" ["also.txt"]} "name.txt"]) (let [src (io/file dir "name.txt") tgt (io/file dir "directory" "new-name.txt")] (is (= [[src tgt]] (fs/move-file! src tgt))) (is (= [{"directory" ["also.txt" "new-name.txt"]}] (test-util/file-tree dir)))))) (testing "Move file replace" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir [{"directory" ["also.txt"]} "name.txt"]) (let [src (io/file dir "name.txt") tgt (io/file dir "directory" "also.txt")] (is (= [[src tgt]] (fs/move-file! src tgt))) (is (= [{"directory" ["also.txt"]}] (test-util/file-tree dir)))))) (testing "Move file failing" (test-util/with-temp-dir! dir (let [tree [{"directory" ["also.txt"]} "name.txt"]] (test-util/make-file-tree! dir tree) (let [src (io/file dir "name.txt") tgt (io/file dir "directory" "also.txt")] (is (thrown? FileAlreadyExistsException (fs/move-file! src tgt {:target :keep}))) (is (= tree (test-util/file-tree dir))))))) (testing "Move file failing silently" (test-util/with-temp-dir! dir (let [tree [{"directory" ["also.txt"]} "name.txt"]] (test-util/make-file-tree! 
dir tree) (let [src (io/file dir "name.txt") tgt (io/file dir "directory" "also.txt")] (is (= [] (fs/move-file! src tgt {:target :keep :fail :silently}))) (is (= tree (test-util/file-tree dir))))))) (testing "Move missing file failing" (test-util/with-temp-dir! dir (let [tree [{"directory" ["also.txt"]}]] ; no name.txt (test-util/make-file-tree! dir tree) (let [src (io/file dir "name.txt") tgt (io/file dir "directory" "name.txt")] (is (thrown? NoSuchFileException (fs/move-file! src tgt))) (is (= tree (test-util/file-tree dir))))))) (testing "Move missing file failing silently" (test-util/with-temp-dir! dir (let [tree [{"directory" ["also.txt"]}]] ; no name.txt (test-util/make-file-tree! dir tree) (let [src (io/file dir "name.txt") tgt (io/file dir "directory" "name.txt")] (is (= [] (fs/move-file! src tgt {:fail :silently}))) (is (= tree (test-util/file-tree dir)))))))) (deftest move-dir-test (testing "Move dir" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir [{"another" ["also.txt"]} {"directory" ["name.txt"]}]) (let [src (io/file dir "directory") tgt (io/file dir "another" "directory")] (is (= [[src tgt]] (fs/move-directory! src tgt))) (is (= [{"another" ["also.txt" {"directory" ["name.txt"]}]}] (test-util/file-tree dir)))))) (testing "Move dir renaming" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir [{"another" ["also.txt"]} {"directory" ["name.txt"]}]) (let [src (io/file dir "directory") tgt (io/file dir "another" "directory2")] (is (= [[src tgt]] (fs/move-directory! src tgt))) (is (= [{"another" ["also.txt" {"directory2" ["name.txt"]}]}] (test-util/file-tree dir)))))) (testing "Move dir replace" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir [{"another" [{"directory2" ["name2.txt"]}]} {"directory" ["name.txt"]}]) (let [src (io/file dir "directory") tgt (io/file dir "another" "directory2")] (is (= [[src tgt]] (fs/move-directory! src tgt))) (is (= [{"another" [{"directory2" ["name.txt"]}]}] (test-util/file-tree dir)))))) (testing "Move dir merging" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir [{"another" [{"directory2" ["name2.txt"]}]} {"directory" ["name.txt"]}]) (let [src (io/file dir "directory") tgt (io/file dir "another" "directory2")] (is (= [[src tgt]] (fs/move-directory! src tgt {:target :merge}))) (is (= [{"another" [{"directory2" ["name.txt" "name2.txt"]}]}] (test-util/file-tree dir)))))) (testing "Move dir failing" (test-util/with-temp-dir! dir (let [tree [{"another" [{"directory2" ["name2.txt"]}]} {"directory" ["name.txt"]}]] (test-util/make-file-tree! dir tree) (let [src (io/file dir "directory") tgt (io/file dir "another" "directory2")] (is (thrown? FileAlreadyExistsException (fs/move-directory! src tgt {:target :keep}))) (is (= tree (test-util/file-tree dir))))))) (testing "Move dir failing silently" (test-util/with-temp-dir! dir (let [tree [{"another" [{"directory2" ["name2.txt"]}]} {"directory" ["name.txt"]}]] (test-util/make-file-tree! dir tree) (let [src (io/file dir "directory") tgt (io/file dir "another" "directory2")] (is (= [] (fs/move-directory! src tgt {:target :keep :fail :silently}))) (is (= tree (test-util/file-tree dir))))))) (testing "Move missing dir failing" (test-util/with-temp-dir! dir (let [tree [{"another" [{"directory2" ["name2.txt"]}]}]] ; no {"directory" [...]} (test-util/make-file-tree! dir tree) (let [src (io/file dir "directory") tgt (io/file dir "another" "directory2")] (is (thrown? NoSuchFileException (fs/move-directory! 
src tgt {:target :keep}))) (is (= tree (test-util/file-tree dir))))))) (testing "Move missing dir failing silently" (test-util/with-temp-dir! dir (let [tree [{"another" [{"directory2" ["name2.txt"]}]}]] ; no {"directory" [...]} (test-util/make-file-tree! dir tree) (let [src (io/file dir "directory") tgt (io/file dir "another" "directory2")] (is (= [] (fs/move-directory! src tgt {:target :keep :fail :silently}))) (is (= tree (test-util/file-tree dir))))))) ;; Below tests assume this initial file tree: (def silly-tree above) ;; ;; . ;; └── a ;; └── b ;; β”œβ”€β”€ b ;; β”‚ └── b ;; β”‚ β”œβ”€β”€ file.txt ;; β”‚ └── k ;; β”‚ └── also.txt ;; └── file.txt ;; ;; ... Then moves directory /a/b/b up to /a using :keep :merge and :replace ;; ;; This will break if move-directory would do for instance "cp src trg; rm src" for :merge ;; or "rm trg; cp src trg rm src" for :replace (testing "Move dir replacing parent" ;; expect: ;; . ;; └── a ;; └── b ;; └── b ;; β”œβ”€β”€ file.txt ;; └── k ;; └── also.txt ;; (test-util/with-temp-dir! dir (test-util/make-file-tree! dir silly-tree) (let [src (io/file dir "a" "b" "b") tgt (io/file dir "a" "b")] (is (= [[src tgt]] (fs/move-directory! src tgt))) (is (= [{"a" [{"b" [{"b" ["file.txt" {"k" ["also.txt"]}]}]}]}] (test-util/file-tree dir)))))) (testing "Move dir merging with parent" ;; expect ;; . ;; └── a ;; └── b ;; β”œβ”€β”€ b ;; β”‚ β”œβ”€β”€ file.txt ;; β”‚ └── k ;; β”‚ └── also.txt ;; └── file.txt (test-util/with-temp-dir! dir (test-util/make-file-tree! dir silly-tree) (let [src (io/file dir "a" "b" "b") tgt (io/file dir "a" "b")] (is (= [[src tgt]] (fs/move-directory! src tgt {:target :merge}))) (is (= [{"a" [{"b" [{"b" ["file.txt" {"k" ["also.txt"]}]} "file.txt"]}]}] (test-util/file-tree dir))))))) (deftest copy-file-test (testing "Copy file" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir ["name.txt"]) (let [src (io/file dir "name.txt") tgt (io/file dir "name2.txt")] (is (= [[src tgt]] (fs/copy-file! src tgt))) (is (= ["name.txt" "name2.txt"] (test-util/file-tree dir)))))) (testing "Copy file no-change" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir ["name.txt"]) (let [src (io/file dir "name.txt") tgt (io/file dir "name.txt")] (is (= [[src tgt]] (fs/copy-file! src tgt))) (is (= ["name.txt"] (test-util/file-tree dir)))))) (testing "Copy file caps only" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir ["name.txt"]) (let [src (io/file dir "name.txt") tgt (io/file dir "Name.txt")] (if fs/case-sensitive? (do (is (= [[src tgt]] (fs/copy-file! src tgt))) (is (= ["Name.txt" "name.txt"] (test-util/file-tree dir)))) (do (is (= [[src src]] (fs/copy-file! src tgt))) (is (= ["name.txt"] (test-util/file-tree dir)))))))) (testing "Copy file replace" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir ["also.txt" "name.txt"]) (let [src (io/file dir "name.txt") tgt (io/file dir "also.txt")] (is (= [[src tgt]] (fs/copy-file! src tgt))) (is (= ["also.txt" "name.txt"] (test-util/file-tree dir))) (is (= (slurp src) (slurp tgt)))))) (testing "Copy file replace dir" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir [{"directory" ["also.txt"]} "name.txt"]) (let [src (io/file dir "name.txt") tgt (io/file dir "directory")] (is (= [[src tgt]] (fs/copy-file! src tgt))) (is (= ["directory" "name.txt"] (test-util/file-tree dir))) (is (= (slurp src) (slurp tgt)))))) (testing "Copy file failing" (test-util/with-temp-dir! dir (test-util/make-file-tree! 
dir ["also.txt" "name.txt"]) (let [src (io/file dir "name.txt") tgt (io/file dir "also.txt")] (is (thrown? FileAlreadyExistsException (fs/copy-file! src tgt {:target :keep}))) (is (= ["also.txt" "name.txt"] (test-util/file-tree dir))) (is (not= (slurp src) (slurp tgt)))))) (testing "Copy file failing silently" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir ["also.txt" "name.txt"]) (let [src (io/file dir "name.txt") tgt (io/file dir "also.txt")] (is (= [] (fs/copy-file! src tgt {:target :keep :fail :silently}))) (is (= ["also.txt" "name.txt"] (test-util/file-tree dir))) (is (not= (slurp src) (slurp tgt)))))) (testing "Copy missing file failing" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir ["also.txt"]) ; no name.txt (let [src (io/file dir "name.txt") tgt (io/file dir "also.txt")] (is (thrown? NoSuchFileException (fs/copy-file! src tgt {:target :keep}))) (is (= ["also.txt"] (test-util/file-tree dir)))))) (testing "Copy missing file failing silently" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir ["also.txt"]) ; no name.txt (let [src (io/file dir "name.txt") tgt (io/file dir "also.txt")] (is (= [] (fs/copy-file! src tgt {:target :keep :fail :silently}))) (is (= ["also.txt"] (test-util/file-tree dir))))))) (deftest copy-dir-test (testing "Copy dir" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir [{"directory" ["name.txt"]}]) (let [src (io/file dir "directory") tgt (io/file dir "directory2")] (is (= [[src tgt]] (fs/copy-directory! src tgt))) (is (= [{"directory" ["name.txt"]} {"directory2" ["name.txt"]}] (test-util/file-tree dir)))))) (testing "Copy dir no-change" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir [{"directory" ["name.txt"]}]) (let [src (io/file dir "directory") tgt (io/file dir "directory")] (is (= [[src tgt]] (fs/copy-directory! src tgt))) (is (= [{"directory" ["name.txt"]}] (test-util/file-tree dir)))))) (testing "Copy dir caps only" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir [{"directory" ["name.txt"]}]) (let [src (io/file dir "directory") tgt (io/file dir "Directory")] (if fs/case-sensitive? (do (is (= [[src tgt]] (fs/copy-directory! src tgt))) (is (= [{"Directory" ["name.txt"]} {"directory" ["name.txt"]}] (test-util/file-tree dir)))) (do (is (= [[src src]] (fs/copy-directory! src tgt))) (is (= [{"directory" ["name.txt"]}] (test-util/file-tree dir)))))))) (testing "Copy dir replace" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir [{"directory" ["name.txt"]} {"directory2" ["name2.txt"]}]) (let [src (io/file dir "directory") tgt (io/file dir "directory2")] (is (= [[src tgt]] (fs/copy-directory! src tgt))) (is (= [{"directory" ["name.txt"]} {"directory2" ["name.txt"]}] (test-util/file-tree dir)))))) (testing "Copy dir replace file" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir ["cake" {"directory" ["name.txt"]}]) (let [src (io/file dir "directory") tgt (io/file dir "cake")] (is (= [[src tgt]] (fs/copy-directory! src tgt))) (is (= [{"cake" ["name.txt"]} {"directory" ["name.txt"]}] (test-util/file-tree dir)))))) (testing "Copy dir merge" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir [{"another" [{"directory2" ["name2.txt"]}]} {"directory" ["name.txt"]}]) (let [src (io/file dir "directory") tgt (io/file dir "another" "directory2")] (is (= [[src tgt]] (fs/copy-directory! 
src tgt {:target :merge}))) (is (= [{"another" [{"directory2" ["name.txt" "name2.txt"]}]} {"directory" ["name.txt"]}] (test-util/file-tree dir)))))) (testing "Copy dir failing" (test-util/with-temp-dir! dir (let [tree [{"another" [{"directory2" ["name2.txt"]}]} {"directory" ["name.txt"]}]] (test-util/make-file-tree! dir tree) (let [src (io/file dir "directory") tgt (io/file dir "another" "directory2")] (is (thrown? FileAlreadyExistsException (fs/copy-directory! src tgt {:target :keep}))) (is (= tree (test-util/file-tree dir))))))) (testing "Copy dir failing silently" (test-util/with-temp-dir! dir (let [tree [{"another" [{"directory2" ["name2.txt"]}]} {"directory" ["name.txt"]}]] (test-util/make-file-tree! dir tree) (let [src (io/file dir "directory") tgt (io/file dir "another" "directory2")] (is (= [] (fs/copy-directory! src tgt {:target :keep :fail :silently}))) (is (= tree (test-util/file-tree dir))))))) (testing "Copy missing dir failing" (test-util/with-temp-dir! dir (let [tree [{"another" [{"directory2" ["name2.txt"]}]}]] ; no {"directory" [...]} (test-util/make-file-tree! dir tree) (let [src (io/file dir "directory") tgt (io/file dir "another" "directory2")] (is (thrown? FileAlreadyExistsException (fs/copy-directory! src tgt {:target :keep}))) (is (= tree (test-util/file-tree dir))))))) (testing "Copy missing dir failing silently" (test-util/with-temp-dir! dir (let [tree [{"another" [{"directory2" ["name2.txt"]}]}]] ; no {"directory" [...]} (test-util/make-file-tree! dir tree) (let [src (io/file dir "directory") tgt (io/file dir "another" "directory2")] (is (= [] (fs/move-directory! src tgt {:target :keep :fail :silently}))) (is (= tree (test-util/file-tree dir)))))))) (deftest various (testing "File move to dir" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir ["file.txt" {"directory" []}]) (let [src (io/file dir "file.txt") tgt (io/file dir "directory" "file.txt")] (is (= [[src tgt]] (fs/move-file! src tgt))) (is (= [{"directory" ["file.txt"]}] (test-util/file-tree dir)))))) (testing "File move to same dir" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir ["file.txt"]) (let [src (io/file dir "file.txt") tgt (io/file dir "file.txt")] (is (= [[src tgt]] (fs/move-file! src tgt))) (is (= ["file.txt"] (test-util/file-tree dir)))))) (testing "Directory rename" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir [{"old-name" ["file.txt"]}]) (is (= [[(io/file dir "old-name") (io/file dir "new-name")]] (fs/move-directory! (io/file dir "old-name") (io/file dir "new-name")))) (is (= [{"new-name" ["file.txt"]}] (test-util/file-tree dir))))) (testing "Directory move" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir [{"project" []} {"archive" []}]) (is (= [[(io/file dir "project") (io/file dir "archive" "project")]] (fs/move-directory! (io/file dir "project") (io/file dir "archive" "project")))) (is (= [{"archive" [{"project" []}]}] (test-util/file-tree dir))))) (testing "File overwrite" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir ["source.txt" "destination.txt"]) (let [src (io/file dir "source.txt") tgt (io/file dir "destination.txt") src-contents (slurp src)] (is (= [[src tgt]] (fs/move-file! src tgt))) (is (= ["destination.txt"] (test-util/file-tree dir))) (is (= src-contents (slurp tgt)))))))
id: 35679
;; Copyright 2020-2022 The Defold Foundation ;; Copyright 2014-2020 King ;; Copyright 2009-2014 <NAME>, <NAME> ;; Licensed under the Defold License version 1.0 (the "License"); you may not use ;; this file except in compliance with the License. ;; ;; You may obtain a copy of the License, together with FAQs at ;; https://www.defold.com/license ;; ;; Unless required by applicable law or agreed to in writing, software distributed ;; under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR ;; CONDITIONS OF ANY KIND, either express or implied. See the License for the ;; specific language governing permissions and limitations under the License. (ns editor.fs-test (:require [clojure.java.io :as io] [clojure.test :refer :all] [editor.fs :as fs] [integration.test-util :as test-util]) (:import [java.io File] [java.nio.file NoSuchFileException FileAlreadyExistsException])) (def ^:private file-test-tree [{"directory" ["dir.txt" {"subdirectory" ["sub.txt" {"subsubdirectory" ["subsub.txt"]}]}]} "root.txt"]) (def ^:private no-root-file-test-tree (subvec file-test-tree 0 1)) (def ^:private no-subdirectory-file-test-tree (assoc-in file-test-tree [0 "directory"] ["dir.txt"])) (defn- setup-delete-file-test [^File dir] (test-util/make-file-tree! dir file-test-tree)) (deftest delete-test (testing "Delete file" (test-util/with-temp-dir! dir (setup-delete-file-test dir) (is (= (io/file dir "root.txt") (fs/delete-file! (io/file dir "root.txt")))) (is (= no-root-file-test-tree (test-util/file-tree dir))))) (testing "Delete missing file" (test-util/with-temp-dir! dir (setup-delete-file-test dir) (is (= (io/file dir "non-existing") (fs/delete-file! (io/file dir "non-existing")))) (is (= file-test-tree (test-util/file-tree dir))))) (testing "Delete missing file failing" (test-util/with-temp-dir! dir (setup-delete-file-test dir) (is (thrown? NoSuchFileException (fs/delete-file! (io/file dir "non-existing") {:missing :fail}))) (is (= file-test-tree (test-util/file-tree dir))))) (testing "Delete missing file failing silently" (test-util/with-temp-dir! dir (setup-delete-file-test dir) (is (= nil (fs/delete-file! (io/file dir "non-existing") {:missing :fail :fail :silently}))) (is (= file-test-tree (test-util/file-tree dir))))) (testing "Delete dir" (test-util/with-temp-dir! dir (setup-delete-file-test dir) (let [target (io/file dir "directory" "subdirectory")] (is (= target (fs/delete-directory! target))) (is (= no-subdirectory-file-test-tree (test-util/file-tree dir)))))) (testing "Delete missing dir" (test-util/with-temp-dir! dir (setup-delete-file-test dir) (let [target (io/file dir "directory" "non-existing-subdirectory")] (is (= target (fs/delete-directory! target))) (is (= file-test-tree (test-util/file-tree dir)))))) (testing "Delete missing dir failing" (test-util/with-temp-dir! dir (setup-delete-file-test dir) (is (thrown? NoSuchFileException (fs/delete-directory! (io/file dir "non-existing-subdirectory") {:missing :fail}))) (is (= file-test-tree (test-util/file-tree dir))))) (testing "Delete missing dir failing silently" (test-util/with-temp-dir! dir (setup-delete-file-test dir) (is (= nil (fs/delete-directory! (io/file dir "non-existing-subdirectory") {:missing :fail :fail :silently})))))) (def ^:private silly-tree [{"a" [{"b" [{"b" [{"b" ["file.txt" {"k" ["also.txt"]}]}]} "file.txt"]}]}]) (deftest move-rename-test (testing "Rename file" (test-util/with-temp-dir! dir (test-util/make-file-tree! 
dir ["old-name.txt"]) (let [src (io/file dir "old-name.txt") tgt (io/file dir "new-name.txt")] (is (= [[src tgt]] (fs/move-file! src tgt))) (is (= ["new-name.txt"] (test-util/file-tree dir)))))) (testing "Rename file no-change" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir ["name.txt"]) (let [src (io/file dir "name.txt") tgt (io/file dir "name.txt")] (is (= [[src tgt]] (fs/move-file! src tgt))) (is (= ["name.txt"] (test-util/file-tree dir)))))) (testing "Rename file caps only" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir ["name.txt"]) (let [src (io/file dir "name.txt") tgt (io/file dir "Name.txt")] (is (= [[src tgt]] (fs/move-file! src tgt))) (is (= ["Name.txt"] (test-util/file-tree dir)))))) (testing "Rename file replace" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir ["also.txt" "name.txt"]) (let [src (io/file dir "name.txt") tgt (io/file dir "also.txt")] (is (= [[src tgt]] (fs/move-file! src tgt))) (is (= ["also.txt"] (test-util/file-tree dir)))))) (testing "Rename file replace dir" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir [{"cake" ["tosca.txt"]} "name.txt"]) (let [src (io/file dir "name.txt") tgt (io/file dir "cake")] (is (= [[src tgt]] (fs/move-file! src tgt))) (is (= ["cake"] (test-util/file-tree dir)))))) (testing "Rename file failing" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir ["also.txt" "name.txt"]) (let [src (io/file dir "name.txt") tgt (io/file dir "also.txt")] (is (thrown? FileAlreadyExistsException (fs/move-file! src tgt {:target :keep}))) (is (= ["also.txt" "name.txt"] (test-util/file-tree dir)))))) (testing "Rename file failing silently" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir ["also.txt" "name.txt"]) (let [src (io/file dir "name.txt") tgt (io/file dir "also.txt")] (is (= [] (fs/move-file! src tgt {:target :keep :fail :silently}))) (is (= ["also.txt" "name.txt"] (test-util/file-tree dir)))))) (testing "Rename missing file failing" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir ["also.txt"]) ; no name.txt (let [src (io/file dir "name.txt") tgt (io/file dir "also.txt")] (is (thrown? NoSuchFileException (fs/move-file! src tgt {:target :keep}))) (is (= ["also.txt"] (test-util/file-tree dir)))))) (testing "Rename missing file failing silently" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir ["also.txt"]) ; no name.txt (let [src (io/file dir "name.txt") tgt (io/file dir "also.txt")] (is (= [] (fs/move-file! src tgt {:target :keep :fail :silently}))) (is (= ["also.txt"] (test-util/file-tree dir)))))) (testing "Rename dir" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir [{"directory" ["name.txt"]}]) (let [src (io/file dir "directory") tgt (io/file dir "another")] (is (= [[src tgt]] (fs/move-directory! src tgt))) (is (= [{"another" ["name.txt"]}] (test-util/file-tree dir)))))) (testing "Rename dir no-change" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir [{"directory" ["name.txt"]}]) (let [src (io/file dir "directory") tgt (io/file dir "directory")] (is (= [[src tgt]] (fs/move-directory! src tgt))) (is (= [{"directory" ["name.txt"]}] (test-util/file-tree dir)))))) (testing "Rename dir caps only" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir [{"directory" ["name.txt"]}]) (let [src (io/file dir "directory") tgt (io/file dir "Directory")] (is (= [[src tgt]] (fs/move-directory! src tgt))) (is (= [{"Directory" ["name.txt"]}] (test-util/file-tree dir)))))) (testing "Rename dir replace" (test-util/with-temp-dir! 
dir (test-util/make-file-tree! dir [{"directory" ["name.txt"]} {"directory2" ["name2.txt"]}]) (let [src (io/file dir "directory") tgt (io/file dir "directory2")] (is (= [[src tgt]] (fs/move-directory! src tgt))) (is (= [{"directory2" ["name.txt"]}] (test-util/file-tree dir)))))) (testing "Rename dir replace file" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir [{"directory" ["file.txt"]} "cake"]) (let [src (io/file dir "directory") tgt (io/file dir "cake")] (is (= [[src tgt]] (fs/move-directory! src tgt))) (is (= [{"cake" ["file.txt"]}] (test-util/file-tree dir)))))) (testing "Rename dir failing" (test-util/with-temp-dir! dir (let [tree [{"another" ["file2.txt"]} {"directory" ["file.txt"]}]] (test-util/make-file-tree! dir tree) (let [src (io/file dir "directory") tgt (io/file dir "another")] (is (thrown? FileAlreadyExistsException (fs/move-directory! src tgt {:target :keep}))) (is (= tree (test-util/file-tree dir))))))) (testing "Rename dir failing silently" (test-util/with-temp-dir! dir (let [tree [{"another" ["file2.txt"]} {"directory" ["file.txt"]}]] (test-util/make-file-tree! dir tree) (let [src (io/file dir "directory") tgt (io/file dir "another")] (is (= [] (fs/move-directory! src tgt {:target :keep :fail :silently}))) (is (= tree (test-util/file-tree dir))))))) (testing "Rename missing dir failing" (test-util/with-temp-dir! dir (let [tree [{"another" ["file2.txt"]}]] ; no {"directory" [...]} (test-util/make-file-tree! dir tree) (let [src (io/file dir "directory") tgt (io/file dir "another")] (is (thrown? NoSuchFileException (fs/move-directory! src tgt))) (is (= tree (test-util/file-tree dir))))))) (testing "Rename missing dir failing silently" (test-util/with-temp-dir! dir (let [tree [{"another" ["file2.txt"]}]] ; no {"directory" [...]} (test-util/make-file-tree! dir tree) (let [src (io/file dir "directory") tgt (io/file dir "another")] (is (= [] (fs/move-directory! src tgt {:fail :silently}))) (is (= tree (test-util/file-tree dir)))))))) (deftest move-file-test (testing "Move file" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir [{"directory" ["also.txt"]} "name.txt"]) (let [src (io/file dir "name.txt") tgt (io/file dir "directory" "name.txt")] (is (= [[src tgt]] (fs/move-file! src tgt))) (is (= [{"directory" ["also.txt" "name.txt"]}] (test-util/file-tree dir)))))) (testing "Move file renaming" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir [{"directory" ["also.txt"]} "name.txt"]) (let [src (io/file dir "name.txt") tgt (io/file dir "directory" "new-name.txt")] (is (= [[src tgt]] (fs/move-file! src tgt))) (is (= [{"directory" ["also.txt" "new-name.txt"]}] (test-util/file-tree dir)))))) (testing "Move file replace" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir [{"directory" ["also.txt"]} "name.txt"]) (let [src (io/file dir "name.txt") tgt (io/file dir "directory" "also.txt")] (is (= [[src tgt]] (fs/move-file! src tgt))) (is (= [{"directory" ["also.txt"]}] (test-util/file-tree dir)))))) (testing "Move file failing" (test-util/with-temp-dir! dir (let [tree [{"directory" ["also.txt"]} "name.txt"]] (test-util/make-file-tree! dir tree) (let [src (io/file dir "name.txt") tgt (io/file dir "directory" "also.txt")] (is (thrown? FileAlreadyExistsException (fs/move-file! src tgt {:target :keep}))) (is (= tree (test-util/file-tree dir))))))) (testing "Move file failing silently" (test-util/with-temp-dir! dir (let [tree [{"directory" ["also.txt"]} "name.txt"]] (test-util/make-file-tree! 
dir tree) (let [src (io/file dir "name.txt") tgt (io/file dir "directory" "also.txt")] (is (= [] (fs/move-file! src tgt {:target :keep :fail :silently}))) (is (= tree (test-util/file-tree dir))))))) (testing "Move missing file failing" (test-util/with-temp-dir! dir (let [tree [{"directory" ["also.txt"]}]] ; no name.txt (test-util/make-file-tree! dir tree) (let [src (io/file dir "name.txt") tgt (io/file dir "directory" "name.txt")] (is (thrown? NoSuchFileException (fs/move-file! src tgt))) (is (= tree (test-util/file-tree dir))))))) (testing "Move missing file failing silently" (test-util/with-temp-dir! dir (let [tree [{"directory" ["also.txt"]}]] ; no name.txt (test-util/make-file-tree! dir tree) (let [src (io/file dir "name.txt") tgt (io/file dir "directory" "name.txt")] (is (= [] (fs/move-file! src tgt {:fail :silently}))) (is (= tree (test-util/file-tree dir)))))))) (deftest move-dir-test (testing "Move dir" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir [{"another" ["also.txt"]} {"directory" ["name.txt"]}]) (let [src (io/file dir "directory") tgt (io/file dir "another" "directory")] (is (= [[src tgt]] (fs/move-directory! src tgt))) (is (= [{"another" ["also.txt" {"directory" ["name.txt"]}]}] (test-util/file-tree dir)))))) (testing "Move dir renaming" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir [{"another" ["also.txt"]} {"directory" ["name.txt"]}]) (let [src (io/file dir "directory") tgt (io/file dir "another" "directory2")] (is (= [[src tgt]] (fs/move-directory! src tgt))) (is (= [{"another" ["also.txt" {"directory2" ["name.txt"]}]}] (test-util/file-tree dir)))))) (testing "Move dir replace" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir [{"another" [{"directory2" ["name2.txt"]}]} {"directory" ["name.txt"]}]) (let [src (io/file dir "directory") tgt (io/file dir "another" "directory2")] (is (= [[src tgt]] (fs/move-directory! src tgt))) (is (= [{"another" [{"directory2" ["name.txt"]}]}] (test-util/file-tree dir)))))) (testing "Move dir merging" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir [{"another" [{"directory2" ["name2.txt"]}]} {"directory" ["name.txt"]}]) (let [src (io/file dir "directory") tgt (io/file dir "another" "directory2")] (is (= [[src tgt]] (fs/move-directory! src tgt {:target :merge}))) (is (= [{"another" [{"directory2" ["name.txt" "name2.txt"]}]}] (test-util/file-tree dir)))))) (testing "Move dir failing" (test-util/with-temp-dir! dir (let [tree [{"another" [{"directory2" ["name2.txt"]}]} {"directory" ["name.txt"]}]] (test-util/make-file-tree! dir tree) (let [src (io/file dir "directory") tgt (io/file dir "another" "directory2")] (is (thrown? FileAlreadyExistsException (fs/move-directory! src tgt {:target :keep}))) (is (= tree (test-util/file-tree dir))))))) (testing "Move dir failing silently" (test-util/with-temp-dir! dir (let [tree [{"another" [{"directory2" ["name2.txt"]}]} {"directory" ["name.txt"]}]] (test-util/make-file-tree! dir tree) (let [src (io/file dir "directory") tgt (io/file dir "another" "directory2")] (is (= [] (fs/move-directory! src tgt {:target :keep :fail :silently}))) (is (= tree (test-util/file-tree dir))))))) (testing "Move missing dir failing" (test-util/with-temp-dir! dir (let [tree [{"another" [{"directory2" ["name2.txt"]}]}]] ; no {"directory" [...]} (test-util/make-file-tree! dir tree) (let [src (io/file dir "directory") tgt (io/file dir "another" "directory2")] (is (thrown? NoSuchFileException (fs/move-directory! 
src tgt {:target :keep}))) (is (= tree (test-util/file-tree dir))))))) (testing "Move missing dir failing silently" (test-util/with-temp-dir! dir (let [tree [{"another" [{"directory2" ["name2.txt"]}]}]] ; no {"directory" [...]} (test-util/make-file-tree! dir tree) (let [src (io/file dir "directory") tgt (io/file dir "another" "directory2")] (is (= [] (fs/move-directory! src tgt {:target :keep :fail :silently}))) (is (= tree (test-util/file-tree dir))))))) ;; Below tests assume this initial file tree: (def silly-tree above) ;; ;; . ;; └── a ;; └── b ;; β”œβ”€β”€ b ;; β”‚ └── b ;; β”‚ β”œβ”€β”€ file.txt ;; β”‚ └── k ;; β”‚ └── also.txt ;; └── file.txt ;; ;; ... Then moves directory /a/b/b up to /a using :keep :merge and :replace ;; ;; This will break if move-directory would do for instance "cp src trg; rm src" for :merge ;; or "rm trg; cp src trg rm src" for :replace (testing "Move dir replacing parent" ;; expect: ;; . ;; └── a ;; └── b ;; └── b ;; β”œβ”€β”€ file.txt ;; └── k ;; └── also.txt ;; (test-util/with-temp-dir! dir (test-util/make-file-tree! dir silly-tree) (let [src (io/file dir "a" "b" "b") tgt (io/file dir "a" "b")] (is (= [[src tgt]] (fs/move-directory! src tgt))) (is (= [{"a" [{"b" [{"b" ["file.txt" {"k" ["also.txt"]}]}]}]}] (test-util/file-tree dir)))))) (testing "Move dir merging with parent" ;; expect ;; . ;; └── a ;; └── b ;; β”œβ”€β”€ b ;; β”‚ β”œβ”€β”€ file.txt ;; β”‚ └── k ;; β”‚ └── also.txt ;; └── file.txt (test-util/with-temp-dir! dir (test-util/make-file-tree! dir silly-tree) (let [src (io/file dir "a" "b" "b") tgt (io/file dir "a" "b")] (is (= [[src tgt]] (fs/move-directory! src tgt {:target :merge}))) (is (= [{"a" [{"b" [{"b" ["file.txt" {"k" ["also.txt"]}]} "file.txt"]}]}] (test-util/file-tree dir))))))) (deftest copy-file-test (testing "Copy file" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir ["name.txt"]) (let [src (io/file dir "name.txt") tgt (io/file dir "name2.txt")] (is (= [[src tgt]] (fs/copy-file! src tgt))) (is (= ["name.txt" "name2.txt"] (test-util/file-tree dir)))))) (testing "Copy file no-change" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir ["name.txt"]) (let [src (io/file dir "name.txt") tgt (io/file dir "name.txt")] (is (= [[src tgt]] (fs/copy-file! src tgt))) (is (= ["name.txt"] (test-util/file-tree dir)))))) (testing "Copy file caps only" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir ["name.txt"]) (let [src (io/file dir "name.txt") tgt (io/file dir "Name.txt")] (if fs/case-sensitive? (do (is (= [[src tgt]] (fs/copy-file! src tgt))) (is (= ["Name.txt" "name.txt"] (test-util/file-tree dir)))) (do (is (= [[src src]] (fs/copy-file! src tgt))) (is (= ["name.txt"] (test-util/file-tree dir)))))))) (testing "Copy file replace" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir ["also.txt" "name.txt"]) (let [src (io/file dir "name.txt") tgt (io/file dir "also.txt")] (is (= [[src tgt]] (fs/copy-file! src tgt))) (is (= ["also.txt" "name.txt"] (test-util/file-tree dir))) (is (= (slurp src) (slurp tgt)))))) (testing "Copy file replace dir" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir [{"directory" ["also.txt"]} "name.txt"]) (let [src (io/file dir "name.txt") tgt (io/file dir "directory")] (is (= [[src tgt]] (fs/copy-file! src tgt))) (is (= ["directory" "name.txt"] (test-util/file-tree dir))) (is (= (slurp src) (slurp tgt)))))) (testing "Copy file failing" (test-util/with-temp-dir! dir (test-util/make-file-tree! 
dir ["also.txt" "name.txt"]) (let [src (io/file dir "name.txt") tgt (io/file dir "also.txt")] (is (thrown? FileAlreadyExistsException (fs/copy-file! src tgt {:target :keep}))) (is (= ["also.txt" "name.txt"] (test-util/file-tree dir))) (is (not= (slurp src) (slurp tgt)))))) (testing "Copy file failing silently" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir ["also.txt" "name.txt"]) (let [src (io/file dir "name.txt") tgt (io/file dir "also.txt")] (is (= [] (fs/copy-file! src tgt {:target :keep :fail :silently}))) (is (= ["also.txt" "name.txt"] (test-util/file-tree dir))) (is (not= (slurp src) (slurp tgt)))))) (testing "Copy missing file failing" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir ["also.txt"]) ; no name.txt (let [src (io/file dir "name.txt") tgt (io/file dir "also.txt")] (is (thrown? NoSuchFileException (fs/copy-file! src tgt {:target :keep}))) (is (= ["also.txt"] (test-util/file-tree dir)))))) (testing "Copy missing file failing silently" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir ["also.txt"]) ; no name.txt (let [src (io/file dir "name.txt") tgt (io/file dir "also.txt")] (is (= [] (fs/copy-file! src tgt {:target :keep :fail :silently}))) (is (= ["also.txt"] (test-util/file-tree dir))))))) (deftest copy-dir-test (testing "Copy dir" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir [{"directory" ["name.txt"]}]) (let [src (io/file dir "directory") tgt (io/file dir "directory2")] (is (= [[src tgt]] (fs/copy-directory! src tgt))) (is (= [{"directory" ["name.txt"]} {"directory2" ["name.txt"]}] (test-util/file-tree dir)))))) (testing "Copy dir no-change" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir [{"directory" ["name.txt"]}]) (let [src (io/file dir "directory") tgt (io/file dir "directory")] (is (= [[src tgt]] (fs/copy-directory! src tgt))) (is (= [{"directory" ["name.txt"]}] (test-util/file-tree dir)))))) (testing "Copy dir caps only" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir [{"directory" ["name.txt"]}]) (let [src (io/file dir "directory") tgt (io/file dir "Directory")] (if fs/case-sensitive? (do (is (= [[src tgt]] (fs/copy-directory! src tgt))) (is (= [{"Directory" ["name.txt"]} {"directory" ["name.txt"]}] (test-util/file-tree dir)))) (do (is (= [[src src]] (fs/copy-directory! src tgt))) (is (= [{"directory" ["name.txt"]}] (test-util/file-tree dir)))))))) (testing "Copy dir replace" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir [{"directory" ["name.txt"]} {"directory2" ["name2.txt"]}]) (let [src (io/file dir "directory") tgt (io/file dir "directory2")] (is (= [[src tgt]] (fs/copy-directory! src tgt))) (is (= [{"directory" ["name.txt"]} {"directory2" ["name.txt"]}] (test-util/file-tree dir)))))) (testing "Copy dir replace file" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir ["cake" {"directory" ["name.txt"]}]) (let [src (io/file dir "directory") tgt (io/file dir "cake")] (is (= [[src tgt]] (fs/copy-directory! src tgt))) (is (= [{"cake" ["name.txt"]} {"directory" ["name.txt"]}] (test-util/file-tree dir)))))) (testing "Copy dir merge" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir [{"another" [{"directory2" ["name2.txt"]}]} {"directory" ["name.txt"]}]) (let [src (io/file dir "directory") tgt (io/file dir "another" "directory2")] (is (= [[src tgt]] (fs/copy-directory! 
src tgt {:target :merge}))) (is (= [{"another" [{"directory2" ["name.txt" "name2.txt"]}]} {"directory" ["name.txt"]}] (test-util/file-tree dir)))))) (testing "Copy dir failing" (test-util/with-temp-dir! dir (let [tree [{"another" [{"directory2" ["name2.txt"]}]} {"directory" ["name.txt"]}]] (test-util/make-file-tree! dir tree) (let [src (io/file dir "directory") tgt (io/file dir "another" "directory2")] (is (thrown? FileAlreadyExistsException (fs/copy-directory! src tgt {:target :keep}))) (is (= tree (test-util/file-tree dir))))))) (testing "Copy dir failing silently" (test-util/with-temp-dir! dir (let [tree [{"another" [{"directory2" ["name2.txt"]}]} {"directory" ["name.txt"]}]] (test-util/make-file-tree! dir tree) (let [src (io/file dir "directory") tgt (io/file dir "another" "directory2")] (is (= [] (fs/copy-directory! src tgt {:target :keep :fail :silently}))) (is (= tree (test-util/file-tree dir))))))) (testing "Copy missing dir failing" (test-util/with-temp-dir! dir (let [tree [{"another" [{"directory2" ["name2.txt"]}]}]] ; no {"directory" [...]} (test-util/make-file-tree! dir tree) (let [src (io/file dir "directory") tgt (io/file dir "another" "directory2")] (is (thrown? FileAlreadyExistsException (fs/copy-directory! src tgt {:target :keep}))) (is (= tree (test-util/file-tree dir))))))) (testing "Copy missing dir failing silently" (test-util/with-temp-dir! dir (let [tree [{"another" [{"directory2" ["name2.txt"]}]}]] ; no {"directory" [...]} (test-util/make-file-tree! dir tree) (let [src (io/file dir "directory") tgt (io/file dir "another" "directory2")] (is (= [] (fs/move-directory! src tgt {:target :keep :fail :silently}))) (is (= tree (test-util/file-tree dir)))))))) (deftest various (testing "File move to dir" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir ["file.txt" {"directory" []}]) (let [src (io/file dir "file.txt") tgt (io/file dir "directory" "file.txt")] (is (= [[src tgt]] (fs/move-file! src tgt))) (is (= [{"directory" ["file.txt"]}] (test-util/file-tree dir)))))) (testing "File move to same dir" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir ["file.txt"]) (let [src (io/file dir "file.txt") tgt (io/file dir "file.txt")] (is (= [[src tgt]] (fs/move-file! src tgt))) (is (= ["file.txt"] (test-util/file-tree dir)))))) (testing "Directory rename" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir [{"old-name" ["file.txt"]}]) (is (= [[(io/file dir "old-name") (io/file dir "new-name")]] (fs/move-directory! (io/file dir "old-name") (io/file dir "new-name")))) (is (= [{"new-name" ["file.txt"]}] (test-util/file-tree dir))))) (testing "Directory move" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir [{"project" []} {"archive" []}]) (is (= [[(io/file dir "project") (io/file dir "archive" "project")]] (fs/move-directory! (io/file dir "project") (io/file dir "archive" "project")))) (is (= [{"archive" [{"project" []}]}] (test-util/file-tree dir))))) (testing "File overwrite" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir ["source.txt" "destination.txt"]) (let [src (io/file dir "source.txt") tgt (io/file dir "destination.txt") src-contents (slurp src)] (is (= [[src tgt]] (fs/move-file! src tgt))) (is (= ["destination.txt"] (test-util/file-tree dir))) (is (= src-contents (slurp tgt)))))))
modified: true
;; Copyright 2020-2022 The Defold Foundation ;; Copyright 2014-2020 King ;; Copyright 2009-2014 PI:NAME:<NAME>END_PI, PI:NAME:<NAME>END_PI ;; Licensed under the Defold License version 1.0 (the "License"); you may not use ;; this file except in compliance with the License. ;; ;; You may obtain a copy of the License, together with FAQs at ;; https://www.defold.com/license ;; ;; Unless required by applicable law or agreed to in writing, software distributed ;; under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR ;; CONDITIONS OF ANY KIND, either express or implied. See the License for the ;; specific language governing permissions and limitations under the License. (ns editor.fs-test (:require [clojure.java.io :as io] [clojure.test :refer :all] [editor.fs :as fs] [integration.test-util :as test-util]) (:import [java.io File] [java.nio.file NoSuchFileException FileAlreadyExistsException])) (def ^:private file-test-tree [{"directory" ["dir.txt" {"subdirectory" ["sub.txt" {"subsubdirectory" ["subsub.txt"]}]}]} "root.txt"]) (def ^:private no-root-file-test-tree (subvec file-test-tree 0 1)) (def ^:private no-subdirectory-file-test-tree (assoc-in file-test-tree [0 "directory"] ["dir.txt"])) (defn- setup-delete-file-test [^File dir] (test-util/make-file-tree! dir file-test-tree)) (deftest delete-test (testing "Delete file" (test-util/with-temp-dir! dir (setup-delete-file-test dir) (is (= (io/file dir "root.txt") (fs/delete-file! (io/file dir "root.txt")))) (is (= no-root-file-test-tree (test-util/file-tree dir))))) (testing "Delete missing file" (test-util/with-temp-dir! dir (setup-delete-file-test dir) (is (= (io/file dir "non-existing") (fs/delete-file! (io/file dir "non-existing")))) (is (= file-test-tree (test-util/file-tree dir))))) (testing "Delete missing file failing" (test-util/with-temp-dir! dir (setup-delete-file-test dir) (is (thrown? NoSuchFileException (fs/delete-file! (io/file dir "non-existing") {:missing :fail}))) (is (= file-test-tree (test-util/file-tree dir))))) (testing "Delete missing file failing silently" (test-util/with-temp-dir! dir (setup-delete-file-test dir) (is (= nil (fs/delete-file! (io/file dir "non-existing") {:missing :fail :fail :silently}))) (is (= file-test-tree (test-util/file-tree dir))))) (testing "Delete dir" (test-util/with-temp-dir! dir (setup-delete-file-test dir) (let [target (io/file dir "directory" "subdirectory")] (is (= target (fs/delete-directory! target))) (is (= no-subdirectory-file-test-tree (test-util/file-tree dir)))))) (testing "Delete missing dir" (test-util/with-temp-dir! dir (setup-delete-file-test dir) (let [target (io/file dir "directory" "non-existing-subdirectory")] (is (= target (fs/delete-directory! target))) (is (= file-test-tree (test-util/file-tree dir)))))) (testing "Delete missing dir failing" (test-util/with-temp-dir! dir (setup-delete-file-test dir) (is (thrown? NoSuchFileException (fs/delete-directory! (io/file dir "non-existing-subdirectory") {:missing :fail}))) (is (= file-test-tree (test-util/file-tree dir))))) (testing "Delete missing dir failing silently" (test-util/with-temp-dir! dir (setup-delete-file-test dir) (is (= nil (fs/delete-directory! (io/file dir "non-existing-subdirectory") {:missing :fail :fail :silently})))))) (def ^:private silly-tree [{"a" [{"b" [{"b" [{"b" ["file.txt" {"k" ["also.txt"]}]}]} "file.txt"]}]}]) (deftest move-rename-test (testing "Rename file" (test-util/with-temp-dir! dir (test-util/make-file-tree! 
dir ["old-name.txt"]) (let [src (io/file dir "old-name.txt") tgt (io/file dir "new-name.txt")] (is (= [[src tgt]] (fs/move-file! src tgt))) (is (= ["new-name.txt"] (test-util/file-tree dir)))))) (testing "Rename file no-change" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir ["name.txt"]) (let [src (io/file dir "name.txt") tgt (io/file dir "name.txt")] (is (= [[src tgt]] (fs/move-file! src tgt))) (is (= ["name.txt"] (test-util/file-tree dir)))))) (testing "Rename file caps only" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir ["name.txt"]) (let [src (io/file dir "name.txt") tgt (io/file dir "Name.txt")] (is (= [[src tgt]] (fs/move-file! src tgt))) (is (= ["Name.txt"] (test-util/file-tree dir)))))) (testing "Rename file replace" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir ["also.txt" "name.txt"]) (let [src (io/file dir "name.txt") tgt (io/file dir "also.txt")] (is (= [[src tgt]] (fs/move-file! src tgt))) (is (= ["also.txt"] (test-util/file-tree dir)))))) (testing "Rename file replace dir" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir [{"cake" ["tosca.txt"]} "name.txt"]) (let [src (io/file dir "name.txt") tgt (io/file dir "cake")] (is (= [[src tgt]] (fs/move-file! src tgt))) (is (= ["cake"] (test-util/file-tree dir)))))) (testing "Rename file failing" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir ["also.txt" "name.txt"]) (let [src (io/file dir "name.txt") tgt (io/file dir "also.txt")] (is (thrown? FileAlreadyExistsException (fs/move-file! src tgt {:target :keep}))) (is (= ["also.txt" "name.txt"] (test-util/file-tree dir)))))) (testing "Rename file failing silently" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir ["also.txt" "name.txt"]) (let [src (io/file dir "name.txt") tgt (io/file dir "also.txt")] (is (= [] (fs/move-file! src tgt {:target :keep :fail :silently}))) (is (= ["also.txt" "name.txt"] (test-util/file-tree dir)))))) (testing "Rename missing file failing" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir ["also.txt"]) ; no name.txt (let [src (io/file dir "name.txt") tgt (io/file dir "also.txt")] (is (thrown? NoSuchFileException (fs/move-file! src tgt {:target :keep}))) (is (= ["also.txt"] (test-util/file-tree dir)))))) (testing "Rename missing file failing silently" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir ["also.txt"]) ; no name.txt (let [src (io/file dir "name.txt") tgt (io/file dir "also.txt")] (is (= [] (fs/move-file! src tgt {:target :keep :fail :silently}))) (is (= ["also.txt"] (test-util/file-tree dir)))))) (testing "Rename dir" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir [{"directory" ["name.txt"]}]) (let [src (io/file dir "directory") tgt (io/file dir "another")] (is (= [[src tgt]] (fs/move-directory! src tgt))) (is (= [{"another" ["name.txt"]}] (test-util/file-tree dir)))))) (testing "Rename dir no-change" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir [{"directory" ["name.txt"]}]) (let [src (io/file dir "directory") tgt (io/file dir "directory")] (is (= [[src tgt]] (fs/move-directory! src tgt))) (is (= [{"directory" ["name.txt"]}] (test-util/file-tree dir)))))) (testing "Rename dir caps only" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir [{"directory" ["name.txt"]}]) (let [src (io/file dir "directory") tgt (io/file dir "Directory")] (is (= [[src tgt]] (fs/move-directory! src tgt))) (is (= [{"Directory" ["name.txt"]}] (test-util/file-tree dir)))))) (testing "Rename dir replace" (test-util/with-temp-dir! 
dir (test-util/make-file-tree! dir [{"directory" ["name.txt"]} {"directory2" ["name2.txt"]}]) (let [src (io/file dir "directory") tgt (io/file dir "directory2")] (is (= [[src tgt]] (fs/move-directory! src tgt))) (is (= [{"directory2" ["name.txt"]}] (test-util/file-tree dir)))))) (testing "Rename dir replace file" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir [{"directory" ["file.txt"]} "cake"]) (let [src (io/file dir "directory") tgt (io/file dir "cake")] (is (= [[src tgt]] (fs/move-directory! src tgt))) (is (= [{"cake" ["file.txt"]}] (test-util/file-tree dir)))))) (testing "Rename dir failing" (test-util/with-temp-dir! dir (let [tree [{"another" ["file2.txt"]} {"directory" ["file.txt"]}]] (test-util/make-file-tree! dir tree) (let [src (io/file dir "directory") tgt (io/file dir "another")] (is (thrown? FileAlreadyExistsException (fs/move-directory! src tgt {:target :keep}))) (is (= tree (test-util/file-tree dir))))))) (testing "Rename dir failing silently" (test-util/with-temp-dir! dir (let [tree [{"another" ["file2.txt"]} {"directory" ["file.txt"]}]] (test-util/make-file-tree! dir tree) (let [src (io/file dir "directory") tgt (io/file dir "another")] (is (= [] (fs/move-directory! src tgt {:target :keep :fail :silently}))) (is (= tree (test-util/file-tree dir))))))) (testing "Rename missing dir failing" (test-util/with-temp-dir! dir (let [tree [{"another" ["file2.txt"]}]] ; no {"directory" [...]} (test-util/make-file-tree! dir tree) (let [src (io/file dir "directory") tgt (io/file dir "another")] (is (thrown? NoSuchFileException (fs/move-directory! src tgt))) (is (= tree (test-util/file-tree dir))))))) (testing "Rename missing dir failing silently" (test-util/with-temp-dir! dir (let [tree [{"another" ["file2.txt"]}]] ; no {"directory" [...]} (test-util/make-file-tree! dir tree) (let [src (io/file dir "directory") tgt (io/file dir "another")] (is (= [] (fs/move-directory! src tgt {:fail :silently}))) (is (= tree (test-util/file-tree dir)))))))) (deftest move-file-test (testing "Move file" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir [{"directory" ["also.txt"]} "name.txt"]) (let [src (io/file dir "name.txt") tgt (io/file dir "directory" "name.txt")] (is (= [[src tgt]] (fs/move-file! src tgt))) (is (= [{"directory" ["also.txt" "name.txt"]}] (test-util/file-tree dir)))))) (testing "Move file renaming" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir [{"directory" ["also.txt"]} "name.txt"]) (let [src (io/file dir "name.txt") tgt (io/file dir "directory" "new-name.txt")] (is (= [[src tgt]] (fs/move-file! src tgt))) (is (= [{"directory" ["also.txt" "new-name.txt"]}] (test-util/file-tree dir)))))) (testing "Move file replace" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir [{"directory" ["also.txt"]} "name.txt"]) (let [src (io/file dir "name.txt") tgt (io/file dir "directory" "also.txt")] (is (= [[src tgt]] (fs/move-file! src tgt))) (is (= [{"directory" ["also.txt"]}] (test-util/file-tree dir)))))) (testing "Move file failing" (test-util/with-temp-dir! dir (let [tree [{"directory" ["also.txt"]} "name.txt"]] (test-util/make-file-tree! dir tree) (let [src (io/file dir "name.txt") tgt (io/file dir "directory" "also.txt")] (is (thrown? FileAlreadyExistsException (fs/move-file! src tgt {:target :keep}))) (is (= tree (test-util/file-tree dir))))))) (testing "Move file failing silently" (test-util/with-temp-dir! dir (let [tree [{"directory" ["also.txt"]} "name.txt"]] (test-util/make-file-tree! 
dir tree) (let [src (io/file dir "name.txt") tgt (io/file dir "directory" "also.txt")] (is (= [] (fs/move-file! src tgt {:target :keep :fail :silently}))) (is (= tree (test-util/file-tree dir))))))) (testing "Move missing file failing" (test-util/with-temp-dir! dir (let [tree [{"directory" ["also.txt"]}]] ; no name.txt (test-util/make-file-tree! dir tree) (let [src (io/file dir "name.txt") tgt (io/file dir "directory" "name.txt")] (is (thrown? NoSuchFileException (fs/move-file! src tgt))) (is (= tree (test-util/file-tree dir))))))) (testing "Move missing file failing silently" (test-util/with-temp-dir! dir (let [tree [{"directory" ["also.txt"]}]] ; no name.txt (test-util/make-file-tree! dir tree) (let [src (io/file dir "name.txt") tgt (io/file dir "directory" "name.txt")] (is (= [] (fs/move-file! src tgt {:fail :silently}))) (is (= tree (test-util/file-tree dir)))))))) (deftest move-dir-test (testing "Move dir" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir [{"another" ["also.txt"]} {"directory" ["name.txt"]}]) (let [src (io/file dir "directory") tgt (io/file dir "another" "directory")] (is (= [[src tgt]] (fs/move-directory! src tgt))) (is (= [{"another" ["also.txt" {"directory" ["name.txt"]}]}] (test-util/file-tree dir)))))) (testing "Move dir renaming" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir [{"another" ["also.txt"]} {"directory" ["name.txt"]}]) (let [src (io/file dir "directory") tgt (io/file dir "another" "directory2")] (is (= [[src tgt]] (fs/move-directory! src tgt))) (is (= [{"another" ["also.txt" {"directory2" ["name.txt"]}]}] (test-util/file-tree dir)))))) (testing "Move dir replace" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir [{"another" [{"directory2" ["name2.txt"]}]} {"directory" ["name.txt"]}]) (let [src (io/file dir "directory") tgt (io/file dir "another" "directory2")] (is (= [[src tgt]] (fs/move-directory! src tgt))) (is (= [{"another" [{"directory2" ["name.txt"]}]}] (test-util/file-tree dir)))))) (testing "Move dir merging" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir [{"another" [{"directory2" ["name2.txt"]}]} {"directory" ["name.txt"]}]) (let [src (io/file dir "directory") tgt (io/file dir "another" "directory2")] (is (= [[src tgt]] (fs/move-directory! src tgt {:target :merge}))) (is (= [{"another" [{"directory2" ["name.txt" "name2.txt"]}]}] (test-util/file-tree dir)))))) (testing "Move dir failing" (test-util/with-temp-dir! dir (let [tree [{"another" [{"directory2" ["name2.txt"]}]} {"directory" ["name.txt"]}]] (test-util/make-file-tree! dir tree) (let [src (io/file dir "directory") tgt (io/file dir "another" "directory2")] (is (thrown? FileAlreadyExistsException (fs/move-directory! src tgt {:target :keep}))) (is (= tree (test-util/file-tree dir))))))) (testing "Move dir failing silently" (test-util/with-temp-dir! dir (let [tree [{"another" [{"directory2" ["name2.txt"]}]} {"directory" ["name.txt"]}]] (test-util/make-file-tree! dir tree) (let [src (io/file dir "directory") tgt (io/file dir "another" "directory2")] (is (= [] (fs/move-directory! src tgt {:target :keep :fail :silently}))) (is (= tree (test-util/file-tree dir))))))) (testing "Move missing dir failing" (test-util/with-temp-dir! dir (let [tree [{"another" [{"directory2" ["name2.txt"]}]}]] ; no {"directory" [...]} (test-util/make-file-tree! dir tree) (let [src (io/file dir "directory") tgt (io/file dir "another" "directory2")] (is (thrown? NoSuchFileException (fs/move-directory! 
src tgt {:target :keep}))) (is (= tree (test-util/file-tree dir))))))) (testing "Move missing dir failing silently" (test-util/with-temp-dir! dir (let [tree [{"another" [{"directory2" ["name2.txt"]}]}]] ; no {"directory" [...]} (test-util/make-file-tree! dir tree) (let [src (io/file dir "directory") tgt (io/file dir "another" "directory2")] (is (= [] (fs/move-directory! src tgt {:target :keep :fail :silently}))) (is (= tree (test-util/file-tree dir))))))) ;; Below tests assume this initial file tree: (def silly-tree above) ;; ;; . ;; └── a ;; └── b ;; β”œβ”€β”€ b ;; β”‚ └── b ;; β”‚ β”œβ”€β”€ file.txt ;; β”‚ └── k ;; β”‚ └── also.txt ;; └── file.txt ;; ;; ... Then moves directory /a/b/b up to /a using :keep :merge and :replace ;; ;; This will break if move-directory would do for instance "cp src trg; rm src" for :merge ;; or "rm trg; cp src trg rm src" for :replace (testing "Move dir replacing parent" ;; expect: ;; . ;; └── a ;; └── b ;; └── b ;; β”œβ”€β”€ file.txt ;; └── k ;; └── also.txt ;; (test-util/with-temp-dir! dir (test-util/make-file-tree! dir silly-tree) (let [src (io/file dir "a" "b" "b") tgt (io/file dir "a" "b")] (is (= [[src tgt]] (fs/move-directory! src tgt))) (is (= [{"a" [{"b" [{"b" ["file.txt" {"k" ["also.txt"]}]}]}]}] (test-util/file-tree dir)))))) (testing "Move dir merging with parent" ;; expect ;; . ;; └── a ;; └── b ;; β”œβ”€β”€ b ;; β”‚ β”œβ”€β”€ file.txt ;; β”‚ └── k ;; β”‚ └── also.txt ;; └── file.txt (test-util/with-temp-dir! dir (test-util/make-file-tree! dir silly-tree) (let [src (io/file dir "a" "b" "b") tgt (io/file dir "a" "b")] (is (= [[src tgt]] (fs/move-directory! src tgt {:target :merge}))) (is (= [{"a" [{"b" [{"b" ["file.txt" {"k" ["also.txt"]}]} "file.txt"]}]}] (test-util/file-tree dir))))))) (deftest copy-file-test (testing "Copy file" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir ["name.txt"]) (let [src (io/file dir "name.txt") tgt (io/file dir "name2.txt")] (is (= [[src tgt]] (fs/copy-file! src tgt))) (is (= ["name.txt" "name2.txt"] (test-util/file-tree dir)))))) (testing "Copy file no-change" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir ["name.txt"]) (let [src (io/file dir "name.txt") tgt (io/file dir "name.txt")] (is (= [[src tgt]] (fs/copy-file! src tgt))) (is (= ["name.txt"] (test-util/file-tree dir)))))) (testing "Copy file caps only" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir ["name.txt"]) (let [src (io/file dir "name.txt") tgt (io/file dir "Name.txt")] (if fs/case-sensitive? (do (is (= [[src tgt]] (fs/copy-file! src tgt))) (is (= ["Name.txt" "name.txt"] (test-util/file-tree dir)))) (do (is (= [[src src]] (fs/copy-file! src tgt))) (is (= ["name.txt"] (test-util/file-tree dir)))))))) (testing "Copy file replace" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir ["also.txt" "name.txt"]) (let [src (io/file dir "name.txt") tgt (io/file dir "also.txt")] (is (= [[src tgt]] (fs/copy-file! src tgt))) (is (= ["also.txt" "name.txt"] (test-util/file-tree dir))) (is (= (slurp src) (slurp tgt)))))) (testing "Copy file replace dir" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir [{"directory" ["also.txt"]} "name.txt"]) (let [src (io/file dir "name.txt") tgt (io/file dir "directory")] (is (= [[src tgt]] (fs/copy-file! src tgt))) (is (= ["directory" "name.txt"] (test-util/file-tree dir))) (is (= (slurp src) (slurp tgt)))))) (testing "Copy file failing" (test-util/with-temp-dir! dir (test-util/make-file-tree! 
dir ["also.txt" "name.txt"]) (let [src (io/file dir "name.txt") tgt (io/file dir "also.txt")] (is (thrown? FileAlreadyExistsException (fs/copy-file! src tgt {:target :keep}))) (is (= ["also.txt" "name.txt"] (test-util/file-tree dir))) (is (not= (slurp src) (slurp tgt)))))) (testing "Copy file failing silently" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir ["also.txt" "name.txt"]) (let [src (io/file dir "name.txt") tgt (io/file dir "also.txt")] (is (= [] (fs/copy-file! src tgt {:target :keep :fail :silently}))) (is (= ["also.txt" "name.txt"] (test-util/file-tree dir))) (is (not= (slurp src) (slurp tgt)))))) (testing "Copy missing file failing" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir ["also.txt"]) ; no name.txt (let [src (io/file dir "name.txt") tgt (io/file dir "also.txt")] (is (thrown? NoSuchFileException (fs/copy-file! src tgt {:target :keep}))) (is (= ["also.txt"] (test-util/file-tree dir)))))) (testing "Copy missing file failing silently" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir ["also.txt"]) ; no name.txt (let [src (io/file dir "name.txt") tgt (io/file dir "also.txt")] (is (= [] (fs/copy-file! src tgt {:target :keep :fail :silently}))) (is (= ["also.txt"] (test-util/file-tree dir))))))) (deftest copy-dir-test (testing "Copy dir" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir [{"directory" ["name.txt"]}]) (let [src (io/file dir "directory") tgt (io/file dir "directory2")] (is (= [[src tgt]] (fs/copy-directory! src tgt))) (is (= [{"directory" ["name.txt"]} {"directory2" ["name.txt"]}] (test-util/file-tree dir)))))) (testing "Copy dir no-change" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir [{"directory" ["name.txt"]}]) (let [src (io/file dir "directory") tgt (io/file dir "directory")] (is (= [[src tgt]] (fs/copy-directory! src tgt))) (is (= [{"directory" ["name.txt"]}] (test-util/file-tree dir)))))) (testing "Copy dir caps only" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir [{"directory" ["name.txt"]}]) (let [src (io/file dir "directory") tgt (io/file dir "Directory")] (if fs/case-sensitive? (do (is (= [[src tgt]] (fs/copy-directory! src tgt))) (is (= [{"Directory" ["name.txt"]} {"directory" ["name.txt"]}] (test-util/file-tree dir)))) (do (is (= [[src src]] (fs/copy-directory! src tgt))) (is (= [{"directory" ["name.txt"]}] (test-util/file-tree dir)))))))) (testing "Copy dir replace" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir [{"directory" ["name.txt"]} {"directory2" ["name2.txt"]}]) (let [src (io/file dir "directory") tgt (io/file dir "directory2")] (is (= [[src tgt]] (fs/copy-directory! src tgt))) (is (= [{"directory" ["name.txt"]} {"directory2" ["name.txt"]}] (test-util/file-tree dir)))))) (testing "Copy dir replace file" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir ["cake" {"directory" ["name.txt"]}]) (let [src (io/file dir "directory") tgt (io/file dir "cake")] (is (= [[src tgt]] (fs/copy-directory! src tgt))) (is (= [{"cake" ["name.txt"]} {"directory" ["name.txt"]}] (test-util/file-tree dir)))))) (testing "Copy dir merge" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir [{"another" [{"directory2" ["name2.txt"]}]} {"directory" ["name.txt"]}]) (let [src (io/file dir "directory") tgt (io/file dir "another" "directory2")] (is (= [[src tgt]] (fs/copy-directory! 
src tgt {:target :merge}))) (is (= [{"another" [{"directory2" ["name.txt" "name2.txt"]}]} {"directory" ["name.txt"]}] (test-util/file-tree dir))))))
  (testing "Copy dir failing" (test-util/with-temp-dir! dir (let [tree [{"another" [{"directory2" ["name2.txt"]}]} {"directory" ["name.txt"]}]] (test-util/make-file-tree! dir tree) (let [src (io/file dir "directory") tgt (io/file dir "another" "directory2")] (is (thrown? FileAlreadyExistsException (fs/copy-directory! src tgt {:target :keep}))) (is (= tree (test-util/file-tree dir)))))))
  (testing "Copy dir failing silently" (test-util/with-temp-dir! dir (let [tree [{"another" [{"directory2" ["name2.txt"]}]} {"directory" ["name.txt"]}]] (test-util/make-file-tree! dir tree) (let [src (io/file dir "directory") tgt (io/file dir "another" "directory2")] (is (= [] (fs/copy-directory! src tgt {:target :keep :fail :silently}))) (is (= tree (test-util/file-tree dir)))))))
  (testing "Copy missing dir failing" (test-util/with-temp-dir! dir (let [tree [{"another" [{"directory2" ["name2.txt"]}]}]] ; no {"directory" [...]}
    (test-util/make-file-tree! dir tree) (let [src (io/file dir "directory") tgt (io/file dir "another" "directory2")] (is (thrown? FileAlreadyExistsException (fs/copy-directory! src tgt {:target :keep}))) (is (= tree (test-util/file-tree dir)))))))
  (testing "Copy missing dir failing silently" (test-util/with-temp-dir! dir (let [tree [{"another" [{"directory2" ["name2.txt"]}]}]] ; no {"directory" [...]}
    (test-util/make-file-tree! dir tree) (let [src (io/file dir "directory") tgt (io/file dir "another" "directory2")] (is (= [] (fs/copy-directory! src tgt {:target :keep :fail :silently}))) (is (= tree (test-util/file-tree dir))))))))

(deftest various
  (testing "File move to dir" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir ["file.txt" {"directory" []}]) (let [src (io/file dir "file.txt") tgt (io/file dir "directory" "file.txt")] (is (= [[src tgt]] (fs/move-file! src tgt))) (is (= [{"directory" ["file.txt"]}] (test-util/file-tree dir))))))
  (testing "File move to same dir" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir ["file.txt"]) (let [src (io/file dir "file.txt") tgt (io/file dir "file.txt")] (is (= [[src tgt]] (fs/move-file! src tgt))) (is (= ["file.txt"] (test-util/file-tree dir))))))
  (testing "Directory rename" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir [{"old-name" ["file.txt"]}]) (is (= [[(io/file dir "old-name") (io/file dir "new-name")]] (fs/move-directory! (io/file dir "old-name") (io/file dir "new-name")))) (is (= [{"new-name" ["file.txt"]}] (test-util/file-tree dir)))))
  (testing "Directory move" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir [{"project" []} {"archive" []}]) (is (= [[(io/file dir "project") (io/file dir "archive" "project")]] (fs/move-directory! (io/file dir "project") (io/file dir "archive" "project")))) (is (= [{"archive" [{"project" []}]}] (test-util/file-tree dir)))))
  (testing "File overwrite" (test-util/with-temp-dir! dir (test-util/make-file-tree! dir ["source.txt" "destination.txt"]) (let [src (io/file dir "source.txt") tgt (io/file dir "destination.txt") src-contents (slurp src)] (is (= [[src tgt]] (fs/move-file! src tgt))) (is (= ["destination.txt"] (test-util/file-tree dir))) (is (= src-contents (slurp tgt)))))))
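;; The option maps exercised by the tests above can be read as follows. This is only a
;; summary of the behavior demonstrated in this namespace, not full documentation of
;; editor.fs; the file names below are placeholders.
(comment
  ;; Default behavior replaces an existing target:
  (fs/move-file! (io/file "name.txt") (io/file "also.txt"))
  ;; :target :keep refuses to overwrite and throws FileAlreadyExistsException:
  (fs/copy-file! (io/file "name.txt") (io/file "also.txt") {:target :keep})
  ;; :target :merge merges directory contents instead of replacing the target directory:
  (fs/move-directory! (io/file "directory") (io/file "another" "directory2") {:target :merge})
  ;; :fail :silently suppresses the exception and returns an empty vector of [src tgt] pairs:
  (fs/copy-directory! (io/file "missing") (io/file "another") {:target :keep :fail :silently}))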
[ { "context": "/init-placeholder-data!)\n (user/create! \"user1\" \"password1\")\n (let [token (invoke-login {:username \"user1\" ", "end": 759, "score": 0.7882088422775269, "start": 750, "tag": "PASSWORD", "value": "password1" }, { "context": "ssword1\")\n (let [token (invoke-login {:username \"user1\" :password \"password1\"})\n [_ record] (post", "end": 807, "score": 0.9932942390441895, "start": 802, "tag": "USERNAME", "value": "user1" }, { "context": "token (invoke-login {:username \"user1\" :password \"password1\"})\n [_ record] (post-request \"/api/thought", "end": 829, "score": 0.9988659620285034, "start": 820, "tag": "PASSWORD", "value": "password1" }, { "context": "login {:username tdu/ph-username :password tdu/ph-password})\n [response item-2] (get-request (str", "end": 2957, "score": 0.7165058255195618, "start": 2949, "tag": "PASSWORD", "value": "password" }, { "context": "nvoke-login {:username tdu/ph-username :password tdu/ph-password})\n all-after (tdb/get-al", "end": 3368, "score": 0.7058231830596924, "start": 3366, "tag": "PASSWORD", "value": "du" }, { "context": "ke-login {:username tdu/ph-username :password tdu/ph-password})\n all-after (tdb/get-all-reminders)", "end": 3380, "score": 0.7797614932060242, "start": 3369, "tag": "PASSWORD", "value": "ph-password" }, { "context": "/init-placeholder-data!)\n (user/create! \"user1\" \"password1\")\n (let [token (invoke-login {:username \"user1\" ", "end": 3863, "score": 0.9712865352630615, "start": 3854, "tag": "PASSWORD", "value": "password1" }, { "context": "ssword1\")\n (let [token (invoke-login {:username \"user1\" :password \"password1\"})\n [_ record] (post", "end": 3911, "score": 0.9978905916213989, "start": 3906, "tag": "USERNAME", "value": "user1" }, { "context": "token (invoke-login {:username \"user1\" :password \"password1\"})\n [_ record] (post-request \"/api/thought", "end": 3933, "score": 0.9992657899856567, "start": 3924, "tag": "PASSWORD", "value": "password1" }, { "context": "/init-placeholder-data!)\n (user/create! \"user1\" \"password1\")\n (let [token (invoke-login {:username \"user1\" ", "end": 4829, "score": 0.9955393075942993, "start": 4820, "tag": "PASSWORD", "value": "password1" }, { "context": "ssword1\")\n (let [token (invoke-login {:username \"user1\" :password \"password1\"})\n [_ record] (post", "end": 4877, "score": 0.9969226121902466, "start": 4872, "tag": "USERNAME", "value": "user1" }, { "context": "token (invoke-login {:username \"user1\" :password \"password1\"})\n [_ record] (post-request \"/api/thought", "end": 4899, "score": 0.9993141293525696, "start": 4890, "tag": "PASSWORD", "value": "password1" }, { "context": " invalid-token (invoke-login {:username tdu/ph-username :password tdu/ph-password})\n ", "end": 6993, "score": 0.8925710916519165, "start": 6990, "tag": "USERNAME", "value": "tdu" }, { "context": "(invoke-login {:username tdu/ph-username :password tdu/ph-password})\n [r-updated r-empty] (patch-request ", "end": 7031, "score": 0.8892338871955872, "start": 7016, "tag": "PASSWORD", "value": "tdu/ph-password" }, { "context": " Test with a single user\n (user/create! 
\"user1\" \"password1\")\n (let [token (invoke-login {:username ", "end": 7628, "score": 0.9746489524841309, "start": 7619, "tag": "PASSWORD", "value": "password1" }, { "context": ")\n (let [token (invoke-login {:username \"user1\" :password \"password1\"})\n ;; Test thought ", "end": 7684, "score": 0.9604769945144653, "start": 7679, "tag": "USERNAME", "value": "user1" }, { "context": " (invoke-login {:username \"user1\" :password \"password1\"})\n ;; Test thought and reminders\n ", "end": 7706, "score": 0.99933922290802, "start": 7697, "tag": "PASSWORD", "value": "password1" }, { "context": " (let [other-token (invoke-login {:username \"user2\" :password \"password\"})\n [response rem", "end": 10842, "score": 0.9918555021286011, "start": 10837, "tag": "USERNAME", "value": "user2" }, { "context": "token (invoke-login {:username \"user2\" :password \"password\"})\n [response reminders] (get-request ", "end": 10863, "score": 0.9993419647216797, "start": 10855, "tag": "PASSWORD", "value": "password" }, { "context": " Test with a single user\n (user/create! \"user1\" \"password1\")\n (let [token (invoke-login {:username ", "end": 11562, "score": 0.9621951580047607, "start": 11553, "tag": "PASSWORD", "value": "password1" }, { "context": ")\n (let [token (invoke-login {:username \"user1\" :password \"password1\"})\n ;; Test thought ", "end": 11618, "score": 0.9984457492828369, "start": 11613, "tag": "USERNAME", "value": "user1" }, { "context": " (invoke-login {:username \"user1\" :password \"password1\"})\n ;; Test thought and reminders\n ", "end": 11640, "score": 0.9990108013153076, "start": 11631, "tag": "PASSWORD", "value": "password1" }, { "context": " (let [other-token (invoke-login {:username \"user2\" :password \"password\"})\n rem-before (", "end": 14484, "score": 0.9881756901741028, "start": 14479, "tag": "USERNAME", "value": "user2" }, { "context": "token (invoke-login {:username \"user2\" :password \"password\"})\n rem-before (reminder/get-by-id (:", "end": 14505, "score": 0.9992693066596985, "start": 14497, "tag": "PASSWORD", "value": "password" } ]
;; test/clj/memento/test/routes/api/reminder.clj (ricardojmendez/memento)
(ns memento.test.routes.api.reminder (:require [clojure.test :refer :all] [clj-time.coerce :as c] [clj-time.core :as t] [memento.handler :refer [app]] [memento.db.user :as user] [memento.test.db.user :as tdu] [memento.test.db.core :as tdb] [memento.test.routes.helpers :refer [patch-request post-request get-request put-request del-request invoke-login]] [mount.core :as mount] [memento.db.reminder :as reminder])) (use-fixtures :once (fn [f] (mount/start #'memento.config/env #'memento.db.core/*db*) (f))) ;;;; ;;;; Tests ;;;; (deftest test-add-reminder (tdu/init-placeholder-data!) (user/create! "user1" "password1") (let [token (invoke-login {:username "user1" :password "password1"}) [_ record] (post-request "/api/thoughts" {:thought "Just a thought"} token)] ;; Verify the basics (is (string? token)) (is (map? record)) ;; On to the tests (testing "Attempting to add a reminder without a token results in a 400" (let [[response _] (post-request "/api/reminder" {:thought-id (:id record) :type-id "spaced"} nil)] (is (= 400 (:status response))))) (testing "We can add a new reminder to a thought" (let [[response item] (post-request "/api/reminders" {:thought-id (:id record) :type-id "spaced"} token)] (is (= 201 (:status response))) (is (= "application/transit+json" (get-in response [:headers "Content-Type"]))) (is (map? item)) (is (:id item)) (is (= "spaced" (:type-id item))) (is (= 4 (count (get-in item [:properties :days])))) (is (zero? (get-in item [:properties :day-idx]))) (is (= (str "http://localhost/api/reminders/" (:id item)) (get-in response [:headers "Location"]))) )) (testing "After adding a reminder, we can retrieve it" (let [[_ item] (post-request "/api/reminders" {:thought-id (:id record) :type-id "spaced"} token) [response item-2] (get-request (str "/api/reminders/" (:id item)) nil token)] (is (= 200 (:status response))) (is (= "application/transit+json" (get-in response [:headers "Content-Type"]))) (is (= item item-2)))) (testing "If we query for thoughts with a reminder, we get a single thought with the two reminders we created" (let [[_ response] (get-request "/api/thoughts" nil token)] (is (= 1 (:total response) (:pages response) (count (:results response)))) (is (= 2 (count (:reminders (first (:results response)))))))) ;; Security concerns (testing "Trying to retrieve a reminder from someone other than the owner fails" (let [[_ item] (post-request "/api/reminders" {:thought-id (:id record) :type-id "spaced"} token) other-token (invoke-login {:username tdu/ph-username :password tdu/ph-password}) [response item-2] (get-request (str "/api/reminders/" (:id item)) nil other-token)] (is (= 404 (:status response))) (is (some? item)) (is (nil? item-2)))) (testing "Trying to add a reminder to a thought by someone other than the owner fails" (let [all-before (tdb/get-all-reminders) other-token (invoke-login {:username tdu/ph-username :password tdu/ph-password}) all-after (tdb/get-all-reminders) [response item] (post-request "/api/reminders" {:thought-id (:id record) :type-id "spaced"} other-token)] (is (= 404 (:status response))) (is (nil? item)) ;; Ensure not only we 404'd, but there weren't any actual changes (is (= 3 (count all-before))) (is (= all-before all-after)))))) (deftest test-delete-thought (tdu/init-placeholder-data!) (user/create! 
"user1" "password1") (let [token (invoke-login {:username "user1" :password "password1"}) [_ record] (post-request "/api/thoughts" {:thought "Just a thought"} token)] ;; On to the tests (testing "We can delete a thought with reminders" (let [[_ reminder] (post-request "/api/reminders" {:thought-id (:id record) :type-id "spaced"} token) [deleted _] (del-request "/api/thoughts" (:id record) token) ; Query post-delete [r-del-thought thoughts-after-delete] (get-request "/api/thoughts" nil token) [r-del-reminder reminder-after-delete] (get-request (str "/api/reminders/" (:id reminder)) nil token)] (is reminder) (is (= 204 (:status deleted))) (is (= 404 (:status r-del-reminder))) (is (zero? (:total thoughts-after-delete))) (is (empty? reminder-after-delete)) )))) (deftest test-patch-next-date (tdu/init-placeholder-data!) (user/create! "user1" "password1") (let [token (invoke-login {:username "user1" :password "password1"}) [_ record] (post-request "/api/thoughts" {:thought "Just a thought"} token)] ;; Verify the basics (is (string? token)) (is (map? record)) ;; On to the tests (testing "We can set a nil date for an existing reminder" (let [[r-initial initial] (post-request "/api/reminders" {:thought-id (:id record) :type-id "spaced"} token) [r-updated r-empty] (patch-request "/api/reminders" (:id initial) {:next-date nil} token) [_ updated] (get-request (str "/api/reminders/" (:id initial)) nil token) ] (is (= 201 (:status r-initial))) (is (= 204 (:status r-updated))) (is (empty? r-empty)) ; Patch returns no content ;; Both reminders should be the same, other than the next_date (is (= (dissoc initial :next-date) (dissoc updated :next-date))) (is (nil? (:next-date updated))) )) (testing "We can set a string date for an existing reminder" (let [[r-initial initial] (post-request "/api/reminders" {:thought-id (:id record) :type-id "spaced"} token) [r-updated r-empty] (patch-request "/api/reminders" (:id initial) {:next-date "2017-01-01"} token) [_ updated] (get-request (str "/api/reminders/" (:id initial)) nil token) ] ;; TODO: Expand tests, just verifying the basics work right now, since the API may change (is (= 201 (:status r-initial))) (is (= 204 (:status r-updated))) (is (empty? r-empty)) ; Patch returns no content ;; Both reminders should be the same, other than the next_date (is (= (dissoc initial :next-date) (dissoc updated :next-date))) (is (= (read-string "#inst \"2017-01-01\"") (:next-date updated))) )) (testing "Trying to set the date from someone other than the owner fails" (let [[r-initial initial] (post-request "/api/reminders" {:thought-id (:id record) :type-id "spaced"} token) invalid-token (invoke-login {:username tdu/ph-username :password tdu/ph-password}) [r-updated r-empty] (patch-request "/api/reminders" (:id initial) {:next-date "2017-01-01"} invalid-token) [_ updated] (get-request (str "/api/reminders/" (:id initial)) nil token) ] (is (= 201 (:status r-initial))) (is (= 404 (:status r-updated))) (is (empty? r-empty)) ; Patch returns no content ;; Nothing should have changed (is (= initial updated)))))) (deftest test-get-active-reminders (tdu/init-placeholder-data!) ;; Test with a single user (user/create! 
"user1" "password1") (let [token (invoke-login {:username "user1" :password "password1"}) ;; Test thought and reminders [_ thought-1] (post-request "/api/thoughts" {:thought "Just a thought"} token) [_ thought-2] (post-request "/api/thoughts" {:thought "Another thought"} token) [_ rem-1-1] (post-request "/api/reminders" {:thought-id (:id thought-1) :type-id "spaced"} token) [_ rem-1-2] (post-request "/api/reminders" {:thought-id (:id thought-1) :type-id "spaced"} token) [_ rem-2] (post-request "/api/reminders" {:thought-id (:id thought-2) :type-id "spaced"} token) ;; Some test timestamps minus-2s (c/to-date (t/plus (t/now) (t/seconds -2))) minus-1s (c/to-date (t/plus (t/now) (t/seconds -1))) in-10m (c/to-date (t/plus (t/now) (t/minutes 10))) ;; We get the thought description when returning the reminders, so ;; let's define a function to remove it clear-thought #(dissoc % :thought) ] ;; Verify the basics (is (string? token)) (doseq [item [thought-1 thought-2 rem-1-1 rem-1-2 rem-2]] (is (map? item) (str "Item should be a map " item))) ;; On to the tests (testing "There are no pending reminders initially" (let [[response reminders] (get-request "/api/reminders" nil token)] (is (= 200 (:status response))) (is (empty? reminders)))) (testing "A reminder shows up as pending if its next_date is in the past" ;; Change the next reminder dates (is (= 1 (reminder/update-reminder-date! (:id rem-1-2) minus-1s (:properties rem-1-2)))) (is (= 1 (reminder/update-reminder-date! (:id rem-1-1) in-10m (:properties rem-1-1)))) (let [rem-1-2 (reminder/get-by-id (:id rem-1-2)) ; Reload since we changed the date [response r-list] (get-request "/api/reminders" nil token)] (is (= 200 (:status response))) (is (= [rem-1-2] (map clear-thought r-list))) ; Reminder list includes the thought (is (= (:thought thought-1) (:thought (first r-list)))) )) (testing "Reminders are returned in next_date order" (is (= 1 (reminder/update-reminder-date! (:id rem-2) minus-2s (:properties rem-2)))) (let [rem-1-2 (reminder/get-by-id (:id rem-1-2)) ; Reload since we changed the date rem-2 (reminder/get-by-id (:id rem-2)) [response r-list] (get-request "/api/reminders" nil token)] (is (= 200 (:status response))) (is (= [rem-2 rem-1-2] (map clear-thought r-list))) ; Reminder list includes the thought (is (= (map :thought [thought-2 thought-1]) (map :thought r-list))))) (testing "A different user does not get any pending reminders" (user/create! "user2" "password") (let [other-token (invoke-login {:username "user2" :password "password"}) [response reminders] (get-request "/api/reminders" nil other-token)] (is (string? other-token)) ;; The call itself succeeded, since it's a valid user, but there are no reminders (is (= 200 (:status response))) (is (empty? reminders)))) (testing "An invalid token throws up an error" (let [[response result] (get-request "/api/reminders" nil "invalid")] ;; The call itself succeeded, since it's a valid user, but there are no reminders (is (= 401 (:status response))) (is (:error result)))))) (deftest test-mark-as-viewed (tdu/init-placeholder-data!) ;; Test with a single user (user/create! 
"user1" "password1") (let [token (invoke-login {:username "user1" :password "password1"}) ;; Test thought and reminders [_ thought-1] (post-request "/api/thoughts" {:thought "Just a thought"} token) [_ thought-2] (post-request "/api/thoughts" {:thought "Another thought"} token) [_ rem-1] (post-request "/api/reminders" {:thought-id (:id thought-1) :type-id "spaced"} token) [_ rem-2] (post-request "/api/reminders" {:thought-id (:id thought-2) :type-id "spaced"} token) ;; Some test timestamps minus-2s (c/to-date (t/plus (t/now) (t/seconds -2))) minus-1s (c/to-date (t/plus (t/now) (t/seconds -1))) ;; Set both reminders to have their next reminder date ready and reload _ (reminder/update-reminder-date! (:id rem-1) minus-1s (:properties rem-1)) _ (reminder/update-reminder-date! (:id rem-2) minus-2s (:properties rem-2)) rem-1 (reminder/get-by-id (:id rem-1)) rem-2 (reminder/get-by-id (:id rem-2)) ;; We get the thought description when returning the reminders, so ;; let's define a function to remove it clear-thought #(dissoc % :thought)] ;; Verify the basics (is (string? token)) (doseq [item [thought-1 thought-2 rem-1 rem-2]] (is (map? item) (str "Item should be a map " item))) ;; On to the tests (testing "All reminders are pending initially" (let [[response r-list] (get-request "/api/reminders" nil token)] (is (= 200 (:status response))) (is (= [rem-2 rem-1] (map clear-thought r-list))))) (testing "Marking a reminder as viewed removes it from the list" (let [[response post-result] (post-request (str "/api/reminders/viewed/" (:id rem-1)) nil token) [_ r-list] (get-request "/api/reminders" nil token) new-rem-1 (reminder/get-by-id (:id rem-1))] ;; API call returns what we expect (is (= 200 (:status response))) (is (= 1 post-result)) ;; Reminder should no longer show up as pending (is (= [rem-2] (map clear-thought r-list))) ;; The reminder itself was updated (is (not= rem-1 new-rem-1)) (is (= (dissoc rem-1 :next-date :properties) (dissoc new-rem-1 :next-date :properties))) (is (t/after? (c/to-date-time (:next-date new-rem-1)) (c/to-date-time (:next-date rem-1)))) (is (= 1 (get-in new-rem-1 [:properties :day-idx]))))) (testing "A different user cannot mark the reminder as viewed" (user/create! "user2" "password") (let [other-token (invoke-login {:username "user2" :password "password"}) rem-before (reminder/get-by-id (:id rem-1)) [response post-result] (post-request (str "/api/reminders/viewed/" (:id rem-1)) nil other-token) rem-after (reminder/get-by-id (:id rem-1))] (is (string? other-token)) ;; The call itself succeeded, since it's a valid user, but there are no reminders (is (= 404 (:status response))) (is (empty? post-result)) (is (= rem-before rem-after)))) (testing "Calling the function repeatedly keeps moving forward the date and increasing the day index" (let [day-count (count (get-in rem-1 [:properties :days])) ;; Will call the function multiple times on purpose to ensure it works even after the reminder has no more repetitions time-series (for [i (range (+ 3 day-count))] (let [[response post-result] (post-request (str "/api/reminders/viewed/" (:id rem-1)) nil token) new-rem (reminder/get-by-id (:id rem-1))] {:status (:status response) :result post-result :index i :next-date (:next-date new-rem) :day-idx (get-in new-rem [:properties :day-idx])} ))] (doseq [item time-series] ;; We will always return that we updated an existing reminder, even if we didn't move it forward (is (= 200 (:status item))) (is (= 1 (:result item))) (if (< (:day-idx item) day-count) (is (some? 
(:next-date item)) (str "Expected a date on " item)) (do (is (nil? (:next-date item)) (str "Did not expect a date for " item)) (is (= day-count (:day-idx item)) (str "Did not expect the day index to move after " day-count)))) ) ))
  (testing "Marking a legacy reminder as viewed adds a schedule and moves its date forward" (let [_ (reminder/update-reminder-date! (:id rem-1) minus-1s nil) legacy (reminder/get-by-id (:id rem-1)) [response post-result] (post-request (str "/api/reminders/viewed/" (:id rem-1)) nil token) [_ r-list] (get-request "/api/reminders" nil token) new-rem-1 (reminder/get-by-id (:id rem-1))]
    ;; Ensure our setup went as expected
    (is (nil? (:properties legacy))) (is (= minus-1s (:next-date legacy)))
    ;; API call returns what we expect
    (is (= 200 (:status response))) (is (= 1 post-result))
    ;; Reminder does not show up as pending
    (is (= [rem-2] (map clear-thought r-list)))
    ;; The call configured the reminder for spaced repetition and moved the date
    ;; forward, but left it at the first index
    (is (zero? (get-in new-rem-1 [:properties :day-idx]))) (is (t/after? (c/to-date-time (:next-date new-rem-1)) (c/to-date-time minus-1s))) (is (< 0 (count (get-in new-rem-1 [:properties :days])))))) ))

;; TODO: Tests for
;; - Mark a reminder as expired
;; - Get reminders for a thought
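;; A minimal sketch for the second TODO above ("Get reminders for a thought"). It sticks to
;; routes already exercised in this namespace ("/api/thoughts" and "/api/reminders") and to
;; the :reminders key returned with each thought, rather than assuming a dedicated
;; reminders-for-a-thought endpoint; treat it as a starting point, not as a description of
;; the actual API.
(deftest test-get-reminders-for-thought-sketch
  (tdu/init-placeholder-data!)
  (user/create! "user1" "password1")
  (let [token       (invoke-login {:username "user1" :password "password1"})
        [_ thought] (post-request "/api/thoughts" {:thought "Thought with reminders"} token)
        _           (post-request "/api/reminders" {:thought-id (:id thought) :type-id "spaced"} token)
        _           (post-request "/api/reminders" {:thought-id (:id thought) :type-id "spaced"} token)
        [_ listing] (get-request "/api/thoughts" nil token)]
    ;; The single thought we created should come back with both reminders attached
    (is (= 1 (:total listing)))
    (is (= 2 (count (:reminders (first (:results listing))))))))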
8684
(ns memento.test.routes.api.reminder (:require [clojure.test :refer :all] [clj-time.coerce :as c] [clj-time.core :as t] [memento.handler :refer [app]] [memento.db.user :as user] [memento.test.db.user :as tdu] [memento.test.db.core :as tdb] [memento.test.routes.helpers :refer [patch-request post-request get-request put-request del-request invoke-login]] [mount.core :as mount] [memento.db.reminder :as reminder])) (use-fixtures :once (fn [f] (mount/start #'memento.config/env #'memento.db.core/*db*) (f))) ;;;; ;;;; Tests ;;;; (deftest test-add-reminder (tdu/init-placeholder-data!) (user/create! "user1" "<PASSWORD>") (let [token (invoke-login {:username "user1" :password "<PASSWORD>"}) [_ record] (post-request "/api/thoughts" {:thought "Just a thought"} token)] ;; Verify the basics (is (string? token)) (is (map? record)) ;; On to the tests (testing "Attempting to add a reminder without a token results in a 400" (let [[response _] (post-request "/api/reminder" {:thought-id (:id record) :type-id "spaced"} nil)] (is (= 400 (:status response))))) (testing "We can add a new reminder to a thought" (let [[response item] (post-request "/api/reminders" {:thought-id (:id record) :type-id "spaced"} token)] (is (= 201 (:status response))) (is (= "application/transit+json" (get-in response [:headers "Content-Type"]))) (is (map? item)) (is (:id item)) (is (= "spaced" (:type-id item))) (is (= 4 (count (get-in item [:properties :days])))) (is (zero? (get-in item [:properties :day-idx]))) (is (= (str "http://localhost/api/reminders/" (:id item)) (get-in response [:headers "Location"]))) )) (testing "After adding a reminder, we can retrieve it" (let [[_ item] (post-request "/api/reminders" {:thought-id (:id record) :type-id "spaced"} token) [response item-2] (get-request (str "/api/reminders/" (:id item)) nil token)] (is (= 200 (:status response))) (is (= "application/transit+json" (get-in response [:headers "Content-Type"]))) (is (= item item-2)))) (testing "If we query for thoughts with a reminder, we get a single thought with the two reminders we created" (let [[_ response] (get-request "/api/thoughts" nil token)] (is (= 1 (:total response) (:pages response) (count (:results response)))) (is (= 2 (count (:reminders (first (:results response)))))))) ;; Security concerns (testing "Trying to retrieve a reminder from someone other than the owner fails" (let [[_ item] (post-request "/api/reminders" {:thought-id (:id record) :type-id "spaced"} token) other-token (invoke-login {:username tdu/ph-username :password tdu/ph-<PASSWORD>}) [response item-2] (get-request (str "/api/reminders/" (:id item)) nil other-token)] (is (= 404 (:status response))) (is (some? item)) (is (nil? item-2)))) (testing "Trying to add a reminder to a thought by someone other than the owner fails" (let [all-before (tdb/get-all-reminders) other-token (invoke-login {:username tdu/ph-username :password t<PASSWORD>/<PASSWORD>}) all-after (tdb/get-all-reminders) [response item] (post-request "/api/reminders" {:thought-id (:id record) :type-id "spaced"} other-token)] (is (= 404 (:status response))) (is (nil? item)) ;; Ensure not only we 404'd, but there weren't any actual changes (is (= 3 (count all-before))) (is (= all-before all-after)))))) (deftest test-delete-thought (tdu/init-placeholder-data!) (user/create! 
"user1" "<PASSWORD>") (let [token (invoke-login {:username "user1" :password "<PASSWORD>"}) [_ record] (post-request "/api/thoughts" {:thought "Just a thought"} token)] ;; On to the tests (testing "We can delete a thought with reminders" (let [[_ reminder] (post-request "/api/reminders" {:thought-id (:id record) :type-id "spaced"} token) [deleted _] (del-request "/api/thoughts" (:id record) token) ; Query post-delete [r-del-thought thoughts-after-delete] (get-request "/api/thoughts" nil token) [r-del-reminder reminder-after-delete] (get-request (str "/api/reminders/" (:id reminder)) nil token)] (is reminder) (is (= 204 (:status deleted))) (is (= 404 (:status r-del-reminder))) (is (zero? (:total thoughts-after-delete))) (is (empty? reminder-after-delete)) )))) (deftest test-patch-next-date (tdu/init-placeholder-data!) (user/create! "user1" "<PASSWORD>") (let [token (invoke-login {:username "user1" :password "<PASSWORD>"}) [_ record] (post-request "/api/thoughts" {:thought "Just a thought"} token)] ;; Verify the basics (is (string? token)) (is (map? record)) ;; On to the tests (testing "We can set a nil date for an existing reminder" (let [[r-initial initial] (post-request "/api/reminders" {:thought-id (:id record) :type-id "spaced"} token) [r-updated r-empty] (patch-request "/api/reminders" (:id initial) {:next-date nil} token) [_ updated] (get-request (str "/api/reminders/" (:id initial)) nil token) ] (is (= 201 (:status r-initial))) (is (= 204 (:status r-updated))) (is (empty? r-empty)) ; Patch returns no content ;; Both reminders should be the same, other than the next_date (is (= (dissoc initial :next-date) (dissoc updated :next-date))) (is (nil? (:next-date updated))) )) (testing "We can set a string date for an existing reminder" (let [[r-initial initial] (post-request "/api/reminders" {:thought-id (:id record) :type-id "spaced"} token) [r-updated r-empty] (patch-request "/api/reminders" (:id initial) {:next-date "2017-01-01"} token) [_ updated] (get-request (str "/api/reminders/" (:id initial)) nil token) ] ;; TODO: Expand tests, just verifying the basics work right now, since the API may change (is (= 201 (:status r-initial))) (is (= 204 (:status r-updated))) (is (empty? r-empty)) ; Patch returns no content ;; Both reminders should be the same, other than the next_date (is (= (dissoc initial :next-date) (dissoc updated :next-date))) (is (= (read-string "#inst \"2017-01-01\"") (:next-date updated))) )) (testing "Trying to set the date from someone other than the owner fails" (let [[r-initial initial] (post-request "/api/reminders" {:thought-id (:id record) :type-id "spaced"} token) invalid-token (invoke-login {:username tdu/ph-username :password <PASSWORD>}) [r-updated r-empty] (patch-request "/api/reminders" (:id initial) {:next-date "2017-01-01"} invalid-token) [_ updated] (get-request (str "/api/reminders/" (:id initial)) nil token) ] (is (= 201 (:status r-initial))) (is (= 404 (:status r-updated))) (is (empty? r-empty)) ; Patch returns no content ;; Nothing should have changed (is (= initial updated)))))) (deftest test-get-active-reminders (tdu/init-placeholder-data!) ;; Test with a single user (user/create! 
"user1" "<PASSWORD>") (let [token (invoke-login {:username "user1" :password "<PASSWORD>"}) ;; Test thought and reminders [_ thought-1] (post-request "/api/thoughts" {:thought "Just a thought"} token) [_ thought-2] (post-request "/api/thoughts" {:thought "Another thought"} token) [_ rem-1-1] (post-request "/api/reminders" {:thought-id (:id thought-1) :type-id "spaced"} token) [_ rem-1-2] (post-request "/api/reminders" {:thought-id (:id thought-1) :type-id "spaced"} token) [_ rem-2] (post-request "/api/reminders" {:thought-id (:id thought-2) :type-id "spaced"} token) ;; Some test timestamps minus-2s (c/to-date (t/plus (t/now) (t/seconds -2))) minus-1s (c/to-date (t/plus (t/now) (t/seconds -1))) in-10m (c/to-date (t/plus (t/now) (t/minutes 10))) ;; We get the thought description when returning the reminders, so ;; let's define a function to remove it clear-thought #(dissoc % :thought) ] ;; Verify the basics (is (string? token)) (doseq [item [thought-1 thought-2 rem-1-1 rem-1-2 rem-2]] (is (map? item) (str "Item should be a map " item))) ;; On to the tests (testing "There are no pending reminders initially" (let [[response reminders] (get-request "/api/reminders" nil token)] (is (= 200 (:status response))) (is (empty? reminders)))) (testing "A reminder shows up as pending if its next_date is in the past" ;; Change the next reminder dates (is (= 1 (reminder/update-reminder-date! (:id rem-1-2) minus-1s (:properties rem-1-2)))) (is (= 1 (reminder/update-reminder-date! (:id rem-1-1) in-10m (:properties rem-1-1)))) (let [rem-1-2 (reminder/get-by-id (:id rem-1-2)) ; Reload since we changed the date [response r-list] (get-request "/api/reminders" nil token)] (is (= 200 (:status response))) (is (= [rem-1-2] (map clear-thought r-list))) ; Reminder list includes the thought (is (= (:thought thought-1) (:thought (first r-list)))) )) (testing "Reminders are returned in next_date order" (is (= 1 (reminder/update-reminder-date! (:id rem-2) minus-2s (:properties rem-2)))) (let [rem-1-2 (reminder/get-by-id (:id rem-1-2)) ; Reload since we changed the date rem-2 (reminder/get-by-id (:id rem-2)) [response r-list] (get-request "/api/reminders" nil token)] (is (= 200 (:status response))) (is (= [rem-2 rem-1-2] (map clear-thought r-list))) ; Reminder list includes the thought (is (= (map :thought [thought-2 thought-1]) (map :thought r-list))))) (testing "A different user does not get any pending reminders" (user/create! "user2" "password") (let [other-token (invoke-login {:username "user2" :password "<PASSWORD>"}) [response reminders] (get-request "/api/reminders" nil other-token)] (is (string? other-token)) ;; The call itself succeeded, since it's a valid user, but there are no reminders (is (= 200 (:status response))) (is (empty? reminders)))) (testing "An invalid token throws up an error" (let [[response result] (get-request "/api/reminders" nil "invalid")] ;; The call itself succeeded, since it's a valid user, but there are no reminders (is (= 401 (:status response))) (is (:error result)))))) (deftest test-mark-as-viewed (tdu/init-placeholder-data!) ;; Test with a single user (user/create! 
"user1" "<PASSWORD>") (let [token (invoke-login {:username "user1" :password "<PASSWORD>"}) ;; Test thought and reminders [_ thought-1] (post-request "/api/thoughts" {:thought "Just a thought"} token) [_ thought-2] (post-request "/api/thoughts" {:thought "Another thought"} token) [_ rem-1] (post-request "/api/reminders" {:thought-id (:id thought-1) :type-id "spaced"} token) [_ rem-2] (post-request "/api/reminders" {:thought-id (:id thought-2) :type-id "spaced"} token) ;; Some test timestamps minus-2s (c/to-date (t/plus (t/now) (t/seconds -2))) minus-1s (c/to-date (t/plus (t/now) (t/seconds -1))) ;; Set both reminders to have their next reminder date ready and reload _ (reminder/update-reminder-date! (:id rem-1) minus-1s (:properties rem-1)) _ (reminder/update-reminder-date! (:id rem-2) minus-2s (:properties rem-2)) rem-1 (reminder/get-by-id (:id rem-1)) rem-2 (reminder/get-by-id (:id rem-2)) ;; We get the thought description when returning the reminders, so ;; let's define a function to remove it clear-thought #(dissoc % :thought)] ;; Verify the basics (is (string? token)) (doseq [item [thought-1 thought-2 rem-1 rem-2]] (is (map? item) (str "Item should be a map " item))) ;; On to the tests (testing "All reminders are pending initially" (let [[response r-list] (get-request "/api/reminders" nil token)] (is (= 200 (:status response))) (is (= [rem-2 rem-1] (map clear-thought r-list))))) (testing "Marking a reminder as viewed removes it from the list" (let [[response post-result] (post-request (str "/api/reminders/viewed/" (:id rem-1)) nil token) [_ r-list] (get-request "/api/reminders" nil token) new-rem-1 (reminder/get-by-id (:id rem-1))] ;; API call returns what we expect (is (= 200 (:status response))) (is (= 1 post-result)) ;; Reminder should no longer show up as pending (is (= [rem-2] (map clear-thought r-list))) ;; The reminder itself was updated (is (not= rem-1 new-rem-1)) (is (= (dissoc rem-1 :next-date :properties) (dissoc new-rem-1 :next-date :properties))) (is (t/after? (c/to-date-time (:next-date new-rem-1)) (c/to-date-time (:next-date rem-1)))) (is (= 1 (get-in new-rem-1 [:properties :day-idx]))))) (testing "A different user cannot mark the reminder as viewed" (user/create! "user2" "password") (let [other-token (invoke-login {:username "user2" :password "<PASSWORD>"}) rem-before (reminder/get-by-id (:id rem-1)) [response post-result] (post-request (str "/api/reminders/viewed/" (:id rem-1)) nil other-token) rem-after (reminder/get-by-id (:id rem-1))] (is (string? other-token)) ;; The call itself succeeded, since it's a valid user, but there are no reminders (is (= 404 (:status response))) (is (empty? post-result)) (is (= rem-before rem-after)))) (testing "Calling the function repeatedly keeps moving forward the date and increasing the day index" (let [day-count (count (get-in rem-1 [:properties :days])) ;; Will call the function multiple times on purpose to ensure it works even after the reminder has no more repetitions time-series (for [i (range (+ 3 day-count))] (let [[response post-result] (post-request (str "/api/reminders/viewed/" (:id rem-1)) nil token) new-rem (reminder/get-by-id (:id rem-1))] {:status (:status response) :result post-result :index i :next-date (:next-date new-rem) :day-idx (get-in new-rem [:properties :day-idx])} ))] (doseq [item time-series] ;; We will always return that we updated an existing reminder, even if we didn't move it forward (is (= 200 (:status item))) (is (= 1 (:result item))) (if (< (:day-idx item) day-count) (is (some? 
(:next-date item)) (str "Expected a date on " item)) (do (is (nil? (:next-date item)) (str "Did not expect a date for " item)) (is (= day-count (:day-idx item)) (str "Did not expect the day index to move after " day-count)))) ) )) (testing "Marking a legacy reminder as viewed adds a schedule and moves its date forward" (let [_ (reminder/update-reminder-date! (:id rem-1) minus-1s nil) legacy (reminder/get-by-id (:id rem-1)) [response post-result] (post-request (str "/api/reminders/viewed/" (:id rem-1)) nil token) [_ r-list] (get-request "/api/reminders" nil token) new-rem-1 (reminder/get-by-id (:id rem-1))] ;; Ensure our setup went as expected (is (nil? (:properties legacy))) (is (= minus-1s (:next-date legacy))) ;; API call returns what we expect (is (= 200 (:status response))) (is (= 1 post-result)) ;; Reminder does not show up as pending (is (= [rem-2] (map clear-thought r-list))) ;; The call configured the reminder to for spaced repetition and moved the date ;; forward, but left it at the first index (is (zero? (get-in new-rem-1 [:properties :day-idx]))) (is (t/after? (c/to-date-time (:next-date new-rem-1)) (c/to-date-time minus-1s))) (is (< 0 (count (get-in new-rem-1 [:properties :days])))))) )) ;; TODO: Tests for ;; - Mark a reminder as expired ;; - Get reminders for a thought
true
(ns memento.test.routes.api.reminder (:require [clojure.test :refer :all] [clj-time.coerce :as c] [clj-time.core :as t] [memento.handler :refer [app]] [memento.db.user :as user] [memento.test.db.user :as tdu] [memento.test.db.core :as tdb] [memento.test.routes.helpers :refer [patch-request post-request get-request put-request del-request invoke-login]] [mount.core :as mount] [memento.db.reminder :as reminder])) (use-fixtures :once (fn [f] (mount/start #'memento.config/env #'memento.db.core/*db*) (f))) ;;;; ;;;; Tests ;;;; (deftest test-add-reminder (tdu/init-placeholder-data!) (user/create! "user1" "PI:PASSWORD:<PASSWORD>END_PI") (let [token (invoke-login {:username "user1" :password "PI:PASSWORD:<PASSWORD>END_PI"}) [_ record] (post-request "/api/thoughts" {:thought "Just a thought"} token)] ;; Verify the basics (is (string? token)) (is (map? record)) ;; On to the tests (testing "Attempting to add a reminder without a token results in a 400" (let [[response _] (post-request "/api/reminder" {:thought-id (:id record) :type-id "spaced"} nil)] (is (= 400 (:status response))))) (testing "We can add a new reminder to a thought" (let [[response item] (post-request "/api/reminders" {:thought-id (:id record) :type-id "spaced"} token)] (is (= 201 (:status response))) (is (= "application/transit+json" (get-in response [:headers "Content-Type"]))) (is (map? item)) (is (:id item)) (is (= "spaced" (:type-id item))) (is (= 4 (count (get-in item [:properties :days])))) (is (zero? (get-in item [:properties :day-idx]))) (is (= (str "http://localhost/api/reminders/" (:id item)) (get-in response [:headers "Location"]))) )) (testing "After adding a reminder, we can retrieve it" (let [[_ item] (post-request "/api/reminders" {:thought-id (:id record) :type-id "spaced"} token) [response item-2] (get-request (str "/api/reminders/" (:id item)) nil token)] (is (= 200 (:status response))) (is (= "application/transit+json" (get-in response [:headers "Content-Type"]))) (is (= item item-2)))) (testing "If we query for thoughts with a reminder, we get a single thought with the two reminders we created" (let [[_ response] (get-request "/api/thoughts" nil token)] (is (= 1 (:total response) (:pages response) (count (:results response)))) (is (= 2 (count (:reminders (first (:results response)))))))) ;; Security concerns (testing "Trying to retrieve a reminder from someone other than the owner fails" (let [[_ item] (post-request "/api/reminders" {:thought-id (:id record) :type-id "spaced"} token) other-token (invoke-login {:username tdu/ph-username :password tdu/ph-PI:PASSWORD:<PASSWORD>END_PI}) [response item-2] (get-request (str "/api/reminders/" (:id item)) nil other-token)] (is (= 404 (:status response))) (is (some? item)) (is (nil? item-2)))) (testing "Trying to add a reminder to a thought by someone other than the owner fails" (let [all-before (tdb/get-all-reminders) other-token (invoke-login {:username tdu/ph-username :password tPI:PASSWORD:<PASSWORD>END_PI/PI:PASSWORD:<PASSWORD>END_PI}) all-after (tdb/get-all-reminders) [response item] (post-request "/api/reminders" {:thought-id (:id record) :type-id "spaced"} other-token)] (is (= 404 (:status response))) (is (nil? item)) ;; Ensure not only we 404'd, but there weren't any actual changes (is (= 3 (count all-before))) (is (= all-before all-after)))))) (deftest test-delete-thought (tdu/init-placeholder-data!) (user/create! 
"user1" "PI:PASSWORD:<PASSWORD>END_PI") (let [token (invoke-login {:username "user1" :password "PI:PASSWORD:<PASSWORD>END_PI"}) [_ record] (post-request "/api/thoughts" {:thought "Just a thought"} token)] ;; On to the tests (testing "We can delete a thought with reminders" (let [[_ reminder] (post-request "/api/reminders" {:thought-id (:id record) :type-id "spaced"} token) [deleted _] (del-request "/api/thoughts" (:id record) token) ; Query post-delete [r-del-thought thoughts-after-delete] (get-request "/api/thoughts" nil token) [r-del-reminder reminder-after-delete] (get-request (str "/api/reminders/" (:id reminder)) nil token)] (is reminder) (is (= 204 (:status deleted))) (is (= 404 (:status r-del-reminder))) (is (zero? (:total thoughts-after-delete))) (is (empty? reminder-after-delete)) )))) (deftest test-patch-next-date (tdu/init-placeholder-data!) (user/create! "user1" "PI:PASSWORD:<PASSWORD>END_PI") (let [token (invoke-login {:username "user1" :password "PI:PASSWORD:<PASSWORD>END_PI"}) [_ record] (post-request "/api/thoughts" {:thought "Just a thought"} token)] ;; Verify the basics (is (string? token)) (is (map? record)) ;; On to the tests (testing "We can set a nil date for an existing reminder" (let [[r-initial initial] (post-request "/api/reminders" {:thought-id (:id record) :type-id "spaced"} token) [r-updated r-empty] (patch-request "/api/reminders" (:id initial) {:next-date nil} token) [_ updated] (get-request (str "/api/reminders/" (:id initial)) nil token) ] (is (= 201 (:status r-initial))) (is (= 204 (:status r-updated))) (is (empty? r-empty)) ; Patch returns no content ;; Both reminders should be the same, other than the next_date (is (= (dissoc initial :next-date) (dissoc updated :next-date))) (is (nil? (:next-date updated))) )) (testing "We can set a string date for an existing reminder" (let [[r-initial initial] (post-request "/api/reminders" {:thought-id (:id record) :type-id "spaced"} token) [r-updated r-empty] (patch-request "/api/reminders" (:id initial) {:next-date "2017-01-01"} token) [_ updated] (get-request (str "/api/reminders/" (:id initial)) nil token) ] ;; TODO: Expand tests, just verifying the basics work right now, since the API may change (is (= 201 (:status r-initial))) (is (= 204 (:status r-updated))) (is (empty? r-empty)) ; Patch returns no content ;; Both reminders should be the same, other than the next_date (is (= (dissoc initial :next-date) (dissoc updated :next-date))) (is (= (read-string "#inst \"2017-01-01\"") (:next-date updated))) )) (testing "Trying to set the date from someone other than the owner fails" (let [[r-initial initial] (post-request "/api/reminders" {:thought-id (:id record) :type-id "spaced"} token) invalid-token (invoke-login {:username tdu/ph-username :password PI:PASSWORD:<PASSWORD>END_PI}) [r-updated r-empty] (patch-request "/api/reminders" (:id initial) {:next-date "2017-01-01"} invalid-token) [_ updated] (get-request (str "/api/reminders/" (:id initial)) nil token) ] (is (= 201 (:status r-initial))) (is (= 404 (:status r-updated))) (is (empty? r-empty)) ; Patch returns no content ;; Nothing should have changed (is (= initial updated)))))) (deftest test-get-active-reminders (tdu/init-placeholder-data!) ;; Test with a single user (user/create! 
"user1" "PI:PASSWORD:<PASSWORD>END_PI") (let [token (invoke-login {:username "user1" :password "PI:PASSWORD:<PASSWORD>END_PI"}) ;; Test thought and reminders [_ thought-1] (post-request "/api/thoughts" {:thought "Just a thought"} token) [_ thought-2] (post-request "/api/thoughts" {:thought "Another thought"} token) [_ rem-1-1] (post-request "/api/reminders" {:thought-id (:id thought-1) :type-id "spaced"} token) [_ rem-1-2] (post-request "/api/reminders" {:thought-id (:id thought-1) :type-id "spaced"} token) [_ rem-2] (post-request "/api/reminders" {:thought-id (:id thought-2) :type-id "spaced"} token) ;; Some test timestamps minus-2s (c/to-date (t/plus (t/now) (t/seconds -2))) minus-1s (c/to-date (t/plus (t/now) (t/seconds -1))) in-10m (c/to-date (t/plus (t/now) (t/minutes 10))) ;; We get the thought description when returning the reminders, so ;; let's define a function to remove it clear-thought #(dissoc % :thought) ] ;; Verify the basics (is (string? token)) (doseq [item [thought-1 thought-2 rem-1-1 rem-1-2 rem-2]] (is (map? item) (str "Item should be a map " item))) ;; On to the tests (testing "There are no pending reminders initially" (let [[response reminders] (get-request "/api/reminders" nil token)] (is (= 200 (:status response))) (is (empty? reminders)))) (testing "A reminder shows up as pending if its next_date is in the past" ;; Change the next reminder dates (is (= 1 (reminder/update-reminder-date! (:id rem-1-2) minus-1s (:properties rem-1-2)))) (is (= 1 (reminder/update-reminder-date! (:id rem-1-1) in-10m (:properties rem-1-1)))) (let [rem-1-2 (reminder/get-by-id (:id rem-1-2)) ; Reload since we changed the date [response r-list] (get-request "/api/reminders" nil token)] (is (= 200 (:status response))) (is (= [rem-1-2] (map clear-thought r-list))) ; Reminder list includes the thought (is (= (:thought thought-1) (:thought (first r-list)))) )) (testing "Reminders are returned in next_date order" (is (= 1 (reminder/update-reminder-date! (:id rem-2) minus-2s (:properties rem-2)))) (let [rem-1-2 (reminder/get-by-id (:id rem-1-2)) ; Reload since we changed the date rem-2 (reminder/get-by-id (:id rem-2)) [response r-list] (get-request "/api/reminders" nil token)] (is (= 200 (:status response))) (is (= [rem-2 rem-1-2] (map clear-thought r-list))) ; Reminder list includes the thought (is (= (map :thought [thought-2 thought-1]) (map :thought r-list))))) (testing "A different user does not get any pending reminders" (user/create! "user2" "password") (let [other-token (invoke-login {:username "user2" :password "PI:PASSWORD:<PASSWORD>END_PI"}) [response reminders] (get-request "/api/reminders" nil other-token)] (is (string? other-token)) ;; The call itself succeeded, since it's a valid user, but there are no reminders (is (= 200 (:status response))) (is (empty? reminders)))) (testing "An invalid token throws up an error" (let [[response result] (get-request "/api/reminders" nil "invalid")] ;; The call itself succeeded, since it's a valid user, but there are no reminders (is (= 401 (:status response))) (is (:error result)))))) (deftest test-mark-as-viewed (tdu/init-placeholder-data!) ;; Test with a single user (user/create! 
"user1" "PI:PASSWORD:<PASSWORD>END_PI") (let [token (invoke-login {:username "user1" :password "PI:PASSWORD:<PASSWORD>END_PI"}) ;; Test thought and reminders [_ thought-1] (post-request "/api/thoughts" {:thought "Just a thought"} token) [_ thought-2] (post-request "/api/thoughts" {:thought "Another thought"} token) [_ rem-1] (post-request "/api/reminders" {:thought-id (:id thought-1) :type-id "spaced"} token) [_ rem-2] (post-request "/api/reminders" {:thought-id (:id thought-2) :type-id "spaced"} token) ;; Some test timestamps minus-2s (c/to-date (t/plus (t/now) (t/seconds -2))) minus-1s (c/to-date (t/plus (t/now) (t/seconds -1))) ;; Set both reminders to have their next reminder date ready and reload _ (reminder/update-reminder-date! (:id rem-1) minus-1s (:properties rem-1)) _ (reminder/update-reminder-date! (:id rem-2) minus-2s (:properties rem-2)) rem-1 (reminder/get-by-id (:id rem-1)) rem-2 (reminder/get-by-id (:id rem-2)) ;; We get the thought description when returning the reminders, so ;; let's define a function to remove it clear-thought #(dissoc % :thought)] ;; Verify the basics (is (string? token)) (doseq [item [thought-1 thought-2 rem-1 rem-2]] (is (map? item) (str "Item should be a map " item))) ;; On to the tests (testing "All reminders are pending initially" (let [[response r-list] (get-request "/api/reminders" nil token)] (is (= 200 (:status response))) (is (= [rem-2 rem-1] (map clear-thought r-list))))) (testing "Marking a reminder as viewed removes it from the list" (let [[response post-result] (post-request (str "/api/reminders/viewed/" (:id rem-1)) nil token) [_ r-list] (get-request "/api/reminders" nil token) new-rem-1 (reminder/get-by-id (:id rem-1))] ;; API call returns what we expect (is (= 200 (:status response))) (is (= 1 post-result)) ;; Reminder should no longer show up as pending (is (= [rem-2] (map clear-thought r-list))) ;; The reminder itself was updated (is (not= rem-1 new-rem-1)) (is (= (dissoc rem-1 :next-date :properties) (dissoc new-rem-1 :next-date :properties))) (is (t/after? (c/to-date-time (:next-date new-rem-1)) (c/to-date-time (:next-date rem-1)))) (is (= 1 (get-in new-rem-1 [:properties :day-idx]))))) (testing "A different user cannot mark the reminder as viewed" (user/create! "user2" "password") (let [other-token (invoke-login {:username "user2" :password "PI:PASSWORD:<PASSWORD>END_PI"}) rem-before (reminder/get-by-id (:id rem-1)) [response post-result] (post-request (str "/api/reminders/viewed/" (:id rem-1)) nil other-token) rem-after (reminder/get-by-id (:id rem-1))] (is (string? other-token)) ;; The call itself succeeded, since it's a valid user, but there are no reminders (is (= 404 (:status response))) (is (empty? 
post-result)) (is (= rem-before rem-after)))) (testing "Calling the function repeatedly keeps moving forward the date and increasing the day index" (let [day-count (count (get-in rem-1 [:properties :days])) ;; Will call the function multiple times on purpose to ensure it works even after the reminder has no more repetitions time-series (for [i (range (+ 3 day-count))] (let [[response post-result] (post-request (str "/api/reminders/viewed/" (:id rem-1)) nil token) new-rem (reminder/get-by-id (:id rem-1))] {:status (:status response) :result post-result :index i :next-date (:next-date new-rem) :day-idx (get-in new-rem [:properties :day-idx])} ))] (doseq [item time-series] ;; We will always return that we updated an existing reminder, even if we didn't move it forward (is (= 200 (:status item))) (is (= 1 (:result item))) (if (< (:day-idx item) day-count) (is (some? (:next-date item)) (str "Expected a date on " item)) (do (is (nil? (:next-date item)) (str "Did not expect a date for " item)) (is (= day-count (:day-idx item)) (str "Did not expect the day index to move after " day-count)))) ) )) (testing "Marking a legacy reminder as viewed adds a schedule and moves its date forward" (let [_ (reminder/update-reminder-date! (:id rem-1) minus-1s nil) legacy (reminder/get-by-id (:id rem-1)) [response post-result] (post-request (str "/api/reminders/viewed/" (:id rem-1)) nil token) [_ r-list] (get-request "/api/reminders" nil token) new-rem-1 (reminder/get-by-id (:id rem-1))] ;; Ensure our setup went as expected (is (nil? (:properties legacy))) (is (= minus-1s (:next-date legacy))) ;; API call returns what we expect (is (= 200 (:status response))) (is (= 1 post-result)) ;; Reminder does not show up as pending (is (= [rem-2] (map clear-thought r-list))) ;; The call configured the reminder to for spaced repetition and moved the date ;; forward, but left it at the first index (is (zero? (get-in new-rem-1 [:properties :day-idx]))) (is (t/after? (c/to-date-time (:next-date new-rem-1)) (c/to-date-time minus-1s))) (is (< 0 (count (get-in new-rem-1 [:properties :days])))))) )) ;; TODO: Tests for ;; - Mark a reminder as expired ;; - Get reminders for a thought
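;; Any of the TODO tests above would start from the same helper pipeline used
;; throughout this namespace: seed the placeholder data, create a user, log in
;; for a token, create a thought, attach a reminder, then exercise the endpoint
;; under test. A minimal sketch of that shared setup follows; the credentials
;; and thought text are placeholders, not values the real tests depend on.
(comment
  (tdu/init-placeholder-data!)
  (user/create! "user1" "a-test-password")
  (let [token            (invoke-login {:username "user1" :password "a-test-password"})
        [_ thought]      (post-request "/api/thoughts" {:thought "An idea to revisit"} token)
        [_ reminder]     (post-request "/api/reminders" {:thought-id (:id thought) :type-id "spaced"} token)
        [resp reminders] (get-request "/api/reminders" nil token)]
    ;; resp carries the HTTP response; reminders is the (initially empty) pending list
    [(:status resp) (:id reminder) (count reminders)]))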
[ { "context": "; Copyright (c) Rich Hickey. All rights reserved.\n; The use and distributio", "end": 29, "score": 0.9998160004615784, "start": 18, "tag": "NAME", "value": "Rich Hickey" } ]
ext/clojure-clojurescript-bef56a7/src/clj/cljs/closure.clj
yokolet/clementine
35
; Copyright (c) Rich Hickey. All rights reserved. ; The use and distribution terms for this software are covered by the ; Eclipse Public License 1.0 (http://opensource.org/licenses/eclipse-1.0.php) ; which can be found in the file epl-v10.html at the root of this distribution. ; By using this software in any fashion, you are agreeing to be bound by ; the terms of this license. ; You must not remove this notice, or any other, from this software. (ns cljs.closure "Compile ClojureScript to JavaScript with optimizations from Google Closure Compiler producing runnable JavaScript. The Closure Compiler (compiler.jar) must be on the classpath. Use the 'build' function for end-to-end compilation. build = compile -> add-dependencies -> optimize -> output Two protocols are defined: IJavaScript and Compilable. The Compilable protocol is satisfied by something which can return one or more IJavaScripts. With IJavaScript objects in hand, calling add-dependencies will produce a sequence of IJavaScript objects which includes all required dependencies from the Closure library and ClojureScript, in dependency order. This function replaces the closurebuilder tool. The optimize function converts one or more IJavaScripts into a single string of JavaScript source code using the Closure Compiler API. The produced output is either a single string of optimized JavaScript or a deps file for use during development. " (:require [cljs.compiler :as comp] [cljs.analyzer :as ana] [clojure.java.io :as io] [clojure.string :as string]) (:import java.io.File java.io.BufferedInputStream java.net.URL java.util.logging.Level java.util.jar.JarFile com.google.common.collect.ImmutableList com.google.javascript.jscomp.CompilerOptions com.google.javascript.jscomp.CompilationLevel com.google.javascript.jscomp.ClosureCodingConvention com.google.javascript.jscomp.JSSourceFile com.google.javascript.jscomp.Result com.google.javascript.jscomp.JSError com.google.javascript.jscomp.CommandLineRunner)) (def name-chars (map char (concat (range 48 57) (range 65 90) (range 97 122)))) (defn random-char [] (nth name-chars (.nextInt (java.util.Random.) (count name-chars)))) (defn random-string [length] (apply str (take length (repeatedly random-char)))) ;; Closure API ;; =========== (defmulti js-source-file (fn [_ source] (class source))) (defmethod js-source-file String [^String name ^String source] (JSSourceFile/fromCode name source)) (defmethod js-source-file File [_ ^File source] (JSSourceFile/fromFile source)) (defmethod js-source-file BufferedInputStream [^String name ^BufferedInputStream source] (JSSourceFile/fromInputStream name source)) (defn set-options "TODO: Add any other options that we would like to support." [opts ^CompilerOptions compiler-options] (when (contains? opts :pretty-print) (set! (.prettyPrint compiler-options) (:pretty-print opts))) (when (contains? opts :print-input-delimiter) (set! (.printInputDelimiter compiler-options) (:print-input-delimiter opts)))) (defn make-options "Create a CompilerOptions object and set options from opts map." [opts] (let [level (case (:optimizations opts) :advanced CompilationLevel/ADVANCED_OPTIMIZATIONS :whitespace CompilationLevel/WHITESPACE_ONLY :simple CompilationLevel/SIMPLE_OPTIMIZATIONS) compiler-options (doto (CompilerOptions.) (.setCodingConvention (ClosureCodingConvention.)))] (do (.setOptionsForCompilationLevel level compiler-options) (set-options opts compiler-options) compiler-options))) (defn jar-entry-names* [jar-path] (with-open [z (java.util.zip.ZipFile. 
jar-path)] (doall (map #(.getName %) (enumeration-seq (.entries z)))))) (def jar-entry-names (memoize jar-entry-names*)) (defn find-js-jar "finds js resources from a given path in a jar file" [jar-path lib-path] (doall (map #(io/resource %) (filter #(do (and (.startsWith % lib-path) (.endsWith % ".js"))) (jar-entry-names jar-path))))) (declare to-url) (defn find-js-fs "finds js resources from a path on the files system" [path] (let [file (io/file path)] (when (.exists file) (map to-url (filter #(.endsWith (.getName %) ".js") (file-seq (io/file path))))))) (defn find-js-classpath "finds all js files on the classpath matching the path provided" [path] (let [process-entry #(if (.endsWith % ".jar") (find-js-jar % path) (find-js-fs (str % "/" path))) cpath-list (let [sysp (System/getProperty "java.class.path" )] (if (.contains sysp ";") (string/split sysp #";") (string/split sysp #":")))] (doall (reduce #(let [p (process-entry %2)] (if p (concat %1 p) %1)) [] cpath-list)))) (defn find-js-resources [path] "finds js resources in a given path on either the file system or the classpath" (let [file (io/file path)] (if (.exists file) (find-js-fs path) (find-js-classpath path)))) (defn load-externs "Externs are JavaScript files which contain empty definitions of functions which will be provided by the envorinment. Any function in an extern file will not be renamed during optimization. Options may contain an :externs key with a list of file paths to load. The :use-only-custom-externs flag may be used to indicate that the default externs should be excluded." [{:keys [externs use-only-custom-externs target ups-externs]}] (let [filter-cp-js (fn [paths] (for [p paths u (find-js-classpath p)] u)) filter-js (fn [paths] (for [p paths u (find-js-resources p)] u)) add-target (fn [ext] (if (= :nodejs target) (cons (io/resource "cljs/nodejs_externs.js") (or ext [])) ext)) load-js (fn [ext] (map #(js-source-file (.getFile %) (slurp %)) ext))] (let [js-sources (-> externs filter-js add-target load-js) ups-sources (-> ups-externs filter-cp-js load-js) all-sources (concat js-sources ups-sources)] (if use-only-custom-externs all-sources (into all-sources (CommandLineRunner/getDefaultExterns)))))) (defn ^com.google.javascript.jscomp.Compiler make-closure-compiler [] (let [compiler (com.google.javascript.jscomp.Compiler.)] (do (com.google.javascript.jscomp.Compiler/setLoggingLevel Level/WARNING) compiler))) (defn report-failure [^Result result] (let [errors (.errors result) warnings (.warnings result)] (doseq [next (seq errors)] (println "ERROR:" (.toString ^JSError next))) (doseq [next (seq warnings)] (println "WARNING:" (.toString ^JSError next))))) (defn parse-js-ns "Given the lines from a JavaScript source file, parse the provide and require statements and return them in a map. Assumes that all provide and require statements appear before the first function definition." [lines] (letfn [(conj-in [m k v] (update-in m [k] (fn [old] (conj old v))))] (->> (for [line lines x (string/split line #";")] x) (map string/trim) (take-while #(not (re-matches #".*=[\s]*function\(.*\)[\s]*[{].*" %))) (map #(re-matches #".*goog\.(provide|require)\(['\"](.*)['\"]\)" %)) (remove nil?) 
(map #(drop 1 %)) (reduce (fn [m ns] (if (= (first ns) "require") (conj-in m :requires (last ns)) (conj-in m :provides (last ns)))) {:requires [] :provides []})))) ;; Protocols for IJavaScript and Compilable ;; ======================================== (defmulti to-url class) (defmethod to-url File [^File f] (.toURL (.toURI f))) (defmethod to-url String [s] (to-url (io/file s))) (defprotocol IJavaScript (-foreign? [this] "Whether the Javascript represents a foreign library (a js file that not have any goog.provide statement") (-url [this] "The URL where this JavaScript is located. Returns nil when JavaScript exists in memory only.") (-provides [this] "A list of namespaces that this JavaScript provides.") (-requires [this] "A list of namespaces that this JavaScript requires.") (-source [this] "The JavaScript source string.")) (extend-protocol IJavaScript String (-foreign? [this] false) (-url [this] nil) (-provides [this] (:provides (parse-js-ns (string/split-lines this)))) (-requires [this] (:requires (parse-js-ns (string/split-lines this)))) (-source [this] this) clojure.lang.IPersistentMap (-foreign? [this] (:foreign this)) (-url [this] (or (:url this) (to-url (:file this)))) (-provides [this] (map name (:provides this))) (-requires [this] (map name (:requires this))) (-source [this] (if-let [s (:source this)] s (slurp (io/reader (-url this)))))) (defrecord JavaScriptFile [foreign ^URL url provides requires] IJavaScript (-foreign? [this] foreign) (-url [this] url) (-provides [this] provides) (-requires [this] requires) (-source [this] (slurp (io/reader url)))) (defn javascript-file [foreign ^URL url provides requires] (JavaScriptFile. foreign url (map name provides) (map name requires))) (defn map->javascript-file [m] (javascript-file (:foreign m) (to-url (:file m)) (:provides m) (:requires m))) (defn read-js "Read a JavaScript file returning a map of file information." [f] (let [source (slurp f) m (parse-js-ns (string/split-lines source))] (map->javascript-file (assoc m :file f)))) (defprotocol Compilable (-compile [this opts] "Returns one or more IJavaScripts.")) (defn build-index "Index a list of dependencies by namespace and file name. There can be zero or more namespaces provided per file." [deps] (reduce (fn [m next] (let [provides (:provides next)] (-> (if (seq provides) (reduce (fn [m* provide] (assoc m* provide next)) m provides) m) (assoc (:file next) next)))) {} deps)) (defn dependency-order-visit [state ns-name] (let [file (get state ns-name)] (if (or (:visited file) (nil? file)) state (let [state (assoc-in state [ns-name :visited] true) deps (:requires file) state (reduce dependency-order-visit state deps)] (assoc state :order (conj (:order state) file)))))) (defn- pack-string [s] (if (string? s) {:provides (-provides s) :requires (-requires s) :file (str "from_source_" (gensym) ".clj") ::original s} s)) (defn- unpack-string [m] (or (::original m) m)) (defn dependency-order "Topologically sort a collection of dependencies." [coll] (let [state (build-index (map pack-string coll))] (map unpack-string (distinct (:order (reduce dependency-order-visit (assoc state :order []) (keys state))))))) ;; Compile ;; ======= (defn compile-form-seq "Compile a sequence of forms to a JavaScript source string." 
[forms] (comp/with-core-cljs (with-out-str (binding [ana/*cljs-ns* 'cljs.user] (doseq [form forms] (comp/emit (ana/analyze (ana/empty-env) form))))))) (defn output-directory [opts] (or (:output-dir opts) "out")) (def compiled-cljs (atom {})) (defn compiled-file "Given a map with at least a :file key, return a map with {:file .. :provides .. :requires ..}. Compiled files are cached so they will only be read once." [m] (let [path (.getAbsolutePath (:file m)) js (if (:provides m) (map->javascript-file m) (if-let [js (get @compiled-cljs path)] js (read-js (:file m))))] (do (swap! compiled-cljs (fn [old] (assoc old path js))) js))) (defn compile-file "Compile a single cljs file. If no output-file is specified, returns a string of compiled JavaScript. With an output-file option, the compiled JavaScript will written to this location and the function returns a JavaScriptFile. In either case the return value satisfies IJavaScript." [^File file {:keys [output-file] :as opts}] (if output-file (let [out-file (io/file (output-directory opts) output-file)] (compiled-file (comp/compile-file file out-file))) (compile-form-seq (comp/forms-seq file)))) (defn compile-dir "Recursively compile all cljs files under the given source directory. Return a list of JavaScriptFiles." [^File src-dir opts] (let [out-dir (output-directory opts)] (map compiled-file (comp/compile-root src-dir out-dir)))) (defn path-from-jarfile "Given the URL of a file within a jar, return the path of the file from the root of the jar." [^URL url] (last (string/split (.getFile url) #"\.jar!/"))) (defn jar-file-to-disk "Copy a file contained within a jar to disk. Return the created file." [url out-dir] (let [out-file (io/file out-dir (path-from-jarfile url)) content (slurp (io/reader url))] (do (comp/mkdirs out-file) (spit out-file content) out-file))) (defn compile-from-jar "Compile a file from a jar." [this {:keys [output-file] :as opts}] (or (when output-file (let [out-file (io/file (output-directory opts) output-file)] (when (.exists out-file) (compiled-file {:file out-file})))) (let [file-on-disk (jar-file-to-disk this (output-directory opts))] (-compile file-on-disk opts)))) (extend-protocol Compilable File (-compile [this opts] (if (.isDirectory this) (compile-dir this opts) (compile-file this opts))) URL (-compile [this opts] (case (.getProtocol this) "file" (-compile (io/file this) opts) "jar" (compile-from-jar this opts))) clojure.lang.PersistentList (-compile [this opts] (compile-form-seq [this])) String (-compile [this opts] (-compile (io/file this) opts)) clojure.lang.PersistentVector (-compile [this opts] (compile-form-seq this)) ) (comment ;; compile a file in memory (-compile "samples/hello/src/hello/core.cljs" {}) ;; compile a file to disk - see file @ 'out/clojure/set.js' (-compile (io/resource "clojure/set.cljs") {:output-file "clojure/set.js"}) ;; compile a project (-compile (io/file "samples/hello/src") {}) ;; compile a project with a custom output directory (-compile (io/file "samples/hello/src") {:output-dir "my-output"}) ;; compile a form (-compile '(defn plus-one [x] (inc x)) {}) ;; compile a vector of forms (-compile '[(ns test.app (:require [goog.array :as array])) (defn plus-one [x] (inc x))] {}) ) ;; Dependencies ;; ============ ;; ;; Find all dependencies from files on the classpath. Eliminates the ;; need for closurebuilder. cljs dependencies will be compiled as ;; needed. (defn find-url "Given a string, returns a URL. 
Attempts to resolve as a classpath-relative path, then as a path relative to the working directory or a URL string" [path-or-url] (or (io/resource path-or-url) (try (io/as-url path-or-url) (catch java.net.MalformedURLException e false)) (io/as-url (io/as-file path-or-url)))) (defn load-foreign-library* "Given a library spec (a map containing the keys :file and :provides), returns a map containing :provides, :requires, :file and :url" ([lib-spec] (load-foreign-library* lib-spec false)) ([lib-spec cp-only?] (let [find-func (if cp-only? io/resource find-url)] (merge lib-spec {:foreign true :url (find-func (:file lib-spec))})))) (def load-foreign-library (memoize load-foreign-library*)) (defn load-library* "Given a path to a JavaScript library, which is a directory containing Javascript files, return a list of maps containing :provides, :requires, :file and :url." ([path] (load-library* path false)) ([path cp-only?] (let [find-func (if cp-only? find-js-classpath find-js-resources) graph-node (fn [u] (-> (io/reader u) line-seq parse-js-ns (assoc :url u)))] (let [js-sources (find-js-resources path)] (filter #(seq (:provides %)) (map graph-node js-sources)))))) (def load-library (memoize load-library*)) (defn library-dependencies [{libs :libs foreign-libs :foreign-libs ups-libs :ups-libs ups-flibs :ups-foreign-libs}] (concat (mapcat #(load-library % true) ups-libs) ;upstream deps (mapcat load-library libs) (mapcat #(load-foreign-library % true) ups-flibs) ;upstream deps (map load-foreign-library foreign-libs))) (comment ;; load one library (load-library* "closure/library/third_party/closure") ;; load all library dependencies (library-dependencies {:libs ["closure/library/third_party/closure"]}) (library-dependencies {:foreign-libs [{:file "http://example.com/remote.js" :provides ["my.example"]}]}) (library-dependencies {:foreign-libs [{:file "local/file.js" :provides ["my.example"]}]}) (library-dependencies {:foreign-libs [{:file "cljs/nodejs_externs.js" :provides ["my.example"]}]})) (defn goog-dependencies* "Create an index of Google dependencies by namespace and file name." [] (letfn [(parse-list [s] (when (> (count s) 0) (-> (.substring s 1 (dec (count s))) (string/split #"'\s*,\s*'"))))] (->> (line-seq (io/reader (io/resource "goog/deps.js"))) (map #(re-matches #"^goog\.addDependency\(['\"](.*)['\"],\s*\[(.*)\],\s*\[(.*)\]\);.*" %)) (remove nil?) (map #(drop 1 %)) (remove #(.startsWith (first %) "../../third_party")) (map #(hash-map :file (str "goog/"(first %)) :provides (parse-list (second %)) :requires (parse-list (last %)) :group :goog))))) (def goog-dependencies (memoize goog-dependencies*)) (defn js-dependency-index "Returns the index for all JavaScript dependencies. Lookup by namespace or file name." [opts] (build-index (concat (goog-dependencies) (library-dependencies opts)))) (defn js-dependencies "Given a sequence of Closure namespace strings, return the list of all dependencies. The returned list includes all Google and third-party library dependencies. Third-party libraries are configured using the :libs option where the value is a list of directories containing third-party libraries." [opts requires] (let [index (js-dependency-index opts)] (loop [requires requires visited (set requires) deps #{}] (if (seq requires) (let [node (get index (first requires)) new-req (remove #(contains? visited %) (:requires node))] (recur (into (rest requires) new-req) (into visited new-req) (conj deps node))) (remove nil? 
deps))))) (comment ;; find dependencies (js-dependencies {} ["goog.array"]) ;; find dependencies in an external library (js-dependencies {:libs ["closure/library/third_party/closure"]} ["goog.dom.query"]) ) (defn get-compiled-cljs "Return an IJavaScript for this file. Compiled output will be written to the working directory." [opts {:keys [relative-path uri]}] (let [js-file (comp/rename-to-js relative-path)] (-compile uri (merge opts {:output-file js-file})))) (defn cljs-dependencies "Given a list of all required namespaces, return a list of IJavaScripts which are the cljs dependencies. The returned list will not only include the explicitly required files but any transitive depedencies as well. JavaScript files will be compiled to the working directory if they do not already exist. Only load dependencies from the classpath." [opts requires] (let [index (js-dependency-index opts)] (letfn [(ns->cp [s] (str (string/replace (munge s) \. \/) ".cljs")) (cljs-deps [coll] (->> coll (remove #(contains? index %)) (map #(let [f (ns->cp %)] (hash-map :relative-path f :uri (io/resource f)))) (remove #(nil? (:uri %)))))] (loop [required-files (cljs-deps requires) visited (set required-files) js-deps #{}] (if (seq required-files) (let [next-file (first required-files) js (get-compiled-cljs opts next-file) new-req (remove #(contains? visited %) (cljs-deps (-requires js)))] (recur (into (rest required-files) new-req) (into visited new-req) (conj js-deps js))) (remove nil? js-deps)))))) (comment ;; only get cljs deps (cljs-dependencies {} ["goog.string" "cljs.core"]) ;; get transitive deps (cljs-dependencies {} ["clojure.string"]) ;; don't get cljs.core twice (cljs-dependencies {} ["cljs.core" "clojure.string"]) ) (defn add-dependencies "Given one or more IJavaScript objects in dependency order, produce a new sequence of IJavaScript objects which includes the input list plus all dependencies in dependency order." 
[opts & inputs] (let [requires (mapcat -requires inputs) required-cljs (remove (set inputs) (cljs-dependencies opts requires)) required-js (js-dependencies opts (set (concat (mapcat -requires required-cljs) requires)))] (cons (javascript-file nil (io/resource "goog/base.js") ["goog"] nil) (dependency-order (concat (map #(-> (javascript-file (:foreign %) (or (:url %) (io/resource (:file %))) (:provides %) (:requires %)) (assoc :group (:group %))) required-js) required-cljs inputs))))) (comment ;; add dependencies to literal js (add-dependencies {} "goog.provide('test.app');\ngoog.require('cljs.core');") (add-dependencies {} "goog.provide('test.app');\ngoog.require('goog.array');") (add-dependencies {} (str "goog.provide('test.app');\n" "goog.require('goog.array');\n" "goog.require('clojure.set');")) ;; add dependencies with external lib (add-dependencies {:libs ["closure/library/third_party/closure"]} (str "goog.provide('test.app');\n" "goog.require('goog.array');\n" "goog.require('goog.dom.query');")) ;; add dependencies with foreign lib (add-dependencies {:foreign-libs [{:file "samples/hello/src/hello/core.cljs" :provides ["example.lib"]}]} (str "goog.provide('test.app');\n" "goog.require('example.lib');\n")) ;; add dependencies to a JavaScriptFile record (add-dependencies {} (javascript-file false (to-url "samples/hello/src/hello/core.cljs") ["hello.core"] ["goog.array"])) ) ;; Optimize ;; ======== (defmulti javascript-name class) (defmethod javascript-name URL [^URL url] (if url (.getPath url) "cljs/user.js")) (defmethod javascript-name String [s] (if-let [name (first (-provides s))] name "cljs/user.js")) (defmethod javascript-name JavaScriptFile [js] (javascript-name (-url js))) (defn build-provides "Given a vector of provides, builds required goog.provide statements" [provides] (apply str (map #(str "goog.provide('" % "');\n") provides))) (defmethod js-source-file JavaScriptFile [_ js] (when-let [url (-url js)] (js-source-file (javascript-name url) (if (-foreign? js) (str (build-provides (-provides js)) (slurp url)) (io/input-stream url))))) (defn optimize "Use the Closure Compiler to optimize one or more JavaScript files." [opts & sources] (let [closure-compiler (make-closure-compiler) externs (load-externs opts) compiler-options (make-options opts) sources (if (= :whitespace (:optimizations opts)) (cons "var CLOSURE_NO_DEPS = true;" sources) sources) inputs (map #(js-source-file (javascript-name %) %) sources) result ^Result (.compile closure-compiler externs inputs compiler-options)] (if (.success result) (.toSource closure-compiler) (report-failure result)))) (comment ;; optimize JavaScript strings (optimize {:optimizations :whitespace} "var x = 3 + 2; alert(x);") ;; => "var x=3+2;alert(x);" (optimize {:optimizations :simple} "var x = 3 + 2; alert(x);") ;; => "var x=5;alert(x);" (optimize {:optimizations :advanced} "var x = 3 + 2; alert(x);") ;; => "alert(5);" ;; optimize a ClojureScript form (optimize {:optimizations :simple} (-compile '(def x 3) {})) ;; optimize a project (println (->> (-compile "samples/hello/src" {}) (apply add-dependencies {}) (apply optimize {:optimizations :simple :pretty-print true}))) ) ;; Output ;; ====== ;; ;; The result of a build is always a single string of JavaScript. The ;; build process may produce files on disk but a single string is ;; always output. What this string contains depends on whether the ;; input has been optimized or not. If the :output-to option is set ;; then this string will be written to the specified file. 
If not, it ;; will be returned. ;; ;; The :output-dir option can be used to set the working directory ;; where any files will be written to disk. By default this directory ;; is 'out'. ;; ;; If inputs are optimized then the output string will be the complete ;; application with all dependencies included. ;; ;; For unoptimized output, the string will be a Closure deps file ;; describing where the JavaScript files are on disk and their ;; dependencies. All JavaScript files will be located in the working ;; directory, including any dependencies from the Closure library. ;; ;; Unoptimized mode is faster because the Closure Compiler is not ;; run. It also makes debugging much simpler because each file is ;; loaded in its own script tag. ;; ;; When working with uncompiled files, you will need to add additional ;; script tags to the hosting HTML file: one which pulls in Closure ;; library's base.js and one which calls goog.require to load your ;; code. See samples/hello/hello-dev.html for an example. (defn path-relative-to "Generate a string which is the path to input relative to base." [^File base input] (let [base-path (comp/path-seq (.getCanonicalPath base)) input-path (comp/path-seq (.getCanonicalPath (io/file ^URL (-url input)))) count-base (count base-path) common (count (take-while true? (map #(= %1 %2) base-path input-path))) prefix (repeat (- count-base common 1) "..")] (if (= count-base common) (last input-path) ;; same file (comp/to-path (concat prefix (drop common input-path)) "/")))) (defn add-dep-string "Return a goog.addDependency string for an input." [opts input] (letfn [(ns-list [coll] (when (seq coll) (apply str (interpose ", " (map #(str "'" (munge %) "'") coll)))))] (str "goog.addDependency(\"" (path-relative-to (io/file (output-directory opts) "goog/base.js") input) "\", [" (ns-list (-provides input)) "], [" (ns-list (-requires input)) "]);"))) (defn deps-file "Return a deps file string for a sequence of inputs." [opts sources] (apply str (interpose "\n" (map #(add-dep-string opts %) sources)))) (comment (path-relative-to (io/file "out/goog/base.js") {:url (to-url "out/cljs/core.js")}) (add-dep-string {} {:url (to-url "out/cljs/core.js") :requires ["goog.string"] :provides ["cljs.core"]}) (deps-file {} [{:url (to-url "out/cljs/core.js") :requires ["goog.string"] :provides ["cljs.core"]}]) ) (defn output-one-file [{:keys [output-to]} js] (cond (nil? output-to) js (string? output-to) (spit output-to js) :else (println js))) (defn output-deps-file [opts sources] (output-one-file opts (deps-file opts sources))) (defn ^String output-path "Given an IJavaScript which is either in memory or in a jar file, return the output path for this file relative to the working directory." [js] (if-let [url ^URL (-url js)] (path-from-jarfile url) (str (random-string 5) ".js"))) (defn write-javascript "Write a JavaScript file to disk. Only write if the file does not already exist. Return IJavaScript for the file on disk." [opts js] (let [out-dir (io/file (output-directory opts)) out-name (output-path js) out-file (io/file out-dir out-name)] (do (when-not (.exists out-file) (do (comp/mkdirs out-file) (spit out-file (-source js)))) {:url (to-url out-file) :requires (-requires js) :provides (-provides js) :group (:group js)}))) (defn source-on-disk "Ensure that the given JavaScript exists on disk. Write in memory sources and files contained in jars to the working directory. Return updated IJavaScript with the new location." 
[opts js] (let [url ^URL (-url js)] (if (or (not url) (= (.getProtocol url) "jar")) (write-javascript opts js) js))) (comment (write-javascript {} "goog.provide('demo');\nalert('hello');\n") ;; write something from a jar file to disk (source-on-disk {} {:url (io/resource "goog/base.js") :source (slurp (io/reader (io/resource "goog/base.js")))}) ;; doesn't write a file that is already on disk (source-on-disk {} {:url (io/resource "cljs/core.cljs")}) ) (defn output-unoptimized "Ensure that all JavaScript source files are on disk (not in jars), write the goog deps file including only the libraries that are being used and write the deps file for the current project. The deps file for the current project will include third-party libraries." [opts & sources] (let [disk-sources (map #(source-on-disk opts %) sources)] (let [goog-deps (io/file (output-directory opts) "goog/deps.js")] (do (comp/mkdirs goog-deps) (spit goog-deps (deps-file opts (filter #(= (:group %) :goog) disk-sources))) (output-deps-file opts (remove #(= (:group %) :goog) disk-sources)))))) (comment ;; output unoptimized alone (output-unoptimized {} "goog.provide('test');\ngoog.require('cljs.core');\nalert('hello');\n") ;; output unoptimized with all dependencies (apply output-unoptimized {} (add-dependencies {} "goog.provide('test');\ngoog.require('cljs.core');\nalert('hello');\n")) ;; output unoptimized with external library (apply output-unoptimized {} (add-dependencies {:libs ["closure/library/third_party/closure"]} "goog.provide('test');\ngoog.require('cljs.core');\ngoog.require('goog.dom.query');\n")) ;; output unoptimized and write deps file to 'out/test.js' (output-unoptimized {:output-to "out/test.js"} "goog.provide('test');\ngoog.require('cljs.core');\nalert('hello');\n") ) (defn get-upstream-deps* "returns a merged map containing all upstream dependencies defined by libraries on the classpath" [] (let [classloader (. (Thread/currentThread) (getContextClassLoader)) upstream-deps (map #(read-string (slurp %)) (enumeration-seq (. classloader (findResources "deps.cljs"))))] (doseq [dep upstream-deps] (println (str "Upstream deps.cljs found on classpath. " dep " This is an EXPERIMENTAL FEATURE and is not guarenteed to remain stable in future versions."))) (apply merge-with concat upstream-deps))) (def get-upstream-deps (memoize get-upstream-deps*)) (defn add-header [{:keys [hashbang target]} js] (if (= :nodejs target) (str "#!" (or hashbang "/usr/bin/env node") "\n" js) js)) (defn add-wrapper [{:keys [output-wrapper] :as opts} js] (if output-wrapper (str ";(function(){\n" js "\n})();\n") js)) (defn build "Given a source which can be compiled, produce runnable JavaScript." [source opts] (ana/reset-namespaces!) (let [opts (if (= :nodejs (:target opts)) (merge {:optimizations :simple} opts) opts) ups-deps (get-upstream-deps) all-opts (assoc opts :ups-libs (:libs ups-deps) :ups-foreign-libs (:foreign-libs ups-deps) :ups-externs (:externs ups-deps))] (binding [ana/*cljs-static-fns* (or (and (= (opts :optimizations) :advanced)) (:static-fns opts) ana/*cljs-static-fns*) ana/*cljs-warn-on-undeclared* (true? (opts :warnings))] (let [compiled (-compile source all-opts) js-sources (concat (apply add-dependencies all-opts (if (coll? 
compiled) compiled [compiled])) (when (= :nodejs (:target all-opts)) [(-compile (io/resource "cljs/nodejscli.cljs") all-opts)])) optim (:optimizations all-opts)] (if (and optim (not= optim :none)) (->> js-sources (apply optimize all-opts) (add-header all-opts) (add-wrapper all-opts) (output-one-file all-opts)) (apply output-unoptimized all-opts js-sources)))))) (comment (println (build '[(ns hello.core) (defn ^{:export greet} greet [n] (str "Hola " n)) (defn ^:export sum [xs] 42)] {:optimizations :simple :pretty-print true})) ;; build a project with optimizations (build "samples/hello/src" {:optimizations :advanced}) (build "samples/hello/src" {:optimizations :advanced :output-to "samples/hello/hello.js"}) ;; open 'samples/hello/hello.html' to see the result in action ;; build a project without optimizations (build "samples/hello/src" {:output-dir "samples/hello/out" :output-to "samples/hello/hello.js"}) ;; open 'samples/hello/hello-dev.html' to see the result in action ;; notice how each script was loaded individually ;; build unoptimized from raw ClojureScript (build '[(ns hello.core) (defn ^{:export greet} greet [n] (str "Hola " n)) (defn ^:export sum [xs] 42)] {:output-dir "samples/hello/out" :output-to "samples/hello/hello.js"}) ;; open 'samples/hello/hello-dev.html' to see the result in action )
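;; Neither parse-js-ns nor dependency-order is exercised in the comment blocks
;; above, so the sketch below shows what each should yield for small literal
;; inputs; the file names and namespaces here are made up purely for illustration.
(comment
  ;; Extract goog.provide/goog.require declarations from raw JavaScript lines.
  (parse-js-ns ["goog.provide('my.app');"
                "goog.require('goog.array');"])
  ;; => {:requires ["goog.array"], :provides ["my.app"]}

  ;; Topologically sort dependency maps so that required files come first.
  (dependency-order
    [{:file "a.js" :provides ["my.app"]  :requires ["my.util"]}
     {:file "b.js" :provides ["my.util"] :requires []}])
  ;; => the map for b.js followed by the map for a.js
  )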
compiled) compiled [compiled])) (when (= :nodejs (:target all-opts)) [(-compile (io/resource "cljs/nodejscli.cljs") all-opts)])) optim (:optimizations all-opts)] (if (and optim (not= optim :none)) (->> js-sources (apply optimize all-opts) (add-header all-opts) (add-wrapper all-opts) (output-one-file all-opts)) (apply output-unoptimized all-opts js-sources)))))) (comment (println (build '[(ns hello.core) (defn ^{:export greet} greet [n] (str "Hola " n)) (defn ^:export sum [xs] 42)] {:optimizations :simple :pretty-print true})) ;; build a project with optimizations (build "samples/hello/src" {:optimizations :advanced}) (build "samples/hello/src" {:optimizations :advanced :output-to "samples/hello/hello.js"}) ;; open 'samples/hello/hello.html' to see the result in action ;; build a project without optimizations (build "samples/hello/src" {:output-dir "samples/hello/out" :output-to "samples/hello/hello.js"}) ;; open 'samples/hello/hello-dev.html' to see the result in action ;; notice how each script was loaded individually ;; build unoptimized from raw ClojureScript (build '[(ns hello.core) (defn ^{:export greet} greet [n] (str "Hola " n)) (defn ^:export sum [xs] 42)] {:output-dir "samples/hello/out" :output-to "samples/hello/hello.js"}) ;; open 'samples/hello/hello-dev.html' to see the result in action )
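;; Editorial addition (not part of the original source): illustrative calls for the
;; output post-processing helpers above. The input strings are hypothetical.
(comment
  ;; a :nodejs target gets a hashbang line prepended (default /usr/bin/env node)
  (add-header {:target :nodejs} "print('hi');")
  ;; => "#!/usr/bin/env node\nprint('hi');"

  ;; :output-wrapper wraps the program in an immediately-invoked function so that
  ;; advanced-compiled code does not leak vars into the global scope
  (add-wrapper {:output-wrapper true} "var x = 1;")
  ;; => ";(function(){\nvar x = 1;\n})();\n"

  ;; for unoptimized output the hosting HTML typically loads Closure's base.js, then
  ;; the generated deps file, then goog.requires the entry namespace, roughly:
  ;;   <script src="out/goog/base.js"></script>
  ;;   <script src="samples/hello/hello.js"></script>
  ;;   <script>goog.require('hello.core');</script>
  )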
true
[ { "context": "alisation of formulae\n\n; Copyright (c) 2016 - 2017 Burkhardt Renz, Juan Markowich THM. \n; All rights reserved.\n; Th", "end": 94, "score": 0.9998602271080017, "start": 80, "tag": "NAME", "value": "Burkhardt Renz" }, { "context": "mulae\n\n; Copyright (c) 2016 - 2017 Burkhardt Renz, Juan Markowich THM. \n; All rights reserved.\n; The use and distri", "end": 110, "score": 0.9998698234558105, "start": 96, "tag": "NAME", "value": "Juan Markowich" } ]
src/lwb/vis.clj
esb-lwb/lwb
22
; lwb Logic WorkBench -- Visualisation of formulae ; Copyright (c) 2016 - 2017 Burkhardt Renz, Juan Markowich THM. ; All rights reserved. ; The use and distribution terms for this software are covered by the ; Eclipse Public License 1.0 (http://opensource.org/licenses/eclipse-1.0.php). ; By using this software in any fashion, you are agreeing to be bound by ; the terms of this license. (ns lwb.vis (:require [clojure.zip :as zip] [clojure.string :as str] [lwb.util.shell :as shell] [clojure.java.browse :as browse])) (defn man "Manual" [] (browse/browse-url "https://github.com/esb-dev/lwb/wiki/vis")) ;; # Visualisation of formulas ;; ;; The syntax tree of a formula of propositional logic, predicate logic or ;; linear temporal logic is transformed into code for tikz and the package ;; `tikz-tree`. ;; With the help of `tex2pdf` (see lwb.util.shell) a pdf file is generated. ;; A running TeX is a prerequisite. (def ^:private tikz-header "\\documentclass{standalone} \\standaloneconfig{border=8pt} \\usepackage{MnSymbol} \\usepackage[english]{babel} \\usepackage{tikz-qtree} \\tikzset{every tree node/.style={shape=rectangle,minimum size=6mm,rounded corners=3mm,draw}, edge from parent/.style={draw, edge from parent path={(\\tikzparentnode) -- (\\tikzchildnode)}}, sibling distance=8pt } \\begin{document} \\begin{tikzpicture} \\Tree") (def ^:private tikz-footer "\\end{tikzpicture} \\end{document}") (defn- first? "Is loc the most left location of siblings?" [loc] (nil? (zip/left loc))) (defn- end? "Is loc a node marked with `:end`?" [loc] (= :end (zip/node loc))) (defn- mark-end-of-branch "To facilitate the generation of code in tikz, we mark the ends of lists with `:end`" [phi] (loop [loc (zip/seq-zip (seq phi))] (if (zip/end? loc) (zip/root loc) (recur (zip/next (if (zip/branch? loc) (let [inserted-loc (zip/insert-right (-> loc zip/down zip/rightmost) :end)] (zip/leftmost inserted-loc)) loc)))))) (defn- process-head "Generates texcode for the head of a list" [node] (let [symbols {:and "\\land" :or "\\lor" :not "\\lnot" :impl "\\to" :equiv "\\leftrightarrow" :true "\\top" :false "\\bot" :xor "\\oplus" :ite "\\mathsf{ite}" :always "\\medsquare" :finally "\\lozenge" :atnext "\\medcircle" :until "\\mathcal{U}" :release "\\mathcal{R}"} nkey (keyword (name node))] (if (contains? symbols nkey) (str " [.\\node{$" (nkey symbols) "$};") (str " [.\\node{$" node "$};")))) (defn- process-quantor "Generates texcode for quantors" [node vars] (let [quantors {:forall "\\forall" :exists "\\exists"} nkey (keyword (name node))] (str " [.\\node{$" (nkey quantors) " " (str/join "\\, " vars) "$};"))) (defn- process-atom "Generates texcode for atoms Since `{` and `}` are a reserved character in Clojure, one can use `<` and `>` as characters for grouping subscripts e.g." [node] (let [node-str (str node) node-str' (str/replace node-str \< \{) node-str'' (str/replace node-str' \> \})] (str " $" node-str'' "$"))) (defn- mapfn "Mapping function that generates the tikz code from the traversing of the tree." [loc] (let [n (zip/node loc)] (cond (vector? n) "" ; already processed (first? loc) ; head with special case of quantor (if (or (= n 'forall) (= n 'exists)) (let [n' (-> loc zip/next zip/node)] (process-quantor n n')) (process-head n)) (end? loc) " ]" ; last in list :else (process-atom n)))) ; in the middle of the list (defn- vis-tikz-body "Visualization with tikz, the body" [phi] (let [phi-n (if (symbol? 
phi) (list phi) phi) ; special case phi' (mark-end-of-branch phi-n) loc (zip/seq-zip (seq phi'))] (str/join (map mapfn (remove zip/branch? (take-while (complement zip/end?) (iterate zip/next loc))))))) (defn texify "Visualisation of the syntax tree of formula `phi`. Generates code for tikz. With the filename given: Makes a pdf file with the visualisation of the syntax tree of `phi`. `filename` is the name of the file to be generated, must have no extension. The function uses the commands defined in `lwb.util.shell` to generate a tex file and open it. The function requires that TeX could be found on the path of a subshell." ([phi] (let [tikz-body (vis-tikz-body phi)] (str tikz-header "\n" tikz-body "\n" tikz-footer))) ([phi filename] (let [tex-code (texify phi)] (spit (str filename ".tex") tex-code) (shell/tex2pdf (str filename ".tex")) (shell/open (str filename ".pdf"))))) (comment (texify '(and P Q) "example"))
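;; Editorial addition (not part of the original source): a few illustrative calls
;; showing the intermediate steps of the tikz generation. The formulas are made up.
(comment
  ;; branch ends are marked with :end so that mapfn knows where to emit the closing ]
  (mark-end-of-branch '(and P (or Q R)))
  ;; => (and P (or Q R :end) :end)

  ;; operators become tikz-qtree nodes, atoms become plain math text;
  ;; < and > in an atom are turned into TeX grouping braces for subscripts
  (process-head 'and)    ;; => " [.\\node{$\\land$};"
  (process-atom 'P_<1>)  ;; => " $P_{1}$"

  ;; the one-argument texify returns the complete standalone tikz document as a string
  (println (texify '(impl P Q))))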
5907
true
[ { "context": "code\n [ctx]\n (let [username (-> ctx :user :user/username)\n _ (log/info (str \"Repl Eval reque", "end": 7391, "score": 0.8497355580329895, "start": 7383, "tag": "USERNAME", "value": "username" }, { "context": " :password password})\n (check-email-activation username-or-email)\n", "end": 13679, "score": 0.9982847571372986, "start": 13671, "tag": "PASSWORD", "value": "password" } ]
src/clj/clojurecademy/controller/course/core.clj
harunpehlivan/clojurecademy
0
(ns clojurecademy.controller.course.core
  (:require [clojurecademy.dao.course :as course.dao]
            [clojurecademy.dao.progress :as progress.dao]
            [clojurecademy.dao.sub-chapter :as sub-chapter.dao]
            [clojurecademy.dao.subject :as subject.dao]
            [clojurecademy.controller.course.eval :as course.eval]
            [clojurecademy.controller.course.subject :as course.subject]
            [clojurecademy.controller.course.resume :as course.resume]
            [clojurecademy.controller.course.common :as course.common]
            [clojurecademy.controller.course.learn :as course.learn]
            [clojurecademy.util.resource :as resource.util :refer [check]]
            [clojurecademy.repl.util :as util]
            [clojurecademy.view.learn :as view.learn]
            [clojurecademy.view.courses :as view.courses]
            [clojurecademy.dsl.validator :as dsl.validator]
            [clojurecademy.controller.auth :as controller.auth]
            [clojurecademy.dao.user :as user.dao]
            [clojurecademy.dao.db :as db]
            [clojurecademy.util.logging :as log]
            [clojure.string :as str]
            [clojure.java.io :as io]
            [kezban.core :refer :all]))

(defn- not-enrolled?
  [course-id user]
  (if (or (= (:user/username user) (course.dao/find-course-owner-by-course-id course-id))
          (= (:user/role user) :admin))
    false
    (not (course.dao/enrolled? course-id (:db/id user)))))

(defn resume-course
  [ctx course-id]
  (let [user    (:user ctx)
        user-id (:db/id user)]
    (log/info (str "User: " (:user/username user) " resuming course."))
    (cond
      (not (course.dao/find-course-name-by-course-id course-id))
      {:does-not-exists? true}

      (not-enrolled? course-id user)
      {:not-enrolled? true :course-id course-id}

      :else
      (if (progress.dao/find-progress-id-by-course-id-and-user-id course-id user-id)
        (course.resume/get-tracked-latest-subject-map user course-id)
        (course.resume/get-first-subject-of-course-map user course-id)))))

(defn release-course
  [ctx course-id]
  (let [user    (:user ctx)
        user-id (:db/id user)]
    (log/info (str "User: " (:user/username user) " releasing course."))
    (cond
      (not (course.dao/find-course-name-by-course-id course-id))
      {:does-not-exists? true}

      (not= (course.dao/find-course-owner-by-course-id course-id) (:user/username user))
      {:not-owner? true}

      :else
      (do
        (db/transact {:db/id course-id
                      :course/latest-release (System/currentTimeMillis)
                      :course/released? true})
        {:success true}))))

(defn resume-sub-chapter
  [ctx sub-chapter-id]
  (let [course-id (course.dao/find-course-id-by-sub-chapter-id-non-active-also sub-chapter-id)
        user      (:user ctx)
        user-id   (:db/id user)
        release-t (some->> course-id (course.common/get-release user-id))]
    (log/info (str "User: " (:user/username user) " resuming course via sub chapter."))
    (cond
      (or (not course-id)
          (not (sub-chapter.dao/find-sub-chapter-name-by-sub-chapter-id release-t sub-chapter-id))
          (not (course.dao/find-course-name-by-course-id course-id)))
      {:does-not-exists? true}

      (not-enrolled? course-id user)
      {:not-enrolled? true :course-id course-id}

      :else
      (if (progress.dao/find-progress-id-by-sub-chapter-id-and-user-id sub-chapter-id user-id)
        (course.resume/get-tracked-latest-subject-of-sub-chapter-map user sub-chapter-id)
        (course.resume/get-first-subject-of-sub-chapter-map user sub-chapter-id course-id)))))

(defn subject
  [ctx subject-id]
  (let [course-id (course.dao/find-course-id-by-subject-id-non-active-also subject-id)
        user      (:user ctx)
        user-id   (:db/id user)
        release-t (some->> course-id (course.common/get-release user-id))]
    (log/info (str "User: " (:user/username user) " resuming course via subject."))
    (cond
      (or (not course-id)
          (not (subject.dao/find-subject-name-by-subject-id release-t subject-id))
          (not (course.dao/find-course-name-by-course-id course-id)))
      {:does-not-exists? true}

      (not-enrolled? course-id user)
      {:not-enrolled? true :course-id course-id}

      (and (:instruction (subject.dao/collect-subject-and-childs release-t subject-id))
           (not (:progress/done? (progress.dao/find-progress-by-subject-id-and-user-id subject-id user-id)))
           (not (course.common/able-to-access-all-subjects? course-id user)))
      {:locked-subject true}

      :else
      (course.subject/get-subject user subject-id :course-finished?))))

(defn next-subject
  [ctx subject-id]
  (let [course-id (course.dao/find-course-id-by-subject-id-non-active-also subject-id)
        user      (:user ctx)
        user-id   (:db/id user)
        release-t (some->> course-id (course.common/get-release user-id))]
    (log/info (str "User: " (:user/username user) " requested next subject."))
    (cond
      (or (not course-id)
          (not (subject.dao/find-subject-name-by-subject-id release-t subject-id))
          (not (course.dao/find-course-name-by-course-id course-id)))
      {:does-not-exists? true}

      (not-enrolled? course-id user)
      {:not-enrolled? true :course-id course-id}

      (and (:instruction (subject.dao/collect-subject-and-childs release-t subject-id))
           (not (:progress/done? (progress.dao/find-progress-by-subject-id-and-user-id subject-id user-id)))
           (not (course.common/able-to-access-all-subjects? course-id user)))
      {:locked-subject true}

      :else
      (course.subject/get-subject user (course.subject/get-next-subject-id release-t subject-id) :course-finished?))))

(defn pre-subject
  [ctx subject-id]
  (let [course-id (course.dao/find-course-id-by-subject-id-non-active-also subject-id)
        user      (:user ctx)
        user-id   (:db/id user)
        release-t (some->> course-id (course.common/get-release user-id))]
    (log/info (str "User: " (:user/username user) " requested pre subject."))
    (cond
      (or (not course-id)
          (not (subject.dao/find-subject-name-by-subject-id release-t subject-id))
          (not (course.dao/find-course-name-by-course-id course-id)))
      {:does-not-exists? true}

      (not-enrolled? course-id user)
      {:not-enrolled? true :course-id course-id}

      :else
      (course.subject/get-subject user (course.subject/get-pre-subject-id release-t subject-id) :no-pre-left?))))

(defn eval-code
  [ctx]
  (let [user               (:user ctx)
        _                  (log/info (str "Eval requested from: " (:user/username user)))
        d                  (resource.util/convert-data-map (:request-data ctx))
        subject-id         (:subject-id d)
        course-id          (course.dao/find-course-id-by-subject-id-non-active-also subject-id)
        release-t          (course.common/get-release (:db/id user) course-id)
        subject-and-childs (subject.dao/collect-subject-and-childs release-t subject-id)
        subject            (course.eval/create-subject-structure subject-and-childs)
        result             (course.eval/get-result user subject d)]
    {:result result}))

(defn eval-repl-code
  [ctx]
  (let [username (-> ctx :user :user/username)
        _        (log/info (str "Repl Eval requested from: " username))
        d        (resource.util/convert-data-map (:request-data ctx))]
    {:result (course.eval/get-repl-result username (:client-code d))}))

(defn return-execution-result
  [c]
  (let [result (:result c)]
    (update-in result [:code-body :result] (constantly (-> result :code-body :result str)))))

(defn- print-result
  [r]
  (cond
    (nil? r)  "=> nil"
    (lazy? r) (str "=> " (pr-str r))
    :else     (str "=> " r)))

(defn- print-new-line-if-needed
  [x]
  (if (str/ends-with? x "\n") x (str x "\n")))

(defn return-repl-execution-result
  [c]
  (let [result (:result c)]
    (if (:error result)
      result
      {:out-str (apply str (reduce (fn [v r]
                                     (if (str/blank? (:str r))
                                       (conj v (str (print-result (:result r)) "\n\n"))
                                       (conj v (str (print-new-line-if-needed (:str r)) (print-result (:result r)) "\n\n"))))
                                   []
                                   (:results result)))
       :err-str (:err-str result)})))

(defn start-course
  [ctx course-id]
  (if (or (not (course.dao/find-course-name-by-course-id course-id))
          (not (course.dao/find-course-released?-by-course-id course-id)))
    (util/runtime-ex "Course does not exist or has not been released yet.")
    (let [user-id  (-> ctx :user :db/id)
          username (user.dao/find-username-by-user-id user-id)]
      (course.dao/enroll-user-to-course course-id user-id)
      (log/info (str username " started course id: " course-id)))))

(defn course-syllabus
  [ctx course-id]
  (view.learn/syllabus (assoc (course.learn/get-user-progress-map-for-syllabus ctx course-id) :course-id course-id)))

(defn course-overview
  [ctx course-id]
  (view.learn/overview (assoc (course.learn/get-user-progress-map-for-overview ctx course-id) :course-id course-id)))

(defn- get-latest-updated-course-id
  [user-id enrolled-course-ids]
  (->> enrolled-course-ids
       (progress.dao/find-progresses-last-updates-by-user-id-and-course-ids user-id)
       (progress.dao/find-progress-id-by-user-id-and-last-updated user-id)
       course.dao/find-course-id-by-progress-id))

(defn learn-dispatcher
  [ctx]
  (if-let [user (-> ctx resource.util/authorized? :user)]
    (if-let [enrolled-course-ids (seq (map :db/id (:user/courses user)))]
      (if-not (progress.dao/find-any-progress-id-by-user-id (:db/id user))
        [true (str "/courses/" (last enrolled-course-ids))]
        [true (str "/courses/" (get-latest-updated-course-id (:db/id user) enrolled-course-ids))])
      [true (str "/courses/" (-> (course.dao/find-all-released-course-ids) shuffle first))])
    [true "/"]))

(defn course-dispatcher
  [ctx course-id]
  (if-let* [user-id (-> ctx resource.util/authorized? :user :db/id)
            _       (or (course.dao/enrolled? course-id user-id)
                        (course.dao/owner? user-id course-id))]
    [true (str "/courses/" course-id "/learn/syllabus")]
    [true (str "/courses/" course-id "/learn/overview")]))

(defn- sort-by-user-counts-desc
  [courses]
  (sort-by #(nth % 3) #(compare %2 %1) courses))

(defn get-all-courses-for-user
  [user-id]
  (let [courses                 (sort-by-user-counts-desc (course.dao/find-all-released-courses))
        courses-with-percentage (course.learn/get-enrolled-courses user-id)]
    (reduce (fn [v course]
              (if-let [percentage (some #(when (= (:course-id %) (first course)) (:course-percentage %)) courses-with-percentage)]
                (conj v (conj course percentage))
                (conj v course)))
            []
            courses)))

(defn courses
  [ctx]
  (if-let [user (-> ctx resource.util/authorized? :user)]
    (view.courses/all-courses true (get-all-courses-for-user (:db/id user)))
    (view.courses/all-courses false (sort-by-user-counts-desc (course.dao/find-all-released-courses)))))

(defn- get-file
  [ctx]
  (if-let [file (-> ctx :request :params (get "file") :tempfile)]
    file
    (util/runtime-ex "Apparently you could not upload the file properly. Try again please.")))

(defn- validate-form
  [content]
  (try
    (read-string content)
    (catch Exception e
      (util/runtime-ex "The file format is not proper lisp format!"))))

(defn- validate-course
  [course]
  (dsl.validator/validate course)
  course)

(defn- validate-course-map-and-get-helper-fns
  [[course & helper-fns]]
  {:course (validate-course course) :helper-fns helper-fns})

(defn- validate-content
  [content]
  (if (str/blank? content)
    (util/runtime-ex "Apparently you uploaded an empty file!")
    (-> content validate-form validate-course-map-and-get-helper-fns)))

(defn- persist-course-and-helper-fns
  [username-or-email course helper-fns]
  (try
    (let [user      (user.dao/find-user-by-username-or-email username-or-email)
          owner     (:user/username user)
          course-id (db/persist-course course owner)]
      (db/transact {:db/id course-id
                    :course/helper-fns (if (seq helper-fns) (str helper-fns) "()")
                    :course/users (:db/id user)
                    :course/owner owner
                    :course/latest-commit (System/currentTimeMillis)})
      (db/transact {:db/id (:db/id user) :user/courses course-id}))
    (catch Throwable t
      (log/error "Course could not get persisted!" t)
      (throw t))))

(defn- check-email-activation
  [username-or-email]
  (check (:user/email-activated? (user.dao/find-user-by-username-or-email username-or-email))
         "You need to activate your account. Go to your profile and send an activation mail."))

(defn- check-auth
  [ctx]
  (let [headers           (-> ctx :request :headers)
        username-or-email (get headers "username-or-email")
        password          (get headers "password")]
    (when (or (str/blank? username-or-email) (str/blank? password))
      (util/runtime-ex "Username/e-mail and password don't match."))
    (controller.auth/check-credentials {:username-or-email username-or-email :password password})
    (check-email-activation username-or-email)
    username-or-email))

(defn upload-course
  [ctx]
  (let [username-or-email (check-auth ctx)
        file              (get-file ctx)]
    (with-open [rdr (io/reader file)]
      (let [content (->> rdr line-seq (str/join "\n"))
            {:keys [course helper-fns]} (validate-content content)]
        (persist-course-and-helper-fns username-or-email course helper-fns)))))
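
;; A rough usage sketch of the REPL result formatting helpers above, kept in a
;; `comment` block so it never runs at load time. The input map shape is inferred
;; from `return-repl-execution-result` itself; the exact maps produced by
;; `course.eval/get-repl-result` are not shown in this namespace, so treat the
;; sample data below as an illustrative assumption rather than the real payload.
(comment
  ;; Formatting a single evaluation result:
  (print-result nil)   ;; => "=> nil"
  (print-result 42)    ;; => "=> 42"

  ;; Formatting a whole REPL execution, assuming this result shape:
  (return-repl-execution-result
    {:result {:results [{:str "hello\n" :result 42}
                        {:str ""        :result nil}]
              :err-str ""}})
  ;; => {:out-str "hello\n=> 42\n\n=> nil\n\n" :err-str ""}
  )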

;; ----------------------------------------------------------------------
;; src/ctac_bot/commands.clj (repository: pmonks/ctac-bot)
;; ----------------------------------------------------------------------

;
; Copyright © 2021 Peter Monks
;
; Licensed under the Apache License, Version 2.0 (the "License");
; you may not use this file except in compliance with the License.
; You may obtain a copy of the License at
;
;     http://www.apache.org/licenses/LICENSE-2.0
;
; Unless required by applicable law or agreed to in writing, software
; distributed under the License is distributed on an "AS IS" BASIS,
; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
; See the License for the specific language governing permissions and
; limitations under the License.
;
; SPDX-License-Identifier: Apache-2.0
;

(ns ctac-bot.commands
  (:require [clojure.string :as s]
            [clojure.instant :as inst]
            [clojure.tools.logging :as log]
            [java-time :as tm]
            [discljord.formatting :as df]
            [discljord-utils.util :as u]
            [discljord-utils.message-util :as mu]
            [bot.config :as cfg]
            [bot.commands :as cmd]))

(defn ^{:bot-command "lookup"} lookup-command!
  "Provides links that look up a person on various internet platforms e.g. !lookup Santa Claus"
  [^String args event-data]
  (if (not (s/blank? args))
    (let [name-qs (java.net.URLEncoder/encode args "UTF-8")]
      (mu/create-message! (:discord-message-channel cfg/config)
                          (:channel-id event-data)
                          :embed (assoc (cmd/embed-template)
                                        :description (str "Here's what the internet has to say about " args ":\n"
                                                          " • [Nextdoor](https://nextdoor.com/search/neighbors/?query=" name-qs ")\n"
                                                          " • [Facebook](https://www.facebook.com/search/people/?q=" name-qs ")\n"
                                                          " • [LinkedIn](https://www.linkedin.com/search/results/people/?keywords=" name-qs ")\n"
                                                          " • [Google](https://www.google.com/search?q=amador+county+" name-qs ")"))))
    (mu/create-message! (:discord-message-channel cfg/config)
                        (:channel-id event-data)
                        :embed (assoc (cmd/embed-template)
                                      :description (str "I need to know who you want to look up. For example: `" cmd/prefix "lookup Santa Claus`")))))

(defn ^{:bot-command "move"} move-command!
  "Moves a conversation to the specified channel e.g. !move #memes"
  [args event-data]
  (when (not (mu/direct-message? event-data))   ; Only respond if the message was sent to a real channel in a server (i.e. not in a DM)
    (let [guild-id                (:guild-id event-data)
          channel-id              (:channel-id event-data)
          discord-message-channel (:discord-message-channel cfg/config)]
      (if (not (s/blank? args))
        (if-let [target-channel-id (second (re-find df/channel-mention args))]
          (if (not= channel-id target-channel-id)
            (let [move-message-id    (:id event-data)
                  _                  (mu/delete-message! discord-message-channel channel-id move-message-id)   ; Don't delete the original message unless we've validated everything
                  target-message-id  (:id (mu/create-message! discord-message-channel
                                                              target-channel-id
                                                              :embed (assoc (cmd/embed-template)
                                                                            :description (str "Continuing the conversation from " (mu/channel-link channel-id) "..."))))
                  target-message-url (mu/message-url guild-id target-channel-id target-message-id)
                  source-message-id  (:id (mu/create-message! discord-message-channel
                                                              channel-id
                                                              :embed (assoc (cmd/embed-template)
                                                                            :description (str "Let's continue this conversation in " (mu/channel-link target-channel-id) " ([link](" target-message-url "))."))))
                  source-message-url (mu/message-url guild-id channel-id source-message-id)]
              (mu/edit-message! discord-message-channel
                                target-channel-id
                                target-message-id
                                :embed (assoc (cmd/embed-template)
                                              :description (str "Continuing the conversation from " (mu/channel-link channel-id) " ([link](" source-message-url "))..."))))
            (log/info "Cannot move a conversation to the same channel."))
          (log/warn "Could not find target channel in move command."))
        (log/warn "move-command! arguments missing a target channel.")))))

(defn ^{:bot-command "epoch"} epoch-command!
  "Displays the 'epoch seconds' value of the given date (in RFC-3339 format), or now if no value is provided."
  [args event-data]
  (let [channel-id (:channel-id event-data)]
    (try
      (let [d     (if (s/blank? args) (java.util.Date.) (inst/read-instant-date args))
            epoch (long (/ (.getTime ^java.util.Date d) 1000))]
        (mu/create-message! (:discord-message-channel cfg/config)
                            channel-id
                            :embed (assoc (cmd/embed-template) :description (str "`" epoch "`"))))
      (catch RuntimeException re
        (mu/create-message! (:discord-message-channel cfg/config)
                            channel-id
                            :embed (assoc (cmd/embed-template) :description (.getMessage re)))))))

(defn ^{:bot-command "dmath"} dmath-command!
  "Displays the result of the given date math expression e.g. now + 1 day"
  [args event-data]
  (let [channel-id (:channel-id event-data)]
    (try
      (let [[b o v u]  (s/split (s/lower-case (s/trim args)) #"\s+")
            base       (if (= b "now") (.getEpochSecond (tm/instant)) (u/parse-int b))
            op         (case o
                         "-" -
                         "+" +
                         nil)
            val        (u/parse-int v)
            multiplier (case u
                         ("m" "min" "mins" "minutes") 60
                         ("h" "hr" "hrs" "hours")     (* 60 60)
                         ("d" "day" "days")           (* 60 60 24)
                         ("w" "wk" "wks" "weeks")     (* 60 60 24 7)
                         1)]   ; Default to seconds
        (if base
          (if (and op val multiplier)   ; Everything was provided - evaluate the expression
            (mu/create-message! (:discord-message-channel cfg/config)
                                channel-id
                                :embed (assoc (cmd/embed-template) :description (str "`" (op base (* val multiplier)) "`")))
            (if-not (or op val)   ; Only base was provided - display it
              (mu/create-message! (:discord-message-channel cfg/config)
                                  channel-id
                                  :embed (assoc (cmd/embed-template) :description (str "`" base "`")))
              (throw (ex-info "Op, val or multiplier not provided" {}))))
          (throw (ex-info "Base not provided" {}))))
      (catch Exception _
        (mu/create-message! (:discord-message-channel cfg/config)
                            channel-id
                            :embed (assoc (cmd/embed-template) :description (str "Unable to parse date math expression: `" args "`")))))))
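
;; A small sketch of the date-math grammar that `dmath-command!` parses, based only
;; on the parsing code above. The expressions are illustrative; the actual Discord
;; plumbing (config, channels, embeds) is assumed to be wired up elsewhere.
(comment
  ;; "<base> <op> <value> <unit>", where base is "now" or an epoch-seconds integer:
  ;;   "now + 1 day"        -> now plus (* 1 60 60 24) seconds
  ;;   "1700000000 - 2 w"   -> 1700000000 minus (* 2 60 60 24 7) = 1698790400
  ;;   "1700000000"         -> just echoes 1700000000
  (* 2 60 60 24 7)   ;; => 1209600, the number of seconds subtracted for "2 w"
  )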

;; ----------------------------------------------------------------------
;; src/main/clojure/org/lambdaroyal/util/localcache.clj (repository: gixxi/clojure-util)
;; ----------------------------------------------------------------------

;; Aspect Local Caching. Provides shared access to a cached result of a function call.
;; The call is blocking if the function call is not yet realized, otherwise the call returns immediately with the
;; preprocessed result.
;;
;; Author: Christian Meichsner

(ns org.lambdaroyal.util.localcache
  (:gen-class))

(def #^{:doc "Global Cache for function cache"}
  global-cache (atom {}))

(defn clear
  "Clears a certain association given by the key from the global cache"
  [k]
  (swap! global-cache dissoc k))

(defn clear-all
  "Clears all associations from the global cache"
  []
  (swap! global-cache empty))

(defn cache
  "Returns a memoized version of the application of the provided function. This version is
  associated with the provided key. Repeated calls for the same key return the previously
  computed application or block until the function application returns.
  Usage: (:key (cache :cacheName f args)) will return a value associated by key :key from the
  cached version of the function application (f args)"
  [k f & args]
  (let [cache-state @global-cache
        hit-or-miss (fn [& _]
                      (if (contains? cache-state k)
                        (identity cache-state)
                        (do
                          ;;(println (str "add key " k " to map " cache-state " function application " f " args " args))
                          (assoc cache-state k (delay (apply f args))))))
        swapped     (swap! global-cache hit-or-miss f args)]
    @(get swapped k)))
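
;; A minimal usage sketch of the cache above; `slow-lookup` is a hypothetical,
;; deliberately slow function standing in for whatever expensive call you want to
;; memoize. Concurrent callers of the same key share one computation and block
;; until its delay is realized.
(comment
  (defn slow-lookup [id]
    (Thread/sleep 2000)
    {:key (str "value-for-" id)})

  ;; First call computes (and blocks ~2s); subsequent calls return immediately.
  (:key (cache :my-cache slow-lookup 42))   ;; => "value-for-42"

  ;; Drop the cached entry so the next call recomputes it.
  (clear :my-cache))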

;; ----------------------------------------------------------------------
;; version-2/test/ip_service/test/ip.clj (repository: killme2008/ip-service)
;; ----------------------------------------------------------------------

(ns ip-service.test.ip
  (:require [clojure.test :refer :all]
            [ip-service.ip :refer :all]
            [clojure.data.json :as json]
            [ring.mock.request :as mock]))

(deftest test-find-geography
  (testing "find-geography"
    (is (thrown-with-msg? clojure.lang.ExceptionInfo #"llegal address,only supports IPv4."
                          (find-geography nil)))
    (is (thrown-with-msg? clojure.lang.ExceptionInfo #"llegal address,only supports IPv4."
                          (find-geography "localhost")))
    (is (= "中国\t江苏\t苏州\t" (find-geography "180.117.51.245")))
    (is (= "中国\t上海\t上海\t" (find-geography "116.227.225.210")))))

;; ----------------------------------------------------------------------
;; src/datoteka/storages.clj (repository: kiramclean/datoteka)
;; ----------------------------------------------------------------------

;; Copyright (c) 2015-2017 Andrey Antukh <[email protected]>
;; All rights reserved.
;;
;; Redistribution and use in source and binary forms, with or without
;; modification, are permitted provided that the following conditions are met:
;;
;; * Redistributions of source code must retain the above copyright notice, this
;;   list of conditions and the following disclaimer.
;;
;; * Redistributions in binary form must reproduce the above copyright notice,
;;   this list of conditions and the following disclaimer in the documentation
;;   and/or other materials provided with the distribution.
;;
;; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
;; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
;; IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
;; DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
;; FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
;; DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
;; SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
;; CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
;; OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
;; OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

(ns datoteka.storages
  "A datoteka abstraction layer."
  (:require [datoteka.proto :as pt]))

(defn save
  "Persists a file or bytes in the storage. This function returns a relative
  path where the file is saved. The final file path can be different from the
  one provided to this function; the behavior is totally dependent on the
  storage implementation."
  [storage path content]
  (pt/-save storage path content))

(defn lookup
  "Resolve the provided relative path in the storage and return the local
  filesystem absolute path to it. This method may not be implemented by all
  storage backends."
  [storage path]
  (pt/-lookup storage path))

(defn exists?
  "Check if a relative `path` exists in the storage."
  [storage path]
  (pt/-exists? storage path))

(defn delete
  "Delete a file from the storage."
  [storage path]
  (pt/-delete storage path))

(defn clear!
  "Clear all contents of the storage."
  [storage]
  (pt/-clear storage))

(defn public-url
  [storage path]
  (pt/-public-uri storage path))

(defn storage?
  "Return `true` if `v` implements IStorage protocol"
  [v]
  (satisfies? pt/IStorage v))
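
;; A minimal, hedged usage sketch of the wrapper functions above. `storage` stands
;; for any value satisfying datoteka.proto/IStorage; constructing one depends on a
;; concrete backend namespace that is not part of this file, so both
;; `get-a-storage-backend-somehow` and `some-bytes` below are hypothetical
;; placeholders.
(comment
  (let [storage (get-a-storage-backend-somehow)]   ;; hypothetical constructor
    (when (storage? storage)
      (let [path (save storage "avatars/user1.png" some-bytes)]   ;; some-bytes is assumed
        (exists? storage path)      ;; => true
        (public-url storage path)   ;; backend-specific URI, when supported
        (delete storage path)))))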
system-int-test/test/cmr/system_int_test/search/facets/facet_responses.clj
indiejames/Common-Metadata-Repository
0
(ns cmr.system-int-test.search.facets.facet-responses "Contains vars with large facet response examples used in tests.") (def expected-v2-facets-apply-links "Expected facets to be returned in the facets v2 response. The structure of the v2 facet response is documented in https://wiki.earthdata.nasa.gov/display/CMR/Updated+facet+response. This response is generated for the search http://localhost:3003/collections.json?page_size=0&include_facets=v2 without any query parameters selected and with a couple of collections that have science keywords, projects, platforms, instruments, organizations, and processing levels in their metadata. This tests that the applied parameter is set to false correctly and that the generated links specify a a link to add each search parameter to apply that value to a search." {:title "Browse Collections", :type "group", :has_children true, :children [{:title "Keywords", :type "group", :applied false, :has_children true, :children [{:title "Popular", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?page_size=0&include_facets=v2&science_keywords_h%5B0%5D%5Btopic%5D=Popular"} :has_children true} {:title "Topic1", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?page_size=0&include_facets=v2&science_keywords_h%5B0%5D%5Btopic%5D=Topic1"}, :has_children true}]} {:title "Platforms", :type "group", :applied false, :has_children true, :children [{:title "diadem-1D", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?page_size=0&include_facets=v2&platform_h%5B%5D=diadem-1D"}, :has_children false} {:title "DMSP 5B/F3", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?page_size=0&include_facets=v2&platform_h%5B%5D=DMSP+5B%2FF3"}, :has_children false}]} {:title "Instruments", :type "group", :applied false, :has_children true, :children [{:title "ATM", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?page_size=0&include_facets=v2&instrument_h%5B%5D=ATM"}, :has_children false} {:title "lVIs", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?page_size=0&include_facets=v2&instrument_h%5B%5D=lVIs"}, :has_children false}]} {:title "Organizations", :type "group", :applied false, :has_children true, :children [{:title "DOI/USGS/CMG/WHSC", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?page_size=0&include_facets=v2&data_center_h%5B%5D=DOI%2FUSGS%2FCMG%2FWHSC"}, :has_children false}]} {:title "Projects", :type "group", :applied false, :has_children true, :children [{:title "proj1", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?page_size=0&include_facets=v2&project_h%5B%5D=proj1"}, :has_children false} {:title "PROJ2", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?page_size=0&include_facets=v2&project_h%5B%5D=PROJ2"}, :has_children false}]} {:title "Processing levels", :type "group", :applied false, :has_children true, :children [{:title "PL1", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?page_size=0&include_facets=v2&processing_level_id_h%5B%5D=PL1"}, :has_children false}]} {:title "Measurements", :type "group", :applied false, :has_children true, :children [{:title "Measurement1", :type "filter", :applied 
false, :count 2, :links {:apply "http://localhost:3003/collections.json?page_size=0&include_facets=v2&variables_h%5B0%5D%5Bmeasurement%5D=Measurement1"}, :has_children true} {:title "Measurement2", :type "filter", :applied false, :count 1, :links {:apply "http://localhost:3003/collections.json?page_size=0&include_facets=v2&variables_h%5B0%5D%5Bmeasurement%5D=Measurement2"}, :has_children true}]}]}) (def expected-v2-facets-remove-links "Expected facets to be returned in the facets v2 response for a search that includes all of the v2 facet terms: science keywords, projects, platforms, instruments, organizations, and processing levels. When running the applicable tests there are a couple of collections which contain these fields so that the search parameters are applied. This tests that the applied parameter is set to true correctly and that the generated links specify a link to remove each search parameter." {:title "Browse Collections", :type "group", :has_children true, :children [{:title "Keywords", :type "group", :applied true, :has_children true, :children [{:title "Popular", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?data_center_h=DOI%2FUSGS%2FCMG%2FWHSC&science_keywords_h%5B0%5D%5Bvariable_level_3%5D=Level1-3&science_keywords_h%5B0%5D%5Bvariable_level_1%5D=Level1-1&science_keywords_h%5B0%5D%5Bterm%5D=Term1&project_h=proj1&variables_h%5B0%5D%5Bvariable%5D=Variable1&instrument_h=ATM&science_keywords_h%5B1%5D%5Btopic%5D=Popular&page_size=0&science_keywords_h%5B0%5D%5Bvariable_level_2%5D=Level1-2&platform_h=DIADEM-1D&include_facets=v2&variables_h%5B0%5D%5Bmeasurement%5D=Measurement1&science_keywords_h%5B0%5D%5Bcategory%5D=Earth+Science&processing_level_id_h=PL1&science_keywords_h%5B0%5D%5Btopic%5D=Topic1"}, :has_children true} {:title "Topic1", :type "filter", :applied true, :count 2, :links {:remove "http://localhost:3003/collections.json?data_center_h=DOI%2FUSGS%2FCMG%2FWHSC&project_h=proj1&variables_h%5B0%5D%5Bvariable%5D=Variable1&instrument_h=ATM&page_size=0&platform_h=DIADEM-1D&include_facets=v2&variables_h%5B0%5D%5Bmeasurement%5D=Measurement1&science_keywords_h%5B0%5D%5Bcategory%5D=Earth+Science&processing_level_id_h=PL1"}, :has_children true, :children [{:title "Term1", :type "filter", :applied true, :count 2, :links {:remove "http://localhost:3003/collections.json?data_center_h=DOI%2FUSGS%2FCMG%2FWHSC&project_h=proj1&variables_h%5B0%5D%5Bvariable%5D=Variable1&instrument_h=ATM&page_size=0&platform_h=DIADEM-1D&include_facets=v2&variables_h%5B0%5D%5Bmeasurement%5D=Measurement1&science_keywords_h%5B0%5D%5Bcategory%5D=Earth+Science&processing_level_id_h=PL1&science_keywords_h%5B0%5D%5Btopic%5D=Topic1"}, :has_children true, :children [{:title "Level1-1", :type "filter", :applied true, :count 2, :links {:remove "http://localhost:3003/collections.json?data_center_h=DOI%2FUSGS%2FCMG%2FWHSC&science_keywords_h%5B0%5D%5Bterm%5D=Term1&project_h=proj1&variables_h%5B0%5D%5Bvariable%5D=Variable1&instrument_h=ATM&page_size=0&platform_h=DIADEM-1D&include_facets=v2&variables_h%5B0%5D%5Bmeasurement%5D=Measurement1&science_keywords_h%5B0%5D%5Bcategory%5D=Earth+Science&processing_level_id_h=PL1&science_keywords_h%5B0%5D%5Btopic%5D=Topic1"}, :has_children true, :children [{:title "Level1-2", :type "filter", :applied true, :count 2, :links {:remove 
"http://localhost:3003/collections.json?data_center_h=DOI%2FUSGS%2FCMG%2FWHSC&science_keywords_h%5B0%5D%5Bvariable_level_1%5D=Level1-1&science_keywords_h%5B0%5D%5Bterm%5D=Term1&project_h=proj1&variables_h%5B0%5D%5Bvariable%5D=Variable1&instrument_h=ATM&page_size=0&platform_h=DIADEM-1D&include_facets=v2&variables_h%5B0%5D%5Bmeasurement%5D=Measurement1&science_keywords_h%5B0%5D%5Bcategory%5D=Earth+Science&processing_level_id_h=PL1&science_keywords_h%5B0%5D%5Btopic%5D=Topic1"}, :has_children true, :children [{:title "Level1-3", :type "filter", :applied true, :count 2, :links {:remove "http://localhost:3003/collections.json?data_center_h=DOI%2FUSGS%2FCMG%2FWHSC&science_keywords_h%5B0%5D%5Bvariable_level_1%5D=Level1-1&science_keywords_h%5B0%5D%5Bterm%5D=Term1&project_h=proj1&variables_h%5B0%5D%5Bvariable%5D=Variable1&instrument_h=ATM&page_size=0&science_keywords_h%5B0%5D%5Bvariable_level_2%5D=Level1-2&platform_h=DIADEM-1D&include_facets=v2&variables_h%5B0%5D%5Bmeasurement%5D=Measurement1&science_keywords_h%5B0%5D%5Bcategory%5D=Earth+Science&processing_level_id_h=PL1&science_keywords_h%5B0%5D%5Btopic%5D=Topic1"}, :has_children false}]}]}]}]}]} {:title "Platforms", :type "group", :applied true, :has_children true, :children [{:title "diadem-1D", :type "filter", :applied true, :count 2, :links {:remove "http://localhost:3003/collections.json?data_center_h=DOI%2FUSGS%2FCMG%2FWHSC&science_keywords_h%5B0%5D%5Bvariable_level_3%5D=Level1-3&science_keywords_h%5B0%5D%5Bvariable_level_1%5D=Level1-1&science_keywords_h%5B0%5D%5Bterm%5D=Term1&project_h=proj1&variables_h%5B0%5D%5Bvariable%5D=Variable1&instrument_h=ATM&page_size=0&science_keywords_h%5B0%5D%5Bvariable_level_2%5D=Level1-2&include_facets=v2&variables_h%5B0%5D%5Bmeasurement%5D=Measurement1&science_keywords_h%5B0%5D%5Bcategory%5D=Earth+Science&processing_level_id_h=PL1&science_keywords_h%5B0%5D%5Btopic%5D=Topic1"}, :has_children false} {:title "DMSP 5B/F3", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?data_center_h=DOI%2FUSGS%2FCMG%2FWHSC&science_keywords_h%5B0%5D%5Bvariable_level_3%5D=Level1-3&science_keywords_h%5B0%5D%5Bvariable_level_1%5D=Level1-1&science_keywords_h%5B0%5D%5Bterm%5D=Term1&project_h=proj1&variables_h%5B0%5D%5Bvariable%5D=Variable1&instrument_h=ATM&page_size=0&science_keywords_h%5B0%5D%5Bvariable_level_2%5D=Level1-2&platform_h=DIADEM-1D&include_facets=v2&variables_h%5B0%5D%5Bmeasurement%5D=Measurement1&science_keywords_h%5B0%5D%5Bcategory%5D=Earth+Science&platform_h%5B%5D=DMSP+5B%2FF3&processing_level_id_h=PL1&science_keywords_h%5B0%5D%5Btopic%5D=Topic1"}, :has_children false}]} {:title "Instruments", :type "group", :applied true, :has_children true, :children [{:title "ATM", :type "filter", :applied true, :count 2, :links {:remove "http://localhost:3003/collections.json?data_center_h=DOI%2FUSGS%2FCMG%2FWHSC&science_keywords_h%5B0%5D%5Bvariable_level_3%5D=Level1-3&science_keywords_h%5B0%5D%5Bvariable_level_1%5D=Level1-1&science_keywords_h%5B0%5D%5Bterm%5D=Term1&project_h=proj1&variables_h%5B0%5D%5Bvariable%5D=Variable1&page_size=0&science_keywords_h%5B0%5D%5Bvariable_level_2%5D=Level1-2&platform_h=DIADEM-1D&include_facets=v2&variables_h%5B0%5D%5Bmeasurement%5D=Measurement1&science_keywords_h%5B0%5D%5Bcategory%5D=Earth+Science&processing_level_id_h=PL1&science_keywords_h%5B0%5D%5Btopic%5D=Topic1"}, :has_children false} {:title "lVIs", :type "filter", :applied false, :count 2, :links {:apply 
"http://localhost:3003/collections.json?data_center_h=DOI%2FUSGS%2FCMG%2FWHSC&science_keywords_h%5B0%5D%5Bvariable_level_3%5D=Level1-3&science_keywords_h%5B0%5D%5Bvariable_level_1%5D=Level1-1&science_keywords_h%5B0%5D%5Bterm%5D=Term1&project_h=proj1&instrument_h%5B%5D=lVIs&variables_h%5B0%5D%5Bvariable%5D=Variable1&instrument_h=ATM&page_size=0&science_keywords_h%5B0%5D%5Bvariable_level_2%5D=Level1-2&platform_h=DIADEM-1D&include_facets=v2&variables_h%5B0%5D%5Bmeasurement%5D=Measurement1&science_keywords_h%5B0%5D%5Bcategory%5D=Earth+Science&processing_level_id_h=PL1&science_keywords_h%5B0%5D%5Btopic%5D=Topic1"}, :has_children false}]} {:title "Organizations", :type "group", :applied true, :has_children true, :children [{:title "DOI/USGS/CMG/WHSC", :type "filter", :applied true, :count 2, :links {:remove "http://localhost:3003/collections.json?science_keywords_h%5B0%5D%5Bvariable_level_3%5D=Level1-3&science_keywords_h%5B0%5D%5Bvariable_level_1%5D=Level1-1&science_keywords_h%5B0%5D%5Bterm%5D=Term1&project_h=proj1&variables_h%5B0%5D%5Bvariable%5D=Variable1&instrument_h=ATM&page_size=0&science_keywords_h%5B0%5D%5Bvariable_level_2%5D=Level1-2&platform_h=DIADEM-1D&include_facets=v2&variables_h%5B0%5D%5Bmeasurement%5D=Measurement1&science_keywords_h%5B0%5D%5Bcategory%5D=Earth+Science&processing_level_id_h=PL1&science_keywords_h%5B0%5D%5Btopic%5D=Topic1"}, :has_children false}]} {:title "Projects", :type "group", :applied true, :has_children true, :children [{:title "proj1", :type "filter", :applied true, :count 2, :links {:remove "http://localhost:3003/collections.json?data_center_h=DOI%2FUSGS%2FCMG%2FWHSC&science_keywords_h%5B0%5D%5Bvariable_level_3%5D=Level1-3&science_keywords_h%5B0%5D%5Bvariable_level_1%5D=Level1-1&science_keywords_h%5B0%5D%5Bterm%5D=Term1&variables_h%5B0%5D%5Bvariable%5D=Variable1&instrument_h=ATM&page_size=0&science_keywords_h%5B0%5D%5Bvariable_level_2%5D=Level1-2&platform_h=DIADEM-1D&include_facets=v2&variables_h%5B0%5D%5Bmeasurement%5D=Measurement1&science_keywords_h%5B0%5D%5Bcategory%5D=Earth+Science&processing_level_id_h=PL1&science_keywords_h%5B0%5D%5Btopic%5D=Topic1"}, :has_children false} {:title "PROJ2", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?data_center_h=DOI%2FUSGS%2FCMG%2FWHSC&science_keywords_h%5B0%5D%5Bvariable_level_3%5D=Level1-3&science_keywords_h%5B0%5D%5Bvariable_level_1%5D=Level1-1&science_keywords_h%5B0%5D%5Bterm%5D=Term1&project_h=proj1&variables_h%5B0%5D%5Bvariable%5D=Variable1&instrument_h=ATM&project_h%5B%5D=PROJ2&page_size=0&science_keywords_h%5B0%5D%5Bvariable_level_2%5D=Level1-2&platform_h=DIADEM-1D&include_facets=v2&variables_h%5B0%5D%5Bmeasurement%5D=Measurement1&science_keywords_h%5B0%5D%5Bcategory%5D=Earth+Science&processing_level_id_h=PL1&science_keywords_h%5B0%5D%5Btopic%5D=Topic1"}, :has_children false}]} {:title "Processing levels", :type "group", :applied true, :has_children true, :children [{:title "PL1", :type "filter", :applied true, :count 2, :links {:remove 
"http://localhost:3003/collections.json?data_center_h=DOI%2FUSGS%2FCMG%2FWHSC&science_keywords_h%5B0%5D%5Bvariable_level_3%5D=Level1-3&science_keywords_h%5B0%5D%5Bvariable_level_1%5D=Level1-1&science_keywords_h%5B0%5D%5Bterm%5D=Term1&project_h=proj1&variables_h%5B0%5D%5Bvariable%5D=Variable1&instrument_h=ATM&page_size=0&science_keywords_h%5B0%5D%5Bvariable_level_2%5D=Level1-2&platform_h=DIADEM-1D&include_facets=v2&variables_h%5B0%5D%5Bmeasurement%5D=Measurement1&science_keywords_h%5B0%5D%5Bcategory%5D=Earth+Science&science_keywords_h%5B0%5D%5Btopic%5D=Topic1"}, :has_children false}]} {:title "Measurements", :type "group", :applied true, :has_children true, :children [{:title "Measurement1", :type "filter", :applied true, :count 2, :links {:remove "http://localhost:3003/collections.json?data_center_h=DOI%2FUSGS%2FCMG%2FWHSC&science_keywords_h%5B0%5D%5Bvariable_level_3%5D=Level1-3&science_keywords_h%5B0%5D%5Bvariable_level_1%5D=Level1-1&science_keywords_h%5B0%5D%5Bterm%5D=Term1&project_h=proj1&instrument_h=ATM&page_size=0&science_keywords_h%5B0%5D%5Bvariable_level_2%5D=Level1-2&platform_h=DIADEM-1D&include_facets=v2&science_keywords_h%5B0%5D%5Bcategory%5D=Earth+Science&processing_level_id_h=PL1&science_keywords_h%5B0%5D%5Btopic%5D=Topic1"}, :has_children true, :children [{:title "Variable1", :type "filter", :applied true, :count 2, :links {:remove "http://localhost:3003/collections.json?data_center_h=DOI%2FUSGS%2FCMG%2FWHSC&science_keywords_h%5B0%5D%5Bvariable_level_3%5D=Level1-3&science_keywords_h%5B0%5D%5Bvariable_level_1%5D=Level1-1&science_keywords_h%5B0%5D%5Bterm%5D=Term1&project_h=proj1&instrument_h=ATM&page_size=0&science_keywords_h%5B0%5D%5Bvariable_level_2%5D=Level1-2&platform_h=DIADEM-1D&include_facets=v2&variables_h%5B0%5D%5Bmeasurement%5D=Measurement1&science_keywords_h%5B0%5D%5Bcategory%5D=Earth+Science&processing_level_id_h=PL1&science_keywords_h%5B0%5D%5Btopic%5D=Topic1"}, :has_children false}]} {:title "Measurement2", :type "filter", :applied false, :count 1, :links {:apply "http://localhost:3003/collections.json?data_center_h=DOI%2FUSGS%2FCMG%2FWHSC&science_keywords_h%5B0%5D%5Bvariable_level_3%5D=Level1-3&variables_h%5B1%5D%5Bmeasurement%5D=Measurement2&science_keywords_h%5B0%5D%5Bvariable_level_1%5D=Level1-1&science_keywords_h%5B0%5D%5Bterm%5D=Term1&project_h=proj1&variables_h%5B0%5D%5Bvariable%5D=Variable1&instrument_h=ATM&page_size=0&science_keywords_h%5B0%5D%5Bvariable_level_2%5D=Level1-2&platform_h=DIADEM-1D&include_facets=v2&variables_h%5B0%5D%5Bmeasurement%5D=Measurement1&science_keywords_h%5B0%5D%5Bcategory%5D=Earth+Science&processing_level_id_h=PL1&science_keywords_h%5B0%5D%5Btopic%5D=Topic1"}, :has_children true}]}]}) (def partial-v2-facets "Expected facet results with some facets present and some not included because there were not any matching collections for those facets. This tests that the generated facets correctly do not include any facets in the response which we do not apply for the search. In this example the projects, platforms, instruments, and organizations are omitted from the facet response." 
{:title "Browse Collections", :type "group", :has_children true, :children [{:title "Keywords", :type "group", :applied false, :has_children true, :children [{:title "Popular", :type "filter", :applied false, :count 1, :links {:apply "http://localhost:3003/collections.json?page_size=0&include_facets=v2&science_keywords_h%5B0%5D%5Btopic%5D=Popular"}, :has_children true}]} {:title "Processing levels", :type "group", :applied false, :has_children true, :children [{:title "PL1", :type "filter", :applied false, :count 1, :links {:apply "http://localhost:3003/collections.json?page_size=0&include_facets=v2&processing_level_id_h%5B%5D=PL1"}, :has_children false}]}]}) (def expected-facets-with-no-matching-collections "Facet response when searching against faceted fields which have 0 matching collections. Each of the search terms will be included in the facet response along with a remove link so that the user can remove that search term from their query." {:title "Browse Collections", :type "group", :has_children true, :children [{:title "Keywords", :applied true, :children [{:title "Topic1", :type "filter", :applied true, :count 0, :links {:remove "http://localhost:3003/collections.json?data_center_h=DOI%2FUSGS%2FCMG%2FWHSC&science_keywords_h%5B0%5D%5Bvariable_level_3%5D=Level1-3&science_keywords_h%5B0%5D%5Bvariable_level_1%5D=Level1-1&science_keywords_h%5B0%5D%5Bterm%5D=Term1&project_h=proj1&instrument_h=ATM&keyword=MODIS&page_size=0&science_keywords_h%5B0%5D%5Bvariable_level_2%5D=Level1-2&platform_h=ASTER-p0&include_facets=v2&science_keywords_h%5B0%5D%5Bcategory%5D=Earth+Science&processing_level_id_h=PL1"}, :has_children false} {:title "Term1", :type "filter", :applied true, :count 0, :links {:remove "http://localhost:3003/collections.json?data_center_h=DOI%2FUSGS%2FCMG%2FWHSC&science_keywords_h%5B0%5D%5Bvariable_level_3%5D=Level1-3&science_keywords_h%5B0%5D%5Bvariable_level_1%5D=Level1-1&project_h=proj1&instrument_h=ATM&keyword=MODIS&page_size=0&science_keywords_h%5B0%5D%5Bvariable_level_2%5D=Level1-2&platform_h=ASTER-p0&include_facets=v2&science_keywords_h%5B0%5D%5Bcategory%5D=Earth+Science&processing_level_id_h=PL1&science_keywords_h%5B0%5D%5Btopic%5D=Topic1"}, :has_children false} {:title "Level1-1", :type "filter", :applied true, :count 0, :links {:remove "http://localhost:3003/collections.json?data_center_h=DOI%2FUSGS%2FCMG%2FWHSC&science_keywords_h%5B0%5D%5Bvariable_level_3%5D=Level1-3&science_keywords_h%5B0%5D%5Bterm%5D=Term1&project_h=proj1&instrument_h=ATM&keyword=MODIS&page_size=0&science_keywords_h%5B0%5D%5Bvariable_level_2%5D=Level1-2&platform_h=ASTER-p0&include_facets=v2&science_keywords_h%5B0%5D%5Bcategory%5D=Earth+Science&processing_level_id_h=PL1&science_keywords_h%5B0%5D%5Btopic%5D=Topic1"}, :has_children false} {:title "Level1-2", :type "filter", :applied true, :count 0, :links {:remove "http://localhost:3003/collections.json?data_center_h=DOI%2FUSGS%2FCMG%2FWHSC&science_keywords_h%5B0%5D%5Bvariable_level_3%5D=Level1-3&science_keywords_h%5B0%5D%5Bvariable_level_1%5D=Level1-1&science_keywords_h%5B0%5D%5Bterm%5D=Term1&project_h=proj1&instrument_h=ATM&keyword=MODIS&page_size=0&platform_h=ASTER-p0&include_facets=v2&science_keywords_h%5B0%5D%5Bcategory%5D=Earth+Science&processing_level_id_h=PL1&science_keywords_h%5B0%5D%5Btopic%5D=Topic1",} :has_children false} {:title "Level1-3", :type "filter", :applied true, :count 0, :links {:remove 
"http://localhost:3003/collections.json?data_center_h=DOI%2FUSGS%2FCMG%2FWHSC&science_keywords_h%5B0%5D%5Bvariable_level_1%5D=Level1-1&science_keywords_h%5B0%5D%5Bterm%5D=Term1&project_h=proj1&instrument_h=ATM&keyword=MODIS&page_size=0&science_keywords_h%5B0%5D%5Bvariable_level_2%5D=Level1-2&platform_h=ASTER-p0&include_facets=v2&science_keywords_h%5B0%5D%5Bcategory%5D=Earth+Science&processing_level_id_h=PL1&science_keywords_h%5B0%5D%5Btopic%5D=Topic1"}, :has_children false}]} {:title "Platforms", :type "group", :applied true, :has_children true, :children [{:title "ASTER-p0", :type "filter", :applied true, :count 0, :links {:remove "http://localhost:3003/collections.json?data_center_h=DOI%2FUSGS%2FCMG%2FWHSC&science_keywords_h%5B0%5D%5Bvariable_level_3%5D=Level1-3&science_keywords_h%5B0%5D%5Bvariable_level_1%5D=Level1-1&science_keywords_h%5B0%5D%5Bterm%5D=Term1&project_h=proj1&instrument_h=ATM&keyword=MODIS&page_size=0&science_keywords_h%5B0%5D%5Bvariable_level_2%5D=Level1-2&include_facets=v2&science_keywords_h%5B0%5D%5Bcategory%5D=Earth+Science&processing_level_id_h=PL1&science_keywords_h%5B0%5D%5Btopic%5D=Topic1"}, :has_children false}]} {:title "Instruments", :type "group", :applied true, :has_children true, :children [{:title "ATM", :type "filter", :applied true, :count 0, :links {:remove "http://localhost:3003/collections.json?data_center_h=DOI%2FUSGS%2FCMG%2FWHSC&science_keywords_h%5B0%5D%5Bvariable_level_3%5D=Level1-3&science_keywords_h%5B0%5D%5Bvariable_level_1%5D=Level1-1&science_keywords_h%5B0%5D%5Bterm%5D=Term1&project_h=proj1&keyword=MODIS&page_size=0&science_keywords_h%5B0%5D%5Bvariable_level_2%5D=Level1-2&platform_h=ASTER-p0&include_facets=v2&science_keywords_h%5B0%5D%5Bcategory%5D=Earth+Science&processing_level_id_h=PL1&science_keywords_h%5B0%5D%5Btopic%5D=Topic1"}, :has_children false}]} {:title "Organizations", :type "group", :applied true, :has_children true, :children [{:title "DOI/USGS/CMG/WHSC", :type "filter" :applied true, :count 0, :links {:remove "http://localhost:3003/collections.json?science_keywords_h%5B0%5D%5Bvariable_level_3%5D=Level1-3&science_keywords_h%5B0%5D%5Bvariable_level_1%5D=Level1-1&science_keywords_h%5B0%5D%5Bterm%5D=Term1&project_h=proj1&instrument_h=ATM&keyword=MODIS&page_size=0&science_keywords_h%5B0%5D%5Bvariable_level_2%5D=Level1-2&platform_h=ASTER-p0&include_facets=v2&science_keywords_h%5B0%5D%5Bcategory%5D=Earth+Science&processing_level_id_h=PL1&science_keywords_h%5B0%5D%5Btopic%5D=Topic1"}, :has_children false}]} {:title "Projects", :type "group", :applied true, :has_children true, :children [{:title "proj1", :type "filter", :applied true, :count 0, :links {:remove "http://localhost:3003/collections.json?data_center_h=DOI%2FUSGS%2FCMG%2FWHSC&science_keywords_h%5B0%5D%5Bvariable_level_3%5D=Level1-3&science_keywords_h%5B0%5D%5Bvariable_level_1%5D=Level1-1&science_keywords_h%5B0%5D%5Bterm%5D=Term1&instrument_h=ATM&keyword=MODIS&page_size=0&science_keywords_h%5B0%5D%5Bvariable_level_2%5D=Level1-2&platform_h=ASTER-p0&include_facets=v2&science_keywords_h%5B0%5D%5Bcategory%5D=Earth+Science&processing_level_id_h=PL1&science_keywords_h%5B0%5D%5Btopic%5D=Topic1"}, :has_children false}]} {:title "Processing levels", :type "group", :applied true, :has_children true, :children [{:title "PL1", :type "filter", :applied true, :count 0, :links {:remove 
"http://localhost:3003/collections.json?data_center_h=DOI%2FUSGS%2FCMG%2FWHSC&science_keywords_h%5B0%5D%5Bvariable_level_3%5D=Level1-3&science_keywords_h%5B0%5D%5Bvariable_level_1%5D=Level1-1&science_keywords_h%5B0%5D%5Bterm%5D=Term1&project_h=proj1&instrument_h=ATM&keyword=MODIS&page_size=0&science_keywords_h%5B0%5D%5Bvariable_level_2%5D=Level1-2&platform_h=ASTER-p0&include_facets=v2&science_keywords_h%5B0%5D%5Bcategory%5D=Earth+Science&science_keywords_h%5B0%5D%5Btopic%5D=Topic1"}, :has_children false}]}]}) (def expected-facets-modis-and-aster-no-results-found "Expected facet response when searching for MODIS keyword and MODIS or ASTER platform and no collections are found. If no collections are matched the values searched in the query should be present as remove links." {:title "Browse Collections", :type "group", :has_children true, :children [{:title "Keywords", :type "group", :applied false, :has_children true, :children [{:title "Topic1", :type "filter", :applied false, :count 1, :links {:apply "http://localhost:3003/collections.json?platform_h=moDIS-p0&platform_h=ASTER-p0&keyword=MODIS&page_size=0&include_facets=v2&science_keywords_h%5B0%5D%5Btopic%5D=Topic1"}, :has_children true}]} {:title "Platforms", :type "group", :applied true, :has_children true, :children [{:title "ASTER-p0", :type "filter", :applied true, :count 0, :links {:remove "http://localhost:3003/collections.json?platform_h=moDIS-p0&keyword=MODIS&page_size=0&include_facets=v2"}, :has_children false} {:title "MODIS-p0", :type "filter", :applied true, :count 1, :links {:remove "http://localhost:3003/collections.json?platform_h=ASTER-p0&keyword=MODIS&page_size=0&include_facets=v2"}, :has_children false}]}]}) (def expected-all-hierarchical-facets "Expected value for the all-hierarchical-fields-test. This is using the version 1 hierarchical facets." [{:field "project", :value-counts [["PROJ2" 2] ["proj1" 2]]} {:field "sensor", :value-counts [["FROM_KMS-p0-i0-s0" 2] ["FROM_KMS-p0-i1-s0" 2] ["FROM_KMS-p1-i0-s0" 2] ["FROM_KMS-p1-i1-s0" 2]]} {:field "two_d_coordinate_system_name", :value-counts [["MISR" 2]]} {:field "processing_level_id", :value-counts [["PL1" 2]]} {:field "detailed_variable", :value-counts [["DETAIL1" 2] ["UNIVERSAL" 2]]} {:field "data_centers", :subfields ["level_0"], :level_0 [{:value "GOVERNMENT AGENCIES-U.S. FEDERAL AGENCIES", :count 2, :subfields ["level_1"], :level_1 [{:value "DOI", :count 2, :subfields ["level_2"], :level_2 [{:value "USGS", :count 2, :subfields ["level_3"], :level_3 [{:value "Added level 3 value", :count 2, :subfields ["short_name"], :short_name [{:value "DOI/USGS/CMG/WHSC", :count 2, :subfields ["long_name"], :long_name [{:value "Woods Hole Science Center, Coastal and Marine Geology, U.S. Geological Survey, U.S. Department of the Interior", :count 2}]}]}]}]}]}]} {:field "archive_centers", :subfields ["level_0"], :level_0 [{:value "GOVERNMENT AGENCIES-U.S. FEDERAL AGENCIES", :count 2, :subfields ["level_1"], :level_1 [{:value "DOI", :count 2, :subfields ["level_2"], :level_2 [{:value "USGS", :count 2, :subfields ["level_3"], :level_3 [{:value "Added level 3 value", :count 2, :subfields ["short_name"], :short_name [{:value "DOI/USGS/CMG/WHSC", :count 2, :subfields ["long_name"], :long_name [{:value "Woods Hole Science Center, Coastal and Marine Geology, U.S. Geological Survey, U.S. 
Department of the Interior", :count 2}]}]}]}]}]}]} {:field "platforms", :subfields ["category"], :category [{:value "Earth Observation Satellites", :count 2, :subfields ["series_entity"], :series_entity [{:value "DIADEM", :count 2, :subfields ["short_name"], :short_name [{:value "DIADEM-1D", :count 2, :subfields ["long_name"], :long_name [{:value "Not Provided", :count 2}]}]} {:value "DMSP (Defense Meteorological Satellite Program)", :count 2, :subfields ["short_name"], :short_name [{:value "DMSP 5B/F3", :count 2, :subfields ["long_name"], :long_name [{:value "Defense Meteorological Satellite Program-F3", :count 2}]}]}]}]} {:field "instruments", :subfields ["category"], :category [{:value "Earth Remote Sensing Instruments", :count 2, :subfields ["class"], :class [{:value "Active Remote Sensing", :count 2, :subfields ["type"], :type [{:value "Altimeters", :count 2, :subfields ["subtype"], :subtype [{:value "Lidar/Laser Altimeters", :count 2, :subfields ["short_name"], :short_name [{:value "ATM", :count 2, :subfields ["long_name"], :long_name [{:value "Airborne Topographic Mapper", :count 2}]} {:value "LVIS", :count 2, :subfields ["long_name"], :long_name [{:value "Land, Vegetation, and Ice Sensor", :count 2}]}]}]}]}]} {:value "Not Provided", ; Instruments now include sensors as child instruments :count 2, :subfields ["class"], :class [{:value "Not Provided", :count 2, :subfields ["type"], :type [{:value "Not Provided", :count 2, :subfields ["subtype"], :subtype [{:value "Not Provided", :count 2, :subfields ["short_name"], :short_name [{:value "FROM_KMS-p0-i0-s0", :count 2, :subfields ["long_name"], :long_name [{:count 2, :value "Not Provided"}]} {:value "FROM_KMS-p0-i1-s0", :count 2, :subfields ["long_name"], :long_name [{:count 2, :value "Not Provided"}]} {:value "FROM_KMS-p1-i0-s0", :count 2, :subfields ["long_name"], :long_name [{:count 2, :value "Not Provided"}]} {:value "FROM_KMS-p1-i1-s0", :count 2, :subfields ["long_name"], :long_name [{:count 2, :value "Not Provided"}]}]}]}]}]}]} {:field "science_keywords", :subfields ["category"], :category [{:value "HURRICANE", :count 2, :subfields ["topic"], :topic [{:value "POPULAR", :count 2, :subfields ["term"], :term [{:value "EXTREME", :count 2, :subfields ["variable_level_1"], :variable_level_1 [{:value "LEVEL2-1", :count 2, :subfields ["variable_level_2"], :variable_level_2 [{:value "LEVEL2-2", :count 2, :subfields ["variable_level_3"], :variable_level_3 [{:value "LEVEL2-3", :count 2}]}]}]} {:value "UNIVERSAL", :count 2}]} {:value "COOL", :count 2, :subfields ["term"], :term [{:value "TERM4", :count 2, :subfields ["variable_level_1"], :variable_level_1 [{:value "UNIVERSAL", :count 2}]}]}]} {:value "UPCASE", :count 2, :subfields ["topic"], :topic [{:value "COOL", :count 2, :subfields ["term"], :term [{:value "MILD", :count 2}]} {:value "POPULAR", :count 2, :subfields ["term"], :term [{:value "MILD", :count 2}]}]} {:value "CAT1", :count 2, :subfields ["topic"], :topic [{:value "TOPIC1", :count 2, :subfields ["term"], :term [{:value "TERM1", :count 2, :subfields ["variable_level_1"], :variable_level_1 [{:value "LEVEL1-1", :count 2, :subfields ["variable_level_2"], :variable_level_2 [{:value "LEVEL1-2", :count 2, :subfields ["variable_level_3"], :variable_level_3 [{:value "LEVEL1-3", :count 2}]}]}]}]}]} {:value "TORNADO", :count 2, :subfields ["topic"], :topic [{:value "POPULAR", :count 2, :subfields ["term"], :term [{:value "EXTREME", :count 2}]}]}]} {:subfields ["category"], :category [{:subfields ["type"], :type [{:subfields 
["subregion_1"], :subregion_1 [{:subfields ["subregion_2"], :subregion_2 [{:subfields ["subregion_3"], :subregion_3 [{:count 2, :value "Not Provided"}], :count 2, :value "ANGOLA"}], :count 2, :value "CENTRAL AFRICA"}], :count 2, :value "AFRICA"} {:subfields ["subregion_1"], :subregion_1 [{:subfields ["subregion_2"], :subregion_2 [{:subfields ["subregion_3"], :subregion_3 [{:count 1, :value "GAZA STRIP"}], :count 1, :value "MIDDLE EAST"}], :count 1, :value "WESTERN ASIA"}], :count 1, :value "ASIA"}], :count 2, :value "CONTINENT"} {:subfields ["type"], :type [{:subfields ["subregion_1"], :subregion_1 [{:subfields ["subregion_2"], :subregion_2 [{:subfields ["subregion_3"], :subregion_3 [{:count 1, :value "Not Provided"}], :count 1, :value "Not Provided"}], :count 1, :value "Not Provided"}], :count 1, :value "NOT IN KMS"}], :count 1, :value "OTHER"}], :field "location_keywords"}]) (def expected-v2-facets-apply-links-with-facets-size "Expected facets to be returned in the facets v2 response. The structure of the v2 facet response is documented in https://wiki.earthdata.nasa.gov/display/CMR/Updated+facet+response. This response is generated for the search http://localhost:3003/collections.json?page_size=0&include_facets=v2&facets_size[platform]=1 without any query parameters selected and with a couple of collections that have science keywords, projects, platforms, instruments, organizations, and processing levels in their metadata. This tests that the applied parameter is set to false correctly and that the generated links specify a a link to add each search parameter to apply that value to a search." {:title "Browse Collections", :type "group", :has_children true, :children [{:title "Keywords", :type "group", :applied false, :has_children true, :children [{:title "Popular", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&page_size=0&include_facets=v2&science_keywords_h%5B0%5D%5Btopic%5D=Popular"}, :has_children true} {:title "Topic1", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&page_size=0&include_facets=v2&science_keywords_h%5B0%5D%5Btopic%5D=Topic1"}, :has_children true}]} {:title "Platforms", :type "group", :applied false, :has_children true, :children [{:title "DMSP 5B/F3", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&page_size=0&include_facets=v2&platform_h%5B%5D=DMSP+5B%2FF3"}, :has_children false}]} {:title "Instruments", :type "group", :applied false, :has_children true, :children [{:title "ATM", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&page_size=0&include_facets=v2&instrument_h%5B%5D=ATM"}, :has_children false} {:title "lVIs", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&page_size=0&include_facets=v2&instrument_h%5B%5D=lVIs"}, :has_children false}]} {:title "Organizations", :type "group", :applied false, :has_children true, :children [{:title "DOI/USGS/CMG/WHSC", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&page_size=0&include_facets=v2&data_center_h%5B%5D=DOI%2FUSGS%2FCMG%2FWHSC"}, :has_children false}]} {:title "Projects", :type "group", :applied false, :has_children true, :children [{:title "proj1", :type 
"filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&page_size=0&include_facets=v2&project_h%5B%5D=proj1"}, :has_children false} {:title "PROJ2", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&page_size=0&include_facets=v2&project_h%5B%5D=PROJ2"}, :has_children false}]} {:title "Processing levels", :type "group", :applied false, :has_children true, :children [{:title "PL1", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&page_size=0&include_facets=v2&processing_level_id_h%5B%5D=PL1"}, :has_children false}]} {:title "Measurements", :type "group", :applied false, :has_children true, :children [{:title "Measurement1", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&page_size=0&include_facets=v2&variables_h%5B0%5D%5Bmeasurement%5D=Measurement1"}, :has_children true} {:title "Measurement2", :type "filter", :applied false, :count 1, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&page_size=0&include_facets=v2&variables_h%5B0%5D%5Bmeasurement%5D=Measurement2"}, :has_children true}]}]}) (def expected-v2-facets-apply-links-with-selecting-facet-outside-of-facets-size "Expected facets to be returned in the facets v2 response. The structure of the v2 facet response is documented in https://wiki.earthdata.nasa.gov/display/CMR/Updated+facet+response. This response is generated for the search http://localhost:3003/collections.json?page_size=0&platform_h[]=diadem-1D&include_facets=v2&facets_size[platform]=1 without any query parameters selected and with a couple of collections that have science keywords, projects, platforms, instruments, organizations, and processing levels in their metadata. This tests that the applied parameter is set to false correctly and that the generated links specify a a link to add each search parameter to apply that value to a search." 
{:title "Browse Collections", :type "group", :has_children true, :children [{:title "Keywords", :type "group", :applied false, :has_children true, :children [{:title "Popular", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&platform_h=diadem-1D&page_size=0&include_facets=v2&science_keywords_h%5B0%5D%5Btopic%5D=Popular"}, :has_children true} {:title "Topic1", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&platform_h=diadem-1D&page_size=0&include_facets=v2&science_keywords_h%5B0%5D%5Btopic%5D=Topic1"}, :has_children true}]} {:title "Platforms", :type "group", :applied true, :has_children true, :children [{:title "diadem-1D", :type "filter", :applied true, :count 2, :links {:remove "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&page_size=0&include_facets=v2"}, :has_children false} {:title "DMSP 5B/F3", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&platform_h=diadem-1D&page_size=0&include_facets=v2&platform_h%5B%5D=DMSP+5B%2FF3"}, :has_children false}]} {:title "Instruments", :type "group", :applied false, :has_children true, :children [{:title "ATM", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&platform_h=diadem-1D&page_size=0&include_facets=v2&instrument_h%5B%5D=ATM"}, :has_children false} {:title "lVIs", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&platform_h=diadem-1D&page_size=0&include_facets=v2&instrument_h%5B%5D=lVIs"}, :has_children false}]} {:title "Organizations", :type "group", :applied false, :has_children true, :children [{:title "DOI/USGS/CMG/WHSC", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&platform_h=diadem-1D&page_size=0&include_facets=v2&data_center_h%5B%5D=DOI%2FUSGS%2FCMG%2FWHSC"}, :has_children false}]} {:title "Projects", :type "group", :applied false, :has_children true, :children [{:title "proj1", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&platform_h=diadem-1D&page_size=0&include_facets=v2&project_h%5B%5D=proj1"}, :has_children false} {:title "PROJ2", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&platform_h=diadem-1D&page_size=0&include_facets=v2&project_h%5B%5D=PROJ2"}, :has_children false}]} {:title "Processing levels", :type "group", :applied false, :has_children true, :children [{:title "PL1", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&platform_h=diadem-1D&page_size=0&include_facets=v2&processing_level_id_h%5B%5D=PL1"}, :has_children false}]} {:title "Measurements", :type "group", :applied false, :has_children true, :children [{:title "Measurement1", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&platform_h=diadem-1D&page_size=0&include_facets=v2&variables_h%5B0%5D%5Bmeasurement%5D=Measurement1"}, :has_children true} {:title "Measurement2", :type "filter", :applied false, :count 1, :links {:apply 
"http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&platform_h=diadem-1D&page_size=0&include_facets=v2&variables_h%5B0%5D%5Bmeasurement%5D=Measurement2"}, :has_children true}]}]}) (def expected-v2-facets-apply-links-with-facets-size-and-non-existing-selecting-facet "Expected facets to be returned in the facets v2 response. The structure of the v2 facet response is documented in https://wiki.earthdata.nasa.gov/display/CMR/Updated+facet+response. This response is generated for the search http://localhost:3003/collections.json?page_size=0&platform_h[]=Non-Exist&include_facets=v2&facets_size[platform]=1 without any query parameters selected and with a couple of collections that have science keywords, projects, platforms, instruments, organizations, and processing levels in their metadata. This tests that the applied parameter is set to false correctly and that the generated links specify a a link to add each search parameter to apply that value to a search." {:title "Browse Collections", :type "group", :has_children true, :children [{:title "Platforms", :type "group", :applied true, :has_children true, :children [{:title "DMSP 5B/F3", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&platform_h=Non-Exist&page_size=0&include_facets=v2&platform_h%5B%5D=DMSP+5B%2FF3"}, :has_children false} {:title "Non-Exist", :type "filter", :applied true, :count 0, :links {:remove "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&page_size=0&include_facets=v2"}, :has_children false}]}]}) (def expected-v2-facets-apply-links-with-selecting-facet-without-facets-size "Expected facets to be returned in the facets v2 response. The structure of the v2 facet response is documented in https://wiki.earthdata.nasa.gov/display/CMR/Updated+facet+response. This response is generated for the search http://localhost:3003/collections.json?page_size=0&platform_h[]=existingPlat&include_facets=v2 without any query parameters selected and with a couple of collections that have science keywords, projects, platforms, instruments, organizations, and processing levels in their metadata. This tests that the applied parameter is set to false correctly and that the generated links specify a a link to add each search parameter to apply that value to a search." 
{:title "Browse Collections", :type "group", :has_children true, :children [{:title "Keywords", :type "group", :applied false, :has_children true, :children [{:title "Popular", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?platform_h=diadem-1D&page_size=0&include_facets=v2&science_keywords_h%5B0%5D%5Btopic%5D=Popular"}, :has_children true} {:title "Topic1", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?platform_h=diadem-1D&page_size=0&include_facets=v2&science_keywords_h%5B0%5D%5Btopic%5D=Topic1"}, :has_children true}]} {:title "Platforms", :type "group", :applied true, :has_children true, :children [{:title "diadem-1D", :type "filter", :applied true, :count 2, :links {:remove "http://localhost:3003/collections.json?page_size=0&include_facets=v2"}, :has_children false} {:title "DMSP 5B/F3", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?platform_h=diadem-1D&page_size=0&include_facets=v2&platform_h%5B%5D=DMSP+5B%2FF3"}, :has_children false}]} {:title "Instruments", :type "group", :applied false, :has_children true, :children [{:title "ATM", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?platform_h=diadem-1D&page_size=0&include_facets=v2&instrument_h%5B%5D=ATM"}, :has_children false} {:title "lVIs", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?platform_h=diadem-1D&page_size=0&include_facets=v2&instrument_h%5B%5D=lVIs"}, :has_children false}]} {:title "Organizations", :type "group", :applied false, :has_children true, :children [{:title "DOI/USGS/CMG/WHSC", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?platform_h=diadem-1D&page_size=0&include_facets=v2&data_center_h%5B%5D=DOI%2FUSGS%2FCMG%2FWHSC"}, :has_children false}]} {:title "Projects", :type "group", :applied false, :has_children true, :children [{:title "proj1", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?platform_h=diadem-1D&page_size=0&include_facets=v2&project_h%5B%5D=proj1"}, :has_children false} {:title "PROJ2", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?platform_h=diadem-1D&page_size=0&include_facets=v2&project_h%5B%5D=PROJ2"}, :has_children false}]} {:title "Processing levels", :type "group", :applied false, :has_children true, :children [{:title "PL1", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?platform_h=diadem-1D&page_size=0&include_facets=v2&processing_level_id_h%5B%5D=PL1"}, :has_children false}]} {:title "Measurements", :type "group", :applied false, :has_children true, :children [{:title "Measurement1", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?platform_h=diadem-1D&page_size=0&include_facets=v2&variables_h%5B0%5D%5Bmeasurement%5D=Measurement1"}, :has_children true} {:title "Measurement2", :type "filter", :applied false, :count 1, :links {:apply "http://localhost:3003/collections.json?platform_h=diadem-1D&page_size=0&include_facets=v2&variables_h%5B0%5D%5Bmeasurement%5D=Measurement2"}, :has_children true}]}]})
"http://localhost:3003/collections.json?data_center_h=DOI%2FUSGS%2FCMG%2FWHSC&science_keywords_h%5B0%5D%5Bvariable_level_3%5D=Level1-3&science_keywords_h%5B0%5D%5Bvariable_level_1%5D=Level1-1&science_keywords_h%5B0%5D%5Bterm%5D=Term1&project_h=proj1&variables_h%5B0%5D%5Bvariable%5D=Variable1&instrument_h=ATM&page_size=0&science_keywords_h%5B0%5D%5Bvariable_level_2%5D=Level1-2&platform_h=DIADEM-1D&include_facets=v2&variables_h%5B0%5D%5Bmeasurement%5D=Measurement1&science_keywords_h%5B0%5D%5Bcategory%5D=Earth+Science&science_keywords_h%5B0%5D%5Btopic%5D=Topic1"}, :has_children false}]} {:title "Measurements", :type "group", :applied true, :has_children true, :children [{:title "Measurement1", :type "filter", :applied true, :count 2, :links {:remove "http://localhost:3003/collections.json?data_center_h=DOI%2FUSGS%2FCMG%2FWHSC&science_keywords_h%5B0%5D%5Bvariable_level_3%5D=Level1-3&science_keywords_h%5B0%5D%5Bvariable_level_1%5D=Level1-1&science_keywords_h%5B0%5D%5Bterm%5D=Term1&project_h=proj1&instrument_h=ATM&page_size=0&science_keywords_h%5B0%5D%5Bvariable_level_2%5D=Level1-2&platform_h=DIADEM-1D&include_facets=v2&science_keywords_h%5B0%5D%5Bcategory%5D=Earth+Science&processing_level_id_h=PL1&science_keywords_h%5B0%5D%5Btopic%5D=Topic1"}, :has_children true, :children [{:title "Variable1", :type "filter", :applied true, :count 2, :links {:remove "http://localhost:3003/collections.json?data_center_h=DOI%2FUSGS%2FCMG%2FWHSC&science_keywords_h%5B0%5D%5Bvariable_level_3%5D=Level1-3&science_keywords_h%5B0%5D%5Bvariable_level_1%5D=Level1-1&science_keywords_h%5B0%5D%5Bterm%5D=Term1&project_h=proj1&instrument_h=ATM&page_size=0&science_keywords_h%5B0%5D%5Bvariable_level_2%5D=Level1-2&platform_h=DIADEM-1D&include_facets=v2&variables_h%5B0%5D%5Bmeasurement%5D=Measurement1&science_keywords_h%5B0%5D%5Bcategory%5D=Earth+Science&processing_level_id_h=PL1&science_keywords_h%5B0%5D%5Btopic%5D=Topic1"}, :has_children false}]} {:title "Measurement2", :type "filter", :applied false, :count 1, :links {:apply "http://localhost:3003/collections.json?data_center_h=DOI%2FUSGS%2FCMG%2FWHSC&science_keywords_h%5B0%5D%5Bvariable_level_3%5D=Level1-3&variables_h%5B1%5D%5Bmeasurement%5D=Measurement2&science_keywords_h%5B0%5D%5Bvariable_level_1%5D=Level1-1&science_keywords_h%5B0%5D%5Bterm%5D=Term1&project_h=proj1&variables_h%5B0%5D%5Bvariable%5D=Variable1&instrument_h=ATM&page_size=0&science_keywords_h%5B0%5D%5Bvariable_level_2%5D=Level1-2&platform_h=DIADEM-1D&include_facets=v2&variables_h%5B0%5D%5Bmeasurement%5D=Measurement1&science_keywords_h%5B0%5D%5Bcategory%5D=Earth+Science&processing_level_id_h=PL1&science_keywords_h%5B0%5D%5Btopic%5D=Topic1"}, :has_children true}]}]}) (def partial-v2-facets "Expected facet results with some facets present and some not included because there were not any matching collections for those facets. This tests that the generated facets correctly do not include any facets in the response which we do not apply for the search. In this example the projects, platforms, instruments, and organizations are omitted from the facet response." 
{:title "Browse Collections", :type "group", :has_children true, :children [{:title "Keywords", :type "group", :applied false, :has_children true, :children [{:title "Popular", :type "filter", :applied false, :count 1, :links {:apply "http://localhost:3003/collections.json?page_size=0&include_facets=v2&science_keywords_h%5B0%5D%5Btopic%5D=Popular"}, :has_children true}]} {:title "Processing levels", :type "group", :applied false, :has_children true, :children [{:title "PL1", :type "filter", :applied false, :count 1, :links {:apply "http://localhost:3003/collections.json?page_size=0&include_facets=v2&processing_level_id_h%5B%5D=PL1"}, :has_children false}]}]}) (def expected-facets-with-no-matching-collections "Facet response when searching against faceted fields which have 0 matching collections. Each of the search terms will be included in the facet response along with a remove link so that the user can remove that search term from their query." {:title "Browse Collections", :type "group", :has_children true, :children [{:title "Keywords", :applied true, :children [{:title "Topic1", :type "filter", :applied true, :count 0, :links {:remove "http://localhost:3003/collections.json?data_center_h=DOI%2FUSGS%2FCMG%2FWHSC&science_keywords_h%5B0%5D%5Bvariable_level_3%5D=Level1-3&science_keywords_h%5B0%5D%5Bvariable_level_1%5D=Level1-1&science_keywords_h%5B0%5D%5Bterm%5D=Term1&project_h=proj1&instrument_h=ATM&keyword=MODIS&page_size=0&science_keywords_h%5B0%5D%5Bvariable_level_2%5D=Level1-2&platform_h=ASTER-p0&include_facets=v2&science_keywords_h%5B0%5D%5Bcategory%5D=Earth+Science&processing_level_id_h=PL1"}, :has_children false} {:title "Term1", :type "filter", :applied true, :count 0, :links {:remove "http://localhost:3003/collections.json?data_center_h=DOI%2FUSGS%2FCMG%2FWHSC&science_keywords_h%5B0%5D%5Bvariable_level_3%5D=Level1-3&science_keywords_h%5B0%5D%5Bvariable_level_1%5D=Level1-1&project_h=proj1&instrument_h=ATM&keyword=MODIS&page_size=0&science_keywords_h%5B0%5D%5Bvariable_level_2%5D=Level1-2&platform_h=ASTER-p0&include_facets=v2&science_keywords_h%5B0%5D%5Bcategory%5D=Earth+Science&processing_level_id_h=PL1&science_keywords_h%5B0%5D%5Btopic%5D=Topic1"}, :has_children false} {:title "Level1-1", :type "filter", :applied true, :count 0, :links {:remove "http://localhost:3003/collections.json?data_center_h=DOI%2FUSGS%2FCMG%2FWHSC&science_keywords_h%5B0%5D%5Bvariable_level_3%5D=Level1-3&science_keywords_h%5B0%5D%5Bterm%5D=Term1&project_h=proj1&instrument_h=ATM&keyword=MODIS&page_size=0&science_keywords_h%5B0%5D%5Bvariable_level_2%5D=Level1-2&platform_h=ASTER-p0&include_facets=v2&science_keywords_h%5B0%5D%5Bcategory%5D=Earth+Science&processing_level_id_h=PL1&science_keywords_h%5B0%5D%5Btopic%5D=Topic1"}, :has_children false} {:title "Level1-2", :type "filter", :applied true, :count 0, :links {:remove "http://localhost:3003/collections.json?data_center_h=DOI%2FUSGS%2FCMG%2FWHSC&science_keywords_h%5B0%5D%5Bvariable_level_3%5D=Level1-3&science_keywords_h%5B0%5D%5Bvariable_level_1%5D=Level1-1&science_keywords_h%5B0%5D%5Bterm%5D=Term1&project_h=proj1&instrument_h=ATM&keyword=MODIS&page_size=0&platform_h=ASTER-p0&include_facets=v2&science_keywords_h%5B0%5D%5Bcategory%5D=Earth+Science&processing_level_id_h=PL1&science_keywords_h%5B0%5D%5Btopic%5D=Topic1",} :has_children false} {:title "Level1-3", :type "filter", :applied true, :count 0, :links {:remove 
"http://localhost:3003/collections.json?data_center_h=DOI%2FUSGS%2FCMG%2FWHSC&science_keywords_h%5B0%5D%5Bvariable_level_1%5D=Level1-1&science_keywords_h%5B0%5D%5Bterm%5D=Term1&project_h=proj1&instrument_h=ATM&keyword=MODIS&page_size=0&science_keywords_h%5B0%5D%5Bvariable_level_2%5D=Level1-2&platform_h=ASTER-p0&include_facets=v2&science_keywords_h%5B0%5D%5Bcategory%5D=Earth+Science&processing_level_id_h=PL1&science_keywords_h%5B0%5D%5Btopic%5D=Topic1"}, :has_children false}]} {:title "Platforms", :type "group", :applied true, :has_children true, :children [{:title "ASTER-p0", :type "filter", :applied true, :count 0, :links {:remove "http://localhost:3003/collections.json?data_center_h=DOI%2FUSGS%2FCMG%2FWHSC&science_keywords_h%5B0%5D%5Bvariable_level_3%5D=Level1-3&science_keywords_h%5B0%5D%5Bvariable_level_1%5D=Level1-1&science_keywords_h%5B0%5D%5Bterm%5D=Term1&project_h=proj1&instrument_h=ATM&keyword=MODIS&page_size=0&science_keywords_h%5B0%5D%5Bvariable_level_2%5D=Level1-2&include_facets=v2&science_keywords_h%5B0%5D%5Bcategory%5D=Earth+Science&processing_level_id_h=PL1&science_keywords_h%5B0%5D%5Btopic%5D=Topic1"}, :has_children false}]} {:title "Instruments", :type "group", :applied true, :has_children true, :children [{:title "ATM", :type "filter", :applied true, :count 0, :links {:remove "http://localhost:3003/collections.json?data_center_h=DOI%2FUSGS%2FCMG%2FWHSC&science_keywords_h%5B0%5D%5Bvariable_level_3%5D=Level1-3&science_keywords_h%5B0%5D%5Bvariable_level_1%5D=Level1-1&science_keywords_h%5B0%5D%5Bterm%5D=Term1&project_h=proj1&keyword=MODIS&page_size=0&science_keywords_h%5B0%5D%5Bvariable_level_2%5D=Level1-2&platform_h=ASTER-p0&include_facets=v2&science_keywords_h%5B0%5D%5Bcategory%5D=Earth+Science&processing_level_id_h=PL1&science_keywords_h%5B0%5D%5Btopic%5D=Topic1"}, :has_children false}]} {:title "Organizations", :type "group", :applied true, :has_children true, :children [{:title "DOI/USGS/CMG/WHSC", :type "filter" :applied true, :count 0, :links {:remove "http://localhost:3003/collections.json?science_keywords_h%5B0%5D%5Bvariable_level_3%5D=Level1-3&science_keywords_h%5B0%5D%5Bvariable_level_1%5D=Level1-1&science_keywords_h%5B0%5D%5Bterm%5D=Term1&project_h=proj1&instrument_h=ATM&keyword=MODIS&page_size=0&science_keywords_h%5B0%5D%5Bvariable_level_2%5D=Level1-2&platform_h=ASTER-p0&include_facets=v2&science_keywords_h%5B0%5D%5Bcategory%5D=Earth+Science&processing_level_id_h=PL1&science_keywords_h%5B0%5D%5Btopic%5D=Topic1"}, :has_children false}]} {:title "Projects", :type "group", :applied true, :has_children true, :children [{:title "proj1", :type "filter", :applied true, :count 0, :links {:remove "http://localhost:3003/collections.json?data_center_h=DOI%2FUSGS%2FCMG%2FWHSC&science_keywords_h%5B0%5D%5Bvariable_level_3%5D=Level1-3&science_keywords_h%5B0%5D%5Bvariable_level_1%5D=Level1-1&science_keywords_h%5B0%5D%5Bterm%5D=Term1&instrument_h=ATM&keyword=MODIS&page_size=0&science_keywords_h%5B0%5D%5Bvariable_level_2%5D=Level1-2&platform_h=ASTER-p0&include_facets=v2&science_keywords_h%5B0%5D%5Bcategory%5D=Earth+Science&processing_level_id_h=PL1&science_keywords_h%5B0%5D%5Btopic%5D=Topic1"}, :has_children false}]} {:title "Processing levels", :type "group", :applied true, :has_children true, :children [{:title "PL1", :type "filter", :applied true, :count 0, :links {:remove 
"http://localhost:3003/collections.json?data_center_h=DOI%2FUSGS%2FCMG%2FWHSC&science_keywords_h%5B0%5D%5Bvariable_level_3%5D=Level1-3&science_keywords_h%5B0%5D%5Bvariable_level_1%5D=Level1-1&science_keywords_h%5B0%5D%5Bterm%5D=Term1&project_h=proj1&instrument_h=ATM&keyword=MODIS&page_size=0&science_keywords_h%5B0%5D%5Bvariable_level_2%5D=Level1-2&platform_h=ASTER-p0&include_facets=v2&science_keywords_h%5B0%5D%5Bcategory%5D=Earth+Science&science_keywords_h%5B0%5D%5Btopic%5D=Topic1"}, :has_children false}]}]}) (def expected-facets-modis-and-aster-no-results-found "Expected facet response when searching for MODIS keyword and MODIS or ASTER platform and no collections are found. If no collections are matched the values searched in the query should be present as remove links." {:title "Browse Collections", :type "group", :has_children true, :children [{:title "Keywords", :type "group", :applied false, :has_children true, :children [{:title "Topic1", :type "filter", :applied false, :count 1, :links {:apply "http://localhost:3003/collections.json?platform_h=moDIS-p0&platform_h=ASTER-p0&keyword=MODIS&page_size=0&include_facets=v2&science_keywords_h%5B0%5D%5Btopic%5D=Topic1"}, :has_children true}]} {:title "Platforms", :type "group", :applied true, :has_children true, :children [{:title "ASTER-p0", :type "filter", :applied true, :count 0, :links {:remove "http://localhost:3003/collections.json?platform_h=moDIS-p0&keyword=MODIS&page_size=0&include_facets=v2"}, :has_children false} {:title "MODIS-p0", :type "filter", :applied true, :count 1, :links {:remove "http://localhost:3003/collections.json?platform_h=ASTER-p0&keyword=MODIS&page_size=0&include_facets=v2"}, :has_children false}]}]}) (def expected-all-hierarchical-facets "Expected value for the all-hierarchical-fields-test. This is using the version 1 hierarchical facets." [{:field "project", :value-counts [["PROJ2" 2] ["proj1" 2]]} {:field "sensor", :value-counts [["FROM_KMS-p0-i0-s0" 2] ["FROM_KMS-p0-i1-s0" 2] ["FROM_KMS-p1-i0-s0" 2] ["FROM_KMS-p1-i1-s0" 2]]} {:field "two_d_coordinate_system_name", :value-counts [["MISR" 2]]} {:field "processing_level_id", :value-counts [["PL1" 2]]} {:field "detailed_variable", :value-counts [["DETAIL1" 2] ["UNIVERSAL" 2]]} {:field "data_centers", :subfields ["level_0"], :level_0 [{:value "GOVERNMENT AGENCIES-U.S. FEDERAL AGENCIES", :count 2, :subfields ["level_1"], :level_1 [{:value "DOI", :count 2, :subfields ["level_2"], :level_2 [{:value "USGS", :count 2, :subfields ["level_3"], :level_3 [{:value "Added level 3 value", :count 2, :subfields ["short_name"], :short_name [{:value "DOI/USGS/CMG/WHSC", :count 2, :subfields ["long_name"], :long_name [{:value "Woods Hole Science Center, Coastal and Marine Geology, U.S. Geological Survey, U.S. Department of the Interior", :count 2}]}]}]}]}]}]} {:field "archive_centers", :subfields ["level_0"], :level_0 [{:value "GOVERNMENT AGENCIES-U.S. FEDERAL AGENCIES", :count 2, :subfields ["level_1"], :level_1 [{:value "DOI", :count 2, :subfields ["level_2"], :level_2 [{:value "USGS", :count 2, :subfields ["level_3"], :level_3 [{:value "Added level 3 value", :count 2, :subfields ["short_name"], :short_name [{:value "DOI/USGS/CMG/WHSC", :count 2, :subfields ["long_name"], :long_name [{:value "Woods Hole Science Center, Coastal and Marine Geology, U.S. Geological Survey, U.S. 
Department of the Interior", :count 2}]}]}]}]}]}]} {:field "platforms", :subfields ["category"], :category [{:value "Earth Observation Satellites", :count 2, :subfields ["series_entity"], :series_entity [{:value "DIADEM", :count 2, :subfields ["short_name"], :short_name [{:value "DIADEM-1D", :count 2, :subfields ["long_name"], :long_name [{:value "Not Provided", :count 2}]}]} {:value "DMSP (Defense Meteorological Satellite Program)", :count 2, :subfields ["short_name"], :short_name [{:value "DMSP 5B/F3", :count 2, :subfields ["long_name"], :long_name [{:value "Defense Meteorological Satellite Program-F3", :count 2}]}]}]}]} {:field "instruments", :subfields ["category"], :category [{:value "Earth Remote Sensing Instruments", :count 2, :subfields ["class"], :class [{:value "Active Remote Sensing", :count 2, :subfields ["type"], :type [{:value "Altimeters", :count 2, :subfields ["subtype"], :subtype [{:value "Lidar/Laser Altimeters", :count 2, :subfields ["short_name"], :short_name [{:value "ATM", :count 2, :subfields ["long_name"], :long_name [{:value "Airborne Topographic Mapper", :count 2}]} {:value "LVIS", :count 2, :subfields ["long_name"], :long_name [{:value "Land, Vegetation, and Ice Sensor", :count 2}]}]}]}]}]} {:value "Not Provided", ; Instruments now include sensors as child instruments :count 2, :subfields ["class"], :class [{:value "Not Provided", :count 2, :subfields ["type"], :type [{:value "Not Provided", :count 2, :subfields ["subtype"], :subtype [{:value "Not Provided", :count 2, :subfields ["short_name"], :short_name [{:value "FROM_KMS-p0-i0-s0", :count 2, :subfields ["long_name"], :long_name [{:count 2, :value "Not Provided"}]} {:value "FROM_KMS-p0-i1-s0", :count 2, :subfields ["long_name"], :long_name [{:count 2, :value "Not Provided"}]} {:value "FROM_KMS-p1-i0-s0", :count 2, :subfields ["long_name"], :long_name [{:count 2, :value "Not Provided"}]} {:value "FROM_KMS-p1-i1-s0", :count 2, :subfields ["long_name"], :long_name [{:count 2, :value "Not Provided"}]}]}]}]}]}]} {:field "science_keywords", :subfields ["category"], :category [{:value "HURRICANE", :count 2, :subfields ["topic"], :topic [{:value "POPULAR", :count 2, :subfields ["term"], :term [{:value "EXTREME", :count 2, :subfields ["variable_level_1"], :variable_level_1 [{:value "LEVEL2-1", :count 2, :subfields ["variable_level_2"], :variable_level_2 [{:value "LEVEL2-2", :count 2, :subfields ["variable_level_3"], :variable_level_3 [{:value "LEVEL2-3", :count 2}]}]}]} {:value "UNIVERSAL", :count 2}]} {:value "COOL", :count 2, :subfields ["term"], :term [{:value "TERM4", :count 2, :subfields ["variable_level_1"], :variable_level_1 [{:value "UNIVERSAL", :count 2}]}]}]} {:value "UPCASE", :count 2, :subfields ["topic"], :topic [{:value "COOL", :count 2, :subfields ["term"], :term [{:value "MILD", :count 2}]} {:value "POPULAR", :count 2, :subfields ["term"], :term [{:value "MILD", :count 2}]}]} {:value "CAT1", :count 2, :subfields ["topic"], :topic [{:value "TOPIC1", :count 2, :subfields ["term"], :term [{:value "TERM1", :count 2, :subfields ["variable_level_1"], :variable_level_1 [{:value "LEVEL1-1", :count 2, :subfields ["variable_level_2"], :variable_level_2 [{:value "LEVEL1-2", :count 2, :subfields ["variable_level_3"], :variable_level_3 [{:value "LEVEL1-3", :count 2}]}]}]}]}]} {:value "<NAME>", :count 2, :subfields ["topic"], :topic [{:value "POPULAR", :count 2, :subfields ["term"], :term [{:value "EXTREME", :count 2}]}]}]} {:subfields ["category"], :category [{:subfields ["type"], :type [{:subfields ["subregion_1"], 
:subregion_1 [{:subfields ["subregion_2"], :subregion_2 [{:subfields ["subregion_3"], :subregion_3 [{:count 2, :value "Not Provided"}], :count 2, :value "ANGOLA"}], :count 2, :value "CENTRAL AFRICA"}], :count 2, :value "AFRICA"} {:subfields ["subregion_1"], :subregion_1 [{:subfields ["subregion_2"], :subregion_2 [{:subfields ["subregion_3"], :subregion_3 [{:count 1, :value "GAZA STRIP"}], :count 1, :value "MIDDLE EAST"}], :count 1, :value "WESTERN ASIA"}], :count 1, :value "ASIA"}], :count 2, :value "CONTINENT"} {:subfields ["type"], :type [{:subfields ["subregion_1"], :subregion_1 [{:subfields ["subregion_2"], :subregion_2 [{:subfields ["subregion_3"], :subregion_3 [{:count 1, :value "Not Provided"}], :count 1, :value "Not Provided"}], :count 1, :value "Not Provided"}], :count 1, :value "NOT IN KMS"}], :count 1, :value "OTHER"}], :field "location_keywords"}])

(def expected-v2-facets-apply-links-with-facets-size
  "Expected facets to be returned in the facets v2 response. The structure of the v2 facet response is documented in https://wiki.earthdata.nasa.gov/display/CMR/Updated+facet+response. This response is generated for the search http://localhost:3003/collections.json?page_size=0&include_facets=v2&facets_size[platform]=1 without any query parameters selected and with a couple of collections that have science keywords, projects, platforms, instruments, organizations, and processing levels in their metadata. This tests that the applied parameter is set to false correctly and that the generated links specify a link to add each search parameter to apply that value to a search."
  {:title "Browse Collections", :type "group", :has_children true, :children [{:title "Keywords", :type "group", :applied false, :has_children true, :children [{:title "Popular", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&page_size=0&include_facets=v2&science_keywords_h%5B0%5D%5Btopic%5D=Popular"}, :has_children true} {:title "Topic1", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&page_size=0&include_facets=v2&science_keywords_h%5B0%5D%5Btopic%5D=Topic1"}, :has_children true}]} {:title "Platforms", :type "group", :applied false, :has_children true, :children [{:title "DMSP 5B/F3", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&page_size=0&include_facets=v2&platform_h%5B%5D=DMSP+5B%2FF3"}, :has_children false}]} {:title "Instruments", :type "group", :applied false, :has_children true, :children [{:title "ATM", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&page_size=0&include_facets=v2&instrument_h%5B%5D=ATM"}, :has_children false} {:title "lVIs", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&page_size=0&include_facets=v2&instrument_h%5B%5D=lVIs"}, :has_children false}]} {:title "Organizations", :type "group", :applied false, :has_children true, :children [{:title "DOI/USGS/CMG/WHSC", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&page_size=0&include_facets=v2&data_center_h%5B%5D=DOI%2FUSGS%2FCMG%2FWHSC"}, :has_children false}]} {:title "Projects", :type "group", :applied false, :has_children true, :children [{:title "proj1", :type "filter", :applied 
false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&page_size=0&include_facets=v2&project_h%5B%5D=proj1"}, :has_children false} {:title "PROJ2", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&page_size=0&include_facets=v2&project_h%5B%5D=PROJ2"}, :has_children false}]} {:title "Processing levels", :type "group", :applied false, :has_children true, :children [{:title "PL1", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&page_size=0&include_facets=v2&processing_level_id_h%5B%5D=PL1"}, :has_children false}]} {:title "Measurements", :type "group", :applied false, :has_children true, :children [{:title "Measurement1", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&page_size=0&include_facets=v2&variables_h%5B0%5D%5Bmeasurement%5D=Measurement1"}, :has_children true} {:title "Measurement2", :type "filter", :applied false, :count 1, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&page_size=0&include_facets=v2&variables_h%5B0%5D%5Bmeasurement%5D=Measurement2"}, :has_children true}]}]})

(def expected-v2-facets-apply-links-with-selecting-facet-outside-of-facets-size
  "Expected facets to be returned in the facets v2 response. The structure of the v2 facet response is documented in https://wiki.earthdata.nasa.gov/display/CMR/Updated+facet+response. This response is generated for the search http://localhost:3003/collections.json?page_size=0&platform_h[]=diadem-1D&include_facets=v2&facets_size[platform]=1 with the diadem-1D platform facet selected and with a couple of collections that have science keywords, projects, platforms, instruments, organizations, and processing levels in their metadata. This tests that the applied parameter and the generated apply/remove links are set correctly when the selected platform facet falls outside of the facets_size limit."
{:title "Browse Collections", :type "group", :has_children true, :children [{:title "Keywords", :type "group", :applied false, :has_children true, :children [{:title "Popular", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&platform_h=diadem-1D&page_size=0&include_facets=v2&science_keywords_h%5B0%5D%5Btopic%5D=Popular"}, :has_children true} {:title "Topic1", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&platform_h=diadem-1D&page_size=0&include_facets=v2&science_keywords_h%5B0%5D%5Btopic%5D=Topic1"}, :has_children true}]} {:title "Platforms", :type "group", :applied true, :has_children true, :children [{:title "diadem-1D", :type "filter", :applied true, :count 2, :links {:remove "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&page_size=0&include_facets=v2"}, :has_children false} {:title "DMSP 5B/F3", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&platform_h=diadem-1D&page_size=0&include_facets=v2&platform_h%5B%5D=DMSP+5B%2FF3"}, :has_children false}]} {:title "Instruments", :type "group", :applied false, :has_children true, :children [{:title "ATM", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&platform_h=diadem-1D&page_size=0&include_facets=v2&instrument_h%5B%5D=ATM"}, :has_children false} {:title "lVIs", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&platform_h=diadem-1D&page_size=0&include_facets=v2&instrument_h%5B%5D=lVIs"}, :has_children false}]} {:title "Organizations", :type "group", :applied false, :has_children true, :children [{:title "DOI/USGS/CMG/WHSC", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&platform_h=diadem-1D&page_size=0&include_facets=v2&data_center_h%5B%5D=DOI%2FUSGS%2FCMG%2FWHSC"}, :has_children false}]} {:title "Projects", :type "group", :applied false, :has_children true, :children [{:title "proj1", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&platform_h=diadem-1D&page_size=0&include_facets=v2&project_h%5B%5D=proj1"}, :has_children false} {:title "PROJ2", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&platform_h=diadem-1D&page_size=0&include_facets=v2&project_h%5B%5D=PROJ2"}, :has_children false}]} {:title "Processing levels", :type "group", :applied false, :has_children true, :children [{:title "PL1", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&platform_h=diadem-1D&page_size=0&include_facets=v2&processing_level_id_h%5B%5D=PL1"}, :has_children false}]} {:title "Measurements", :type "group", :applied false, :has_children true, :children [{:title "Measurement1", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&platform_h=diadem-1D&page_size=0&include_facets=v2&variables_h%5B0%5D%5Bmeasurement%5D=Measurement1"}, :has_children true} {:title "Measurement2", :type "filter", :applied false, :count 1, :links {:apply 
"http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&platform_h=diadem-1D&page_size=0&include_facets=v2&variables_h%5B0%5D%5Bmeasurement%5D=Measurement2"}, :has_children true}]}]}) (def expected-v2-facets-apply-links-with-facets-size-and-non-existing-selecting-facet "Expected facets to be returned in the facets v2 response. The structure of the v2 facet response is documented in https://wiki.earthdata.nasa.gov/display/CMR/Updated+facet+response. This response is generated for the search http://localhost:3003/collections.json?page_size=0&platform_h[]=Non-Exist&include_facets=v2&facets_size[platform]=1 without any query parameters selected and with a couple of collections that have science keywords, projects, platforms, instruments, organizations, and processing levels in their metadata. This tests that the applied parameter is set to false correctly and that the generated links specify a a link to add each search parameter to apply that value to a search." {:title "Browse Collections", :type "group", :has_children true, :children [{:title "Platforms", :type "group", :applied true, :has_children true, :children [{:title "DMSP 5B/F3", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&platform_h=Non-Exist&page_size=0&include_facets=v2&platform_h%5B%5D=DMSP+5B%2FF3"}, :has_children false} {:title "Non-Exist", :type "filter", :applied true, :count 0, :links {:remove "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&page_size=0&include_facets=v2"}, :has_children false}]}]}) (def expected-v2-facets-apply-links-with-selecting-facet-without-facets-size "Expected facets to be returned in the facets v2 response. The structure of the v2 facet response is documented in https://wiki.earthdata.nasa.gov/display/CMR/Updated+facet+response. This response is generated for the search http://localhost:3003/collections.json?page_size=0&platform_h[]=existingPlat&include_facets=v2 without any query parameters selected and with a couple of collections that have science keywords, projects, platforms, instruments, organizations, and processing levels in their metadata. This tests that the applied parameter is set to false correctly and that the generated links specify a a link to add each search parameter to apply that value to a search." 
{:title "Browse Collections", :type "group", :has_children true, :children [{:title "Keywords", :type "group", :applied false, :has_children true, :children [{:title "Popular", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?platform_h=diadem-1D&page_size=0&include_facets=v2&science_keywords_h%5B0%5D%5Btopic%5D=Popular"}, :has_children true} {:title "Topic1", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?platform_h=diadem-1D&page_size=0&include_facets=v2&science_keywords_h%5B0%5D%5Btopic%5D=Topic1"}, :has_children true}]} {:title "Platforms", :type "group", :applied true, :has_children true, :children [{:title "diadem-1D", :type "filter", :applied true, :count 2, :links {:remove "http://localhost:3003/collections.json?page_size=0&include_facets=v2"}, :has_children false} {:title "DMSP 5B/F3", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?platform_h=diadem-1D&page_size=0&include_facets=v2&platform_h%5B%5D=DMSP+5B%2FF3"}, :has_children false}]} {:title "Instruments", :type "group", :applied false, :has_children true, :children [{:title "ATM", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?platform_h=diadem-1D&page_size=0&include_facets=v2&instrument_h%5B%5D=ATM"}, :has_children false} {:title "lVIs", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?platform_h=diadem-1D&page_size=0&include_facets=v2&instrument_h%5B%5D=lVIs"}, :has_children false}]} {:title "Organizations", :type "group", :applied false, :has_children true, :children [{:title "DOI/USGS/CMG/WHSC", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?platform_h=diadem-1D&page_size=0&include_facets=v2&data_center_h%5B%5D=DOI%2FUSGS%2FCMG%2FWHSC"}, :has_children false}]} {:title "Projects", :type "group", :applied false, :has_children true, :children [{:title "proj1", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?platform_h=diadem-1D&page_size=0&include_facets=v2&project_h%5B%5D=proj1"}, :has_children false} {:title "PROJ2", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?platform_h=diadem-1D&page_size=0&include_facets=v2&project_h%5B%5D=PROJ2"}, :has_children false}]} {:title "Processing levels", :type "group", :applied false, :has_children true, :children [{:title "PL1", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?platform_h=diadem-1D&page_size=0&include_facets=v2&processing_level_id_h%5B%5D=PL1"}, :has_children false}]} {:title "Measurements", :type "group", :applied false, :has_children true, :children [{:title "Measurement1", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?platform_h=diadem-1D&page_size=0&include_facets=v2&variables_h%5B0%5D%5Bmeasurement%5D=Measurement1"}, :has_children true} {:title "Measurement2", :type "filter", :applied false, :count 1, :links {:apply "http://localhost:3003/collections.json?platform_h=diadem-1D&page_size=0&include_facets=v2&variables_h%5B0%5D%5Bmeasurement%5D=Measurement2"}, :has_children true}]}]})
Department of the Interior", :count 2}]}]}]}]}]}]} {:field "platforms", :subfields ["category"], :category [{:value "Earth Observation Satellites", :count 2, :subfields ["series_entity"], :series_entity [{:value "DIADEM", :count 2, :subfields ["short_name"], :short_name [{:value "DIADEM-1D", :count 2, :subfields ["long_name"], :long_name [{:value "Not Provided", :count 2}]}]} {:value "DMSP (Defense Meteorological Satellite Program)", :count 2, :subfields ["short_name"], :short_name [{:value "DMSP 5B/F3", :count 2, :subfields ["long_name"], :long_name [{:value "Defense Meteorological Satellite Program-F3", :count 2}]}]}]}]} {:field "instruments", :subfields ["category"], :category [{:value "Earth Remote Sensing Instruments", :count 2, :subfields ["class"], :class [{:value "Active Remote Sensing", :count 2, :subfields ["type"], :type [{:value "Altimeters", :count 2, :subfields ["subtype"], :subtype [{:value "Lidar/Laser Altimeters", :count 2, :subfields ["short_name"], :short_name [{:value "ATM", :count 2, :subfields ["long_name"], :long_name [{:value "Airborne Topographic Mapper", :count 2}]} {:value "LVIS", :count 2, :subfields ["long_name"], :long_name [{:value "Land, Vegetation, and Ice Sensor", :count 2}]}]}]}]}]} {:value "Not Provided", ; Instruments now include sensors as child instruments :count 2, :subfields ["class"], :class [{:value "Not Provided", :count 2, :subfields ["type"], :type [{:value "Not Provided", :count 2, :subfields ["subtype"], :subtype [{:value "Not Provided", :count 2, :subfields ["short_name"], :short_name [{:value "FROM_KMS-p0-i0-s0", :count 2, :subfields ["long_name"], :long_name [{:count 2, :value "Not Provided"}]} {:value "FROM_KMS-p0-i1-s0", :count 2, :subfields ["long_name"], :long_name [{:count 2, :value "Not Provided"}]} {:value "FROM_KMS-p1-i0-s0", :count 2, :subfields ["long_name"], :long_name [{:count 2, :value "Not Provided"}]} {:value "FROM_KMS-p1-i1-s0", :count 2, :subfields ["long_name"], :long_name [{:count 2, :value "Not Provided"}]}]}]}]}]}]} {:field "science_keywords", :subfields ["category"], :category [{:value "HURRICANE", :count 2, :subfields ["topic"], :topic [{:value "POPULAR", :count 2, :subfields ["term"], :term [{:value "EXTREME", :count 2, :subfields ["variable_level_1"], :variable_level_1 [{:value "LEVEL2-1", :count 2, :subfields ["variable_level_2"], :variable_level_2 [{:value "LEVEL2-2", :count 2, :subfields ["variable_level_3"], :variable_level_3 [{:value "LEVEL2-3", :count 2}]}]}]} {:value "UNIVERSAL", :count 2}]} {:value "COOL", :count 2, :subfields ["term"], :term [{:value "TERM4", :count 2, :subfields ["variable_level_1"], :variable_level_1 [{:value "UNIVERSAL", :count 2}]}]}]} {:value "UPCASE", :count 2, :subfields ["topic"], :topic [{:value "COOL", :count 2, :subfields ["term"], :term [{:value "MILD", :count 2}]} {:value "POPULAR", :count 2, :subfields ["term"], :term [{:value "MILD", :count 2}]}]} {:value "CAT1", :count 2, :subfields ["topic"], :topic [{:value "TOPIC1", :count 2, :subfields ["term"], :term [{:value "TERM1", :count 2, :subfields ["variable_level_1"], :variable_level_1 [{:value "LEVEL1-1", :count 2, :subfields ["variable_level_2"], :variable_level_2 [{:value "LEVEL1-2", :count 2, :subfields ["variable_level_3"], :variable_level_3 [{:value "LEVEL1-3", :count 2}]}]}]}]}]} {:value "PI:NAME:<NAME>END_PI", :count 2, :subfields ["topic"], :topic [{:value "POPULAR", :count 2, :subfields ["term"], :term [{:value "EXTREME", :count 2}]}]}]} {:subfields ["category"], :category [{:subfields ["type"], :type [{:subfields 
["subregion_1"], :subregion_1 [{:subfields ["subregion_2"], :subregion_2 [{:subfields ["subregion_3"], :subregion_3 [{:count 2, :value "Not Provided"}], :count 2, :value "ANGOLA"}], :count 2, :value "CENTRAL AFRICA"}], :count 2, :value "AFRICA"} {:subfields ["subregion_1"], :subregion_1 [{:subfields ["subregion_2"], :subregion_2 [{:subfields ["subregion_3"], :subregion_3 [{:count 1, :value "GAZA STRIP"}], :count 1, :value "MIDDLE EAST"}], :count 1, :value "WESTERN ASIA"}], :count 1, :value "ASIA"}], :count 2, :value "CONTINENT"} {:subfields ["type"], :type [{:subfields ["subregion_1"], :subregion_1 [{:subfields ["subregion_2"], :subregion_2 [{:subfields ["subregion_3"], :subregion_3 [{:count 1, :value "Not Provided"}], :count 1, :value "Not Provided"}], :count 1, :value "Not Provided"}], :count 1, :value "NOT IN KMS"}], :count 1, :value "OTHER"}], :field "location_keywords"}]) (def expected-v2-facets-apply-links-with-facets-size "Expected facets to be returned in the facets v2 response. The structure of the v2 facet response is documented in https://wiki.earthdata.nasa.gov/display/CMR/Updated+facet+response. This response is generated for the search http://localhost:3003/collections.json?page_size=0&include_facets=v2&facets_size[platform]=1 without any query parameters selected and with a couple of collections that have science keywords, projects, platforms, instruments, organizations, and processing levels in their metadata. This tests that the applied parameter is set to false correctly and that the generated links specify a a link to add each search parameter to apply that value to a search." {:title "Browse Collections", :type "group", :has_children true, :children [{:title "Keywords", :type "group", :applied false, :has_children true, :children [{:title "Popular", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&page_size=0&include_facets=v2&science_keywords_h%5B0%5D%5Btopic%5D=Popular"}, :has_children true} {:title "Topic1", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&page_size=0&include_facets=v2&science_keywords_h%5B0%5D%5Btopic%5D=Topic1"}, :has_children true}]} {:title "Platforms", :type "group", :applied false, :has_children true, :children [{:title "DMSP 5B/F3", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&page_size=0&include_facets=v2&platform_h%5B%5D=DMSP+5B%2FF3"}, :has_children false}]} {:title "Instruments", :type "group", :applied false, :has_children true, :children [{:title "ATM", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&page_size=0&include_facets=v2&instrument_h%5B%5D=ATM"}, :has_children false} {:title "lVIs", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&page_size=0&include_facets=v2&instrument_h%5B%5D=lVIs"}, :has_children false}]} {:title "Organizations", :type "group", :applied false, :has_children true, :children [{:title "DOI/USGS/CMG/WHSC", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&page_size=0&include_facets=v2&data_center_h%5B%5D=DOI%2FUSGS%2FCMG%2FWHSC"}, :has_children false}]} {:title "Projects", :type "group", :applied false, :has_children true, :children [{:title "proj1", :type 
"filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&page_size=0&include_facets=v2&project_h%5B%5D=proj1"}, :has_children false} {:title "PROJ2", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&page_size=0&include_facets=v2&project_h%5B%5D=PROJ2"}, :has_children false}]} {:title "Processing levels", :type "group", :applied false, :has_children true, :children [{:title "PL1", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&page_size=0&include_facets=v2&processing_level_id_h%5B%5D=PL1"}, :has_children false}]} {:title "Measurements", :type "group", :applied false, :has_children true, :children [{:title "Measurement1", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&page_size=0&include_facets=v2&variables_h%5B0%5D%5Bmeasurement%5D=Measurement1"}, :has_children true} {:title "Measurement2", :type "filter", :applied false, :count 1, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&page_size=0&include_facets=v2&variables_h%5B0%5D%5Bmeasurement%5D=Measurement2"}, :has_children true}]}]}) (def expected-v2-facets-apply-links-with-selecting-facet-outside-of-facets-size "Expected facets to be returned in the facets v2 response. The structure of the v2 facet response is documented in https://wiki.earthdata.nasa.gov/display/CMR/Updated+facet+response. This response is generated for the search http://localhost:3003/collections.json?page_size=0&platform_h[]=diadem-1D&include_facets=v2&facets_size[platform]=1 without any query parameters selected and with a couple of collections that have science keywords, projects, platforms, instruments, organizations, and processing levels in their metadata. This tests that the applied parameter is set to false correctly and that the generated links specify a a link to add each search parameter to apply that value to a search." 
{:title "Browse Collections", :type "group", :has_children true, :children [{:title "Keywords", :type "group", :applied false, :has_children true, :children [{:title "Popular", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&platform_h=diadem-1D&page_size=0&include_facets=v2&science_keywords_h%5B0%5D%5Btopic%5D=Popular"}, :has_children true} {:title "Topic1", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&platform_h=diadem-1D&page_size=0&include_facets=v2&science_keywords_h%5B0%5D%5Btopic%5D=Topic1"}, :has_children true}]} {:title "Platforms", :type "group", :applied true, :has_children true, :children [{:title "diadem-1D", :type "filter", :applied true, :count 2, :links {:remove "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&page_size=0&include_facets=v2"}, :has_children false} {:title "DMSP 5B/F3", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&platform_h=diadem-1D&page_size=0&include_facets=v2&platform_h%5B%5D=DMSP+5B%2FF3"}, :has_children false}]} {:title "Instruments", :type "group", :applied false, :has_children true, :children [{:title "ATM", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&platform_h=diadem-1D&page_size=0&include_facets=v2&instrument_h%5B%5D=ATM"}, :has_children false} {:title "lVIs", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&platform_h=diadem-1D&page_size=0&include_facets=v2&instrument_h%5B%5D=lVIs"}, :has_children false}]} {:title "Organizations", :type "group", :applied false, :has_children true, :children [{:title "DOI/USGS/CMG/WHSC", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&platform_h=diadem-1D&page_size=0&include_facets=v2&data_center_h%5B%5D=DOI%2FUSGS%2FCMG%2FWHSC"}, :has_children false}]} {:title "Projects", :type "group", :applied false, :has_children true, :children [{:title "proj1", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&platform_h=diadem-1D&page_size=0&include_facets=v2&project_h%5B%5D=proj1"}, :has_children false} {:title "PROJ2", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&platform_h=diadem-1D&page_size=0&include_facets=v2&project_h%5B%5D=PROJ2"}, :has_children false}]} {:title "Processing levels", :type "group", :applied false, :has_children true, :children [{:title "PL1", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&platform_h=diadem-1D&page_size=0&include_facets=v2&processing_level_id_h%5B%5D=PL1"}, :has_children false}]} {:title "Measurements", :type "group", :applied false, :has_children true, :children [{:title "Measurement1", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&platform_h=diadem-1D&page_size=0&include_facets=v2&variables_h%5B0%5D%5Bmeasurement%5D=Measurement1"}, :has_children true} {:title "Measurement2", :type "filter", :applied false, :count 1, :links {:apply 
"http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&platform_h=diadem-1D&page_size=0&include_facets=v2&variables_h%5B0%5D%5Bmeasurement%5D=Measurement2"}, :has_children true}]}]}) (def expected-v2-facets-apply-links-with-facets-size-and-non-existing-selecting-facet "Expected facets to be returned in the facets v2 response. The structure of the v2 facet response is documented in https://wiki.earthdata.nasa.gov/display/CMR/Updated+facet+response. This response is generated for the search http://localhost:3003/collections.json?page_size=0&platform_h[]=Non-Exist&include_facets=v2&facets_size[platform]=1 without any query parameters selected and with a couple of collections that have science keywords, projects, platforms, instruments, organizations, and processing levels in their metadata. This tests that the applied parameter is set to false correctly and that the generated links specify a a link to add each search parameter to apply that value to a search." {:title "Browse Collections", :type "group", :has_children true, :children [{:title "Platforms", :type "group", :applied true, :has_children true, :children [{:title "DMSP 5B/F3", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&platform_h=Non-Exist&page_size=0&include_facets=v2&platform_h%5B%5D=DMSP+5B%2FF3"}, :has_children false} {:title "Non-Exist", :type "filter", :applied true, :count 0, :links {:remove "http://localhost:3003/collections.json?facets_size%5Bplatform%5D=1&page_size=0&include_facets=v2"}, :has_children false}]}]}) (def expected-v2-facets-apply-links-with-selecting-facet-without-facets-size "Expected facets to be returned in the facets v2 response. The structure of the v2 facet response is documented in https://wiki.earthdata.nasa.gov/display/CMR/Updated+facet+response. This response is generated for the search http://localhost:3003/collections.json?page_size=0&platform_h[]=existingPlat&include_facets=v2 without any query parameters selected and with a couple of collections that have science keywords, projects, platforms, instruments, organizations, and processing levels in their metadata. This tests that the applied parameter is set to false correctly and that the generated links specify a a link to add each search parameter to apply that value to a search." 
{:title "Browse Collections", :type "group", :has_children true, :children [{:title "Keywords", :type "group", :applied false, :has_children true, :children [{:title "Popular", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?platform_h=diadem-1D&page_size=0&include_facets=v2&science_keywords_h%5B0%5D%5Btopic%5D=Popular"}, :has_children true} {:title "Topic1", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?platform_h=diadem-1D&page_size=0&include_facets=v2&science_keywords_h%5B0%5D%5Btopic%5D=Topic1"}, :has_children true}]} {:title "Platforms", :type "group", :applied true, :has_children true, :children [{:title "diadem-1D", :type "filter", :applied true, :count 2, :links {:remove "http://localhost:3003/collections.json?page_size=0&include_facets=v2"}, :has_children false} {:title "DMSP 5B/F3", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?platform_h=diadem-1D&page_size=0&include_facets=v2&platform_h%5B%5D=DMSP+5B%2FF3"}, :has_children false}]} {:title "Instruments", :type "group", :applied false, :has_children true, :children [{:title "ATM", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?platform_h=diadem-1D&page_size=0&include_facets=v2&instrument_h%5B%5D=ATM"}, :has_children false} {:title "lVIs", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?platform_h=diadem-1D&page_size=0&include_facets=v2&instrument_h%5B%5D=lVIs"}, :has_children false}]} {:title "Organizations", :type "group", :applied false, :has_children true, :children [{:title "DOI/USGS/CMG/WHSC", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?platform_h=diadem-1D&page_size=0&include_facets=v2&data_center_h%5B%5D=DOI%2FUSGS%2FCMG%2FWHSC"}, :has_children false}]} {:title "Projects", :type "group", :applied false, :has_children true, :children [{:title "proj1", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?platform_h=diadem-1D&page_size=0&include_facets=v2&project_h%5B%5D=proj1"}, :has_children false} {:title "PROJ2", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?platform_h=diadem-1D&page_size=0&include_facets=v2&project_h%5B%5D=PROJ2"}, :has_children false}]} {:title "Processing levels", :type "group", :applied false, :has_children true, :children [{:title "PL1", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?platform_h=diadem-1D&page_size=0&include_facets=v2&processing_level_id_h%5B%5D=PL1"}, :has_children false}]} {:title "Measurements", :type "group", :applied false, :has_children true, :children [{:title "Measurement1", :type "filter", :applied false, :count 2, :links {:apply "http://localhost:3003/collections.json?platform_h=diadem-1D&page_size=0&include_facets=v2&variables_h%5B0%5D%5Bmeasurement%5D=Measurement1"}, :has_children true} {:title "Measurement2", :type "filter", :applied false, :count 1, :links {:apply "http://localhost:3003/collections.json?platform_h=diadem-1D&page_size=0&include_facets=v2&variables_h%5B0%5D%5Bmeasurement%5D=Measurement2"}, :has_children true}]}]})
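;; Illustrative only: a minimal sketch of how one of the expected-facet maps
;; above might be compared against a live facet response in a test.
;; `fetch-v2-facets` is a hypothetical helper (not defined in this file); it
;; stands in for whatever function issues the collections.json search named in
;; the def's docstring and returns the parsed :facets map from the JSON body.
(comment
  (require '[clojure.test :refer [is]]
           '[clojure.data :as data])

  (let [actual (fetch-v2-facets {:page_size 0
                                 "facets_size[platform]" 1})]
    ;; Straight equality against the expected value...
    (is (= expected-v2-facets-apply-links-with-facets-size actual))
    ;; ...and, when it fails, diff the two maps to see exactly which facet
    ;; group or link differs.
    (data/diff expected-v2-facets-apply-links-with-facets-size actual)))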
;; ----------------------------------------------------------------------------
;; src/advent_2020/day03.clj  (repo: mark-naylor-1701/advent_2020)
;; ----------------------------------------------------------------------------
;; author: Mark W. Naylor
;; file: day03.clj
;; date: 2021-May-28

(ns advent-2020.day03
  (:require [advent-2020.core :refer [input]]))

(def right-amount 3)   ; Right step amount for Part 1.
(def down-amount 1)    ; Down step amount for Part 1.
(def tree-char \#)
(def input-file "day03.txt")

;; Part 2 multiple slopes
(def slopes [{:right 1 :down 1}
             {:right 3 :down 1}
             {:right 5 :down 1}
             {:right 7 :down 1}
             {:right 1 :down 2}])

(defn columns
  "How many \"openings\" and \"trees\" are in a given row of the input
  data. Assumes all the rows are the same size."
  [input]
  (-> input first count))

(defn rows
  "How many rows are in the input data."
  [input]
  (count input))

(defn wrap-around
  "Handles the \"wrap-around\" for repeated row items, without actually
  duplicating the data."
  [step row-items]
  (mod step (count row-items)))

;; Function only used for Part 1.
(defn col-inc
  "Slide right the appropriate amount."
  [n]
  (+ n right-amount))

;; Function only used for Part 1.
(defn row-inc
  "Slide down the appropriate amount."
  ([] down-amount)
  ([n] (+ n down-amount)))

(defn tree?
  "Is the row item at `n' a tree?"
  [n row-items]
  (= tree-char (get row-items (wrap-around n row-items))))

(defn increment-factory
  "Generates a multiple arity function."
  [amt]
  (letfn [(f ([] amt)
             ([n] (+ n amt)))]
    f))

(defn tree-count
  "Follow the slope through the forest grid, return the number of trees
  encountered on the trip top to bottom."
  [forest row-inc col-inc]
  (letfn [(-tree-count [acc idx forest]
            (cond
              (empty? forest) acc
              :else
              (let [head (first forest)
                    tail (drop (row-inc) forest)
                    spot-count (if (tree? idx head) 1 0)]
                (recur (+ acc spot-count) (col-inc idx) tail))))]
    (-tree-count 0 0 forest)))

(defn multiple-slope-counts
  "Tree counts for each of the Part 2 slopes."
  [forest]
  (for [slope slopes]
    (tree-count forest
                (increment-factory (:down slope))
                (increment-factory (:right slope)))))

(defn multiple-slope-product
  "Product of the tree counts for the Part 2 slopes."
  [forest]
  (reduce * (multiple-slope-counts forest)))

(defn part1-count
  "Answer for Part 1."
  []
  (tree-count (input input-file) row-inc col-inc))

(defn part2-product
  "Answer for Part 2."
  []
  (multiple-slope-product (input input-file)))

;; ------------------------------------------------------------------------------
;; BSD 3-Clause License

;; Copyright © 2021, Mark W. Naylor
;; All rights reserved.

;; Redistribution and use in source and binary forms, with or without
;; modification, are permitted provided that the following conditions are met:

;; 1. Redistributions of source code must retain the above copyright notice, this
;;    list of conditions and the following disclaimer.

;; 2. Redistributions in binary form must reproduce the above copyright notice,
;;    this list of conditions and the following disclaimer in the documentation
;;    and/or other materials provided with the distribution.

;; 3. Neither the name of the copyright holder nor the names of its
;;    contributors may be used to endorse or promote products derived from
;;    this software without specific prior written permission.

;; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
;; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
;; IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
;; DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
;; FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
;; DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
;; SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
;; CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
;; OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
;; OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
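;; Illustrative only: a tiny REPL-style check of `tree-count` on a hand-written
;; toy forest ("." = open ground, "#" = tree). Rows are plain strings, which
;; works because `get` indexes into a string by character position. The toy
;; grid and its expected count below are made up for illustration and have
;; nothing to do with the real puzzle input.
(comment
  (def toy-forest
    ["..##....."
     "#...#...#"
     ".#....#.."
     "..#.#...#"
     ".#...##.."])

  ;; Part 1 style slope on the toy grid: right 3, down 1.
  (tree-count toy-forest (increment-factory 1) (increment-factory 3))
  ;; => 1

  ;; All Part 2 slopes at once.
  (multiple-slope-counts toy-forest))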
;; ----------------------------------------------------------------------------
;; src/splunk_clojure_client/main.clj  (repo: dconnett-splunk/splunk-clojure-client)
;; ----------------------------------------------------------------------------
(ns splunk-clojure-client.main
  (:require [clojure.string]
            [clj-http.client :as client]
            [clojure.pprint]
            [clj-http.conn-mgr]
            [clj-http.core]
            [clj-http.cookies]))

(use 'clojure.set)

;; Merge maps of data recursively; I use this to compose HTTP params.
(defn deep-merge-with
  "Like merge-with, but merges maps recursively, applying the given fn
  only when there's a non-map at a particular level.

  (deep-merge-with + {:a {:b {:c 1 :d {:x 1 :y 2}} :e 3} :f 4}
                     {:a {:b {:c 2 :d {:z 9} :z 3} :e 100}})
  -> {:a {:b {:z 3, :c 3, :d {:z 9, :x 1, :y 2}}, :e 103}, :f 4}"
  [f & maps]
  (apply
   (fn m [& maps]
     (if (every? map? maps)
       (apply merge-with m maps)
       (apply f maps)))
   maps))

;; Debugging helper, not sure how this works...
(def ^:dynamic *verbose* false)

;; Debugging helper, not sure how this works...
(defmacro printfv
  [fmt & args]
  `(when *verbose*
     (printf ~fmt ~@args)))

;; Debugging helper, I don't know how this works...
(defmacro dbg [x]
  `(let [x# ~x]
     (clojure.pprint/pprint '~x "=" x#)
     x#))

;; Debugging helper, add function to pretty print function call.
(defn cl-print [x]
  (doto x
    (clojure.pprint/pprint)))

;; Splunk base instance URI.
(def splunk-base-uri "http://restapi-hur-04.class.splunk.com:8089/services")

;; Splunk's login endpoint.
(def splunk-auth-uri (str splunk-base-uri "/auth/login"))

;; Splunk's search endpoint.
(def splunk-search-uri (str splunk-base-uri "/search/jobs"))

;; Splunk's session delete endpoint.
(defn splunk-delete-auth-uri [token]
  (str splunk-base-uri "/authentication/httpauth-tokens/" token))

;; Creates params for authentication. Change these for your environment.
(def splunk-auth-map
  {:form-params
   {:username "restclient" :password "restC0der" :output_mode "json"}})

;; Base HTTP params that are common to all HTTP methods.
(def splunk-base-params
  {:accept :json
   :throw-entire-message? true
   :decode-body-headers true
   :as :auto
   :headers {:content-type "application/x-www-form-urlencoded"}})

;; Defines task1 HTTP parameters.
(defn task1-params-authenticate []
  (merge splunk-base-params splunk-auth-map)
  ;; => {:accept :json,
  ;;     :throw-entire-message? true,
  ;;     :decode-body-headers true,
  ;;     :as :auto,
  ;;     :headers {:content-type "application/x-www-form-urlencoded"},
  ;;     :form-params {:username "restclient", :password "restC0der", :output_mode "json"}}
  )

;; Executes task1 post.
(defn task1-post []
  (client/post splunk-auth-uri (task1-params-authenticate)))

;; Gets task1's HTTP response body.
(defn task1-response-body []
  (:body (task1-post)))

;; Gets the session key from task1's body. Executes task1 to do this.
(defn get-session-key []
  (select-keys (task1-response-body) [:sessionKey]))

;; Takes a session token, and produces the Splunk formatted HTTP param.
(defn get-splunk-auth-header [token]
  {:Authorization (str "Splunk " token)})

;; Refreshes current session key.
(defn refresh-session-key []
  (def session-key (get-session-key)))

;; Formats the current session key for use in composing HTTP params.
(defn splunk-session-param []
  {:header (get-splunk-auth-header (:sessionKey session-key))})

;; Define task3 HTTP params.
(defn task3-params [token]
  {:accept :json
   :throw-entire-message? true
   :decode-body-headers true
   :as :auto
   :headers (merge {:content-type "application/x-www-form-urlencoded"}
                   (get-splunk-auth-header (:sessionKey session-key)))})

;; Complete task3 task.
(defn task3-delete []
  (client/delete (splunk-delete-auth-uri (:sessionKey session-key))
                 (task3-params (:sessionKey session-key))))

(refresh-session-key)
(task3-delete)

;; Define task4 HTTP parameters.
(defn task4-params []
  (deep-merge-with union splunk-base-params (splunk-session-param)))

;; Complete task 4 of the REST API Splunk Course.
(defn task4-post []
  (client/post splunk-auth-uri (task4-params)))

;; Another cookie test. Doesn't work...
;; (binding [clj-http.core/*cookie-store* (clj-http.cookies/cookie-store)]
;;   (refresh-session-key)
;;   (task4-post))

;; Create a cookie store to be used for subsequent calls.
(def my-cs (clj-http.cookies/cookie-store))

;; Simple test of setting cookies with binding. Currently doesn't seem to work.
(binding [clj-http.core/*cookie-store* (clj-http.cookies/cookie-store)]
  (client/post splunk-auth-uri
               {:accept :json
                :debug true
                :debug-body true
                :save-request? true
                :response-interceptor (fn [resp ctx] (println ctx))
                :throw-entire-message? true
                :decode-body-headers true
                :as :auto
                :headers {:content-type "application/x-www-form-urlencoded"}
                :cookie-store my-cs
                :form-params {:username "restclient"
                              :password "restC0der"
                              :output_mode "json"}}))

;; Simple test of setting cookies with let. Currently doesn't seem to work.
(clojure.pprint/pprint
 (let [my-cs (clj-http.cookies/cookie-store)]
   (client/post splunk-auth-uri
                {:accept :json
                 :debug true
                 :debug-body true
                 :save-request? true
                 :throw-entire-message? true
                 :response-interceptor (fn [resp ctx] (println ctx))
                 :decode-body-headers true
                 :as :auto
                 :headers {:content-type "application/x-www-form-urlencoded"}
                 :cookie-store my-cs
                 :form-params {:username "restclient"
                               :password "restC0der"
                               :output_mode "json"}})))

(clojure.pprint/pprint (clj-http.cookies/get-cookies my-cs))
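;; Illustrative only: the intended call sequence for the task functions above,
;; written as a REPL comment block. The endpoint, username and password are the
;; class-environment values defined at the top of this file; adjust them for
;; your own Splunk instance before evaluating. The response shape shown is a
;; sketch, not captured output.
(comment
  ;; 1. Authenticate and cache the session key (task 1).
  (refresh-session-key)
  session-key
  ;; => {:sessionKey "..."}

  ;; 2. Make an authenticated request using the Splunk Authorization header (task 4).
  (task4-post)

  ;; 3. Delete the current session token when done (task 3).
  (task3-delete))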
;; ----------------------------------------------------------------------------
;; project/cvm/src/clj/main/convex/clj.clj  (repo: rosejn/convex.cljc)
;; ----------------------------------------------------------------------------
(ns convex.clj "Convert cells to Clojure types. Sometimes lossy since some cells do not have equivalents in Clojure. For instance, addresses are converted to long. Recursive when it comes to collection. Mainly useful for a deeper Clojure integration." {:author "Adam Helinski"} (:import (convex.core.data ABlob AList AMap ASet AString AVector Address Keyword Symbol Syntax) (convex.core.data.prim CVMBool CVMByte CVMChar CVMDouble CVMLong)) (:refer-clojure :exclude [boolean byte char double keyword list long map set symbol vector])) (declare any) ;;;;;;;;;; (defn address "Returns the given `address` as a JVM long." [^Address address] (.longValue address)) (defn blob "Returns the given `blob` as a byte array." [^ABlob blob] (.getBytes blob)) (defn boolean "Returns the given `boolean` cell as a JVM boolean." [^CVMBool boolean] (.booleanValue boolean)) (defn byte "Returns the given `byte` cell as a JVM long." [^CVMByte cell] (.longValue cell)) (defn char "Returns the given `char` cell as a JVM char." [^CVMChar char] (clojure.core/char (.longValue char))) (defn double "Returns the given `double` cell as a JVM double." [^CVMDouble double] (.doubleValue double)) (defn keyword "Returns the given `keyword` cell as a Clojure keyword." [^Keyword keyword] (clojure.core/keyword (.getName keyword))) (defn list "Returns the given `list` cell as a Clojure list." [^AList list] (clojure.core/map any list)) (defn long "Returns the given `long` cell as a JVM long." [^CVMLong long] (.longValue long)) (defn map "Returns the given `map` cell (hash map or blob map) as a Clojure map. Attention, in Clojure maps, sequential types containg the same items are equivalent but not in Convex. Hence, a clash could happen in the rare case where different sequential types are used as keys. For instance, the following is possible in Convex but not in Clojure (would complain about duplicate keys: ```clojure {[:a] :foo '(:a) :foo} ```" [^AMap map] (-> (reduce (fn [acc [k v]] (assoc! acc (any k) (any v))) (transient {}) map) persistent!)) (defn set "Returns the given `set` cell as a Clojure set. Same comment about sequential types as in [[map]] applies here." [^ASet set] (into #{} (clojure.core/map any) set)) (defn string "Returns the given `string` cell as a JVM string." [^AString string] (str string)) (defn symbol "Returns the given `symbol` cell as a Clojure symbol." [^Symbol symbol] (clojure.core/symbol (.getName symbol))) (defn syntax "Returns the given `syntax` cell as a Clojure map such as: | Key | Value | |---|---| | `:meta` | Clojure map of metadata | | `:value` | Value wrapped, converted as well |" [^Syntax syntax] {:meta (any (.getMeta syntax)) :value (any (.getValue syntax))}) (defn vector "Returns the given `vector` cell as a Clojure vector." [^AVector vector] (mapv any vector)) ;;;;;;;;;; Protocol (defprotocol IClojuresque "Generic function for converting a cell to a Clojure representation. Relies all other functions from this namespace. 
```clojure (any (convex.cell/* {:a [:b]})) ```" (any [cell])) (extend-protocol IClojuresque nil (any [cell] nil) Address (any [cell] (address cell)) ABlob (any [cell] (blob cell)) AList (any [cell] (list cell)) AMap (any [cell] (map cell)) ASet (any [cell] (set cell)) AString (any [cell] (string cell)) AVector (any [cell] (vector cell)) CVMBool (any [cell] (boolean cell)) CVMByte (any [cell] (byte cell)) CVMChar (any [cell] (char cell)) CVMDouble (any [cell] (double cell)) CVMLong (any [cell] (long cell)) Keyword (any [cell] (keyword cell)) Syntax (any [cell] (syntax cell)) Symbol (any [cell] (symbol cell)))
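For orientation, here is a hypothetical REPL session showing the conversion in action. It assumes the companion convex.cell namespace from the same repository (the one referenced in the docstring of `any` above) exposes the `*` templating macro and an `address` constructor; the printed results restate the behaviour described in the docstrings rather than a captured run.

```clojure
(require '[convex.cell :as cell]
         '[convex.clj  :as clj])

;; Collections are converted recursively (see the docstrings above).
(clj/any (cell/* {:a [:b 42]}))
;; => {:a [:b 42]}

;; Addresses come back as plain JVM longs, so the conversion is lossy:
;; the address #42 and the long 42 are indistinguishable on the Clojure side.
(clj/address (cell/address 42))
;; => 42
```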
test/ring/ring_okta/session_test.clj (bostonaholic/ring-okta)
(ns ring.ring-okta.session-test (:require [clojure.test :refer [deftest testing is]] [ring.ring-okta.saml :as saml] [ring.ring-okta.session :as session])) (defn- stub-respond-to-okta-post [& args] {:redirect-url "http://foo.bar.com" :authenticated-user-email "[email protected]"}) (deftest test-login (let [request {:params {} :okta-config-location "test-resources/okta-config.xml"}] (with-redefs [saml/respond-to-okta-post stub-respond-to-okta-post] (testing "user placed in session" (is (= "[email protected]" (-> (session/login request) :session :okta/user)))) (testing "redirect after login" (is (= 303 (-> (session/login request) :status))) (is (= "http://foo.bar.com" (-> (session/login request) :headers (get "Location")))))))) (deftest test-logout (let [request {:params {:foo "foo"} :session {:okta/user "[email protected]" :bar "bar"}}] (testing "logout removes only :okta/user from session" (is (= {:bar "bar"} (-> (session/logout request) :session)))) (testing "logout does not clear other items in the request" (is (= {:params {:foo "foo"} :session {:bar "bar"}} (session/logout request))))))
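The tests above treat `login` and `logout` as plain functions of a Ring request, so wiring them into an application is mostly routing. The sketch below is purely illustrative: the URIs, config path, and redirect target are assumptions, not part of ring-okta. Note that `logout` returns the request with `:okta/user` removed, so only its `:session` is carried into the response.

```clojure
(require '[ring.ring-okta.session :as session])

;; Hypothetical handler; paths and config location are placeholders.
(defn app [request]
  (case (:uri request)
    "/saml/consume"
    (session/login (assoc request :okta-config-location "resources/okta-config.xml"))

    "/logout"
    {:status  302
     :headers {"Location" "/"}
     ;; logout only strips :okta/user, so reuse the remaining session.
     :session (:session (session/logout request))}

    {:status 404 :body "not found"}))
```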
data/clojure/e106da13afc209a2a9f2b391ad29173d_text.clj (maxim5/code-inspector)
(ns ^{:doc "Text manipulation utilities." :author "Kyle Burton"} clj-etl-utils.text (:use [clj-etl-utils.lang-utils :only [raise]]) (:require [clojure.string :as str-utils]) (:import [org.apache.commons.lang WordUtils] [java.text NumberFormat DecimalFormat] [org.apache.commons.codec.binary Base64])) (defn ^{:doc "Convert string to upper case, null safe (returns empty string on null)."} uc [^String s] (if (nil? s) "" (.toUpperCase s))) (defn ^{:doc "Convert string to lower case, null safe (returns empty string on null)."} lc [^String s] (if (nil? s) "" (.toLowerCase s))) (defmacro ^{:doc "Binds a temporary file to the symbol indicated by var (java.io.File/createTempFile). prefix and suffix default to \"pfx\" and \"sfx\" respectively. Note that this macro does not create or clean up the actual temporary file itself. "} with-tmp-file [[var & [prefix suffix]] & body] `(let [prefix# ~prefix suffix# ~suffix ~var (java.io.File/createTempFile (or prefix# "pfx") (or suffix# "sfx"))] ~@body)) (defn ^{:doc "Compute the MD5 sum of a byte buffer, returning it as a hex-encoded string."} md5->string [^bytes bytes] (let [digester (java.security.MessageDigest/getInstance "MD5")] (.update digester bytes) (.toString (java.math.BigInteger. 1 (.digest digester)) 16))) (defn ^{:doc "Compute the SHA1 sum of a byte buffer, returning it as a hex-encoded string."} sha1->string [^bytes bytes] (let [digester (java.security.MessageDigest/getInstance "SHA1")] (.update digester bytes) (.toString (java.math.BigInteger. 1 (.digest digester)) 16))) (defn ^{:doc "Returns a sequence of all the security providers available in the current JVM. The sequence consists of pairs of [provider-type provider-algorithm]"} security-providers-type-algorithm-seq [] (mapcat (fn [provider] (map (fn [^java.security.Provider$Service svc] [(.getType svc) (.getAlgorithm svc)]) (.getServices ^java.security.Provider provider))) (java.security.Security/getProviders))) (defn ^{:doc "Returns a seq of all of the provider types available in the current JVM."} security-providers-types [] (vec (set (map first (security-providers-type-algorithm-seq))))) (defn ^{:doc "Filters security-providers-type-algorithm-seq for those that match the given type. 
(security-providers-for-type \"MessageDigest\") "} security-providers-for-type [type] (filter #(= (first %) type) (security-providers-type-algorithm-seq))) (defn ^{:doc "Sequence of all the MessageDigest providers available in the current JVM."} message-digest-algorithms [] (security-providers-for-type "MessageDigest")) (comment (security-providers-types) (message-digest-algorithms) ) (defn ^{:doc "Compute and return the SHA1 sum of the given string, returned as a hex-encoded string."} string->sha1 [^String s] (sha1->string (.getBytes s))) (defn ^{:doc "Compute and return the MD5 sum of the given string, returned as a hex-encoded string."} string->md5 [^String s] (md5->string (.getBytes s))) (defn ^{:doc "Compute and return the SHA256 sum of the given byte array, returned as a hex-encoded string."} sha256->string [^bytes bytes] (let [digester (java.security.MessageDigest/getInstance "SHA-256")] (.update digester bytes) (apply str (map (fn [byte] (Integer/toHexString (bit-and 0xFF byte))) (.digest digester))))) (defn ^{:doc "Compute and return the SHA256 sum of the given string, returned as a hex-encoded string."} string->sha256 [^String s] (sha256->string (.getBytes s))) (defn ^{:doc "Compute and return the SHA384 sum of the byte array, returned as a hex-encoded string."} sha384->string [^bytes bytes] (let [digester (java.security.MessageDigest/getInstance "SHA-384")] (.update digester bytes) (apply str (map (fn [byte] (Integer/toHexString (bit-and 0xFF byte))) (.digest digester))))) (defn ^{:doc "Compute and return the SHA384 sum of the given string, returned as a hex-encoded string."} string->sha384 [^String s] (sha384->string (.getBytes s))) (defn ^{:doc "Compute and return the SHA512 sum of the byte array, returned as a hex-encoded string."} sha512->string [^bytes bytes] (let [digester (java.security.MessageDigest/getInstance "SHA-512")] (.update digester bytes) (apply str (map (fn [byte] (Integer/toHexString (bit-and 0xFF byte))) (.digest digester))))) (defn ^{:doc "Compute and return the SHA512 sum of the given string, returned as a hex-encoded string."} string->sha512 [^String s] (sha512->string (.getBytes s))) (comment (count (string->sha1 "foof")) ;; 40 (count (string->sha256 "foof")) ;; 63 (count (string->sha384 "foof")) ;; 90 (count (string->sha512 "foof")) ;; 126 (time (dotimes [ii 10000] (string->sha1 "foof"))) (time (dotimes [ii 10000] (string->sha256 "foof"))) (time (dotimes [ii 10000] (string->sha512 "foof"))) ) ;; TODO this doesn't belong in text.clj, couldn't think of a better place for it (defn ^{:doc "Current time in milliseconds."} now-milliseconds [] (.getTime (java.util.Date.))) (defn ^{:doc "Substring that supports negative starting positions (negative takes the last N'th characters from the right-hand side of the string). (substr \"the quick brown fox\" 10) => \"brown fox\" (substr \"the quick brown fox\" -3) => \"fox\" "} substr [^String s start & [end]] (cond (and (< start 0) (not end)) (let [start (+ (count s) start)] (if (< start 0) s (.substring s start))) (> start (count s)) "" (or (not end) (> end (count s))) (.substring s start) :else (.substring s start end))) (comment (= "" (substr "" 0 0)) (= "" (substr "a" 0 0)) (= "a" (substr "a" 0)) (= "a" (substr "a" 0 1)) (= "a" (substr "a" 0 99)) (= "" (substr "a" 99)) (= "" (substr "a" 99 199)) (= "a" (substr "a" -1)) (= "bc" (clj-etl-utils.text/substr "abc" -2)) (= "" (substr "abc" -9))) ;; "public static String humanReadableByteCount(long bytes, boolean si) { ;; int unit = si ? 
1000 : 1024; ;; if (bytes < unit) return bytes + " B "; ;; int exp = (int) (Math.log(bytes) / Math.log(unit)); ;; String pre = (si ? "kMGTPE " : "KMGTPE ").charAt(exp-1) + (si ? " " : "i "); ;; return String.format("%.1f %sB ", bytes / Math.pow(unit, exp), pre); ;; }" (defn ^{:doc "Prodcues a human-readable (friendly unit sizes) count of the number of bytes provided (as a string). (human-readable-byte-count 1023) \"1023B\" (human-readable-byte-count 1024) \"1.00KiB\" (human-readable-byte-count (* 1024 1024)) \"1.00MiB\" (human-readable-byte-count (* 1024 1024 1024)) \"1.00GiB\" (human-readable-byte-count (+ 1 (* 1024 1024 1024))) \"1.00GiB\" (human-readable-byte-count (* 1024 1024 1024 1024)) \"1.00TiB\" (human-readable-byte-count (* 1024 1024 1024 1024 1024)) \"1.00PiB\" (human-readable-byte-count (* 1024 1024 1024 1024 1024 1024)) \"1.00EiB\" (human-readable-byte-count (* 1024 1024 1024 1024 1024 1024 1024)) => Error, no Si prefix for this size Taken from: http://stackoverflow.com/questions/3758606/how-to-convert-byte-size-into-human-readable-format-in-java "} human-readable-byte-count ([nbytes] (human-readable-byte-count nbytes false)) ([nbytes use-si] (let [unit (if use-si 1000 1024) exp (int (/ (Math/log nbytes) (Math/log unit)))] (if (< nbytes unit) (str nbytes "B") (format "%.2f%sB" (/ nbytes (Math/pow unit exp)) (str (.charAt (if use-si "kMGTPE" "KMGTPE") (dec exp)) (if use-si "" "i"))))))) (comment ) (defn ^{:doc "Wrap a string (sentence or paragraph) at a maximum length. (word-split \"This is a long sentence, if it were documentation someone would be happy and someone would be unsatisified. That is the way of things.\" 50) => (\"This is a long sentence, if it were documentation\" \"someone would be happy and someone would be\" \"unsatisified. That is the way of things.\") "} word-split ([^String str size] (word-split str size "\0")) ([^String str size ^String delim] (if (>= (.indexOf str delim) 0) (raise "Input string must not contain delimiter string (%s). Unable to split (input string=%s" delim str) (seq (.split (WordUtils/wrap str size delim false) delim))))) (comment ) (def formatter-setters {:negative-prefix (fn [^DecimalFormat nf ^String x] (.setNegativePrefix nf x)) :negative-suffix (fn [^DecimalFormat nf ^String x] (.setNegativeSuffix nf x)) :positive-prefix (fn [^DecimalFormat nf ^String x] (.setPositivePrefix nf x)) :positive-suffix (fn [^DecimalFormat nf ^String x] (.setPositiveSuffix nf x))}) (defn ^{:doc ""} apply-format-setter [^NumberFormat nf k v] (if-not (contains? formatter-setters k) (raise "set-formatter-option: option not yet implemented: %s" k)) ((get formatter-setters k) nf v) nf) (declare default-formatters) (defn get-currency-formatter [opts-or-keyword] (cond (map? opts-or-keyword) (reduce (fn [formatter [k v]] (apply-format-setter formatter k v)) (java.text.NumberFormat/getCurrencyInstance) opts-or-keyword) (keyword? 
opts-or-keyword) (or (get @default-formatters opts-or-keyword) (raise "Error: formatter not found for keyword: %s" opts-or-keyword)) :else (raise "Error: unrecognized formatter spec (not a map or keyword): [%s] %s" (class opts-or-keyword) opts-or-keyword))) (def currency-with-negative (get-currency-formatter {:negative-prefix "-$" :negative-suffix ""})) (def default-formatters (atom {:currency-with-negative currency-with-negative :default (get-currency-formatter {})})) (defn format-as-currency ([num] (format-as-currency num :default)) ([num opts] (.format ^java.text.Format (get-currency-formatter opts) num))) (defonce rx-clean-phone-number #"\D+") (defn canonical-phone-number [^String mobile-number] (if (nil? mobile-number) "" (let [num (str-utils/replace mobile-number rx-clean-phone-number "")] (if (= 10 (count num)) (str 1 num) num)))) (defn uncanonicalize-phone-number [^String mobile-number] (let [phone-number (canonical-phone-number mobile-number) [_ area-code central-office subscriber-number] (re-find #"\d{1}(\d{3})(\d{3})(\d{4})" phone-number)] (format "%s-%s-%s" area-code central-office subscriber-number))) (defn snake-case [^String s] (.toString ^StringBuilder (reduce (fn [b c] (if (Character/isUpperCase c) (do (.append ^StringBuilder b "-") (.append ^StringBuilder b ^CharSequence (clojure.string/lower-case c))) (.append ^StringBuilder b c))) (StringBuilder.) (name s)))) (defn camel->snake [^java.util.Map params] (reduce (fn [accum [k v]] (assoc accum (keyword (snake-case k)) v)) {} params)) (defn camel->underscore [^java.util.Map params] (reduce (fn [accum [k v]] (assoc accum (keyword (.replaceAll ^String (snake-case k) "-" "_")) v)) {} params)) (defn snake->underscore [^java.util.Map params] (reduce (fn [accum [k v]] (assoc accum (keyword (.replaceAll (name k) "-" "_")) v)) {} params)) (defn underscore->snake [^java.util.Map params] (reduce (fn [accum [k v]] (assoc accum (keyword (.replaceAll (name k) "_" "-")) v)) {} params)) (defn camelize-keyword [k] (let [[res & parts] (.split (name k) "[-_]")] (loop [res res [n & parts] parts] (if-not n (keyword res) (recur (str res (org.apache.commons.lang.WordUtils/capitalize n)) parts))))) (defn camelize-map-keys [m] (reduce (fn [accum [k v]] (assoc accum (camelize-keyword k) v)) {} m)) (def encode-base64 (let [b (Base64.)] (fn encode-base64 [raw] (.encode b raw)))) (def decode-base64 (let [b (Base64.)] (fn decode-base64 [coded] (.decode b coded)))) (defn summarize-message ([msg len] (summarize-message msg len "'" "...")) ([msg len delimiter summary-marker] (if (> (count msg) len) (str delimiter (first (word-split msg len)) summary-marker delimiter) (str delimiter msg delimiter)))) (comment (.format (java.text.NumberFormat/getCurrencyInstance) -1234) (format-as-currency -1234 :currency-with-negative) (format-as-currency -1234 :default) (format-as-currency -1234) (format-as-currency 1234 :currency-with-negative) (human-readable-byte-count 1024) (human-readable-byte-count 1024 true) (human-readable-byte-count (* 3 1024 1024)) (human-readable-byte-count (* 3 1024 1024) true) ) (defn trim-and-truncate ([^String value ^Number max-len] (trim-and-truncate value max-len nil)) ([^String value ^Number max-len ^String default-value] (cond (nil? value) default-value :trim-and-truncate (let [value (.trim value)] (if (empty? value) default-value (.trim (substr value 0 max-len)))))))
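A few REPL-style examples of the most commonly used helpers. The expected values restate what the docstrings and `comment` blocks above already describe; they are not an independently captured session.

```clojure
(require '[clj-etl-utils.text :as text])

(text/human-readable-byte-count 1023)              ;; => "1023B"
(text/human-readable-byte-count (* 3 1024 1024))   ;; => "3.00MiB"
(text/substr "the quick brown fox" -3)             ;; => "fox"
(text/canonical-phone-number "(215) 555-1212")     ;; => "12155551212"
(text/word-split "This is a long sentence, if it were documentation someone would be happy" 40)
;; => a seq of lines, each wrapped at roughly 40 characters
```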
test/clj_money/models/lots_test.clj (dgknght/clj-money)
(ns clj-money.models.lots-test (:require [clojure.test :refer [deftest use-fixtures is]] [clj-time.core :as t] [clj-factory.core :refer [factory]] [dgknght.app-lib.test] [clj-money.factories.user-factory] [clj-money.test-context :refer [realize find-account find-commodity]] [clj-money.test-helpers :refer [reset-db]] [clj-money.models.lots :as lots])) (use-fixtures :each reset-db) (def ^:private lot-context {:users [(factory :user)] :entities [{:name "Personal"}] :accounts [{:name "IRA" :type :asset :commodity-id "USD"} {:name "Dining" :type :expense :commodity-id "USD"} {:name "Checking" :type :asset :commodity-id "USD"}] :commodities [{:name "US Dollar" :symbol "USD" :type :currency} {:name "Apple" :symbol "AAPL" :exchange :nasdaq :type :stock}]}) (defn- attributes [context] {:commodity-id (:id (find-commodity context "AAPL")) :account-id (:id (find-account context "IRA")) :purchase-date (t/local-date 2017 3 2) :purchase-price 10M :shares-purchased 100M}) (deftest create-a-lot (let [context (realize lot-context) commodity (find-commodity context "AAPL") result (lots/create (attributes context)) lots (lots/select-by-commodity-id (:id commodity))] (is (:id result) "The result receives an ID value") (is (valid? result)) (is (= [{:purchase-date (t/local-date 2017 3 2) :shares-owned 100M :purchase-price 10M}] ; shares-owned is set to shares-purchased (map #(select-keys % [:purchase-date :shares-owned :purchase-price]) lots)) "The value is retrieved after create"))) (deftest commodity-id-is-required (let [context (realize lot-context) commodity (-> context :commodities first) result (lots/create (-> context attributes (dissoc :commodity-id))) lots (lots/select-by-commodity-id (:id commodity))] (is (nil? (:id result)) "The result does not receive an ID value") (is (invalid? result [:commodity-id] "Commodity is required")) (is (empty? lots) "The value is not retrieved after create"))) (deftest account-id-is-required (let [context (realize lot-context) commodity (-> context :commodities first) result (lots/create (-> context attributes (dissoc :account-id))) lots (lots/select-by-commodity-id (:id commodity))] (is (nil? (:id result)) "The result does not receive an ID value") (is (invalid? result [:account-id] "Account is required")) (is (empty? lots) "The value is not retrieved after create"))) (deftest purchase-price-is-required (let [context (realize lot-context) commodity (-> context :commodities first) result (lots/create (-> context attributes (dissoc :purchase-price))) lots (lots/select-by-commodity-id (:id commodity))] (is (nil? (:id result)) "The result does not receive an ID value") (is (invalid? result [:purchase-price] "Purchase price is required")) (is (empty? lots) "The value is not retrieved after create"))) (deftest account-id-must-reference-an-asset-account (let [context (realize lot-context) commodity (-> context :commodities first) dining (find-account context "Dining") result (lots/create (-> context attributes (assoc :account-id (:id dining)))) lots (lots/select-by-commodity-id (:id commodity))] (is (nil? (:id result)) "The result does not receive an ID value") (is (invalid? result [:account-id] "Account must be an asset")) (is (empty? lots) "The value is not retrieved after create"))) (deftest purchase-date-is-required (let [context (realize lot-context) commodity (-> context :commodities first) result (lots/create (-> context attributes (dissoc :purchase-date))) lots (lots/select-by-commodity-id (:id commodity))] (is (nil? 
(:id result)) "The result does not receive an ID value") (is (invalid? result [:purchase-date] "Purchase date is required")) (is (empty? lots) "The value is not retrieved after create"))) (deftest purchase-date-must-be-a-date (let [context (realize lot-context) commodity (find-commodity context "IRA") result (lots/create (-> context attributes (assoc :purchase-date "not-a-date"))) lots (lots/select-by-commodity-id (:id commodity))] (is (nil? (:id result)) "The result does not receive an ID value") (is (invalid? result [:purchase-date] "Purchase date must be a date")) (is (empty? lots) "The value is not retrieved after create"))) (deftest shares-purchased-is-required (let [context (realize lot-context) commodity (find-commodity context "IRA") result (lots/create (-> context attributes (dissoc :shares-purchased))) lots (lots/select-by-commodity-id (:id commodity))] (is (nil? (:id result)) "The result does not receive an ID value") (is (invalid? result [:shares-purchased] "Shares purchased is required")) (is (empty? lots) "The value is not retrieved after create"))) (def ^:private existing-lot-context (assoc lot-context :lots [{:account-id "IRA" :commodity-id "AAPL" :purchase-price 10M :shares-purchased 100M :shares-owned 100M :purchase-date (t/local-date 2016 3 2)}])) (deftest update-a-lot (let [context (realize existing-lot-context) lot (-> context :lots first) updated (update-in lot [:shares-owned] #(- % 30M)) result (lots/update updated) retrieved (lots/find lot)] (is (valid? result)) (is (= 70M (:shares-owned retrieved)) "The retrieved map contains the updated value"))) (deftest search-lots-by-account (let [context (realize existing-lot-context) ira (find-account context "IRA") commodity (->> context :commodities (filter #(= "AAPL" (:symbol %))) first) actual (map #(dissoc % :updated-at :created-at :id) (lots/search {:account-id (:id ira)})) expected [{:commodity-id (:id commodity) :account-id (:id ira) :purchase-date (t/local-date 2016 3 2) :purchase-price 10M :shares-purchased 100M :shares-owned 100M}]] (is (= expected actual) "The correct data is returned"))) ; Test unrealized-gains with: ; Date that precedes some purchases ; Date that precedes some sales
;; src/leiningen/new/db.clj (from spradnyesh/luminus-template)
(ns leiningen.new.db (:require [leiningen.new.common :refer :all])) (defn select-db [{:keys [features]}] (cond (some #{"+postgres"} features) :postgres (some #{"+mysql"} features) :mysql (some #{"+mongodb"} features) :mongo (some #{"+datomic"} features) :datomic (some #{"+h2"} features) :h2 (some #{"+sqlite"} features) :sqlite)) (defn db-dependencies [options] [['luminus-migrations "0.4.5"] ['conman "0.7.5"] ({:postgres ['org.postgresql/postgresql "42.1.4"] :mysql ['mysql/mysql-connector-java "6.0.5"] :h2 ['com.h2database/h2 "1.4.196"] :sqlite ['org.xerial/sqlite-jdbc "3.20.0"]} (select-db options))]) (defn db-url [{:keys [sanitized] :as options} suffix] ({:postgres (str "postgresql://localhost/" sanitized "_" suffix "?user=db_user_name_here&password=db_user_password_here") :mysql (str "mysql://localhost:3306/" sanitized "_" suffix "?user=db_user_name_here&password=db_user_password_here") :h2 (str "jdbc:h2:./" sanitized "_" suffix ".db") :sqlite (str "jdbc:sqlite:" sanitized "_" suffix ".db") :mongo (str "mongodb://127.0.0.1/" sanitized "_" suffix) :datomic (str "datomic:free://localhost:4334/" sanitized "_" suffix)} (select-db options))) (defn relational-db-files [options] (let [timestamp (.format (java.text.SimpleDateFormat. "yyyyMMddHHmmss") (java.util.Date.))] [["{{db-path}}/{{sanitized}}/db/core.clj" "db/src/sql.db.clj"] ["{{resource-path}}/sql/queries.sql" "db/sql/queries.sql"] ["{{backend-test-path}}/{{sanitized}}/test/db/core.clj" "db/test/db/core.clj"] [(str "{{resource-path}}/migrations/" timestamp "-add-users-table.up.sql") "db/migrations/add-users-table.up.sql"] [(str "{{resource-path}}/migrations/" timestamp "-add-users-table.down.sql") "db/migrations/add-users-table.down.sql"]])) (defn db-profiles [options] {:database-profile-dev (str :database-url " \"" (db-url options "dev") "\"") :database-profile-test (str :database-url " \"" (db-url options "test") "\"")}) (def mongo-files [["{{db-path}}/{{sanitized}}/db/core.clj" "db/src/mongodb.clj"]]) (def datomic-files [["{{db-path}}/{{sanitized}}/db/core.clj" "db/src/datomic.clj"]]) (defn add-mongo [[assets options]] [(into assets mongo-files) (-> options (append-options :dependencies [['com.novemberain/monger "3.1.0" :exclusions ['com.google.guava/guava]] ['com.google.guava/guava "20.0"]]) (assoc :mongodb true :db-connection true :db-docs ((:selmer-renderer options) (slurp-resource "db/docs/mongo_instructions.md") options)) (merge (db-profiles options)))]) (defn add-datomic [[assets options]] [(into assets datomic-files) (-> options (append-options :dependencies [['com.datomic/datomic-free "0.9.5561" :exclusions ['org.slf4j/log4j-over-slf4j 'org.slf4j/slf4j-nop]] ['com.google.guava/guava "21.0"]]) (assoc :datomic true :db-connection true :db-docs ((:selmer-renderer options) (slurp-resource "db/docs/datomic_instructions.md") options)) (merge (db-profiles options)))]) (defn add-relational-db [db [assets options]] [(into assets (relational-db-files options)) (let [embedded-db? (some #{(name db)} ["h2" "sqlite"])] (-> options (append-options :dependencies (db-dependencies options)) (append-options :plugins [['migratus-lein "0.5.4"]]) (assoc :relational-db true :db-connection (not embedded-db?) :db-type (name db) :embedded-db embedded-db? 
:migrations "{:store :database :db ~(get (System/getenv) \"DATABASE_URL\")}" :db-docs ((:selmer-renderer options) (slurp-resource (if (= :h2 db) "db/docs/h2_instructions.md" "db/docs/db_instructions.md")) options)) (merge (db-profiles options))))]) (defn db-features [state] (if-let [db (select-db (second state))] (cond (= :mongo db) (add-mongo state) (= :datomic db) (add-datomic state) :else (add-relational-db db state)) state))
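;; REPL sketch of how the helpers above compose. The option map mirrors what the
;; template passes in (:sanitized is the project name, :features the profile
;; flags); the expected values are read straight off `select-db` and `db-url`.
(comment
  (select-db {:features ["+postgres"]})
  ;; => :postgres
  (db-url {:sanitized "myapp" :features ["+postgres"]} "dev")
  ;; => "postgresql://localhost/myapp_dev?user=db_user_name_here&password=db_user_password_here"
  (db-url {:sanitized "myapp" :features ["+h2"]} "test")
  ;; => "jdbc:h2:./myapp_test.db"
  )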
;; src/clj_ml/clusterers.clj (from yogsototh/clj-ml)
;; ;; Clusterers ;; @author Antonio Garrote ;; (ns #^{:author "Antonio Garrote <[email protected]>"} clj-ml.clusterers "This namespace contains several functions for building clusterers using different clustering algorithms. K-means, Cobweb and Expectation maximization algorithms are currently supported. Some of these algorithms support incremental building of the clustering without having the full data set in main memory. Functions for evaluating the clusterer as well as for clustering new instances are also supported " (:use [clj-ml utils data distance-functions options-utils]) (:import (java.util Date Random) (weka.clusterers ClusterEvaluation SimpleKMeans Cobweb EM))) ;; Setting up clusterer options (defmulti #^{:skip-wiki true} make-clusterer-options "Creates ther right parameters for a clusterer" (fn [kind map] kind)) (defmethod make-clusterer-options :k-means ([kind m] (let [cols-val (check-options m {:display-standard-deviation "-V" :replace-missing-values "-M" :preserve-instances-order "-O"} [""]) cols-val-a (check-option-values m {:number-clusters "-N" :random-seed "-S" :number-iterations "-I"} cols-val)] (into-array cols-val-a)))) (defmethod make-clusterer-options :cobweb ([kind m] (let [cols-val-a (check-option-values m {:acuity "-A" :cutoff "-C" :random-seed "-S"} [""])] (into-array cols-val-a)))) (defmethod make-clusterer-options :expectation-maximization ([kind m] (let [cols-val-a (check-option-values m {:number-clusters "-N" :maximum-iterations "-I" :minimum-standard-deviation "-M" :random-seed "-S"} [""])] (into-array cols-val-a)))) ;; Building clusterers (defmacro make-clusterer-m ([kind clusterer-class options] `(let [options-read# (if (empty? ~options) {} (first ~options)) clusterer# (new ~clusterer-class) opts# (make-clusterer-options ~kind options-read#)] (.setOptions clusterer# opts#) (when (not (empty? (get options-read# :distance-function))) ;; We have to setup a different distance function (let [dist# (get options-read# :distance-function) real-dist# (if (map? dist#) (make-distance-function (first (keys dist#)) (first (vals dist#))) dist#)] (.setDistanceFunction clusterer# real-dist#))) clusterer#))) (defmulti make-clusterer "Creates a new clusterer for the given kind algorithm and options. The first argument identifies the kind of clusterer. The second argument is a map of parameters particular to each clusterer. The clusterers currently supported are: - :k-means - :cobweb - :expectation-maximization This is the description of the supported clusterers and the parameters accepted by each clusterer algorithm: * :k-means A clusterer that uses the simple K-Means algorithm to build the clusters Parameters: - :display-standard-deviation Display the standard deviation of the centroids in the output for the clusterer. Sample value: true - :replace-missing-values Replaces the missing values with the mean/mode. Sample value: true - :number-clusters The number of clusters to be built. Sample value: 3 - :random-seed Seed for the random number generator. Sample value: 0.3 - :number-iterations Maximum number of iterations that the algorithm will run. Sample value: 1000 * :cobweb Implementation of the Cobweb incremental algorithm for herarchical conceptual clustering. Parameters: - :acuity Acuity. Default value: 1.0 - :cutoff Cutoff. Default value: 0.002 - :random-seed Seed for the random number generator. Default value: 42. * :expectation-maximization Implementation of the probabilistic clusterer algorithm for expectation maximization. 
Parameters: - :number-clusters Number of clusters to be built. If ommitted or -1 is passed as a value, cross-validation will be used to select the number of clusters. Sample value: 3 - :maximum-iterations Maximum number of iterations the algorithm will run. Default value: 100 - :minimum-standard-deviation Minimum allowable standard deviation for normal density computation. Default value: 1e-6 - :random-seed Seed for the random number generator. Default value: 100 " (fn [kind & options] kind)) (defmethod make-clusterer :k-means ([kind & options] (make-clusterer-m kind SimpleKMeans options))) (defmethod make-clusterer :cobweb ([kind & options] (make-clusterer-m kind Cobweb options))) (defmethod make-clusterer :expectation-maximization ([kind & options] (make-clusterer-m kind EM options))) ;; Clustering data (defn clusterer-build "Applies a clustering algorithm to a set of data" ([clusterer dataset] (.buildClusterer clusterer dataset))) (defn clusterer-update "If the clusterer is updateable it updates the cluster with the given instance or set of instances" ([clusterer instance-s] (if (is-dataset? instance-s) (do (for [i (dataset-seq instance-s)] (.updateClusterer clusterer i)) (.updateFinished clusterer) clusterer) (do (.updateClusterer clusterer instance-s) (.updateFinished clusterer) clusterer)))) ;; Retrieving information from a clusterer (defmulti clusterer-info "Retrieves the data from a cluster, these data are clustering-algorithm dependent" (fn [clusterer] (class clusterer))) (defmethod clusterer-info SimpleKMeans ([clusterer] "Accepts a k-means clusterer Returns a map with: :number-clusters The number of clusters in the clusterer :centroids Map with the identifier and the centroid values for each cluster :cluster-sizes Number of data points classified in each cluster :squared-error Minimized squared error" {:number-clusters (.numberOfClusters clusterer) :centroids (second (reduce (fn [acum item] (let [counter (first acum) map (second acum)] (list (+ counter 1) (conj map {counter item})))) (list 0 {}) (dataset-seq (.getClusterCentroids clusterer)))) :cluster-sizes (let [sizes (.getClusterSizes clusterer)] (reduce (fn [acum item] (conj acum {item (aget sizes item)})) {} (range 0 (.numberOfClusters clusterer)))) :squared-error (.getSquaredError clusterer)})) ;; Evaluating clusterers (defn- collect-evaluation-results "Collects all the statistics from the evaluation of a clusterer" ([evaluation] (do (println "hola?") (println (.clusterResultsToString evaluation)) {:classes-to-clusters (try-metric #(reduce (fn [acum i] (conj acum {i (aget (.getClassesToClusters evaluation) i)})) {} (range 0 (.getNumClusters evaluation)))) :log-likelihood (try-metric #(.getLogLikelihood evaluation)) :evaluation-object evaluation}))) (defmulti clusterer-evaluate "Evaluates a trained clusterer using the provided dataset or cross-validation" (fn [clusterer mode & evaluation-data] mode)) (defmethod clusterer-evaluate :dataset ([clusterer mode & evaluation-data] (let [test-data (nth evaluation-data 0) evaluation (do (let [evl (new ClusterEvaluation)] (.setClusterer evl clusterer) evl))] (.evaluateClusterer evaluation test-data) (println (.clusterResultsToString evaluation)) (collect-evaluation-results evaluation)))) (defmethod clusterer-evaluate :cross-validation ([clusterer mode & evaluation-data] (let [training-data (nth evaluation-data 0) folds (nth evaluation-data 1) evaluation (doto (new ClusterEvaluation) (.setClusterer clusterer)) log-likelihood (ClusterEvaluation/crossValidateModel clusterer 
training-data folds (new Random (.getTime (new Date))))] {:log-likelihood log-likelihood}))) ;; Clustering collections (defn clusterer-cluster "Add a class to each instance according to the provided clusterer" ([clusterer dataset] (let [attributes (conj (clj-ml.data/dataset-format dataset) {:class (map #(keyword (str %1)) (range 0 (.numberOfClusters clusterer)))}) clustered (map (fn [i] (conj (instance-to-vector i) (keyword (str (.clusterInstance clusterer i))))) (dataset-seq dataset)) nds (make-dataset (keyword (str "clustered " (dataset-name dataset))) attributes clustered)] (dataset-set-class nds (- (count attributes) 1)) nds)))
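;; REPL sketch of the clustering API above, assuming a small numeric dataset built
;; with clj-ml.data/make-dataset (pulled in via the :use clause); the make-dataset
;; arity shown here is an assumption. Option keys follow the make-clusterer
;; docstring; the returned info map shape follows clusterer-info for SimpleKMeans.
(comment
  (def ds (make-dataset :points [:x :y]
                        [[1.0 1.1] [0.9 1.0] [8.0 8.2] [7.9 8.1]]))
  (def km (make-clusterer :k-means {:number-clusters 2}))
  (clusterer-build km ds)
  (clusterer-info km)
  ;; => {:number-clusters 2 :centroids {...} :cluster-sizes {0 2, 1 2} :squared-error ...}
  (clusterer-cluster km ds))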
;; src/main/org/edgexfoundry/ui/manager/ui/exports.cljs (from jpwhitemn/edgex-ui-clojure)
;;; Copyright (c) 2018 ;;; IoTech Ltd ;;; SPDX-License-Identifier: Apache-2.0 (ns org.edgexfoundry.ui.manager.ui.exports (:require [fulcro.client.primitives :as prim :refer [defui defsc]] [fulcro.i18n :refer [tr]] [fulcro.client.dom :as dom] [fulcro.client.data-fetch :as df :refer [load-field-action]] [fulcro.client.routing :as r] [fulcro.ui.bootstrap3 :as b] [fulcro.ui.forms :as f] [fulcro.client.mutations :refer [defmutation]] [org.edgexfoundry.ui.manager.ui.table :as t :refer [deftable]] [org.edgexfoundry.ui.manager.ui.common :as co] [org.edgexfoundry.ui.manager.api.mutations :as mu] [org.edgexfoundry.ui.manager.ui.dialogs :as d] [clojure.set :as set])) (defn reset-add-export* [state] (update-in state co/new-export-ident #(merge % {:name "" :format :JSON :destination :REST_ENDPOINT :compression :NONE :encryptionAlgorithm :NONE :encryptionKey "" :initializingVector "" :enable false}))) (defn assoc-options [state ident field opts default] (let [path (into ident [:fulcro.ui.forms/form :elements/by-name field])] (-> state (assoc-in (conj path :input/options) opts) (assoc-in (conj path :input/default-value) default) (assoc-in (conj ident field) default)))) (defn set-unused-addressables* [state ident current-addr] (let [mk-id-set (fn [m] (into #{} (map #(-> % :addressable :id) (vals m)))) dsa-ids (mk-id-set (:device-service state)) sa-ids (mk-id-set (:schedule-event state)) addrs (vals (:addressable state)) a-ids (into #{} (map :id addrs)) unused-ids (set/difference a-ids dsa-ids sa-ids) unused-plus-current (if current-addr (conj unused-ids current-addr) unused-ids) selected-addr (filter #(contains? unused-plus-current (:id %)) addrs) opts (mapv #(f/option (:id %) (:name %)) selected-addr) default (or current-addr (-> selected-addr first :id))] (assoc-options state ident :addressable opts default))) (defmutation prepare-add-export [noargs] (action [{:keys [state]}] (swap! state (fn [s] (-> s (set-unused-addressables* co/new-export-ident nil) (reset-add-export*)))))) (defn- initialize-form [state-map form-class form-ident] (let [exp (-> state-map (get-in form-ident))] (-> state-map (update-in co/edit-export-ident #(merge % exp))))) (declare EditExportModal) (defn fixup-export-addressable-id [state type id] (let [addr-name (get-in state [type id :addressable :name]) match-name #(= addr-name (:name %)) addr (->> state :addressable vals (filter match-name) first)] (:id addr))) (defmutation prepare-edit-modal [{:keys [type id]}] (action [{:keys [state]}] (swap! state (fn [s] (-> s (initialize-form EditExportModal [type id]) (set-unused-addressables* co/edit-export-ident (fixup-export-addressable-id s type id))))))) (defn show-add-export-modal [comp] (prim/transact! comp `[(prepare-add-export {}) (r/set-route {:router :root/modal-router :target ~co/new-export-ident}) (b/show-modal {:id :add-export-modal})])) (defn show-edit-modal [comp type id] (prim/transact! comp `[(prepare-edit-modal {:type ~type :id ~id}) (r/set-route {:router :root/modal-router :target ~co/edit-export-ident}) (b/show-modal {:id :edit-export-modal})])) (defn add-export [comp {:keys [name addressable format destination compression encryptionAlgorithm encryptionKey initializingVector enable addressables] :as form}] (let [tmp-id (prim/tempid) addr-data (-> (filter #(= addressable (:id %)) addressables) first)] (prim/transact! 
comp `[(b/hide-modal {:id :add-export-modal}) (mu/add-export {:tempid ~tmp-id :name ~name :addressable ~addr-data :format ~format :destination ~destination :compression ~compression :encryptionAlgorithm ~encryptionAlgorithm :encryptionKey ~encryptionKey :initializingVector ~initializingVector :enable ~enable})]))) (defn edit-export [comp {:keys [id addressable format destination compression encryptionAlgorithm encryptionKey initializingVector enable addressables] :as form}] (let [addr-data (-> (filter #(= addressable (:id %)) addressables) first)] (prim/transact! comp `[(b/hide-modal {:id :edit-export-modal}) (mu/edit-export {:id ~id :addressable ~addr-data :format ~format :destination ~destination :compression ~compression :encryptionAlgorithm ~encryptionAlgorithm :encryptionKey ~encryptionKey :initializingVector ~initializingVector :enable ~enable})]))) (defn do-delete-export [this id] (prim/transact! this `[(mu/delete-export {:id ~id}) (t/reset-table-page {:id :show-profiles})])) (defsc AddressableListEntry [this {:keys [id type origin name protocol address port path method publisher topic user password]}] {:ident [:addressable :id] :query [:id :type :origin :name :protocol :address :port :path :method :publisher :topic :user :password]}) (defn address-table [{:keys [protocol address port path method publisher topic user]}] (let [if-avail #(or % "N/A")] (dom/div #js {:className "table-responsive"} (dom/table #js {:className "table table-bordered"} (dom/tbody nil (dom/tr nil (dom/th nil "Protocol") (dom/td nil protocol)) (dom/tr nil (dom/th nil "Address") (dom/td nil address)) (dom/tr nil (dom/th nil "Port") (dom/td nil port)) (dom/tr nil (dom/th nil "Path") (dom/td nil path)) (dom/tr nil (dom/th nil "Method") (dom/td nil method)) (dom/tr nil (dom/th nil "Publisher") (dom/td nil (if-avail publisher))) (dom/tr nil (dom/th nil "Topic") (dom/td nil (if-avail topic))) (dom/tr nil (dom/th nil "User") (dom/td nil (if-avail user)))))))) (def format-options [(f/option :JSON "JSON") (f/option :XML "XML") (f/option :IOTCORE_JSON "JSON (Google IoT Core)") (f/option :AZURE_JSON "JSON (Azure)") (f/option :THINGSBOARD_JSON "JSON (ThingsBoard)") (f/option :NOOP "None")]) (def destination-options [(f/option :REST_ENDPOINT "REST Endpoint") ;(f/option :ZMQ_TOPIC "ZMQ Topic")]) (f/option :MQTT_TOPIC "MQTT Topic") (f/option :IOTCORE_TOPIC "MQTT (Google IoT Core)") (f/option :AZURE_TOPIC "MQTT (Azure)") (f/option :XMPP_TOPIC "XMPP") (f/option :AWS_TOPIC "AWS") (f/option :INFLUXDB_ENDPOINT "InfluxDB")]) (def compression-options [(f/option :NONE "None") (f/option :GZIP "GZIP") (f/option :ZIP "ZIP")]) (def encryption-options [(f/option :NONE "None") (f/option :AES "AES")]) (defsc AddExportModal [this {:keys [modal encryptionAlgorithm addressable addressables modal/page] :as props}] {:initial-state (fn [p] (merge (f/build-form this {:db/id 6}) {:addressables (prim/get-initial-state AddressableListEntry {}) :modal (prim/get-initial-state b/Modal {:id :add-export-modal :backdrop true}) :modal/page :new-export})) :ident (fn [] co/new-export-ident) :query [f/form-key :db/id :name :addressable :format :destination :compression :encryptionAlgorithm :encryptionKey :initializingVector :enable :modal/page {:addressables (prim/get-query AddressableListEntry)} {:modal (prim/get-query b/Modal)}] :form-fields [(f/id-field :db/id) (f/text-input :name :placeholder "Export name" :validator `f/not-empty?) 
(f/dropdown-input :addressable [(f/option :none "No addressables available")] :default-value :none) (f/dropdown-input :format format-options :default-value :JSON) (f/dropdown-input :destination destination-options :default-value :REST_ENDPOINT) (f/dropdown-input :compression compression-options :default-value :NONE) (f/dropdown-input :encryptionAlgorithm encryption-options :default-value :NONE) (f/text-input :encryptionKey :placeholder "key") (f/text-input :initializingVector :placeholder "vector") (f/checkbox-input :enable)]} (let [not-encrypted? (= encryptionAlgorithm :NONE) addr-data (-> (filter #(= addressable (:id %)) addressables) first) valid? (f/valid? (f/validate-fields props))] (b/ui-modal modal (b/ui-modal-title nil (dom/div #js {:key "title" :style #js {:fontSize "22px"}} "Add Export")) (b/ui-modal-body nil (dom/div #js {:className "card"} (dom/div #js {:className "content"} (co/field-with-label this props :name "Name" :className "form-control") (co/field-with-label this props :addressable "Addressable" :className "form-control") (address-table addr-data) (co/field-with-label this props :format "Export format" :className "form-control") (co/field-with-label this props :destination "Destination" :className "form-control") (co/field-with-label this props :compression "Compression method" :className "form-control") (co/field-with-label this props :encryptionAlgorithm "Encryption method" :className "form-control") (co/field-with-label this props :encryptionKey "Encryption key" :className "form-control" :disabled not-encrypted?) (co/field-with-label this props :initializingVector "Initializing vector" :className "form-control" :disabled not-encrypted?) (co/field-with-label this props :enable "Enable" :className "form-control")))) (b/ui-modal-footer nil (b/button {:key "add-button" :className "btn-fill" :kind :info :onClick #(add-export this props) :disabled (not valid?)} "Add") (b/button {:key "cancel-button" :className "btn-fill" :kind :danger :onClick #(prim/transact! this `[(b/hide-modal {:id :add-export-modal})])} "Cancel"))))) (defsc EditExportModal [this {:keys [modal encryptionAlgorithm addressable addressables modal/page] :as props}] {:initial-state (fn [p] (merge (f/build-form this {:db/id 8}) {:addressables (prim/get-initial-state AddressableListEntry {}) :modal (prim/get-initial-state b/Modal {:id :edit-export-modal :backdrop true}) :modal/page :edit-export})) :ident (fn [] co/edit-export-ident) :query [f/form-key :db/id :id :addressable :format :destination :compression :encryptionAlgorithm :encryptionKey :initializingVector :enable :modal/page {:addressables (prim/get-query AddressableListEntry)} {:modal (prim/get-query b/Modal)}] :form-fields [(f/id-field :db/id) (f/dropdown-input :addressable [(f/option :none "No addressables available")] :default-value :none) (f/dropdown-input :format format-options :default-value :JSON) (f/dropdown-input :destination destination-options :default-value :REST_ENDPOINT) (f/dropdown-input :compression compression-options :default-value :NONE) (f/dropdown-input :encryptionAlgorithm encryption-options :default-value :NONE) (f/text-input :encryptionKey :placeholder "key") (f/text-input :initializingVector :placeholder "vector") (f/checkbox-input :enable)]} (let [not-encrypted? 
(= encryptionAlgorithm :NONE) addr-data (-> (filter #(= addressable (:id %)) addressables) first)] (b/ui-modal modal (b/ui-modal-title nil (dom/div #js {:key "title" :style #js {:fontSize "22px"}} "Edit Export")) (b/ui-modal-body nil (dom/div #js {:className "card"} (dom/div #js {:className "content"} (co/field-with-label this props :addressable "Addressable" :className "form-control") (address-table addr-data) (co/field-with-label this props :format "Export format" :className "form-control") (co/field-with-label this props :destination "Destination" :className "form-control") (co/field-with-label this props :compression "Compression method" :className "form-control") (co/field-with-label this props :encryptionAlgorithm "Encryption method" :className "form-control") (co/field-with-label this props :encryptionKey "Encryption key" :className "form-control" :disabled not-encrypted?) (co/field-with-label this props :initializingVector "Initializing vector" :className "form-control" :disabled not-encrypted?) (co/field-with-label this props :enable "Enable" :className "form-control")))) (b/ui-modal-footer nil (b/button {:key "edit-button" :className "btn-fill" :kind :info :onClick #(edit-export this props)} "Edit") (b/button {:key "cancel-button" :className "btn-fill" :kind :danger :onClick #(prim/transact! this `[(b/hide-modal {:id :edit-export-modal})])} "Cancel"))))) (defn conv-dest [_ dest] (case dest :MQTT_TOPIC "MQTT" :IOTCORE_TOPIC "IoT Core" :AZURE_TOPIC "Azure" :XMPP_TOPIC "XMPP" :ZMQ_TOPIC "ZMQ" :REST_ENDPOINT "REST" :AWS_TOPIC "AWS" :INFLUXDB_ENDPOINT "InfluxDB" "Unknown")) (defn conv-format [_ format] (case format :JSON "JSON" :XML "XML" :IOTCORE_JSON "JSON (IoT Core)" :AZURE_JSON "JSON (Azure)" :THINGSBOARD_JSON "JSON (ThingsBoard)" :NOOP "None" "Unknown")) (defn conv-compression [_ comp] (case comp :NONE "None" :GZIP "GZIP" :ZIP "ZIP" "Unknown")) (defn conv-encryption [_ enc] (case (:encryptionAlgorithm enc) :NONE "None" :AES "AES" "Unknown")) (deftable ExportList :show-exports :export [[:name "Name"] [:format "Format" conv-format] [:destination "Destination" conv-dest] [:encryption "Encryption" conv-encryption] [:compression "Compression" conv-compression] [:enable "Enable"]] [{:onClick #(show-add-export-modal this) :icon "plus"} {:onClick #(df/refresh! this {:fallback `d/show-error}) :icon "refresh"}] :modals [{:modal d/DeleteModal :params {:modal-id :de-modal} :callbacks {:onDelete do-delete-export}}] :actions [{:title "Edit Export" :action-class :danger :symbol "edit" :onClick show-edit-modal} {:title "Delete Export" :action-class :danger :symbol "times" :onClick (d/mk-show-modal :de-modal)}])
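;; REPL sketch of the column-conversion helpers the ExportList table relies on.
;; The first argument is the table row and is ignored by these converters; the
;; expected strings are read straight off the case expressions above.
(comment
  (conv-dest nil :MQTT_TOPIC)                       ;; => "MQTT"
  (conv-format nil :THINGSBOARD_JSON)               ;; => "JSON (ThingsBoard)"
  (conv-compression nil :GZIP)                      ;; => "GZIP"
  (conv-encryption nil {:encryptionAlgorithm :AES}) ;; => "AES"
  )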
;; Source: data/train/clojure/4d0d00e281a64c327c39cbc7cd22b560c075ad18header.cljs (repo: harshp8l/deep-learning-lang-detection, 84 stars)
(ns kanopi.view.header (:require [om.core :as om] [taoensso.timbre :as timbre :refer-macros (log trace debug info warn error fatal report)] [kanopi.util.browser :as browser] [kanopi.view.widgets.selector.dropdown :as dropdown] [kanopi.view.widgets.typeahead :as typeahead] [kanopi.model.schema :as schema] [kanopi.model.message :as msg] [kanopi.view.icons :as icons] [sablono.core :refer-macros [html] :include-macros true])) (defn center-search-field [props owner] (reify om/IRender (render [_] (html [:div.navbar-center ;; NOTE: do I need the icon? (icons/search {}) ;; FIXME: this breaks when screen width <= 544px ;; Consider a clever interface, maybe only the searchglass ;; icon, when clicked, cover entire header with typeahead ;; search. [:span.search (om/build typeahead/typeahead props (typeahead/search-config :placeholder "" :result-display-fn schema/display-entity :result-href-fn (fn [result] (when-let [id (:db/id result)] (case (schema/describe-entity result) :datum (browser/route-for owner :datum :id id) :literal (browser/route-for owner :literal :id id) ; default nil))) ))] ])))) (defn- team->menu-item [owner current-team team] (hash-map :type :link :on-click (fn [_] (->> (msg/switch-team (:team/id team)) (msg/send! owner))) :label (get team :team/id))) (defn- manage-teams-menu-item [owner] (hash-map :type :link :href (browser/route-for owner :teams) :label "Manage Teams" )) (defn header-intent-dispatcher [props _] (get-in props [:intent :id] :spa.unauthenticated/navigate)) (defmulti left-team-dropdown header-intent-dispatcher) (defmethod left-team-dropdown :spa.unauthenticated/navigate [props owner] (reify om/IRender (render [_] (html [:div.navbar-header [:a.navbar-brand {:href (browser/route-for owner :home) :tab-index -1} "Kanopi"]])))) (defmethod left-team-dropdown :spa.authenticated/navigate [props owner] (reify om/IRender (render [_] (let [current-team (get-in props [:user :current-team])] (html [:div.navbar-header [:div.navbar-brand (om/build dropdown/dropdown props {:init-state {:tab-index -1 :toggle-type :split-button} :state { :button-on-click (fn [_] (browser/set-page! owner [:home])) :toggle-label (:team/id current-team) :menu-items (conj (mapv (partial team->menu-item owner current-team) (get-in props [:user :teams])) (dropdown/divider-item) (manage-teams-menu-item owner))} })] ])) ))) (defmulti right-controls header-intent-dispatcher) (defmethod right-controls :spa.unauthenticated/navigate [props owner] (reify om/IRender (render [_] (html [:ul.nav.navbar-nav.navbar-right (->> (icons/create {}) (icons/on-click #(->> (msg/create-datum) (msg/send! owner)) {:class ["navbar-brand"]})) #_(->> (icons/goal {}) (icons/on-click #(->> (msg/create-goal) (msg/send! owner)) {:class ["navbar-brand"]})) (->> (icons/insights {}) (icons/on-click #(->> (msg/capture-insight) (msg/send! owner)) {:class ["navbar-brand"]})) (->> (icons/log-in {}) (icons/link-to owner :enter {:class "navbar-brand", :tab-index -1})) ])))) (defmethod right-controls :spa.authenticated/navigate [props owner] (reify om/IRender (render [_] (html [:ul.nav.navbar-nav.navbar-right (->> (icons/create {}) (icons/on-click #(->> (msg/create-datum) (msg/send! owner)) {:class ["navbar-brand"]})) #_(->> (icons/goal {}) (icons/on-click #(->> (msg/create-goal) (msg/send! owner)) {:class ["navbar-brand"]})) (->> (icons/insights {}) (icons/on-click #(->> (msg/capture-insight) (msg/send! 
owner)) {:class ["navbar-brand"]})) (om/build dropdown/dropdown props {:init-state {:toggle-label (get-in props [:user :identity]) :toggle-icon-fn icons/user :classes ["navbar-brand"] :caret? true :tab-index -1 :menu-items [ {:type :text :label (str "Signed in as" " ") :content (get-in props [:user :identity])} (dropdown/divider-item) {:type :link :href "" :label "Feedback"} {:type :link :href "" :label "About"} {:type :link :href "" :label "Help"} (dropdown/divider-item) {:type :link :href (browser/route-for owner :settings) :label "Settings"} {:type :link :href (browser/route-for owner :logout) :label "Logout"}] }}) ])))) (defn header "A modal header. Contents will change based on the user's present intent. By default, we assume the user is trying to navigate. UI components will interpret user actions as user intentions, update the app state, and thus allow the header to help the user achieve her intention. TODO: figure out how we'll provide the header with any relevant state. The trick will be doing so in a relatively decoupled way so the header does not need to know about every possible mode, but instead categories of modes. " [props owner opts] (reify om/IDisplayName (display-name [_] "header") om/IRenderState (render-state [_ state] (html [:div.header.navbar.navbar-default.navbar-fixed-top [:div.container-fluid (om/build left-team-dropdown props) (om/build center-search-field props) (om/build right-controls props) ] ]))))
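;; Editor's illustration (not part of the original file): the header multimethods
;; above dispatch on [:intent :id] with an unauthenticated default, so the left
;; dropdown and right controls switch together according to the user's intent.
(comment
  (header-intent-dispatcher {} nil)
  ;; => :spa.unauthenticated/navigate
  (header-intent-dispatcher {:intent {:id :spa.authenticated/navigate}} nil)
  ;; => :spa.authenticated/navigate
  )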
;; Source: project.clj (repo: sritchie/thrift-clj, 1 star)
(defproject com.twitter/thrift-clj "0.1.0-SNAPSHOT"
  :description "Clojure parser for the Thrift IDL."
  :url "https://github.com/sritchie/thrift-clj"
  :license {:name "Apache 2"
            :url "http://www.apache.org/licenses/LICENSE-2.0.html"
            :distribution :repo
            :comments "A business-friendly OSS license."}
  :deploy-repositories [["releases" "https://oss.sonatype.org/service/local/staging/deploy/maven2"]
                        ["snapshots" "https://oss.sonatype.org/content/repositories/snapshots"]]
  :min-lein-version "2.0.0"
  :warn-on-reflection true
  :dependencies [[org.clojure/clojure "1.5.1"]
                 [org.clojure/algo.monads "0.1.0"]
                 [org.apache.thrift/libthrift "0.8.0"]
                 [instaparse "1.2.2"]]
  :plugins [[lein-midje "2.0.0"]]
  :profiles {:dev {:dependencies [[midje "1.4.0"]]}}
  :pom-addition [:developers
                 [:developer
                  [:id "oscar"]
                  [:name "Oscar Boykin"]
                  [:url "http://twitter.com/posco"]]
                 [:developer
                  [:id "sritchie"]
                  [:name "Sam Ritchie"]
                  [:url "http://twitter.com/sritchie"]]])
;; Source: clj/phosphorus-markets/src/phosphorus_markets/simulator.clj (repo: shchipts/phosphorus-affordability, 0 stars)
; Copyright (c) 2020 International Institute for Applied Systems Analysis.
; All rights reserved. The use and distribution terms for this software
; are covered by the MIT License (http://opensource.org/licenses/MIT)
; which can be found in the file LICENSE at the root of this distribution.
; By using this software in any fashion, you are agreeing to be bound by
; the terms of this license.
; You must not remove this notice, or any other, from this software.

(ns ^{:doc "Simulation of equilibrium prices for distributed commodity market through the generalized English auction."
      :author "Anna Shchiptsova"}
  phosphorus-markets.simulator
  (:require [commodities-auction.auction :as auction]))

(defn- record
  "Records simulation results."
  [m i j k]
  (let [n (count (first (vals m)))]
    (assoc m
           :simulation (repeat n (+ (* i k) j))
           :iteration (range 1 (inc n)))))

(defn- iterations
  "Combines results of separate auction runs."
  [i coll size]
  (reduce-kv
   (fn [m j [k v]]
     (merge-with concat
                 m
                 (if (= k :sub-iterations)
                   {k [v]}
                   (record v i (inc j) size))))
   {}
   (vec coll)))

(defn prun
  "Runs generalized English auction for different parameterization of distributed commodity market. Returns a lazy sequence of equilibrium prices for a chunk of parameters' tuples. Runs auction procedure in parallel for each chunk."
  [coll {chunk-size :k}]
  (map-indexed
   (fn [i chunk-itm]
     (->> (pmap #(apply auction/run %) chunk-itm)
          ((juxt #(keys (first %))
                 #(apply map (fn [& more] (iterations i more chunk-size)) %)))
          (apply map #(conj %& (inc i)))))
   (partition-all chunk-size coll)))
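;; Editor's illustration (not part of the original file): prun chunks its input
;; with partition-all before dispatching auction/run in parallel within each
;; chunk, and record numbers each simulation as (+ (* chunk-index k) run-index).
;; The values below involve only core functions and the arithmetic above.
(comment
  (partition-all 3 (range 7))
  ;; => ((0 1 2) (3 4 5) (6))
  ;; with k = 3, the 2nd run (j = 2) of the 2nd chunk (i = 1) is simulation 5:
  (+ (* 1 3) 2)
  ;; => 5
  )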
;; Source: Problem001/Clojure/solution_1.clj (repo: drocha87/ProjectEuler, 167 stars)
;; My first code in Clojure
;; Manoel Vilela

;; solution of problem001
(defn solution [x]
  (reduce +
          (filter (fn [n]
                    (or (= (mod n 5) 0)
                        (= (mod n 3) 0)))
                  (range 1 x))))

(println (solution 1000))
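;; Editor's cross-check (not part of the original file): the same sum has a
;; closed form by inclusion-exclusion over the multiples of 3, 5 and 15; both
;; approaches give 233168 for a limit of 1000. sum-of-multiples is a helper
;; introduced here for the check only.
(defn- sum-of-multiples [k limit]
  (let [n (quot (dec limit) k)]
    (* k (quot (* n (inc n)) 2))))

(comment
  (solution 1000)
  ;; => 233168
  (- (+ (sum-of-multiples 3 1000) (sum-of-multiples 5 1000))
     (sum-of-multiples 15 1000))
  ;; => 233168
  )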
;; Source: editor/src/clj/internal/graph/error_values.clj (repo: cmarincia/defold, 0 stars)
;; Copyright 2020-2022 The Defold Foundation ;; Copyright 2014-2020 King ;; Copyright 2009-2014 Ragnar Svensson, Christian Murray ;; Licensed under the Defold License version 1.0 (the "License"); you may not use ;; this file except in compliance with the License. ;; ;; You may obtain a copy of the License, together with FAQs at ;; https://www.defold.com/license ;; ;; Unless required by applicable law or agreed to in writing, software distributed ;; under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR ;; CONDITIONS OF ANY KIND, either express or implied. See the License for the ;; specific language governing permissions and limitations under the License. (ns internal.graph.error-values (:require [clojure.string :as string] [internal.graph.types :as gt])) (set! *warn-on-reflection* true) (def ^:private severity-levels {:info 0 :warning 10 :fatal 20}) (defprotocol ErrorSeverityProvider (error-severity [this])) (defrecord ErrorValue [_node-id _label severity value message causes user-data] ErrorSeverityProvider (error-severity [_this] severity)) (defn error-value ([severity message] (error-value severity message nil)) ([severity message user-data] (map->ErrorValue {:severity severity :message message :user-data user-data}))) (def error-info (partial error-value :info)) (def error-warning (partial error-value :warning)) (def error-fatal (partial error-value :fatal)) (defn map->error [m] (assert (if-some [node-id (:_node-id m)] (gt/node-id? node-id) true)) (assert (if-some [label (:_label m)] (keyword? label) true)) (assert (if-some [severity (:severity m)] (contains? severity-levels severity) true)) (assert (if-some [message (:message m)] (string? message) true)) (map->ErrorValue m)) (defn ->error ([node-id label severity value message] (->error node-id label severity value message nil)) ([node-id label severity value message user-data] (->ErrorValue node-id label severity value message nil user-data))) (defn error-value? [x] (instance? ErrorValue x)) (defn error? [x] (cond (sequential? x) (some error? x) (error-value? x) x :else nil)) (defn- sev? [level x] (< (or level 0) (or (severity-levels (error-severity x)) 0))) (defn worse-than [severity x] (when (instance? ErrorValue x) (sev? (severity-levels severity) x))) (defn- severity? [severity e] (and (satisfies? ErrorSeverityProvider e) (= (severity-levels severity) (severity-levels (error-severity e))))) (def error-info? (partial severity? :info)) (def error-warning? (partial severity? :warning)) (def error-fatal? (partial severity? :fatal)) (defn- error-seq [e] (tree-seq :causes :causes e)) (defn- error-messages [e] (distinct (keep :message (error-seq e)))) (defn error-message [e] (string/join "\n" (error-messages e))) (defn error-aggregate ([es] (let [max-severity (reduce (fn [result severity] (if (> (severity-levels result) (severity-levels severity)) result severity)) :info (keep :severity es))] (map->ErrorValue {:severity max-severity :causes (vec es)}))) ([es & kvs] (apply assoc (error-aggregate es) kvs))) (defrecord ErrorPackage [packaged-errors] ErrorSeverityProvider (error-severity [_this] (if (some? packaged-errors) (error-severity packaged-errors) :info))) (defn error-package? [value] (instance? ErrorPackage value)) (defn- unpack-if-package [error-or-package] (if (instance? ErrorPackage error-or-package) (:packaged-errors error-or-package) error-or-package)) (defn- flatten-packages [values node-id] (mapcat (fn [value] (cond (nil? value) nil (instance? ErrorValue value) [value] (instance? 
ErrorPackage value) (let [error-value (:packaged-errors value)] (if (= node-id (:_node-id error-value)) (:causes error-value) [error-value])) (sequential? value) (flatten-packages value node-id) :else (throw (ex-info (str "Unsupported value of " (type value)) {:value value})))) values)) (defn flatten-errors [& errors] (some->> errors flatten (map unpack-if-package) (filter error-value?) not-empty error-aggregate)) (defmacro precluding-errors [errors result] `(let [error-value# (flatten-errors ~errors)] (if (worse-than :info error-value#) error-value# ~result))) (defn package-errors [node-id & errors] (assert (gt/node-id? node-id)) (some-> errors (flatten-packages node-id) flatten-errors (assoc :_node-id node-id) ->ErrorPackage)) (defn unpack-errors [error-package] (assert (or (nil? error-package) (instance? ErrorPackage error-package))) (some-> error-package :packaged-errors))
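;; Editor's usage sketch (not part of the original file): composing error values
;; with the API above. Aggregation takes the worst severity of its causes, and
;; error-message joins the distinct cause messages. package-errors is omitted
;; here because it requires a real graph node id.
(comment
  (let [agg (error-aggregate [(error-fatal "missing resource")
                              (error-warning "deprecated property")])]
    [(error-severity agg)      ;; => :fatal
     (worse-than :warning agg) ;; => true
     (error-message agg)]))    ;; => "missing resource\ndeprecated property"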
44413
[ { "context": "jks\"\n ;:key-password \"password\"\n ;:ssl-port 8443\n ", "end": 2013, "score": 0.9989415407180786, "start": 2005, "tag": "PASSWORD", "value": "password" } ]
vase/src/eiga/service.clj
defndaines/eiga
0
(ns eiga.service (:require [io.pedestal.http :as http] [io.pedestal.http.route :as route] [io.pedestal.http.body-params :as body-params] [ring.util.response :as ring-resp] [com.cognitect.vase :as vase])) (defn about-page [request] (ring-resp/response (format "Clojure %s - served from %s" (clojure-version) (route/url-for ::about-page)))) (defn home-page [request] (ring-resp/response "Hello World!")) ;; Defines "/" and "/about" routes with their associated :get handlers. ;; The interceptors defined after the verb map (e.g., {:get home-page} ;; apply to / and its children (/about). (def common-interceptors [(body-params/body-params) http/html-body]) ;; Tabular routes (def routes #{["/" :get (conj common-interceptors `home-page)] ["/about" :get (conj common-interceptors `about-page)]}) (def service {:env :prod ;; You can bring your own non-default interceptors. Make ;; sure you include routing and set it up right for ;; dev-mode. If you do, many other keys for configuring ;; default interceptors will be ignored. ;; ::http/interceptors [] ;; Uncomment next line to enable CORS support, add ;; string(s) specifying scheme, host and port for ;; allowed source(s): ;; ;; "http://localhost:8080" ;; ;;::http/allowed-origins ["scheme://host:port"] ::route-set routes ::vase/api-root "/api" ::vase/spec-resources ["eiga_service.edn"] ;; Root for resource interceptor that is available by default. ::http/resource-path "/public" ;; Either :jetty, :immutant or :tomcat (see comments in project.clj) ::http/type :jetty ;;::http/host "localhost" ::http/port 8080 ;; Options to pass to the container (Jetty) ::http/container-options {:h2c? true :h2? false ;:keystore "test/hp/keystore.jks" ;:key-password "password" ;:ssl-port 8443 :ssl? false}})
116349
[ { "context": "\n(deftest dump-opts\n (let [data [{:age 33 :name \"jon\"} {:age 44 :name \"boo\"}]]\n\n (is (= \"- age: 33\\", "end": 265, "score": 0.9992175102233887, "start": 262, "tag": "NAME", "value": "jon" }, { "context": "(let [data [{:age 33 :name \"jon\"} {:age 44 :name \"boo\"}]]\n\n (is (= \"- age: 33\\n name: jon\\n- age: 4", "end": 287, "score": 0.9309874176979065, "start": 284, "tag": "NAME", "value": "boo" }, { "context": " 44 :name \"boo\"}]]\n\n (is (= \"- age: 33\\n name: jon\\n- age: 44\\n name: boo\\n\"\n (generate-s", "end": 327, "score": 0.9988266229629517, "start": 324, "tag": "NAME", "value": "jon" }, { "context": "flow-style :block})))\n (is (= \"[{age: 33, name: jon}, {age: 44, name: boo}]\\n\"\n (generate-s", "end": 460, "score": 0.9990947842597961, "start": 457, "tag": "NAME", "value": "jon" }, { "context": " (is (= \"- \\\"age\\\": !!int \\\"33\\\"\\n \\\"name\\\": \\\"jon\\\"\\n- \\\"age\\\": !!int \\\"44\\\"\\n \\\"name\\\": \\\"boo\\\"\\n", "end": 615, "score": 0.9992191195487976, "start": 612, "tag": "NAME", "value": "jon" }, { "context": "d})))\n (is (= \"- 'age': !!int '33'\\n 'name': 'jon'\\n- 'age': !!int '44'\\n 'name': 'boo'\\n\"\n ", "end": 797, "score": 0.9989396333694458, "start": 794, "tag": "NAME", "value": "jon" }, { "context": " \"- \\\"age\\\": !!int |-\\n 33\\n \\\"name\\\": |-\\n jon\\n- \\\"age\\\": !!int |-\\n 44\\n \\\"name\\\": |-\\n ", "end": 987, "score": 0.9548283815383911, "start": 984, "tag": "NAME", "value": "jon" }, { "context": "ar-style :folded})))\n (is (= \"- {age: 33, name: jon}\\n- {age: 44, name: boo}\\n\"\n (generate-", "end": 1353, "score": 0.9984269142150879, "start": 1350, "tag": "NAME", "value": "jon" }, { "context": " (is (= \"- {age: 33, name: jon}\\n- {age: 44, name: boo}\\n\"\n (generate-string data :dumper-op", "end": 1375, "score": 0.6131361722946167, "start": 1374, "tag": "NAME", "value": "b" } ]
test/yaml/writer_test.clj
camsaul/yaml
73
(ns yaml.writer-test (:require [clojure.test :refer :all] [yaml.writer :refer :all] [flatland.ordered.set :refer [ordered-set]] [flatland.ordered.map :refer [ordered-map]])) (deftest dump-opts (let [data [{:age 33 :name "jon"} {:age 44 :name "boo"}]] (is (= "- age: 33\n name: jon\n- age: 44\n name: boo\n" (generate-string data :dumper-options {:flow-style :block}))) (is (= "[{age: 33, name: jon}, {age: 44, name: boo}]\n" (generate-string data :dumper-options {:flow-style :flow}))) (is (= "- \"age\": !!int \"33\"\n \"name\": \"jon\"\n- \"age\": !!int \"44\"\n \"name\": \"boo\"\n" (generate-string data :dumper-options {:scalar-style :double-quoted}))) (is (= "- 'age': !!int '33'\n 'name': 'jon'\n- 'age': !!int '44'\n 'name': 'boo'\n" (generate-string data :dumper-options {:scalar-style :single-quoted}))) (is (= "- \"age\": !!int |-\n 33\n \"name\": |-\n jon\n- \"age\": !!int |-\n 44\n \"name\": |-\n boo\n" (generate-string data :dumper-options {:scalar-style :literal}))) (is (= "- \"age\": !!int >-\n 33\n \"name\": >-\n jon\n- \"age\": !!int >-\n 44\n \"name\": >-\n boo\n" (generate-string data :dumper-options {:scalar-style :folded}))) (is (= "- {age: 33, name: jon}\n- {age: 44, name: boo}\n" (generate-string data :dumper-options {:scalar-style :plain}))))) (deftest preserve-namespaces (let [data {:foo/bar "baz"}] (is (= "{foo/bar: baz}\n" (generate-string data))))) (deftest writing-order (let [om (into (ordered-map) (partition 2 (range 0 20))) os (into (ordered-set) (range 0 10)) v (into [] (range 0 10))] (= "0: 1\n2: 3\n4: 5\n6: 7\n8: 9\n10: 11\n12: 13\n14: 15\n16: 17\n18: 19\n" (generate-string om :dumper-options {:flow-style :block})) (= "!!set\n0: null\n1: null\n2: null\n3: null\n4: null\n5: null\n6: null\n7: null\n8: null\n9: null\n" (generate-string os :dumper-options {:flow-style :block})) (= "- 0\n- 1\n- 2\n- 3\n- 4\n- 5\n- 6\n- 7\n- 8\n- 9\n" (generate-string v :dumper-options {:flow-style :block}))))
94306
[ { "context": "ate :runner)\n (play-from-hand state :corp \"Caprice Nisei\" \"Server 1\")\n (is (= 11 (:credit (get-corp", "end": 5098, "score": 0.9992964863777161, "start": 5085, "tag": "NAME", "value": "Caprice Nisei" } ]
test/clj/game_test/games/scenarios.clj
Odie/netrunner
0
(ns game-test.games.scenarios (:require [game.core :as core] [game-test.core :refer :all] [game-test.utils :refer :all] [game-test.macros :refer :all] [clojure.test :refer :all])) (use-fixtures :once load-all-cards) (deftest minigame-prevent-netdmg-resourcetrash (testing "Mini-game testing prevention of net damage and resource trashing, with hosted Fall Guy" (do-game (new-game (default-corp [(qty "Neural EMP" 1) (qty "Hedge Fund" 3) (qty "SEA Source" 1)]) (default-runner [(qty "Fall Guy" 1) (qty "Off-Campus Apartment" 1) (qty "Net Shield" 1) (qty "Wireless Net Pavilion" 1) (qty "Sure Gamble" 1)])) (play-from-hand state :corp "Hedge Fund") (play-from-hand state :corp "Hedge Fund") (take-credits state :corp 1) (is (= 14 (:credit (get-corp)))) (core/gain state :runner :click 2) (run-empty-server state "Archives") ; enable Corp play of Neural and SEA next turn (play-from-hand state :runner "Sure Gamble") (play-from-hand state :runner "Off-Campus Apartment") (play-from-hand state :runner "Wireless Net Pavilion") (play-from-hand state :runner "Net Shield") (let [apt (get-resource state 0)] (card-ability state :runner apt 0) (prompt-select :runner (find-card "Fall Guy" (:hand (get-runner)))) (take-credits state :runner) (is (= 6 (:credit (get-runner)))) (play-from-hand state :corp "Neural EMP") (let [ns (get-program state 0) fg (first (:hosted (refresh apt)))] (card-ability state :runner ns 0) (is (= 5 (:credit (get-runner))) "Runner paid 1c to survive Neural EMP") (prompt-choice :runner "Done") (play-from-hand state :corp "SEA Source") (prompt-choice :corp 3) ; boost trace to 6 (prompt-choice :runner 0) (is (= 1 (:tag (get-runner))) "Runner took tag from SEA Source") (is (= 7 (:credit (get-corp)))) (core/trash-resource state :corp nil) (prompt-select :corp (find-card "Off-Campus Apartment" (:rig (get-runner)))) (is (= 3 (:credit (get-corp))) "WNP increased cost to trash a resource by 2") (card-ability state :runner fg 0) ; Trash Fall Guy to save the Apartment! 
(is (= (:title (get-resource state 0)) "Off-Campus Apartment") "Apartment still standing") (is (= (:title (last (:discard (get-runner)))) "Fall Guy") "Fall Guy trashed")))))) (deftest hb-glacier (testing "HB Glacier econ and server protection with upgrades - Ash, Caprice, Breaker Bay Grid, positional ice strength boost" (do-game (new-game (make-deck "Haas-Bioroid: Engineering the Future" [(qty "Adonis Campaign" 1) (qty "Global Food Initiative" 1) (qty "Breaker Bay Grid" 1) (qty "Caprice Nisei" 1) (qty "Ash 2X3ZB9CY" 1) (qty "Turing" 1) (qty "Hedge Fund" 1)]) (default-runner [(qty "Desperado" 1) (qty "Dirty Laundry" 1) (qty "Emergency Shutdown" 1) (qty "Lamprey" 1) (qty "Data Folding" 1) (qty "Career Fair" 1)])) (core/draw state :corp 1) (core/gain state :corp :click 1) (play-from-hand state :corp "Hedge Fund") (play-from-hand state :corp "Adonis Campaign" "New remote") (is (= 10 (:credit (get-corp))) "HB:EtF ability paid 1 credit") (play-from-hand state :corp "Breaker Bay Grid" "Server 1") (play-from-hand state :corp "Ash 2X3ZB9CY" "HQ") (let [adon (get-content state :remote1 0) bbg (get-content state :remote1 1) ash (get-content state :hq 0)] (core/rez state :corp bbg) (core/rez state :corp adon) (is (= 10 (:credit (get-corp))) "Breaker Bay Grid allowed rez of Adonis for free") (take-credits state :corp) (core/draw state :runner 1) (play-from-hand state :runner "Career Fair") (prompt-select :runner (find-card "Data Folding" (:hand (get-runner)))) (is (= 5 (:credit (get-runner))) "Data Folding installed for free by Career Fair") (play-from-hand state :runner "Lamprey") (play-from-hand state :runner "Desperado") (is (= 1 (:credit (get-runner)))) (run-on state "HQ") (core/rez state :corp ash) (run-successful state) (prompt-choice :corp 0) (prompt-choice :runner 0) (is (and (= 2 (:credit (get-runner))) (= 7 (:credit (get-corp)))) "Desperado paid 1 to Runner, Lamprey took 1 from Corp") (prompt-choice :runner "No") ; can't afford to trash Ash (take-credits state :runner) (play-from-hand state :corp "Caprice Nisei" "Server 1") (is (= 11 (:credit (get-corp))) "Gained 3 from Adonis and 1 from HB:EtF") (play-from-hand state :corp "Turing" "Server 1") (take-credits state :corp 1) (is (= 3 (:credit (get-runner))) "Gained 1 from Data Folding") (core/gain state :runner :click 2) (run-empty-server state "HQ") (prompt-choice :corp 0) (prompt-choice :runner 0) (prompt-choice :runner "Yes") ; trash Ash (is (and (= 1 (:credit (get-runner))) (= 11 (:credit (get-corp))))) (core/gain state :runner :credit 1) (play-from-hand state :runner "Dirty Laundry") (prompt-choice :runner "HQ") (run-successful state) (prompt-choice :runner "Steal") (is (= 2 (:agenda-point (get-runner))) "Stole Global Food Initiative") (is (and (= 6 (:credit (get-runner))) (= 10 (:credit (get-corp)))) "Desperado plus Dirty Laundry, Lamprey took 1 from Corp") (run-on state "Server 1") (let [tur (get-ice state :remote1 0) cap (get-content state :remote1 2)] (core/rez state :corp tur) (is (= 5 (:current-strength (refresh tur))) "Turing +3 strength protecting a remote") (card-subroutine state :corp tur 0) ; end the run (play-from-hand state :runner "Emergency Shutdown") (prompt-select :runner tur) (is (not (get-in (refresh tur) [:rezzed])) "Turing derezzed") (run-on state "Server 1") ; letting Runner in this time to use Caprice (core/rez state :corp cap) (run-continue state) ;; Caprice psi game started automatically (prompt-choice :corp "1 [Credits]") (prompt-choice :runner "2 [Credits]") (is (not (:run @state)) "Corp won Caprice psi game and 
ended the run"))))))
7026
[ { "context": " expected-organizations)\n (assoc :personnel personnel)\n ;; DIF only support some portion of the ", "end": 6921, "score": 0.6845988035202026, "start": 6912, "tag": "NAME", "value": "personnel" }, { "context": " <Role>TECHNICAL CONTACT</Role>\n <First_Name>ANDREA</First_Name>\n <Last_Name>DE BONO</Last_Name>", "end": 10659, "score": 0.9995477199554443, "start": 10653, "tag": "NAME", "value": "ANDREA" }, { "context": " <First_Name>ANDREA</First_Name>\n <Last_Name>DE BONO</Last_Name>\n <Email>[email protected]</Email>\n", "end": 10697, "score": 0.9996591210365295, "start": 10690, "tag": "NAME", "value": "DE BONO" }, { "context": " <Last_Name>DE BONO</Last_Name>\n <Email>[email protected]</Email>\n </Personnel>\n <Parameters>\n <", "end": 10738, "score": 0.9999094009399414, "start": 10723, "tag": "EMAIL", "value": "[email protected]" }, { "context": "ole>DATA CENTER CONTACT</Role>\n <First_Name>ETIENNE</First_Name>\n <Last_Name>BARTHOLOME", "end": 13823, "score": 0.5953984260559082, "start": 13823, "tag": "NAME", "value": "" }, { "context": "le>DATA CENTER CONTACT</Role>\n <First_Name>ETIENNE</First_Name>\n <Last_Name>BARTHOLOME</Last_", "end": 13831, "score": 0.9947346448898315, "start": 13824, "tag": "NAME", "value": "ETIENNE" }, { "context": "First_Name>ETIENNE</First_Name>\n <Last_Name>BARTHOLOME</Last_Name>\n <Email>etienne.bart", "end": 13863, "score": 0.8033966422080994, "start": 13863, "tag": "NAME", "value": "" }, { "context": "irst_Name>ETIENNE</First_Name>\n <Last_Name>BARTHOLOME</Last_Name>\n <Email>etienne.bartholome@jrc", "end": 13874, "score": 0.9989835619926453, "start": 13864, "tag": "NAME", "value": "BARTHOLOME" }, { "context": " <Last_Name>BARTHOLOME</Last_Name>\n <Email>[email protected]</Email>\n <Phone>+39 332 789908</Phone>\n ", "end": 13927, "score": 0.9998856782913208, "start": 13902, "tag": "EMAIL", "value": "[email protected]" }, { "context": " <Last_Name>UNEP/GRID</Last_Name>\n <Email>[email protected]</Email>\n <Phone>+254-2-621234</Phone>\n ", "end": 14816, "score": 0.9999061822891235, "start": 14799, "tag": "EMAIL", "value": "[email protected]" }, { "context": "ference\n {:author \"author\"\n :publication-da", "end": 22441, "score": 0.9268112778663635, "start": 22435, "tag": "USERNAME", "value": "author" }, { "context": "c/map->Personnel\n {:first-name \"ANDREA\"\n :last-name \"DE BONO\"\n ", "end": 29012, "score": 0.9997186660766602, "start": 29006, "tag": "NAME", "value": "ANDREA" }, { "context": "rst-name \"ANDREA\"\n :last-name \"DE BONO\"\n :roles [\"DIF AUTHOR\" \"TECHNI", "end": 29053, "score": 0.9996569752693176, "start": 29046, "tag": "NAME", "value": "DE BONO" }, { "context": " :email\n :value \"[email protected]\"})]})]\n :access-value 1.0}))\n\n(deftest valida", "end": 29272, "score": 0.9999159574508667, "start": 29257, "tag": "EMAIL", "value": "[email protected]" } ]
umm-lib/test/cmr/umm/test/dif/dif_collection_tests.clj
eereiter/Common-Metadata-Repository
0
(ns cmr.umm.test.dif.dif-collection-tests "Tests parsing and generating DIF Collection XML." (:require [clojure.test :refer :all] ; [clojure.test.check.clojure-test :refer [defspec]] ;; Temporarily included to use the fixed defspec. Remove once issue is fixed. [cmr.common.test.test-check-ext :refer [defspec]] [clojure.test.check.properties :refer [for-all]] [clojure.test.check.generators :as gen] [clojure.string :as s] [cmr.common.joda-time] [cmr.common.date-time-parser :as p] [cmr.common.util :as util] [cmr.umm.test.generators.collection :as coll-gen] [cmr.umm.dif.dif-collection :as c] [cmr.umm.echo10.echo10-collection :as echo10-c] [cmr.umm.echo10.echo10-core :as echo10] [cmr.umm.umm-collection :as umm-c] [cmr.umm.dif.dif-core :as dif] [cmr.spatial.mbr :as m] [cmr.umm.test.echo10.echo10-collection-tests :as test-echo10] [cmr.umm.validation.validation-core :as v] [cmr.common.test.test-check-ext :as ext :refer [checking]]) (:import cmr.spatial.mbr.Mbr)) (defn- spatial-coverage->expected-parsed "Takes the spatial-coverage used to generate the dif and returns the expected parsed spatial-coverage from the dif." [spatial-coverage] (when spatial-coverage (let [{:keys [granule-spatial-representation spatial-representation geometries]} spatial-coverage ;; DIF only support bounding rectangles geometries (seq (filter (comp (partial = Mbr) type) geometries)) ;; DIF only supports the cartesian coordinate system for the collection spatial representation spatial-representation (when geometries :cartesian)] (when (or granule-spatial-representation spatial-representation) (assoc spatial-coverage :granule-spatial-representation granule-spatial-representation :spatial-representation spatial-representation :geometries geometries :orbit-parameters nil))))) (defn- instruments->expected "Returns the expected instruments for the given instruments" [instruments] (seq (map #(assoc % :technique nil, :sensors nil, :characteristics nil, :operation-modes nil) instruments))) (defn- platform->expected "Returns the expected platform for the given platform" [platform] (-> platform (assoc :type umm-c/not-provided :characteristics nil) (update-in [:instruments] instruments->expected))) (defn- platforms->expected-parsed "Returns the expected parsed platforms for the given platforms." [platforms] (let [platforms (seq (map platform->expected platforms))] (if (= 1 (count platforms)) platforms (if-let [instruments (seq (mapcat :instruments platforms))] (conj (map #(assoc % :instruments nil) platforms) (umm-c/map->Platform {:short-name umm-c/not-provided :long-name umm-c/not-provided :type umm-c/not-provided :instruments instruments})) platforms)))) (defn- related-urls->expected-parsed "Returns the expected parsed related-urls for the given related-urls." [related-urls] (seq (map #(assoc % :size nil :mime-type nil) related-urls))) (defn- collection-associations->expected-collection-associations "Returns the expected parsed collection-associations for the given collection-associations." [collection-associations] (seq (map #(assoc % :version-id umm-c/not-provided) collection-associations))) (defn- filter-contacts "Remove contacts from a Personnel record that are not emails." [person] (update-in person [:contacts] (fn [contacts] (filter #(= :email (:type %)) contacts)))) (defn- science-keywords->expected-parsed "Returns expected parsed science keywords if science keywords is empty" [science-keywords] (if (empty? 
science-keywords) [(umm-c/map->ScienceKeyword {:category umm-c/not-provided :topic umm-c/not-provided :term umm-c/not-provided})] science-keywords)) (defn- expected-organizations "Re-order the organizations by distribution centers, add an archive center for each distribution center, then processing centers" [organizations] (let [distribution-centers (filter #(= :distribution-center (:type %)) organizations)] (concat distribution-centers (map #(assoc % :type :archive-center) distribution-centers) (filter #(= :processing-center (:type %)) organizations)))) (defn- umm->expected-parsed-dif "Modifies the UMM record for testing DIF. DIF contains a subset of the total UMM fields so certain fields are removed for comparison of the parsed record" [coll] (let [{{:keys [short-name version-id processing-level-id collection-data-type]} :product :keys [entry-title spatial-coverage personnel]} coll range-date-times (get-in coll [:temporal :range-date-times]) temporal (if (seq range-date-times) (umm-c/map->Temporal {:range-date-times range-date-times :single-date-times [] :periodic-date-times []}) nil) personnel (not-empty (->> personnel ;; only support email right now (map filter-contacts) ;; DIF has no Middle_Name tag (map #(assoc % :middle-name nil))))] (-> coll ;; DIF does not have short-name or long-name, so we assign them to be entry-id and entry-title respectively ;; long-name will only take the first 1024 characters of entry-title if entry-title is too long ;; DIF also does not have version-description. (assoc :product (umm-c/map->Product {:short-name short-name :long-name (util/trunc entry-title 1024) :version-id version-id :processing-level-id processing-level-id :collection-data-type collection-data-type})) ;; There is no delete-time in DIF (assoc-in [:data-provider-timestamps :delete-time] nil) (assoc-in [:data-provider-timestamps :revision-date-time] (get-in coll [:data-provider-timestamps :update-time])) ;; DIF only has range-date-times (assoc :temporal temporal) ;; DIF only has distribution centers as Organization (update :organizations expected-organizations) (assoc :personnel personnel) ;; DIF only support some portion of the spatial (update-in [:spatial-coverage] spatial-coverage->expected-parsed) ;; DIF 9 requires science keywords (update-in [:science-keywords] science-keywords->expected-parsed) ;; DIF does not support size or mime-type in RelatedURLs (update-in [:related-urls] related-urls->expected-parsed) ;; DIF does not have version-id in collection associations and we hardcoded it to "dummy" (update-in [:collection-associations] collection-associations->expected-collection-associations) ;; CMR-588: UMM doesn't have a good recommendation on how to handle spatial-keywords (dissoc :spatial-keywords) ;; DIF platform does not have type, instruments or characteristics fields (update-in [:platforms] platforms->expected-parsed) ;; DIF does not have two-d-coordinate-systems (dissoc :two-d-coordinate-systems) ;; DIF does not have associated-difs (dissoc :associated-difs) ;; DIF does not have metadata-language (dissoc :metadata-language) ;; DIF 9 does not have collection citation (dissoc :collection-citations) ;; DIF9 does not support ranges for additional attributes (update-in [:product-specific-attributes] (fn [psas] (seq (map (fn [psa] (assoc psa :parameter-range-begin nil :parameter-range-end nil :parsed-parameter-range-begin nil :parsed-parameter-range-end nil)) psas)))) umm-c/map->UmmCollection))) (defspec generate-collection-is-valid-xml-test 100 (for-all [collection 
coll-gen/collections] (let [xml (dif/umm->dif-xml collection)] (and (seq xml) (empty? (c/validate-xml xml)))))) (deftest generate-and-parse-collection-test (checking "dif collection round tripping" 100 [collection coll-gen/collections] (let [xml (dif/umm->dif-xml collection) parsed (c/parse-collection xml) expected-parsed (umm->expected-parsed-dif collection)] (is (= expected-parsed parsed))))) (deftest generate-and-parse-collection-between-formats-test (checking "dif parse between formats" 100 [collection coll-gen/collections] (let [xml (dif/umm->dif-xml collection) parsed-dif (c/parse-collection xml) echo10-xml (echo10/umm->echo10-xml parsed-dif) parsed-echo10 (echo10-c/parse-collection echo10-xml) expected-parsed (test-echo10/umm->expected-parsed-echo10 (umm->expected-parsed-dif collection))] (is (= expected-parsed parsed-echo10)) (is (= 0 (count (echo10-c/validate-xml echo10-xml))))))) ;; This is a made-up include all fields collection xml sample for the parse collection test (def all-fields-collection-xml "<DIF xmlns=\"http://gcmd.gsfc.nasa.gov/Aboutus/xml/dif/\" xmlns:dif=\"http://gcmd.gsfc.nasa.gov/Aboutus/xml/dif/\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://gcmd.gsfc.nasa.gov/Aboutus/xml/dif/ http://gcmd.gsfc.nasa.gov/Aboutus/xml/dif/dif_v9.8.4.xsd\"> <Entry_ID>geodata_1848</Entry_ID> <Entry_Title>Global Land Cover 2000 (GLC 2000)</Entry_Title> <Data_Set_Citation> <Dataset_Title>Global Land Cover 2000 (GLC 2000)</Dataset_Title> <Dataset_Release_Date>2003-01-01</Dataset_Release_Date> <Version>006</Version> </Data_Set_Citation> <Personnel> <Role>DIF AUTHOR</Role> <Role>TECHNICAL CONTACT</Role> <First_Name>ANDREA</First_Name> <Last_Name>DE BONO</Last_Name> <Email>[email protected]</Email> </Personnel> <Parameters> <Category>EARTH SCIENCE</Category> <Topic>LAND SURFACE</Topic> <Term>LAND USE/LAND COVER</Term> <Variable_Level_1>LAND COVER</Variable_Level_1> </Parameters> <Parameters uuid=\"cad5c02a-e771-434e-bef6-8dced38a68e8\"> <Category>EARTH SCIENCE</Category> <Topic>ATMOSPHERE</Topic> <Term>PRECIPITATION</Term> <Variable_Level_1>PRECIPITATION AMOUNT</Variable_Level_1> <Variable_Level_2>PRECIPITATION Level 2</Variable_Level_2> <Variable_Level_3>PRECIPITATION Level 3</Variable_Level_3> <Detailed_Variable>PRECIPITATION Details</Detailed_Variable> </Parameters> <ISO_Topic_Category>ENVIRONMENT</ISO_Topic_Category> <Keyword>Land Cover</Keyword> <Keyword>1Km</Keyword> <Keyword>JRC</Keyword> <Keyword>GLC,</Keyword> <Keyword>2000</Keyword> <Keyword>satellite</Keyword> <Sensor_Name> <Short_Name>VEGETATION-1</Short_Name> <Long_Name>VEGETATION INSTRUMENT 1 (SPOT 4)</Long_Name> </Sensor_Name> <Source_Name> <Short_Name>SPOT-1</Short_Name> <Long_Name>Systeme Probatoire Pour l'Observation de la Terre-1</Long_Name> </Source_Name> <Source_Name> <Short_Name>SPOT-4</Short_Name> <Long_Name>Systeme Probatoire Pour l'Observation de la Terre-4</Long_Name> </Source_Name> <Temporal_Coverage> <Start_Date>1996-02-24</Start_Date> <Stop_Date>1997-03-24</Stop_Date> </Temporal_Coverage> <Temporal_Coverage> <Start_Date>1998-02-24T22:20:41-05:00</Start_Date> <Stop_Date>1999-03-24T22:20:41-05:00</Stop_Date> </Temporal_Coverage> <Data_Set_Progress>ONGOING</Data_Set_Progress> <Spatial_Coverage> <Southernmost_Latitude>-90.0</Southernmost_Latitude> <Northernmost_Latitude>-60.5033</Northernmost_Latitude> <Westernmost_Longitude>-180.0</Westernmost_Longitude> <Easternmost_Longitude>180.0</Easternmost_Longitude> </Spatial_Coverage> <Location> <Location_Category>GEOGRAPHIC 
REGION</Location_Category> <Location_Type>GLOBAL</Location_Type> </Location> <Data_Resolution> <Latitude_Resolution>1 km</Latitude_Resolution> <Longitude_Resolution>1 km</Longitude_Resolution> <Horizontal_Resolution_Range>1 km - &lt; 10 km or approximately .01 degree - &lt; .09 degree</Horizontal_Resolution_Range> </Data_Resolution> <Project> <Short_Name>ESI</Short_Name> <Long_Name>Environmental Sustainability Index</Long_Name> </Project> <Project> <Short_Name>UNEP/GRID</Short_Name> <Long_Name>UNEP/Global Resources Information Database</Long_Name> </Project> <Quality>High Quality Metadata</Quality> <Use_Constraints>Public</Use_Constraints> <Data_Center> <Data_Center_Name> <Short_Name>EU/JRC/IES</Short_Name> <Long_Name>Institute for Environment and Sustainability, Joint Research Center, European Union</Long_Name> </Data_Center_Name> <Personnel> <Role>DATA CENTER CONTACT</Role> <First_Name>ETIENNE</First_Name> <Last_Name>BARTHOLOME</Last_Name> <Email>[email protected]</Email> <Phone>+39 332 789908</Phone> <Fax>+39 332 789073</Fax> <Contact_Address> <Address>Space Applications Institute, T.P. 440</Address> <Address>EC Joint Research Centre JRC</Address> <City>Ispra (VA)</City> <Postal_Code>21020</Postal_Code> <Country>Italy</Country> </Contact_Address> </Personnel> </Data_Center> <Data_Center> <Data_Center_Name> <Short_Name>UNEP/DEWA/GRID-EUROPE</Short_Name> <Long_Name>Global Resource Information Database - Geneva, Division of Early Warning and Assessment, United Nations Environment Programme</Long_Name> </Data_Center_Name> <Data_Center_URL>http://www.grid.unep.ch/</Data_Center_URL> <Personnel> <Role>DATA CENTER CONTACT</Role> <Last_Name>UNEP/GRID</Last_Name> <Email>[email protected]</Email> <Phone>+254-2-621234</Phone> <Fax>+254-2-226890 or 215787</Fax> <Contact_Address> <Address>United Nations Environment Programme</Address> <Address>Global Resource Information Database UNEP/GRID</Address> <Address>P.O.Box 30552</Address> <Province_or_State>Nairobi</Province_or_State> <Country>KENYA</Country> </Contact_Address> </Personnel> </Data_Center> <Summary> <Abstract>Summary of collection.</Abstract> <Purpose>A grand purpose</Purpose> </Summary> <Reference> <Author>author</Author> <Publication_Date>2015</Publication_Date> <Title>title</Title> <Series>1</Series> <Edition>2</Edition> <Volume>3</Volume> <Issue>4</Issue> <Report_Number>5</Report_Number> <Publication_Place>Frederick, MD</Publication_Place> <Publisher>publisher</Publisher> <Pages>678</Pages> <ISBN>978-0-394-80001-1</ISBN> <Online_Resource>http://example.com</Online_Resource> <DOI>http://dx.doi.org/12.3456/ABC012XYZ</DOI> <Other_Reference_Details>blah</Other_Reference_Details> </Reference> <Related_URL> <URL_Content_Type> <Type>GET DATA</Type> </URL_Content_Type> <URL>http://geodata.grid.unep.ch/</URL> </Related_URL> <Related_URL> <URL_Content_Type> <Type>GET DATA</Type> <Subtype>ON-LINE ARCHIVE</Subtype> </URL_Content_Type> <URL>ftp://airsl2.gesdisc.eosdis.nasa.gov/ftp/data/s4pa/Aqua_AIRS_Level2/AIRH2CCF.006/</URL> <Description>Access the AIRS/Aqua FINAL AIRS Level 2 Cloud Clear Radiance Product (With HSB) data by FTP.</Description> </Related_URL> <Parent_DIF>CNDP-ESP_IPY_POL2006-11139-C02-01CGL_ESASSI</Parent_DIF> <Parent_DIF>CNDP-ESP_2</Parent_DIF> <IDN_Node> <Short_Name>UNEP/GRID</Short_Name> </IDN_Node> <Originating_Metadata_Node>GCMD</Originating_Metadata_Node> <Metadata_Name>CEOS IDN DIF</Metadata_Name> <Metadata_Version>VERSION 9.8.4</Metadata_Version> <DIF_Creation_Date>2013-02-21</DIF_Creation_Date> 
<Last_DIF_Revision_Date>2013-10-22</Last_DIF_Revision_Date> <Extended_Metadata> <Metadata> <Group>gov.nasa.gsfc.gcmd</Group> <Name>metadata.uuid</Name> <Value>743933e5-1404-4502-915f-83cde56af440</Value> </Metadata> <Metadata> <Group>gov.nasa.gsfc.gcmd</Group> <Name>metadata.extraction_date</Name> <Value>2013-09-30 09:45:15</Value> </Metadata> <Metadata> <Group>EMS</Group> <Name>ProductLevelId</Name> <Value>2</Value> </Metadata> <Metadata> <Group>ECHO</Group> <Name>CollectionDataType</Name> <Value>NEAR_REAL_TIME</Value> </Metadata> <Metadata> <Group>spatial coverage</Group> <Name>GranuleSpatialRepresentation</Name> <Value>GEODETIC</Value> </Metadata> <Metadata> <Group>custom.group</Group> <Name>String attribute</Name> <Description>something string</Description> <Value>alpha</Value> </Metadata> <Metadata> <Group>custom.group</Group> <Name>Float attribute</Name> <Description>something float</Description> <Value>12.3</Value> </Metadata> <Metadata> <Group>custom.group</Group> <Name>Int attribute</Name> <Description>something int</Description> <Value>42</Value> </Metadata> <Metadata> <Group>custom.group</Group> <Name>Date attribute</Name> <Description>something date</Description> <Value>2015-09-14</Value> </Metadata> <Metadata> <Group>custom.group</Group> <Name>Datetime attribute</Name> <Description>something datetime</Description> <Value>2015-09-14T13:01:00Z</Value> </Metadata> <Metadata> <Group>custom.group</Group> <Name>Time attribute</Name> <Description>something time</Description> <Value>13:01:00Z</Value> </Metadata> <Metadata> <Group>custom.group</Group> <Name>Bool attribute</Name> <Description>something bool</Description> <Value>false</Value> </Metadata> <Metadata> <Group>gov.nasa.earthdata.cmr</Group> <Name>Restriction</Name> <Value>1</Value> </Metadata> <Metadata> <Name>Processor</Name> <Value>LPDAAC</Value> </Metadata> </Extended_Metadata> </DIF>") (def valid-collection-xml "<DIF xmlns=\"http://gcmd.gsfc.nasa.gov/Aboutus/xml/dif/\" xmlns:dif=\"http://gcmd.gsfc.nasa.gov/Aboutus/xml/dif/\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://gcmd.gsfc.nasa.gov/Aboutus/xml/dif/ http://gcmd.gsfc.nasa.gov/Aboutus/xml/dif/dif_v9.8.4.xsd\"> <Entry_ID>minimal_dif_dataset</Entry_ID> <Entry_Title>A minimal dif dataset</Entry_Title> <Data_Set_Citation> <Dataset_Title>dataset_title</Dataset_Title> </Data_Set_Citation> <Parameters> <Category>category</Category> <Topic>topic</Topic> <Term>term</Term> </Parameters> <ISO_Topic_Category>iso topic category</ISO_Topic_Category> <Data_Center> <Data_Center_Name> <Short_Name>datacenter_short_name</Short_Name> <Long_Name>data center long name</Long_Name> </Data_Center_Name> <Personnel> <Role>DummyRole</Role> <Last_Name>UNEP</Last_Name> </Personnel> </Data_Center> <Reference> <Author>author</Author> <Publication_Date>2015</Publication_Date> <Title>title</Title> <Publication_Place>Frederick, MD</Publication_Place> <Publisher>publisher</Publisher> <DOI>http://dx.doi.org/12.3456/ABC012XYZ</DOI> </Reference> <Summary> <Abstract>summary of the dataset</Abstract> <Purpose>A grand purpose</Purpose> </Summary> <Metadata_Name>CEOS IDN DIF</Metadata_Name> <Metadata_Version>VERSION 9.8.4</Metadata_Version> <Last_DIF_Revision_Date>2013-10-22</Last_DIF_Revision_Date> </DIF>") (def expected-temporal (umm-c/map->Temporal {:range-date-times [(umm-c/map->RangeDateTime {:beginning-date-time (p/parse-datetime "1996-02-24") :ending-date-time (p/parse-datetime "1997-03-24T23:59:59.999")}) (umm-c/map->RangeDateTime {:beginning-date-time 
(p/parse-datetime "1998-02-24T22:20:41-05:00") :ending-date-time (p/parse-datetime "1999-03-24T22:20:41-05:00")})] :single-date-times [] :periodic-date-times []})) (def expected-collection (umm-c/map->UmmCollection {:entry-title "Global Land Cover 2000 (GLC 2000)" :summary "Summary of collection." :purpose "A grand purpose" :quality "High Quality Metadata" :use-constraints "Public" :product (umm-c/map->Product {:short-name "geodata_1848" :long-name "Global Land Cover 2000 (GLC 2000)" :version-id "006" :processing-level-id "2" :collection-data-type "NEAR_REAL_TIME"}) :data-provider-timestamps (umm-c/map->DataProviderTimestamps {:insert-time (p/parse-datetime "2013-02-21") :update-time (p/parse-datetime "2013-10-22") :revision-date-time (p/parse-datetime "2013-10-22")}) :publication-references [(umm-c/map->PublicationReference {:author "author" :publication-date "2015" :title "title" :series "1" :edition "2" :volume "3" :issue "4" :report-number "5" :publication-place "Frederick, MD" :publisher "publisher" :pages "678" :isbn "978-0-394-80001-1" :related-url "http://example.com" :doi "http://dx.doi.org/12.3456/ABC012XYZ" :other-reference-details "blah"})] :spatial-keywords ["GLOBAL"] :platforms [(umm-c/map->Platform {:short-name umm-c/not-provided :long-name umm-c/not-provided :type umm-c/not-provided :instruments [(umm-c/map->Instrument {:short-name "VEGETATION-1" :long-name "VEGETATION INSTRUMENT 1 (SPOT 4)"})]}) (umm-c/map->Platform {:short-name "SPOT-1" :long-name "Systeme Probatoire Pour l'Observation de la Terre-1" :type umm-c/not-provided}) (umm-c/map->Platform {:short-name "SPOT-4" :long-name "Systeme Probatoire Pour l'Observation de la Terre-4" :type umm-c/not-provided})] :temporal expected-temporal :collection-progress :in-work :science-keywords [(umm-c/map->ScienceKeyword {:category "EARTH SCIENCE" :topic "LAND SURFACE" :term "LAND USE/LAND COVER" :variable-level-1 "LAND COVER"}) (umm-c/map->ScienceKeyword {:category "EARTH SCIENCE" :topic "ATMOSPHERE" :term "PRECIPITATION" :variable-level-1 "PRECIPITATION AMOUNT" :variable-level-2 "PRECIPITATION Level 2" :variable-level-3 "PRECIPITATION Level 3" :detailed-variable "PRECIPITATION Details"})] :product-specific-attributes [(umm-c/map->ProductSpecificAttribute {:group "gov.nasa.gsfc.gcmd" :name "metadata.uuid" :data-type :string :value "743933e5-1404-4502-915f-83cde56af440" :parsed-value "743933e5-1404-4502-915f-83cde56af440" :description "Not provided"}) (umm-c/map->ProductSpecificAttribute {:group "gov.nasa.gsfc.gcmd" :name "metadata.extraction_date" :data-type :string :value "2013-09-30 09:45:15" :parsed-value "2013-09-30 09:45:15" :description "Not provided"}) (umm-c/map->ProductSpecificAttribute {:group "custom.group" :name "String attribute" :description "something string" :data-type :string :value "alpha" :parsed-value "alpha"}) (umm-c/map->ProductSpecificAttribute {:group "custom.group" :name "Float attribute" :description "something float" :data-type :float :value "12.3" :parsed-value 12.3}) (umm-c/map->ProductSpecificAttribute {:group "custom.group" :name "Int attribute" :description "something int" :data-type :int :value "42" :parsed-value 42}) (umm-c/map->ProductSpecificAttribute {:group "custom.group" :name "Date attribute" :description "something date" :data-type :date :value "2015-09-14" :parsed-value (p/parse-datetime "2015-09-14")}) (umm-c/map->ProductSpecificAttribute {:group "custom.group" :name "Datetime attribute" :description "something datetime" :data-type :datetime :value "2015-09-14T13:01:00Z" :parsed-value 
(p/parse-datetime "2015-09-14T13:01:00Z")}) (umm-c/map->ProductSpecificAttribute {:group "custom.group" :name "Time attribute" :description "something time" :data-type :time :value "13:01:00Z" :parsed-value (p/parse-time "13:01:00Z")}) (umm-c/map->ProductSpecificAttribute {:group "custom.group" :name "Bool attribute" :description "something bool" :data-type :boolean :value "false" :parsed-value false})] :spatial-coverage (umm-c/map->SpatialCoverage {:granule-spatial-representation :geodetic :spatial-representation :cartesian :geometries [(m/mbr -180 -60.5033 180 -90)]}) :collection-associations [(umm-c/map->CollectionAssociation {:short-name "CNDP-ESP_IPY_POL2006-11139-C02-01CGL_ESASSI" :version-id umm-c/not-provided}) (umm-c/map->CollectionAssociation {:short-name "CNDP-ESP_2" :version-id umm-c/not-provided})] :projects [(umm-c/map->Project {:short-name "ESI" :long-name "Environmental Sustainability Index"}) (umm-c/map->Project {:short-name "UNEP/GRID" :long-name "UNEP/Global Resources Information Database"})] :related-urls [(umm-c/map->RelatedURL {:type "GET DATA" :url "http://geodata.grid.unep.ch/"}) (umm-c/map->RelatedURL {:type "GET DATA" :sub-type "ON-LINE ARCHIVE" :url "ftp://airsl2.gesdisc.eosdis.nasa.gov/ftp/data/s4pa/Aqua_AIRS_Level2/AIRH2CCF.006/" :description "Access the AIRS/Aqua FINAL AIRS Level 2 Cloud Clear Radiance Product (With HSB) data by FTP." :title "Access the AIRS/Aqua FINAL AIRS Level 2 Cloud Clear Radiance Product (With HSB) data by FTP."})] :organizations [(umm-c/map->Organization {:type :distribution-center :org-name "EU/JRC/IES"}) (umm-c/map->Organization {:type :distribution-center :org-name "UNEP/DEWA/GRID-EUROPE"}) (umm-c/map->Organization {:type :archive-center :org-name "EU/JRC/IES"}) (umm-c/map->Organization {:type :archive-center :org-name "UNEP/DEWA/GRID-EUROPE"}) (umm-c/map->Organization {:type :processing-center :org-name "LPDAAC"})] :personnel [(umm-c/map->Personnel {:first-name "ANDREA" :last-name "DE BONO" :roles ["DIF AUTHOR" "TECHNICAL CONTACT"] :contacts [(umm-c/map->Contact {:type :email :value "[email protected]"})]})] :access-value 1.0})) (deftest validate-parsed-dif-test (testing "Validate DIF to UMM Collection" (let [parsed-dif (c/parse-collection all-fields-collection-xml)] (is (empty? (v/validate-collection parsed-dif)))))) (deftest parse-collection-test (testing "parse collection" (is (= expected-collection (c/parse-collection all-fields-collection-xml)))) (testing "parse temporal" (is (= expected-temporal (c/parse-temporal all-fields-collection-xml)))) (testing "parse collection access value" (is (= 1.0 (c/parse-access-value all-fields-collection-xml))))) (deftest validate-xml (testing "valid xml" (is (empty? (c/validate-xml valid-collection-xml)))) (testing "invalid xml" (is (= [(str "Line 18 - cvc-complex-type.2.4.a: Invalid content was found starting with element 'XXXX'. " "One of '{\"http://gcmd.gsfc.nasa.gov/Aboutus/xml/dif/\":Data_Center_URL, " "\"http://gcmd.gsfc.nasa.gov/Aboutus/xml/dif/\":Data_Set_ID, " "\"http://gcmd.gsfc.nasa.gov/Aboutus/xml/dif/\":Personnel}' is expected.")] (c/validate-xml (s/replace valid-collection-xml "Personnel" "XXXX")))))) (deftest parse-nil-version-test ;; UMM-C is now making the version field a required field. It is optional in DIF-9 so we provide ;; a default of Not provided when it is missing from the DIF-9 metadata. (is (= umm-c/not-provided (get-in (c/parse-collection valid-collection-xml) [:product :version-id]))))
src/test/com/fulcrologic/fulcro/algorithms/form_state_spec.cljc
janezj/fulcro
0
(ns com.fulcrologic.fulcro.algorithms.form-state-spec (:require [com.fulcrologic.fulcro.components :as comp :refer [defsc]] [com.fulcrologic.fulcro.raw.components :as rc] [com.fulcrologic.fulcro.algorithms.form-state :as fs] [com.fulcrologic.fulcro.algorithms.tempid :as tempid] [com.fulcrologic.fulcro.algorithms.denormalize :as fdn] [com.fulcrologic.fulcro.algorithms.normalize :as fnorm] [fulcro-spec.core :refer [behavior specification assertions component when-mocking provided]] [clojure.spec.alpha :as s] [clojure.string :as str] [taoensso.timbre :as log])) (declare =>) (defsc Locale [this props] {:query [:db/id ::country fs/form-config-join] :ident [:locale/by-id :db/id] :form-fields #{::country}}) (s/def ::country keyword?) (defsc Phone [this props] {:query [:db/id {::locale (comp/get-query Locale)} ::phone-number fs/form-config-join] :ident [:phone/id :db/id] :form-fields #{::locale ::phone-number}}) (defsc UnusedForm [this props] {:query [:db/id ::data fs/form-config-join] :ident [:unused/by-id :db/id] :form-fields #{::data}}) (s/def ::phone-number (s/and string? #(re-matches #"[-0-9()]+" %))) (defsc Person [this props] {:query [:db/id ::person-name ::person-age {::unused (comp/get-query UnusedForm)} {::phone-numbers (comp/get-query Phone)} fs/form-config-join] :ident [:person/id :db/id] :form-fields #{::person-name ::unused ::person-age ::phone-numbers}}) (defsc NonForm [this props] {:query [:id :x] :ident [:ntop :id] :form-fields #{:ntop}}) (defsc FormNoFields [this props] {:query [:id :x fs/form-config-join] :ident [:ntop :id]}) (defsc BadlyNestedForm [this props] {:query [:id :name {:thing (comp/get-query NonForm)} fs/form-config-join] :ident [:top :id] :form-fields #{:name :thing}}) (s/def ::person-name (s/and string? #(not (empty? (str/trim %))))) (s/def ::picker any?) (defsc Thing [_ _] {:ident :thing/id :query [:thing/id]}) (defsc Entity [_ _] {:ident :entity/id :query [:entity/id :entity/value {:entity/thing (comp/get-query Thing)} fs/form-config-join] :form-fields #{:entity/value :entity/thing}}) (specification "add-form-config" (component "treats joins in a way that will just track their ident" (let [data-tree {:db/id 1 :entity/value 42 :entity/thing {:thing/id 1}} initial-form (fs/add-form-config Entity data-tree)] (assertions "adds form config to the top level" (contains? initial-form ::fs/config) => true "places the non-form join into subforms (for normalized ident-tracking)" (get-in initial-form [::fs/config ::fs/fields]) => #{:entity/value} (get-in initial-form [::fs/config ::fs/subforms]) => {:entity/thing {}} "leaves the nested entity alone" (contains? (:entity/thing initial-form) ::fs/config) => false))) (component "returns the entity with added configuration data, where:" (let [data-tree {:db/id 1 ::person-name "Joe" ::phone-numbers [{:db/id 2 ::phone-number "555-1212" ::locale {:db/id 5 ::country :US}}]} configured-form (fs/add-form-config Person data-tree) form-config (get configured-form ::fs/config)] (assertions "::f/config is a spec-valid config" (s/valid? 
::fs/config form-config) => true (s/explain-data ::fs/config form-config) => nil "the original entity fields are unchanged" (-> configured-form (dissoc ::fs/config) (update-in [::phone-numbers 0] dissoc ::fs/config) (update-in [::phone-numbers 0 ::locale] dissoc ::fs/config)) => data-tree "the original fields (and subform idents) are saved to pristine state" (::fs/pristine-state form-config) => {::person-name "Joe" ::phone-numbers [[:phone/id 2]]} "the entity's ident is the form's ID" (get-in configured-form [::fs/config ::fs/id]) => [:person/id 1] "has the scalar declared fields" (get-in configured-form [::fs/config ::fs/fields]) => #{::person-name ::person-age} "data about each populated subform is included (recursively)" (some-> form-config ::fs/subforms ::phone-numbers meta :component) => Phone (some-> configured-form ::phone-numbers first ::fs/config ::fs/subforms ::locale meta :component) => Locale "data about empty subforms is included" (some-> form-config ::fs/subforms ::unused meta :component) => UnusedForm "each subform is recursively initialized" (get-in configured-form [::phone-numbers 0 ::fs/config ::fs/id]) => [:phone/id 2] (get-in configured-form [::phone-numbers 0 ::locale ::fs/config ::fs/id]) => [:locale/by-id 5]))) (behavior "for an entity with form-config" (component "by default, without destructive parameter" (let [data-tree {:db/id 1 ::fs/config {::stub true} ::person-name "Joe" ::phone-numbers [{:db/id 2 ::phone-number "555-1212" ::locale {:db/id 5 ::country :US} ::fs/config {::stub true}}]} configured-form (fs/add-form-config Person data-tree) nested-form (-> configured-form ::phone-numbers first)] (assertions "previous form-config is unmodified" (::fs/config configured-form) => {::stub true} (::fs/config nested-form) => {::stub true}))) (component "with destructive parameter" (let [data-tree {:db/id 1 ::fs/config {::stub true} ::person-name "Joe" ::phone-numbers [{:db/id 2 ::phone-number "555-1212" ::locale {:db/id 5 ::country :US} ::fs/config {::stub true}}]} initial-form (fs/add-form-config Person data-tree {:destructive? true}) nested-form (-> initial-form ::phone-numbers first)] (assertions "adds form config to the top level" (contains? initial-form ::fs/config) => true (contains? nested-form ::fs/config) => true "places the non-form join into subforms (for normalized ident-tracking)" (get-in initial-form [::fs/config ::fs/fields]) => #{::person-age ::person-name} (get-in initial-form [::fs/config ::fs/subforms]) => {::phone-numbers {} ::unused {}} (get-in nested-form [::fs/config ::fs/fields]) => #{::phone-number} (get-in nested-form [::fs/config ::fs/subforms]) => {::locale {}})))) (component "error checking" (let [data-tree {:id 1 :name "A" :thing {:id 2 :x 42}}] (assertions "throws an exception if the target fails to query for form config" (fs/add-form-config NonForm data-tree) =throws=> #"to .*NonForm, but it does not query for config" "throws an exception if the target fails to declare fields" (fs/add-form-config FormNoFields data-tree) =throws=> #"to .*FormNoFields, but it does not declare any fields" "does recursive checks on subforms" (fs/add-form-config BadlyNestedForm data-tree) =throws=> #"Subform .*NonForm of .*BadlyNestedForm")))) (specification "add-form-config*" (let [state-map {:person/id {1 {:db/id 1 ::person-name "Joe" :ui/checked? 
true ::phone-numbers [[:phone/id 5]]}}
                     :root-prop 99
                     :phone/id  {5 {:db/id 5 ::phone-number "555-4444" :ui/n 22}}}
        fconfig-id-person [::fs/forms-by-ident (fs/form-id [:person/id 1])]
        fconfig-id-phone  [::fs/forms-by-ident (fs/form-id [:phone/id 5])]]
    (behavior "for an entity without form-config"
      (let [configured-db (fs/add-form-config* state-map Person [:person/id 1])]
        (assertions
          "Adds form configuration to normalized tables"
          (get-in configured-db [:person/id 1 ::fs/config]) => fconfig-id-person
          (get-in configured-db [:phone/id 5 ::fs/config]) => fconfig-id-phone
          (get-in configured-db fconfig-id-person) =fn=> (fn [c] (contains? c ::fs/id))
          "leaves existing (non-form) data alone"
          (get-in configured-db [:person/id 1 :ui/checked?]) => true
          (get-in configured-db [:phone/id 5 :ui/n]) => 22)))
    (behavior "for an entity with form-config"
      (component "without destructive parameter"
        (let [configured-db (-> state-map
                              (assoc-in [:person/id 1 ::fs/config] {::stub true})
                              (assoc-in [:phone/id 5 ::fs/config] {::stub true})
                              (fs/add-form-config* Person [:person/id 1]))]
          (assertions
            "Form config remains unchanged"
            (get-in configured-db [:person/id 1 ::fs/config]) => {::stub true}
            (get-in configured-db [:phone/id 5 ::fs/config]) => {::stub true})))
      (component "with destructive parameter"
        (let [configured-db (-> state-map
                              (assoc-in [:person/id 1 ::fs/config] {:stub true})
                              (assoc-in [:phone/id 5 ::fs/config] {::stub true})
                              (fs/add-form-config* Person [:person/id 1] {:destructive? true}))]
          (assertions
            "Adds form configuration to normalized tables"
            (get-in configured-db [:person/id 1 ::fs/config]) => fconfig-id-person
            (get-in configured-db [:phone/id 5 ::fs/config]) => fconfig-id-phone
            (get-in configured-db fconfig-id-person) =fn=> (fn [c] (contains? c ::fs/id))))))))

(specification "delete-form-state*"
  (let [state-map     {:person/id {1 {:db/id          1 ::person-name "Joe" :ui/checked? true
                                      ::phone-numbers [[:phone/id 5]]}}
                       :root-prop 99
                       :phone/id  {5 {:db/id 5 ::phone-number "555-4444" :ui/n 22}}}
        configured-db (fs/add-form-config* state-map Person [:person/id 1])]
    (assertions
      "Removes form states of multiple entity-idents"
      (-> configured-db
        (fs/delete-form-state* [[:person/id 1] [:phone/id 5]])
        ::fs/forms-by-ident) => {}
      "Removes form states of one entity-ident at a time"
      (-> configured-db
        (fs/delete-form-state* [:person/id 1])
        (fs/delete-form-state* [:phone/id 5])
        ::fs/forms-by-ident) => {})))

(let [locale                             {:db/id 22 ::country :US}
      locale                             (fs/add-form-config Locale locale)
      phone-numbers                      [{:db/id 2 ::phone-number "555-1212" ::locale locale}
                                          {:db/id 3 ::phone-number "555-1212"}]
      phone-number-forms                 (mapv #(fs/add-form-config Phone %) phone-numbers)
      person                             {:db/id 1 ::person-name "Bo" ::phone-numbers phone-number-forms}
      person-form                        (fs/add-form-config Person person)
      state-map                          (fnorm/tree->db [{:the-person (comp/get-query Person)}]
                                           {:the-person person-form} true)
      validated-person                   (-> person-form
                                           (assoc-in [::fs/config ::fs/complete?] #{::person-name ::person-age})
                                           (assoc-in [::phone-numbers 0 ::fs/config ::fs/complete?] #{::phone-number})
                                           (assoc-in [::phone-numbers 0 ::locale ::fs/config ::fs/complete?] #{::country})
                                           (assoc-in [::phone-numbers 1 ::fs/config ::fs/complete?] #{::phone-number}))
      person-with-incomplete-name        (assoc-in validated-person [::fs/config ::fs/complete?] #{})
      person-with-incomplete-nested-form (assoc-in validated-person [::phone-numbers 0 ::locale ::fs/config ::fs/complete?]
#{}) person-with-invalid-name (assoc validated-person ::person-name "") person-with-invalid-nested-phone-locale (assoc-in validated-person [::phone-numbers 0 ::locale ::country] "England") person-ui-tree (fn [state id] (get (fdn/db->tree [{[:person/id id] (comp/get-query Person)}] state state) [:person/id id])) new-phone-id (tempid/tempid) new-phone-number {:db/id new-phone-id ::phone-number "444-111-3333"} existing-phone-number {:db/id 10 ::phone-number "444-111-3333"} new-phone-ident (comp/get-ident Phone new-phone-number) formified-phone (fs/add-form-config Phone new-phone-number) edited-form-state-map (-> state-map (assoc-in [:phone/id new-phone-id] formified-phone) (assoc-in [:person/id 1 ::person-name] "New Name") (update-in [:person/id 1 ::phone-numbers] conj new-phone-ident) (assoc-in [:phone/id 3 ::phone-number] "555-9999")) new-person-id (tempid/tempid) new-person-to-many (fs/add-form-config Person {:db/id new-person-id ::person-name "New" ::person-age 22 ::phone-numbers [new-phone-number]}) new-person-to-one (fs/add-form-config Person {:db/id new-person-id ::person-name "New" ::person-age 22 ::phone-numbers new-phone-number}) existing-person-no-child (fs/add-form-config Person {:db/id 1 ::person-name "Existing" ::person-age 22}) existing-person-to-one (fs/add-form-config Person {:db/id 1 ::person-name "Existing" ::person-age 22 ::phone-numbers existing-phone-number})] (specification "dirty-fields" (behavior "(as delta)" (let [delta (fs/dirty-fields (person-ui-tree edited-form-state-map 1) true)] (assertions "Reports all fields of any entity with a temporary ID" (get-in delta [new-phone-ident ::phone-number :after]) => "444-111-3333" "Reports the modified fields of entities with a regular ID" (get-in delta [[:person/id 1] ::person-name :before]) => "Bo" (get-in delta [[:person/id 1] ::person-name :after]) => "New Name" (get-in delta [[:phone/id 3] ::phone-number :before]) => "555-1212" (get-in delta [[:phone/id 3] ::phone-number :after]) => "555-9999" "Includes the list of changes to subform idents" (get-in delta [[:person/id 1] ::phone-numbers :before]) => [[:phone/id 2] [:phone/id 3]] (get-in delta [[:person/id 1] ::phone-numbers :after]) => [[:phone/id 2] [:phone/id 3] [:phone/id new-phone-id]]))) (behavior "(not as delta)" (let [delta (fs/dirty-fields (person-ui-tree edited-form-state-map 1) false)] (assertions "Reports all fields of any entity with a temporary ID" (get-in delta [new-phone-ident ::phone-number]) => "444-111-3333" "Reports the modified fields of entities with a regular ID" (get-in delta [[:person/id 1] ::person-name]) => "New Name" (get-in delta [[:phone/id 3] ::phone-number]) => "555-9999" "Includes the list of changes to subform idents" (get-in delta [[:person/id 1] ::phone-numbers]) => [[:phone/id 2] [:phone/id 3] [:phone/id new-phone-id]]))) (behavior "(new-entity? flag)" (let [delta (fs/dirty-fields existing-person-to-one true {:new-entity? 
true})]
      (assertions
        "Reports all entity fields"
        (get-in delta [[:person/id 1] ::person-name :after]) => "Existing"
        (get-in delta [[:person/id 1] ::person-age :after]) => 22
        "Reports all nested entity fields"
        (get-in delta [[:phone/id 10] ::phone-number :after]) => "444-111-3333")))
  (behavior "Brand new forms with relations"
    (assertions
      "Includes subform idents"
      (get-in (fs/dirty-fields new-person-to-many false) [[:person/id new-person-id] ::phone-numbers]) => [[:phone/id new-phone-id]]
      (get-in (fs/dirty-fields new-person-to-one false) [[:person/id new-person-id] ::phone-numbers]) => [:phone/id new-phone-id]))
  (behavior "Existing forms with empty relations"
    (assertions
      "Report empty list of changes"
      (fs/dirty-fields existing-person-no-child false) => {}
      (fs/dirty-fields existing-person-no-child true) => {})))

(specification "dirty?"
  (behavior "is a UI (tree) operation for checking if the form has been modified from pristine"
    (let [new-phone-number      {:db/id 4 ::phone-number "888-1212"}
          new-phone-number-form (fs/add-form-config Phone new-phone-number)]
      (assertions
        "is false if there are no changes"
        (fs/dirty? person-form) => false
        "is true if the data has changed in the top-level form"
        (fs/dirty? (assoc person-form ::person-name "New name")) => true
        "is true if any subform item has changed"
        (fs/dirty? (assoc-in person-form [::phone-numbers 0 ::phone-number] "555-1111")) => true
        (fs/dirty? (assoc-in person-form [::phone-numbers 0 ::locale ::country] :MX)) => true
        (fs/dirty? (assoc-in person-form [::phone-numbers 1 ::phone-number] "555-1111")) => true
        "is true if a new subform item is added"
        (fs/dirty? (update person-form ::phone-numbers conj new-phone-number-form)) => true
        "is true if a new subform item is removed"
        (fs/dirty? (assoc person-form ::phone-numbers [(first phone-number-forms)])) => true))))

(specification "get-spec-validity"
  (behavior "is a UI (tree) operation for checking if the form (or fields) are valid. It:"
    (assertions
      "returns :unchecked if the fields have not been interacted with"
      (fs/get-spec-validity person-form) => :unchecked
      "returns :valid if all fields are complete and valid"
      (fs/get-spec-validity validated-person) => :valid
      "returns :unchecked if any field is not marked as complete"
      (fs/get-spec-validity person-with-incomplete-name) => :unchecked
      "returns :unchecked if any NESTED fields are not marked as complete"
      (fs/get-spec-validity person-with-incomplete-nested-form) => :unchecked
      "returns :invalid if any top-level property is invalid"
      (fs/get-spec-validity person-with-invalid-name) => :invalid
      "returns :invalid if any nested property is invalid"
      (fs/get-spec-validity person-with-invalid-nested-phone-locale) => :invalid)))

(specification "valid-spec?"
  (assertions
    "Returns true if validity is :valid"
    (fs/valid-spec? validated-person) => true
    "Returns false if validity is :unchecked"
    (fs/valid-spec? person-with-incomplete-nested-form) => false
    "Returns false if validity is :invalid"
    (fs/valid-spec? person-with-invalid-name) => false))

(specification "checked?"
  (assertions
    "Returns true if validity is :valid or :invalid"
    (fs/checked? validated-person) => true
    (fs/checked? person-with-invalid-name) => true
    (fs/checked? person-with-invalid-nested-phone-locale) => true
    "Returns false if validity is :unchecked"
    (fs/checked? person-with-incomplete-nested-form) => false))

(specification "invalid?"
  (assertions
    "Returns true if validity is :invalid"
    (fs/invalid-spec? person-with-invalid-name) => true
    (fs/invalid-spec?
person-with-invalid-nested-phone-locale) => true
    "Returns false if validity is :unchecked"
    (fs/invalid-spec? person-with-incomplete-nested-form) => false
    "Returns false if validity is :valid"
    (fs/invalid-spec? validated-person) => false))

(specification "update-forms"
  (behavior "Allows one to traverse a nested form set in the app state database and apply xforms to the form and config"
    (let [updated-state (fs/update-forms state-map
                          (fn [e c] [(assoc e ::touched true) (assoc c ::touched true)]) [:person/id 1])]
      (assertions
        "Touches the top-level form config"
        (get-in updated-state [::fs/forms-by-ident (fs/form-id [:person/id 1]) ::touched]) => true
        "Touches the nested form configs"
        (get-in updated-state [::fs/forms-by-ident (fs/form-id [:phone/id 2]) ::touched]) => true
        (get-in updated-state [::fs/forms-by-ident (fs/form-id [:phone/id 3]) ::touched]) => true
        (get-in updated-state [::fs/forms-by-ident (fs/form-id [:locale/by-id 22]) ::touched]) => true
        "Touches the top-level entity"
        (get-in updated-state [:person/id 1 ::touched]) => true
        "Touches the nested form entities"
        (get-in updated-state [:phone/id 2 ::touched]) => true
        (get-in updated-state [:phone/id 3 ::touched]) => true
        (get-in updated-state [:locale/by-id 22 ::touched]) => true)))
  (component "Accidental form configuration loops"
    (let [TraitTypeForm1 (rc/nc [:trait-type/id :trait-type/title fs/form-config-join]
                           {:componentName ::TT1 :form-fields #{:trait-type/title}})
          TraitForm1     (rc/nc [:trait/id :trait/title {:trait/trait-type (comp/get-query TraitTypeForm1)} fs/form-config-join]
                           {:componentName ::TF1 :form-fields #{:trait/title :trait/trait-type}})
          ; meant to track the ident as a picker in the context
          TraitTypeForm2 (rc/nc [:trait-type/id :trait-type/title {:trait-type/traits (comp/get-query TraitForm1)} fs/form-config-join]
                           {:componentName ::TTF2 :form-fields #{:trait-type/title :trait-type/traits}})
          state-map      {:trait/id      {1 {:trait/id 1 :trait/title "TraitA" :trait/trait-type [:trait-type/id 100]}
                                          2 {:trait/id 2 :trait/title "TraitB" :trait/trait-type [:trait-type/id 100]}}
                          :trait-type/id {100 {:trait-type/id     100
                                               :trait-type/title  "TT1"
                                               :trait-type/traits [[:trait/id 1] [:trait/id 2]]}}}
          added?         (atom false)
          xforms         (atom 0)]
      (try
        (fs/add-form-config* state-map TraitTypeForm2 [:trait-type/id 100])
        (reset! added? true)
        (catch #?(:cljs :default :clj Throwable) e nil))
      (try
        (let [state-map (fs/add-form-config* state-map TraitTypeForm2 [:trait-type/id 100])]
          (fs/update-forms state-map (fn [entity form-config] (swap! xforms inc) [entity form-config]) [:trait-type/id 100]))
        (catch #?(:cljs :default :clj Throwable) e nil))
      (assertions
        "Adding config to forms that have an accidental loop properly terminates"
        @added? => true
        "An accidental form config loop is properly terminated"
        @xforms => 3))))

(specification "mark-complete*"
  (behavior "is a state map operation that marks field(s) as complete, so validation checks can be applied"
    (let [get-person (fn [state id validate?]
                       (let [validated-state (cond-> state
                                               validate? (fs/mark-complete* [:person/id id]))]
                         (person-ui-tree validated-state id)))]
      (assertions
        "makes the form checked? = true"
        (fs/checked? (get-person state-map 1 false)) => false
        (fs/checked? (get-person state-map 1 true)) => true
        "forms with valid data become valid"
        (fs/valid-spec? (get-person state-map 1 false)) => false
        (fs/valid-spec?
(get-person state-map 1 true)) => true))))

(specification "pristine->entity*"
  (behavior "is a state map operation that recursively undoes any entity state changes that differ from pristine"
    (let [modified-state-map (-> state-map
                               (assoc-in [:phone/id 3 ::phone-number] "111")
                               (assoc-in [:locale/by-id 22 ::country] :UK)
                               (assoc-in [:person/id 1 ::person-age] 42)
                               (assoc-in [:person/id 1 ::person-name] "Bobby"))
          reset-state-map    (fs/pristine->entity* modified-state-map [:person/id 1])
          actual-person      (get-in reset-state-map [:person/id 1])
          expected-person    (get-in state-map [:person/id 1])]
      (assertions
        "The state map was actually modified before the reset"
        (not= modified-state-map state-map) => true
        "Fields that were missing are properly removed"
        (contains? actual-person ::person-age) => false
        "Recursive fields have returned to their original value"
        (get-in reset-state-map [:phone/id 3 ::phone-number]) => "555-1212"
        "Top-level fields that changed have returned to their original values"
        (::person-name actual-person) => (::person-name expected-person)))))

(specification "entity->pristine*"
  (behavior "is a state map operation that recursively updates any entity pristine form state so that the form is no longer dirty"
    (let [modified-state-map  (-> state-map
                                (update-in [:phone/id 3] dissoc ::phone-number)
                                (assoc-in [:locale/by-id 22 ::country] :UK)
                                (assoc-in [:person/id 1 ::person-name] "Bobby"))
          modified-ui-tree    (person-ui-tree modified-state-map 1)
          committed-state-map (fs/entity->pristine* modified-state-map [:person/id 1])
          committed-ui-tree   (person-ui-tree committed-state-map 1)]
      (assertions
        "committing transitions dirty -> clean"
        (fs/dirty? modified-ui-tree) => true
        (fs/dirty? committed-ui-tree) => false
        "The pristine form state has the new data"
        (get-in committed-state-map [::fs/forms-by-ident {:table :person/id, :row 1} ::fs/pristine-state])
        => {::person-name "Bobby" ::phone-numbers [[:phone/id 2] [:phone/id 3]]}
        (get-in committed-state-map [::fs/forms-by-ident {:table :locale/by-id, :row 22} ::fs/pristine-state])
        => {::country :UK}
        "Removes things from the clean version that disappeared"
        (contains?
(get-in committed-state-map [::fs/forms-by-ident {:table :phone/id, :row 3} ::fs/pristine-state]) ::phone-number) => false "the clean version has the updated data" (get-in committed-ui-tree [::person-name]) => "Bobby" (get-in committed-ui-tree [::phone-numbers 0 ::locale ::country]) => :UK))))) (defsc SomeEntity [_ _] {:query [:entity/id] :ident :entity/id}) (defsc FormPickingEntity [_ _] {:query [:form/id :form/field {:form/entity (comp/get-query SomeEntity)} fs/form-config-join] :ident :form/id :form-fields #{:form/field :form/entity}}) (specification "Working with joins to entities that are meant to be selected" (let [initial-form (fs/add-form-config FormPickingEntity {:form/id 1 :form/field "A" :form/entity {:entity/id 22}}) updated-form (assoc initial-form :form/entity {:entity/id 23})] (assertions "Reports no dirty fields on a pristine form" (fs/dirty-fields initial-form true) => {} "Reports the updated target ident if the entity changes" (fs/dirty-fields updated-form true) => {[:form/id 1] {:form/entity {:before [:entity/id 22] :after [:entity/id 23]}}}))) (specification "Working with joins to entities that are meant to be selected (to-many)" (let [initial-form (fs/add-form-config FormPickingEntity {:form/id 1 :form/field "A" :form/entity [{:entity/id 22} {:entity/id 23}]}) first-entity (get-in initial-form [:form/entity 0]) updated-form (assoc initial-form :form/entity [{:entity/id 23}])] (assertions "Skips adding config to sub-entities that have no fields" (contains? first-entity ::fs/config) => false "Reports no dirty fields on a pristine form" (fs/dirty-fields initial-form true) => {} "Reports the updated target ident if the entity changes" (fs/dirty-fields updated-form true) => {[:form/id 1] {:form/entity {:before [[:entity/id 22] [:entity/id 23]] :after [[:entity/id 23]]}}})))
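;; ---------------------------------------------------------------------------
;; Illustrative sketch (not part of the specs above): a minimal round trip
;; through the helpers these tests exercise, using the Person/Phone components
;; defined at the top of this file. The ids and strings below are hypothetical.
;; Wrapped in `comment` so it is never evaluated by the test runner.
(comment
  (let [demo-person (fs/add-form-config Person
                      {:db/id          99
                       ::person-name   "Sample"
                       ::phone-numbers [{:db/id 100 ::phone-number "555-0000"}]})
        edited      (assoc demo-person ::person-name "Edited")]
    ;; A freshly configured (pristine) form reports no changes; an edited copy does.
    (fs/dirty? demo-person)            ;; => false
    (fs/dirty? edited)                 ;; => true
    ;; Validity stays :unchecked until fields are marked complete
    ;; (fs/mark-complete* does that against the normalized state map).
    (fs/get-spec-validity edited)      ;; => :unchecked
    ;; As a delta, dirty-fields is the shape typically sent with a remote save,
    ;; e.g. {[:person/id 99] {::person-name {:before "Sample" :after "Edited"}}}
    (fs/dirty-fields edited true)))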
(get-person state-map 1 true)) => true)))) (specification "pristine->entity*" (behavior "is a state map operation that recursively undoes any entity state changes that differ from pristine" (let [modified-state-map (-> state-map (assoc-in [:phone/id 3 ::phone-number] "111") (assoc-in [:locale/by-id 22 ::country] :UK) (assoc-in [:person/id 1 ::person-age] 42) (assoc-in [:person/id 1 ::person-name] "PI:NAME:<NAME>END_PI")) reset-state-map (fs/pristine->entity* modified-state-map [:person/id 1]) actual-person (get-in reset-state-map [:person/id 1]) expected-person (get-in state-map [:person/id 1])] (assertions "The modified flag returns to normal" (not= modified-state-map state-map) => true "Fields that were missing are properly removed" (contains? actual-person ::person-age) => false "Recursive fields have returned to their original value" (get-in reset-state-map [:phone/id 3 ::phone-number]) => "555-1212" "Top-level Fields that changed have returned to their original values" (::person-name actual-person) => (::person-name expected-person))))) (specification "entity->pristine*" (behavior "is a state map operation that recursively updates any entity pristine form state so that the form is no longer dirty" (let [modified-state-map (-> state-map (update-in [:phone/id 3] dissoc ::phone-number) (assoc-in [:locale/by-id 22 ::country] :UK) (assoc-in [:person/id 1 ::person-name] "PI:NAME:<NAME>END_PI")) modified-ui-tree (person-ui-tree modified-state-map 1) committed-state-map (fs/entity->pristine* modified-state-map [:person/id 1]) committed-ui-tree (person-ui-tree committed-state-map 1)] (assertions "committing transitions dirty -> clean" (fs/dirty? modified-ui-tree) => true (fs/dirty? committed-ui-tree) => false "The pristine form state has the new data" (get-in committed-state-map [::fs/forms-by-ident {:table :person/id, :row 1} ::fs/pristine-state]) => {::person-name "PI:NAME:<NAME>END_PI" ::phone-numbers [[:phone/id 2] [:phone/id 3]]} (get-in committed-state-map [::fs/forms-by-ident {:table :locale/by-id, :row 22} ::fs/pristine-state]) => {::country :UK} "Removes things from the clean version that disappeared" (contains? 
(get-in committed-state-map [::fs/forms-by-ident {:table :phone/id, :row 3} ::fs/pristine-state]) ::phone-number) => false "the clean version has the updated data" (get-in committed-ui-tree [::person-name]) => "PI:NAME:<NAME>END_PI" (get-in committed-ui-tree [::phone-numbers 0 ::locale ::country]) => :UK))))) (defsc SomeEntity [_ _] {:query [:entity/id] :ident :entity/id}) (defsc FormPickingEntity [_ _] {:query [:form/id :form/field {:form/entity (comp/get-query SomeEntity)} fs/form-config-join] :ident :form/id :form-fields #{:form/field :form/entity}}) (specification "Working with joins to entities that are meant to be selected" (let [initial-form (fs/add-form-config FormPickingEntity {:form/id 1 :form/field "A" :form/entity {:entity/id 22}}) updated-form (assoc initial-form :form/entity {:entity/id 23})] (assertions "Reports no dirty fields on a pristine form" (fs/dirty-fields initial-form true) => {} "Reports the updated target ident if the entity changes" (fs/dirty-fields updated-form true) => {[:form/id 1] {:form/entity {:before [:entity/id 22] :after [:entity/id 23]}}}))) (specification "Working with joins to entities that are meant to be selected (to-many)" (let [initial-form (fs/add-form-config FormPickingEntity {:form/id 1 :form/field "A" :form/entity [{:entity/id 22} {:entity/id 23}]}) first-entity (get-in initial-form [:form/entity 0]) updated-form (assoc initial-form :form/entity [{:entity/id 23}])] (assertions "Skips adding config to sub-entities that have no fields" (contains? first-entity ::fs/config) => false "Reports no dirty fields on a pristine form" (fs/dirty-fields initial-form true) => {} "Reports the updated target ident if the entity changes" (fs/dirty-fields updated-form true) => {[:form/id 1] {:form/entity {:before [[:entity/id 22] [:entity/id 23]] :after [[:entity/id 23]]}}})))
[ { "context": "(ns ^{:author \"Abhinav Sharma (@abhi18av)\"\n :doc \"Wraps the api for FIX", "end": 29, "score": 0.9998809695243835, "start": 15, "tag": "NAME", "value": "Abhinav Sharma" }, { "context": "(ns ^{:author \"Abhinav Sharma (@abhi18av)\"\n :doc \"Wraps the api for FIXME\"}\n\n app", "end": 40, "score": 0.9993098378181458, "start": 30, "tag": "USERNAME", "value": "(@abhi18av" } ]
src/app/external_api/scratch.cljs
abhi18av/fulcro-frontend-template
1
(ns ^{:author "Abhinav Sharma (@abhi18av)" :doc "Wraps the api for FIXME"} app.external-api.scratch (:require [app.secrets :as secrets] [app.utils :as utils :refer [namespaced-keys pull-namespaced pull-key update-if]] [clojure.core.async :refer [go timeout <! take!]] [clojure.string :as str] [com.wsscode.common.async-cljs :refer [go-catch <? let-chan chan? <?maybe <!maybe go-promise]] [com.wsscode.pathom.diplomat.http.fetch :as p.http.fetch] [com.wsscode.pathom.core :as p] [com.wsscode.pathom.connect :as pc] [com.wsscode.pathom.connect.graphql :as pcg] [com.wsscode.pathom.diplomat.http :as p.http])) (def memory (atom {})) (defn api [{::keys [endpoint]}] (let [cors-proxy "https://cors-anywhere.herokuapp.com/" api-url "FIXME"] (take! (p.http.fetch/request-async {::p.http/url (str cors-proxy api-url endpoint) ::p.http/headers {:Authorization secrets/token} ::p.http/as ::p.http/json ::p.http/method "get"}) #(reset! memory (:com.wsscode.pathom.diplomat.http/body %))))) @memory (api {::endpoint "FIXME"})
122861
(ns ^{:author "<NAME> (@abhi18av)" :doc "Wraps the api for FIXME"} app.external-api.scratch (:require [app.secrets :as secrets] [app.utils :as utils :refer [namespaced-keys pull-namespaced pull-key update-if]] [clojure.core.async :refer [go timeout <! take!]] [clojure.string :as str] [com.wsscode.common.async-cljs :refer [go-catch <? let-chan chan? <?maybe <!maybe go-promise]] [com.wsscode.pathom.diplomat.http.fetch :as p.http.fetch] [com.wsscode.pathom.core :as p] [com.wsscode.pathom.connect :as pc] [com.wsscode.pathom.connect.graphql :as pcg] [com.wsscode.pathom.diplomat.http :as p.http])) (def memory (atom {})) (defn api [{::keys [endpoint]}] (let [cors-proxy "https://cors-anywhere.herokuapp.com/" api-url "FIXME"] (take! (p.http.fetch/request-async {::p.http/url (str cors-proxy api-url endpoint) ::p.http/headers {:Authorization secrets/token} ::p.http/as ::p.http/json ::p.http/method "get"}) #(reset! memory (:com.wsscode.pathom.diplomat.http/body %))))) @memory (api {::endpoint "FIXME"})
true
(ns ^{:author "PI:NAME:<NAME>END_PI (@abhi18av)" :doc "Wraps the api for FIXME"} app.external-api.scratch (:require [app.secrets :as secrets] [app.utils :as utils :refer [namespaced-keys pull-namespaced pull-key update-if]] [clojure.core.async :refer [go timeout <! take!]] [clojure.string :as str] [com.wsscode.common.async-cljs :refer [go-catch <? let-chan chan? <?maybe <!maybe go-promise]] [com.wsscode.pathom.diplomat.http.fetch :as p.http.fetch] [com.wsscode.pathom.core :as p] [com.wsscode.pathom.connect :as pc] [com.wsscode.pathom.connect.graphql :as pcg] [com.wsscode.pathom.diplomat.http :as p.http])) (def memory (atom {})) (defn api [{::keys [endpoint]}] (let [cors-proxy "https://cors-anywhere.herokuapp.com/" api-url "FIXME"] (take! (p.http.fetch/request-async {::p.http/url (str cors-proxy api-url endpoint) ::p.http/headers {:Authorization secrets/token} ::p.http/as ::p.http/json ::p.http/method "get"}) #(reset! memory (:com.wsscode.pathom.diplomat.http/body %))))) @memory (api {::endpoint "FIXME"})
[ { "context": "! *unchecked-math* :warn-on-boxed)\n(ns ^{:author \"John Alan McDonald, Kristina Lisa Klinkner\" :date \"2016-12-30\"\n ", "end": 105, "score": 0.9998700022697449, "start": 87, "tag": "NAME", "value": "John Alan McDonald" }, { "context": ":warn-on-boxed)\n(ns ^{:author \"John Alan McDonald, Kristina Lisa Klinkner\" :date \"2016-12-30\"\n :doc \"Greedy decision t", "end": 129, "score": 0.9998680353164673, "start": 107, "tag": "NAME", "value": "Kristina Lisa Klinkner" } ]
src/main/clojure/taiga/split/api.clj
wahpenayo/taiga
4
(set! *warn-on-reflection* true) (set! *unchecked-math* :warn-on-boxed) (ns ^{:author "John Alan McDonald, Kristina Lisa Klinkner" :date "2016-12-30" :doc "Greedy decision tree splitting." } taiga.split.api (:require [zana.api :as z] [taiga.split.numerical.categorical.scored :as numerical-categorical] [taiga.split.numerical.categorical.weighted-scored :as weighted-categorical] [taiga.split.numerical.eenum.simple :as numerical-enum] [taiga.split.numerical.eenum.weighted :as weighted-enum] [taiga.split.numerical.numerical.xy :as numerical-numerical] [taiga.split.numerical.numerical.xyw :as numerical-weighted] [taiga.split.object.categorical.heuristic :as object-categorical] [taiga.split.object.numerical.xy :as object-numerical]) (:import [java.util List] [clojure.lang IFn IFn$OD] [zana.java.accumulator Accumulator])) ;;------------------------------------------------------------------------------ ;; TODO: dis-entangle mincount test from cost functions, or at least make it ;; easier to just check data. (defn mincount-split? ([^long mincount ^zana.java.accumulator.Accumulator cost] (<= mincount (.netCount cost))) ([^long mincount ^zana.java.accumulator.Accumulator cost0 ^zana.java.accumulator.Accumulator cost1] (and (mincount-split? mincount cost0) (mincount-split? mincount cost1)))) ;;------------------------------------------------------------------------------ (defn- check-best-split-options [options] (assert (not (empty? (:predictors options))) (print-str "No :predictors in:" (z/pprint-map-str options))) (let [^Iterable data (:data options) ^IFn y (:ground-truth options) ^IFn$OD w (:weight options) cost-factory (:cost-factory options)] (assert (not (empty? data)) (print-str "No :data\n" (z/pprint-map-str options))) (assert (instance? IFn y) (print-str "No :ground-truth\n" (z/pprint-map-str options))) (assert (or (not w) (instance? IFn$OD w)) (print-str "invalid :weight\n" (z/pprint-map-str options))) (assert (ifn? cost-factory)))) ;;------------------------------------------------------------------------------ (def enum-valued? (memoize z/enum-valued?)) ;;------------------------------------------------------------------------------ (defn- attribute-split [options] (let [^IFn y (:ground-truth options) [_ x] (:this-predictor options)] (assert (or (instance? IFn$OD y) (nil? (:weight options)))) (if (instance? IFn$OD y) (cond (enum-valued? x) (if (:weight options) (weighted-enum/split options) (numerical-enum/split options)) (z/numerical? x) (if (:weight options) (numerical-weighted/split options) (numerical-numerical/split options)) :else (if (:weight options) (weighted-categorical/split options) (numerical-categorical/split options))) (if (z/numerical? x) (object-numerical/split options) (object-categorical/split options))))) ;;------------------------------------------------------------------------------ (defn- allocate-cache [^IFn y ^IFn$OD w ^List data] (let [n (z/count data)] (if w (do (assert (instance? IFn$OD w)) (assert (instance? IFn$OD y)) (numerical-weighted/allocate-cache n)) (if (instance? IFn$OD y) (numerical-numerical/allocate-cache n) #_(object-numerical/allocate-cache y data) (object-numerical/allocate-cache n))))) ;;------------------------------------------------------------------------------ (defn best-split [options] ;; no need to split if y constant (when-not (z/singular? 
(:ground-truth options) (:data options)) (check-best-split-options options) (let [^List data (:data options) n (z/count data) ^IFn y (:ground-truth options) ^IFn$OD w (:weight options) ;; cache array, mutated during numerical split optimization options (assoc options :xys (allocate-cache y w data)) xs (:predictors options)] (if (> (* 2 (int (:mincount options))) n) nil (loop [xs xs pmin nil cmin Double/POSITIVE_INFINITY] (if (empty? xs) pmin (let [x (first xs) s (attribute-split (assoc options :this-predictor x)) c (double (:cost s))] (if (< c cmin) (recur (rest xs) (:split s) c) (recur (rest xs) pmin cmin))))))))) ;;------------------------------------------------------------------------------
33028
(set! *warn-on-reflection* true) (set! *unchecked-math* :warn-on-boxed) (ns ^{:author "<NAME>, <NAME>" :date "2016-12-30" :doc "Greedy decision tree splitting." } taiga.split.api (:require [zana.api :as z] [taiga.split.numerical.categorical.scored :as numerical-categorical] [taiga.split.numerical.categorical.weighted-scored :as weighted-categorical] [taiga.split.numerical.eenum.simple :as numerical-enum] [taiga.split.numerical.eenum.weighted :as weighted-enum] [taiga.split.numerical.numerical.xy :as numerical-numerical] [taiga.split.numerical.numerical.xyw :as numerical-weighted] [taiga.split.object.categorical.heuristic :as object-categorical] [taiga.split.object.numerical.xy :as object-numerical]) (:import [java.util List] [clojure.lang IFn IFn$OD] [zana.java.accumulator Accumulator])) ;;------------------------------------------------------------------------------ ;; TODO: dis-entangle mincount test from cost functions, or at least make it ;; easier to just check data. (defn mincount-split? ([^long mincount ^zana.java.accumulator.Accumulator cost] (<= mincount (.netCount cost))) ([^long mincount ^zana.java.accumulator.Accumulator cost0 ^zana.java.accumulator.Accumulator cost1] (and (mincount-split? mincount cost0) (mincount-split? mincount cost1)))) ;;------------------------------------------------------------------------------ (defn- check-best-split-options [options] (assert (not (empty? (:predictors options))) (print-str "No :predictors in:" (z/pprint-map-str options))) (let [^Iterable data (:data options) ^IFn y (:ground-truth options) ^IFn$OD w (:weight options) cost-factory (:cost-factory options)] (assert (not (empty? data)) (print-str "No :data\n" (z/pprint-map-str options))) (assert (instance? IFn y) (print-str "No :ground-truth\n" (z/pprint-map-str options))) (assert (or (not w) (instance? IFn$OD w)) (print-str "invalid :weight\n" (z/pprint-map-str options))) (assert (ifn? cost-factory)))) ;;------------------------------------------------------------------------------ (def enum-valued? (memoize z/enum-valued?)) ;;------------------------------------------------------------------------------ (defn- attribute-split [options] (let [^IFn y (:ground-truth options) [_ x] (:this-predictor options)] (assert (or (instance? IFn$OD y) (nil? (:weight options)))) (if (instance? IFn$OD y) (cond (enum-valued? x) (if (:weight options) (weighted-enum/split options) (numerical-enum/split options)) (z/numerical? x) (if (:weight options) (numerical-weighted/split options) (numerical-numerical/split options)) :else (if (:weight options) (weighted-categorical/split options) (numerical-categorical/split options))) (if (z/numerical? x) (object-numerical/split options) (object-categorical/split options))))) ;;------------------------------------------------------------------------------ (defn- allocate-cache [^IFn y ^IFn$OD w ^List data] (let [n (z/count data)] (if w (do (assert (instance? IFn$OD w)) (assert (instance? IFn$OD y)) (numerical-weighted/allocate-cache n)) (if (instance? IFn$OD y) (numerical-numerical/allocate-cache n) #_(object-numerical/allocate-cache y data) (object-numerical/allocate-cache n))))) ;;------------------------------------------------------------------------------ (defn best-split [options] ;; no need to split if y constant (when-not (z/singular? 
(:ground-truth options) (:data options)) (check-best-split-options options) (let [^List data (:data options) n (z/count data) ^IFn y (:ground-truth options) ^IFn$OD w (:weight options) ;; cache array, mutated during numerical split optimization options (assoc options :xys (allocate-cache y w data)) xs (:predictors options)] (if (> (* 2 (int (:mincount options))) n) nil (loop [xs xs pmin nil cmin Double/POSITIVE_INFINITY] (if (empty? xs) pmin (let [x (first xs) s (attribute-split (assoc options :this-predictor x)) c (double (:cost s))] (if (< c cmin) (recur (rest xs) (:split s) c) (recur (rest xs) pmin cmin))))))))) ;;------------------------------------------------------------------------------
true
(set! *warn-on-reflection* true) (set! *unchecked-math* :warn-on-boxed) (ns ^{:author "PI:NAME:<NAME>END_PI, PI:NAME:<NAME>END_PI" :date "2016-12-30" :doc "Greedy decision tree splitting." } taiga.split.api (:require [zana.api :as z] [taiga.split.numerical.categorical.scored :as numerical-categorical] [taiga.split.numerical.categorical.weighted-scored :as weighted-categorical] [taiga.split.numerical.eenum.simple :as numerical-enum] [taiga.split.numerical.eenum.weighted :as weighted-enum] [taiga.split.numerical.numerical.xy :as numerical-numerical] [taiga.split.numerical.numerical.xyw :as numerical-weighted] [taiga.split.object.categorical.heuristic :as object-categorical] [taiga.split.object.numerical.xy :as object-numerical]) (:import [java.util List] [clojure.lang IFn IFn$OD] [zana.java.accumulator Accumulator])) ;;------------------------------------------------------------------------------ ;; TODO: dis-entangle mincount test from cost functions, or at least make it ;; easier to just check data. (defn mincount-split? ([^long mincount ^zana.java.accumulator.Accumulator cost] (<= mincount (.netCount cost))) ([^long mincount ^zana.java.accumulator.Accumulator cost0 ^zana.java.accumulator.Accumulator cost1] (and (mincount-split? mincount cost0) (mincount-split? mincount cost1)))) ;;------------------------------------------------------------------------------ (defn- check-best-split-options [options] (assert (not (empty? (:predictors options))) (print-str "No :predictors in:" (z/pprint-map-str options))) (let [^Iterable data (:data options) ^IFn y (:ground-truth options) ^IFn$OD w (:weight options) cost-factory (:cost-factory options)] (assert (not (empty? data)) (print-str "No :data\n" (z/pprint-map-str options))) (assert (instance? IFn y) (print-str "No :ground-truth\n" (z/pprint-map-str options))) (assert (or (not w) (instance? IFn$OD w)) (print-str "invalid :weight\n" (z/pprint-map-str options))) (assert (ifn? cost-factory)))) ;;------------------------------------------------------------------------------ (def enum-valued? (memoize z/enum-valued?)) ;;------------------------------------------------------------------------------ (defn- attribute-split [options] (let [^IFn y (:ground-truth options) [_ x] (:this-predictor options)] (assert (or (instance? IFn$OD y) (nil? (:weight options)))) (if (instance? IFn$OD y) (cond (enum-valued? x) (if (:weight options) (weighted-enum/split options) (numerical-enum/split options)) (z/numerical? x) (if (:weight options) (numerical-weighted/split options) (numerical-numerical/split options)) :else (if (:weight options) (weighted-categorical/split options) (numerical-categorical/split options))) (if (z/numerical? x) (object-numerical/split options) (object-categorical/split options))))) ;;------------------------------------------------------------------------------ (defn- allocate-cache [^IFn y ^IFn$OD w ^List data] (let [n (z/count data)] (if w (do (assert (instance? IFn$OD w)) (assert (instance? IFn$OD y)) (numerical-weighted/allocate-cache n)) (if (instance? IFn$OD y) (numerical-numerical/allocate-cache n) #_(object-numerical/allocate-cache y data) (object-numerical/allocate-cache n))))) ;;------------------------------------------------------------------------------ (defn best-split [options] ;; no need to split if y constant (when-not (z/singular? 
(:ground-truth options) (:data options)) (check-best-split-options options) (let [^List data (:data options) n (z/count data) ^IFn y (:ground-truth options) ^IFn$OD w (:weight options) ;; cache array, mutated during numerical split optimization options (assoc options :xys (allocate-cache y w data)) xs (:predictors options)] (if (> (* 2 (int (:mincount options))) n) nil (loop [xs xs pmin nil cmin Double/POSITIVE_INFINITY] (if (empty? xs) pmin (let [x (first xs) s (attribute-split (assoc options :this-predictor x)) c (double (:cost s))] (if (< c cmin) (recur (rest xs) (:split s) c) (recur (rest xs) pmin cmin))))))))) ;;------------------------------------------------------------------------------
[ { "context": "(testing \"Successful creation\"\n (let [tag-key \"tag1\"\n tag (tags/make-tag {:tag-key tag-key})", "end": 3205, "score": 0.993882417678833, "start": 3201, "tag": "KEY", "value": "tag1" }, { "context": "ag {:tag-key \"MixedCaseTagKey\"})\n tag-key \"mixedcasetagkey\"\n token (e/login (s/context) \"user1\")\n ", "end": 5731, "score": 0.993270218372345, "start": 5716, "tag": "KEY", "value": "mixedcasetagkey" }, { "context": "))))))\n\n(deftest update-tag-test\n (let [tag-key \"tag1\"\n tag (tags/make-tag {:tag-key tag-key})\n ", "end": 6669, "score": 0.972594678401947, "start": 6665, "tag": "KEY", "value": "tag1" }, { "context": "(deftest update-tag-failure-test\n (let [tag-key \"tag1\"\n tag (tags/make-tag {:tag-key tag-key})\n ", "end": 7834, "score": 0.9981905221939087, "start": 7830, "tag": "KEY", "value": "tag1" }, { "context": "))))))\n\n(deftest delete-tag-test\n (let [tag-key \"tag1\"\n tag (tags/make-tag {:tag-key tag-key})\n ", "end": 9833, "score": 0.9982311725616455, "start": 9829, "tag": "KEY", "value": "tag1" } ]
system-int-test/test/cmr/system_int_test/search/tagging/tag_crud_test.clj
sxu123/Common-Metadata-Repository
0
(ns cmr.system-int-test.search.tagging.tag-crud-test "This tests the CMR Search API's tagging capabilities" (:require [clojure.test :refer :all] [clojure.string :as str] [cmr.common.util :refer [are2]] [cmr.system-int-test.utils.ingest-util :as ingest] [cmr.system-int-test.utils.search-util :as search] [cmr.system-int-test.utils.index-util :as index] [cmr.system-int-test.utils.tag-util :as tags] [cmr.system-int-test.data2.core :as d] [cmr.system-int-test.data2.collection :as dc] [cmr.mock-echo.client.echo-util :as e] [cmr.system-int-test.system :as s])) (use-fixtures :each (join-fixtures [(ingest/reset-fixture {"provguid1" "PROV1"}) tags/grant-all-tag-fixture])) (def field-maxes "A map of fields to their max lengths" {:tag_key 1030 :description 4000}) (deftest create-tag-validation-test (testing "Create without token" (is (= {:status 401 :errors ["Tags cannot be modified without a valid user token."]} (tags/create-tag nil (tags/make-tag))))) (testing "Create with unknown token" (is (= {:status 401 :errors ["Token ABC in request header does not exist"]} (tags/create-tag "ABC" (tags/make-tag))))) (let [valid-user-token (e/login (s/context) "user1") valid-tag (tags/make-tag)] (testing "Create tag with invalid content type" (is (= {:status 400, :errors ["The mime types specified in the content-type header [application/xml] are not supported."]} (tags/create-tag valid-user-token valid-tag {:http-options {:content-type :xml}})))) (testing "Create tag with invalid tag key" (is (= {:status 422, :errors ["Tag key [a/c] contains '/' character. Tag keys cannot contain this character."]} (tags/create-tag valid-user-token (assoc valid-tag :tag-key "a/c"))))) (testing "Missing field validations" (is (= {:status 400 :errors ["object has missing required properties ([\"tag_key\"])"]} (tags/create-tag valid-user-token (dissoc valid-tag :tag-key))))) (testing "Minimum field length validations" (are [field] (= {:status 400 :errors [(format "/%s string \"\" is too short (length: 0, required minimum: 1)" (name field))]} (tags/create-tag valid-user-token (assoc valid-tag field ""))) :tag_key :description)) (testing "Maximum field length validations" (doseq [[field max-length] field-maxes] (let [long-value (tags/string-of-length (inc max-length))] (is (= {:status 400 :errors [(format "/%s string \"%s\" is too long (length: %d, maximum allowed: %d)" (name field) long-value (inc max-length) max-length)]} (tags/create-tag valid-user-token (assoc valid-tag field long-value))))))))) (deftest create-tag-test (testing "Successful creation" (let [tag-key "tag1" tag (tags/make-tag {:tag-key tag-key}) token (e/login (s/context) "user1") {:keys [status concept-id revision-id]} (tags/create-tag token tag)] (is (= 201 status)) (is concept-id) (is (= 1 revision-id)) (tags/assert-tag-saved (assoc tag :originator-id "user1") "user1" concept-id revision-id) (testing "Creation with an already existing tag-key" (is (= {:status 409 :errors [(format "A tag with tag-key [%s] already exists with concept id [%s]." (:tag-key tag) concept-id)]} (tags/create-tag token tag)))) (testing "tag-key is case-insensitive" (let [{:keys[status errors]} (tags/create-tag token (update tag :tag-key str/upper-case))] (is (= [409 [(format "A tag with tag-key [%s] already exists with concept id [%s]." 
(:tag-key tag) concept-id)]] [status errors])))) (testing "Creation with different tag-key succeeds" (let [response (tags/create-tag token (assoc tag :tag-key "different"))] (is (= 201 (:status response))) (is (not= concept-id (:concept-id response))) (is (= 1 (:revision-id response))))) (testing "Creation of previously deleted tag" (tags/delete-tag token tag-key) (let [new-tag (assoc tag :description "new description") token2 (e/login (s/context) "user2") response (tags/create-tag token2 new-tag)] (is (= {:status 200 :concept-id concept-id :revision-id 3} response)) ;; A tag that was deleted but recreated gets a new originator id. (tags/assert-tag-saved (assoc new-tag :originator-id "user2") "user2" concept-id 3)))) (testing "Create tag with fields at maximum length" (let [tag (into {} (for [[field max-length] field-maxes] [field (tags/string-of-length max-length)]))] (is (= 201 (:status (tags/create-tag (e/login (s/context) "user1") tag))))))) (testing "Creation without optional fields is allowed" (let [tag (dissoc (tags/make-tag {:tag-key "tag-key2"}) :description) token (e/login (s/context) "user1") {:keys [status concept-id revision-id]} (tags/create-tag token tag)] (is (= 201 status)) (is concept-id) (is (= 1 revision-id))))) (deftest get-tag-test (let [tag (tags/make-tag {:tag-key "MixedCaseTagKey"}) tag-key "mixedcasetagkey" token (e/login (s/context) "user1") _ (tags/create-tag token tag) expected-tag (-> tag (update :tag-key str/lower-case) (assoc :originator-id "user1" :status 200))] (testing "Retrieve existing tag, verify tag-key is converted to lowercase" (is (= expected-tag (tags/get-tag tag-key)))) (testing "Retrieve tag with tag-key is case insensitive" (is (= expected-tag (tags/get-tag "MixedCaseTagKey")))) (testing "Retrieve unknown tag" (is (= {:status 404 :errors ["Tag could not be found with tag-key [tag100]"]} (tags/get-tag "Tag100")))) (testing "Retrieve deleted tag" (tags/delete-tag token tag-key) (is (= {:status 404 :errors [(format "Tag with tag-key [%s] was deleted." 
tag-key)]} (tags/get-tag tag-key)))))) (deftest update-tag-test (let [tag-key "tag1" tag (tags/make-tag {:tag-key tag-key}) token (e/login (s/context) "user1") {:keys [concept-id revision-id]} (tags/create-tag token tag)] (testing "Update with originator id" (let [updated-tag (-> tag (update-in [:description] #(str % " updated")) (assoc :originator-id "user1")) token2 (e/login (s/context) "user2") response (tags/update-tag token2 tag-key updated-tag)] (is (= {:status 200 :concept-id concept-id :revision-id 2} response)) (tags/assert-tag-saved updated-tag "user2" concept-id 2))) (testing "Update without originator id" (let [updated-tag (dissoc tag :originator-id) token2 (e/login (s/context) "user2") response (tags/update-tag token2 tag-key updated-tag)] (is (= {:status 200 :concept-id concept-id :revision-id 3} response)) ;; The previous originator id should not change (tags/assert-tag-saved (assoc updated-tag :originator-id "user1") "user2" concept-id 3))))) (deftest update-tag-failure-test (let [tag-key "tag1" tag (tags/make-tag {:tag-key tag-key}) token (e/login (s/context) "user1") {:keys [concept-id revision-id]} (tags/create-tag token tag) ;; The stored updated tag would have user1 in the originator id tag (assoc tag :originator-id "user1")] (testing "Update tag with invalid content type" (is (= {:status 400, :errors ["The mime types specified in the content-type header [application/xml] are not supported."]} (tags/update-tag token tag-key tag {:http-options {:content-type :xml}})))) (testing "Update without token" (is (= {:status 401 :errors ["Tags cannot be modified without a valid user token."]} (tags/update-tag nil tag-key tag)))) (testing "Fields that cannot be changed" (are [field human-name] (= {:status 400 :errors [(format (str "%s cannot be modified. Attempted to change existing value" " [%s] to [updated]") human-name (get tag field))]} (tags/update-tag token tag-key (assoc tag field "updated"))) :tag-key "Tag Key" :originator-id "Originator Id")) (testing "Updates applies JSON validations" (is (= {:status 400 :errors ["/description string \"\" is too short (length: 0, required minimum: 1)"]} (tags/update-tag token concept-id (assoc tag :description ""))))) (testing "Update tag that doesn't exist" (is (= {:status 404 :errors ["Tag could not be found with tag-key [tag2]"]} (tags/update-tag token "tag2" tag)))) (testing "Update deleted tag" (tags/delete-tag token tag-key) (is (= {:status 404 :errors [(format "Tag with tag-key [%s] was deleted." tag-key)]} (tags/update-tag token tag-key tag)))))) (deftest delete-tag-test (let [tag-key "tag1" tag (tags/make-tag {:tag-key tag-key}) token (e/login (s/context) "user1") {:keys [concept-id revision-id]} (tags/create-tag token tag)] (testing "Delete without token" (is (= {:status 401 :errors ["Tags cannot be modified without a valid user token."]} (tags/delete-tag nil tag-key)))) (testing "Delete success" (is (= {:status 200 :concept-id concept-id :revision-id 2} (tags/delete-tag token tag-key))) (tags/assert-tag-deleted tag "user1" concept-id 2)) (testing "Delete tag that was already deleted" (is (= {:status 404 :errors [(format "Tag with tag-key [%s] was deleted." tag-key)]} (tags/delete-tag token tag-key)))) (testing "Delete tag that doesn't exist" (is (= {:status 404 :errors ["Tag could not be found with tag-key [tag2]"]} (tags/delete-tag token "tag2"))))))
113269
(ns cmr.system-int-test.search.tagging.tag-crud-test "This tests the CMR Search API's tagging capabilities" (:require [clojure.test :refer :all] [clojure.string :as str] [cmr.common.util :refer [are2]] [cmr.system-int-test.utils.ingest-util :as ingest] [cmr.system-int-test.utils.search-util :as search] [cmr.system-int-test.utils.index-util :as index] [cmr.system-int-test.utils.tag-util :as tags] [cmr.system-int-test.data2.core :as d] [cmr.system-int-test.data2.collection :as dc] [cmr.mock-echo.client.echo-util :as e] [cmr.system-int-test.system :as s])) (use-fixtures :each (join-fixtures [(ingest/reset-fixture {"provguid1" "PROV1"}) tags/grant-all-tag-fixture])) (def field-maxes "A map of fields to their max lengths" {:tag_key 1030 :description 4000}) (deftest create-tag-validation-test (testing "Create without token" (is (= {:status 401 :errors ["Tags cannot be modified without a valid user token."]} (tags/create-tag nil (tags/make-tag))))) (testing "Create with unknown token" (is (= {:status 401 :errors ["Token ABC in request header does not exist"]} (tags/create-tag "ABC" (tags/make-tag))))) (let [valid-user-token (e/login (s/context) "user1") valid-tag (tags/make-tag)] (testing "Create tag with invalid content type" (is (= {:status 400, :errors ["The mime types specified in the content-type header [application/xml] are not supported."]} (tags/create-tag valid-user-token valid-tag {:http-options {:content-type :xml}})))) (testing "Create tag with invalid tag key" (is (= {:status 422, :errors ["Tag key [a/c] contains '/' character. Tag keys cannot contain this character."]} (tags/create-tag valid-user-token (assoc valid-tag :tag-key "a/c"))))) (testing "Missing field validations" (is (= {:status 400 :errors ["object has missing required properties ([\"tag_key\"])"]} (tags/create-tag valid-user-token (dissoc valid-tag :tag-key))))) (testing "Minimum field length validations" (are [field] (= {:status 400 :errors [(format "/%s string \"\" is too short (length: 0, required minimum: 1)" (name field))]} (tags/create-tag valid-user-token (assoc valid-tag field ""))) :tag_key :description)) (testing "Maximum field length validations" (doseq [[field max-length] field-maxes] (let [long-value (tags/string-of-length (inc max-length))] (is (= {:status 400 :errors [(format "/%s string \"%s\" is too long (length: %d, maximum allowed: %d)" (name field) long-value (inc max-length) max-length)]} (tags/create-tag valid-user-token (assoc valid-tag field long-value))))))))) (deftest create-tag-test (testing "Successful creation" (let [tag-key "<KEY>" tag (tags/make-tag {:tag-key tag-key}) token (e/login (s/context) "user1") {:keys [status concept-id revision-id]} (tags/create-tag token tag)] (is (= 201 status)) (is concept-id) (is (= 1 revision-id)) (tags/assert-tag-saved (assoc tag :originator-id "user1") "user1" concept-id revision-id) (testing "Creation with an already existing tag-key" (is (= {:status 409 :errors [(format "A tag with tag-key [%s] already exists with concept id [%s]." (:tag-key tag) concept-id)]} (tags/create-tag token tag)))) (testing "tag-key is case-insensitive" (let [{:keys[status errors]} (tags/create-tag token (update tag :tag-key str/upper-case))] (is (= [409 [(format "A tag with tag-key [%s] already exists with concept id [%s]." 
(:tag-key tag) concept-id)]] [status errors])))) (testing "Creation with different tag-key succeeds" (let [response (tags/create-tag token (assoc tag :tag-key "different"))] (is (= 201 (:status response))) (is (not= concept-id (:concept-id response))) (is (= 1 (:revision-id response))))) (testing "Creation of previously deleted tag" (tags/delete-tag token tag-key) (let [new-tag (assoc tag :description "new description") token2 (e/login (s/context) "user2") response (tags/create-tag token2 new-tag)] (is (= {:status 200 :concept-id concept-id :revision-id 3} response)) ;; A tag that was deleted but recreated gets a new originator id. (tags/assert-tag-saved (assoc new-tag :originator-id "user2") "user2" concept-id 3)))) (testing "Create tag with fields at maximum length" (let [tag (into {} (for [[field max-length] field-maxes] [field (tags/string-of-length max-length)]))] (is (= 201 (:status (tags/create-tag (e/login (s/context) "user1") tag))))))) (testing "Creation without optional fields is allowed" (let [tag (dissoc (tags/make-tag {:tag-key "tag-key2"}) :description) token (e/login (s/context) "user1") {:keys [status concept-id revision-id]} (tags/create-tag token tag)] (is (= 201 status)) (is concept-id) (is (= 1 revision-id))))) (deftest get-tag-test (let [tag (tags/make-tag {:tag-key "MixedCaseTagKey"}) tag-key "<KEY>" token (e/login (s/context) "user1") _ (tags/create-tag token tag) expected-tag (-> tag (update :tag-key str/lower-case) (assoc :originator-id "user1" :status 200))] (testing "Retrieve existing tag, verify tag-key is converted to lowercase" (is (= expected-tag (tags/get-tag tag-key)))) (testing "Retrieve tag with tag-key is case insensitive" (is (= expected-tag (tags/get-tag "MixedCaseTagKey")))) (testing "Retrieve unknown tag" (is (= {:status 404 :errors ["Tag could not be found with tag-key [tag100]"]} (tags/get-tag "Tag100")))) (testing "Retrieve deleted tag" (tags/delete-tag token tag-key) (is (= {:status 404 :errors [(format "Tag with tag-key [%s] was deleted." 
tag-key)]} (tags/get-tag tag-key)))))) (deftest update-tag-test (let [tag-key "<KEY>" tag (tags/make-tag {:tag-key tag-key}) token (e/login (s/context) "user1") {:keys [concept-id revision-id]} (tags/create-tag token tag)] (testing "Update with originator id" (let [updated-tag (-> tag (update-in [:description] #(str % " updated")) (assoc :originator-id "user1")) token2 (e/login (s/context) "user2") response (tags/update-tag token2 tag-key updated-tag)] (is (= {:status 200 :concept-id concept-id :revision-id 2} response)) (tags/assert-tag-saved updated-tag "user2" concept-id 2))) (testing "Update without originator id" (let [updated-tag (dissoc tag :originator-id) token2 (e/login (s/context) "user2") response (tags/update-tag token2 tag-key updated-tag)] (is (= {:status 200 :concept-id concept-id :revision-id 3} response)) ;; The previous originator id should not change (tags/assert-tag-saved (assoc updated-tag :originator-id "user1") "user2" concept-id 3))))) (deftest update-tag-failure-test (let [tag-key "<KEY>" tag (tags/make-tag {:tag-key tag-key}) token (e/login (s/context) "user1") {:keys [concept-id revision-id]} (tags/create-tag token tag) ;; The stored updated tag would have user1 in the originator id tag (assoc tag :originator-id "user1")] (testing "Update tag with invalid content type" (is (= {:status 400, :errors ["The mime types specified in the content-type header [application/xml] are not supported."]} (tags/update-tag token tag-key tag {:http-options {:content-type :xml}})))) (testing "Update without token" (is (= {:status 401 :errors ["Tags cannot be modified without a valid user token."]} (tags/update-tag nil tag-key tag)))) (testing "Fields that cannot be changed" (are [field human-name] (= {:status 400 :errors [(format (str "%s cannot be modified. Attempted to change existing value" " [%s] to [updated]") human-name (get tag field))]} (tags/update-tag token tag-key (assoc tag field "updated"))) :tag-key "Tag Key" :originator-id "Originator Id")) (testing "Updates applies JSON validations" (is (= {:status 400 :errors ["/description string \"\" is too short (length: 0, required minimum: 1)"]} (tags/update-tag token concept-id (assoc tag :description ""))))) (testing "Update tag that doesn't exist" (is (= {:status 404 :errors ["Tag could not be found with tag-key [tag2]"]} (tags/update-tag token "tag2" tag)))) (testing "Update deleted tag" (tags/delete-tag token tag-key) (is (= {:status 404 :errors [(format "Tag with tag-key [%s] was deleted." tag-key)]} (tags/update-tag token tag-key tag)))))) (deftest delete-tag-test (let [tag-key "<KEY>" tag (tags/make-tag {:tag-key tag-key}) token (e/login (s/context) "user1") {:keys [concept-id revision-id]} (tags/create-tag token tag)] (testing "Delete without token" (is (= {:status 401 :errors ["Tags cannot be modified without a valid user token."]} (tags/delete-tag nil tag-key)))) (testing "Delete success" (is (= {:status 200 :concept-id concept-id :revision-id 2} (tags/delete-tag token tag-key))) (tags/assert-tag-deleted tag "user1" concept-id 2)) (testing "Delete tag that was already deleted" (is (= {:status 404 :errors [(format "Tag with tag-key [%s] was deleted." tag-key)]} (tags/delete-tag token tag-key)))) (testing "Delete tag that doesn't exist" (is (= {:status 404 :errors ["Tag could not be found with tag-key [tag2]"]} (tags/delete-tag token "tag2"))))))
true
(ns cmr.system-int-test.search.tagging.tag-crud-test "This tests the CMR Search API's tagging capabilities" (:require [clojure.test :refer :all] [clojure.string :as str] [cmr.common.util :refer [are2]] [cmr.system-int-test.utils.ingest-util :as ingest] [cmr.system-int-test.utils.search-util :as search] [cmr.system-int-test.utils.index-util :as index] [cmr.system-int-test.utils.tag-util :as tags] [cmr.system-int-test.data2.core :as d] [cmr.system-int-test.data2.collection :as dc] [cmr.mock-echo.client.echo-util :as e] [cmr.system-int-test.system :as s])) (use-fixtures :each (join-fixtures [(ingest/reset-fixture {"provguid1" "PROV1"}) tags/grant-all-tag-fixture])) (def field-maxes "A map of fields to their max lengths" {:tag_key 1030 :description 4000}) (deftest create-tag-validation-test (testing "Create without token" (is (= {:status 401 :errors ["Tags cannot be modified without a valid user token."]} (tags/create-tag nil (tags/make-tag))))) (testing "Create with unknown token" (is (= {:status 401 :errors ["Token ABC in request header does not exist"]} (tags/create-tag "ABC" (tags/make-tag))))) (let [valid-user-token (e/login (s/context) "user1") valid-tag (tags/make-tag)] (testing "Create tag with invalid content type" (is (= {:status 400, :errors ["The mime types specified in the content-type header [application/xml] are not supported."]} (tags/create-tag valid-user-token valid-tag {:http-options {:content-type :xml}})))) (testing "Create tag with invalid tag key" (is (= {:status 422, :errors ["Tag key [a/c] contains '/' character. Tag keys cannot contain this character."]} (tags/create-tag valid-user-token (assoc valid-tag :tag-key "a/c"))))) (testing "Missing field validations" (is (= {:status 400 :errors ["object has missing required properties ([\"tag_key\"])"]} (tags/create-tag valid-user-token (dissoc valid-tag :tag-key))))) (testing "Minimum field length validations" (are [field] (= {:status 400 :errors [(format "/%s string \"\" is too short (length: 0, required minimum: 1)" (name field))]} (tags/create-tag valid-user-token (assoc valid-tag field ""))) :tag_key :description)) (testing "Maximum field length validations" (doseq [[field max-length] field-maxes] (let [long-value (tags/string-of-length (inc max-length))] (is (= {:status 400 :errors [(format "/%s string \"%s\" is too long (length: %d, maximum allowed: %d)" (name field) long-value (inc max-length) max-length)]} (tags/create-tag valid-user-token (assoc valid-tag field long-value))))))))) (deftest create-tag-test (testing "Successful creation" (let [tag-key "PI:KEY:<KEY>END_PI" tag (tags/make-tag {:tag-key tag-key}) token (e/login (s/context) "user1") {:keys [status concept-id revision-id]} (tags/create-tag token tag)] (is (= 201 status)) (is concept-id) (is (= 1 revision-id)) (tags/assert-tag-saved (assoc tag :originator-id "user1") "user1" concept-id revision-id) (testing "Creation with an already existing tag-key" (is (= {:status 409 :errors [(format "A tag with tag-key [%s] already exists with concept id [%s]." (:tag-key tag) concept-id)]} (tags/create-tag token tag)))) (testing "tag-key is case-insensitive" (let [{:keys[status errors]} (tags/create-tag token (update tag :tag-key str/upper-case))] (is (= [409 [(format "A tag with tag-key [%s] already exists with concept id [%s]." 
(:tag-key tag) concept-id)]] [status errors])))) (testing "Creation with different tag-key succeeds" (let [response (tags/create-tag token (assoc tag :tag-key "different"))] (is (= 201 (:status response))) (is (not= concept-id (:concept-id response))) (is (= 1 (:revision-id response))))) (testing "Creation of previously deleted tag" (tags/delete-tag token tag-key) (let [new-tag (assoc tag :description "new description") token2 (e/login (s/context) "user2") response (tags/create-tag token2 new-tag)] (is (= {:status 200 :concept-id concept-id :revision-id 3} response)) ;; A tag that was deleted but recreated gets a new originator id. (tags/assert-tag-saved (assoc new-tag :originator-id "user2") "user2" concept-id 3)))) (testing "Create tag with fields at maximum length" (let [tag (into {} (for [[field max-length] field-maxes] [field (tags/string-of-length max-length)]))] (is (= 201 (:status (tags/create-tag (e/login (s/context) "user1") tag))))))) (testing "Creation without optional fields is allowed" (let [tag (dissoc (tags/make-tag {:tag-key "tag-key2"}) :description) token (e/login (s/context) "user1") {:keys [status concept-id revision-id]} (tags/create-tag token tag)] (is (= 201 status)) (is concept-id) (is (= 1 revision-id))))) (deftest get-tag-test (let [tag (tags/make-tag {:tag-key "MixedCaseTagKey"}) tag-key "PI:KEY:<KEY>END_PI" token (e/login (s/context) "user1") _ (tags/create-tag token tag) expected-tag (-> tag (update :tag-key str/lower-case) (assoc :originator-id "user1" :status 200))] (testing "Retrieve existing tag, verify tag-key is converted to lowercase" (is (= expected-tag (tags/get-tag tag-key)))) (testing "Retrieve tag with tag-key is case insensitive" (is (= expected-tag (tags/get-tag "MixedCaseTagKey")))) (testing "Retrieve unknown tag" (is (= {:status 404 :errors ["Tag could not be found with tag-key [tag100]"]} (tags/get-tag "Tag100")))) (testing "Retrieve deleted tag" (tags/delete-tag token tag-key) (is (= {:status 404 :errors [(format "Tag with tag-key [%s] was deleted." 
tag-key)]} (tags/get-tag tag-key)))))) (deftest update-tag-test (let [tag-key "PI:KEY:<KEY>END_PI" tag (tags/make-tag {:tag-key tag-key}) token (e/login (s/context) "user1") {:keys [concept-id revision-id]} (tags/create-tag token tag)] (testing "Update with originator id" (let [updated-tag (-> tag (update-in [:description] #(str % " updated")) (assoc :originator-id "user1")) token2 (e/login (s/context) "user2") response (tags/update-tag token2 tag-key updated-tag)] (is (= {:status 200 :concept-id concept-id :revision-id 2} response)) (tags/assert-tag-saved updated-tag "user2" concept-id 2))) (testing "Update without originator id" (let [updated-tag (dissoc tag :originator-id) token2 (e/login (s/context) "user2") response (tags/update-tag token2 tag-key updated-tag)] (is (= {:status 200 :concept-id concept-id :revision-id 3} response)) ;; The previous originator id should not change (tags/assert-tag-saved (assoc updated-tag :originator-id "user1") "user2" concept-id 3))))) (deftest update-tag-failure-test (let [tag-key "PI:KEY:<KEY>END_PI" tag (tags/make-tag {:tag-key tag-key}) token (e/login (s/context) "user1") {:keys [concept-id revision-id]} (tags/create-tag token tag) ;; The stored updated tag would have user1 in the originator id tag (assoc tag :originator-id "user1")] (testing "Update tag with invalid content type" (is (= {:status 400, :errors ["The mime types specified in the content-type header [application/xml] are not supported."]} (tags/update-tag token tag-key tag {:http-options {:content-type :xml}})))) (testing "Update without token" (is (= {:status 401 :errors ["Tags cannot be modified without a valid user token."]} (tags/update-tag nil tag-key tag)))) (testing "Fields that cannot be changed" (are [field human-name] (= {:status 400 :errors [(format (str "%s cannot be modified. Attempted to change existing value" " [%s] to [updated]") human-name (get tag field))]} (tags/update-tag token tag-key (assoc tag field "updated"))) :tag-key "Tag Key" :originator-id "Originator Id")) (testing "Updates applies JSON validations" (is (= {:status 400 :errors ["/description string \"\" is too short (length: 0, required minimum: 1)"]} (tags/update-tag token concept-id (assoc tag :description ""))))) (testing "Update tag that doesn't exist" (is (= {:status 404 :errors ["Tag could not be found with tag-key [tag2]"]} (tags/update-tag token "tag2" tag)))) (testing "Update deleted tag" (tags/delete-tag token tag-key) (is (= {:status 404 :errors [(format "Tag with tag-key [%s] was deleted." tag-key)]} (tags/update-tag token tag-key tag)))))) (deftest delete-tag-test (let [tag-key "PI:KEY:<KEY>END_PI" tag (tags/make-tag {:tag-key tag-key}) token (e/login (s/context) "user1") {:keys [concept-id revision-id]} (tags/create-tag token tag)] (testing "Delete without token" (is (= {:status 401 :errors ["Tags cannot be modified without a valid user token."]} (tags/delete-tag nil tag-key)))) (testing "Delete success" (is (= {:status 200 :concept-id concept-id :revision-id 2} (tags/delete-tag token tag-key))) (tags/assert-tag-deleted tag "user1" concept-id 2)) (testing "Delete tag that was already deleted" (is (= {:status 404 :errors [(format "Tag with tag-key [%s] was deleted." tag-key)]} (tags/delete-tag token tag-key)))) (testing "Delete tag that doesn't exist" (is (= {:status 404 :errors ["Tag could not be found with tag-key [tag2]"]} (tags/delete-tag token "tag2"))))))
[ { "context": "sys))))\n\n(defn signup\n [user]\n (let [password (:password user)\n user-id (-> (http.helper/post! @htt", "end": 2868, "score": 0.8782147169113159, "start": 2860, "tag": "PASSWORD", "value": "password" }, { "context": "\n(defn signup\n [user]\n (let [password (:password user)\n user-id (-> (http.helper/post! @http-cli", "end": 2873, "score": 0.7401527762413025, "start": 2869, "tag": "PASSWORD", "value": "user" }, { "context": "ser) :result :id)]\n {:user-id user-id :password password}))\n\n(defn login\n [{:keys [user-id password]}]\n ", "end": 2996, "score": 0.9337681531906128, "start": 2988, "tag": "PASSWORD", "value": "password" }, { "context": "r/random-user)\n first-username (:username first-user)]\n (http.helper/post! @http-client \"signup\" ", "end": 4646, "score": 0.9992285370826721, "start": 4636, "tag": "USERNAME", "value": "first-user" }, { "context": ":email (application.helper/random-email) :username first-username :password (application.helper/random-password)}\n ", "end": 4841, "score": 0.9987136721611023, "start": 4827, "tag": "USERNAME", "value": "first-username" }, { "context": "rst-username :password (application.helper/random-password)}\n {:keys [response body result]} (htt", "end": 4887, "score": 0.5215628147125244, "start": 4879, "tag": "PASSWORD", "value": "password" }, { "context": "lication.helper/random-user)\n password (:password user)\n user-id (-> (signup user) :user-i", "end": 5488, "score": 0.9263197779655457, "start": 5480, "tag": "PASSWORD", "value": "password" }, { "context": ".helper/random-user)\n password (:password user)\n user-id (-> (signup user) :user-id)\n ", "end": 5493, "score": 0.9910791516304016, "start": 5489, "tag": "PASSWORD", "value": "user" }, { "context": "! @http-client \"login\" {:user-id user-id :password password}) ;; Log first time.\n (let [{:keys [response", "end": 6123, "score": 0.9707788825035095, "start": 6115, "tag": "PASSWORD", "value": "password" }, { "context": "! @http-client \"login\" {:user-id user-id :password password})]\n (is (= \"failure\" (:status body)))\n ", "end": 6264, "score": 0.9775606393814087, "start": 6256, "tag": "PASSWORD", "value": "password" }, { "context": "ndom-uuid)\n password (application.helper/random-password)\n {:keys [response body result]} (http.h", "end": 6606, "score": 0.7246329188346863, "start": 6591, "tag": "PASSWORD", "value": "random-password" }, { "context": "! @http-client \"login\" {:user-id user-id :password password})]\n (is (= \"failure\" (:status body)))\n ", "end": 6725, "score": 0.7947748899459839, "start": 6717, "tag": "PASSWORD", "value": "password" }, { "context": "oken (-> (login {:user-id second-user-id :password second-user-password}) :token)]\n (dotimes [_ 5] (http.he", "end": 29447, "score": 0.709706723690033, "start": 29436, "tag": "PASSWORD", "value": "second-user" }, { "context": "token (-> (login {:user-id third-user-id :password third-user-password}) :token)]\n (dotimes [_ 5] (http.helper/po", "end": 29681, "score": 0.8097221255302429, "start": 29662, "tag": "PASSWORD", "value": "third-user-password" }, { "context": "token (-> (login {:user-id first-user-id :password first-user-password}) :token)]\n (http.helper/post! @http-cli", "end": 29909, "score": 0.9980754852294922, "start": 29890, "tag": "PASSWORD", "value": "first-user-password" } ]
;; test/integration/thoughts/adapter/http/http_test.clj
;; Source repository: kmyokoyama/thoughts-clj
(ns integration.thoughts.adapter.http.http-test (:require [clojure.test :refer :all] [com.stuartsierra.component :as component] [integration.thoughts.adapter.http.helper :as http.helper] [thoughts.adapter.cache.in-mem :as a.cache.in-mem] [thoughts.adapter.cache.redis :as a.cache.redis] [thoughts.adapter.config.simple-config :as a.config.simple-config] [thoughts.adapter.http.component :as a.http.component] [thoughts.adapter.repository.datomic :as a.repository.datomic] [thoughts.adapter.repository.in-mem :as a.repository.in-mem] [thoughts.application.service :as service] [unit.thoughts.application.helper :as application.helper])) (def ^:private http-client (atom nil)) (defn- in-mem-test-system-map [] (component/system-map :http-client (component/using (http.helper/make-http-client) [:config]) :config (a.config.simple-config/make-simple-config) :repository (a.repository.in-mem/make-in-mem-repository) :cache (a.cache.in-mem/make-in-mem-cache) :service (component/using (service/make-service) [:repository :cache]) :controller (component/using (a.http.component/make-http-controller) [:config :service]))) (defn- full-test-system-map [] (component/system-map :http-client (component/using (http.helper/make-http-client) [:config]) :config (a.config.simple-config/make-simple-config) :repository (component/using (a.repository.datomic/make-datomic-repository) [:config]) :cache (a.cache.redis/make-redis-cache) :service (component/using (service/make-service) [:repository :cache]) :controller (component/using (a.http.component/make-http-controller) [:config :service]))) (defn- start-in-mem-test-system [] (component/start (in-mem-test-system-map))) (defn- stop-in-mem-test-system [sys] (component/stop sys)) (defn- start-full-test-system [] (let [sys (component/start (full-test-system-map)) conn (get-in sys [:repository :conn])] (a.repository.datomic/load-schema conn "schema.edn") sys)) (defn- stop-full-test-system [system] (a.repository.datomic/delete-database (-> system :config :datomic-uri)) (component/stop system)) (def start-stop-fns {:in-mem [start-in-mem-test-system stop-in-mem-test-system] :full [start-full-test-system stop-full-test-system]}) (use-fixtures :each (fn [test] (let [[start-system! stop-system!] (:in-mem start-stop-fns) sys (start-system!)] (swap! http-client (constantly (:http-client sys))) (test) (stop-system! sys)))) (defn signup [user] (let [password (:password user) user-id (-> (http.helper/post! @http-client "signup" user) :result :id)] {:user-id user-id :password password})) (defn login [{:keys [user-id password]}] (let [token (-> (http.helper/post! @http-client "login" {:user-id user-id :password password}) :result :token)] {:user-id user-id :token token})) (defn signup-and-login ([] (signup-and-login (application.helper/random-user))) ([user] (-> user signup login))) ;; Tests. (deftest ^:integration test-signup (testing "Sign up with a single user" (let [{:keys [response body]} (http.helper/post! @http-client "signup" (application.helper/random-user))] (is (= "success" (:status body))) (is (= 201 (:status response))))) ;; HTTP 201 Created. (testing "Sign up with duplicate email returns a failure" (let [first-user (application.helper/random-user) first-user-email (:email first-user)] (http.helper/post! @http-client "signup" first-user) (let [second-user {:name (application.helper/random-fullname) :email first-user-email :username (application.helper/random-username) :password (application.helper/random-password)} {:keys [response body result]} (-> (http.helper/post! 
@http-client "signup" second-user))] (is (= "failure" (:status body))) (is (= 400 (:status response))) ;; HTTP 400 Bad Request. (is (= "user" (:subject result))) (is (= "email" (get-in result [:context :attribute]))) (is (= (clojure.string/lower-case first-user-email) (get-in result [:context :email])))))) (testing "Sign up with duplicate username returns a failure" (let [first-user (application.helper/random-user) first-username (:username first-user)] (http.helper/post! @http-client "signup" first-user) (let [second-user {:name (application.helper/random-fullname) :email (application.helper/random-email) :username first-username :password (application.helper/random-password)} {:keys [response body result]} (http.helper/post! @http-client "signup" second-user)] (is (= "failure" (:status body))) (is (= 400 (:status response))) ;; HTTP 400 Bad Request. (is (= "user" (:subject result))) (is (= "username" (get-in result [:context :attribute]))) (is (= (clojure.string/lower-case first-username) (get-in result [:context :username]))))))) (deftest ^:integration test-login (testing "Login returns success when user exists" (let [user (application.helper/random-user) password (:password user) user-id (-> (signup user) :user-id) {:keys [response body result]} (http.helper/post! @http-client "login" {:user-id user-id :password password})] (is (= "success" (:status body))) (is (= 200 (:status response))) ;; HTTP 200 OK. (is (and (string? (:token result)) ((complement clojure.string/blank?) (:token result)))))) (testing "Login fails when user is already logged in" (let [user-id (application.helper/random-uuid) password (application.helper/random-password)] (http.helper/post! @http-client "login" {:user-id user-id :password password}) ;; Log first time. (let [{:keys [response body result]} (http.helper/post! @http-client "login" {:user-id user-id :password password})] (is (= "failure" (:status body))) (is (= 400 (:status response))) ;; HTTP 400 Bad Request. (is ((complement contains?) result :token))))) (testing "Login fails when user does not exist" (let [user-id (application.helper/random-uuid) password (application.helper/random-password) {:keys [response body result]} (http.helper/post! @http-client "login" {:user-id user-id :password password})] (is (= "failure" (:status body))) (is (= 400 (:status response))) ;; HTTP 400 Bad Request. (is ((complement contains?) result :token))))) (deftest ^:integration test-logout (testing "Logout returns success when user is already logged in" (let [{:keys [token]} (signup-and-login) {:keys [response body]} (http.helper/post! @http-client "logout" token {})] (is (= "success" (:status body))) (is (= 200 (:status response))))) (testing "Logout fails when user is not logged in yet" (let [{:keys [response body]} (http.helper/post! @http-client "logout" nil {})] (is (= "failure" (:status body))) (is (= 401 (:status response)))))) ;; HTTP 401 Unauthorized. (deftest ^:integration test-thought (testing "Add a single thought" (let [{:keys [user-id token]} (signup-and-login) text (application.helper/random-text) {:keys [response body result]} (http.helper/post! @http-client "thought" token (application.helper/random-thought text))] (is (= "success" (:status body))) (is (= 201 (:status response))) ;; HTTP 201 Created. 
(is (= user-id (:user-id result))) (is (= text (:text result))) (is (= 0 (:likes result) (:rethoughts result) (:replies result)))))) (deftest ^:integration test-get-thoughts-from-user (testing "Get two thoughts from the same user" (let [{:keys [user-id token]} (signup-and-login) first-thought-id (-> (http.helper/post! @http-client "thought" token (application.helper/random-thought)) :result :id) second-thought-id (-> (http.helper/post! @http-client "thought" token (application.helper/random-thought)) :result :id) {:keys [response body result]} (http.helper/get! @http-client (str "user/" user-id "/thoughts") token)] (is (= "success" (:status body))) (is (= 200 (:status response))) ;; HTTP 200 OK. (is (= 2 (:total body) (count result))) (is (= #{first-thought-id second-thought-id} (into #{} (map :id result)))))) (testing "Returns no thought if user has not thought yet" (let [{:keys [user-id token]} (signup-and-login)] ;; No thought. (let [{:keys [response body result]} (http.helper/get! @http-client (str "user/" user-id "/thoughts") token)] (is (= "success" (:status body))) (is (= 200 (:status response))) ;; HTTP 200 OK. (is (= 0 (:total body) (count result))) (is (empty? result)))))) (deftest ^:integration test-get-user-by-id (testing "Get an existing user returns successfully" (let [expected-user (application.helper/random-user) {:keys [user-id token]} (signup-and-login expected-user) {:keys [response result]} (http.helper/get! @http-client (str "user/" user-id) token) attributes [:name :email :username]] (is (= 200 (:status response))) ;; HTTP 200 OK. (is (= (let [expected (select-keys expected-user attributes)] (zipmap (keys expected) (map clojure.string/lower-case (vals expected)))) (select-keys result attributes))))) (testing "Get a missing user returns failure" (let [{:keys [token]} (signup-and-login) user-id (application.helper/random-uuid) {:keys [response body result]} (http.helper/get! @http-client (str "user/" user-id) token)] (is (= 400 (:status response))) ;; HTTP 400 Bad Request. (is (= "failure" (:status body))) (is (= "resource not found" (:type result))) (is (= "user" (:subject result))) (is (= (str user-id) (get-in result [:context :user-id])))))) (deftest ^:integration test-get-thought-by-id (testing "Get an existing thought returns successfully" (let [{:keys [user-id token]} (signup-and-login) expected-thought (application.helper/random-thought) thought-id (-> (http.helper/post! @http-client "thought" token expected-thought) :result :id) {:keys [response result]} (http.helper/get! @http-client (str "thought/" thought-id) token) zeroed-attributes [:likes :rethoughts :replies]] (is (= 200 (:status response))) ;; HTTP 200 OK. (is (= user-id (:user-id result))) (is (= (:text expected-thought) (:text result))) (is (every? zero? (vals (select-keys expected-thought zeroed-attributes)))))) (testing "Get a missing thought returns failure" (let [{:keys [token]} (signup-and-login) thought-id (application.helper/random-uuid) {:keys [response body result]} (http.helper/get! @http-client (str "thought/" thought-id) token)] (is (= 400 (:status response))) ;; HTTP 400 Bad Request. (is (= "failure" (:status body))) (is (= "resource not found" (:type result))) (is (= "thought" (:subject result))) (is (= (str thought-id) (get-in result [:context :thought-id])))))) (deftest ^:integration test-like (testing "Like an existing thought" (let [{:keys [token]} (signup-and-login) thought-id (-> (http.helper/post! 
@http-client "thought" token (application.helper/random-thought)) :result :id) {:keys [response body result]} (http.helper/post! @http-client (str "thought/" thought-id "/like") token {})] (is (= 200 (:status response))) ;; HTTP 200 OK. (is (= "success" (:status body))) (is (= thought-id (:id result))) (is (= 1 (:likes result))))) (testing "Like the same thought twice does not have any effect" (let [{:keys [user-id token]} (signup-and-login) thought-id (-> (http.helper/post! @http-client "thought" token (application.helper/random-thought)) :result :id)] (http.helper/post! @http-client (str "thought/" thought-id "/like") token {}) (let [{:keys [response body result]} (http.helper/post! @http-client (str "thought/" thought-id "/like") token {})] (is (= 400 (:status response))) ;; HTTP 400 Bad Request. (is (= "failure" (:status body))) (is (= "invalid action" (:type result))) (is (= "like" (:subject result))) (is (= (str thought-id) (get-in result [:context :thought-id]))) (is (= (str user-id) (get-in result [:context :user-id])))))) (testing "Like a missing thought returns failure" (let [{:keys [token]} (signup-and-login) thought-id (application.helper/random-uuid) {:keys [response body result]} (http.helper/post! @http-client (str "thought/" thought-id "/like") token {})] (is (= 400 (:status response))) ;; HTTP 400 Bad Request. (is (= "failure" (:status body))) (is (= "resource not found" (:type result))) (is (= "thought" (:subject result))) (is (= (str thought-id) (get-in result [:context :thought-id])))))) (deftest ^:integration test-unlike (testing "Unlike an existing thought previously liked" (let [{:keys [token]} (signup-and-login) thought-id (-> (http.helper/post! @http-client "thought" token (application.helper/random-thought)) :result :id)] (http.helper/post! @http-client (str "thought/" thought-id "/like") token {}) (let [{:keys [response body result]} (http.helper/post! @http-client (str "thought/" thought-id "/unlike") token {})] (is (= 200 (:status response))) ;; HTTP 200 OK. (is (= "success" (:status body))) (is (= thought-id (:id result))) (is (= 0 (:likes result)))))) (testing "Unlike an existing thought not previously liked" (let [{:keys [user-id token]} (signup-and-login) thought-id (-> (http.helper/post! @http-client "thought" token (application.helper/random-thought)) :result :id) {:keys [response body result]} (http.helper/post! @http-client (str "thought/" thought-id "/unlike") token {})] (is (= 400 (:status response))) ;; HTTP 400 Bad Request. (is (= "failure" (:status body))) (is (= "invalid action" (:type result))) (is (= "unlike" (:subject result))) (is (= (str thought-id) (get-in result [:context :thought-id]))) (is (= (str user-id) (get-in result [:context :user-id]))))) (testing "Unlike an existing thought with another user does not have any effect" (let [{:keys [token]} (signup-and-login) {other-user-id :user-id other-token :token} (signup-and-login) thought-id (-> (http.helper/post! @http-client "thought" token (application.helper/random-thought)) :result :id)] (http.helper/post! @http-client (str "thought/" thought-id "/like") token {}) (let [{:keys [response body result]} (http.helper/post! @http-client (str "thought/" thought-id "/unlike") other-token {})] (is (= 400 (:status response))) ;; HTTP 400 Bad Request. 
(is (= "failure" (:status body))) (is (= "invalid action" (:type result))) (is (= "unlike" (:subject result))) (is (= (str thought-id) (get-in result [:context :thought-id]))) (is (= (str other-user-id) (get-in result [:context :user-id])))))) (testing "Unlike a missing thought returns failure" (let [{:keys [token]} (signup-and-login) thought-id (application.helper/random-uuid) {:keys [response body result]} (http.helper/post! @http-client (str "thought/" thought-id "/unlike") token {})] (is (= 400 (:status response))) ;; HTTP 400 Bad Request. (is (= "failure" (:status body))) (is (= "resource not found" (:type result))) (is (= "thought" (:subject result))) (is (= (str thought-id) (get-in result [:context :thought-id])))))) (deftest ^:integration test-add-reply (testing "Add new reply to existing thought returns success" (let [{:keys [user-id token]} (signup-and-login) thought-id (-> (http.helper/post! @http-client "thought" token (application.helper/random-thought)) :result :id) reply-text (application.helper/random-text) {:keys [response body result]} (http.helper/post! @http-client (str "thought/" thought-id "/reply") token {:text reply-text})] (is (= "success" (:status body))) (is (= 201 (:status response))) ;; HTTP 201 Created. (is (= user-id (:user-id result))) (is (= reply-text (:text result))) (is (= 0 (:likes result) (:rethoughts result) (:replies result))))) (testing "Add new reply to missing thought fails" (let [{:keys [token]} (signup-and-login) thought-id (application.helper/random-uuid) reply-text (application.helper/random-text) {:keys [response body result]} (http.helper/post! @http-client (str "thought/" thought-id "/reply") token {:text reply-text})] (is (= 400 (:status response))) ;; HTTP 400 Bad Request. (is (= "failure" (:status body))) (is (= "resource not found" (:type result))) (is (= "thought" (:subject result))) (is (= (str thought-id) (get-in result [:context :thought-id])))))) (deftest ^:integration test-get-rethought-by-id (testing "Get rethoughts from thought not rethoughted yet returns an empty list" (let [{:keys [token]} (signup-and-login) thought-id (-> (http.helper/post! @http-client "thought" token (application.helper/random-thought)) :result :id) rethought-id (-> (http.helper/post! @http-client (str "thought/" thought-id "/rethought") token {}) :result :id) {:keys [response body result]} (http.helper/get! @http-client (str "rethought/" rethought-id) token)] (is (= 200 (:status response))) ;; HTTP 200 OK. (is (= "success" (:status body))) (is (= rethought-id (:id result))) (is (= thought-id (:source-thought-id result)))))) (deftest ^:integration test-get-rethoughts (testing "Get rethoughts from a thought already rethoughted returns all replies" (let [{:keys [user-id token]} (signup-and-login) thought-id (-> (http.helper/post! @http-client "thought" token (application.helper/random-thought user-id)) :result :id)] (dotimes [_ 5] (http.helper/post! @http-client (str "thought/" thought-id "/rethought") token {})) (dotimes [_ 5] (http.helper/post! @http-client (str "thought/" thought-id "/rethought-comment") token {:comment (application.helper/random-text)})) (let [{:keys [response body result]} (http.helper/get! @http-client (str "thought/" thought-id "/rethoughts") token)] (is (= 200 (:status response))) ;; HTTP 200 OK. (is (= "success" (:status body))) (is (= 10 (:total body) (count result)))))) (testing "Get rethoughts from thought not rethoughted yet returns an empty list" (let [{:keys [token]} (signup-and-login) thought-id (-> (http.helper/post! 
@http-client "thought" token (application.helper/random-thought)) :result :id) {:keys [response body result]} (http.helper/get! @http-client (str "thought/" thought-id "/rethoughts") token)] (is (= 200 (:status response))) ;; HTTP 200 OK. (is (= "success" (:status body))) (is (empty? result))))) (deftest ^:integration test-get-replies (testing "Get rethoughts from a thought already rethoughted returns all replies" (let [{:keys [token]} (signup-and-login) thought-id (-> (http.helper/post! @http-client "thought" token (application.helper/random-thought)) :result :id)] (dotimes [_ 5] (http.helper/post! @http-client (str "thought/" thought-id "/reply") token {:text (application.helper/random-text)})) (let [{:keys [response body result]} (http.helper/get! @http-client (str "thought/" thought-id "/replies") token)] (is (= 200 (:status response))) ;; HTTP 200 OK. (is (= "success" (:status body))) (is (= 5 (:total body) (count result)))))) (testing "Get replies from thought not replied yet returns an empty list" (let [{:keys [token]} (signup-and-login) thought-id (-> (http.helper/post! @http-client "thought" token (application.helper/random-thought)) :result :id) {:keys [response body result]} (http.helper/get! @http-client (str "thought/" thought-id "/replies") token)] (is (= 200 (:status response))) ;; HTTP 200 OK. (is (= "success" (:status body))) (is (empty? result))))) (deftest ^:integration test-follow (testing "User follows another user" (let [{follower-id :user-id follower-token :token} (signup-and-login) {followed-id :user-id} (signup-and-login) {:keys [response body]} (http.helper/post! @http-client (str "user/" followed-id "/follow") follower-token {}) followed-result (:result body) follower-result (-> (http.helper/get! @http-client (str "user/" follower-id) follower-token {}) :result)] (is (= 200 (:status response))) ;; HTTP 200 OK. (is (= "success" (:status body))) (is (= 1 (:followers followed-result))) (is (= 1 (:following follower-result))))) (testing "Follow a missing user returns failure" (let [{:keys [token]} (signup-and-login) random-followed-id (application.helper/random-uuid) {:keys [response body result]} (http.helper/post! @http-client (str "user/" random-followed-id "/follow") token {})] (is (= 400 (:status response))) ;; HTTP 400 Bad Request. (is (= "failure" (:status body))) (is (= "resource not found" (:type result))) (is (= "user" (:subject result))) (is (= (str random-followed-id) (get-in result [:context :user-id]))))) (testing "Follow the same user twice returns failure" (let [{:keys [user-id token]} (signup-and-login) {followed-id :user-id} (signup-and-login) _ (http.helper/post! @http-client (str "user/" followed-id "/follow") token {}) {:keys [response body result]} (http.helper/post! @http-client (str "user/" followed-id "/follow") token {})] (is (= 400 (:status response))) ;; HTTP 400 Bad Request. (is (= "failure" (:status body))) (is (= "invalid action" (:type result))) (is (= "follow" (:subject result))) (is (= (str user-id) (get-in result [:context :follower-id]))) (is (= (str followed-id) (get-in result [:context :followed-id])))))) (deftest ^:integration test-unfollow (testing "User unfollows an user she/he follows" (let [{follower-id :user-id follower-token :token} (signup-and-login) {followed-id :user-id} (signup-and-login) _ (http.helper/post! @http-client (str "user/" followed-id "/follow") follower-token {}) {:keys [response body]} (http.helper/post! 
@http-client (str "user/" followed-id "/unfollow") follower-token {}) unfollowed-result (:result body) follower-result (-> (http.helper/get! @http-client (str "user/" follower-id) follower-token {}) :result)] (is (= 200 (:status response))) ;; HTTP 200 OK. (is (= "success" (:status body))) (is (= 0 (:followers unfollowed-result))) (is (= 0 (:following follower-result))))) (testing "Unfollow a missing user returns failure" (let [{:keys [token]} (signup-and-login) random-followed-id (application.helper/random-uuid) {:keys [response body result]} (http.helper/post! @http-client (str "user/" random-followed-id "/unfollow") token {})] (is (= 400 (:status response))) ;; HTTP 400 Bad Request. (is (= "failure" (:status body))) (is (= "resource not found" (:type result))) (is (= "user" (:subject result))) (is (= (str random-followed-id) (get-in result [:context :user-id]))))) (testing "Unfollow an user that is not currently followed" (let [{:keys [user-id token]} (signup-and-login) {followed-id :user-id} (signup-and-login) {:keys [response body result]} (http.helper/post! @http-client (str "user/" followed-id "/unfollow") token {})] (is (= 400 (:status response))) ;; HTTP 400 Bad Request. (is (= "failure" (:status body))) (is (= "invalid action" (:type result))) (is (= "unfollow" (:subject result))) (is (= (str user-id) (get-in result [:context :follower-id]))) (is (= (str followed-id) (get-in result [:context :followed-id]))))) (testing "Unfollow the same user twice returns failure" (let [{:keys [user-id token]} (signup-and-login) {followed-id :user-id} (signup-and-login) _ (http.helper/post! @http-client (str "user/" followed-id "/follow") token {}) _ (http.helper/post! @http-client (str "user/" followed-id "/unfollow") token {}) ;; First unfollow. {:keys [response body result]} (http.helper/post! @http-client (str "user/" followed-id "/unfollow") token {})] (is (= 400 (:status response))) ;; HTTP 400 Bad Request. (is (= "failure" (:status body))) (is (= "invalid action" (:type result))) (is (= "unfollow" (:subject result))) (is (= (str user-id) (get-in result [:context :follower-id]))) (is (= (str followed-id) (get-in result [:context :followed-id])))))) (deftest ^:integration test-get-user-following (testing "Get following list of an user" (let [{follower-id :user-id follower-token :token} (signup-and-login) {followed-id :user-id} (signup-and-login) _ (http.helper/post! @http-client (str "user/" followed-id "/follow") follower-token {}) {:keys [response body result]} (http.helper/get! @http-client (str "user/" follower-id "/following") follower-token {})] (is (= 200 (:status response))) ;; HTTP 200 OK. (is (= "success" (:status body))) (is (= 1 (:total body) (count result))))) (testing "Get following list of an user that does not follow anyone returns an empty list" (let [{:keys [user-id token]} (signup-and-login) {:keys [response body result]} (http.helper/get! @http-client (str "user/" user-id "/following") token {})] (is (= 200 (:status response))) ;; HTTP 200 OK. (is (= "success" (:status body))) (is (empty? result))))) (deftest ^:integration test-get-user-followers (testing "Get followers list of an user" (let [{follower-token :token} (signup-and-login) {followed-id :user-id} (signup-and-login) _ (http.helper/post! @http-client (str "user/" followed-id "/follow") follower-token {}) {:keys [response body result]} (http.helper/get! @http-client (str "user/" followed-id "/followers") follower-token {})] (is (= 200 (:status response))) ;; HTTP 200 OK. 
(is (= "success" (:status body))) (is (= 1 (:total body) (count result))))) (testing "Get followers list of an user that is not followed by anyone returns an empty list" (let [{:keys [user-id token]} (signup-and-login) {:keys [response body result]} (http.helper/get! @http-client (str "user/" user-id "/followers") token {})] (is (= 200 (:status response))) ;; HTTP 200 OK. (is (= "success" (:status body))) (is (empty? result))))) (deftest ^:integration test-get-feed (testing "Get feed of an user returns most recent thoughts" (let [{first-user-id :user-id first-user-password :password} (signup (application.helper/random-user)) {second-user-id :user-id second-user-password :password} (signup (application.helper/random-user)) {third-user-id :user-id third-user-password :password} (signup (application.helper/random-user)) second-user-token (-> (login {:user-id second-user-id :password second-user-password}) :token)] (dotimes [_ 5] (http.helper/post! @http-client "thought" second-user-token (application.helper/random-thought))) (let [third-user-token (-> (login {:user-id third-user-id :password third-user-password}) :token)] (dotimes [_ 5] (http.helper/post! @http-client "thought" third-user-token (application.helper/random-thought))) (let [first-user-token (-> (login {:user-id first-user-id :password first-user-password}) :token)] (http.helper/post! @http-client (str "user/" second-user-id "/follow") first-user-token {}) (http.helper/post! @http-client (str "user/" third-user-id "/follow") first-user-token {}) (let [{:keys [response body result]} (http.helper/get! @http-client "feed" first-user-token {})] (is (= 200 (:status response))) ;; HTTP 200 OK. (is (= "success" (:status body))) (is (= 10 (:total body) (count result))) (is (->> result (map :publish-date) (map http.helper/str->EpochSecond) (apply >=)))))))))
@http-client (str "thought/" thought-id "/unlike") other-token {})] (is (= 400 (:status response))) ;; HTTP 400 Bad Request. (is (= "failure" (:status body))) (is (= "invalid action" (:type result))) (is (= "unlike" (:subject result))) (is (= (str thought-id) (get-in result [:context :thought-id]))) (is (= (str other-user-id) (get-in result [:context :user-id])))))) (testing "Unlike a missing thought returns failure" (let [{:keys [token]} (signup-and-login) thought-id (application.helper/random-uuid) {:keys [response body result]} (http.helper/post! @http-client (str "thought/" thought-id "/unlike") token {})] (is (= 400 (:status response))) ;; HTTP 400 Bad Request. (is (= "failure" (:status body))) (is (= "resource not found" (:type result))) (is (= "thought" (:subject result))) (is (= (str thought-id) (get-in result [:context :thought-id])))))) (deftest ^:integration test-add-reply (testing "Add new reply to existing thought returns success" (let [{:keys [user-id token]} (signup-and-login) thought-id (-> (http.helper/post! @http-client "thought" token (application.helper/random-thought)) :result :id) reply-text (application.helper/random-text) {:keys [response body result]} (http.helper/post! @http-client (str "thought/" thought-id "/reply") token {:text reply-text})] (is (= "success" (:status body))) (is (= 201 (:status response))) ;; HTTP 201 Created. (is (= user-id (:user-id result))) (is (= reply-text (:text result))) (is (= 0 (:likes result) (:rethoughts result) (:replies result))))) (testing "Add new reply to missing thought fails" (let [{:keys [token]} (signup-and-login) thought-id (application.helper/random-uuid) reply-text (application.helper/random-text) {:keys [response body result]} (http.helper/post! @http-client (str "thought/" thought-id "/reply") token {:text reply-text})] (is (= 400 (:status response))) ;; HTTP 400 Bad Request. (is (= "failure" (:status body))) (is (= "resource not found" (:type result))) (is (= "thought" (:subject result))) (is (= (str thought-id) (get-in result [:context :thought-id])))))) (deftest ^:integration test-get-rethought-by-id (testing "Get rethoughts from thought not rethoughted yet returns an empty list" (let [{:keys [token]} (signup-and-login) thought-id (-> (http.helper/post! @http-client "thought" token (application.helper/random-thought)) :result :id) rethought-id (-> (http.helper/post! @http-client (str "thought/" thought-id "/rethought") token {}) :result :id) {:keys [response body result]} (http.helper/get! @http-client (str "rethought/" rethought-id) token)] (is (= 200 (:status response))) ;; HTTP 200 OK. (is (= "success" (:status body))) (is (= rethought-id (:id result))) (is (= thought-id (:source-thought-id result)))))) (deftest ^:integration test-get-rethoughts (testing "Get rethoughts from a thought already rethoughted returns all replies" (let [{:keys [user-id token]} (signup-and-login) thought-id (-> (http.helper/post! @http-client "thought" token (application.helper/random-thought user-id)) :result :id)] (dotimes [_ 5] (http.helper/post! @http-client (str "thought/" thought-id "/rethought") token {})) (dotimes [_ 5] (http.helper/post! @http-client (str "thought/" thought-id "/rethought-comment") token {:comment (application.helper/random-text)})) (let [{:keys [response body result]} (http.helper/get! @http-client (str "thought/" thought-id "/rethoughts") token)] (is (= 200 (:status response))) ;; HTTP 200 OK. 
(is (= "success" (:status body))) (is (= 10 (:total body) (count result)))))) (testing "Get rethoughts from thought not rethoughted yet returns an empty list" (let [{:keys [token]} (signup-and-login) thought-id (-> (http.helper/post! @http-client "thought" token (application.helper/random-thought)) :result :id) {:keys [response body result]} (http.helper/get! @http-client (str "thought/" thought-id "/rethoughts") token)] (is (= 200 (:status response))) ;; HTTP 200 OK. (is (= "success" (:status body))) (is (empty? result))))) (deftest ^:integration test-get-replies (testing "Get rethoughts from a thought already rethoughted returns all replies" (let [{:keys [token]} (signup-and-login) thought-id (-> (http.helper/post! @http-client "thought" token (application.helper/random-thought)) :result :id)] (dotimes [_ 5] (http.helper/post! @http-client (str "thought/" thought-id "/reply") token {:text (application.helper/random-text)})) (let [{:keys [response body result]} (http.helper/get! @http-client (str "thought/" thought-id "/replies") token)] (is (= 200 (:status response))) ;; HTTP 200 OK. (is (= "success" (:status body))) (is (= 5 (:total body) (count result)))))) (testing "Get replies from thought not replied yet returns an empty list" (let [{:keys [token]} (signup-and-login) thought-id (-> (http.helper/post! @http-client "thought" token (application.helper/random-thought)) :result :id) {:keys [response body result]} (http.helper/get! @http-client (str "thought/" thought-id "/replies") token)] (is (= 200 (:status response))) ;; HTTP 200 OK. (is (= "success" (:status body))) (is (empty? result))))) (deftest ^:integration test-follow (testing "User follows another user" (let [{follower-id :user-id follower-token :token} (signup-and-login) {followed-id :user-id} (signup-and-login) {:keys [response body]} (http.helper/post! @http-client (str "user/" followed-id "/follow") follower-token {}) followed-result (:result body) follower-result (-> (http.helper/get! @http-client (str "user/" follower-id) follower-token {}) :result)] (is (= 200 (:status response))) ;; HTTP 200 OK. (is (= "success" (:status body))) (is (= 1 (:followers followed-result))) (is (= 1 (:following follower-result))))) (testing "Follow a missing user returns failure" (let [{:keys [token]} (signup-and-login) random-followed-id (application.helper/random-uuid) {:keys [response body result]} (http.helper/post! @http-client (str "user/" random-followed-id "/follow") token {})] (is (= 400 (:status response))) ;; HTTP 400 Bad Request. (is (= "failure" (:status body))) (is (= "resource not found" (:type result))) (is (= "user" (:subject result))) (is (= (str random-followed-id) (get-in result [:context :user-id]))))) (testing "Follow the same user twice returns failure" (let [{:keys [user-id token]} (signup-and-login) {followed-id :user-id} (signup-and-login) _ (http.helper/post! @http-client (str "user/" followed-id "/follow") token {}) {:keys [response body result]} (http.helper/post! @http-client (str "user/" followed-id "/follow") token {})] (is (= 400 (:status response))) ;; HTTP 400 Bad Request. 
(is (= "failure" (:status body))) (is (= "invalid action" (:type result))) (is (= "follow" (:subject result))) (is (= (str user-id) (get-in result [:context :follower-id]))) (is (= (str followed-id) (get-in result [:context :followed-id])))))) (deftest ^:integration test-unfollow (testing "User unfollows an user she/he follows" (let [{follower-id :user-id follower-token :token} (signup-and-login) {followed-id :user-id} (signup-and-login) _ (http.helper/post! @http-client (str "user/" followed-id "/follow") follower-token {}) {:keys [response body]} (http.helper/post! @http-client (str "user/" followed-id "/unfollow") follower-token {}) unfollowed-result (:result body) follower-result (-> (http.helper/get! @http-client (str "user/" follower-id) follower-token {}) :result)] (is (= 200 (:status response))) ;; HTTP 200 OK. (is (= "success" (:status body))) (is (= 0 (:followers unfollowed-result))) (is (= 0 (:following follower-result))))) (testing "Unfollow a missing user returns failure" (let [{:keys [token]} (signup-and-login) random-followed-id (application.helper/random-uuid) {:keys [response body result]} (http.helper/post! @http-client (str "user/" random-followed-id "/unfollow") token {})] (is (= 400 (:status response))) ;; HTTP 400 Bad Request. (is (= "failure" (:status body))) (is (= "resource not found" (:type result))) (is (= "user" (:subject result))) (is (= (str random-followed-id) (get-in result [:context :user-id]))))) (testing "Unfollow an user that is not currently followed" (let [{:keys [user-id token]} (signup-and-login) {followed-id :user-id} (signup-and-login) {:keys [response body result]} (http.helper/post! @http-client (str "user/" followed-id "/unfollow") token {})] (is (= 400 (:status response))) ;; HTTP 400 Bad Request. (is (= "failure" (:status body))) (is (= "invalid action" (:type result))) (is (= "unfollow" (:subject result))) (is (= (str user-id) (get-in result [:context :follower-id]))) (is (= (str followed-id) (get-in result [:context :followed-id]))))) (testing "Unfollow the same user twice returns failure" (let [{:keys [user-id token]} (signup-and-login) {followed-id :user-id} (signup-and-login) _ (http.helper/post! @http-client (str "user/" followed-id "/follow") token {}) _ (http.helper/post! @http-client (str "user/" followed-id "/unfollow") token {}) ;; First unfollow. {:keys [response body result]} (http.helper/post! @http-client (str "user/" followed-id "/unfollow") token {})] (is (= 400 (:status response))) ;; HTTP 400 Bad Request. (is (= "failure" (:status body))) (is (= "invalid action" (:type result))) (is (= "unfollow" (:subject result))) (is (= (str user-id) (get-in result [:context :follower-id]))) (is (= (str followed-id) (get-in result [:context :followed-id])))))) (deftest ^:integration test-get-user-following (testing "Get following list of an user" (let [{follower-id :user-id follower-token :token} (signup-and-login) {followed-id :user-id} (signup-and-login) _ (http.helper/post! @http-client (str "user/" followed-id "/follow") follower-token {}) {:keys [response body result]} (http.helper/get! @http-client (str "user/" follower-id "/following") follower-token {})] (is (= 200 (:status response))) ;; HTTP 200 OK. (is (= "success" (:status body))) (is (= 1 (:total body) (count result))))) (testing "Get following list of an user that does not follow anyone returns an empty list" (let [{:keys [user-id token]} (signup-and-login) {:keys [response body result]} (http.helper/get! 
@http-client (str "user/" user-id "/following") token {})] (is (= 200 (:status response))) ;; HTTP 200 OK. (is (= "success" (:status body))) (is (empty? result))))) (deftest ^:integration test-get-user-followers (testing "Get followers list of an user" (let [{follower-token :token} (signup-and-login) {followed-id :user-id} (signup-and-login) _ (http.helper/post! @http-client (str "user/" followed-id "/follow") follower-token {}) {:keys [response body result]} (http.helper/get! @http-client (str "user/" followed-id "/followers") follower-token {})] (is (= 200 (:status response))) ;; HTTP 200 OK. (is (= "success" (:status body))) (is (= 1 (:total body) (count result))))) (testing "Get followers list of an user that is not followed by anyone returns an empty list" (let [{:keys [user-id token]} (signup-and-login) {:keys [response body result]} (http.helper/get! @http-client (str "user/" user-id "/followers") token {})] (is (= 200 (:status response))) ;; HTTP 200 OK. (is (= "success" (:status body))) (is (empty? result))))) (deftest ^:integration test-get-feed (testing "Get feed of an user returns most recent thoughts" (let [{first-user-id :user-id first-user-password :password} (signup (application.helper/random-user)) {second-user-id :user-id second-user-password :password} (signup (application.helper/random-user)) {third-user-id :user-id third-user-password :password} (signup (application.helper/random-user)) second-user-token (-> (login {:user-id second-user-id :password PI:PASSWORD:<PASSWORD>END_PI-password}) :token)] (dotimes [_ 5] (http.helper/post! @http-client "thought" second-user-token (application.helper/random-thought))) (let [third-user-token (-> (login {:user-id third-user-id :password PI:PASSWORD:<PASSWORD>END_PI}) :token)] (dotimes [_ 5] (http.helper/post! @http-client "thought" third-user-token (application.helper/random-thought))) (let [first-user-token (-> (login {:user-id first-user-id :password PI:PASSWORD:<PASSWORD>END_PI}) :token)] (http.helper/post! @http-client (str "user/" second-user-id "/follow") first-user-token {}) (http.helper/post! @http-client (str "user/" third-user-id "/follow") first-user-token {}) (let [{:keys [response body result]} (http.helper/get! @http-client "feed" first-user-token {})] (is (= 200 (:status response))) ;; HTTP 200 OK. (is (= "success" (:status body))) (is (= 10 (:total body) (count result))) (is (->> result (map :publish-date) (map http.helper/str->EpochSecond) (apply >=)))))))))
[ { "context": "weights)\n\t (map vector preds)\n\t (into {}))))\n\n\n;;; Mira Reranking\n\n(defn mira-diff-vec-and-loss [weigh", "end": 3138, "score": 0.6285568475723267, "start": 3137, "tag": "NAME", "value": "M" } ]
src/infer/sparse_reranking.clj
robertpfeiffer/infer
1
(ns infer.sparse-reranking ^{:doc "In sparse reranking each instance comes with several possible labels, but in contrast to classification, the labels are instance-specific, rather than fixed set of labels. So you can think of there being a global set of predicates rather than separate features for each label and predicate pair as in classification. Some terminology. A feat-vec is a map of active predicates for some choice for an instance. A datum is a seq of feat-vec representing feature maps for each choice for an instance. Another convention: During training, the first choice is the correct choice." } (:use [infer.sparse-classification :only [with-l2-regularization]] [infer.measures :only [sparse-dot-product]] [infer.core :only [log-add]] [infer.optimize :only [remember-last lbfgs-optimize]] [plumbing.core :only [map-map apply-each]])) (defn get-posteriors "returns [log-z posts] using a linear model over the weights posts is a sequence of posteriors for each choice in datum " [weights-map datum] (let [vals (map (partial sparse-dot-product weights-map) datum) log-val (log-add vals)] [log-val (map (fn [x] (Math/exp (- x log-val))) vals)])) (defn get-datum "get-labels: Available choices for instance get-feat-extractor: Given instance, returns feat-vec for each label If you're making a datum for training, make sure your get-labels function puts the correct label first" [get-labels get-feat-extractor instance] (let [get-feats (get-feat-extractor instance)] (map get-feats (get-labels instance)))) (defn min-index [xs] (apply min-key (partial nth xs) (range (count xs)))) (defn max-index [xs] (apply max-key (partial nth xs) (range (count xs)))) ;;; MaxEnt Reranking (defn maxent-grad [posts datum] (->> posts (map-indexed (fn [i p] (if (zero? i) p (- p)))) (vector datum) (apply map (fn [fv post] (map-map (partial * post) fv))) (reduce (partial merge-with (fnil + 0.0 0.0))))) (defn maxent-obj-term [weight-map datum] (let [[log-z posts] (get-posteriors weight-map datum) true-log-score (sparse-dot-product weight-map (first datum))] [(- true-log-score log-z) (maxent-grad posts datum)])) (defn maxent-obj [train-data preds weights] (let [weight-map (into {} (map vector preds weights)) [obj-val grad-map] (->> train-data (map (partial maxent-obj-term weight-map)) (reduce (partial apply-each [+ (partial merge-with (fnil + 0.0 0.0))])))] [(- obj-val) (map #(- (get grad-map % 0.0)) preds)])) (defn train-maxent "train-data: seq of rerank datums the training data will be looped over multiple times returns the weight map that can be used with get-posteriors" [train-data & {:keys [sigma-sq] :or {sigma-sq 1.0}}] (let [preds (->> train-data (mapcat (partial mapcat keys)) (into #{}) seq) init-weights (repeat (count preds) 0.0) obj-fn (->> (partial maxent-obj train-data preds) (partial with-l2-regularization sigma-sq) remember-last)] (->> (lbfgs-optimize obj-fn init-weights) (map vector preds) (into {})))) ;;; Mira Reranking (defn mira-diff-vec-and-loss [weights y* y-hat losses datum] (let [delta-loss (- (nth losses y-hat) (nth losses y*))] (assert (>= delta-loss 0)) [(merge-with + (nth datum y*) (map-map - (nth datum y-hat))) delta-loss])) (defn mira-update [decode weights losses datum] (let [y* (min-index losses) y-hat (decode weights losses datum) [delta-f delta-l] (mira-diff-vec-and-loss weights y* y-hat losses datum) weights-delta-f (sparse-dot-product weights delta-f)] (when (> delta-l 0) [(/ (- delta-l weights-delta-f) (sparse-dot-product delta-f delta-f)) delta-f]))) (defn mira-iter [init-weights 
get-losses-and-datum instances {:keys [max-alpha,lambda] :or {max-alpha 0.15, num-iters 10, lambda 1}}] (reduce (fn [[num-errors weights] instance] (let [[losses datum] (get-losses-and-datum instance) [alpha delta-f] (mira-update (fn [weights losses datum] (let [scores (map (fn [fv loss] (+ (sparse-dot-product fv weights) (* lambda loss))) datum losses)] (max-index scores))) weights losses datum)] (if (nil? delta-f) [num-errors weights] [(inc num-errors) (merge-with + weights (map-map (partial * (min alpha max-alpha)) delta-f))]))) [0 init-weights] instances)) (defn train-mira [get-losses-and-datum instances & {:keys [num-iters] :or {num-iters 10}}] (loop [weights {} iter 0] (if (= iter num-iters) weights (let [[num-errors new-weights] (mira-iter weights get-losses-and-datum instances nil)] (if (zero? num-errors) weights (recur new-weights (inc iter)))))))
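;; Editor's addition: a minimal usage sketch of the reranking API above. The
;; predicate names and datums are invented for illustration only; the one
;; convention taken from the file is that during training the first feat-vec
;; in each datum is the correct choice.
(comment
  (def train-data
    ;; two training instances; the correct choice is listed first in each
    [[{"len<5" 1.0 "starts-a" 1.0}   ;; correct choice for instance 1
      {"len<5" 1.0}]                 ;; competing choice
     [{"starts-a" 1.0}               ;; correct choice for instance 2
      {"len<5" 1.0 "starts-a" 1.0}]])

  ;; fit maxent reranker weights over the global predicate set
  (def weights (train-maxent train-data :sigma-sq 1.0))

  ;; rerank a fresh datum: score every choice and keep the most probable one
  (let [datum [{"len<5" 1.0} {"starts-a" 1.0}]
        [_log-z posts] (get-posteriors weights datum)]
    (nth datum (max-index posts))))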
src/org/zalando/stups/kio/sql.clj
oporkka/kio
25
; Copyright 2015 Zalando SE ; ; Licensed under the Apache License, Version 2.0 (the "License") ; you may not use this file except in compliance with the License. ; You may obtain a copy of the License at ; ; http://www.apache.org/licenses/LICENSE-2.0 ; ; Unless required by applicable law or agreed to in writing, software ; distributed under the License is distributed on an "AS IS" BASIS, ; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ; See the License for the specific language governing permissions and ; limitations under the License. (ns org.zalando.stups.kio.sql (:require [environ.core :as env] [yesql.core :refer [defqueries]] [org.zalando.stups.friboo.system.db :refer [def-db-component generate-hystrix-commands]])) ;;USE env variable AUTO_MIGRATION to configure auto-migration? (def-db-component DB :auto-migration? (Boolean/parseBoolean (env/env :auto-migration))) (def default-db-configuration {:db-classname "org.postgresql.Driver" :db-subprotocol "postgresql" :db-subname "//localhost:5432/kio" :db-user "postgres" :db-password "postgres" :db-init-sql "SET search_path TO zk_data, public"}) (defqueries "db/applications.sql") (generate-hystrix-commands)
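; Editor's addition: a minimal sketch of how the defaults above could be
; overridden for a non-local database. The host and credentials are
; placeholder values, not settings used by this project; auto-migration
; itself is toggled via the AUTO_MIGRATION environment variable as noted above.
(comment
  (merge default-db-configuration
         {:db-subname  "//db.example.org:5432/kio"
          :db-user     "kio"
          :db-password "change-me"}))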
src/cljs/paredit_cm/core.cljs
devurandom/saite
108
(ns paredit-cm.core "paredit operations (exported)" (:require [clojure.string :as str] [cljsjs.codemirror] [cljsjs.codemirror.mode.clojure] [cljsjs.codemirror.keymap.emacs])) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; ;; MIT License ;; ;; Copyright (c) 2017 Andrew Cheng ;; ;; Permission is hereby granted, free of charge, to any person obtaining a copy ;; of this software and associated documentation files (the "Software"), to deal ;; in the Software without restriction, including without limitation the rights ;; to use, copy, modify, merge, publish, distribute, sublicense, and/or sell ;; copies of the Software, and to permit persons to whom the Software is ;; furnished to do so, subject to the following conditions: ;; ;; The above copyright notice and this permission notice shall be included ;; in all copies or substantial portions of the Software. ;; ;; Jon Anthony (2019,2020): ;; Many changes and fixes for working with newer codemirror releases ;; ;; ** PAREDI PROJECT CONVENTIONS ** ;; ;; consider this notation: aXbc ;; ;; in the unit tests as well as here, aXbc contains a single capital X which ;; represents the position of the cursor. aXbc means the code mirror instance's ;; value is 'abc' and a block-style cursor is on 'b' (a bar-style cursor would ;; be between 'a' and 'b'). aXbc is what you would see if you typed a capital X ;; in this example code mirror. ;; ;; 'cur' is for the current position's cursor (on 'b' in the example). ;; 'left-cur' is for position 'a'. 'right-cur' is for position 'c'. ;; ;; if there is a current cursor cur and a new cursor, then the new cursor will ;; be named cur' (the single quote is part of the name, so read it aloud as ;; cursor-prime) ;; ;; when there are two cursors (as in the beginning and ending of a selection) we ;; use c1 and c2. it feels strange to call them 'start' and 'end' when those are ;; the names codemirror uses to refer to the ends of a token. ;; ;; the following all refer to the values for the token at 'cur': 'start' 'line' ;; 'ch' 'i' 'string' 'type' ;; ;; use the same prefixes 'left-' and 'right-' when referring to the same kinds ;; of values belonging to 'left-cur' and 'right-cur' ;; ;; ints *other than i, the code mirror index* are named with a single character ;; like 'x'. neighboring values are represented alphabetically, so (inc x) would ;; be named 'y' and (dec x) would be named 'w'. ;; ;; s1 is a string. similarly s1, s2, and s ;; ;; for numerical values like 'offset', lower is for left and higher is for ;; right, just as for code mirror's index i. ;; ;; sp is a 'skipping predicate'. these are used with a trampoline wrapper like ;; 'skip' to move along the text in code mirror until our predicate is ;; satisfied. in many cases, the predicate will push and pop openers/closers off ;; a stack and when the stack is empty and we satisfy some additional condition, ;; then we stop and return the cursor. ;; ;; functions with names ending in -sp are skipping predicates. ;; ;; currently we're assuming perfect matching of openers/closers so we don't ;; actually keep track of the stack -- we just inc and dec an int until it gets ;; to 0 and our other conditions are satisfied ;; ;; any trampoline use should be limited by the cm character count, to guard ;; against infinite loops. we'll start at the limit and count down, stopping ;; when it goes negative. ;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (enable-console-print!) 
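;; Editor's illustration (not part of the original source): a sketch of the
;; skipping-predicate protocol described in the conventions above, using
;; helpers defined further down in this file. The predicate name is invented;
;; it simply answers :yes at the first closing bracket it sees and otherwise
;; passes its state along unchanged.
(comment
  (defn first-closer-sp [cm cur state]
    (let [{:keys [string type]} (get-info cm cur)]
      (if (and (is-bracket-type? type) (closer? string))
        :yes     ;; satisfied -- skip/skip-to stops and returns this cursor
        state))) ;; not satisfied -- keep the state and keep scanning right
  ;; (skip-to cm first-closer-sp) would then move the cursor up to the first
  ;; closing bracket to its right, in the spirit of move-past-parent-closer.
  )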
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; general helper methods ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (def openers #{ "(" "[" "{" }) (def closers #{ ")" "]" "}" }) (def pair {"(" ")", "[" "]", "{" "}", "\"" "\"", ")" "(", "]" "[", "}" "{"}) (defn pair? "true if the two strings are a matching open/close pair " [s1 s2] (= (pair s1) s2)) (defn opener? [s] (contains? openers s)) (defn closer? [s] (contains? closers s)) (defn is-bracket-type? [t] (and t (str/starts-with? t "bracket"))) (defn char-count "returns the number of characters in the code mirror instance" [cm] (-> cm .getValue count)) (defn cursor "get cur, the position of the cursor" ([cm] (.getCursor cm)) ;; get current cursor ([cm i] (.posFromIndex cm i))) ;; get cursor for index i (defn index "get the index i for the cursor's position" ([cm] (index cm (cursor cm))) ([cm cur] (when cur (.indexFromPos cm cur)))) (defn bof? "true if at beginning of file" [cm cur] (zero? (index cm cur))) (defn eof? "true if at end of file" [cm cur] (= (index cm cur) (char-count cm))) (defn token "get token at cursor" [cm cur] (.getTokenAt cm cur true)) (defn get-type "get the type at the current cursor." ([cm] (get-type cm (cursor cm))) ([cm cur] (.-type (token cm cur)))) (defn get-string "gets the string of the current token" ([cm] (get-string cm (cursor cm))) ([cm cur] (when cur (.-string (token cm cur))))) (defn line-length "gets the length of the current line" ([cm] (line-length cm (cursor cm))) ([cm cur] (when cur (count (.getLine cm (.-line cur)))))) (defn last-token "returns the last token of a line" [cm cur] (->> cur .-line (.getLineTokens cm) last)) (defn last-cur "returns the last cursor of a line" ([cm] (last-cur cm (cursor cm))) ([cm cur] (let [end (.-end (last-token cm cur)) diff (- end (.-ch cur))] (cursor cm (+ diff (index cm cur)))))) (defn get-info "make info from CodeMirror more conveniently accessed by our code. we'll use destructuring and just name what we rant. hypothesizing that performance hit won't be that bad." ([cm] (get-info cm (cursor cm))) ([cm cur] (when cur (let [tok (token cm cur) eof (eof? cm cur) bof (bof? cm cur) i (index cm cur) left-cur (when-not bof (cursor cm (dec i))) right-cur (when-not eof (cursor cm (inc i)))] {:cur cur :line (.-line cur) :ch (.-ch cur) :i i :tok tok :string (.-string tok) :start (.-start tok) :end (.-end tok) :type (.-type tok) :top (-> tok .-state .-indentStack nil?) ;; true for toplevel :eof eof :bof bof :left-char (when-not bof (.getRange cm left-cur cur)) :right-char (when-not eof (.getRange cm cur right-cur)) :left-cur left-cur :right-cur right-cur :mode (.-mode (.-state tok))})))) (defn comment-or-string? "true if the type is comment or string. a lot of editing behavior (like movement and deletion) is similar when you are in a string or in a comment, so often this is the predicate for that behavior." [type] (or (= type "comment") (= type "string"))) (defn indent-line "indent the current line" [cm] (->> cm cursor .-line (.indentLine cm))) (defn escaped-char-name? [stg] (let [escnames #{"\\newline", "\\space", "\\tab", "\\formfeed", "\\backspace", "\\return"}] (when (escnames stg) (dec (count stg))))) (defn in-escaped-char? "returns true if backslash is to the left and cursor is on an escaped char" ([cm cur] (in-escaped-char? 
cm cur 0)) ([cm cur offset] (let [{:keys [ch start end type]} (get-info cm cur)] #_(js/console.log start ch end type) (and (= type "string-2") (and (< start ch) (< ch end)))))) (defn escaped-char-to-left? "returns true if an escaped char and its backslash are to the left" [cm cur] (let [{:keys [ch end type string]} (get-info cm cur)] (and (= type "string-2") (= ch end)))) (defn escaped-char-to-right? "returns true if an escaped char and its backslash is to the right" [cm cur] (let [cur+ (cursor cm 0) {:keys [type]} (get-info cm cur)] (and (not= type "string-2")) (set! cur+.line cur.line) (set! cur+.ch (inc cur.ch)) (in-escaped-char? cm cur))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-open-round ( ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn insert "insert text at current cursor. move cursor to the end of inserted text minus optional offset. the offset is for moving the cursor immediately after the insert and before returning. example: inserting a pair of brackets and placing the cursor inside the pair. this returns the new cursor." ([cm text] (insert cm text 0)) ([cm text offset] (insert cm text offset (cursor cm))) ([cm text offset cur] (let [{:keys [line ch]} (get-info cm cur)] (.replaceRange cm text cur) (.setCursor cm line (+ (+ ch (count text)) offset)) (cursor cm)))) (defn ^:export open-round "paredit-open-round exposed for keymap. unlike traditional emacs paredit, this supports brackets [] {} () but not double-quote" ([cm] (open-round cm "(")) ([cm c] (let [{:keys [type left-char right-char]} (get-info cm)] (cond ;; escaping the next character: (= "\\" left-char) (insert cm c) ;; typing in a comment or string as-is: (comment-or-string? type) (insert cm c) ;; insert a pair, pad with a space to the left and/or right if necessary, ;; and move the cursor into the pair before returning: :else (let [need-left-padding (and (not= " " left-char) (not (opener? left-char))) need-right-padding (and (not= " " right-char) (not (closer? right-char)))] (insert cm (str (when need-left-padding " ") c (pair c) (when need-right-padding " ")) (if need-right-padding -2 -1))))))) (defn ^:export open-brace "open curly brace with matching close brace" ([cm] (open-round cm "{"))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-close-round ) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn parent-closer-sp ;; -sp see 'skipping predicate' below "finds the *parent* closing bracket. behavior when used with skip: pushes opening brackets that appear along the way on a stack. closing brackets pop them off. stops when encountering a closing bracket while the stack is empty. assuming the cm has matched brackets for now. moves to the right." [cm cur state] (let [{:keys [string type top eof]} (get-info cm cur)] (cond ;; 'push' opener on our 'stack': (and (is-bracket-type? type) (opener? string)), (inc state) ;; stop if we see a closer while our 'stack' is empty: (and (is-bracket-type? type) (closer? string) (zero? state)), :yes ;; closer means we 'pop' off the 'stack', unless eof (and (is-bracket-type? type) (closer? string) (not= 0 state) eof), :eof ;; closer means we 'pop' off the 'stack': (and (is-bracket-type? type) (closer? string) (not= 0 state)), (dec state) ;; we can* rely on code mirror to tell us if we're at the top ;; level: (* NOT in [cljsjs/codemirror "5.21.0-2"] ... but maybe ;; in a later version ... 
until we can figure out how to refer ;; to the latest codemirror in our tests, the tests will have to ;; live here in order to get the codemirror that is included in ;; the script tag on the demo index.html page) ;; TODO: investigate whether we can use this, given CodeMirror version: ;; top, :stop ;; stack stays unchanged. move to the next thing: :default, state))) (defn token-start "returns the cursor for the start of the current token" [cm cur] (let [{:keys [i line start ch type]} (get-info cm cur)] (cursor cm (- i (- ch start))))) (defn token-end "returns the cursor for the end of the current token" ([cm cur] (token-end cm cur 0)) ([cm cur offset] (let [{:keys [i line end ch type]} (get-info cm cur)] (cursor cm (+ i offset (- end ch)))))) (defn token-end-index "take an index. get its token. return index of that token's end." [cm i] (->> i (cursor cm) (token-end cm) (index cm))) (defn guard [] (println "past")) (defn skip-trampoline-helper "returns the cursor that satsifies skipping predicate 'sp' or nil if eof reached. does this by making sp something we can trampoline. sp takes these args: cm, cursor, state. counts down 'n' to 0 in order to guard against infinite loops." [cm cur sp state n] (if (>= n 0) (let [{:keys [left-cur right-cur i]} (get-info cm cur) result (sp cm cur state)] #_(js/console.log result) (case result :eof nil :stop nil :yes cur :left left-cur :right right-cur :end-of-this-token (token-end cm cur) :start-of-this-tok (token-start cm cur) (let [next-cur (token-end cm cur 1)] #_(js/console.log next-cur) (fn [] ;; for trampoline (skip-trampoline-helper cm next-cur sp result (dec n)))))) (guard))) (defn skip-trampoline-helper-left "like skip-trampoline-helper but in the opposite direction." [cm cur sp state n] (if (>= n 0) (let [{:keys [left-cur right-cur i start ch]} (get-info cm cur) result (sp cm cur state)] #_(js/console.log result) (case result :bof nil :stop nil :yes left-cur :right right-cur :end-of-this-token (token-end cm cur) :start-of-this-tok (token-start cm cur) (let [next-cur (if (= ch start) (cursor cm (dec i)) (cursor cm (- i (- ch start))))] (fn [] ;; for trampoline (skip-trampoline-helper-left cm next-cur sp result (dec n)))))) (guard))) (defn skip "returns the cursor that satisfies sp or nil if either eof reached or we found out sp could not be satisfied. see skip-to for more info." ([cm sp] (skip cm sp (cursor cm))) ([cm sp cur] (when-let [right-cur (:right-cur (get-info cm cur))] (trampoline skip-trampoline-helper cm right-cur sp 0 (char-count cm))))) (defn skip-left "returns the cursor that satisfies sp or nil if either bof reached or we found out sp could not be satisfied. see skip-to for more info." [cm sp] (when-let [cur (cursor cm)] (trampoline skip-trampoline-helper-left cm cur sp 0 (char-count cm)))) (defn delete-whitespace "if cur is in whitespace, deletes it optionally without ruining indentation." ([cm] (delete-whitespace cm (cursor cm) true)) ([cm cur] (delete-whitespace cm cur true)) ([cm cur indent-after] (let [{:keys [start end line ch i type]} (get-info cm cur) c1 (cursor cm (+ i (- start ch))) c2 (cursor cm (+ i (- end ch)))] (when (nil? type) (.replaceRange cm "" c1 c2) (if indent-after (.indentLine cm line)))))) ;; todo (defn just-one-space ([cm] (just-one-space cm (cursor cm) true)) ([cm cur] (just-one-space cm cur true)) ([cm cur indent-after] (let [{:keys [start end line ch i type]} (get-info cm cur) c1 (cursor cm (+ i (- start ch))) c2 (cursor cm (+ i (- end ch)))] (when (nil? 
type) (.replaceRange cm " " c1 c2) (if indent-after (.indentLine cm line)))))) (defn skip-to "moves to the cursor that satisfies sp or doesn't move if eof reached. starts at current cursor for cm. sp stands for 'skipping predicate' which returns: - :yes if sp is satisfied. - :stop if we know we will not be satisfied with any future result. - :left if the cursor to the left is what we want. - new non-nil state if not satisfied. this state is used with the next iteration after we skip to the end of the current token. an sp takes cm, cursor, state." [cm sp] (when-let [cur' (skip cm sp)] (.setCursor cm cur') cur')) (defn move-past-parent-closer "moves cursor to just outside the closing bracket, or if there is none then doesn't move at all." ;; emacs has this extending the current selection if there is one. [cm] (when-let [cur (skip-to cm parent-closer-sp)] (delete-whitespace cm (:left-cur (get-info cm))) cur)) (defn ^:export close-round "paredit-close-round exposed for keymap. skips to end of current list even if it ends with ] or }. but if you're in a string or comment then this just inserts the bracket. requires CodeMirror mode's parser uses state with indentStack because that's how we can tell we've reached the end of a top level form and avoid entering the next top level form. 's' is the character as a string." ([cm] (close-round cm ")")) ([cm s] (let [{:keys [type left-char]} (get-info cm)] (cond (= "\\" left-char) (insert cm s) (comment-or-string? type) (insert cm s) :else (move-past-parent-closer cm))))) (defn ^:export close-brace "close curly brace like close-rond" ([cm] (close-round cm "}"))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-close-round-and-newline paredit-open-square paredit-close-square ;; paredit-doublequote ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn ^:export close-round-and-newline ([cm] (close-round-and-newline cm ")")) ([cm s] (if (comment-or-string? (get-type cm)) (insert cm s) (when (close-round cm s) (.execCommand cm "newlineAndIndent"))))) ;; question: is there a better way than .execCommand? (defn ^:export open-square [cm] (open-round cm "[")) (defn ^:export close-square [cm] (close-round cm "]")) (defn ^:export doublequote [cm] (let [{:keys [type left-char right-char ch cur]} (get-info cm)] (cond ;; about to escape this char so insert as-is: (= "\\" left-char) (insert cm "\"") ;; we're in a string so escape this doublequote: (= type "string") (insert cm "\\\"") ;; we're in code. pad with a space to the left and/or right if necessary ;; to separate it from neighboring code. after inserting, move the cursor ;; to between the quotes: :else (insert cm (str (when (not= " " left-char) " ") ;; left padding "\"\"" (when (and (not= " " right-char) (not= "\n" right-char)) " ")) ;; right padding (if (or (= " " right-char) (= "\n" right-char)) -1 -2))))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-meta-doublequote M-" ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn word? [type] (or (= type "atom") (= type "builtin") (= type "number") (= type "variable") (= type "keyword") (= type "meta"))) (defn at-a-word? "returns true if at a word of code" [cm cur] (word? (get-type cm cur))) (defn in-a-word? "true if in a word AND not at the end of that word. false if in whitespace or a string or a comment or at a bracket." [cm] (let [cur (cursor cm), i (index cm cur)] (and (at-a-word? 
cm cur) (not= i (token-end-index cm i))))) (defn start-of-a-string? "returns true if at the start of a string." [cm cur] (let [{:keys [string type start ch left-char]} (get-info cm cur)] #_(js/console.log right-char type string ch start) (and (= left-char "\"") (= type "string") (= 1 (- ch start))))) (defn start-of-a-string2? [cm cur] (let [i (index cm cur) p2 (cursor cm (inc i))] #_(js/console.log cur p2) (start-of-a-string? cm p2))) (defn end-of-a-string? "returns true if just to the right of a closing doublequote of a string." [cm cur] (let [{:keys [type ch end string left-char]} (get-info cm cur)] #_(js/console.log left-char type string ch end) (and (= type "string") (= ch end) (= left-char "\"")))) (defn end-of-next-sibling-sp ;; -sp see 'skipping predicate' "returns the cursor at the end of the sibling to the right or nil if no sibling or eof. does not exit the containing form. does this by skipping past any comments or whitespace, and branches depending on whether an opening bracket or doublequote is encountered (sp satisfied when encountering a closing bracket that empties the stack) vs the beginning of a word (return token at the end of the word). assuming the cm has matched brackets for now." [cm cur stack] (let [dq "\"" info (get-info cm cur) {:keys [string type eof ch end tok]} info stack-empty (zero? stack) one-left (= 1 stack) ;; for multi-line strings start-of-stg? (start-of-a-string? cm cur) end-of-stg? (end-of-a-string? cm cur) empty-stg? (when end-of-stg? (and (= tok.type "string") (= tok.string "\"\""))) string-extends (or (not= dq (last string)) (= "\\" (last (drop-last string))))] #_(js/console.log stack stack-empty string type ch end cur string-extends #_(escaped-char-to-right? cm cur) start-of-stg? end-of-stg?) (cond ;; we return a keyword when we know where to stop, stack otherwise. ;; skip whitespace (or (nil? type) (and (= type "error") (= string ","))), stack (and (escaped-char-to-left? cm cur) stack-empty), :yes (and (word? type) stack-empty (= ch end)), :yes (and (is-bracket-type? type) (closer? string) one-left), :yes (and end-of-stg? one-left), :yes eof, :eof ;; skip comments (= type "comment"), stack ;; strings ............................................................... empty-stg? :end-of-this-token ;; our starting point is at beginning of a string and it doesn't extend (and start-of-stg? (and (not string-extends) stack-empty)), :end-of-this-token ;; We are in a nested form, at start of string, but it doesn't extend (and start-of-stg? (not stack-empty) (not string-extends)), stack ;; entering a multi-line string, push " onto stack (and start-of-stg? string-extends), (inc stack) ;; at end of string and stack already empty, we must have started in the ;; middle of the string (and end-of-stg? stack-empty), :stop ;; at end of string and stack about to be empty, we've found the end of ;; the string -- handled before checking for eof above ;; in string, the end of this string is our goal ... ;; ... but the end of this string is on a different line: (and (= type "string") #_(not stack-empty) #_one-left string-extends), stack (and (= type "string") stack-empty (not string-extends)), :end-of-this-token ;; in string, the end of this string is our goal ... ;; ... the end is on this line: (and (= type "string") one-left), :end-of-this-token ;; in string, need to get out of this form, pop stack (and (= type "string") (not stack-empty)), (dec stack) ;; escaped chars ......................................................... 
;; inside an escaped char and the end of it is what we want (and (in-escaped-char? cm cur) stack-empty), :end-of-this-token ;; To the right of escaped char, keep going (and (escaped-char-to-right? cm cur) stack-empty), :start-of-this-tok ;; in an escaped char inside the next sibling (in-escaped-char? cm cur), stack ;; at end of an escaped char which was the next sibling -- handled before ;;checking for eof above ;; at end of an escaped char inside the next sibling (escaped-char-to-left? cm cur), stack ;; words ................................................................. ;; reached the end of a word which was the next sibling -- handled before ;;checking for eof above ;; in a word that is the next sibling, the end of it is what we want (and (word? type) stack-empty), :end-of-this-token ;; in a word that is inside the next sibling (word? type), stack ;; brackets .............................................................. ;; push opener on stack (and (is-bracket-type? type) (opener? string)), (inc stack) ;; we've reached the end of a form -- handled before checking for eof ;;above ;; there was no sibling (and (is-bracket-type? type) (closer? string) stack-empty), :stop ;; passing through the guts of a sibling form (.. (guts)|..) (and (is-bracket-type? type) (closer? string)), (dec stack) :default, :stop))) (defn end-of-next-sibling "get the cursor for the end of the sibling to the right." ([cm] (skip cm end-of-next-sibling-sp)) ([cm cur] (when cur (.setCursor cm cur) (skip cm end-of-next-sibling-sp)))) #_(let [cm (get-ddb [:tabs :extns :ed3 :cms :$ed]) cur (.getCursor cm) info (pe/get-info cm cur) tok (info :tok)] [(pe/start-of-a-string? cm cur) (pe/end-of-a-string? cm cur) (info :left-char) (info :right-char) tok.string] #_(console.log (pe/token-end cm cur 1)) #_(console.log (pe/cursor cm (+ 9 1 (- 8 8)))) #_(pe/get-info cm (pe/cursor cm (+ 9 1))) #_(pe/end-of-a-string? cm cur)) (defn start-of-prev-sibling-sp ;; -sp see 'skipping predicate' "returns the cursor at the start of the sibling to the left or nil if no sibling or eof. does not exit the containing form. does this by skipping past any comments or whitespace, and branches depending on whether a bracket or doublequote is encountered (sp satisfied when encountering an opening bracket that empties the stack) vs the beginning of a word (return token at the start of the word). assuming the cm has matched brackets for now." [cm cur stack] (let [info (get-info cm cur) {:keys [string type bof ch start tok]} info stack-empty (zero? stack) one-left (= 1 stack) string-extends (not= "\"" (first string)) ; for multiline strings start-of-stg? (start-of-a-string? cm cur) end-of-stg? (end-of-a-string? cm cur) empty-stg? (when start-of-stg? (and (= tok.type "string") (= tok.string "\"\"")))] #_(js/console.log stack stack-empty string type ch start cur string-extends ;;(escaped-char-to-left? cm cur) ;;(escaped-char-to-right? cm cur) start-of-stg? end-of-stg?) (cond ;; we return a keyword when we know where to stop, stack otherwise. ;; check these before checking for bof: ;; in a multi-line string, keep searching for the first line of it: (and start-of-stg? one-left string-extends), stack ;; at the first line of a string and we want its opening doublequote: (and start-of-stg? one-left), :yes ;; at the start of a word: (and (word? type) stack-empty (= ch start)), :yes ;; at the opener we were looking for: (and (is-bracket-type? type) (opener? string) one-left), :yes bof, :bof; reached beginning of file (and (start-of-a-string2? 
cm cur) (not stack-empty)), stack #_(dec stack) ;; at the start of an escaped char: (and (escaped-char-to-right? cm cur) stack-empty), stack ;; skip whitespace (or (nil? type) (and (= type "error") (= string ","))), stack ;; skip comments (= type "comment"), stack ;; strings ............................................................... empty-stg? :start-of-this-tok ;; our starting point is at end of a string and it doesn't extend (and end-of-stg? (and (not string-extends) stack-empty)), :start-of-this-tok ;; We are in a nested form, at end of string, but it doesn't extend (and end-of-stg? (not stack-empty) (not string-extends)) stack ;; entering a multi-line string from the right; push " onto stack (and end-of-stg? string-extends), (inc stack) ;; at start of string and stack already empty, we must have started in ;; the middle of the string. (and start-of-stg? stack-empty), :stop ;; at start of string and stack about to be empty, we've found the end of ;; the string -- handled before check for bof above ;; in string, the start of it is our goal ... ;; ... but the start of this string is on a higher line: (and (= type "string") #_(not stack-empty) string-extends), stack ;; it's on this line: (and (= type "string") stack-empty (not string-extends)), :start-of-this-tok ;; in string, the start of this string is our goal ... ;;; ... and the start is on this line: (and (= type "string") one-left) :start-of-this-tok ;; in string, need to get out of this form, pop stack (and (= type "string") (not stack-empty)), (dec stack) ;; escaped chars ......................................................... ;; inside an escaped char and the start of it is what we want (and (in-escaped-char? cm cur) stack-empty), :start-of-this-tok ;; To the left of escaped char, keep going (and (escaped-char-to-left? cm cur) stack-empty), :start-of-this-tok ;; in an escaped char inside the prev sibling (or (in-escaped-char? cm cur) (escaped-char-to-left? cm cur)), stack ;; at start of an escaped char which was the prev sibling -- handled ;; before check for bof above ;; at start of an escaped char inside the prev sibling (escaped-char-to-right? cm cur), stack ;; words ................................................................. ;; reached the start of a word which was the prev sibling -- handled ;; before check for bof above ;; in a word that is the prev sibling, the start of it is what we want (and (word? type) stack-empty), :start-of-this-tok ;; in a word that is inside the prev sibling (word? type), stack ;; brackets .............................................................. ;; push closer on stack (and (is-bracket-type? type) (closer? string)), (inc stack) ;; we've reached the start of a form -- handled before check for ;; bof above ;; there was no prev sibling, avoid exiting the form (and (is-bracket-type? type) (opener? string) stack-empty), :stop ;; passing through the guts of a sibling form (.. X(guts)..) (and (is-bracket-type? type) (opener? string)), (dec stack) :default :stop))) (defn start-of-prev-sibling "return the cursor at the start of the sibling to the left." ([cm] (skip-left cm start-of-prev-sibling-sp)) ([cm cur] (when cur (.setCursor cm cur) (skip-left cm start-of-prev-sibling-sp)))) (defn escape-string "escapes a string, replacing backslashes and doublequotes. wraps result in a new pair of doublequotes." 
[s] (str "\"" (-> s (str/replace #"[\\]" "\\\\") (str/replace #"[\"]" "\\\"")) "\"")) (defn stringify-selection "turns selection into a string, escaping backslashes and doublequotes" [cm] (->> cm .getSelection escape-string (.replaceSelection cm))) (defn stringify "turns the region from cur-1 to cur-2 into a string, escaping backslashes and doublequotes" [cm cur-1 cur-2] (.setSelection cm cur-1 cur-2) (stringify-selection cm) (.setCursor cm (cursor cm (inc (index cm cur-1))))) (defn exit-string "moves cursor right, out of the current string" [cm] (let [{:keys [type i ch end]} (get-info cm)] (when (= type "string") (.setCursor cm (cursor cm (+ i (- end ch))))))) (defn in-string? "returns true if token is in the middle of a string." ([cm] (in-string? cm (cursor cm))) ([cm cur] (let [type (get-type cm cur)] (or (= type "string") (= type "string-2"))))) (defn ^:export meta-doublequote "paredit meta-doublequote exposed for keymap. if in a string, moves cursor out of the string to the right. if in a comment, insert a doublequote. if in an escaped char, do nothing. otherwise starts a string that that continues to the end of the next form, escaping backslashes and doublequotes." [cm] (let [{:keys [type eof cur]} (get-info cm)] (cond eof :do-nothing (in-escaped-char? cm cur) :do-nothing (in-string? cm cur) (exit-string cm) (= type "comment") (insert cm "\"") (in-a-word? cm) (stringify cm cur (token-end cm cur)) :else (stringify cm cur (end-of-next-sibling cm))))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-comment-dwim ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn left "given a pair of cursors c1 and c2, returns the left-most one" [cm c1 c2] (let [i1 (index cm c1) i2 (index cm c2)] (if (< i1 i2) c1 c2))) (defn right "given a pair of cursors c1 and c2, returns the right-most one" [cm c1 c2] (let [i1 (index cm c1) i2 (index cm c2)] (if (< i1 i2) c2 c1))) (defn selection-info "like get-info but for the first selection. gets the cursor to the left of the selection, the start, the end, the text selected, the starting and ending line numbers. nil if nothing selected." [cm] (when (.somethingSelected cm) (let [first-sel (-> cm .listSelections first) text (-> cm .getSelections first) anchor (.-anchor first-sel) head (.-head first-sel) left-of-start (left cm anchor head) start-cur (cursor cm (inc (index cm left-of-start))) end-cur (right cm anchor head)] [left-of-start start-cur end-cur text (.-line start-cur) (.-line end-cur)]))) (defn get-types "get the types from cursors c1 to c2. assumes 1 is to the left of 2 and not vice versa." [cm c1 c2] (loop [types [], cur c1] (let [{:keys [type right-cur]} (get-info cm cur) types' (conj types type)] (if (= cur c2) types' (recur types' right-cur))))) (defn selection-completely-satisfies-pred? "true if every position's type satisfies pred, for the entire (first) selection" [cm pred] (when-let [[_ c1 c2] (selection-info cm)] (every? pred (get-types cm c1 c2)))) (defn selection-completely-whitespace? [cm] (selection-completely-satisfies-pred? cm nil?)) (defn not-code? [type] (or (nil? type) (= type "comment"))) (defn selection-completely-non-code? [cm] (selection-completely-satisfies-pred? 
cm not-code?)) (defn to-comment "starts each line in 's' with ;; and appends 'postscript'" [s postscript] (let [cmnt (->> s str/split-lines (map #(str/replace % #"^" ";; ")) (str/join "\n"))] (str cmnt "\n" postscript))) (defn uncomment "removes leading whitespace and semicolons from lines in 's'" [s] (->> s str/split-lines (map #(str/replace % #"^\s*;+" "")) (str/join "\n"))) (defn indent-lines "indents lines from a to z (line numbers). assumes a is before z." [cm a z] (doseq [line (range a (inc z))] (.indentLine cm line))) (defn uncomment-selection "removes whitespace and leading semicolons from selection, replaces selection with the result, indents lines affected." [cm] (when-let [[_ c1 c2 text] (selection-info cm)] (.replaceSelection cm (uncomment text)) (indent-lines cm (.-line c1) (.-line c2)))) (defn append "returns the result of appending the applicable part of 'tok' to 's'. this is for collecting all the text on a line after 'ch'" [ch s tok] (if (< ch (.-end tok)) (str s (subs (.-string tok) (- (max ch (.-start tok)) (.-start tok)))) s)) (defn get-text-to-end-of-line [cm cur] (let [toks (.getLineTokens cm (.-line cur)) ch (.-ch cur)] (reduce (partial append ch) "" toks))) (defn comment-selection [cm] (let [[left-of-start c1 c2 text l1 l2] (selection-info cm) text-after-selection (get-text-to-end-of-line cm c2) code-follows-selection (not= text-after-selection "") end-of-line (last-cur cm) line-to (if code-follows-selection (inc l2) l2)] (when code-follows-selection (.setSelection cm left-of-start end-of-line)) (.replaceSelection cm (to-comment text text-after-selection)) (indent-lines cm l1 line-to))) (defn line-ends-with-comment? "true if the line ends with a comment" [cm] (= "comment" (.-type (last-token cm (cursor cm))))) (defn indent-current-line [cm] (->> cm cursor .-line (.indentLine cm))) (defn go-to-comment "moves cursor to ;;X" [cm] (let [cur (cursor cm) ch (.-ch cur) i (index cm cur) c-tok (last-token cm cur) start (.-start c-tok) offset (count (take-while #(= ";" %) (.-string c-tok)))] (.setCursor cm (cursor cm (+ i (- start ch) offset))))) (defn insert-spaces-to-col-40 "presses spacebar until we are at col 40" [cm] (let [ch (-> cm cursor .-ch)] (when (< ch 40) (insert cm (str/join (repeat (- 40 ch) " ")))))) (defn go-to-comment-and-indent "moves cursor to the comment on the line and makes sure the comment starts on column 40 or greater. assumes last token is a comment" [cm] (indent-current-line cm) (let [cur (cursor cm) ch (.-ch cur) i (index cm cur) comment-start (.-start (last-token cm cur))] (.setCursor cm (cursor cm (+ i (- comment-start ch)))) (insert-spaces-to-col-40 cm) (go-to-comment cm))) (defn betw-code-and-line-end? "true if code is to the left and whitespace* is to the right. assumes you already know line does not end with a comment." [cm] (let [cur (cursor cm) toks (.getLineTokens cm (.-line cur)) ch (.-ch cur) tests (map #(or (<= (.-end %) ch) (nil? (.-type %))) toks)] (and (seq toks) ; the line is not empty (every? true? tests) ; there's only whitespace to the right (some #(not (nil? (.-type %))) toks)))) ; there's code on the left (defn move-to-end-of-line "moves cursor to end of last non-whitespace token on a line. returns a vector of new index, new ch, and new cursor." ([cm] (move-to-end-of-line cm (cursor cm))) ([cm cur] (let [end (->> cur .-line (.getLineTokens cm) (remove #(nil?
(.-type %))) last .-end) ch (.-ch cur) i (index cm cur) i' (+ i (- end ch)) cur' (cursor cm i')] (.setCursor cm cur') [i' (.-ch cur') cur']))) (defn select-rest-of-line "selects from current position to the end of the line" [cm] (.setSelection cm (cursor cm) (last-cur cm))) (defn delete-to-end-of-line "deletes from current position to the end of the line" [cm] (.replaceRange cm "" (cursor cm) (last-cur cm))) (defn create-comment-at-end "starts a ; comment at column 40 or greater and moves to it." [cm] (indent-current-line cm) (move-to-end-of-line cm) (insert cm " ") (insert-spaces-to-col-40 cm) (insert cm "; ") (delete-to-end-of-line cm)) (defn line-is-whitespace? "returns true if line is all whitespace" [cm] (->> cm cursor .-line (.getLineTokens cm) (every? #(nil? (.-type %))))) (defn create-line-comment "creates and indents a ;; comment" [cm] (insert cm ";; ") (delete-to-end-of-line cm) (indent-current-line cm)) (defn new-line-and-comment "creates and indents a ;; comment on a new line" [cm] (indent-current-line cm) (insert cm "\n\n") (.execCommand cm "goLineDown") (.execCommand cm "goLineDown") (indent-current-line cm) (.execCommand cm "goLineUp") (create-line-comment cm)) (defn insert-line-comment-here "creates and indents a ;; comment on this line" [cm] (insert cm "\n") (.execCommand cm "goLineDown") (indent-current-line cm) (.execCommand cm "goLineUp") (create-line-comment cm)) (defn in-code? "returns true if token is in the middle of code. assumes you've already ruled out comments." [cm] (let [{:keys [type start end ch]} (get-info cm)] (and (< start ch) (< ch end) (not (nil? type))))) (defn in-whitespace? "returns true if token is to the right of whitespace" [cm] (-> cm get-type nil?)) (defn code-to-left? "returns true if there's any code to the left of cursor. assumes you've already ruled out comments so only looks for non nil tokens" [cm] (let [cur (cursor cm) toks (.getLineTokens cm (.-line cur)) ch (.-ch cur) code (map #(and (not (nil? (.-type %))) (or (<= (.-end %) ch) (and (< (.-start %) ch) (< ch (.-end %))))) toks)] (and (seq toks) ; the line is not empty (some true? code)))) ; there's one token that contains code to the left (defn ^:export comment-dwim [cm] (cond (selection-completely-whitespace? cm) :do-nothing (selection-completely-non-code? cm) (uncomment-selection cm) (.somethingSelected cm) (comment-selection cm) (line-ends-with-comment? cm) (go-to-comment-and-indent cm) (betw-code-and-line-end? cm) (create-comment-at-end cm) (in-code? cm) (create-comment-at-end cm) (in-string? cm) (create-comment-at-end cm) (line-is-whitespace? cm) (create-line-comment cm) (and (code-to-left? cm) (in-whitespace? cm)) (new-line-and-comment cm) (in-whitespace? cm) (insert-line-comment-here cm) :default :do-nothing)) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-newline ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; seems like code mirror behaves as desired already ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-forward-delete ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn backspace "delete 1 or n char to left" ([cm] (backspace cm 1)) ([cm n] (let [-n #(- % n) cur (cursor cm) cur0 (->> cur (index cm) -n (cursor cm))] (.replaceRange cm "" cur0 cur)))) (defn right-cur-would-be-whitespace? "true if this position would be whitespace if we pressed the spacebar." 
[cm cur right-cur] (let [original-cur (cursor cm) _ (insert cm " " 0 cur) answer (nil? (get-type cm right-cur))] (backspace cm) (.setCursor cm original-cur) answer)) (defn closing-delim? "returns true for closing brackets and for closing double-quotes" [cm cur] (let [{:keys [string type left-char right-cur]} (get-info cm cur)] ;;(println "closing delim?" type string left-char) (or (and (is-bracket-type? type) (closer? left-char)) (end-of-a-string? cm cur) (and (= type "string") (= "\"" left-char) ;; at this point, we could be just inside the start of a string. ;; if we check the type at the position to the right, this could ;; trick us: "X""hello" ... one way to be absolutely sure we're ;; at the end of a string is to add a space temporarily and see ;; if code mirror says its type is 'null' or 'string'. (right-cur-would-be-whitespace? cm cur right-cur))))) (defn opening-doublequote? "returns true if cur is just to the right of an opening doublequote" ([cm cur] (let [{:keys [type left-char right-cur]} (get-info cm cur)] (opening-doublequote? cm type left-char right-cur))) ([cm type left-char right-cur] (and (= type "string") (= "\"" left-char) right-cur (= "string" (get-type cm right-cur))))) (defn closing-doublequote? "returns true if cur is just to the right of a closing doublequote" [cm cur] (let [{:keys [type left-char right-cur]} (get-info cm cur) right-type (get-type cm right-cur)] (and (= type "string") (= "\"" left-char) (not= right-type "string")))) (defn opening-delim? "returns true for opening brackets and for opening double-quotes" [cm cur] (let [{:keys [string type left-char right-cur]} (get-info cm cur)] (or (and (is-bracket-type? type) (opener? left-char)) (opening-doublequote? cm type left-char right-cur)))) (defn opening-delim-for-empty-pair? "returns true for an opening bracket of an empty pair ()" [cm cur] (let [{:keys [left-char right-char right-cur]} (get-info cm cur)] (and (opening-delim? cm cur) right-cur (closing-delim? cm right-cur) (pair? left-char right-char)))) (defn opening-delim-for-non-empty-pair? "returns true for an opening bracket of a pair that contains one or more chars." [cm] (let [{:keys [left-char right-char cur]} (get-info cm)] (and (opening-delim? cm cur) (not (pair? left-char right-char))))) (defn move "moves the cursor by 'offset' places, negative for left. returns the cursor." [cm offset] (->> cm index (+ offset) (cursor cm) (.setCursor cm)) (cursor cm)) (defn delete "delete 1 or n char to right" ([cm] (delete cm 1)) ([cm n] (let [+n #(+ % n) cur (cursor cm) cur2 (->> cur (index cm) +n (cursor cm))] (.replaceRange cm "" cur cur2)))) (defn whitespace? "returns true if cursor indicates whitespace" [cm cur] (let [info (get-info cm cur)] (and (not (nil? info)) (nil? (:type info))))) (defn bracket? "true if cursor info indicates opening/closing bracket or quote" [cm cur] (let [{:keys [type left-char] :as info} (get-info cm cur)] (or (is-bracket-type? type) (and (= "string" type) (= "\"" left-char))))) (defn select-pair "assumes a pair of brackets surround the cursor. selects the pair." [cm] (let [i (->> cm cursor (index cm)) c1 (->> i dec (cursor cm)) c2 (->> i inc (cursor cm))] (.setSelection cm c1 c2))) (defn delete-selection [cm] (.replaceSelection cm "")) (defn delete-pair "assumes a pair of brackets surround the cursor. deletes the pair." 
[cm] (backspace cm) (delete cm)) (defn move-right [cm] (move cm 1)) (defn move-left [cm] (move cm -1)) (defn ^:export forward-delete "paredit-forward-delete exposed for keymap" [cm] (let [{:keys [cur right-cur] :as info} (get-info cm)] (cond (.somethingSelected cm) (delete-selection cm) (whitespace? cm right-cur) (delete cm) (not (bracket? cm right-cur)) (delete cm) (opening-delim? cm right-cur) (move-right cm) (opening-delim-for-empty-pair? cm cur) (delete-pair cm) :default :do-nothing))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-backward-delete ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn ^:export backward-delete "paredit backward delete exposed for keymap" [cm] (let [cur (cursor cm)] (cond (.somethingSelected cm) (delete-selection cm) (in-escaped-char? cm cur) (delete-pair cm) (escaped-char-to-left? cm cur) (backspace cm 2) (opening-delim-for-non-empty-pair? cm) :do-nothing (opening-delim-for-empty-pair? cm cur) (delete-pair cm) (closing-delim? cm cur) (move-left cm) :default (backspace cm)))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-kill ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn in-regular-string? "returns true if token is in the middle of a string." [cm cur] (or (opening-doublequote? cm cur) (and (= "string" (get-type cm cur)) (not (closing-doublequote? cm cur))))) (defn str-ends-on-another-line? "true if these values are from a string token that ends on another line" [type string] (and (= "string" type) (not= "\"" (last string)))) (defn go-to-end-of-string "moves cursor to end of the string you're in (but still inside the closing doublequote). assumes you're in a string. the end could be on a different line from where you start" ([cm] (go-to-end-of-string cm (cursor cm))) ([cm cur] (let [{:keys [left-char right-cur type string ch end]} (get-info cm cur)] (cond (nil? type) (go-to-end-of-string cm right-cur) (str-ends-on-another-line? type string) (do (move-to-end-of-line cm cur), (move cm 2), (go-to-end-of-string cm)) (opening-doublequote? cm type left-char right-cur) (do (move cm 1), (go-to-end-of-string cm)) (and (= "string" type)) (move cm (- end ch 1 )) :default cur)))) (defn select-rest-of-string "assumes you are in a string." [cm] (let [c1 (cursor cm) c2 (go-to-end-of-string cm c1)] (.setSelection cm c1 c2))) (defn betw-code-and-comment? "true if code is to the left and whitespace* comment* is to the right." [cm cur] (when cur (let [toks (.getLineTokens cm (.-line cur)) ch (.-ch cur) tests (map #(or (<= (.-end %) ch) (or (nil? (.-type %)) (= "comment" (.-type %)))) toks)] (and (seq toks) ; the line is not empty (every? true? tests) ; there's only junk to the right (some #(not (nil? (.-type %))) toks))))) (defn rest-of-siblings [cm] (let [c1 (cursor cm) parent-closer (skip cm parent-closer-sp) c2 (when parent-closer (cursor cm (dec (index cm parent-closer))))] [c1 c2])) (defn select-rest-of-siblings [cm] (let [[c1 c2] (rest-of-siblings cm)c1 (cursor cm)] (when c2 (.setSelection cm c1 c2)))) (defn kill-from-to [cm i j] (let [cur (cursor cm i)] (CodeMirror.emacs.kill cm cur (cursor cm j)) (.setCursor cm cur))) (defn kill-region [cm] (let [first-sel (-> cm .listSelections first) anchor (.-anchor first-sel) head (.-head first-sel)] (CodeMirror.emacs.kill cm anchor head))) (defn kill-pair "assumes a pair of brackets surround the cursor. deletes the pair." 
[cm] (select-pair cm) (kill-region cm)) (defn kill-rest-of-string [cm] (select-rest-of-string cm) (kill-region cm)) (defn kill-rest-of-line [cm] (select-rest-of-line cm) (kill-region cm)) (defn kill-rest-of-siblings [cm] (select-rest-of-siblings cm) (kill-region cm)) (defn kill-next-sibling "kills the next sibling to the right of the cursor" [cm] (let [from (cursor cm) mid (end-of-next-sibling cm from) to (if (betw-code-and-comment? cm mid) (last-cur cm mid) mid)] (when to (.setSelection cm from to) (kill-region cm)))) (defn ^:export kill "paredit kill exposed for keymap." [cm] (let [cur (cursor cm)] (cond (.somethingSelected cm) (kill-region cm) (in-regular-string? cm cur) (kill-rest-of-string cm) (betw-code-and-comment? cm cur) (kill-rest-of-line cm) (in-escaped-char? cm cur) (kill-pair cm) (code-to-left? cm) (kill-rest-of-siblings cm) :default (kill-next-sibling cm)))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-forward-kill-word M-d ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn comment? [cm cur] (= "comment" (get-type cm cur))) (defn start-of-comment? "true if block cursor is on the first ; of a line comment" [cm cur] (let [{:keys [type right-cur]} (get-info cm cur) right-type (get-type cm right-cur)] (and (not= "comment" type) (= "comment" right-type)))) (defn idx-of-next [cm i chars member max] (let [{:keys [right-char]} (get-info cm (cursor cm i))] (cond (= i max), (guard) (= member (contains? chars right-char)), i :default, (fn [] (idx-of-next cm (inc i) chars member max))))) (defn index-of-next [cm i chars] (trampoline idx-of-next cm i chars true (char-count cm))) (defn index-of-next-non [cm i chars] (trampoline idx-of-next cm i chars false (char-count cm))) (def non-word-chars (set "(){}[]|&; \n")) (def comment-start (set "; ")) (def semicolons #{";"}) (def comment-whitespace #{" " (str \tab)}) (defn end-of-next-word "assumes i is in a comment or a string. returns the i at the end of the next word (going to the right) in this comment/string" [cm i] (let [{:keys [ch start string]} (get-info cm (cursor cm i)) tail (subs string (- ch start)) word (re-find #"^\s*[\S]*" tail) length (count word) quote (if (str/ends-with? word "\"") -1 0)] (+ i length quote))) (defn start-of-prev-word "assumes i is in a comment or a string. returns the i at the start of the prev word (going to the left) in this comment/string" [cm i] (let [{:keys [ch start string]} (get-info cm (cursor cm i)) head (subs string 0 (- ch start)) last-word (re-find #"[\S]*\s*$" head) length (count last-word) quote (if (str/ends-with? last-word "\"") 1 0)] (- i length quote))) (defn kill-next-word "assumes i is in a comment or a string. kills text from i to the end of the next word in this comment/string" [cm i] (kill-from-to cm i (end-of-next-word cm (inc i))) (.setCursor cm (cursor cm i))) (defn fwd-kill-word "trampoline helper for forward-kill-word. 'mark' is the index to start killing from. 'i' is the index we're inspecting. 'n' is how many calls remaining that we'll support before stopping because of a suspected infinite loop. first call can put the count of characters in this cm instance." [cm mark i n] (let [m (dec n), j (inc i), cur (cursor cm i), right-cur (cursor cm j)] (cond (neg? n) (guard) (eof? cm right-cur) :do-nothing (whitespace? cm right-cur) #(fwd-kill-word cm mark (token-end-index cm j) m) (start-of-a-string? cm right-cur) #(fwd-kill-word cm j j m) (in-regular-string?
cm right-cur) (kill-next-word cm mark) (opening-delim? cm right-cur) #(fwd-kill-word cm j j m) (closing-delim? cm right-cur) #(fwd-kill-word cm j j m) (at-a-word? cm right-cur) (kill-from-to cm mark (token-end-index cm j)) (start-of-comment? cm cur) (let [j (index-of-next-non cm i semicolons)] #(fwd-kill-word cm j j m)) (comment? cm right-cur) (kill-next-word cm mark) :else (println "unhandled")))) (defn ^:export forward-kill-word "paredit forward-kill-word exposed for keymap." [cm] (let [i (index cm)] (trampoline fwd-kill-word cm i i (char-count cm)))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-backward-kill-word ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn start-of-token-at [cm i] (let [{:keys [ch start]} (get-info cm (cursor cm i))] (- i (- ch start)))) (defn kill-prev-word-in-comment "assumes i is in a comment. kills text from i to the beginning of the previous word in this comment" [cm i] (let [{:keys [ch start string]} (get-info cm (cursor cm i)) cur-offset-in-string (- ch start) head (subs string 0 cur-offset-in-string) tail (subs string cur-offset-in-string) word (re-find #"\S*\s*$" head) length (count word)] (kill-from-to cm (- i length) i) (.setCursor cm (cursor cm (- i length))))) (defn beginning-of-line? [cm cur] (let [{:keys [start end type] :as info} (get-info cm cur)] (and (not (nil? info)) (nil? type) (= start end 0)))) (defn bkwd-kill-skippable-comment-char? [cm cur] (let [{:keys [type left-char] :as info} (get-info cm cur)] (and (not (nil? info)) (= "comment" type) (re-matches #"\s|;" left-char)))) (defn bkwd-kill-word "trampoline helper for backward-kill-word. 'mark' is the index to start killing from. 'i' is the index we're inspecting. 'n' is how many more calls we'll entertain before stopping because we suspect an infinite loop. first call can use char count for 'n'." [cm mark i n] (let [h (dec i), m (dec n), cur (cursor cm i)] (cond (neg? n) (guard) (bof? cm cur) :do-nothing (beginning-of-line? cm cur) #(bkwd-kill-word cm h h m) (whitespace? cm cur) #(bkwd-kill-word cm mark (start-of-token-at cm i) m) (opening-delim? cm cur) #(bkwd-kill-word cm h h m) (closing-delim? cm cur) #(bkwd-kill-word cm h h m) (at-a-word? cm cur) (kill-from-to cm (start-of-token-at cm i) mark) (start-of-comment? cm cur) (let [j (index-of-next-non cm i semicolons)] #(fwd-kill-word cm j j m)) (bkwd-kill-skippable-comment-char? cm cur) #(bkwd-kill-word cm mark h m) (comment? cm cur) (kill-prev-word-in-comment cm mark) :else (println "unhandled")))) (defn ^:export backward-kill-word "paredit backward-kill-word exposed for keymap." [cm] (let [i (index cm)] (trampoline bkwd-kill-word cm i i (char-count cm)))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-forward ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn fwd "trampoline helper for forward. 'i' is the index we're inspecting. 'n' is how many more calls we'll entertain before suspecting an infinite loop. first call can pass in char count." [cm i n] (let [j (inc i), m (dec n), cur (cursor cm i), right-cur (cursor cm j)] (cond (neg? n) (guard) (nil? right-cur) :do-nothing (eof? cm right-cur) :do-nothing (whitespace? cm right-cur) #(fwd cm j m) (opening-delim? cm right-cur) (.setCursor cm (end-of-next-sibling cm cur)) (closing-delim? cm right-cur) (.setCursor cm right-cur) (at-a-word? cm right-cur) (.setCursor cm (cursor cm (token-end-index cm j))) (comment? 
cm right-cur) #(fwd cm (token-end-index cm j) m) (in-string? cm right-cur) (.setCursor cm (cursor cm (end-of-next-word cm j))) :else (println "unhandled")))) (defn ^:export forward "paredit forward exposed for keymap. find the first thing that isn't whitespace or comment. if it is a closing bracket, step past it. otherwise skip over the thing." [cm] (trampoline fwd cm (index cm) (char-count cm))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-backward ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn bkwd "trampoline helper for backward. 'i' is the index we're inspecting. 'n' is number of remaining calls before we suspect an infinite loop" [cm i n] (let [h (dec i), m (dec n), cur (cursor cm i)] (cond (neg? n) (guard) (nil? cur) :do-nothing (bof? cm cur) (.setCursor cm (cursor cm h)) (whitespace? cm cur) #(bkwd cm h m) (opening-delim? cm cur) (.setCursor cm (cursor cm h)) (closing-delim? cm cur) (.setCursor cm (start-of-prev-sibling cm cur)) (at-a-word? cm cur) (.setCursor cm (start-of-prev-sibling cm cur)) (comment? cm cur) #(bkwd cm (start-of-prev-sibling cm cur) m) (in-string? cm cur) (.setCursor cm (cursor cm (start-of-prev-word cm h))) :else (println "unhandled")))) (defn ^:export backward "paredit backward exposed for keymap." [cm] (trampoline bkwd cm (index cm) (char-count cm))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-forward-up ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn forward-up-cur "get cursor corresponding to paredit forward up" ([cm] (forward-up-cur cm (cursor cm))) ([cm cur] (cond (nil? cur), nil (and (in-string? cm cur) (not (end-of-a-string? cm cur))) (token-end cm cur) :default, (skip cm parent-closer-sp)))) (defn ^:export forward-up "paredit forward-up exposed for keymap." ([cm] (forward-up cm (cursor cm))) ([cm cur] (when-let [cur' (forward-up-cur cm cur)] (.setCursor cm cur')))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-backward-up ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn backward-up-cur "get cursor corresponding to paredit backward up" ([cm] (backward-up-cur cm (cursor cm))) ([cm cur] (start-of-prev-sibling cm (forward-up-cur cm cur)))) (defn ^:export backward-up "paredit backward-up exposed for keymap." ([cm] (backward-up cm (cursor cm))) ([cm cur] (when-let [cur' (backward-up-cur cm cur)] (.setCursor cm cur')))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-wrap-round ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn end-of-this "go to the end of the current thing, whether it be a string or a word of code" [cm cur] (if (in-string? cm cur) (token-end cm cur) (end-of-next-sibling cm cur))) (defn ^:export wrap-round "paredit wrap-round exposed for keymap." ([cm] (wrap-round cm (cursor cm))) ([cm cur] (let [cur-close (end-of-this cm cur) cur-open (start-of-prev-sibling cm cur-close) i (inc (index cm cur-open)) text (.getRange cm cur-open cur-close) text' (str "(" text ")")] (.replaceRange cm text' cur-open cur-close) (.setCursor cm (cursor cm i))))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-splice-sexp M-s ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn ^:export splice-sexp "paredit splice-sexp exposed for keymap. 
unlike emacs' version, this does not splice a string by dropping its double-quotes." ([cm] (splice-sexp cm (cursor cm))) ([cm cur] (let [i (dec (index cm)) cur-close (skip cm parent-closer-sp) cur-open (start-of-prev-sibling cm cur-close) text' (when cur-open (.getRange cm (cursor cm (inc (index cm cur-open))) (cursor cm (dec (index cm cur-close)))))] (when text' (.replaceRange cm text' cur-open cur-close) (.setCursor cm (cursor cm i)))))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-splice-sexp-killing-backward M-<up> ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn ^:export splice-sexp-killing-backward "paredit splice-sexp-killing-backward exposed for keymap. like emacs' version, this doesn't actually kill to the clipboard. it just deletes. but unlink emacs, this does not splice a string by dropping its double-quotes." ([cm] (splice-sexp-killing-backward cm (cursor cm))) ([cm cur] (if (in-string? cm cur) (backward-up cm cur)) (let [cur' (cursor cm) cur-close (skip cm parent-closer-sp) cur-open (start-of-prev-sibling cm cur-close) text' (when cur-close (.getRange cm cur' (cursor cm (dec (index cm cur-close)))))] (when text' (.replaceRange cm text' cur-open cur-close) (.setCursor cm cur-open))))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-splice-sexp-killing-forward M-<down> ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn ^:export splice-sexp-killing-forward "paredit splice-sexp-killing-forward exposed for keymap. like emacs' version, this doesn't actually kill to the clipboard. it just deletes. but unlink emacs, this does not splice a string by dropping its double-quotes." ([cm] (splice-sexp-killing-forward cm (cursor cm))) ([cm cur] (if (in-string? cm cur) (forward-up cm cur)) (let [cur' (cursor cm) final-cur (cursor cm (dec (index cm cur'))) cur-close (skip cm parent-closer-sp) cur-open (start-of-prev-sibling cm cur-close) keep-from (when cur-open (cursor cm (inc (index cm cur-open))) ) text (when keep-from (.getRange cm cur-open cur-close)) text' (when keep-from (.getRange cm keep-from cur'))] (when text' (.replaceRange cm text' cur-open cur-close) (.setCursor cm final-cur))))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-raise-sexp M-r ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn ^:export raise-sexp "paredit raise-sexp exposed for keymap." ([cm] (raise-sexp cm (cursor cm))) ([cm cur] (if (in-string? cm cur) (backward-up cm cur)) (let [c1 (cursor cm) c2 (end-of-next-sibling cm c1) text (when c2 (.getRange cm c1 c2)) cur-close (when text (skip cm parent-closer-sp)) cur-open (when cur-close (start-of-prev-sibling cm cur-close))] (when cur-open (.replaceRange cm text cur-open cur-close) (.setCursor cm cur-open))))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-forward-slurp-sexp C-), C-<right> ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn fwd-string-slurp "String slurping consists of simply 'go to end of string, mark as parent, go to next sibling end, mark as sibling'" [cm cur] (let [parent (if (start-of-a-string? 
cm cur) (end-of-next-sibling cm cur) (end-of-next-sibling cm (start-of-prev-sibling cm cur))) sibling (end-of-next-sibling cm parent)] (when sibling [parent sibling "\""]))) (defn fwd-slurp "trampoline-able that looks for an ancestor closing bracket (parent, grandparent, etc) that has a sibling to slurp. returns a vector of the cur to the right of such a bracket, the cur to the right of the sibling that will be slurped, the string of the bracket to move. nil if there is no such anscestor that can slurp." [cm cur n] (if (and (in-string? cm cur) (not (end-of-a-string? cm cur))) (fwd-string-slurp cm cur) (when (>= n 0) (let [parent (skip cm parent-closer-sp cur) sibling (end-of-next-sibling cm parent)] (if sibling [parent sibling (get-string cm parent)] (fn [] (fwd-slurp cm parent (dec n)))))))) (defn ^:export forward-slurp-sexp "paredit forward-slurp-sexp exposed for keymap." ([cm] (forward-slurp-sexp cm (cursor cm))) ([cm cur] (when-let [[parent sibling bracket] (trampoline fwd-slurp cm cur (char-count cm))] #_(js/console.log "FWD-SLURP" parent sibling bracket) (insert cm bracket 0 sibling);; put bracket in new spot (.replaceRange cm "" (cursor cm (- (index cm parent) (count bracket))) parent));; remove bracket from old spot (.setCursor cm cur))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-forward-down C-M-d ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn fwd-down "trampoline-able that looks for the cursor where we'd be if we went forward and then down into the next sibling that is available. nil if there is no sibling to enter." [cm cur n] (cond (<= n 0), nil (nil? cur), nil (opening-delim? cm cur), cur :default, (when-let [cur' (token-end cm cur 1)] (fn [] (fwd-down cm cur' (dec n)))))) (defn forward-down-cur ([cm] (forward-down-cur cm (cursor cm))) ([cm cur] (trampoline fwd-down cm cur (char-count cm)))) (defn ^:export forward-down ([cm] (forward-down cm (cursor cm))) ([cm cur] (when-let [cur' (forward-down-cur cm cur)] (.setCursor cm cur')))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-backward-down C-M-p ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn bkwd-down "trampoline-able that looks for the cursor where we'd be if we went backward and then down into the prev sibling that is available. nil if there is no sibling to enter." [cm cur n] (let [{:keys [left-cur i start ch bof]} (get-info cm cur)] (cond (<= n 0), (guard) (closing-delim? cm cur), left-cur bof, nil (zero? ch), (fn [] (bkwd-down cm (cursor cm (dec i)) (dec n))) :default, (fn [] (bkwd-down cm (cursor cm (- i (- ch start))) (dec n)))))) (defn ^:export backward-down ([cm] (backward-down cm (cursor cm))) ([cm cur] (when-let [cur' (trampoline bkwd-down cm cur (char-count cm))] (.setCursor cm cur')))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-backward-slurp-sexp C-), C-<right> ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn bkwd-slurp "trampolin-able that looks for an ancestor opening bracket (parent, grandparent, etc) that has a sibling to slurp. returns a vector of the cur to the left of such a bracket, the cur to the left of the sibling that will be slurped, the string of the bracket to move. nil if there is no such anscestor that can slurp." 
[cm cur n] (when (>= n 0) (let [ending (skip cm parent-closer-sp cur) parent (start-of-prev-sibling cm ending) sibling (start-of-prev-sibling cm parent) bracket-cur (forward-down-cur cm parent)] (if (and (not (nil? sibling)) (not (nil? bracket-cur))) [parent sibling (get-string cm bracket-cur)] (fn [] (bkwd-slurp cm parent (dec n))))))) (defn ^:export backward-slurp-sexp "paredit backward-slurp-sexp exposed for keymap." ([cm] (backward-slurp-sexp cm (cursor cm))) ([cm cur] (let [i (index cm cur)] ;; line,ch may change but index will not. (when-let [[parent sibling bracket] (trampoline bkwd-slurp cm cur (char-count cm))] (.replaceRange cm "" parent (cursor cm (+ (index cm parent) (count bracket)))) (insert cm bracket 0 sibling)) (.setCursor cm (cursor cm i))))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-forward-barf-sexp C-\} C-<left> ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn find-first-ws [stg ch] (let [cnt (count stg)] (loop [ch ch] (if (or (= (.charAt stg ch) " ") (= ch cnt)) ch (recur (inc ch)))))) (defn find-first-nonws [stg ch] (let [cnt (count stg)] (loop [ch ch] (if (or (not= (.charAt stg ch) " ") (= ch cnt)) ch (recur (inc ch)))))) (defn rfind-blank-or-start [stg] (let [rstg (str/reverse stg) cnt (count rstg) bdq? (= (.charAt stg 0) "\"") ch (->> 0 (find-first-ws rstg) (find-first-nonws rstg))] #_(js/console.log rstg ch) (cond (not= ch cnt) (dec ch) ; found ws and nonws bdq? (- ch 2) ; has beg dq and at beg :else ch))) (defn fwd-string-barf "String barffing consists of simply 'go to end of string, mark as parent, reverse look for non whitespace, reverse look for whitespace, mark as sibling'" [cm cur] (let [parent (if (start-of-a-string? cm cur) (end-of-next-sibling cm cur) (end-of-next-sibling cm (start-of-prev-sibling cm cur))) inside (cursor cm (dec (index cm parent))) {:keys [string i]} (get-info cm inside) ri (rfind-blank-or-start string) sibling (cursor cm (- i ri))] #_(js/console.log (index cm cur) i ri (- i ri)) (when (and parent inside) [parent inside sibling "\"" (< (- i ri) (index cm cur))]))) (defn fwd-barf "trampoline-able that looks for an ancestor closing bracket (parent, grandparent, etc) that has a sibling to barf. returns a vector of the cur to the right of such a bracket, the cur at the bracket, the cur where the bracket should go, the text of the bracket, and whether the operation causes the cursor to be moved. nil if there is no such anscestor that can barf" [cm cur n] (if (and (in-string? cm cur) (not (end-of-a-string? cm cur))) (fwd-string-barf cm cur) (when (>= n 0) (let [parent (skip cm parent-closer-sp cur) inside (cursor cm (dec (index cm parent))) sibling (start-of-prev-sibling cm inside) ;; prevsib: end of prev sibling if there is one: prevsib (end-of-next-sibling cm (start-of-prev-sibling cm sibling)) ;; bracket-cur: where the new bracket should go: bracket-cur (or prevsib (forward-down-cur cm (backward-up-cur cm sibling))) ;; whether the cursor needs to change: moved (and bracket-cur (< (index cm bracket-cur) (index cm cur))) ;; text of the bracket, e.g. ")" bracket (when parent (if moved (str (get-string cm parent) " ") (get-string cm parent)))] (cond (nil? parent) nil (nil? bracket-cur) (fn [] (fwd-barf cm parent (dec n))) :default [parent inside bracket-cur bracket moved]))))) (defn ^:export forward-barf-sexp "paredit forward-barf-sexp exposed for keymap." 
([cm] (forward-barf-sexp cm (cursor cm))) ([cm cur] (if-let [[parent inside sibling bracket moved] (trampoline fwd-barf cm cur (char-count cm))] (do #_(js/console.log parent inside sibling bracket moved) (.replaceRange cm "" inside parent) (insert cm bracket 0 sibling) (if moved (.setCursor cm sibling) (.setCursor cm cur))) (.setCursor cm cur)))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-backard-barf-sexp C-{, C-M-<right>, Esc C-<right> ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn bkwd-barf "trampoline-able that looks for an ancestor opening bracket (parent, grandparent, etc) that has a sibling to barf. returns... . nil if there is no such anscestor that can barf" [cm cur n] (when (>= n 0) (let [outside (backward-up-cur cm cur) inside (forward-down-cur cm outside) end-of-barfed-sexp (end-of-next-sibling cm inside) end-of-new-first-sib (end-of-next-sibling cm end-of-barfed-sexp) bracket-cur (start-of-prev-sibling cm end-of-new-first-sib) bracket-text (get-string cm inside) moved (and bracket-cur (< (index cm cur) (index cm bracket-cur)))] (cond (nil? outside) nil (nil? end-of-barfed-sexp) (fn [] (bkwd-barf cm outside (dec n))) :default [outside inside bracket-cur bracket-text moved])))) (defn ^:export backward-barf-sexp "paredit backward-barf-sexp exposed for keymap." ([cm] (backward-barf-sexp cm (cursor cm))) ([cm cur] (if-let [[outside inside bracket-cur bracket-text moved] (trampoline bkwd-barf cm cur (char-count cm))] (do (insert cm bracket-text 0 bracket-cur) (.replaceRange cm "" outside inside) (if moved (.setCursor cm (cursor cm (- (index cm cur) (count bracket-text)))) (.setCursor cm cur))) (.setCursor cm cur)))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-split-sexp M-S ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn split-form "split sexp for (forms like this)" [cm cur] (let [close-cur (skip cm parent-closer-sp cur) close-bracket (get-string cm close-cur) open-cur (start-of-prev-sibling cm close-cur) open-bracket (get-string cm (cursor cm (inc (index cm open-cur))))] (when (and (not (nil? open-bracket)) (not (nil? close-bracket))) (.setCursor cm cur) (let [offset (if (in-whitespace? cm) 1 (do (insert cm " ") (just-one-space cm (cursor cm) false) 0)) cur' (cursor cm) i' (+ (index cm cur') offset) prev-sib (start-of-prev-sibling cm cur') prev-sib-end (end-of-next-sibling cm prev-sib) next-sib (end-of-next-sibling cm cur) next-sib-start (start-of-prev-sibling cm next-sib)] (if (nil? next-sib-start) (insert cm open-bracket) (insert cm open-bracket 0 next-sib-start)) (if (nil? prev-sib-end) (do (move-left cm) (insert cm close-bracket)) (insert cm close-bracket 0 prev-sib-end)) (.setCursor cm (cursor cm i')))))) (defn split-string "split sexp for \"strings like this\"" [cm cur] (let [open-quote-i (index-of-next-non cm (index cm cur) " ")] (.replaceRange cm "\" \"" cur (cursor cm open-quote-i)) (move-left cm) (move-left cm))) (defn ^:export split-sexp "paredit split-sexp exposed for keymap." ([cm] (split-sexp cm (cursor cm))) ([cm cur] (if (in-string? cm cur) (split-string cm cur) (split-form cm cur)))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-join-sexps M-J ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn ^:export join-sexps "paredit join-sexps exposed for keymap." 
([cm] (join-sexps cm (cursor cm))) ([cm cur] (let [left-sib (start-of-prev-sibling cm cur) close (end-of-next-sibling cm left-sib) right-sib (end-of-next-sibling cm cur) open (start-of-prev-sibling cm right-sib) open-right (when open (cursor cm (inc (index cm open)))) close-char (get-string cm close) open-char (get-string cm open-right)] (if (and (not (nil? open)) (not (nil? close)) (pair? open-char close-char)) (do (.setCursor cm open) (delete cm) (.setCursor cm close) (backspace cm) (.setCursor cm (if (= (.-line open) (.-line close)) (cursor cm (dec (index cm cur))) cur))) (.setCursor cm cur))))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-reindent-defun M-q ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn top-most-opener-candidate "trampoline-able that looks for the top-most opening bracket for the specified location. returns the current cursor if there is no such anscestor" [cm cur n] (when (>= n 0) (if-let [parent (backward-up-cur cm cur)] (fn [] (top-most-opener-candidate cm parent (dec n))) cur))) (defn top-most-opener "get the top most opening bracket for the specified location. nil if there is no such bracket." ([cm] (top-most-opener cm (cursor cm))) ([cm cur] (let [candidate (top-most-opener-candidate cm cur (char-count cm))] (when (not= candidate cur) candidate)))) (defn ^:export reindent-defun "paredit reindent-defun exposed for keymap." ([cm] (reindent-defun cm (cursor cm))) ([cm cur] (let [open (trampoline top-most-opener cm cur) close (end-of-next-sibling cm open) open-line (when open (.-line open)) line-offset (when open (- (.-line cur) open-line)) line-len (count (.getLine cm (.-line cur))) ch (.-ch cur)] (when (and (not (nil? open)) (not (nil? close))) (indent-lines cm (.-line open) (.-line close)) (repeatedly line-offset (.execCommand cm "goLineDown")) (.execCommand cm "goLineStart") (.setCursor cm (cursor cm (+ (index cm) ch (- (count (.getLine cm (.-line (cursor cm)))) line-len)))))))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-forward-sexp ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn ^:export forward-sexp "forward-sexp exposed for keymap. seems part of emacs and not part of paredit itself. but including it here since this will be used in things other than emacs itself." ([cm] (forward-sexp cm (cursor cm))) ([cm cur] (when-let [cur' (end-of-next-sibling cm cur)] (.setCursor cm cur')))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-backward-sexp ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn ^:export backward-sexp "backward-sexp exposed for keymap. seems part of emacs and not part of paredit itself. but including it here since this will be used in things other than emacs itself." ([cm] (backward-sexp cm (cursor cm))) ([cm cur] (when-let [cur' (start-of-prev-sibling cm cur)] (.setCursor cm cur'))))
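;; usage sketch (illustrative only, not part of the library): the exported
;; commands all take the CodeMirror instance as their first argument, so they
;; can be handed directly to CodeMirror's standard :extraKeys option. the
;; element id and the key choices below are assumptions.
#_(let [ed (.fromTextArea js/CodeMirror
                          (.getElementById js/document "code-area")
                          #js {:mode "clojure"
                               :extraKeys #js {"Ctrl-Right" forward-slurp-sexp
                                               "Ctrl-Left"  forward-barf-sexp
                                               "Alt-S"      splice-sexp
                                               "Ctrl-Alt-F" forward-sexp
                                               "Ctrl-Alt-B" backward-sexp}}]
    ed)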
(ns paredit-cm.core "paredit operations (exported)" (:require [clojure.string :as str] [cljsjs.codemirror] [cljsjs.codemirror.mode.clojure] [cljsjs.codemirror.keymap.emacs])) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; ;; MIT License ;; ;; Copyright (c) 2017 <NAME> ;; ;; Permission is hereby granted, free of charge, to any person obtaining a copy ;; of this software and associated documentation files (the "Software"), to deal ;; in the Software without restriction, including without limitation the rights ;; to use, copy, modify, merge, publish, distribute, sublicense, and/or sell ;; copies of the Software, and to permit persons to whom the Software is ;; furnished to do so, subject to the following conditions: ;; ;; The above copyright notice and this permission notice shall be included ;; in all copies or substantial portions of the Software. ;; ;; <NAME> (2019,2020): ;; Many changes and fixes for working with newer codemirror releases ;; ;; ** PAREDI PROJECT CONVENTIONS ** ;; ;; consider this notation: aXbc ;; ;; in the unit tests as well as here, aXbc contains a single capital X which ;; represents the position of the cursor. aXbc means the code mirror instance's ;; value is 'abc' and a block-style cursor is on 'b' (a bar-style cursor would ;; be between 'a' and 'b'). aXbc is what you would see if you typed a capital X ;; in this example code mirror. ;; ;; 'cur' is for the current position's cursor (on 'b' in the example). ;; 'left-cur' is for position 'a'. 'right-cur' is for position 'c'. ;; ;; if there is a current cursor cur and a new cursor, then the new cursor will ;; be named cur' (the single quote is part of the name, so read it aloud as ;; cursor-prime) ;; ;; when there are two cursors (as in the beginning and ending of a selection) we ;; use c1 and c2. it feels strange to call them 'start' and 'end' when those are ;; the names codemirror uses to refer to the ends of a token. ;; ;; the following all refer to the values for the token at 'cur': 'start' 'line' ;; 'ch' 'i' 'string' 'type' ;; ;; use the same prefixes 'left-' and 'right-' when referring to the same kinds ;; of values belonging to 'left-cur' and 'right-cur' ;; ;; ints *other than i, the code mirror index* are named with a single character ;; like 'x'. neighboring values are represented alphabetically, so (inc x) would ;; be named 'y' and (dec x) would be named 'w'. ;; ;; s1 is a string. similarly s1, s2, and s ;; ;; for numerical values like 'offset', lower is for left and higher is for ;; right, just as for code mirror's index i. ;; ;; sp is a 'skipping predicate'. these are used with a trampoline wrapper like ;; 'skip' to move along the text in code mirror until our predicate is ;; satisfied. in many cases, the predicate will push and pop openers/closers off ;; a stack and when the stack is empty and we satisfy some additional condition, ;; then we stop and return the cursor. ;; ;; functions with names ending in -sp are skipping predicates. ;; ;; currently we're assuming perfect matching of openers/closers so we don't ;; actually keep track of the stack -- we just inc and dec an int until it gets ;; to 0 and our other conditions are satisfied ;; ;; any trampoline use should be limited by the cm character count, to guard ;; against infinite loops. we'll start at the limit and count down, stopping ;; when it goes negative. ;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (enable-console-print!) 
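;; illustrative sketch of the 'sp' (skipping predicate) convention described
;; above; this example is not used by the library itself. it stops at the
;; first closing bracket, threading its state through unchanged otherwise.
;; `get-info`, `is-bracket-type?` and `closer?` are defined further down in
;; this file.
#_(defn first-closer-sp [cm cur state]
    (let [{:keys [string type]} (get-info cm cur)]
      (if (and (is-bracket-type? type) (closer? string))
        :yes     ;; satisfied: skip / skip-to will stop and return this cursor
        state))) ;; not yet satisfied: keep the same state and keep skipping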
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; general helper methods ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (def openers #{ "(" "[" "{" }) (def closers #{ ")" "]" "}" }) (def pair {"(" ")", "[" "]", "{" "}", "\"" "\"", ")" "(", "]" "[", "}" "{"}) (defn pair? "true if the two strings are a matching open/close pair " [s1 s2] (= (pair s1) s2)) (defn opener? [s] (contains? openers s)) (defn closer? [s] (contains? closers s)) (defn is-bracket-type? [t] (and t (str/starts-with? t "bracket"))) (defn char-count "returns the number of characters in the code mirror instance" [cm] (-> cm .getValue count)) (defn cursor "get cur, the position of the cursor" ([cm] (.getCursor cm)) ;; get current cursor ([cm i] (.posFromIndex cm i))) ;; get cursor for index i (defn index "get the index i for the cursor's position" ([cm] (index cm (cursor cm))) ([cm cur] (when cur (.indexFromPos cm cur)))) (defn bof? "true if at beginning of file" [cm cur] (zero? (index cm cur))) (defn eof? "true if at end of file" [cm cur] (= (index cm cur) (char-count cm))) (defn token "get token at cursor" [cm cur] (.getTokenAt cm cur true)) (defn get-type "get the type at the current cursor." ([cm] (get-type cm (cursor cm))) ([cm cur] (.-type (token cm cur)))) (defn get-string "gets the string of the current token" ([cm] (get-string cm (cursor cm))) ([cm cur] (when cur (.-string (token cm cur))))) (defn line-length "gets the length of the current line" ([cm] (line-length cm (cursor cm))) ([cm cur] (when cur (count (.getLine cm (.-line cur)))))) (defn last-token "returns the last token of a line" [cm cur] (->> cur .-line (.getLineTokens cm) last)) (defn last-cur "returns the last cursor of a line" ([cm] (last-cur cm (cursor cm))) ([cm cur] (let [end (.-end (last-token cm cur)) diff (- end (.-ch cur))] (cursor cm (+ diff (index cm cur)))))) (defn get-info "make info from CodeMirror more conveniently accessed by our code. we'll use destructuring and just name what we rant. hypothesizing that performance hit won't be that bad." ([cm] (get-info cm (cursor cm))) ([cm cur] (when cur (let [tok (token cm cur) eof (eof? cm cur) bof (bof? cm cur) i (index cm cur) left-cur (when-not bof (cursor cm (dec i))) right-cur (when-not eof (cursor cm (inc i)))] {:cur cur :line (.-line cur) :ch (.-ch cur) :i i :tok tok :string (.-string tok) :start (.-start tok) :end (.-end tok) :type (.-type tok) :top (-> tok .-state .-indentStack nil?) ;; true for toplevel :eof eof :bof bof :left-char (when-not bof (.getRange cm left-cur cur)) :right-char (when-not eof (.getRange cm cur right-cur)) :left-cur left-cur :right-cur right-cur :mode (.-mode (.-state tok))})))) (defn comment-or-string? "true if the type is comment or string. a lot of editing behavior (like movement and deletion) is similar when you are in a string or in a comment, so often this is the predicate for that behavior." [type] (or (= type "comment") (= type "string"))) (defn indent-line "indent the current line" [cm] (->> cm cursor .-line (.indentLine cm))) (defn escaped-char-name? [stg] (let [escnames #{"\\newline", "\\space", "\\tab", "\\formfeed", "\\backspace", "\\return"}] (when (escnames stg) (dec (count stg))))) (defn in-escaped-char? "returns true if backslash is to the left and cursor is on an escaped char" ([cm cur] (in-escaped-char? 
cm cur 0)) ([cm cur offset] (let [{:keys [ch start end type]} (get-info cm cur)] #_(js/console.log start ch end type) (and (= type "string-2") (and (< start ch) (< ch end)))))) (defn escaped-char-to-left? "returns true if an escaped char and its backslash are to the left" [cm cur] (let [{:keys [ch end type string]} (get-info cm cur)] (and (= type "string-2") (= ch end)))) (defn escaped-char-to-right? "returns true if an escaped char and its backslash is to the right" [cm cur] (let [cur+ (cursor cm 0) {:keys [type]} (get-info cm cur)] (and (not= type "string-2")) (set! cur+.line cur.line) (set! cur+.ch (inc cur.ch)) (in-escaped-char? cm cur))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-open-round ( ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn insert "insert text at current cursor. move cursor to the end of inserted text minus optional offset. the offset is for moving the cursor immediately after the insert and before returning. example: inserting a pair of brackets and placing the cursor inside the pair. this returns the new cursor." ([cm text] (insert cm text 0)) ([cm text offset] (insert cm text offset (cursor cm))) ([cm text offset cur] (let [{:keys [line ch]} (get-info cm cur)] (.replaceRange cm text cur) (.setCursor cm line (+ (+ ch (count text)) offset)) (cursor cm)))) (defn ^:export open-round "paredit-open-round exposed for keymap. unlike traditional emacs paredit, this supports brackets [] {} () but not double-quote" ([cm] (open-round cm "(")) ([cm c] (let [{:keys [type left-char right-char]} (get-info cm)] (cond ;; escaping the next character: (= "\\" left-char) (insert cm c) ;; typing in a comment or string as-is: (comment-or-string? type) (insert cm c) ;; insert a pair, pad with a space to the left and/or right if necessary, ;; and move the cursor into the pair before returning: :else (let [need-left-padding (and (not= " " left-char) (not (opener? left-char))) need-right-padding (and (not= " " right-char) (not (closer? right-char)))] (insert cm (str (when need-left-padding " ") c (pair c) (when need-right-padding " ")) (if need-right-padding -2 -1))))))) (defn ^:export open-brace "open curly brace with matching close brace" ([cm] (open-round cm "{"))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-close-round ) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn parent-closer-sp ;; -sp see 'skipping predicate' below "finds the *parent* closing bracket. behavior when used with skip: pushes opening brackets that appear along the way on a stack. closing brackets pop them off. stops when encountering a closing bracket while the stack is empty. assuming the cm has matched brackets for now. moves to the right." [cm cur state] (let [{:keys [string type top eof]} (get-info cm cur)] (cond ;; 'push' opener on our 'stack': (and (is-bracket-type? type) (opener? string)), (inc state) ;; stop if we see a closer while our 'stack' is empty: (and (is-bracket-type? type) (closer? string) (zero? state)), :yes ;; closer means we 'pop' off the 'stack', unless eof (and (is-bracket-type? type) (closer? string) (not= 0 state) eof), :eof ;; closer means we 'pop' off the 'stack': (and (is-bracket-type? type) (closer? string) (not= 0 state)), (dec state) ;; we can* rely on code mirror to tell us if we're at the top ;; level: (* NOT in [cljsjs/codemirror "5.21.0-2"] ... but maybe ;; in a later version ... 
until we can figure out how to refer ;; to the latest codemirror in our tests, the tests will have to ;; live here in order to get the codemirror that is included in ;; the script tag on the demo index.html page) ;; TODO: investigate whether we can use this, given CodeMirror version: ;; top, :stop ;; stack stays unchanged. move to the next thing: :default, state))) (defn token-start "returns the cursor for the start of the current token" [cm cur] (let [{:keys [i line start ch type]} (get-info cm cur)] (cursor cm (- i (- ch start))))) (defn token-end "returns the cursor for the end of the current token" ([cm cur] (token-end cm cur 0)) ([cm cur offset] (let [{:keys [i line end ch type]} (get-info cm cur)] (cursor cm (+ i offset (- end ch)))))) (defn token-end-index "take an index. get its token. return index of that token's end." [cm i] (->> i (cursor cm) (token-end cm) (index cm))) (defn guard [] (println "past")) (defn skip-trampoline-helper "returns the cursor that satsifies skipping predicate 'sp' or nil if eof reached. does this by making sp something we can trampoline. sp takes these args: cm, cursor, state. counts down 'n' to 0 in order to guard against infinite loops." [cm cur sp state n] (if (>= n 0) (let [{:keys [left-cur right-cur i]} (get-info cm cur) result (sp cm cur state)] #_(js/console.log result) (case result :eof nil :stop nil :yes cur :left left-cur :right right-cur :end-of-this-token (token-end cm cur) :start-of-this-tok (token-start cm cur) (let [next-cur (token-end cm cur 1)] #_(js/console.log next-cur) (fn [] ;; for trampoline (skip-trampoline-helper cm next-cur sp result (dec n)))))) (guard))) (defn skip-trampoline-helper-left "like skip-trampoline-helper but in the opposite direction." [cm cur sp state n] (if (>= n 0) (let [{:keys [left-cur right-cur i start ch]} (get-info cm cur) result (sp cm cur state)] #_(js/console.log result) (case result :bof nil :stop nil :yes left-cur :right right-cur :end-of-this-token (token-end cm cur) :start-of-this-tok (token-start cm cur) (let [next-cur (if (= ch start) (cursor cm (dec i)) (cursor cm (- i (- ch start))))] (fn [] ;; for trampoline (skip-trampoline-helper-left cm next-cur sp result (dec n)))))) (guard))) (defn skip "returns the cursor that satisfies sp or nil if either eof reached or we found out sp could not be satisfied. see skip-to for more info." ([cm sp] (skip cm sp (cursor cm))) ([cm sp cur] (when-let [right-cur (:right-cur (get-info cm cur))] (trampoline skip-trampoline-helper cm right-cur sp 0 (char-count cm))))) (defn skip-left "returns the cursor that satisfies sp or nil if either bof reached or we found out sp could not be satisfied. see skip-to for more info." [cm sp] (when-let [cur (cursor cm)] (trampoline skip-trampoline-helper-left cm cur sp 0 (char-count cm)))) (defn delete-whitespace "if cur is in whitespace, deletes it optionally without ruining indentation." ([cm] (delete-whitespace cm (cursor cm) true)) ([cm cur] (delete-whitespace cm cur true)) ([cm cur indent-after] (let [{:keys [start end line ch i type]} (get-info cm cur) c1 (cursor cm (+ i (- start ch))) c2 (cursor cm (+ i (- end ch)))] (when (nil? type) (.replaceRange cm "" c1 c2) (if indent-after (.indentLine cm line)))))) ;; todo (defn just-one-space ([cm] (just-one-space cm (cursor cm) true)) ([cm cur] (just-one-space cm cur true)) ([cm cur indent-after] (let [{:keys [start end line ch i type]} (get-info cm cur) c1 (cursor cm (+ i (- start ch))) c2 (cursor cm (+ i (- end ch)))] (when (nil? 
type) (.replaceRange cm " " c1 c2) (if indent-after (.indentLine cm line)))))) (defn skip-to "moves to the cursor that satisfies sp or doesn't move if eof reached. starts at current cursor for cm. sp stands for 'skipping predicate' which returns: - :yes if sp is satisfied. - :stop if we know we will not be satisfied with any future result. - :left if the cursor to the left is what we want. - new non-nil state if not satisfied. this state is used with the next iteration after we skip to the end of the current token. an sp takes cm, cursor, state." [cm sp] (when-let [cur' (skip cm sp)] (.setCursor cm cur') cur')) (defn move-past-parent-closer "moves cursor to just outside the closing bracket, or if there is none then doesn't move at all." ;; emacs has this extending the current selection if there is one. [cm] (when-let [cur (skip-to cm parent-closer-sp)] (delete-whitespace cm (:left-cur (get-info cm))) cur)) (defn ^:export close-round "paredit-close-round exposed for keymap. skips to end of current list even if it ends with ] or }. but if you're in a string or comment then this just inserts the bracket. requires CodeMirror mode's parser uses state with indentStack because that's how we can tell we've reached the end of a top level form and avoid entering the next top level form. 's' is the character as a string." ([cm] (close-round cm ")")) ([cm s] (let [{:keys [type left-char]} (get-info cm)] (cond (= "\\" left-char) (insert cm s) (comment-or-string? type) (insert cm s) :else (move-past-parent-closer cm))))) (defn ^:export close-brace "close curly brace like close-rond" ([cm] (close-round cm "}"))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-close-round-and-newline paredit-open-square paredit-close-square ;; paredit-doublequote ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn ^:export close-round-and-newline ([cm] (close-round-and-newline cm ")")) ([cm s] (if (comment-or-string? (get-type cm)) (insert cm s) (when (close-round cm s) (.execCommand cm "newlineAndIndent"))))) ;; question: is there a better way than .execCommand? (defn ^:export open-square [cm] (open-round cm "[")) (defn ^:export close-square [cm] (close-round cm "]")) (defn ^:export doublequote [cm] (let [{:keys [type left-char right-char ch cur]} (get-info cm)] (cond ;; about to escape this char so insert as-is: (= "\\" left-char) (insert cm "\"") ;; we're in a string so escape this doublequote: (= type "string") (insert cm "\\\"") ;; we're in code. pad with a space to the left and/or right if necessary ;; to separate it from neighboring code. after inserting, move the cursor ;; to between the quotes: :else (insert cm (str (when (not= " " left-char) " ") ;; left padding "\"\"" (when (and (not= " " right-char) (not= "\n" right-char)) " ")) ;; right padding (if (or (= " " right-char) (= "\n" right-char)) -1 -2))))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-meta-doublequote M-" ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn word? [type] (or (= type "atom") (= type "builtin") (= type "number") (= type "variable") (= type "keyword") (= type "meta"))) (defn at-a-word? "returns true if at a word of code" [cm cur] (word? (get-type cm cur))) (defn in-a-word? "true if in a word AND not at the end of that word. false if in whitespace or a string or a comment or at a bracket." [cm] (let [cur (cursor cm), i (index cm cur)] (and (at-a-word? 
cm cur) (not= i (token-end-index cm i))))) (defn start-of-a-string? "returns true if at the start of a string." [cm cur] (let [{:keys [string type start ch left-char]} (get-info cm cur)] #_(js/console.log right-char type string ch start) (and (= left-char "\"") (= type "string") (= 1 (- ch start))))) (defn start-of-a-string2? [cm cur] (let [i (index cm cur) p2 (cursor cm (inc i))] #_(js/console.log cur p2) (start-of-a-string? cm p2))) (defn end-of-a-string? "returns true if just to the right of a closing doublequote of a string." [cm cur] (let [{:keys [type ch end string left-char]} (get-info cm cur)] #_(js/console.log left-char type string ch end) (and (= type "string") (= ch end) (= left-char "\"")))) (defn end-of-next-sibling-sp ;; -sp see 'skipping predicate' "returns the cursor at the end of the sibling to the right or nil if no sibling or eof. does not exit the containing form. does this by skipping past any comments or whitespace, and branches depending on whether an opening bracket or doublequote is encountered (sp satisfied when encountering a closing bracket that empties the stack) vs the beginning of a word (return token at the end of the word). assuming the cm has matched brackets for now." [cm cur stack] (let [dq "\"" info (get-info cm cur) {:keys [string type eof ch end tok]} info stack-empty (zero? stack) one-left (= 1 stack) ;; for multi-line strings start-of-stg? (start-of-a-string? cm cur) end-of-stg? (end-of-a-string? cm cur) empty-stg? (when end-of-stg? (and (= tok.type "string") (= tok.string "\"\""))) string-extends (or (not= dq (last string)) (= "\\" (last (drop-last string))))] #_(js/console.log stack stack-empty string type ch end cur string-extends #_(escaped-char-to-right? cm cur) start-of-stg? end-of-stg?) (cond ;; we return a keyword when we know where to stop, stack otherwise. ;; skip whitespace (or (nil? type) (and (= type "error") (= string ","))), stack (and (escaped-char-to-left? cm cur) stack-empty), :yes (and (word? type) stack-empty (= ch end)), :yes (and (is-bracket-type? type) (closer? string) one-left), :yes (and end-of-stg? one-left), :yes eof, :eof ;; skip comments (= type "comment"), stack ;; strings ............................................................... empty-stg? :end-of-this-token ;; our starting point is at beginning of a string and it doesn't extend (and start-of-stg? (and (not string-extends) stack-empty)), :end-of-this-token ;; We are in a nested form, at start of string, but it doesn't extend (and start-of-stg? (not stack-empty) (not string-extends)), stack ;; entering a multi-line string, push " onto stack (and start-of-stg? string-extends), (inc stack) ;; at end of string and stack already empty, we must have started in the ;; middle of the string (and end-of-stg? stack-empty), :stop ;; at end of string and stack about to be empty, we've found the end of ;; the string -- handled before checking for eof above ;; in string, the end of this string is our goal ... ;; ... but the end of this string is on a different line: (and (= type "string") #_(not stack-empty) #_one-left string-extends), stack (and (= type "string") stack-empty (not string-extends)), :end-of-this-token ;; in string, the end of this string is our goal ... ;; ... the end is on this line: (and (= type "string") one-left), :end-of-this-token ;; in string, need to get out of this form, pop stack (and (= type "string") (not stack-empty)), (dec stack) ;; escaped chars ......................................................... 
;; inside an escaped char and the end of it is what we want (and (in-escaped-char? cm cur) stack-empty), :end-of-this-token ;; To the right of escaped char, keep going (and (escaped-char-to-right? cm cur) stack-empty), :start-of-this-tok ;; in an escaped char inside the next sibling (in-escaped-char? cm cur), stack ;; at end of an escaped char which was the next sibling -- handled before ;;checking for eof above ;; at end of an escaped char inside the next sibling (escaped-char-to-left? cm cur), stack ;; words ................................................................. ;; reached the end of a word which was the next sibling -- handled before ;;checking for eof above ;; in a word that is the next sibling, the end of it is what we want (and (word? type) stack-empty), :end-of-this-token ;; in a word that is inside the next sibling (word? type), stack ;; brackets .............................................................. ;; push opener on stack (and (is-bracket-type? type) (opener? string)), (inc stack) ;; we've reached the end of a form -- handled before checking for eof ;;above ;; there was no sibling (and (is-bracket-type? type) (closer? string) stack-empty), :stop ;; passing through the guts of a sibling form (.. (guts)|..) (and (is-bracket-type? type) (closer? string)), (dec stack) :default, :stop))) (defn end-of-next-sibling "get the cursor for the end of the sibling to the right." ([cm] (skip cm end-of-next-sibling-sp)) ([cm cur] (when cur (.setCursor cm cur) (skip cm end-of-next-sibling-sp)))) #_(let [cm (get-ddb [:tabs :extns :ed3 :cms :$ed]) cur (.getCursor cm) info (pe/get-info cm cur) tok (info :tok)] [(pe/start-of-a-string? cm cur) (pe/end-of-a-string? cm cur) (info :left-char) (info :right-char) tok.string] #_(console.log (pe/token-end cm cur 1)) #_(console.log (pe/cursor cm (+ 9 1 (- 8 8)))) #_(pe/get-info cm (pe/cursor cm (+ 9 1))) #_(pe/end-of-a-string? cm cur)) (defn start-of-prev-sibling-sp ;; -sp see 'skipping predicate' "returns the cursor at the start of the sibling to the left or nil if no sibling or eof. does not exit the containing form. does this by skipping past any comments or whitespace, and branches depending on whether a bracket or doublequote is encountered (sp satisfied when encountering an opening bracket that empties the stack) vs the beginning of a word (return token at the start of the word). assuming the cm has matched brackets for now." [cm cur stack] (let [info (get-info cm cur) {:keys [string type bof ch start tok]} info stack-empty (zero? stack) one-left (= 1 stack) string-extends (not= "\"" (first string)) ; for multiline strings start-of-stg? (start-of-a-string? cm cur) end-of-stg? (end-of-a-string? cm cur) empty-stg? (when start-of-stg? (and (= tok.type "string") (= tok.string "\"\"")))] #_(js/console.log stack stack-empty string type ch start cur string-extends ;;(escaped-char-to-left? cm cur) ;;(escaped-char-to-right? cm cur) start-of-stg? end-of-stg?) (cond ;; we return a keyword when we know where to stop, stack otherwise. ;; check these before checking for bof: ;; in a multi-line string, keep searching for the first line of it: (and start-of-stg? one-left string-extends), stack ;; at the first line of a string and we want its opening doublequote: (and start-of-stg? one-left), :yes ;; at the start of a word: (and (word? type) stack-empty (= ch start)), :yes ;; at the opener we were looking for: (and (is-bracket-type? type) (opener? string) one-left), :yes bof, :bof; reached beginning of file (and (start-of-a-string2? 
cm cur) (not stack-empty)), stack #_(dec stack) ;; at the start of an escaped char: (and (escaped-char-to-right? cm cur) stack-empty), stack ;; skip whitespace (or (nil? type) (and (= type "error") (= string ","))), stack ;; skip comments (= type "comment"), stack ;; strings ............................................................... empty-stg? :start-of-this-tok ;; our starting point is at end of a string and it doesn't extend (and end-of-stg? (and (not string-extends) stack-empty)), :start-of-this-tok ;; We are in a nested form, at end of string, but it doesn't extend (and end-of-stg? (not stack-empty) (not string-extends)) stack ;; entering a multi-line string from the right; push " onto stack (and end-of-stg? string-extends), (inc stack) ;; at start of string and stack already empty, we must have started in ;; the middle of the string. (and start-of-stg? stack-empty), :stop ;; at start of string and stack about to be empty, we've found the end of ;; the string -- handled before check for bof above ;; in string, the start of it is our goal ... ;; ... but the start of this string is on a higher line: (and (= type "string") #_(not stack-empty) string-extends), stack ;; it's on this line: (and (= type "string") stack-empty (not string-extends)), :start-of-this-tok ;; in string, the start of this string is our goal ... ;;; ... and the start is on this line: (and (= type "string") one-left) :start-of-this-tok ;; in string, need to get out of this form, pop stack (and (= type "string") (not stack-empty)), (dec stack) ;; escaped chars ......................................................... ;; inside an escaped char and the start of it is what we want (and (in-escaped-char? cm cur) stack-empty), :start-of-this-tok ;; To the left of escaped char, keep going (and (escaped-char-to-left? cm cur) stack-empty), :start-of-this-tok ;; in an escaped char inside the prev sibling (or (in-escaped-char? cm cur) (escaped-char-to-left? cm cur)), stack ;; at start of an escaped char which was the prev sibling -- handled ;; before check for bof above ;; at start of an escaped char inside the prev sibling (escaped-char-to-right? cm cur), stack ;; words ................................................................. ;; reached the start of a word which was the prev sibling -- handled ;; before check for bof above ;; in a word that is the prev sibling, the start of it is what we want (and (word? type) stack-empty), :start-of-this-tok ;; in a word that is inside the prev sibling (word? type), stack ;; brackets .............................................................. ;; push closer on stack (and (is-bracket-type? type) (closer? string)), (inc stack) ;; we've reached the start of a form -- handled before check for ;; bof above ;; there was no prev sibling, avoid exiting the form (and (is-bracket-type? type) (opener? string) stack-empty), :stop ;; passing through the guts of a sibling form (.. X(guts)..) (and (is-bracket-type? type) (opener? string)), (dec stack) :default :stop))) (defn start-of-prev-sibling "return the cursor at the start of the sibling to the left." ([cm] (skip-left cm start-of-prev-sibling-sp)) ([cm cur] (when cur (.setCursor cm cur) (skip-left cm start-of-prev-sibling-sp)))) (defn escape-string "escapes a string, replacing backslashes and doublequotes. wraps result in a new pair of doublequotes." 
[s] (str "\"" (-> s (str/replace #"[\\]" "\\\\") (str/replace #"[\"]" "\\\"")) "\"")) (defn stringify-selection "turns selection into a string, escaping backslashes and doublequotes" [cm] (->> cm .getSelection escape-string (.replaceSelection cm))) (defn stringify "turns the region from cur-1 to cur-2 into a string, escaping backslashes and doublequotes" [cm cur-1 cur-2] (.setSelection cm cur-1 cur-2) (stringify-selection cm) (.setCursor cm (cursor cm (inc (index cm cur-1))))) (defn exit-string "moves cursor right, out of the current string" [cm] (let [{:keys [type i ch end]} (get-info cm)] (when (= type "string") (.setCursor cm (cursor cm (+ i (- end ch))))))) (defn in-string? "returns true if token is in the middle of a string." ([cm] (in-string? cm (cursor cm))) ([cm cur] (let [type (get-type cm cur)] (or (= type "string") (= type "string-2"))))) (defn ^:export meta-doublequote "paredit meta-doublequote exposed for keymap. if in a string, moves cursor out of the string to the right. if in a comment, insert a doublequote. if in an escaped char, do nothing. otherwise starts a string that that continues to the end of the next form, escaping backslashes and doublequotes." [cm] (let [{:keys [type eof cur]} (get-info cm)] (cond eof :do-nothing (in-escaped-char? cm cur) :do-nothing (in-string? cm cur) (exit-string cm) (= type "comment") (insert cm "\"") (in-a-word? cm) (stringify cm cur (token-end cm cur)) :else (stringify cm cur (end-of-next-sibling cm))))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-comment-dwim ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn left "given a pair of cursors c1 and c2, returns the left-most one" [cm c1 c2] (let [i1 (index cm c1) i2 (index cm c2)] (if (< i1 i2) c1 c2))) (defn right "given a pair of cursors c1 and c2, returns the right-most one" [cm c1 c2] (let [i1 (index cm c1) i2 (index cm c2)] (if (< i1 i2) c2 c1))) (defn selection-info "like get-info but for the first selection. gets the cursor to the left of the selection, the start, the end, the text selected, the starting and ending line numbers. nil if nothing selected." [cm] (when (.somethingSelected cm) (let [first-sel (-> cm .listSelections first) text (-> cm .getSelections first) anchor (.-anchor first-sel) head (.-head first-sel) left-of-start (left cm anchor head) start-cur (cursor cm (inc (index cm left-of-start))) end-cur (right cm anchor head)] [left-of-start start-cur end-cur text (.-line start-cur) (.-line end-cur)]))) (defn get-types "get the types from cursors c1 to c2. assumes 1 is to the left of 2 and not vice versa." [cm c1 c2] (loop [types [], cur c1] (let [{:keys [type right-cur]} (get-info cm cur) types' (conj types type)] (if (= cur c2) types' (recur types' right-cur))))) (defn selection-completely-satisfies-pred? "true if every position's type satisfies pred, for the entire (first) selection" [cm pred] (when-let [[_ c1 c2] (selection-info cm)] (every? pred (get-types cm c1 c2)))) (defn selection-completely-whitespace? [cm] (selection-completely-satisfies-pred? cm nil?)) (defn not-code? [type] (or (nil? type) (= type "comment"))) (defn selection-completely-non-code? [cm] (selection-completely-satisfies-pred? 
cm not-code?)) (defn to-comment "starts each line in 's' with ;; and appends 'post-script'" [s postscript] (let [cmnt (->> s str/split-lines (map #(str/replace % #"^" ";; ")) (str/join "\n"))] (str cmnt "\n" postscript))) (defn uncomment "removes leading whitespace and semicolons from lines in 's'" [s] (->> s str/split-lines (map #(str/replace % #"^\s*;+" "")) (str/join "\n"))) (defn indent-lines "indents lines from a to z (line numbers). assumes a is before z." [cm a z] (doseq [line (range a (inc z))] (.indentLine cm line))) (defn uncomment-selection "removes whitespace and leading semicolons from selection, replaces selection with the result, indents lines affected." [cm] (when-let [[_ c1 c2 text] (selection-info cm)] (.replaceSelection cm (uncomment text)) (indent-lines cm (.-line c1) (.-line c2)))) (defn append "returns the result of appending the applicable part of 'tok' to 's'. this is for collecting all the text on a line after 'ch'" [ch s tok] (if (< ch (.-end tok)) (str s (subs (.-string tok) (- (max ch (.-start tok)) (.-start tok)))) s)) (defn get-text-to-end-of-line [cm cur] (let [toks (.getLineTokens cm (.-line cur)) ch (.-ch cur)] (reduce (partial append ch) "" toks))) (defn comment-selection [cm] (let [[_ c1 c2 text l1 l2] (selection-info cm) text-after-selection (get-text-to-end-of-line cm c2) code-follows-selection (not= text-after-selection "") end-of-line (last-cur cm) line-to (if code-follows-selection (inc l2) l2)] (when code-follows-selection (.setSelection cm left end-of-line)) (.replaceSelection cm (to-comment text text-after-selection)) (indent-lines cm l1 line-to))) (defn line-ends-with-comment? "true if the line ends with a comment" [cm] (= "comment" (.-type (last-token cm (cursor cm))))) (defn indent-current-line [cm] (->> cm cursor .-line (.indentLine cm))) (defn go-to-comment "moves cursor to ;;X" [cm] (let [cur (cursor cm) ch (.-ch cur) i (index cm cur) c-tok (last-token cm cur) start (.-start c-tok) offset (count (take-while #(= ";" %) (.-string c-tok)))] (.setCursor cm (cursor cm (+ i (- start ch) offset))))) (defn insert-spaces-to-col-40 "presses spacebar until we are at col 40" [cm] (let [ch (-> cm cursor .-ch)] (when (< ch 40) (insert cm (str/join (repeat (- 40 ch) " ")))))) (defn go-to-comment-and-indent "moves cursor to the comment on the line and makes sure the comment starts on column 40 or greater. assumes last token is a comment" [cm] (indent-current-line cm) (let [cur (cursor cm) ch (.-ch cur) i (index cm cur) comment-start (.-start (last-token cm cur))] (.setCursor cm (cursor cm (+ i (- comment-start ch)))) (insert-spaces-to-col-40 cm) (go-to-comment cm))) (defn betw-code-and-line-end? "true if code is to the left and whitespace* is to the right. assumes you already know line does not end with a comment." [cm] (let [cur (cursor cm) toks (.getLineTokens cm (.-line cur)) ch (.-ch cur) tests (map #(or (<= (.-end %) ch) (nil? (.-type %))) toks)] (and (seq toks) ; the line is not empty (every? true? tests) ; there's only whitespace to the right (some #(not (nil? (.-type %))) toks)))) ; there's code on the left (defn move-to-end-of-line "moves cursor to end of last non-whitespace token on a line. returns a vector of new index, new ch, and new cursor." ([cm] (move-to-end-of-line cm (cursor cm))) ([cm cur] (let [end (->> cur .-line (.getLineTokens cm) (remove #(nil? 
(.-type %))) last .-end) ch (.-ch cur) i (index cm cur) i' (+ i (- end ch)) cur' (cursor cm i')] (.setCursor cm cur') [i' (.-ch cur') cur']))) (defn select-rest-of-line "selects from current position to the end of the line" [cm] (.setSelection cm (cursor cm) (last-cur cm))) (defn delete-to-end-of-line "deletes from current position to the end of the line" [cm] (.replaceRange cm "" (cursor cm) (last-cur cm))) (defn create-comment-at-end "starts a ; comment at column 40 or greater and moves to it." [cm] (indent-current-line cm) (move-to-end-of-line cm) (insert cm " ") (insert-spaces-to-col-40 cm) (insert cm "; ") (delete-to-end-of-line cm)) (defn line-is-whitespace? "returns true if line is all whitespace" [cm] (->> cm cursor .-line (.getLineTokens cm) (every? #(nil? (.-type %))))) (defn create-line-comment "creates and indents a ;; comment" [cm] (insert cm ";; ") (delete-to-end-of-line cm) (indent-current-line cm)) (defn new-line-and-comment "creates and indents a ;; comment on a new line" [cm] (indent-current-line cm) (insert cm "\n\n") (.execCommand cm "goLineDown") (.execCommand cm "goLineDown") (indent-current-line cm) (.execCommand cm "goLineUp") (create-line-comment cm)) (defn insert-line-comment-here "creates and indents a ;; comment on this line" [cm] (insert cm "\n") (.execCommand cm "goLineDown") (indent-current-line cm) (.execCommand cm "goLineUp") (create-line-comment cm)) (defn in-code? "returns true if token is in the middle of code. assumes you've already ruled out comments." [cm] (let [{:keys [type start end ch]} (get-info cm)] (and (< start ch) (< ch end) (not (nil? type))))) (defn in-whitespace? "returns true if token is to the right of whitespace" [cm] (-> cm get-type nil?)) (defn code-to-left? "returns true if there's any code to the left of cursor. assumes you've already ruled out comments so only looks for non nil tokens" [cm] (let [cur (cursor cm) toks (.getLineTokens cm (.-line cur)) ch (.-ch cur) code (map #(and (not (nil? (.-type %))) (or (<= (.-end %) ch) (and (< (.-start %) ch) (< ch (.-end %))))) toks)] (and (seq toks) ; the line is not empty (some true? code)))) ; there's one token that contains code to the left (defn ^:export comment-dwim [cm] (cond (selection-completely-whitespace? cm) :do-nothing (selection-completely-non-code? cm) (uncomment-selection cm) (.somethingSelected cm) (comment-selection cm) (line-ends-with-comment? cm) (go-to-comment-and-indent cm) (betw-code-and-line-end? cm) (create-comment-at-end cm) (in-code? cm) (create-comment-at-end cm) (in-string? cm) (create-comment-at-end cm) (line-is-whitespace? cm) (create-line-comment cm) (and (code-to-left? cm) (in-whitespace? cm)) (new-line-and-comment cm) (in-whitespace? cm) (insert-line-comment-here cm) :default :do-nothing)) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-newline ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; seems like code mirror behaves as desired already ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-forward-delete ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn backspace "delete 1 or n char to left" ([cm] (backspace cm 1)) ([cm n] (let [-n #(- % n) cur (cursor cm) cur0 (->> cur (index cm) -n (cursor cm))] (.replaceRange cm "" cur0 cur)))) (defn right-cur-would-be-whitespace? "true if this position would be whitespace if we pressed the spacebar." 
[cm cur right-cur] (let [original-cur (cursor cm) _ (insert cm " " 0 cur) answer (nil? (get-type cm right-cur))] (backspace cm) (.setCursor cm original-cur) answer)) (defn closing-delim? "returns true for closing brackets and for closing double-quotes" [cm cur] (let [{:keys [string type left-char right-cur]} (get-info cm cur)] ;;(println "closing delim?" type string left-char) (or (and (is-bracket-type? type) (closer? left-char)) (end-of-a-string? cm cur) (and (= type "string") (= "\"" left-char) ;; at this point, we could be just inside the start of a string. ;; if we check the type at the position to the right, this could ;; trick us: "X""hello" ... one way to be absolutely sure we're ;; at the end of a string is to add a space temporarily and see ;; if code mirror says its type is 'null' or 'string'. (right-cur-would-be-whitespace? cm cur right-cur))))) (defn opening-doublequote? "returns true if cur is just to the right of an opening doublequote" ([cm cur] (let [{:keys [type left-char right-cur]} (get-info cm cur)] (opening-doublequote? cm type left-char right-cur))) ([cm type left-char right-cur] (and (= type "string") (= "\"" left-char) right-cur (= "string" (get-type cm right-cur))))) (defn closing-doublequote? "returns true if cur is just to the right of a closing doublequote" [cm cur] (let [{:keys [type left-char right-cur]} (get-info cm cur) right-type (get-type cm right-cur)] (and (= type "string") (= "\"" left-char) (not= right-type "string")))) (defn opening-delim? "returns true for opening brackets and for opening double-quotes" [cm cur] (let [{:keys [string type left-char right-cur]} (get-info cm cur)] (or (and (is-bracket-type? type) (opener? left-char)) (opening-doublequote? cm type left-char right-cur)))) (defn opening-delim-for-empty-pair? "returns true for an opening bracket of an empty pair ()" [cm cur] (let [{:keys [left-char right-char right-cur]} (get-info cm cur)] (and (opening-delim? cm cur) right-cur (closing-delim? cm right-cur) (pair? left-char right-char)))) (defn opening-delim-for-non-empty-pair? "returns true for an opening bracket of a pair that contains one or more chars." [cm] (let [{:keys [left-char right-char cur]} (get-info cm)] (and (opening-delim? cm cur) (not (pair? left-char right-char))))) (defn move "moves the cursor by 'offset' places, negative for left. returns the cursor." [cm offset] (->> cm index (+ offset) (cursor cm) (.setCursor cm)) (cursor cm)) (defn delete "delete 1 or n char to right" ([cm] (delete cm 1)) ([cm n] (let [+n #(+ % n) cur (cursor cm) cur2 (->> cur (index cm) +n (cursor cm))] (.replaceRange cm "" cur cur2)))) (defn whitespace? "returns true if cursor indicates whitespace" [cm cur] (let [info (get-info cm cur)] (and (not (nil? info)) (nil? (:type info))))) (defn bracket? "true if cursor info indicates opening/closing bracket or quote" [cm cur] (let [{:keys [type left-char] :as info} (get-info cm cur)] (or (is-bracket-type? type) (and (= "string" type) (= "\"" left-char))))) (defn select-pair "assumes a pair of brackets surround the cursor. selects the pair." [cm] (let [i (->> cm cursor (index cm)) c1 (->> i dec (cursor cm)) c2 (->> i inc (cursor cm))] (.setSelection cm c1 c2))) (defn delete-selection [cm] (.replaceSelection cm "")) (defn delete-pair "assumes a pair of brackets surround the cursor. deletes the pair." 
[cm] (backspace cm) (delete cm)) (defn move-right [cm] (move cm 1)) (defn move-left [cm] (move cm -1)) (defn ^:export forward-delete "paredit-forward-delete exposed for keymap" [cm] (let [{:keys [cur right-cur] :as info} (get-info cm)] (cond (.somethingSelected cm) (delete-selection cm) (whitespace? cm right-cur) (delete cm) (not (bracket? cm right-cur)) (delete cm) (opening-delim? cm right-cur) (move-right cm) (opening-delim-for-empty-pair? cm cur) (delete-pair cm) :default :do-nothing))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-backward-delete ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn ^:export backward-delete "paredit backward delete exposed for keymap" [cm] (let [cur (cursor cm)] (cond (.somethingSelected cm) (delete-selection cm) (in-escaped-char? cm cur) (delete-pair cm) (escaped-char-to-left? cm cur) (backspace cm 2) (opening-delim-for-non-empty-pair? cm) :do-nothing (opening-delim-for-empty-pair? cm cur) (delete-pair cm) (closing-delim? cm cur) (move-left cm) :default (backspace cm)))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-kill ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn in-regular-string? "returns true if token is in the middle of a string." [cm cur] (or (opening-doublequote? cm cur) (and (= "string" (get-type cm cur)) (not (closing-doublequote? cm cur))))) (defn str-ends-on-another-line? "true if these values are from a string token that ends on another line" [type string] (and (= "string" type) (not= "\"" (last string)))) (defn go-to-end-of-string "moves cursor to end of the string you're in (but still inside the closing doublequote). assumes you're in a string. the end could be on a different line from where you start" ([cm] (go-to-end-of-string cm (cursor cm))) ([cm cur] (let [{:keys [left-char right-cur type string ch end]} (get-info cm cur)] (cond (nil? type) (go-to-end-of-string cm right-cur) (str-ends-on-another-line? type string) (do (move-to-end-of-line cm cur), (move cm 2), (go-to-end-of-string cm)) (opening-doublequote? cm type left-char right-cur) (do (move cm 1), (go-to-end-of-string cm)) (and (= "string" type)) (move cm (- end ch 1 )) :default cur)))) (defn select-rest-of-string "assumes you are in a string." [cm] (let [c1 (cursor cm) c2 (go-to-end-of-string cm c1)] (.setSelection cm c1 c2))) (defn betw-code-and-comment? "true if code is to the left and whitespace* comment* is to the right." [cm cur] (when cur (let [toks (.getLineTokens cm (.-line cur)) ch (.-ch cur) tests (map #(or (<= (.-end %) ch) (or (nil? (.-type %)) (= "comment" (.-type %)))) toks)] (and (seq toks) ; the line is not empty (every? true? tests) ; there's only junk to the right (some #(not (nil? (.-type %))) toks))))) (defn rest-of-siblings [cm] (let [c1 (cursor cm) parent-closer (skip cm parent-closer-sp) c2 (when parent-closer (cursor cm (dec (index cm parent-closer))))] [c1 c2])) (defn select-rest-of-siblings [cm] (let [[c1 c2] (rest-of-siblings cm)c1 (cursor cm)] (when c2 (.setSelection cm c1 c2)))) (defn kill-from-to [cm i j] (let [cur (cursor cm i)] (CodeMirror.emacs.kill cm cur (cursor cm j)) (.setCursor cm cur))) (defn kill-region [cm] (let [first-sel (-> cm .listSelections first) anchor (.-anchor first-sel) head (.-head first-sel)] (CodeMirror.emacs.kill cm anchor head))) (defn kill-pair "assumes a pair of brackets surround the cursor. deletes the pair." 
  [cm] (select-pair cm) (kill-region cm))

(defn kill-rest-of-string [cm]
  (select-rest-of-string cm)
  (kill-region cm))

(defn kill-rest-of-line [cm]
  (select-rest-of-line cm)
  (kill-region cm))

(defn kill-rest-of-siblings [cm]
  (select-rest-of-siblings cm)
  (kill-region cm))

(defn kill-next-sibling
  "kills the next sibling to the right of the cursor"
  [cm]
  (let [from (cursor cm)
        mid (end-of-next-sibling cm from)
        to (if (betw-code-and-comment? cm mid) (last-cur cm mid) mid)]
    (when to
      (.setSelection cm from to)
      (kill-region cm))))

(defn ^:export kill
  "paredit kill exposed for keymap."
  [cm]
  (let [cur (cursor cm)]
    (cond
      (.somethingSelected cm) (kill-region cm)
      (in-regular-string? cm cur) (kill-rest-of-string cm)
      (betw-code-and-comment? cm cur) (kill-rest-of-line cm)
      (in-escaped-char? cm cur) (kill-pair cm)
      (code-to-left? cm) (kill-rest-of-siblings cm)
      :default (kill-next-sibling cm))))

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; paredit-forward-kill-word M-d
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

(defn comment? [cm cur] (= "comment" (get-type cm cur)))

(defn start-of-comment?
  "true if block cursor is on the first ; of a line comment"
  [cm cur]
  (let [{:keys [type right-cur]} (get-info cm cur)
        right-type (get-type cm right-cur)]
    (and (not= "comment" type) (= "comment" right-type))))

(defn idx-of-next [cm i chars member max]
  (let [{:keys [right-char]} (get-info cm (cursor cm i))]
    (cond
      (= i max), (guard)
      (= member (contains? chars right-char)), i
      :default, (fn [] (idx-of-next cm (inc i) chars member max)))))

(defn index-of-next [cm i chars]
  (trampoline idx-of-next cm i chars true (char-count cm)))

(defn index-of-next-non [cm i chars]
  (trampoline idx-of-next cm i chars false (char-count cm)))

(def non-word-chars (set "(){}[]|&; \n"))
(def comment-start (set "; "))
(def semicolons #{";"})
(def comment-whitespace #{" " (str \tab)})

(defn end-of-next-word
  "assumes i is in a comment or a string. returns the i at the end of the next
  word (going to the right) in this comment/string"
  [cm i]
  (let [{:keys [ch start string]} (get-info cm (cursor cm i))
        tail (subs string (- ch start))
        word (re-find #"^\s*[\S]*" tail)
        length (count word)
        quote (if (str/ends-with? word "\"") -1 0)]
    (+ i length quote)))

(defn start-of-prev-word
  "assumes i is in a comment or a string. returns the i at the start of the prev
  word (going to the left) in this comment/string"
  [cm i]
  (let [{:keys [ch start string]} (get-info cm (cursor cm i))
        head (subs string 0 (- ch start))
        last-word (re-find #"[\S]*\s*$" head)
        length (count last-word)
        quote (if (str/ends-with? last-word "\"") 1 0)]
    (- i length quote)))

(defn kill-next-word
  "assumes i is in a comment or a string. kills text from i to the end of the
  next word in this comment/string"
  [cm i]
  (kill-from-to cm i (end-of-next-word cm (inc i)))
  (.setCursor cm (cursor cm i)))

(defn fwd-kill-word
  "trampoline helper for forward-kill-word. 'mark' is the index to start killing
  from. 'i' is the index we're inspecting. 'n' is how many calls remaining that
  we'll support before stopping because of a suspected infinite loop. first call
  can put the count of characters in this cm instance."
  [cm mark i n]
  (let [m (dec n), j (inc i), cur (cursor cm i), right-cur (cursor cm j)]
    (cond
      (neg? n) (guard)
      (eof? cm right-cur) :do-nothing
      (whitespace? cm right-cur) #(fwd-kill-word cm mark (token-end-index cm j) m)
      (start-of-a-string? cm right-cur) #(fwd-kill-word cm j j m)
      (in-regular-string?
cm right-cur) (kill-next-word cm mark) (opening-delim? cm right-cur) #(fwd-kill-word cm j j m) (closing-delim? cm right-cur) #(fwd-kill-word cm j j m) (at-a-word? cm right-cur) (kill-from-to cm mark (token-end-index cm j)) (start-of-comment? cm cur) (let [j (index-of-next-non cm i semicolons)] #(fwd-kill-word cm j j m)) (comment? cm right-cur) (kill-next-word cm mark) :else (println "unhandled")))) (defn ^:export forward-kill-word "paredit forward-kill-word exposed for keymap." [cm] (let [i (index cm)] (trampoline fwd-kill-word cm i i (char-count cm)))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-backward-kill-word ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn start-of-token-at [cm i] (let [{:keys [ch start]} (get-info cm (cursor cm i))] (- i (- ch start)))) (defn kill-prev-word-in-comment "assumes i is in a comment. kills text from i to the beginning of the previous word in this comment" [cm i] (let [{:keys [ch start string]} (get-info cm (cursor cm i)) cur-offset-in-string (- ch start) head (subs string 0 cur-offset-in-string) tail (subs string cur-offset-in-string) word (re-find #"\S*\s*$" head) length (count word)] (kill-from-to cm (- i length) i) (.setCursor cm (cursor cm (- i length))))) (defn beginning-of-line? [cm cur] (let [{:keys [start end type] :as info} (get-info cm cur)] (and (not (nil? info)) (nil? type) (= start end 0)))) (defn bkwd-kill-skippable-comment-char? [cm cur] (let [{:keys [type left-char] :as info} (get-info cm cur)] (and (not (nil? info)) (= "comment" type) (re-matches #"\s|;" left-char)))) (defn bkwd-kill-word "trampoline helper for backward-kill-word. 'mark' is the index to start killing from. 'i' is the index we're inspecting. 'n' is how many more calls we'll entertain before stopping because we suspect an infinite loop. first call can use char count for 'n'." [cm mark i n] (let [h (dec i), m (dec n), cur (cursor cm i)] (cond (neg? n) (guard) (bof? cm cur) :do-nothing (beginning-of-line? cm cur) #(bkwd-kill-word cm h h m) (whitespace? cm cur) #(bkwd-kill-word cm mark (start-of-token-at cm i) m) (opening-delim? cm cur) #(bkwd-kill-word cm h h m) (closing-delim? cm cur) #(bkwd-kill-word cm h h m) (at-a-word? cm cur) (kill-from-to cm (start-of-token-at cm i) mark) (start-of-comment? cm cur) (let [j (index-of-next-non cm i semicolons)] #(fwd-kill-word cm j j m)) (bkwd-kill-skippable-comment-char? cm cur) #(bkwd-kill-word cm mark h m) (comment? cm cur) (kill-prev-word-in-comment cm mark) :else (println "unhandled")))) (defn ^:export backward-kill-word "paredit backward-kill-word exposed for keymap." [cm] (let [i (index cm)] (trampoline bkwd-kill-word cm i i (char-count cm)))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-forward ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn fwd "trampoline helper for forward. 'i' is the index we're inspecting. 'n' is how many more calls we'll entertain before suspecting an infinite loop. first call can pass in char count." [cm i n] (let [j (inc i), m (dec n), cur (cursor cm i), right-cur (cursor cm j)] (cond (neg? n) (guard) (nil? right-cur) :do-nothing (eof? cm right-cur) :do-nothing (whitespace? cm right-cur) #(fwd cm j m) (opening-delim? cm right-cur) (.setCursor cm (end-of-next-sibling cm cur)) (closing-delim? cm right-cur) (.setCursor cm right-cur) (at-a-word? cm right-cur) (.setCursor cm (cursor cm (token-end-index cm j))) (comment? 
cm right-cur) #(fwd cm (token-end-index cm j) m) (in-string? cm right-cur) (.setCursor cm (cursor cm (end-of-next-word cm j))) :else (println "unhandled")))) (defn ^:export forward "paredit forward exposed for keymap. find the first thing that isn't whitespace or comment. if it is a closing bracket, step past it. otherwise skip over the thing." [cm] (trampoline fwd cm (index cm) (char-count cm))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-backward ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn bkwd "trampoline helper for backward. 'i' is the index we're inspecting. 'n' is number of remaining calls before we suspect an infinite loop" [cm i n] (let [h (dec i), m (dec n), cur (cursor cm i)] (cond (neg? n) (guard) (nil? cur) :do-nothing (bof? cm cur) (.setCursor cm (cursor cm h)) (whitespace? cm cur) #(bkwd cm h m) (opening-delim? cm cur) (.setCursor cm (cursor cm h)) (closing-delim? cm cur) (.setCursor cm (start-of-prev-sibling cm cur)) (at-a-word? cm cur) (.setCursor cm (start-of-prev-sibling cm cur)) (comment? cm cur) #(bkwd cm (start-of-prev-sibling cm cur) m) (in-string? cm cur) (.setCursor cm (cursor cm (start-of-prev-word cm h))) :else (println "unhandled")))) (defn ^:export backward "paredit backward exposed for keymap." [cm] (trampoline bkwd cm (index cm) (char-count cm))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-forward-up ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn forward-up-cur "get cursor corresponding to paredit forward up" ([cm] (forward-up-cur cm (cursor cm))) ([cm cur] (cond (nil? cur), nil (and (in-string? cm cur) (not (end-of-a-string? cm cur))) (token-end cm cur) :default, (skip cm parent-closer-sp)))) (defn ^:export forward-up "paredit forward-up exposed for keymap." ([cm] (forward-up cm (cursor cm))) ([cm cur] (when-let [cur' (forward-up-cur cm cur)] (.setCursor cm cur')))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-backward-up ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn backward-up-cur "get cursor corresponding to paredit backward up" ([cm] (backward-up-cur cm (cursor cm))) ([cm cur] (start-of-prev-sibling cm (forward-up-cur cm cur)))) (defn ^:export backward-up "paredit backward-up exposed for keymap." ([cm] (backward-up cm (cursor cm))) ([cm cur] (when-let [cur' (backward-up-cur cm cur)] (.setCursor cm cur')))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-wrap-round ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn end-of-this "go to the end of the current thing, whether it be a string or a word of code" [cm cur] (if (in-string? cm cur) (token-end cm cur) (end-of-next-sibling cm cur))) (defn ^:export wrap-round "paredit wrap-round exposed for keymap." ([cm] (wrap-round cm (cursor cm))) ([cm cur] (let [cur-close (end-of-this cm cur) cur-open (start-of-prev-sibling cm cur-close) i (inc (index cm cur-open)) text (.getRange cm cur-open cur-close) text' (str "(" text ")")] (.replaceRange cm text' cur-open cur-close) (.setCursor cm (cursor cm i))))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-splice-sexp M-s ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn ^:export splice-sexp "paredit splice-sexp exposed for keymap. 
unlike emacs' version, this does not splice a string by dropping its double-quotes." ([cm] (splice-sexp cm (cursor cm))) ([cm cur] (let [i (dec (index cm)) cur-close (skip cm parent-closer-sp) cur-open (start-of-prev-sibling cm cur-close) text' (when cur-open (.getRange cm (cursor cm (inc (index cm cur-open))) (cursor cm (dec (index cm cur-close)))))] (when text' (.replaceRange cm text' cur-open cur-close) (.setCursor cm (cursor cm i)))))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-splice-sexp-killing-backward M-<up> ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn ^:export splice-sexp-killing-backward "paredit splice-sexp-killing-backward exposed for keymap. like emacs' version, this doesn't actually kill to the clipboard. it just deletes. but unlink emacs, this does not splice a string by dropping its double-quotes." ([cm] (splice-sexp-killing-backward cm (cursor cm))) ([cm cur] (if (in-string? cm cur) (backward-up cm cur)) (let [cur' (cursor cm) cur-close (skip cm parent-closer-sp) cur-open (start-of-prev-sibling cm cur-close) text' (when cur-close (.getRange cm cur' (cursor cm (dec (index cm cur-close)))))] (when text' (.replaceRange cm text' cur-open cur-close) (.setCursor cm cur-open))))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-splice-sexp-killing-forward M-<down> ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn ^:export splice-sexp-killing-forward "paredit splice-sexp-killing-forward exposed for keymap. like emacs' version, this doesn't actually kill to the clipboard. it just deletes. but unlink emacs, this does not splice a string by dropping its double-quotes." ([cm] (splice-sexp-killing-forward cm (cursor cm))) ([cm cur] (if (in-string? cm cur) (forward-up cm cur)) (let [cur' (cursor cm) final-cur (cursor cm (dec (index cm cur'))) cur-close (skip cm parent-closer-sp) cur-open (start-of-prev-sibling cm cur-close) keep-from (when cur-open (cursor cm (inc (index cm cur-open))) ) text (when keep-from (.getRange cm cur-open cur-close)) text' (when keep-from (.getRange cm keep-from cur'))] (when text' (.replaceRange cm text' cur-open cur-close) (.setCursor cm final-cur))))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-raise-sexp M-r ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn ^:export raise-sexp "paredit raise-sexp exposed for keymap." ([cm] (raise-sexp cm (cursor cm))) ([cm cur] (if (in-string? cm cur) (backward-up cm cur)) (let [c1 (cursor cm) c2 (end-of-next-sibling cm c1) text (when c2 (.getRange cm c1 c2)) cur-close (when text (skip cm parent-closer-sp)) cur-open (when cur-close (start-of-prev-sibling cm cur-close))] (when cur-open (.replaceRange cm text cur-open cur-close) (.setCursor cm cur-open))))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-forward-slurp-sexp C-), C-<right> ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn fwd-string-slurp "String slurping consists of simply 'go to end of string, mark as parent, go to next sibling end, mark as sibling'" [cm cur] (let [parent (if (start-of-a-string? 
cm cur) (end-of-next-sibling cm cur) (end-of-next-sibling cm (start-of-prev-sibling cm cur))) sibling (end-of-next-sibling cm parent)] (when sibling [parent sibling "\""]))) (defn fwd-slurp "trampoline-able that looks for an ancestor closing bracket (parent, grandparent, etc) that has a sibling to slurp. returns a vector of the cur to the right of such a bracket, the cur to the right of the sibling that will be slurped, the string of the bracket to move. nil if there is no such anscestor that can slurp." [cm cur n] (if (and (in-string? cm cur) (not (end-of-a-string? cm cur))) (fwd-string-slurp cm cur) (when (>= n 0) (let [parent (skip cm parent-closer-sp cur) sibling (end-of-next-sibling cm parent)] (if sibling [parent sibling (get-string cm parent)] (fn [] (fwd-slurp cm parent (dec n)))))))) (defn ^:export forward-slurp-sexp "paredit forward-slurp-sexp exposed for keymap." ([cm] (forward-slurp-sexp cm (cursor cm))) ([cm cur] (when-let [[parent sibling bracket] (trampoline fwd-slurp cm cur (char-count cm))] #_(js/console.log "FWD-SLURP" parent sibling bracket) (insert cm bracket 0 sibling);; put bracket in new spot (.replaceRange cm "" (cursor cm (- (index cm parent) (count bracket))) parent));; remove bracket from old spot (.setCursor cm cur))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-forward-down C-M-d ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn fwd-down "trampoline-able that looks for the cursor where we'd be if we went forward and then down into the next sibling that is available. nil if there is no sibling to enter." [cm cur n] (cond (<= n 0), nil (nil? cur), nil (opening-delim? cm cur), cur :default, (when-let [cur' (token-end cm cur 1)] (fn [] (fwd-down cm cur' (dec n)))))) (defn forward-down-cur ([cm] (forward-down-cur cm (cursor cm))) ([cm cur] (trampoline fwd-down cm cur (char-count cm)))) (defn ^:export forward-down ([cm] (forward-down cm (cursor cm))) ([cm cur] (when-let [cur' (forward-down-cur cm cur)] (.setCursor cm cur')))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-backward-down C-M-p ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn bkwd-down "trampoline-able that looks for the cursor where we'd be if we went backward and then down into the prev sibling that is available. nil if there is no sibling to enter." [cm cur n] (let [{:keys [left-cur i start ch bof]} (get-info cm cur)] (cond (<= n 0), (guard) (closing-delim? cm cur), left-cur bof, nil (zero? ch), (fn [] (bkwd-down cm (cursor cm (dec i)) (dec n))) :default, (fn [] (bkwd-down cm (cursor cm (- i (- ch start))) (dec n)))))) (defn ^:export backward-down ([cm] (backward-down cm (cursor cm))) ([cm cur] (when-let [cur' (trampoline bkwd-down cm cur (char-count cm))] (.setCursor cm cur')))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-backward-slurp-sexp C-), C-<right> ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn bkwd-slurp "trampolin-able that looks for an ancestor opening bracket (parent, grandparent, etc) that has a sibling to slurp. returns a vector of the cur to the left of such a bracket, the cur to the left of the sibling that will be slurped, the string of the bracket to move. nil if there is no such anscestor that can slurp." 
[cm cur n] (when (>= n 0) (let [ending (skip cm parent-closer-sp cur) parent (start-of-prev-sibling cm ending) sibling (start-of-prev-sibling cm parent) bracket-cur (forward-down-cur cm parent)] (if (and (not (nil? sibling)) (not (nil? bracket-cur))) [parent sibling (get-string cm bracket-cur)] (fn [] (bkwd-slurp cm parent (dec n))))))) (defn ^:export backward-slurp-sexp "paredit backward-slurp-sexp exposed for keymap." ([cm] (backward-slurp-sexp cm (cursor cm))) ([cm cur] (let [i (index cm cur)] ;; line,ch may change but index will not. (when-let [[parent sibling bracket] (trampoline bkwd-slurp cm cur (char-count cm))] (.replaceRange cm "" parent (cursor cm (+ (index cm parent) (count bracket)))) (insert cm bracket 0 sibling)) (.setCursor cm (cursor cm i))))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-forward-barf-sexp C-\} C-<left> ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn find-first-ws [stg ch] (let [cnt (count stg)] (loop [ch ch] (if (or (= (.charAt stg ch) " ") (= ch cnt)) ch (recur (inc ch)))))) (defn find-first-nonws [stg ch] (let [cnt (count stg)] (loop [ch ch] (if (or (not= (.charAt stg ch) " ") (= ch cnt)) ch (recur (inc ch)))))) (defn rfind-blank-or-start [stg] (let [rstg (str/reverse stg) cnt (count rstg) bdq? (= (.charAt stg 0) "\"") ch (->> 0 (find-first-ws rstg) (find-first-nonws rstg))] #_(js/console.log rstg ch) (cond (not= ch cnt) (dec ch) ; found ws and nonws bdq? (- ch 2) ; has beg dq and at beg :else ch))) (defn fwd-string-barf "String barffing consists of simply 'go to end of string, mark as parent, reverse look for non whitespace, reverse look for whitespace, mark as sibling'" [cm cur] (let [parent (if (start-of-a-string? cm cur) (end-of-next-sibling cm cur) (end-of-next-sibling cm (start-of-prev-sibling cm cur))) inside (cursor cm (dec (index cm parent))) {:keys [string i]} (get-info cm inside) ri (rfind-blank-or-start string) sibling (cursor cm (- i ri))] #_(js/console.log (index cm cur) i ri (- i ri)) (when (and parent inside) [parent inside sibling "\"" (< (- i ri) (index cm cur))]))) (defn fwd-barf "trampoline-able that looks for an ancestor closing bracket (parent, grandparent, etc) that has a sibling to barf. returns a vector of the cur to the right of such a bracket, the cur at the bracket, the cur where the bracket should go, the text of the bracket, and whether the operation causes the cursor to be moved. nil if there is no such anscestor that can barf" [cm cur n] (if (and (in-string? cm cur) (not (end-of-a-string? cm cur))) (fwd-string-barf cm cur) (when (>= n 0) (let [parent (skip cm parent-closer-sp cur) inside (cursor cm (dec (index cm parent))) sibling (start-of-prev-sibling cm inside) ;; prevsib: end of prev sibling if there is one: prevsib (end-of-next-sibling cm (start-of-prev-sibling cm sibling)) ;; bracket-cur: where the new bracket should go: bracket-cur (or prevsib (forward-down-cur cm (backward-up-cur cm sibling))) ;; whether the cursor needs to change: moved (and bracket-cur (< (index cm bracket-cur) (index cm cur))) ;; text of the bracket, e.g. ")" bracket (when parent (if moved (str (get-string cm parent) " ") (get-string cm parent)))] (cond (nil? parent) nil (nil? bracket-cur) (fn [] (fwd-barf cm parent (dec n))) :default [parent inside bracket-cur bracket moved]))))) (defn ^:export forward-barf-sexp "paredit forward-barf-sexp exposed for keymap." 
([cm] (forward-barf-sexp cm (cursor cm))) ([cm cur] (if-let [[parent inside sibling bracket moved] (trampoline fwd-barf cm cur (char-count cm))] (do #_(js/console.log parent inside sibling bracket moved) (.replaceRange cm "" inside parent) (insert cm bracket 0 sibling) (if moved (.setCursor cm sibling) (.setCursor cm cur))) (.setCursor cm cur)))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-backard-barf-sexp C-{, C-M-<right>, Esc C-<right> ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn bkwd-barf "trampoline-able that looks for an ancestor opening bracket (parent, grandparent, etc) that has a sibling to barf. returns... . nil if there is no such anscestor that can barf" [cm cur n] (when (>= n 0) (let [outside (backward-up-cur cm cur) inside (forward-down-cur cm outside) end-of-barfed-sexp (end-of-next-sibling cm inside) end-of-new-first-sib (end-of-next-sibling cm end-of-barfed-sexp) bracket-cur (start-of-prev-sibling cm end-of-new-first-sib) bracket-text (get-string cm inside) moved (and bracket-cur (< (index cm cur) (index cm bracket-cur)))] (cond (nil? outside) nil (nil? end-of-barfed-sexp) (fn [] (bkwd-barf cm outside (dec n))) :default [outside inside bracket-cur bracket-text moved])))) (defn ^:export backward-barf-sexp "paredit backward-barf-sexp exposed for keymap." ([cm] (backward-barf-sexp cm (cursor cm))) ([cm cur] (if-let [[outside inside bracket-cur bracket-text moved] (trampoline bkwd-barf cm cur (char-count cm))] (do (insert cm bracket-text 0 bracket-cur) (.replaceRange cm "" outside inside) (if moved (.setCursor cm (cursor cm (- (index cm cur) (count bracket-text)))) (.setCursor cm cur))) (.setCursor cm cur)))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-split-sexp M-S ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn split-form "split sexp for (forms like this)" [cm cur] (let [close-cur (skip cm parent-closer-sp cur) close-bracket (get-string cm close-cur) open-cur (start-of-prev-sibling cm close-cur) open-bracket (get-string cm (cursor cm (inc (index cm open-cur))))] (when (and (not (nil? open-bracket)) (not (nil? close-bracket))) (.setCursor cm cur) (let [offset (if (in-whitespace? cm) 1 (do (insert cm " ") (just-one-space cm (cursor cm) false) 0)) cur' (cursor cm) i' (+ (index cm cur') offset) prev-sib (start-of-prev-sibling cm cur') prev-sib-end (end-of-next-sibling cm prev-sib) next-sib (end-of-next-sibling cm cur) next-sib-start (start-of-prev-sibling cm next-sib)] (if (nil? next-sib-start) (insert cm open-bracket) (insert cm open-bracket 0 next-sib-start)) (if (nil? prev-sib-end) (do (move-left cm) (insert cm close-bracket)) (insert cm close-bracket 0 prev-sib-end)) (.setCursor cm (cursor cm i')))))) (defn split-string "split sexp for \"strings like this\"" [cm cur] (let [open-quote-i (index-of-next-non cm (index cm cur) " ")] (.replaceRange cm "\" \"" cur (cursor cm open-quote-i)) (move-left cm) (move-left cm))) (defn ^:export split-sexp "paredit split-sexp exposed for keymap." ([cm] (split-sexp cm (cursor cm))) ([cm cur] (if (in-string? cm cur) (split-string cm cur) (split-form cm cur)))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-join-sexps M-J ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn ^:export join-sexps "paredit join-sexps exposed for keymap." 
([cm] (join-sexps cm (cursor cm)))
  ([cm cur]
   (let [left-sib (start-of-prev-sibling cm cur)
         close (end-of-next-sibling cm left-sib)
         right-sib (end-of-next-sibling cm cur)
         open (start-of-prev-sibling cm right-sib)
         open-right (when open (cursor cm (inc (index cm open))))
         close-char (get-string cm close)
         open-char (get-string cm open-right)]
     (if (and (not (nil? open)) (not (nil? close)) (pair? open-char close-char))
       (do (.setCursor cm open)
           (delete cm)
           (.setCursor cm close)
           (backspace cm)
           (.setCursor cm (if (= (.-line open) (.-line close))
                            (cursor cm (dec (index cm cur)))
                            cur)))
       (.setCursor cm cur)))))

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; paredit-reindent-defun M-q
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

(defn top-most-opener-candidate
  "trampoline-able that looks for the top-most opening bracket for the
  specified location. returns the current cursor if there is no such ancestor"
  [cm cur n]
  (when (>= n 0)
    (if-let [parent (backward-up-cur cm cur)]
      (fn [] (top-most-opener-candidate cm parent (dec n)))
      cur)))

(defn top-most-opener
  "get the top most opening bracket for the specified location. nil if there
  is no such bracket."
  ([cm] (top-most-opener cm (cursor cm)))
  ([cm cur]
   (let [candidate (top-most-opener-candidate cm cur (char-count cm))]
     (when (not= candidate cur) candidate))))

(defn ^:export reindent-defun
  "paredit reindent-defun exposed for keymap."
  ([cm] (reindent-defun cm (cursor cm)))
  ([cm cur]
   (let [open (trampoline top-most-opener cm cur)
         close (end-of-next-sibling cm open)
         open-line (when open (.-line open))
         line-offset (when open (- (.-line cur) open-line))
         line-len (count (.getLine cm (.-line cur)))
         ch (.-ch cur)]
     (when (and (not (nil? open)) (not (nil? close)))
       (indent-lines cm (.-line open) (.-line close))
       (dotimes [_ line-offset] (.execCommand cm "goLineDown"))
       (.execCommand cm "goLineStart")
       (.setCursor cm (cursor cm (+ (index cm) ch
                                    (- (count (.getLine cm (.-line (cursor cm))))
                                       line-len))))))))

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; paredit-forward-sexp
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

(defn ^:export forward-sexp
  "forward-sexp exposed for keymap. seems part of emacs and not part of paredit
  itself. but including it here since this will be used in things other than
  emacs itself."
  ([cm] (forward-sexp cm (cursor cm)))
  ([cm cur]
   (when-let [cur' (end-of-next-sibling cm cur)]
     (.setCursor cm cur'))))

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; paredit-backward-sexp
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

(defn ^:export backward-sexp
  "backward-sexp exposed for keymap. seems part of emacs and not part of paredit
  itself. but including it here since this will be used in things other than
  emacs itself."
  ([cm] (backward-sexp cm (cursor cm)))
  ([cm cur]
   (when-let [cur' (start-of-prev-sibling cm cur)]
     (.setCursor cm cur'))))
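;; a minimal keymap sketch (not from the original sources): one way the
;; exported commands above could be wired into a CodeMirror instance. the
;; element id and the key choices are illustrative assumptions only; kept in a
;; `comment` block so it is never evaluated.
(comment
  (def cm (js/CodeMirror.fromTextArea
            (js/document.getElementById "editor")
            #js {:mode "clojure"}))
  (.setOption cm "extraKeys"
              #js {"Ctrl-Right" #(forward-slurp-sexp %)
                   "Ctrl-Left"  #(forward-barf-sexp %)
                   "Ctrl-Alt-F" #(forward-sexp %)
                   "Ctrl-Alt-B" #(backward-sexp %)
                   "Alt-Up"     #(splice-sexp-killing-backward %)
                   "Alt-R"      #(raise-sexp %)}))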
(ns paredit-cm.core "paredit operations (exported)" (:require [clojure.string :as str] [cljsjs.codemirror] [cljsjs.codemirror.mode.clojure] [cljsjs.codemirror.keymap.emacs])) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; ;; MIT License ;; ;; Copyright (c) 2017 PI:NAME:<NAME>END_PI ;; ;; Permission is hereby granted, free of charge, to any person obtaining a copy ;; of this software and associated documentation files (the "Software"), to deal ;; in the Software without restriction, including without limitation the rights ;; to use, copy, modify, merge, publish, distribute, sublicense, and/or sell ;; copies of the Software, and to permit persons to whom the Software is ;; furnished to do so, subject to the following conditions: ;; ;; The above copyright notice and this permission notice shall be included ;; in all copies or substantial portions of the Software. ;; ;; PI:NAME:<NAME>END_PI (2019,2020): ;; Many changes and fixes for working with newer codemirror releases ;; ;; ** PAREDI PROJECT CONVENTIONS ** ;; ;; consider this notation: aXbc ;; ;; in the unit tests as well as here, aXbc contains a single capital X which ;; represents the position of the cursor. aXbc means the code mirror instance's ;; value is 'abc' and a block-style cursor is on 'b' (a bar-style cursor would ;; be between 'a' and 'b'). aXbc is what you would see if you typed a capital X ;; in this example code mirror. ;; ;; 'cur' is for the current position's cursor (on 'b' in the example). ;; 'left-cur' is for position 'a'. 'right-cur' is for position 'c'. ;; ;; if there is a current cursor cur and a new cursor, then the new cursor will ;; be named cur' (the single quote is part of the name, so read it aloud as ;; cursor-prime) ;; ;; when there are two cursors (as in the beginning and ending of a selection) we ;; use c1 and c2. it feels strange to call them 'start' and 'end' when those are ;; the names codemirror uses to refer to the ends of a token. ;; ;; the following all refer to the values for the token at 'cur': 'start' 'line' ;; 'ch' 'i' 'string' 'type' ;; ;; use the same prefixes 'left-' and 'right-' when referring to the same kinds ;; of values belonging to 'left-cur' and 'right-cur' ;; ;; ints *other than i, the code mirror index* are named with a single character ;; like 'x'. neighboring values are represented alphabetically, so (inc x) would ;; be named 'y' and (dec x) would be named 'w'. ;; ;; s1 is a string. similarly s1, s2, and s ;; ;; for numerical values like 'offset', lower is for left and higher is for ;; right, just as for code mirror's index i. ;; ;; sp is a 'skipping predicate'. these are used with a trampoline wrapper like ;; 'skip' to move along the text in code mirror until our predicate is ;; satisfied. in many cases, the predicate will push and pop openers/closers off ;; a stack and when the stack is empty and we satisfy some additional condition, ;; then we stop and return the cursor. ;; ;; functions with names ending in -sp are skipping predicates. ;; ;; currently we're assuming perfect matching of openers/closers so we don't ;; actually keep track of the stack -- we just inc and dec an int until it gets ;; to 0 and our other conditions are satisfied ;; ;; any trampoline use should be limited by the cm character count, to guard ;; against infinite loops. we'll start at the limit and count down, stopping ;; when it goes negative. ;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (enable-console-print!) 
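;; a minimal usage sketch (not from the original sources): it assumes a
;; CodeMirror instance `cm` created elsewhere with the clojure mode, and is
;; wrapped in `comment` so it is never evaluated. it illustrates the aXbc
;; cursor notation and the skipping-predicate/trampoline machinery described
;; above.
(comment
  ;; "(a Xbc)" means the editor's value is "(a bc)" with a block cursor on `b`,
  ;; i.e. the position {:line 0 :ch 3}:
  (.setValue cm "(a bc)")
  (.setCursor cm 0 3)
  ;; a skipping predicate returns a keyword such as :yes or :stop once it knows
  ;; where to stop, or a new state value to keep scanning. `skip` (defined
  ;; below) trampolines it to the right, bounded by (char-count cm):
  (defn first-closer-sp [cm cur state]
    (if (closer? (get-string cm cur)) :yes state))
  (skip cm first-closer-sp))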
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; general helper methods ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (def openers #{ "(" "[" "{" }) (def closers #{ ")" "]" "}" }) (def pair {"(" ")", "[" "]", "{" "}", "\"" "\"", ")" "(", "]" "[", "}" "{"}) (defn pair? "true if the two strings are a matching open/close pair " [s1 s2] (= (pair s1) s2)) (defn opener? [s] (contains? openers s)) (defn closer? [s] (contains? closers s)) (defn is-bracket-type? [t] (and t (str/starts-with? t "bracket"))) (defn char-count "returns the number of characters in the code mirror instance" [cm] (-> cm .getValue count)) (defn cursor "get cur, the position of the cursor" ([cm] (.getCursor cm)) ;; get current cursor ([cm i] (.posFromIndex cm i))) ;; get cursor for index i (defn index "get the index i for the cursor's position" ([cm] (index cm (cursor cm))) ([cm cur] (when cur (.indexFromPos cm cur)))) (defn bof? "true if at beginning of file" [cm cur] (zero? (index cm cur))) (defn eof? "true if at end of file" [cm cur] (= (index cm cur) (char-count cm))) (defn token "get token at cursor" [cm cur] (.getTokenAt cm cur true)) (defn get-type "get the type at the current cursor." ([cm] (get-type cm (cursor cm))) ([cm cur] (.-type (token cm cur)))) (defn get-string "gets the string of the current token" ([cm] (get-string cm (cursor cm))) ([cm cur] (when cur (.-string (token cm cur))))) (defn line-length "gets the length of the current line" ([cm] (line-length cm (cursor cm))) ([cm cur] (when cur (count (.getLine cm (.-line cur)))))) (defn last-token "returns the last token of a line" [cm cur] (->> cur .-line (.getLineTokens cm) last)) (defn last-cur "returns the last cursor of a line" ([cm] (last-cur cm (cursor cm))) ([cm cur] (let [end (.-end (last-token cm cur)) diff (- end (.-ch cur))] (cursor cm (+ diff (index cm cur)))))) (defn get-info "make info from CodeMirror more conveniently accessed by our code. we'll use destructuring and just name what we rant. hypothesizing that performance hit won't be that bad." ([cm] (get-info cm (cursor cm))) ([cm cur] (when cur (let [tok (token cm cur) eof (eof? cm cur) bof (bof? cm cur) i (index cm cur) left-cur (when-not bof (cursor cm (dec i))) right-cur (when-not eof (cursor cm (inc i)))] {:cur cur :line (.-line cur) :ch (.-ch cur) :i i :tok tok :string (.-string tok) :start (.-start tok) :end (.-end tok) :type (.-type tok) :top (-> tok .-state .-indentStack nil?) ;; true for toplevel :eof eof :bof bof :left-char (when-not bof (.getRange cm left-cur cur)) :right-char (when-not eof (.getRange cm cur right-cur)) :left-cur left-cur :right-cur right-cur :mode (.-mode (.-state tok))})))) (defn comment-or-string? "true if the type is comment or string. a lot of editing behavior (like movement and deletion) is similar when you are in a string or in a comment, so often this is the predicate for that behavior." [type] (or (= type "comment") (= type "string"))) (defn indent-line "indent the current line" [cm] (->> cm cursor .-line (.indentLine cm))) (defn escaped-char-name? [stg] (let [escnames #{"\\newline", "\\space", "\\tab", "\\formfeed", "\\backspace", "\\return"}] (when (escnames stg) (dec (count stg))))) (defn in-escaped-char? "returns true if backslash is to the left and cursor is on an escaped char" ([cm cur] (in-escaped-char? 
cm cur 0)) ([cm cur offset] (let [{:keys [ch start end type]} (get-info cm cur)] #_(js/console.log start ch end type) (and (= type "string-2") (and (< start ch) (< ch end)))))) (defn escaped-char-to-left? "returns true if an escaped char and its backslash are to the left" [cm cur] (let [{:keys [ch end type string]} (get-info cm cur)] (and (= type "string-2") (= ch end)))) (defn escaped-char-to-right? "returns true if an escaped char and its backslash is to the right" [cm cur] (let [cur+ (cursor cm 0) {:keys [type]} (get-info cm cur)] (and (not= type "string-2")) (set! cur+.line cur.line) (set! cur+.ch (inc cur.ch)) (in-escaped-char? cm cur))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-open-round ( ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn insert "insert text at current cursor. move cursor to the end of inserted text minus optional offset. the offset is for moving the cursor immediately after the insert and before returning. example: inserting a pair of brackets and placing the cursor inside the pair. this returns the new cursor." ([cm text] (insert cm text 0)) ([cm text offset] (insert cm text offset (cursor cm))) ([cm text offset cur] (let [{:keys [line ch]} (get-info cm cur)] (.replaceRange cm text cur) (.setCursor cm line (+ (+ ch (count text)) offset)) (cursor cm)))) (defn ^:export open-round "paredit-open-round exposed for keymap. unlike traditional emacs paredit, this supports brackets [] {} () but not double-quote" ([cm] (open-round cm "(")) ([cm c] (let [{:keys [type left-char right-char]} (get-info cm)] (cond ;; escaping the next character: (= "\\" left-char) (insert cm c) ;; typing in a comment or string as-is: (comment-or-string? type) (insert cm c) ;; insert a pair, pad with a space to the left and/or right if necessary, ;; and move the cursor into the pair before returning: :else (let [need-left-padding (and (not= " " left-char) (not (opener? left-char))) need-right-padding (and (not= " " right-char) (not (closer? right-char)))] (insert cm (str (when need-left-padding " ") c (pair c) (when need-right-padding " ")) (if need-right-padding -2 -1))))))) (defn ^:export open-brace "open curly brace with matching close brace" ([cm] (open-round cm "{"))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-close-round ) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn parent-closer-sp ;; -sp see 'skipping predicate' below "finds the *parent* closing bracket. behavior when used with skip: pushes opening brackets that appear along the way on a stack. closing brackets pop them off. stops when encountering a closing bracket while the stack is empty. assuming the cm has matched brackets for now. moves to the right." [cm cur state] (let [{:keys [string type top eof]} (get-info cm cur)] (cond ;; 'push' opener on our 'stack': (and (is-bracket-type? type) (opener? string)), (inc state) ;; stop if we see a closer while our 'stack' is empty: (and (is-bracket-type? type) (closer? string) (zero? state)), :yes ;; closer means we 'pop' off the 'stack', unless eof (and (is-bracket-type? type) (closer? string) (not= 0 state) eof), :eof ;; closer means we 'pop' off the 'stack': (and (is-bracket-type? type) (closer? string) (not= 0 state)), (dec state) ;; we can* rely on code mirror to tell us if we're at the top ;; level: (* NOT in [cljsjs/codemirror "5.21.0-2"] ... but maybe ;; in a later version ... 
until we can figure out how to refer ;; to the latest codemirror in our tests, the tests will have to ;; live here in order to get the codemirror that is included in ;; the script tag on the demo index.html page) ;; TODO: investigate whether we can use this, given CodeMirror version: ;; top, :stop ;; stack stays unchanged. move to the next thing: :default, state))) (defn token-start "returns the cursor for the start of the current token" [cm cur] (let [{:keys [i line start ch type]} (get-info cm cur)] (cursor cm (- i (- ch start))))) (defn token-end "returns the cursor for the end of the current token" ([cm cur] (token-end cm cur 0)) ([cm cur offset] (let [{:keys [i line end ch type]} (get-info cm cur)] (cursor cm (+ i offset (- end ch)))))) (defn token-end-index "take an index. get its token. return index of that token's end." [cm i] (->> i (cursor cm) (token-end cm) (index cm))) (defn guard [] (println "past")) (defn skip-trampoline-helper "returns the cursor that satsifies skipping predicate 'sp' or nil if eof reached. does this by making sp something we can trampoline. sp takes these args: cm, cursor, state. counts down 'n' to 0 in order to guard against infinite loops." [cm cur sp state n] (if (>= n 0) (let [{:keys [left-cur right-cur i]} (get-info cm cur) result (sp cm cur state)] #_(js/console.log result) (case result :eof nil :stop nil :yes cur :left left-cur :right right-cur :end-of-this-token (token-end cm cur) :start-of-this-tok (token-start cm cur) (let [next-cur (token-end cm cur 1)] #_(js/console.log next-cur) (fn [] ;; for trampoline (skip-trampoline-helper cm next-cur sp result (dec n)))))) (guard))) (defn skip-trampoline-helper-left "like skip-trampoline-helper but in the opposite direction." [cm cur sp state n] (if (>= n 0) (let [{:keys [left-cur right-cur i start ch]} (get-info cm cur) result (sp cm cur state)] #_(js/console.log result) (case result :bof nil :stop nil :yes left-cur :right right-cur :end-of-this-token (token-end cm cur) :start-of-this-tok (token-start cm cur) (let [next-cur (if (= ch start) (cursor cm (dec i)) (cursor cm (- i (- ch start))))] (fn [] ;; for trampoline (skip-trampoline-helper-left cm next-cur sp result (dec n)))))) (guard))) (defn skip "returns the cursor that satisfies sp or nil if either eof reached or we found out sp could not be satisfied. see skip-to for more info." ([cm sp] (skip cm sp (cursor cm))) ([cm sp cur] (when-let [right-cur (:right-cur (get-info cm cur))] (trampoline skip-trampoline-helper cm right-cur sp 0 (char-count cm))))) (defn skip-left "returns the cursor that satisfies sp or nil if either bof reached or we found out sp could not be satisfied. see skip-to for more info." [cm sp] (when-let [cur (cursor cm)] (trampoline skip-trampoline-helper-left cm cur sp 0 (char-count cm)))) (defn delete-whitespace "if cur is in whitespace, deletes it optionally without ruining indentation." ([cm] (delete-whitespace cm (cursor cm) true)) ([cm cur] (delete-whitespace cm cur true)) ([cm cur indent-after] (let [{:keys [start end line ch i type]} (get-info cm cur) c1 (cursor cm (+ i (- start ch))) c2 (cursor cm (+ i (- end ch)))] (when (nil? type) (.replaceRange cm "" c1 c2) (if indent-after (.indentLine cm line)))))) ;; todo (defn just-one-space ([cm] (just-one-space cm (cursor cm) true)) ([cm cur] (just-one-space cm cur true)) ([cm cur indent-after] (let [{:keys [start end line ch i type]} (get-info cm cur) c1 (cursor cm (+ i (- start ch))) c2 (cursor cm (+ i (- end ch)))] (when (nil? 
type) (.replaceRange cm " " c1 c2) (if indent-after (.indentLine cm line)))))) (defn skip-to "moves to the cursor that satisfies sp or doesn't move if eof reached. starts at current cursor for cm. sp stands for 'skipping predicate' which returns: - :yes if sp is satisfied. - :stop if we know we will not be satisfied with any future result. - :left if the cursor to the left is what we want. - new non-nil state if not satisfied. this state is used with the next iteration after we skip to the end of the current token. an sp takes cm, cursor, state." [cm sp] (when-let [cur' (skip cm sp)] (.setCursor cm cur') cur')) (defn move-past-parent-closer "moves cursor to just outside the closing bracket, or if there is none then doesn't move at all." ;; emacs has this extending the current selection if there is one. [cm] (when-let [cur (skip-to cm parent-closer-sp)] (delete-whitespace cm (:left-cur (get-info cm))) cur)) (defn ^:export close-round "paredit-close-round exposed for keymap. skips to end of current list even if it ends with ] or }. but if you're in a string or comment then this just inserts the bracket. requires CodeMirror mode's parser uses state with indentStack because that's how we can tell we've reached the end of a top level form and avoid entering the next top level form. 's' is the character as a string." ([cm] (close-round cm ")")) ([cm s] (let [{:keys [type left-char]} (get-info cm)] (cond (= "\\" left-char) (insert cm s) (comment-or-string? type) (insert cm s) :else (move-past-parent-closer cm))))) (defn ^:export close-brace "close curly brace like close-rond" ([cm] (close-round cm "}"))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-close-round-and-newline paredit-open-square paredit-close-square ;; paredit-doublequote ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn ^:export close-round-and-newline ([cm] (close-round-and-newline cm ")")) ([cm s] (if (comment-or-string? (get-type cm)) (insert cm s) (when (close-round cm s) (.execCommand cm "newlineAndIndent"))))) ;; question: is there a better way than .execCommand? (defn ^:export open-square [cm] (open-round cm "[")) (defn ^:export close-square [cm] (close-round cm "]")) (defn ^:export doublequote [cm] (let [{:keys [type left-char right-char ch cur]} (get-info cm)] (cond ;; about to escape this char so insert as-is: (= "\\" left-char) (insert cm "\"") ;; we're in a string so escape this doublequote: (= type "string") (insert cm "\\\"") ;; we're in code. pad with a space to the left and/or right if necessary ;; to separate it from neighboring code. after inserting, move the cursor ;; to between the quotes: :else (insert cm (str (when (not= " " left-char) " ") ;; left padding "\"\"" (when (and (not= " " right-char) (not= "\n" right-char)) " ")) ;; right padding (if (or (= " " right-char) (= "\n" right-char)) -1 -2))))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-meta-doublequote M-" ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn word? [type] (or (= type "atom") (= type "builtin") (= type "number") (= type "variable") (= type "keyword") (= type "meta"))) (defn at-a-word? "returns true if at a word of code" [cm cur] (word? (get-type cm cur))) (defn in-a-word? "true if in a word AND not at the end of that word. false if in whitespace or a string or a comment or at a bracket." [cm] (let [cur (cursor cm), i (index cm cur)] (and (at-a-word? 
cm cur) (not= i (token-end-index cm i))))) (defn start-of-a-string? "returns true if at the start of a string." [cm cur] (let [{:keys [string type start ch left-char]} (get-info cm cur)] #_(js/console.log right-char type string ch start) (and (= left-char "\"") (= type "string") (= 1 (- ch start))))) (defn start-of-a-string2? [cm cur] (let [i (index cm cur) p2 (cursor cm (inc i))] #_(js/console.log cur p2) (start-of-a-string? cm p2))) (defn end-of-a-string? "returns true if just to the right of a closing doublequote of a string." [cm cur] (let [{:keys [type ch end string left-char]} (get-info cm cur)] #_(js/console.log left-char type string ch end) (and (= type "string") (= ch end) (= left-char "\"")))) (defn end-of-next-sibling-sp ;; -sp see 'skipping predicate' "returns the cursor at the end of the sibling to the right or nil if no sibling or eof. does not exit the containing form. does this by skipping past any comments or whitespace, and branches depending on whether an opening bracket or doublequote is encountered (sp satisfied when encountering a closing bracket that empties the stack) vs the beginning of a word (return token at the end of the word). assuming the cm has matched brackets for now." [cm cur stack] (let [dq "\"" info (get-info cm cur) {:keys [string type eof ch end tok]} info stack-empty (zero? stack) one-left (= 1 stack) ;; for multi-line strings start-of-stg? (start-of-a-string? cm cur) end-of-stg? (end-of-a-string? cm cur) empty-stg? (when end-of-stg? (and (= tok.type "string") (= tok.string "\"\""))) string-extends (or (not= dq (last string)) (= "\\" (last (drop-last string))))] #_(js/console.log stack stack-empty string type ch end cur string-extends #_(escaped-char-to-right? cm cur) start-of-stg? end-of-stg?) (cond ;; we return a keyword when we know where to stop, stack otherwise. ;; skip whitespace (or (nil? type) (and (= type "error") (= string ","))), stack (and (escaped-char-to-left? cm cur) stack-empty), :yes (and (word? type) stack-empty (= ch end)), :yes (and (is-bracket-type? type) (closer? string) one-left), :yes (and end-of-stg? one-left), :yes eof, :eof ;; skip comments (= type "comment"), stack ;; strings ............................................................... empty-stg? :end-of-this-token ;; our starting point is at beginning of a string and it doesn't extend (and start-of-stg? (and (not string-extends) stack-empty)), :end-of-this-token ;; We are in a nested form, at start of string, but it doesn't extend (and start-of-stg? (not stack-empty) (not string-extends)), stack ;; entering a multi-line string, push " onto stack (and start-of-stg? string-extends), (inc stack) ;; at end of string and stack already empty, we must have started in the ;; middle of the string (and end-of-stg? stack-empty), :stop ;; at end of string and stack about to be empty, we've found the end of ;; the string -- handled before checking for eof above ;; in string, the end of this string is our goal ... ;; ... but the end of this string is on a different line: (and (= type "string") #_(not stack-empty) #_one-left string-extends), stack (and (= type "string") stack-empty (not string-extends)), :end-of-this-token ;; in string, the end of this string is our goal ... ;; ... the end is on this line: (and (= type "string") one-left), :end-of-this-token ;; in string, need to get out of this form, pop stack (and (= type "string") (not stack-empty)), (dec stack) ;; escaped chars ......................................................... 
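      ;; ("escaped chars" here means character literals such as \a, \newline or
      ;; \tab, which the clojure mode tokenizes as type "string-2" -- see
      ;; in-escaped-char? above)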
;; inside an escaped char and the end of it is what we want (and (in-escaped-char? cm cur) stack-empty), :end-of-this-token ;; To the right of escaped char, keep going (and (escaped-char-to-right? cm cur) stack-empty), :start-of-this-tok ;; in an escaped char inside the next sibling (in-escaped-char? cm cur), stack ;; at end of an escaped char which was the next sibling -- handled before ;;checking for eof above ;; at end of an escaped char inside the next sibling (escaped-char-to-left? cm cur), stack ;; words ................................................................. ;; reached the end of a word which was the next sibling -- handled before ;;checking for eof above ;; in a word that is the next sibling, the end of it is what we want (and (word? type) stack-empty), :end-of-this-token ;; in a word that is inside the next sibling (word? type), stack ;; brackets .............................................................. ;; push opener on stack (and (is-bracket-type? type) (opener? string)), (inc stack) ;; we've reached the end of a form -- handled before checking for eof ;;above ;; there was no sibling (and (is-bracket-type? type) (closer? string) stack-empty), :stop ;; passing through the guts of a sibling form (.. (guts)|..) (and (is-bracket-type? type) (closer? string)), (dec stack) :default, :stop))) (defn end-of-next-sibling "get the cursor for the end of the sibling to the right." ([cm] (skip cm end-of-next-sibling-sp)) ([cm cur] (when cur (.setCursor cm cur) (skip cm end-of-next-sibling-sp)))) #_(let [cm (get-ddb [:tabs :extns :ed3 :cms :$ed]) cur (.getCursor cm) info (pe/get-info cm cur) tok (info :tok)] [(pe/start-of-a-string? cm cur) (pe/end-of-a-string? cm cur) (info :left-char) (info :right-char) tok.string] #_(console.log (pe/token-end cm cur 1)) #_(console.log (pe/cursor cm (+ 9 1 (- 8 8)))) #_(pe/get-info cm (pe/cursor cm (+ 9 1))) #_(pe/end-of-a-string? cm cur)) (defn start-of-prev-sibling-sp ;; -sp see 'skipping predicate' "returns the cursor at the start of the sibling to the left or nil if no sibling or eof. does not exit the containing form. does this by skipping past any comments or whitespace, and branches depending on whether a bracket or doublequote is encountered (sp satisfied when encountering an opening bracket that empties the stack) vs the beginning of a word (return token at the start of the word). assuming the cm has matched brackets for now." [cm cur stack] (let [info (get-info cm cur) {:keys [string type bof ch start tok]} info stack-empty (zero? stack) one-left (= 1 stack) string-extends (not= "\"" (first string)) ; for multiline strings start-of-stg? (start-of-a-string? cm cur) end-of-stg? (end-of-a-string? cm cur) empty-stg? (when start-of-stg? (and (= tok.type "string") (= tok.string "\"\"")))] #_(js/console.log stack stack-empty string type ch start cur string-extends ;;(escaped-char-to-left? cm cur) ;;(escaped-char-to-right? cm cur) start-of-stg? end-of-stg?) (cond ;; we return a keyword when we know where to stop, stack otherwise. ;; check these before checking for bof: ;; in a multi-line string, keep searching for the first line of it: (and start-of-stg? one-left string-extends), stack ;; at the first line of a string and we want its opening doublequote: (and start-of-stg? one-left), :yes ;; at the start of a word: (and (word? type) stack-empty (= ch start)), :yes ;; at the opener we were looking for: (and (is-bracket-type? type) (opener? string) one-left), :yes bof, :bof; reached beginning of file (and (start-of-a-string2? 
cm cur) (not stack-empty)), stack #_(dec stack) ;; at the start of an escaped char: (and (escaped-char-to-right? cm cur) stack-empty), stack ;; skip whitespace (or (nil? type) (and (= type "error") (= string ","))), stack ;; skip comments (= type "comment"), stack ;; strings ............................................................... empty-stg? :start-of-this-tok ;; our starting point is at end of a string and it doesn't extend (and end-of-stg? (and (not string-extends) stack-empty)), :start-of-this-tok ;; We are in a nested form, at end of string, but it doesn't extend (and end-of-stg? (not stack-empty) (not string-extends)) stack ;; entering a multi-line string from the right; push " onto stack (and end-of-stg? string-extends), (inc stack) ;; at start of string and stack already empty, we must have started in ;; the middle of the string. (and start-of-stg? stack-empty), :stop ;; at start of string and stack about to be empty, we've found the end of ;; the string -- handled before check for bof above ;; in string, the start of it is our goal ... ;; ... but the start of this string is on a higher line: (and (= type "string") #_(not stack-empty) string-extends), stack ;; it's on this line: (and (= type "string") stack-empty (not string-extends)), :start-of-this-tok ;; in string, the start of this string is our goal ... ;;; ... and the start is on this line: (and (= type "string") one-left) :start-of-this-tok ;; in string, need to get out of this form, pop stack (and (= type "string") (not stack-empty)), (dec stack) ;; escaped chars ......................................................... ;; inside an escaped char and the start of it is what we want (and (in-escaped-char? cm cur) stack-empty), :start-of-this-tok ;; To the left of escaped char, keep going (and (escaped-char-to-left? cm cur) stack-empty), :start-of-this-tok ;; in an escaped char inside the prev sibling (or (in-escaped-char? cm cur) (escaped-char-to-left? cm cur)), stack ;; at start of an escaped char which was the prev sibling -- handled ;; before check for bof above ;; at start of an escaped char inside the prev sibling (escaped-char-to-right? cm cur), stack ;; words ................................................................. ;; reached the start of a word which was the prev sibling -- handled ;; before check for bof above ;; in a word that is the prev sibling, the start of it is what we want (and (word? type) stack-empty), :start-of-this-tok ;; in a word that is inside the prev sibling (word? type), stack ;; brackets .............................................................. ;; push closer on stack (and (is-bracket-type? type) (closer? string)), (inc stack) ;; we've reached the start of a form -- handled before check for ;; bof above ;; there was no prev sibling, avoid exiting the form (and (is-bracket-type? type) (opener? string) stack-empty), :stop ;; passing through the guts of a sibling form (.. X(guts)..) (and (is-bracket-type? type) (opener? string)), (dec stack) :default :stop))) (defn start-of-prev-sibling "return the cursor at the start of the sibling to the left." ([cm] (skip-left cm start-of-prev-sibling-sp)) ([cm cur] (when cur (.setCursor cm cur) (skip-left cm start-of-prev-sibling-sp)))) (defn escape-string "escapes a string, replacing backslashes and doublequotes. wraps result in a new pair of doublequotes." 
[s] (str "\"" (-> s (str/replace #"[\\]" "\\\\") (str/replace #"[\"]" "\\\"")) "\"")) (defn stringify-selection "turns selection into a string, escaping backslashes and doublequotes" [cm] (->> cm .getSelection escape-string (.replaceSelection cm))) (defn stringify "turns the region from cur-1 to cur-2 into a string, escaping backslashes and doublequotes" [cm cur-1 cur-2] (.setSelection cm cur-1 cur-2) (stringify-selection cm) (.setCursor cm (cursor cm (inc (index cm cur-1))))) (defn exit-string "moves cursor right, out of the current string" [cm] (let [{:keys [type i ch end]} (get-info cm)] (when (= type "string") (.setCursor cm (cursor cm (+ i (- end ch))))))) (defn in-string? "returns true if token is in the middle of a string." ([cm] (in-string? cm (cursor cm))) ([cm cur] (let [type (get-type cm cur)] (or (= type "string") (= type "string-2"))))) (defn ^:export meta-doublequote "paredit meta-doublequote exposed for keymap. if in a string, moves cursor out of the string to the right. if in a comment, insert a doublequote. if in an escaped char, do nothing. otherwise starts a string that that continues to the end of the next form, escaping backslashes and doublequotes." [cm] (let [{:keys [type eof cur]} (get-info cm)] (cond eof :do-nothing (in-escaped-char? cm cur) :do-nothing (in-string? cm cur) (exit-string cm) (= type "comment") (insert cm "\"") (in-a-word? cm) (stringify cm cur (token-end cm cur)) :else (stringify cm cur (end-of-next-sibling cm))))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-comment-dwim ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn left "given a pair of cursors c1 and c2, returns the left-most one" [cm c1 c2] (let [i1 (index cm c1) i2 (index cm c2)] (if (< i1 i2) c1 c2))) (defn right "given a pair of cursors c1 and c2, returns the right-most one" [cm c1 c2] (let [i1 (index cm c1) i2 (index cm c2)] (if (< i1 i2) c2 c1))) (defn selection-info "like get-info but for the first selection. gets the cursor to the left of the selection, the start, the end, the text selected, the starting and ending line numbers. nil if nothing selected." [cm] (when (.somethingSelected cm) (let [first-sel (-> cm .listSelections first) text (-> cm .getSelections first) anchor (.-anchor first-sel) head (.-head first-sel) left-of-start (left cm anchor head) start-cur (cursor cm (inc (index cm left-of-start))) end-cur (right cm anchor head)] [left-of-start start-cur end-cur text (.-line start-cur) (.-line end-cur)]))) (defn get-types "get the types from cursors c1 to c2. assumes 1 is to the left of 2 and not vice versa." [cm c1 c2] (loop [types [], cur c1] (let [{:keys [type right-cur]} (get-info cm cur) types' (conj types type)] (if (= cur c2) types' (recur types' right-cur))))) (defn selection-completely-satisfies-pred? "true if every position's type satisfies pred, for the entire (first) selection" [cm pred] (when-let [[_ c1 c2] (selection-info cm)] (every? pred (get-types cm c1 c2)))) (defn selection-completely-whitespace? [cm] (selection-completely-satisfies-pred? cm nil?)) (defn not-code? [type] (or (nil? type) (= type "comment"))) (defn selection-completely-non-code? [cm] (selection-completely-satisfies-pred? 
cm not-code?))

(defn to-comment
  "starts each line in 's' with ;; and appends 'postscript'"
  [s postscript]
  (let [cmnt (->> s str/split-lines (map #(str/replace % #"^" ";; ")) (str/join "\n"))]
    (str cmnt "\n" postscript)))

(defn uncomment
  "removes leading whitespace and semicolons from lines in 's'"
  [s]
  (->> s str/split-lines (map #(str/replace % #"^\s*;+" "")) (str/join "\n")))

(defn indent-lines
  "indents lines from a to z (line numbers). assumes a is before z."
  [cm a z]
  (doseq [line (range a (inc z))] (.indentLine cm line)))

(defn uncomment-selection
  "removes whitespace and leading semicolons from selection, replaces selection
  with the result, indents lines affected."
  [cm]
  (when-let [[_ c1 c2 text] (selection-info cm)]
    (.replaceSelection cm (uncomment text))
    (indent-lines cm (.-line c1) (.-line c2))))

(defn append
  "returns the result of appending the applicable part of 'tok' to 's'. this is
  for collecting all the text on a line after 'ch'"
  [ch s tok]
  (if (< ch (.-end tok))
    (str s (subs (.-string tok) (- (max ch (.-start tok)) (.-start tok))))
    s))

(defn get-text-to-end-of-line [cm cur]
  (let [toks (.getLineTokens cm (.-line cur))
        ch (.-ch cur)]
    (reduce (partial append ch) "" toks)))

(defn comment-selection [cm]
  (let [[left-of-start c1 c2 text l1 l2] (selection-info cm)
        text-after-selection (get-text-to-end-of-line cm c2)
        code-follows-selection (not= text-after-selection "")
        end-of-line (last-cur cm)
        line-to (if code-follows-selection (inc l2) l2)]
    (when code-follows-selection (.setSelection cm left-of-start end-of-line))
    (.replaceSelection cm (to-comment text text-after-selection))
    (indent-lines cm l1 line-to)))

(defn line-ends-with-comment?
  "true if the line ends with a comment"
  [cm]
  (= "comment" (.-type (last-token cm (cursor cm)))))

(defn indent-current-line [cm] (->> cm cursor .-line (.indentLine cm)))

(defn go-to-comment
  "moves cursor to ;;X"
  [cm]
  (let [cur (cursor cm)
        ch (.-ch cur)
        i (index cm cur)
        c-tok (last-token cm cur)
        start (.-start c-tok)
        offset (count (take-while #(= ";" %) (.-string c-tok)))]
    (.setCursor cm (cursor cm (+ i (- start ch) offset)))))

(defn insert-spaces-to-col-40
  "presses spacebar until we are at col 40"
  [cm]
  (let [ch (-> cm cursor .-ch)]
    (when (< ch 40) (insert cm (str/join (repeat (- 40 ch) " "))))))

(defn go-to-comment-and-indent
  "moves cursor to the comment on the line and makes sure the comment starts on
  column 40 or greater. assumes last token is a comment"
  [cm]
  (indent-current-line cm)
  (let [cur (cursor cm)
        ch (.-ch cur)
        i (index cm cur)
        comment-start (.-start (last-token cm cur))]
    (.setCursor cm (cursor cm (+ i (- comment-start ch))))
    (insert-spaces-to-col-40 cm)
    (go-to-comment cm)))

(defn betw-code-and-line-end?
  "true if code is to the left and whitespace* is to the right. assumes you
  already know line does not end with a comment."
  [cm]
  (let [cur (cursor cm)
        toks (.getLineTokens cm (.-line cur))
        ch (.-ch cur)
        tests (map #(or (<= (.-end %) ch) (nil? (.-type %))) toks)]
    (and (seq toks)                              ; the line is not empty
         (every? true? tests)                    ; there's only whitespace to the right
         (some #(not (nil? (.-type %))) toks)))) ; there's code on the left

(defn move-to-end-of-line
  "moves cursor to end of last non-whitespace token on a line. returns a vector
  of new index, new ch, and new cursor."
  ([cm] (move-to-end-of-line cm (cursor cm)))
  ([cm cur]
   (let [end (->> cur .-line (.getLineTokens cm) (remove #(nil?
(.-type %))) last .-end) ch (.-ch cur) i (index cm cur) i' (+ i (- end ch)) cur' (cursor cm i')] (.setCursor cm cur') [i' (.-ch cur') cur']))) (defn select-rest-of-line "selects from current position to the end of the line" [cm] (.setSelection cm (cursor cm) (last-cur cm))) (defn delete-to-end-of-line "deletes from current position to the end of the line" [cm] (.replaceRange cm "" (cursor cm) (last-cur cm))) (defn create-comment-at-end "starts a ; comment at column 40 or greater and moves to it." [cm] (indent-current-line cm) (move-to-end-of-line cm) (insert cm " ") (insert-spaces-to-col-40 cm) (insert cm "; ") (delete-to-end-of-line cm)) (defn line-is-whitespace? "returns true if line is all whitespace" [cm] (->> cm cursor .-line (.getLineTokens cm) (every? #(nil? (.-type %))))) (defn create-line-comment "creates and indents a ;; comment" [cm] (insert cm ";; ") (delete-to-end-of-line cm) (indent-current-line cm)) (defn new-line-and-comment "creates and indents a ;; comment on a new line" [cm] (indent-current-line cm) (insert cm "\n\n") (.execCommand cm "goLineDown") (.execCommand cm "goLineDown") (indent-current-line cm) (.execCommand cm "goLineUp") (create-line-comment cm)) (defn insert-line-comment-here "creates and indents a ;; comment on this line" [cm] (insert cm "\n") (.execCommand cm "goLineDown") (indent-current-line cm) (.execCommand cm "goLineUp") (create-line-comment cm)) (defn in-code? "returns true if token is in the middle of code. assumes you've already ruled out comments." [cm] (let [{:keys [type start end ch]} (get-info cm)] (and (< start ch) (< ch end) (not (nil? type))))) (defn in-whitespace? "returns true if token is to the right of whitespace" [cm] (-> cm get-type nil?)) (defn code-to-left? "returns true if there's any code to the left of cursor. assumes you've already ruled out comments so only looks for non nil tokens" [cm] (let [cur (cursor cm) toks (.getLineTokens cm (.-line cur)) ch (.-ch cur) code (map #(and (not (nil? (.-type %))) (or (<= (.-end %) ch) (and (< (.-start %) ch) (< ch (.-end %))))) toks)] (and (seq toks) ; the line is not empty (some true? code)))) ; there's one token that contains code to the left (defn ^:export comment-dwim [cm] (cond (selection-completely-whitespace? cm) :do-nothing (selection-completely-non-code? cm) (uncomment-selection cm) (.somethingSelected cm) (comment-selection cm) (line-ends-with-comment? cm) (go-to-comment-and-indent cm) (betw-code-and-line-end? cm) (create-comment-at-end cm) (in-code? cm) (create-comment-at-end cm) (in-string? cm) (create-comment-at-end cm) (line-is-whitespace? cm) (create-line-comment cm) (and (code-to-left? cm) (in-whitespace? cm)) (new-line-and-comment cm) (in-whitespace? cm) (insert-line-comment-here cm) :default :do-nothing)) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-newline ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; seems like code mirror behaves as desired already ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-forward-delete ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn backspace "delete 1 or n char to left" ([cm] (backspace cm 1)) ([cm n] (let [-n #(- % n) cur (cursor cm) cur0 (->> cur (index cm) -n (cursor cm))] (.replaceRange cm "" cur0 cur)))) (defn right-cur-would-be-whitespace? "true if this position would be whitespace if we pressed the spacebar." 
[cm cur right-cur] (let [original-cur (cursor cm) _ (insert cm " " 0 cur) answer (nil? (get-type cm right-cur))] (backspace cm) (.setCursor cm original-cur) answer)) (defn closing-delim? "returns true for closing brackets and for closing double-quotes" [cm cur] (let [{:keys [string type left-char right-cur]} (get-info cm cur)] ;;(println "closing delim?" type string left-char) (or (and (is-bracket-type? type) (closer? left-char)) (end-of-a-string? cm cur) (and (= type "string") (= "\"" left-char) ;; at this point, we could be just inside the start of a string. ;; if we check the type at the position to the right, this could ;; trick us: "X""hello" ... one way to be absolutely sure we're ;; at the end of a string is to add a space temporarily and see ;; if code mirror says its type is 'null' or 'string'. (right-cur-would-be-whitespace? cm cur right-cur))))) (defn opening-doublequote? "returns true if cur is just to the right of an opening doublequote" ([cm cur] (let [{:keys [type left-char right-cur]} (get-info cm cur)] (opening-doublequote? cm type left-char right-cur))) ([cm type left-char right-cur] (and (= type "string") (= "\"" left-char) right-cur (= "string" (get-type cm right-cur))))) (defn closing-doublequote? "returns true if cur is just to the right of a closing doublequote" [cm cur] (let [{:keys [type left-char right-cur]} (get-info cm cur) right-type (get-type cm right-cur)] (and (= type "string") (= "\"" left-char) (not= right-type "string")))) (defn opening-delim? "returns true for opening brackets and for opening double-quotes" [cm cur] (let [{:keys [string type left-char right-cur]} (get-info cm cur)] (or (and (is-bracket-type? type) (opener? left-char)) (opening-doublequote? cm type left-char right-cur)))) (defn opening-delim-for-empty-pair? "returns true for an opening bracket of an empty pair ()" [cm cur] (let [{:keys [left-char right-char right-cur]} (get-info cm cur)] (and (opening-delim? cm cur) right-cur (closing-delim? cm right-cur) (pair? left-char right-char)))) (defn opening-delim-for-non-empty-pair? "returns true for an opening bracket of a pair that contains one or more chars." [cm] (let [{:keys [left-char right-char cur]} (get-info cm)] (and (opening-delim? cm cur) (not (pair? left-char right-char))))) (defn move "moves the cursor by 'offset' places, negative for left. returns the cursor." [cm offset] (->> cm index (+ offset) (cursor cm) (.setCursor cm)) (cursor cm)) (defn delete "delete 1 or n char to right" ([cm] (delete cm 1)) ([cm n] (let [+n #(+ % n) cur (cursor cm) cur2 (->> cur (index cm) +n (cursor cm))] (.replaceRange cm "" cur cur2)))) (defn whitespace? "returns true if cursor indicates whitespace" [cm cur] (let [info (get-info cm cur)] (and (not (nil? info)) (nil? (:type info))))) (defn bracket? "true if cursor info indicates opening/closing bracket or quote" [cm cur] (let [{:keys [type left-char] :as info} (get-info cm cur)] (or (is-bracket-type? type) (and (= "string" type) (= "\"" left-char))))) (defn select-pair "assumes a pair of brackets surround the cursor. selects the pair." [cm] (let [i (->> cm cursor (index cm)) c1 (->> i dec (cursor cm)) c2 (->> i inc (cursor cm))] (.setSelection cm c1 c2))) (defn delete-selection [cm] (.replaceSelection cm "")) (defn delete-pair "assumes a pair of brackets surround the cursor. deletes the pair." 
[cm] (backspace cm) (delete cm)) (defn move-right [cm] (move cm 1)) (defn move-left [cm] (move cm -1)) (defn ^:export forward-delete "paredit-forward-delete exposed for keymap" [cm] (let [{:keys [cur right-cur] :as info} (get-info cm)] (cond (.somethingSelected cm) (delete-selection cm) (whitespace? cm right-cur) (delete cm) (not (bracket? cm right-cur)) (delete cm) (opening-delim? cm right-cur) (move-right cm) (opening-delim-for-empty-pair? cm cur) (delete-pair cm) :default :do-nothing))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-backward-delete ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn ^:export backward-delete "paredit backward delete exposed for keymap" [cm] (let [cur (cursor cm)] (cond (.somethingSelected cm) (delete-selection cm) (in-escaped-char? cm cur) (delete-pair cm) (escaped-char-to-left? cm cur) (backspace cm 2) (opening-delim-for-non-empty-pair? cm) :do-nothing (opening-delim-for-empty-pair? cm cur) (delete-pair cm) (closing-delim? cm cur) (move-left cm) :default (backspace cm)))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-kill ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn in-regular-string? "returns true if token is in the middle of a string." [cm cur] (or (opening-doublequote? cm cur) (and (= "string" (get-type cm cur)) (not (closing-doublequote? cm cur))))) (defn str-ends-on-another-line? "true if these values are from a string token that ends on another line" [type string] (and (= "string" type) (not= "\"" (last string)))) (defn go-to-end-of-string "moves cursor to end of the string you're in (but still inside the closing doublequote). assumes you're in a string. the end could be on a different line from where you start" ([cm] (go-to-end-of-string cm (cursor cm))) ([cm cur] (let [{:keys [left-char right-cur type string ch end]} (get-info cm cur)] (cond (nil? type) (go-to-end-of-string cm right-cur) (str-ends-on-another-line? type string) (do (move-to-end-of-line cm cur), (move cm 2), (go-to-end-of-string cm)) (opening-doublequote? cm type left-char right-cur) (do (move cm 1), (go-to-end-of-string cm)) (and (= "string" type)) (move cm (- end ch 1 )) :default cur)))) (defn select-rest-of-string "assumes you are in a string." [cm] (let [c1 (cursor cm) c2 (go-to-end-of-string cm c1)] (.setSelection cm c1 c2))) (defn betw-code-and-comment? "true if code is to the left and whitespace* comment* is to the right." [cm cur] (when cur (let [toks (.getLineTokens cm (.-line cur)) ch (.-ch cur) tests (map #(or (<= (.-end %) ch) (or (nil? (.-type %)) (= "comment" (.-type %)))) toks)] (and (seq toks) ; the line is not empty (every? true? tests) ; there's only junk to the right (some #(not (nil? (.-type %))) toks))))) (defn rest-of-siblings [cm] (let [c1 (cursor cm) parent-closer (skip cm parent-closer-sp) c2 (when parent-closer (cursor cm (dec (index cm parent-closer))))] [c1 c2])) (defn select-rest-of-siblings [cm] (let [[c1 c2] (rest-of-siblings cm)c1 (cursor cm)] (when c2 (.setSelection cm c1 c2)))) (defn kill-from-to [cm i j] (let [cur (cursor cm i)] (CodeMirror.emacs.kill cm cur (cursor cm j)) (.setCursor cm cur))) (defn kill-region [cm] (let [first-sel (-> cm .listSelections first) anchor (.-anchor first-sel) head (.-head first-sel)] (CodeMirror.emacs.kill cm anchor head))) (defn kill-pair "assumes a pair of brackets surround the cursor. deletes the pair." 
[cm] (select-pair cm) (kill-region cm))

(defn kill-rest-of-string [cm] (select-rest-of-string cm) (kill-region cm))

(defn kill-rest-of-line [cm] (select-rest-of-line cm) (kill-region cm))

(defn kill-rest-of-siblings [cm] (select-rest-of-siblings cm) (kill-region cm))

(defn kill-next-sibling
  "kills the next sibling to the right of the cursor"
  [cm]
  (let [from (cursor cm)
        mid (end-of-next-sibling cm from)
        to (if (betw-code-and-comment? cm mid) (last-cur cm mid) mid)]
    (when to
      (.setSelection cm from to)
      (kill-region cm))))

(defn ^:export kill
  "paredit kill exposed for keymap."
  [cm]
  (let [cur (cursor cm)]
    (cond
      (.somethingSelected cm) (kill-region cm)
      (in-regular-string? cm cur) (kill-rest-of-string cm)
      (betw-code-and-comment? cm cur) (kill-rest-of-line cm)
      (in-escaped-char? cm cur) (kill-pair cm)
      (code-to-left? cm) (kill-rest-of-siblings cm)
      :default (kill-next-sibling cm))))

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; paredit-forward-kill-word M-d
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

(defn comment? [cm cur] (= "comment" (get-type cm cur)))

(defn start-of-comment?
  "true if block cursor is on the first ; of a line comment"
  [cm cur]
  (let [{:keys [type right-cur]} (get-info cm cur)
        right-type (get-type cm right-cur)]
    (and (not= "comment" type)
         (= "comment" right-type))))

(defn idx-of-next [cm i chars member max]
  (let [{:keys [right-char]} (get-info cm (cursor cm i))]
    (cond
      (= i max), (guard)
      (= member (contains? chars right-char)), i
      :default, (fn [] (idx-of-next cm (inc i) chars member max)))))

(defn index-of-next [cm i chars] (trampoline idx-of-next cm i chars true (char-count cm)))

(defn index-of-next-non [cm i chars] (trampoline idx-of-next cm i chars false (char-count cm)))

(def non-word-chars (set "(){}[]|&; \n"))
(def comment-start (set "; "))
(def semicolons #{";"})
(def comment-whitespace #{" " (str \tab)})

(defn end-of-next-word
  "assumes i is in a comment or a string. returns the i at the end of the next
  word (going to the right) in this comment/string"
  [cm i]
  (let [{:keys [ch start string]} (get-info cm (cursor cm i))
        tail (subs string (- ch start))
        word (re-find #"^\s*[\S]*" tail)
        length (count word)
        quote (if (str/ends-with? word "\"") -1 0)]
    (+ i length quote)))

(defn start-of-prev-word
  "assumes i is in a comment or a string. returns the i at the start of the
  prev word (going to the left) in this comment/string"
  [cm i]
  (let [{:keys [ch start string]} (get-info cm (cursor cm i))
        head (subs string 0 (- ch start))
        last-word (re-find #"[\S]*\s*$" head)
        length (count last-word)
        quote (if (str/ends-with? last-word "\"") 1 0)]
    (- i length quote)))

(defn kill-next-word
  "assumes i is in a comment or a string. kills text from i to the end of the
  next word in this comment/string"
  [cm i]
  (kill-from-to cm i (end-of-next-word cm (inc i)))
  (.setCursor cm (cursor cm i)))

(defn fwd-kill-word
  "trampoline helper for forward-kill-word. 'mark' is the index to start
  killing from. 'i' is the index we're inspecting. 'n' is how many calls
  remaining that we'll support before stopping because of a suspected infinite
  loop. first call can put the count of characters in this cm instance."
  [cm mark i n]
  (let [m (dec n), j (inc i), cur (cursor cm i), right-cur (cursor cm j)]
    (cond
      (neg? n) (guard)
      (eof? cm right-cur) :do-nothing
      (whitespace? cm right-cur) #(fwd-kill-word cm mark (token-end-index cm j) m)
      (start-of-a-string? cm right-cur) #(fwd-kill-word cm j j m)
      (in-regular-string?
cm right-cur) (kill-next-word cm mark) (opening-delim? cm right-cur) #(fwd-kill-word cm j j m) (closing-delim? cm right-cur) #(fwd-kill-word cm j j m) (at-a-word? cm right-cur) (kill-from-to cm mark (token-end-index cm j)) (start-of-comment? cm cur) (let [j (index-of-next-non cm i semicolons)] #(fwd-kill-word cm j j m)) (comment? cm right-cur) (kill-next-word cm mark) :else (println "unhandled")))) (defn ^:export forward-kill-word "paredit forward-kill-word exposed for keymap." [cm] (let [i (index cm)] (trampoline fwd-kill-word cm i i (char-count cm)))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-backward-kill-word ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn start-of-token-at [cm i] (let [{:keys [ch start]} (get-info cm (cursor cm i))] (- i (- ch start)))) (defn kill-prev-word-in-comment "assumes i is in a comment. kills text from i to the beginning of the previous word in this comment" [cm i] (let [{:keys [ch start string]} (get-info cm (cursor cm i)) cur-offset-in-string (- ch start) head (subs string 0 cur-offset-in-string) tail (subs string cur-offset-in-string) word (re-find #"\S*\s*$" head) length (count word)] (kill-from-to cm (- i length) i) (.setCursor cm (cursor cm (- i length))))) (defn beginning-of-line? [cm cur] (let [{:keys [start end type] :as info} (get-info cm cur)] (and (not (nil? info)) (nil? type) (= start end 0)))) (defn bkwd-kill-skippable-comment-char? [cm cur] (let [{:keys [type left-char] :as info} (get-info cm cur)] (and (not (nil? info)) (= "comment" type) (re-matches #"\s|;" left-char)))) (defn bkwd-kill-word "trampoline helper for backward-kill-word. 'mark' is the index to start killing from. 'i' is the index we're inspecting. 'n' is how many more calls we'll entertain before stopping because we suspect an infinite loop. first call can use char count for 'n'." [cm mark i n] (let [h (dec i), m (dec n), cur (cursor cm i)] (cond (neg? n) (guard) (bof? cm cur) :do-nothing (beginning-of-line? cm cur) #(bkwd-kill-word cm h h m) (whitespace? cm cur) #(bkwd-kill-word cm mark (start-of-token-at cm i) m) (opening-delim? cm cur) #(bkwd-kill-word cm h h m) (closing-delim? cm cur) #(bkwd-kill-word cm h h m) (at-a-word? cm cur) (kill-from-to cm (start-of-token-at cm i) mark) (start-of-comment? cm cur) (let [j (index-of-next-non cm i semicolons)] #(fwd-kill-word cm j j m)) (bkwd-kill-skippable-comment-char? cm cur) #(bkwd-kill-word cm mark h m) (comment? cm cur) (kill-prev-word-in-comment cm mark) :else (println "unhandled")))) (defn ^:export backward-kill-word "paredit backward-kill-word exposed for keymap." [cm] (let [i (index cm)] (trampoline bkwd-kill-word cm i i (char-count cm)))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-forward ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn fwd "trampoline helper for forward. 'i' is the index we're inspecting. 'n' is how many more calls we'll entertain before suspecting an infinite loop. first call can pass in char count." [cm i n] (let [j (inc i), m (dec n), cur (cursor cm i), right-cur (cursor cm j)] (cond (neg? n) (guard) (nil? right-cur) :do-nothing (eof? cm right-cur) :do-nothing (whitespace? cm right-cur) #(fwd cm j m) (opening-delim? cm right-cur) (.setCursor cm (end-of-next-sibling cm cur)) (closing-delim? cm right-cur) (.setCursor cm right-cur) (at-a-word? cm right-cur) (.setCursor cm (cursor cm (token-end-index cm j))) (comment? 
cm right-cur) #(fwd cm (token-end-index cm j) m) (in-string? cm right-cur) (.setCursor cm (cursor cm (end-of-next-word cm j))) :else (println "unhandled")))) (defn ^:export forward "paredit forward exposed for keymap. find the first thing that isn't whitespace or comment. if it is a closing bracket, step past it. otherwise skip over the thing." [cm] (trampoline fwd cm (index cm) (char-count cm))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-backward ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn bkwd "trampoline helper for backward. 'i' is the index we're inspecting. 'n' is number of remaining calls before we suspect an infinite loop" [cm i n] (let [h (dec i), m (dec n), cur (cursor cm i)] (cond (neg? n) (guard) (nil? cur) :do-nothing (bof? cm cur) (.setCursor cm (cursor cm h)) (whitespace? cm cur) #(bkwd cm h m) (opening-delim? cm cur) (.setCursor cm (cursor cm h)) (closing-delim? cm cur) (.setCursor cm (start-of-prev-sibling cm cur)) (at-a-word? cm cur) (.setCursor cm (start-of-prev-sibling cm cur)) (comment? cm cur) #(bkwd cm (start-of-prev-sibling cm cur) m) (in-string? cm cur) (.setCursor cm (cursor cm (start-of-prev-word cm h))) :else (println "unhandled")))) (defn ^:export backward "paredit backward exposed for keymap." [cm] (trampoline bkwd cm (index cm) (char-count cm))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-forward-up ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn forward-up-cur "get cursor corresponding to paredit forward up" ([cm] (forward-up-cur cm (cursor cm))) ([cm cur] (cond (nil? cur), nil (and (in-string? cm cur) (not (end-of-a-string? cm cur))) (token-end cm cur) :default, (skip cm parent-closer-sp)))) (defn ^:export forward-up "paredit forward-up exposed for keymap." ([cm] (forward-up cm (cursor cm))) ([cm cur] (when-let [cur' (forward-up-cur cm cur)] (.setCursor cm cur')))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-backward-up ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn backward-up-cur "get cursor corresponding to paredit backward up" ([cm] (backward-up-cur cm (cursor cm))) ([cm cur] (start-of-prev-sibling cm (forward-up-cur cm cur)))) (defn ^:export backward-up "paredit backward-up exposed for keymap." ([cm] (backward-up cm (cursor cm))) ([cm cur] (when-let [cur' (backward-up-cur cm cur)] (.setCursor cm cur')))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-wrap-round ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn end-of-this "go to the end of the current thing, whether it be a string or a word of code" [cm cur] (if (in-string? cm cur) (token-end cm cur) (end-of-next-sibling cm cur))) (defn ^:export wrap-round "paredit wrap-round exposed for keymap." ([cm] (wrap-round cm (cursor cm))) ([cm cur] (let [cur-close (end-of-this cm cur) cur-open (start-of-prev-sibling cm cur-close) i (inc (index cm cur-open)) text (.getRange cm cur-open cur-close) text' (str "(" text ")")] (.replaceRange cm text' cur-open cur-close) (.setCursor cm (cursor cm i))))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-splice-sexp M-s ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn ^:export splice-sexp "paredit splice-sexp exposed for keymap. 
unlike emacs' version, this does not splice a string by dropping its double-quotes." ([cm] (splice-sexp cm (cursor cm))) ([cm cur] (let [i (dec (index cm)) cur-close (skip cm parent-closer-sp) cur-open (start-of-prev-sibling cm cur-close) text' (when cur-open (.getRange cm (cursor cm (inc (index cm cur-open))) (cursor cm (dec (index cm cur-close)))))] (when text' (.replaceRange cm text' cur-open cur-close) (.setCursor cm (cursor cm i)))))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-splice-sexp-killing-backward M-<up> ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn ^:export splice-sexp-killing-backward "paredit splice-sexp-killing-backward exposed for keymap. like emacs' version, this doesn't actually kill to the clipboard. it just deletes. but unlink emacs, this does not splice a string by dropping its double-quotes." ([cm] (splice-sexp-killing-backward cm (cursor cm))) ([cm cur] (if (in-string? cm cur) (backward-up cm cur)) (let [cur' (cursor cm) cur-close (skip cm parent-closer-sp) cur-open (start-of-prev-sibling cm cur-close) text' (when cur-close (.getRange cm cur' (cursor cm (dec (index cm cur-close)))))] (when text' (.replaceRange cm text' cur-open cur-close) (.setCursor cm cur-open))))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-splice-sexp-killing-forward M-<down> ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn ^:export splice-sexp-killing-forward "paredit splice-sexp-killing-forward exposed for keymap. like emacs' version, this doesn't actually kill to the clipboard. it just deletes. but unlink emacs, this does not splice a string by dropping its double-quotes." ([cm] (splice-sexp-killing-forward cm (cursor cm))) ([cm cur] (if (in-string? cm cur) (forward-up cm cur)) (let [cur' (cursor cm) final-cur (cursor cm (dec (index cm cur'))) cur-close (skip cm parent-closer-sp) cur-open (start-of-prev-sibling cm cur-close) keep-from (when cur-open (cursor cm (inc (index cm cur-open))) ) text (when keep-from (.getRange cm cur-open cur-close)) text' (when keep-from (.getRange cm keep-from cur'))] (when text' (.replaceRange cm text' cur-open cur-close) (.setCursor cm final-cur))))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-raise-sexp M-r ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn ^:export raise-sexp "paredit raise-sexp exposed for keymap." ([cm] (raise-sexp cm (cursor cm))) ([cm cur] (if (in-string? cm cur) (backward-up cm cur)) (let [c1 (cursor cm) c2 (end-of-next-sibling cm c1) text (when c2 (.getRange cm c1 c2)) cur-close (when text (skip cm parent-closer-sp)) cur-open (when cur-close (start-of-prev-sibling cm cur-close))] (when cur-open (.replaceRange cm text cur-open cur-close) (.setCursor cm cur-open))))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-forward-slurp-sexp C-), C-<right> ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn fwd-string-slurp "String slurping consists of simply 'go to end of string, mark as parent, go to next sibling end, mark as sibling'" [cm cur] (let [parent (if (start-of-a-string? 
cm cur) (end-of-next-sibling cm cur) (end-of-next-sibling cm (start-of-prev-sibling cm cur))) sibling (end-of-next-sibling cm parent)] (when sibling [parent sibling "\""]))) (defn fwd-slurp "trampoline-able that looks for an ancestor closing bracket (parent, grandparent, etc) that has a sibling to slurp. returns a vector of the cur to the right of such a bracket, the cur to the right of the sibling that will be slurped, the string of the bracket to move. nil if there is no such anscestor that can slurp." [cm cur n] (if (and (in-string? cm cur) (not (end-of-a-string? cm cur))) (fwd-string-slurp cm cur) (when (>= n 0) (let [parent (skip cm parent-closer-sp cur) sibling (end-of-next-sibling cm parent)] (if sibling [parent sibling (get-string cm parent)] (fn [] (fwd-slurp cm parent (dec n)))))))) (defn ^:export forward-slurp-sexp "paredit forward-slurp-sexp exposed for keymap." ([cm] (forward-slurp-sexp cm (cursor cm))) ([cm cur] (when-let [[parent sibling bracket] (trampoline fwd-slurp cm cur (char-count cm))] #_(js/console.log "FWD-SLURP" parent sibling bracket) (insert cm bracket 0 sibling);; put bracket in new spot (.replaceRange cm "" (cursor cm (- (index cm parent) (count bracket))) parent));; remove bracket from old spot (.setCursor cm cur))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-forward-down C-M-d ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn fwd-down "trampoline-able that looks for the cursor where we'd be if we went forward and then down into the next sibling that is available. nil if there is no sibling to enter." [cm cur n] (cond (<= n 0), nil (nil? cur), nil (opening-delim? cm cur), cur :default, (when-let [cur' (token-end cm cur 1)] (fn [] (fwd-down cm cur' (dec n)))))) (defn forward-down-cur ([cm] (forward-down-cur cm (cursor cm))) ([cm cur] (trampoline fwd-down cm cur (char-count cm)))) (defn ^:export forward-down ([cm] (forward-down cm (cursor cm))) ([cm cur] (when-let [cur' (forward-down-cur cm cur)] (.setCursor cm cur')))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-backward-down C-M-p ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn bkwd-down "trampoline-able that looks for the cursor where we'd be if we went backward and then down into the prev sibling that is available. nil if there is no sibling to enter." [cm cur n] (let [{:keys [left-cur i start ch bof]} (get-info cm cur)] (cond (<= n 0), (guard) (closing-delim? cm cur), left-cur bof, nil (zero? ch), (fn [] (bkwd-down cm (cursor cm (dec i)) (dec n))) :default, (fn [] (bkwd-down cm (cursor cm (- i (- ch start))) (dec n)))))) (defn ^:export backward-down ([cm] (backward-down cm (cursor cm))) ([cm cur] (when-let [cur' (trampoline bkwd-down cm cur (char-count cm))] (.setCursor cm cur')))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-backward-slurp-sexp C-), C-<right> ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn bkwd-slurp "trampolin-able that looks for an ancestor opening bracket (parent, grandparent, etc) that has a sibling to slurp. returns a vector of the cur to the left of such a bracket, the cur to the left of the sibling that will be slurped, the string of the bracket to move. nil if there is no such anscestor that can slurp." 
[cm cur n] (when (>= n 0) (let [ending (skip cm parent-closer-sp cur) parent (start-of-prev-sibling cm ending) sibling (start-of-prev-sibling cm parent) bracket-cur (forward-down-cur cm parent)] (if (and (not (nil? sibling)) (not (nil? bracket-cur))) [parent sibling (get-string cm bracket-cur)] (fn [] (bkwd-slurp cm parent (dec n))))))) (defn ^:export backward-slurp-sexp "paredit backward-slurp-sexp exposed for keymap." ([cm] (backward-slurp-sexp cm (cursor cm))) ([cm cur] (let [i (index cm cur)] ;; line,ch may change but index will not. (when-let [[parent sibling bracket] (trampoline bkwd-slurp cm cur (char-count cm))] (.replaceRange cm "" parent (cursor cm (+ (index cm parent) (count bracket)))) (insert cm bracket 0 sibling)) (.setCursor cm (cursor cm i))))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-forward-barf-sexp C-\} C-<left> ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn find-first-ws [stg ch] (let [cnt (count stg)] (loop [ch ch] (if (or (= (.charAt stg ch) " ") (= ch cnt)) ch (recur (inc ch)))))) (defn find-first-nonws [stg ch] (let [cnt (count stg)] (loop [ch ch] (if (or (not= (.charAt stg ch) " ") (= ch cnt)) ch (recur (inc ch)))))) (defn rfind-blank-or-start [stg] (let [rstg (str/reverse stg) cnt (count rstg) bdq? (= (.charAt stg 0) "\"") ch (->> 0 (find-first-ws rstg) (find-first-nonws rstg))] #_(js/console.log rstg ch) (cond (not= ch cnt) (dec ch) ; found ws and nonws bdq? (- ch 2) ; has beg dq and at beg :else ch))) (defn fwd-string-barf "String barffing consists of simply 'go to end of string, mark as parent, reverse look for non whitespace, reverse look for whitespace, mark as sibling'" [cm cur] (let [parent (if (start-of-a-string? cm cur) (end-of-next-sibling cm cur) (end-of-next-sibling cm (start-of-prev-sibling cm cur))) inside (cursor cm (dec (index cm parent))) {:keys [string i]} (get-info cm inside) ri (rfind-blank-or-start string) sibling (cursor cm (- i ri))] #_(js/console.log (index cm cur) i ri (- i ri)) (when (and parent inside) [parent inside sibling "\"" (< (- i ri) (index cm cur))]))) (defn fwd-barf "trampoline-able that looks for an ancestor closing bracket (parent, grandparent, etc) that has a sibling to barf. returns a vector of the cur to the right of such a bracket, the cur at the bracket, the cur where the bracket should go, the text of the bracket, and whether the operation causes the cursor to be moved. nil if there is no such anscestor that can barf" [cm cur n] (if (and (in-string? cm cur) (not (end-of-a-string? cm cur))) (fwd-string-barf cm cur) (when (>= n 0) (let [parent (skip cm parent-closer-sp cur) inside (cursor cm (dec (index cm parent))) sibling (start-of-prev-sibling cm inside) ;; prevsib: end of prev sibling if there is one: prevsib (end-of-next-sibling cm (start-of-prev-sibling cm sibling)) ;; bracket-cur: where the new bracket should go: bracket-cur (or prevsib (forward-down-cur cm (backward-up-cur cm sibling))) ;; whether the cursor needs to change: moved (and bracket-cur (< (index cm bracket-cur) (index cm cur))) ;; text of the bracket, e.g. ")" bracket (when parent (if moved (str (get-string cm parent) " ") (get-string cm parent)))] (cond (nil? parent) nil (nil? bracket-cur) (fn [] (fwd-barf cm parent (dec n))) :default [parent inside bracket-cur bracket moved]))))) (defn ^:export forward-barf-sexp "paredit forward-barf-sexp exposed for keymap." 
([cm] (forward-barf-sexp cm (cursor cm))) ([cm cur] (if-let [[parent inside sibling bracket moved] (trampoline fwd-barf cm cur (char-count cm))] (do #_(js/console.log parent inside sibling bracket moved) (.replaceRange cm "" inside parent) (insert cm bracket 0 sibling) (if moved (.setCursor cm sibling) (.setCursor cm cur))) (.setCursor cm cur)))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-backard-barf-sexp C-{, C-M-<right>, Esc C-<right> ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn bkwd-barf "trampoline-able that looks for an ancestor opening bracket (parent, grandparent, etc) that has a sibling to barf. returns... . nil if there is no such anscestor that can barf" [cm cur n] (when (>= n 0) (let [outside (backward-up-cur cm cur) inside (forward-down-cur cm outside) end-of-barfed-sexp (end-of-next-sibling cm inside) end-of-new-first-sib (end-of-next-sibling cm end-of-barfed-sexp) bracket-cur (start-of-prev-sibling cm end-of-new-first-sib) bracket-text (get-string cm inside) moved (and bracket-cur (< (index cm cur) (index cm bracket-cur)))] (cond (nil? outside) nil (nil? end-of-barfed-sexp) (fn [] (bkwd-barf cm outside (dec n))) :default [outside inside bracket-cur bracket-text moved])))) (defn ^:export backward-barf-sexp "paredit backward-barf-sexp exposed for keymap." ([cm] (backward-barf-sexp cm (cursor cm))) ([cm cur] (if-let [[outside inside bracket-cur bracket-text moved] (trampoline bkwd-barf cm cur (char-count cm))] (do (insert cm bracket-text 0 bracket-cur) (.replaceRange cm "" outside inside) (if moved (.setCursor cm (cursor cm (- (index cm cur) (count bracket-text)))) (.setCursor cm cur))) (.setCursor cm cur)))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-split-sexp M-S ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn split-form "split sexp for (forms like this)" [cm cur] (let [close-cur (skip cm parent-closer-sp cur) close-bracket (get-string cm close-cur) open-cur (start-of-prev-sibling cm close-cur) open-bracket (get-string cm (cursor cm (inc (index cm open-cur))))] (when (and (not (nil? open-bracket)) (not (nil? close-bracket))) (.setCursor cm cur) (let [offset (if (in-whitespace? cm) 1 (do (insert cm " ") (just-one-space cm (cursor cm) false) 0)) cur' (cursor cm) i' (+ (index cm cur') offset) prev-sib (start-of-prev-sibling cm cur') prev-sib-end (end-of-next-sibling cm prev-sib) next-sib (end-of-next-sibling cm cur) next-sib-start (start-of-prev-sibling cm next-sib)] (if (nil? next-sib-start) (insert cm open-bracket) (insert cm open-bracket 0 next-sib-start)) (if (nil? prev-sib-end) (do (move-left cm) (insert cm close-bracket)) (insert cm close-bracket 0 prev-sib-end)) (.setCursor cm (cursor cm i')))))) (defn split-string "split sexp for \"strings like this\"" [cm cur] (let [open-quote-i (index-of-next-non cm (index cm cur) " ")] (.replaceRange cm "\" \"" cur (cursor cm open-quote-i)) (move-left cm) (move-left cm))) (defn ^:export split-sexp "paredit split-sexp exposed for keymap." ([cm] (split-sexp cm (cursor cm))) ([cm cur] (if (in-string? cm cur) (split-string cm cur) (split-form cm cur)))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; paredit-join-sexps M-J ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn ^:export join-sexps "paredit join-sexps exposed for keymap." 
([cm] (join-sexps cm (cursor cm)))
([cm cur] (let [left-sib (start-of-prev-sibling cm cur) close (end-of-next-sibling cm left-sib) right-sib (end-of-next-sibling cm cur) open (start-of-prev-sibling cm right-sib) open-right (when open (cursor cm (inc (index cm open)))) close-char (get-string cm close) open-char (get-string cm open-right)] (if (and (not (nil? open)) (not (nil? close)) (pair? open-char close-char)) (do (.setCursor cm open) (delete cm) (.setCursor cm close) (backspace cm) (.setCursor cm (if (= (.-line open) (.-line close)) (cursor cm (dec (index cm cur))) cur))) (.setCursor cm cur)))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; paredit-reindent-defun M-q
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn top-most-opener-candidate "trampoline-able that looks for the top-most opening bracket for the specified location. returns the current cursor if there is no such ancestor" [cm cur n] (when (>= n 0) (if-let [parent (backward-up-cur cm cur)] (fn [] (top-most-opener-candidate cm parent (dec n))) cur)))
(defn top-most-opener "get the top most opening bracket for the specified location. nil if there is no such bracket." ([cm] (top-most-opener cm (cursor cm))) ([cm cur] (let [candidate (top-most-opener-candidate cm cur (char-count cm))] (when (not= candidate cur) candidate))))
(defn ^:export reindent-defun "paredit reindent-defun exposed for keymap." ([cm] (reindent-defun cm (cursor cm))) ([cm cur] (let [open (trampoline top-most-opener cm cur) close (end-of-next-sibling cm open) open-line (when open (.-line open)) line-offset (when open (- (.-line cur) open-line)) line-len (count (.getLine cm (.-line cur))) ch (.-ch cur)] (when (and (not (nil? open)) (not (nil? close))) (indent-lines cm (.-line open) (.-line close)) (dotimes [_ line-offset] (.execCommand cm "goLineDown")) (.execCommand cm "goLineStart") (.setCursor cm (cursor cm (+ (index cm) ch (- (count (.getLine cm (.-line (cursor cm)))) line-len))))))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; paredit-forward-sexp
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn ^:export forward-sexp "forward-sexp exposed for keymap. seems part of emacs and not part of paredit itself. but including it here since this will be used in things other than emacs itself." ([cm] (forward-sexp cm (cursor cm))) ([cm cur] (when-let [cur' (end-of-next-sibling cm cur)] (.setCursor cm cur'))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; paredit-backward-sexp
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn ^:export backward-sexp "backward-sexp exposed for keymap. seems part of emacs and not part of paredit itself. but including it here since this will be used in things other than emacs itself." ([cm] (backward-sexp cm (cursor cm))) ([cm cur] (when-let [cur' (start-of-prev-sibling cm cur)] (.setCursor cm cur'))))
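The commands above are tagged ^:export so their names survive advanced compilation and can be handed directly to a CodeMirror keymap. The sketch below only illustrates that wiring: the example.editor-setup namespace, the pe alias, and the key choices are assumptions, while addKeyMap is the standard CodeMirror 5 editor method.

;; Hedged sketch: binding a few of the exported paredit commands to keys.
;; `paredit.core` is a stand-in alias for the namespace defined above.
(ns example.editor-setup
  (:require [paredit.core :as pe]))

(defn paredit-keymap []
  ;; CodeMirror keymaps map key descriptors to functions of the editor instance,
  ;; which matches the [cm] arity of the exported commands.
  (clj->js {"Ctrl-K"     pe/kill
            "Alt-D"      pe/forward-kill-word
            "Ctrl-Right" pe/forward-slurp-sexp
            "Ctrl-Left"  pe/forward-barf-sexp
            "Alt-S"      pe/split-sexp
            "Alt-R"      pe/raise-sexp
            "Ctrl-Alt-F" pe/forward-sexp
            "Ctrl-Alt-B" pe/backward-sexp}))

(defn attach-paredit! [cm]
  ;; addKeyMap is part of the standard CodeMirror 5 editor API.
  (.addKeyMap cm (paredit-keymap)))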
src/cljs/doit/views.cljs
nilenso/do.it
(ns doit.views (:refer-clojure :exclude [subs]) (:require [re-frame.core :as rf] [reagent.core :as reagent] [doit.subs :as subs] [doit.auth :as auth] [doit.config :as config] [doit.events :as events])) (defn sign-out-btn [] [:a {:href "#" :on-click (fn [_] (auth/sign-out))} "Sign Out"]) (defn header [] (let [auth-token (rf/subscribe [::subs/auth-token])] (fn [] [:div.header [:h1 "DOβ€’IT"] (when @auth-token [sign-out-btn])]))) (defn add-todo [listid] (let [content (reagent/atom "")] (fn [] [:div.add-object [:input {:type "text" :placeholder "Enter a new todo..." :value @content :on-change (fn [val] (reset! content (.-value (.-target val))))}] [:button {:type "input" :on-click (fn [args] (rf/dispatch [::events/add-todo {:content @content :listid listid}]) (reset! content ""))} [:i {:class "fas fa-plus"}]]]))) (defn set-todo-height [id] (when-let [parent-element (.getElementById js/document (str "todo-" id))] ;; Change height of the text (set! (.-height (.-style parent-element)) "auto") (let [new-height (.-scrollHeight parent-element)] (set! (.-height (.-style parent-element)) (str new-height "px"))))) (defn editable-todo [id] (let [todo (rf/subscribe [::subs/todo id]) content (reagent/atom (:content @todo))] (fn [] (set-todo-height id) [:textarea.todo {:type "text" :id (str "todo-" id) :value @content :on-change (fn [val] (let [new-content (.-value (.-target val))] (reset! content new-content))) :on-blur (fn [] (rf/dispatch [::events/update-todo (assoc @todo :content @content)]))}]))) (defn remaining-todos-panel [listid] (let [todos (rf/subscribe [::subs/remaining-todos listid])] (fn [] [:div.items-remaining (for [todo @todos] ^{:key (:id todo)} [:div.item [:i.delete-btn.far.fa-trash-alt {:on-click (fn [_] (rf/dispatch [::events/delete-todo (:id todo)]))}] [:i.check-box.far.fa-square {:on-click (fn [_] (rf/dispatch [::events/mark-done (:id todo)]))}] [editable-todo (:id todo)]])]))) (defn completed-todos-panel [listid] (let [todos (rf/subscribe [::subs/completed-todos listid])] (fn [] [:div.items-completed (for [todo @todos] ^{:key (:id todo)} [:div.item [:i.delete-btn.far.fa-trash-alt {:on-click (fn [_] (rf/dispatch [::events/delete-todo (:id todo)]))}] [:i.check-box.fas.fa-check-square {:on-click (fn [_] (rf/dispatch [::events/mark-undone (:id todo)]))}] [editable-todo (:id todo)]])]))) (defn todos-panel [listid] [:div.list-items [remaining-todos-panel listid] [:hr] [completed-todos-panel listid]]) (defn add-todo-list [] (let [name (reagent/atom "")] (fn [] [:div.add-todo-list.add-object [:input {:type "text" :placeholder "Enter list name..." :value @name :on-change (fn [val] (reset! name (.-value (.-target val))))}] [:button {:type "input" :on-click (fn [args] (rf/dispatch [::events/add-todo-list {:name @name}]) (reset! name ""))} [:i {:class "fas fa-plus"}] "Add Todo List"]]))) (defn editable-list-name [id] (let [todo-list (rf/subscribe [::subs/todo-list id]) name (reagent/atom (:name @todo-list))] (fn [] [:input.list-title {:type "text" :value @name :on-change (fn [val] (let [new-name (.-value (.-target val))] (reset! name new-name))) :on-blur (fn [] (rf/dispatch [::events/update-todo-list (assoc @todo-list :name @name)]))}]))) (defn invite-user-panel [] (let [email (reagent/atom "")] (fn [] [:div.invite-user.add-object [:input {:type "text" :placeholder "[email protected]" :value @email :on-change (fn [val] (reset! email (.-value (.-target val))))}] [:button {:type "input" :on-click (fn [args] (rf/dispatch [::events/invite-user @email]) (reset! 
email ""))} [:i {:class "fas fa-plus"}] " Invite User"]]))) (defn lists-panel [] (let [todo-lists (rf/subscribe [::subs/todo-lists])] (fn [] [:div [add-todo-list] [:div.lists-panel (for [todo-list @todo-lists] ^{:key (:id todo-list)} [:div.list-container [:div.list-container-header [editable-list-name (:id todo-list)] [:i.delete-btn.far.fa-trash-alt {:on-click (fn [_] (rf/dispatch [::events/delete-todo-list (:id todo-list)]))}] [:i.fa.fa-archive {:on-click (fn [_] (rf/dispatch [::events/archive-todo-list (:id todo-list)]))}]] [todos-panel (:id todo-list)] [add-todo (:id todo-list)]])] [invite-user-panel]]))) (defn sign-in-panel [] [:div.sign-in-panel [:a {:href "#" :on-click auth/sign-in} [:img.sign-in-btn-img {:src "/images/btn_google_signin.png" :alt "sign in with Google"}]]]) (defn main-panel [] (let [auth-token (rf/subscribe [::subs/auth-token])] (fn [] [:div [header] [:div.main-container (if-not @auth-token [sign-in-panel] [lists-panel])]])))
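main-panel is the root view, so an entry-point namespace (not shown in this file) would normally mount it with Reagent and seed the re-frame db. A minimal sketch follows; the namespace name, the :initialise-db event, the "app" element id, and the use of reagent.dom are all assumptions rather than code from this repository.

;; Hedged sketch of an entry point that mounts main-panel.
(ns doit.core-sketch                        ; assumed name, not from the repo
  (:require [reagent.dom :as rdom]
            [re-frame.core :as rf]
            [doit.views :as views]))

(defn mount-root []
  ;; Clearing the subscription cache keeps hot reloads consistent.
  (rf/clear-subscription-cache!)
  (rdom/render [views/main-panel]
               (.getElementById js/document "app")))  ; assumed element id

(defn init []
  (rf/dispatch-sync [:initialise-db])       ; assumed initialisation event
  (mount-root))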
test/zero_one/geni/rdd_test.clj
WaqasAliAbbasi/geni
(ns zero-one.geni.rdd-test (:require [clojure.java.io :as io] [clojure.string :as string] [midje.sweet :refer [facts fact =>]] [zero-one.geni.aot-functions :as aot] [zero-one.geni.defaults] [zero-one.geni.partitioner :as partitioner] [zero-one.geni.rdd :as rdd] [zero-one.geni.test-resources :refer [create-temp-file!]]) (:import (org.apache.spark SparkContext) (org.apache.spark.api.java JavaRDD JavaSparkContext))) (def dummy-rdd (rdd/text-file "test/resources/rdd.txt")) (def dummy-pair-rdd (rdd/map-to-pair dummy-rdd aot/to-pair)) (facts "On variadic functions" :rdd (fact "expected 0-adic and 1-adic returns" (doall (for [variadic-fn [rdd/cartesian rdd/union rdd/intersection rdd/subtract]] (do (variadic-fn) => rdd/empty? (let [rand-num (rand-int 100)] (-> (rdd/parallelise [rand-num]) variadic-fn rdd/collect) => [rand-num]))))) (fact "expected 3-adic returns" (let [left (rdd/parallelise [1 2 3]) mid (rdd/parallelise [3 4 5]) right (rdd/parallelise [1 4 3])] (rdd/collect (rdd/union left mid right)) => [1 2 3 3 4 5 1 4 3] (rdd/collect (rdd/intersection left mid right)) => [3] (rdd/count (rdd/cartesian left mid right)) => 27 (rdd/collect (rdd/subtract left mid right)) => [2] (rdd/collect (rdd/subtract left mid right (rdd/parallelise [2]))) => empty?))) (facts "On JavaSparkContext methods" :rdd (fact "expected static fields" (rdd/app-name) => "Geni App" (rdd/value (rdd/broadcast [1 2 3])) => [1 2 3] (rdd/checkpoint-dir) => string? (rdd/conf) => map? (rdd/default-min-partitions) => integer? (rdd/default-parallelism) => integer? (rdd/empty-rdd) => (partial instance? JavaRDD) (rdd/jars) => vector? (rdd/local?) => true (rdd/local-property "abc") => nil? (rdd/master) => "local[*]" (rdd/persistent-rdds) => map? (rdd/resources) => {} (rdd/spark-home) => (System/getenv "SPARK_HOME") (rdd/sc) => (partial instance? SparkContext) (rdd/version) => "3.1.1")) (facts "On repartitioning" :rdd (fact "partition-by works" (-> dummy-rdd (rdd/map-to-pair aot/to-pair) (rdd/partition-by (partitioner/hash-partitioner 11)) rdd/num-partitions) => 11) (fact "repartition-and-sort-within-partitions works" (-> dummy-rdd (rdd/map-to-pair aot/to-pair) (rdd/repartition-and-sort-within-partitions (partitioner/hash-partitioner 1)) rdd/collect distinct) => #(= % (sort %)) (-> (rdd/parallelise [1 2 3 4 5 4 3 2 1]) (rdd/map-to-pair aot/to-pair) (rdd/repartition-and-sort-within-partitions (partitioner/hash-partitioner 1) >) rdd/collect distinct) => #(= % (reverse (sort %))))) (facts "On basic PairRDD transformations" :rdd (fact "cogroup work" (let [left (rdd/flat-map-to-pair dummy-rdd aot/split-spaces-and-pair) mid (rdd/filter left aot/first-equals-lewis-or-carroll) right (rdd/filter left aot/first-equals-lewis)] (-> (rdd/cogroup left mid right) rdd/collect flatten set) => #(every? 
% [1 "eBook" "Wonderland"]) (-> (rdd/cogroup left mid right 4) rdd/num-partitions) => 4 (-> (rdd/cogroup mid right) rdd/collect rdd/count) => 2)) (fact "sample-by-key + sample-by-key-exact works" (let [fractions {"Alice’s Adventures in Wonderland" 0.1 "Project Gutenberg’s" 0.1 "This eBook is for the use" 0.1 "at no cost and with" 0.1 "by Lewis Carroll" 0.1 "of anyone anywhere" 0.1}] (-> dummy-pair-rdd (rdd/sample-by-key true fractions) rdd/count) => #(< 2 % 27) (-> dummy-pair-rdd (rdd/sample-by-key true fractions 123) rdd/count) => #(< 2 % 27) (-> dummy-pair-rdd (rdd/sample-by-key-exact true fractions) rdd/count) => 14 (-> dummy-pair-rdd (rdd/sample-by-key-exact true fractions 123) rdd/count) => 14)) (fact "reduce-by-key-locally works" (-> dummy-pair-rdd (rdd/reduce-by-key-locally +)) => {"Alice’s Adventures in Wonderland" 18 "Project Gutenberg’s" 9 "This eBook is for the use" 27 "at no cost and with" 27 "by Lewis Carroll" 18 "of anyone anywhere" 27}) (fact "reduce-by-key works" (-> dummy-pair-rdd (rdd/reduce-by-key + 2) rdd/num-partitions) => 2) (fact "count-by-key-approx works" (let [result (-> dummy-pair-rdd (rdd/count-by-key-approx 100) rdd/final-value)] (map (comp keys second) result)) => (fn [ks] (every? #(= [:mean :confidence :low :high] %) ks))) (fact "count-approx-distinct-by-key works" (-> dummy-pair-rdd (rdd/count-approx-distinct-by-key 0.01) rdd/collect) => [["Alice’s Adventures in Wonderland" 1] ["at no cost and with" 1] ["of anyone anywhere" 1] ["by Lewis Carroll" 1] ["Project Gutenberg’s" 1] ["This eBook is for the use" 1]] (-> dummy-pair-rdd (rdd/count-approx-distinct-by-key 0.01 3) rdd/num-partitions) => 3) (fact "combine-by-key works" (-> dummy-pair-rdd (rdd/combine-by-key str str str) rdd/collect) => [["Alice’s Adventures in Wonderland" "111111111111111111"] ["at no cost and with" "111111111111111111111111111"] ["of anyone anywhere" "111111111111111111111111111"] ["by Lewis Carroll" "111111111111111111"] ["Project Gutenberg’s" "111111111"] ["This eBook is for the use" "111111111111111111111111111"]] (-> dummy-pair-rdd (rdd/combine-by-key str str str 2) rdd/num-partitions) => 2) (fact "fold-by-key works" (-> dummy-pair-rdd (rdd/fold-by-key 100 -) rdd/collect) => [["Alice’s Adventures in Wonderland" -2] ["at no cost and with" 1] ["of anyone anywhere" 1] ["by Lewis Carroll" 0] ["Project Gutenberg’s" -1] ["This eBook is for the use" 1]] (-> dummy-pair-rdd (rdd/fold-by-key 0 2 -) rdd/num-partitions) => 2) (fact "aggregate-by-key works" (-> dummy-pair-rdd (rdd/aggregate-by-key 0 + +) rdd/collect) => [["Alice’s Adventures in Wonderland" 18] ["at no cost and with" 27] ["of anyone anywhere" 27] ["by Lewis Carroll" 18] ["Project Gutenberg’s" 9] ["This eBook is for the use" 27]] (-> dummy-pair-rdd (rdd/aggregate-by-key 3 0 + +) rdd/num-partitions) => 3) (fact "group-by works" (-> dummy-pair-rdd (rdd/group-by str) rdd/keys rdd/distinct rdd/collect) => ["(Alice’s Adventures in Wonderland,1)" "(of anyone anywhere,1)" "(Project Gutenberg’s,1)" "(by Lewis Carroll,1)" "(at no cost and with,1)" "(This eBook is for the use,1)"] (-> dummy-pair-rdd (rdd/group-by str 7) rdd/num-partitions) => 7 (-> dummy-pair-rdd (rdd/group-by str 11) rdd/name) => #(string/includes? 
% "[clojure.core/str, 11]")) (fact "count-by-key works" (rdd/count-by-key dummy-pair-rdd) => {"Alice’s Adventures in Wonderland" 18 "Project Gutenberg’s" 9 "This eBook is for the use" 27 "at no cost and with" 27 "by Lewis Carroll" 18 "of anyone anywhere" 27}) (fact "lookup works" (-> dummy-pair-rdd (rdd/lookup "at no cost and with") distinct) => [1]) (fact "map-values works" (-> dummy-pair-rdd (rdd/map-values inc) rdd/values rdd/distinct rdd/collect) => [2]) (fact "flat-map-values works" (-> dummy-pair-rdd (rdd/flat-map-values aot/to-pair) rdd/distinct rdd/collect) => [["at no cost and with" 1] ["by Lewis Carroll" 1] ["Alice’s Adventures in Wonderland" 1] ["of anyone anywhere" 1] ["This eBook is for the use" 1] ["Project Gutenberg’s" 1]]) (fact "keys + values work" (-> dummy-pair-rdd rdd/keys rdd/distinct rdd/count) => 6 (-> dummy-pair-rdd rdd/values rdd/distinct rdd/collect) => [1]) (fact "filter + join + subtract-by-key work" (let [left (rdd/flat-map-to-pair dummy-rdd aot/split-spaces-and-pair) right (rdd/filter left aot/first-equals-lewis)] (-> right rdd/distinct rdd/collect) => [["Lewis" 1]] (-> left (rdd/join right) rdd/distinct rdd/collect) => [["Lewis" [1 1]]] (-> left (rdd/right-outer-join right) rdd/count) => 324 (-> left (rdd/left-outer-join right) rdd/count) => 828 (-> left (rdd/full-outer-join right) rdd/count) => 828 (-> left (rdd/join right 11) rdd/num-partitions) => 11 (-> left (rdd/right-outer-join right 2) rdd/num-partitions) => 2 (-> left (rdd/left-outer-join right 3) rdd/num-partitions) => 3 (-> left (rdd/full-outer-join right 4) rdd/num-partitions) => 4 (-> left (rdd/subtract-by-key right) rdd/distinct rdd/count) => 22 (-> left (rdd/subtract-by-key right 4) rdd/num-partitions) => 4))) (facts "On basic RDD saving and loading" :rdd (fact "binary-files works" (rdd/count (rdd/binary-files "test/resources/housing.parquet/*.parquet")) => 1 (rdd/count (rdd/binary-files "test/resources/housing.parquet/*.parquet" 2)) => 1) (fact "save-as-text-file works" (let [write-rdd (rdd/parallelise (mapv (fn [_] (rand-int 100)) (range 100))) temp-file (create-temp-file! ".rdd") read-rdd (do (io/delete-file temp-file true) (rdd/save-as-text-file write-rdd (str temp-file)) (rdd/text-file (str temp-file)))] (rdd/count read-rdd) => (rdd/count write-rdd) (rdd/count (rdd/whole-text-files (str temp-file))) => pos? (rdd/count (rdd/whole-text-files (str temp-file) 2)) => #(< 1 %)))) (facts "On basic RDD fields" :rdd (let [rdd (rdd/parallelise-doubles [1])] (rdd/context rdd) => (partial instance? JavaSparkContext) (rdd/id rdd) => integer? (rdd/name rdd) => nil? (rdd/checkpointed? rdd) => false (rdd/empty? (rdd/parallelise [])) => true (rdd/empty? rdd) => false (rdd/empty? rdd) => false (rdd/partitioner rdd) => nil? (-> dummy-rdd (rdd/map-to-pair aot/to-pair) (rdd/group-by-key (partitioner/hash-partitioner 123)) rdd/partitioner) => (complement nil?))) (facts "On basic PartialResult" :rdd (let [result (rdd/count-approx dummy-rdd 1000)] (rdd/initial-value result) => #(every? % [:mean :low :high :confidence]) (rdd/final-value result) => #(every? % [:mean :low :high :confidence]) (rdd/final? result) => boolean?) (-> (rdd/count-approx dummy-rdd 1000 0.9) rdd/initial-value :low) => #(< 100 %)) (facts "On basic RDD actions" :rdd (fact "collect-async works" @(rdd/collect-async (rdd/parallelise [1])) => [1]) (fact "collect-partitions works" (let [rdd (rdd/parallelise (into [] (range 100))) part-id (->> rdd rdd/partitions (map #(.index %)) first)] (rdd/collect-partitions rdd [part-id])) => #(and (every? seq? 
%) (every? (set (range 100)) (flatten %)))) (fact "count-approx-distinct works" (rdd/count-approx-distinct dummy-rdd 0.01) => #(< 3 % 7)) (fact "count-async works" @(rdd/count-async dummy-rdd) => 126) (fact "count-by-value works" (rdd/count-by-value dummy-rdd) => {"Alice’s Adventures in Wonderland" 18 "Project Gutenberg’s" 9 "This eBook is for the use" 27 "at no cost and with" 27 "by Lewis Carroll" 18 "of anyone anywhere" 27}) (fact "first works" (rdd/first dummy-rdd) => "Project Gutenberg’s") (fact "foreach works" (rdd/foreach dummy-rdd identity) => nil?) (fact "foreach-async works" @(rdd/foreach-async dummy-rdd identity) => nil?) (fact "foreach-partition works" (rdd/foreach-partition dummy-rdd identity) => nil?) (fact "foreach-partition-async works" @(rdd/foreach-partition-async dummy-rdd identity) => nil?) (fact "take works" (rdd/take dummy-rdd 3) => ["Project Gutenberg’s" "Alice’s Adventures in Wonderland" "by Lewis Carroll"]) (fact "take-async works" @(rdd/take-async dummy-rdd 2) => ["Project Gutenberg’s" "Alice’s Adventures in Wonderland"]) (fact "take-ordered works" (rdd/take-ordered dummy-rdd 20) => #(= (sort %) %) (let [rdd (rdd/parallelise (mapv (fn [_] (rand-int 100)) (range 100)))] (rdd/take-ordered rdd 20 >) => #(= (sort %) (reverse %)))) (fact "take-sample works" (let [rdd (rdd/parallelise (into [] (range 100)))] (rdd/take-sample rdd false 10) => #(= (-> % distinct count) 10)) (let [rdd (rdd/parallelise (into [] (range 100)))] (rdd/take-sample rdd true 100 1) => #(< (-> % distinct count) 100)))) (facts "On basic RDD transformations + actions" :rdd (-> dummy-rdd (rdd/top 2)) => ["of anyone anywhere" "of anyone anywhere"] (-> (rdd/parallelise [1 2 3]) (rdd/top 2 <)) => [3 2] (-> (rdd/text-file "test/resources/rdd.txt" 2) (rdd/map-to-pair aot/to-pair) rdd/group-by-key rdd/num-partitions) => #(< 1 %) (-> (rdd/parallelise-pairs [[1 2] [3 4]]) rdd/collect) => [[1 2] [3 4]] (-> dummy-pair-rdd (rdd/group-by-key 7) rdd/num-partitions) => 7 (fact "aggregate and fold work" (-> (rdd/parallelise (range 10)) (rdd/aggregate 0 + +)) => 45 (-> (rdd/parallelise (range 10)) (rdd/fold 0 +)) => 45) (fact "subtract works" (let [left (rdd/parallelise [1 2 3 4 5]) right (rdd/parallelise [9 8 7 6 5])] (-> (rdd/subtract left right) rdd/collect set) => #{1 2 3 4} (rdd/num-partitions (rdd/subtract left right 3)) => 3)) (fact "random-split works" (->> (rdd/random-split dummy-rdd [0.9 0.1]) (map rdd/count)) => #(< (second %) (first %)) (->> (rdd/random-split dummy-rdd [0.1 0.9] 123) (map rdd/count)) => #(< (first %) (second %))) (fact "persist and unpersist work" (-> (rdd/parallelise [1]) (rdd/persist rdd/disk-only) rdd/storage-level) => rdd/disk-only (-> (rdd/parallelise [1]) (rdd/persist rdd/disk-only) rdd/unpersist rdd/storage-level) => #(not= % rdd/disk-only) (-> (rdd/parallelise [1]) (rdd/persist rdd/disk-only) (rdd/unpersist false) rdd/storage-level) => #(not= % rdd/disk-only)) (fact "max and min work" (-> (rdd/parallelise [-1 2 3]) (rdd/max <)) => 3 (-> (rdd/parallelise [-1 2 3]) (rdd/min >)) => 3) (fact "key-by works" (-> (rdd/parallelise ["a" "b" "c"]) (rdd/key-by identity) rdd/collect) => [["a" "a"] ["b" "b"] ["c" "c"]]) (fact "flat-map + filter works" (let [result-rdd (-> dummy-rdd (rdd/flat-map aot/split-spaces) (rdd/filter aot/equals-lewis))] (-> result-rdd rdd/collect count) => 18 (-> result-rdd rdd/name) => (complement nil?))) (fact "map works" (-> dummy-rdd (rdd/map count) rdd/collect) => #(every? integer? 
%)) (fact "reduce works" (-> dummy-rdd (rdd/map count) (rdd/reduce +)) => 2709 (-> (rdd/parallelise [1 2 3 4 5]) (rdd/reduce *)) => 120) (fact "map-to-pair + reduce-by-key + collect work" (-> dummy-pair-rdd (rdd/reduce-by-key +) rdd/collect) => [["Alice’s Adventures in Wonderland" 18] ["at no cost and with" 27] ["of anyone anywhere" 27] ["by Lewis Carroll" 18] ["Project Gutenberg’s" 9] ["This eBook is for the use" 27]] (-> dummy-pair-rdd rdd/collect) => #(and (every? vector? %) (every? (comp (partial = 2) count) %) (every? (comp string? first) %) (every? (comp (partial = 1) second) %))) (fact "sort-by-key works" (-> dummy-pair-rdd (rdd/reduce-by-key +) rdd/sort-by-key rdd/collect) => #(= (sort %) %) (-> dummy-pair-rdd (rdd/reduce-by-key +) (rdd/sort-by-key false) rdd/collect) => #(= (sort %) (reverse %))) (fact "flat-map-to-pair works" (-> (rdd/parallelise ["hello world!" "hello spark and geni!" "the spark world is awesome!"]) (rdd/flat-map-to-pair aot/split-spaces-and-pair) (rdd/reduce-by-key +) rdd/collect set) => #{["spark" 2] ["world" 1] ["and" 1] ["geni!" 1] ["the" 1] ["awesome!" 1] ["is" 1] ["hello" 2] ["world!" 1]}) (fact "map-partitions works" (-> (rdd/parallelise ["abc def" "ghi jkl" "mno pqr"]) (rdd/map-partitions aot/map-split-spaces) rdd/collect) => ["abc" "def" "ghi" "jkl" "mno" "pqr"]) (fact "map-partitions-to-pair works" (-> (rdd/parallelise ["abc def"]) (rdd/map-partitions-to-pair aot/mapcat-split-spaces) rdd/collect) => [["abc" 1] ["def" 1]] (-> (rdd/parallelise ["abc def"]) (rdd/map-partitions-to-pair aot/mapcat-split-spaces true) rdd/num-partitions) => (rdd/default-parallelism)) (fact "map-partitions-with-index works" (-> (rdd/parallelise ["abc def" "ghi jkl" "mno pqr"]) (rdd/map-partitions-with-index aot/map-split-spaces-with-index) rdd/collect) => #(and (every? integer? (map first %)) (= (set (map second %)) #{"abc" "def" "ghi" "jkl" "mno" "pqr"}))) (fact "zips work" (let [left (rdd/parallelise ["a b c" "d e f g h i"]) right (rdd/parallelise ["j k l m n o" "pqr stu"])] (-> (rdd/zip left right) rdd/collect) => [["a b c" "j k l m n o"] ["d e f g h i" "pqr stu"]] (-> (rdd/zip-partitions left right aot/zip-split-spaces) rdd/collect) => ["aj" "bk" "cl" "dpqr" "estu"] (-> (rdd/zip-with-index left) rdd/collect) => [["a b c" 0] ["d e f g h i" 1]]) (let [zipped-values (rdd/collect (rdd/zip-with-unique-id dummy-rdd))] (->> zipped-values (map second) set count) => (rdd/count dummy-rdd))) (fact "sample works" (let [rdd dummy-rdd] (rdd/count (rdd/sample rdd true 0.1)) => #(< 2 % 27) (rdd/count (rdd/sample rdd false 0.1 123)) => #(< 2 % 27))) (fact "coalesce works" (let [rdd (rdd/parallelise ["abc" "def"])] (-> rdd (rdd/coalesce 1) rdd/collect) => ["abc" "def"] (-> rdd (rdd/coalesce 1 true) rdd/collect set) => #{"abc" "def"})) (fact "repartition works" (-> dummy-rdd (rdd/repartition 10) rdd/num-partitions) => 10) (fact "cartesian works" (let [left (rdd/parallelise ["abc" "def"]) right (rdd/parallelise ["def" "ghi"])] (rdd/collect (rdd/cartesian left right)) => [["abc" "def"] ["abc" "ghi"] ["def" "def"] ["def" "ghi"]])) (fact "cache works" (-> dummy-rdd rdd/cache rdd/count) => 126) (fact "distinct works" (-> dummy-rdd rdd/distinct rdd/collect count) => 6 (-> dummy-rdd (rdd/distinct 2) rdd/num-partitions) => 2 (-> dummy-rdd (rdd/distinct 3) rdd/name) => #(string/includes? 
% "[3]")) (fact "zip-partitions works" (let [left (rdd/parallelise ["a b c" "d e f g h i"]) right (rdd/parallelise ["j k l m n o" "pqr stu"])] (-> (rdd/zip-partitions left right aot/zip-split-spaces) rdd/collect)) => ["aj" "bk" "cl" "dpqr" "estu"]) (fact "union works" (let [rdd (rdd/parallelise ["abc" "def"])] (rdd/collect (rdd/union rdd rdd)) => ["abc" "def" "abc" "def"])) (fact "intersection works" (let [left (rdd/parallelise ["abc" "def"]) right (rdd/parallelise ["def" "ghi"])] (rdd/collect (rdd/intersection left right)) => ["def"])) (fact "glom works" (-> dummy-rdd rdd/glom rdd/count) => #(< % 126)))
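Taken together, the facts above sketch the public zero-one.geni.rdd API. The snippet below assembles the same calls into a minimal word-count helper; the example.word-count namespace name and the path in the comment block are illustrative, everything else mirrors usage shown in the tests.

;; Hedged sketch: the word-count pattern exercised by the facts above.
(ns example.word-count
  (:require [zero-one.geni.aot-functions :as aot]  ; the tests use AOT-compiled fns so Spark can serialise them
            [zero-one.geni.defaults]               ; sets up the default Spark session/context, as in the tests
            [zero-one.geni.rdd :as rdd]))

(defn word-counts
  "Returns [[word count] ...] for the text file at `path`."
  [path]
  (-> (rdd/text-file path)                             ; one RDD element per line
      (rdd/flat-map-to-pair aot/split-spaces-and-pair) ; -> [word 1] pairs
      (rdd/reduce-by-key +)                            ; sum the 1s per word
      rdd/collect))

(comment
  ;; e.g. against the fixture used above:
  (word-counts "test/resources/rdd.txt"))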
(set (range 100)) (flatten %)))) (fact "count-approx-distinct works" (rdd/count-approx-distinct dummy-rdd 0.01) => #(< 3 % 7)) (fact "count-async works" @(rdd/count-async dummy-rdd) => 126) (fact "count-by-value works" (rdd/count-by-value dummy-rdd) => {"Alice’s Adventures in Wonderland" 18 "Project Gutenberg’s" 9 "This eBook is for the use" 27 "at no cost and with" 27 "by <NAME>" 18 "of anyone anywhere" 27}) (fact "first works" (rdd/first dummy-rdd) => "Project Gutenberg’s") (fact "foreach works" (rdd/foreach dummy-rdd identity) => nil?) (fact "foreach-async works" @(rdd/foreach-async dummy-rdd identity) => nil?) (fact "foreach-partition works" (rdd/foreach-partition dummy-rdd identity) => nil?) (fact "foreach-partition-async works" @(rdd/foreach-partition-async dummy-rdd identity) => nil?) (fact "take works" (rdd/take dummy-rdd 3) => ["Project Gutenberg’s" "Alice’s Adventures in Wonderland" "by <NAME>"]) (fact "take-async works" @(rdd/take-async dummy-rdd 2) => ["Project Gutenberg’s" "Alice’s Adventures in Wonderland"]) (fact "take-ordered works" (rdd/take-ordered dummy-rdd 20) => #(= (sort %) %) (let [rdd (rdd/parallelise (mapv (fn [_] (rand-int 100)) (range 100)))] (rdd/take-ordered rdd 20 >) => #(= (sort %) (reverse %)))) (fact "take-sample works" (let [rdd (rdd/parallelise (into [] (range 100)))] (rdd/take-sample rdd false 10) => #(= (-> % distinct count) 10)) (let [rdd (rdd/parallelise (into [] (range 100)))] (rdd/take-sample rdd true 100 1) => #(< (-> % distinct count) 100)))) (facts "On basic RDD transformations + actions" :rdd (-> dummy-rdd (rdd/top 2)) => ["of anyone anywhere" "of anyone anywhere"] (-> (rdd/parallelise [1 2 3]) (rdd/top 2 <)) => [3 2] (-> (rdd/text-file "test/resources/rdd.txt" 2) (rdd/map-to-pair aot/to-pair) rdd/group-by-key rdd/num-partitions) => #(< 1 %) (-> (rdd/parallelise-pairs [[1 2] [3 4]]) rdd/collect) => [[1 2] [3 4]] (-> dummy-pair-rdd (rdd/group-by-key 7) rdd/num-partitions) => 7 (fact "aggregate and fold work" (-> (rdd/parallelise (range 10)) (rdd/aggregate 0 + +)) => 45 (-> (rdd/parallelise (range 10)) (rdd/fold 0 +)) => 45) (fact "subtract works" (let [left (rdd/parallelise [1 2 3 4 5]) right (rdd/parallelise [9 8 7 6 5])] (-> (rdd/subtract left right) rdd/collect set) => #{1 2 3 4} (rdd/num-partitions (rdd/subtract left right 3)) => 3)) (fact "random-split works" (->> (rdd/random-split dummy-rdd [0.9 0.1]) (map rdd/count)) => #(< (second %) (first %)) (->> (rdd/random-split dummy-rdd [0.1 0.9] 123) (map rdd/count)) => #(< (first %) (second %))) (fact "persist and unpersist work" (-> (rdd/parallelise [1]) (rdd/persist rdd/disk-only) rdd/storage-level) => rdd/disk-only (-> (rdd/parallelise [1]) (rdd/persist rdd/disk-only) rdd/unpersist rdd/storage-level) => #(not= % rdd/disk-only) (-> (rdd/parallelise [1]) (rdd/persist rdd/disk-only) (rdd/unpersist false) rdd/storage-level) => #(not= % rdd/disk-only)) (fact "max and min work" (-> (rdd/parallelise [-1 2 3]) (rdd/max <)) => 3 (-> (rdd/parallelise [-1 2 3]) (rdd/min >)) => 3) (fact "key-by works" (-> (rdd/parallelise ["a" "b" "c"]) (rdd/key-by identity) rdd/collect) => [["a" "a"] ["b" "b"] ["c" "c"]]) (fact "flat-map + filter works" (let [result-rdd (-> dummy-rdd (rdd/flat-map aot/split-spaces) (rdd/filter aot/equals-lewis))] (-> result-rdd rdd/collect count) => 18 (-> result-rdd rdd/name) => (complement nil?))) (fact "map works" (-> dummy-rdd (rdd/map count) rdd/collect) => #(every? integer? 
%)) (fact "reduce works" (-> dummy-rdd (rdd/map count) (rdd/reduce +)) => 2709 (-> (rdd/parallelise [1 2 3 4 5]) (rdd/reduce *)) => 120) (fact "map-to-pair + reduce-by-key + collect work" (-> dummy-pair-rdd (rdd/reduce-by-key +) rdd/collect) => [["<NAME>’s Adventures in Wonderland" 18] ["at no cost and with" 27] ["of anyone anywhere" 27] ["by <NAME>" 18] ["Project Gutenberg’s" 9] ["This eBook is for the use" 27]] (-> dummy-pair-rdd rdd/collect) => #(and (every? vector? %) (every? (comp (partial = 2) count) %) (every? (comp string? first) %) (every? (comp (partial = 1) second) %))) (fact "sort-by-key works" (-> dummy-pair-rdd (rdd/reduce-by-key +) rdd/sort-by-key rdd/collect) => #(= (sort %) %) (-> dummy-pair-rdd (rdd/reduce-by-key +) (rdd/sort-by-key false) rdd/collect) => #(= (sort %) (reverse %))) (fact "flat-map-to-pair works" (-> (rdd/parallelise ["hello world!" "hello spark and geni!" "the spark world is awesome!"]) (rdd/flat-map-to-pair aot/split-spaces-and-pair) (rdd/reduce-by-key +) rdd/collect set) => #{["spark" 2] ["world" 1] ["and" 1] ["geni!" 1] ["the" 1] ["awesome!" 1] ["is" 1] ["hello" 2] ["world!" 1]}) (fact "map-partitions works" (-> (rdd/parallelise ["abc def" "ghi jkl" "mno pqr"]) (rdd/map-partitions aot/map-split-spaces) rdd/collect) => ["abc" "def" "ghi" "jkl" "mno" "pqr"]) (fact "map-partitions-to-pair works" (-> (rdd/parallelise ["abc def"]) (rdd/map-partitions-to-pair aot/mapcat-split-spaces) rdd/collect) => [["abc" 1] ["def" 1]] (-> (rdd/parallelise ["abc def"]) (rdd/map-partitions-to-pair aot/mapcat-split-spaces true) rdd/num-partitions) => (rdd/default-parallelism)) (fact "map-partitions-with-index works" (-> (rdd/parallelise ["abc def" "ghi jkl" "mno pqr"]) (rdd/map-partitions-with-index aot/map-split-spaces-with-index) rdd/collect) => #(and (every? integer? (map first %)) (= (set (map second %)) #{"abc" "def" "ghi" "jkl" "mno" "pqr"}))) (fact "zips work" (let [left (rdd/parallelise ["a b c" "d e f g h i"]) right (rdd/parallelise ["j k l m n o" "pqr stu"])] (-> (rdd/zip left right) rdd/collect) => [["a b c" "j k l m n o"] ["d e f g h i" "pqr stu"]] (-> (rdd/zip-partitions left right aot/zip-split-spaces) rdd/collect) => ["aj" "bk" "cl" "dpqr" "estu"] (-> (rdd/zip-with-index left) rdd/collect) => [["a b c" 0] ["d e f g h i" 1]]) (let [zipped-values (rdd/collect (rdd/zip-with-unique-id dummy-rdd))] (->> zipped-values (map second) set count) => (rdd/count dummy-rdd))) (fact "sample works" (let [rdd dummy-rdd] (rdd/count (rdd/sample rdd true 0.1)) => #(< 2 % 27) (rdd/count (rdd/sample rdd false 0.1 123)) => #(< 2 % 27))) (fact "coalesce works" (let [rdd (rdd/parallelise ["abc" "def"])] (-> rdd (rdd/coalesce 1) rdd/collect) => ["abc" "def"] (-> rdd (rdd/coalesce 1 true) rdd/collect set) => #{"abc" "def"})) (fact "repartition works" (-> dummy-rdd (rdd/repartition 10) rdd/num-partitions) => 10) (fact "cartesian works" (let [left (rdd/parallelise ["abc" "def"]) right (rdd/parallelise ["def" "ghi"])] (rdd/collect (rdd/cartesian left right)) => [["abc" "def"] ["abc" "ghi"] ["def" "def"] ["def" "ghi"]])) (fact "cache works" (-> dummy-rdd rdd/cache rdd/count) => 126) (fact "distinct works" (-> dummy-rdd rdd/distinct rdd/collect count) => 6 (-> dummy-rdd (rdd/distinct 2) rdd/num-partitions) => 2 (-> dummy-rdd (rdd/distinct 3) rdd/name) => #(string/includes? 
% "[3]")) (fact "zip-partitions works" (let [left (rdd/parallelise ["a b c" "d e f g h i"]) right (rdd/parallelise ["j k l m n o" "pqr stu"])] (-> (rdd/zip-partitions left right aot/zip-split-spaces) rdd/collect)) => ["aj" "bk" "cl" "dpqr" "estu"]) (fact "union works" (let [rdd (rdd/parallelise ["abc" "def"])] (rdd/collect (rdd/union rdd rdd)) => ["abc" "def" "abc" "def"])) (fact "intersection works" (let [left (rdd/parallelise ["abc" "def"]) right (rdd/parallelise ["def" "ghi"])] (rdd/collect (rdd/intersection left right)) => ["def"])) (fact "glom works" (-> dummy-rdd rdd/glom rdd/count) => #(< % 126)))
true
(ns zero-one.geni.rdd-test (:require [clojure.java.io :as io] [clojure.string :as string] [midje.sweet :refer [facts fact =>]] [zero-one.geni.aot-functions :as aot] [zero-one.geni.defaults] [zero-one.geni.partitioner :as partitioner] [zero-one.geni.rdd :as rdd] [zero-one.geni.test-resources :refer [create-temp-file!]]) (:import (org.apache.spark SparkContext) (org.apache.spark.api.java JavaRDD JavaSparkContext))) (def dummy-rdd (rdd/text-file "test/resources/rdd.txt")) (def dummy-pair-rdd (rdd/map-to-pair dummy-rdd aot/to-pair)) (facts "On variadic functions" :rdd (fact "expected 0-adic and 1-adic returns" (doall (for [variadic-fn [rdd/cartesian rdd/union rdd/intersection rdd/subtract]] (do (variadic-fn) => rdd/empty? (let [rand-num (rand-int 100)] (-> (rdd/parallelise [rand-num]) variadic-fn rdd/collect) => [rand-num]))))) (fact "expected 3-adic returns" (let [left (rdd/parallelise [1 2 3]) mid (rdd/parallelise [3 4 5]) right (rdd/parallelise [1 4 3])] (rdd/collect (rdd/union left mid right)) => [1 2 3 3 4 5 1 4 3] (rdd/collect (rdd/intersection left mid right)) => [3] (rdd/count (rdd/cartesian left mid right)) => 27 (rdd/collect (rdd/subtract left mid right)) => [2] (rdd/collect (rdd/subtract left mid right (rdd/parallelise [2]))) => empty?))) (facts "On JavaSparkContext methods" :rdd (fact "expected static fields" (rdd/app-name) => "Geni App" (rdd/value (rdd/broadcast [1 2 3])) => [1 2 3] (rdd/checkpoint-dir) => string? (rdd/conf) => map? (rdd/default-min-partitions) => integer? (rdd/default-parallelism) => integer? (rdd/empty-rdd) => (partial instance? JavaRDD) (rdd/jars) => vector? (rdd/local?) => true (rdd/local-property "abc") => nil? (rdd/master) => "local[*]" (rdd/persistent-rdds) => map? (rdd/resources) => {} (rdd/spark-home) => (System/getenv "SPARK_HOME") (rdd/sc) => (partial instance? SparkContext) (rdd/version) => "3.1.1")) (facts "On repartitioning" :rdd (fact "partition-by works" (-> dummy-rdd (rdd/map-to-pair aot/to-pair) (rdd/partition-by (partitioner/hash-partitioner 11)) rdd/num-partitions) => 11) (fact "repartition-and-sort-within-partitions works" (-> dummy-rdd (rdd/map-to-pair aot/to-pair) (rdd/repartition-and-sort-within-partitions (partitioner/hash-partitioner 1)) rdd/collect distinct) => #(= % (sort %)) (-> (rdd/parallelise [1 2 3 4 5 4 3 2 1]) (rdd/map-to-pair aot/to-pair) (rdd/repartition-and-sort-within-partitions (partitioner/hash-partitioner 1) >) rdd/collect distinct) => #(= % (reverse (sort %))))) (facts "On basic PairRDD transformations" :rdd (fact "cogroup work" (let [left (rdd/flat-map-to-pair dummy-rdd aot/split-spaces-and-pair) mid (rdd/filter left aot/first-equals-lewis-or-carroll) right (rdd/filter left aot/first-equals-lewis)] (-> (rdd/cogroup left mid right) rdd/collect flatten set) => #(every? 
% [1 "eBook" "Wonderland"]) (-> (rdd/cogroup left mid right 4) rdd/num-partitions) => 4 (-> (rdd/cogroup mid right) rdd/collect rdd/count) => 2)) (fact "sample-by-key + sample-by-key-exact works" (let [fractions {"Alice’s Adventures in Wonderland" 0.1 "Project Gutenberg’s" 0.1 "This eBook is for the use" 0.1 "at no cost and with" 0.1 "by PI:NAME:<NAME>END_PI" 0.1 "of anyone anywhere" 0.1}] (-> dummy-pair-rdd (rdd/sample-by-key true fractions) rdd/count) => #(< 2 % 27) (-> dummy-pair-rdd (rdd/sample-by-key true fractions 123) rdd/count) => #(< 2 % 27) (-> dummy-pair-rdd (rdd/sample-by-key-exact true fractions) rdd/count) => 14 (-> dummy-pair-rdd (rdd/sample-by-key-exact true fractions 123) rdd/count) => 14)) (fact "reduce-by-key-locally works" (-> dummy-pair-rdd (rdd/reduce-by-key-locally +)) => {"Alice’s Adventures in Wonderland" 18 "Project Gutenberg’s" 9 "This eBook is for the use" 27 "at no cost and with" 27 "by PI:NAME:<NAME>END_PI" 18 "of anyone anywhere" 27}) (fact "reduce-by-key works" (-> dummy-pair-rdd (rdd/reduce-by-key + 2) rdd/num-partitions) => 2) (fact "count-by-key-approx works" (let [result (-> dummy-pair-rdd (rdd/count-by-key-approx 100) rdd/final-value)] (map (comp keys second) result)) => (fn [ks] (every? #(= [:mean :confidence :low :high] %) ks))) (fact "count-approx-distinct-by-key works" (-> dummy-pair-rdd (rdd/count-approx-distinct-by-key 0.01) rdd/collect) => [["PI:NAME:<NAME>END_PI’s Adventures in Wonderland" 1] ["at no cost and with" 1] ["of anyone anywhere" 1] ["by PI:NAME:<NAME>END_PI" 1] ["Project Gutenberg’s" 1] ["This eBook is for the use" 1]] (-> dummy-pair-rdd (rdd/count-approx-distinct-by-key 0.01 3) rdd/num-partitions) => 3) (fact "combine-by-key works" (-> dummy-pair-rdd (rdd/combine-by-key str str str) rdd/collect) => [["PI:NAME:<NAME>END_PI’s Adventures in Wonderland" "111111111111111111"] ["at no cost and with" "111111111111111111111111111"] ["of anyone anywhere" "111111111111111111111111111"] ["by PI:NAME:<NAME>END_PI" "111111111111111111"] ["Project Gutenberg’s" "111111111"] ["This eBook is for the use" "111111111111111111111111111"]] (-> dummy-pair-rdd (rdd/combine-by-key str str str 2) rdd/num-partitions) => 2) (fact "fold-by-key works" (-> dummy-pair-rdd (rdd/fold-by-key 100 -) rdd/collect) => [["PI:NAME:<NAME>END_PI’s Adventures in Wonderland" -2] ["at no cost and with" 1] ["of anyone anywhere" 1] ["by PI:NAME:<NAME>END_PI" 0] ["Project Gutenberg’s" -1] ["This eBook is for the use" 1]] (-> dummy-pair-rdd (rdd/fold-by-key 0 2 -) rdd/num-partitions) => 2) (fact "aggregate-by-key works" (-> dummy-pair-rdd (rdd/aggregate-by-key 0 + +) rdd/collect) => [["PI:NAME:<NAME>END_PI’s Adventures in Wonderland" 18] ["at no cost and with" 27] ["of anyone anywhere" 27] ["by PI:NAME:<NAME>END_PI" 18] ["Project Gutenberg’s" 9] ["This eBook is for the use" 27]] (-> dummy-pair-rdd (rdd/aggregate-by-key 3 0 + +) rdd/num-partitions) => 3) (fact "group-by works" (-> dummy-pair-rdd (rdd/group-by str) rdd/keys rdd/distinct rdd/collect) => ["(Alice’s Adventures in Wonderland,1)" "(of anyone anywhere,1)" "(Project Gutenberg’s,1)" "(by PI:NAME:<NAME>END_PI,1)" "(at no cost and with,1)" "(This eBook is for the use,1)"] (-> dummy-pair-rdd (rdd/group-by str 7) rdd/num-partitions) => 7 (-> dummy-pair-rdd (rdd/group-by str 11) rdd/name) => #(string/includes? 
% "[clojure.core/str, 11]")) (fact "count-by-key works" (rdd/count-by-key dummy-pair-rdd) => {"PI:NAME:<NAME>END_PI’s Adventures in Wonderland" 18 "Project Gutenberg’s" 9 "This eBook is for the use" 27 "at no cost and with" 27 "by PI:NAME:<NAME>END_PI" 18 "of anyone anywhere" 27}) (fact "lookup works" (-> dummy-pair-rdd (rdd/lookup "at no cost and with") distinct) => [1]) (fact "map-values works" (-> dummy-pair-rdd (rdd/map-values inc) rdd/values rdd/distinct rdd/collect) => [2]) (fact "flat-map-values works" (-> dummy-pair-rdd (rdd/flat-map-values aot/to-pair) rdd/distinct rdd/collect) => [["at no cost and with" 1] ["by PI:NAME:<NAME>END_PI" 1] ["PI:NAME:<NAME>END_PI’s Adventures in Wonderland" 1] ["of anyone anywhere" 1] ["This eBook is for the use" 1] ["Project Gutenberg’s" 1]]) (fact "keys + values work" (-> dummy-pair-rdd rdd/keys rdd/distinct rdd/count) => 6 (-> dummy-pair-rdd rdd/values rdd/distinct rdd/collect) => [1]) (fact "filter + join + subtract-by-key work" (let [left (rdd/flat-map-to-pair dummy-rdd aot/split-spaces-and-pair) right (rdd/filter left aot/first-equals-lewis)] (-> right rdd/distinct rdd/collect) => [["PI:NAME:<NAME>END_PI" 1]] (-> left (rdd/join right) rdd/distinct rdd/collect) => [["PI:NAME:<NAME>END_PI" [1 1]]] (-> left (rdd/right-outer-join right) rdd/count) => 324 (-> left (rdd/left-outer-join right) rdd/count) => 828 (-> left (rdd/full-outer-join right) rdd/count) => 828 (-> left (rdd/join right 11) rdd/num-partitions) => 11 (-> left (rdd/right-outer-join right 2) rdd/num-partitions) => 2 (-> left (rdd/left-outer-join right 3) rdd/num-partitions) => 3 (-> left (rdd/full-outer-join right 4) rdd/num-partitions) => 4 (-> left (rdd/subtract-by-key right) rdd/distinct rdd/count) => 22 (-> left (rdd/subtract-by-key right 4) rdd/num-partitions) => 4))) (facts "On basic RDD saving and loading" :rdd (fact "binary-files works" (rdd/count (rdd/binary-files "test/resources/housing.parquet/*.parquet")) => 1 (rdd/count (rdd/binary-files "test/resources/housing.parquet/*.parquet" 2)) => 1) (fact "save-as-text-file works" (let [write-rdd (rdd/parallelise (mapv (fn [_] (rand-int 100)) (range 100))) temp-file (create-temp-file! ".rdd") read-rdd (do (io/delete-file temp-file true) (rdd/save-as-text-file write-rdd (str temp-file)) (rdd/text-file (str temp-file)))] (rdd/count read-rdd) => (rdd/count write-rdd) (rdd/count (rdd/whole-text-files (str temp-file))) => pos? (rdd/count (rdd/whole-text-files (str temp-file) 2)) => #(< 1 %)))) (facts "On basic RDD fields" :rdd (let [rdd (rdd/parallelise-doubles [1])] (rdd/context rdd) => (partial instance? JavaSparkContext) (rdd/id rdd) => integer? (rdd/name rdd) => nil? (rdd/checkpointed? rdd) => false (rdd/empty? (rdd/parallelise [])) => true (rdd/empty? rdd) => false (rdd/empty? rdd) => false (rdd/partitioner rdd) => nil? (-> dummy-rdd (rdd/map-to-pair aot/to-pair) (rdd/group-by-key (partitioner/hash-partitioner 123)) rdd/partitioner) => (complement nil?))) (facts "On basic PartialResult" :rdd (let [result (rdd/count-approx dummy-rdd 1000)] (rdd/initial-value result) => #(every? % [:mean :low :high :confidence]) (rdd/final-value result) => #(every? % [:mean :low :high :confidence]) (rdd/final? result) => boolean?) 
(-> (rdd/count-approx dummy-rdd 1000 0.9) rdd/initial-value :low) => #(< 100 %)) (facts "On basic RDD actions" :rdd (fact "collect-async works" @(rdd/collect-async (rdd/parallelise [1])) => [1]) (fact "collect-partitions works" (let [rdd (rdd/parallelise (into [] (range 100))) part-id (->> rdd rdd/partitions (map #(.index %)) first)] (rdd/collect-partitions rdd [part-id])) => #(and (every? seq? %) (every? (set (range 100)) (flatten %)))) (fact "count-approx-distinct works" (rdd/count-approx-distinct dummy-rdd 0.01) => #(< 3 % 7)) (fact "count-async works" @(rdd/count-async dummy-rdd) => 126) (fact "count-by-value works" (rdd/count-by-value dummy-rdd) => {"Alice’s Adventures in Wonderland" 18 "Project Gutenberg’s" 9 "This eBook is for the use" 27 "at no cost and with" 27 "by PI:NAME:<NAME>END_PI" 18 "of anyone anywhere" 27}) (fact "first works" (rdd/first dummy-rdd) => "Project Gutenberg’s") (fact "foreach works" (rdd/foreach dummy-rdd identity) => nil?) (fact "foreach-async works" @(rdd/foreach-async dummy-rdd identity) => nil?) (fact "foreach-partition works" (rdd/foreach-partition dummy-rdd identity) => nil?) (fact "foreach-partition-async works" @(rdd/foreach-partition-async dummy-rdd identity) => nil?) (fact "take works" (rdd/take dummy-rdd 3) => ["Project Gutenberg’s" "Alice’s Adventures in Wonderland" "by PI:NAME:<NAME>END_PI"]) (fact "take-async works" @(rdd/take-async dummy-rdd 2) => ["Project Gutenberg’s" "Alice’s Adventures in Wonderland"]) (fact "take-ordered works" (rdd/take-ordered dummy-rdd 20) => #(= (sort %) %) (let [rdd (rdd/parallelise (mapv (fn [_] (rand-int 100)) (range 100)))] (rdd/take-ordered rdd 20 >) => #(= (sort %) (reverse %)))) (fact "take-sample works" (let [rdd (rdd/parallelise (into [] (range 100)))] (rdd/take-sample rdd false 10) => #(= (-> % distinct count) 10)) (let [rdd (rdd/parallelise (into [] (range 100)))] (rdd/take-sample rdd true 100 1) => #(< (-> % distinct count) 100)))) (facts "On basic RDD transformations + actions" :rdd (-> dummy-rdd (rdd/top 2)) => ["of anyone anywhere" "of anyone anywhere"] (-> (rdd/parallelise [1 2 3]) (rdd/top 2 <)) => [3 2] (-> (rdd/text-file "test/resources/rdd.txt" 2) (rdd/map-to-pair aot/to-pair) rdd/group-by-key rdd/num-partitions) => #(< 1 %) (-> (rdd/parallelise-pairs [[1 2] [3 4]]) rdd/collect) => [[1 2] [3 4]] (-> dummy-pair-rdd (rdd/group-by-key 7) rdd/num-partitions) => 7 (fact "aggregate and fold work" (-> (rdd/parallelise (range 10)) (rdd/aggregate 0 + +)) => 45 (-> (rdd/parallelise (range 10)) (rdd/fold 0 +)) => 45) (fact "subtract works" (let [left (rdd/parallelise [1 2 3 4 5]) right (rdd/parallelise [9 8 7 6 5])] (-> (rdd/subtract left right) rdd/collect set) => #{1 2 3 4} (rdd/num-partitions (rdd/subtract left right 3)) => 3)) (fact "random-split works" (->> (rdd/random-split dummy-rdd [0.9 0.1]) (map rdd/count)) => #(< (second %) (first %)) (->> (rdd/random-split dummy-rdd [0.1 0.9] 123) (map rdd/count)) => #(< (first %) (second %))) (fact "persist and unpersist work" (-> (rdd/parallelise [1]) (rdd/persist rdd/disk-only) rdd/storage-level) => rdd/disk-only (-> (rdd/parallelise [1]) (rdd/persist rdd/disk-only) rdd/unpersist rdd/storage-level) => #(not= % rdd/disk-only) (-> (rdd/parallelise [1]) (rdd/persist rdd/disk-only) (rdd/unpersist false) rdd/storage-level) => #(not= % rdd/disk-only)) (fact "max and min work" (-> (rdd/parallelise [-1 2 3]) (rdd/max <)) => 3 (-> (rdd/parallelise [-1 2 3]) (rdd/min >)) => 3) (fact "key-by works" (-> (rdd/parallelise ["a" "b" "c"]) (rdd/key-by identity) rdd/collect) => 
[["a" "a"] ["b" "b"] ["c" "c"]]) (fact "flat-map + filter works" (let [result-rdd (-> dummy-rdd (rdd/flat-map aot/split-spaces) (rdd/filter aot/equals-lewis))] (-> result-rdd rdd/collect count) => 18 (-> result-rdd rdd/name) => (complement nil?))) (fact "map works" (-> dummy-rdd (rdd/map count) rdd/collect) => #(every? integer? %)) (fact "reduce works" (-> dummy-rdd (rdd/map count) (rdd/reduce +)) => 2709 (-> (rdd/parallelise [1 2 3 4 5]) (rdd/reduce *)) => 120) (fact "map-to-pair + reduce-by-key + collect work" (-> dummy-pair-rdd (rdd/reduce-by-key +) rdd/collect) => [["PI:NAME:<NAME>END_PI’s Adventures in Wonderland" 18] ["at no cost and with" 27] ["of anyone anywhere" 27] ["by PI:NAME:<NAME>END_PI" 18] ["Project Gutenberg’s" 9] ["This eBook is for the use" 27]] (-> dummy-pair-rdd rdd/collect) => #(and (every? vector? %) (every? (comp (partial = 2) count) %) (every? (comp string? first) %) (every? (comp (partial = 1) second) %))) (fact "sort-by-key works" (-> dummy-pair-rdd (rdd/reduce-by-key +) rdd/sort-by-key rdd/collect) => #(= (sort %) %) (-> dummy-pair-rdd (rdd/reduce-by-key +) (rdd/sort-by-key false) rdd/collect) => #(= (sort %) (reverse %))) (fact "flat-map-to-pair works" (-> (rdd/parallelise ["hello world!" "hello spark and geni!" "the spark world is awesome!"]) (rdd/flat-map-to-pair aot/split-spaces-and-pair) (rdd/reduce-by-key +) rdd/collect set) => #{["spark" 2] ["world" 1] ["and" 1] ["geni!" 1] ["the" 1] ["awesome!" 1] ["is" 1] ["hello" 2] ["world!" 1]}) (fact "map-partitions works" (-> (rdd/parallelise ["abc def" "ghi jkl" "mno pqr"]) (rdd/map-partitions aot/map-split-spaces) rdd/collect) => ["abc" "def" "ghi" "jkl" "mno" "pqr"]) (fact "map-partitions-to-pair works" (-> (rdd/parallelise ["abc def"]) (rdd/map-partitions-to-pair aot/mapcat-split-spaces) rdd/collect) => [["abc" 1] ["def" 1]] (-> (rdd/parallelise ["abc def"]) (rdd/map-partitions-to-pair aot/mapcat-split-spaces true) rdd/num-partitions) => (rdd/default-parallelism)) (fact "map-partitions-with-index works" (-> (rdd/parallelise ["abc def" "ghi jkl" "mno pqr"]) (rdd/map-partitions-with-index aot/map-split-spaces-with-index) rdd/collect) => #(and (every? integer? 
(map first %)) (= (set (map second %)) #{"abc" "def" "ghi" "jkl" "mno" "pqr"}))) (fact "zips work" (let [left (rdd/parallelise ["a b c" "d e f g h i"]) right (rdd/parallelise ["j k l m n o" "pqr stu"])] (-> (rdd/zip left right) rdd/collect) => [["a b c" "j k l m n o"] ["d e f g h i" "pqr stu"]] (-> (rdd/zip-partitions left right aot/zip-split-spaces) rdd/collect) => ["aj" "bk" "cl" "dpqr" "estu"] (-> (rdd/zip-with-index left) rdd/collect) => [["a b c" 0] ["d e f g h i" 1]]) (let [zipped-values (rdd/collect (rdd/zip-with-unique-id dummy-rdd))] (->> zipped-values (map second) set count) => (rdd/count dummy-rdd))) (fact "sample works" (let [rdd dummy-rdd] (rdd/count (rdd/sample rdd true 0.1)) => #(< 2 % 27) (rdd/count (rdd/sample rdd false 0.1 123)) => #(< 2 % 27))) (fact "coalesce works" (let [rdd (rdd/parallelise ["abc" "def"])] (-> rdd (rdd/coalesce 1) rdd/collect) => ["abc" "def"] (-> rdd (rdd/coalesce 1 true) rdd/collect set) => #{"abc" "def"})) (fact "repartition works" (-> dummy-rdd (rdd/repartition 10) rdd/num-partitions) => 10) (fact "cartesian works" (let [left (rdd/parallelise ["abc" "def"]) right (rdd/parallelise ["def" "ghi"])] (rdd/collect (rdd/cartesian left right)) => [["abc" "def"] ["abc" "ghi"] ["def" "def"] ["def" "ghi"]])) (fact "cache works" (-> dummy-rdd rdd/cache rdd/count) => 126) (fact "distinct works" (-> dummy-rdd rdd/distinct rdd/collect count) => 6 (-> dummy-rdd (rdd/distinct 2) rdd/num-partitions) => 2 (-> dummy-rdd (rdd/distinct 3) rdd/name) => #(string/includes? % "[3]")) (fact "zip-partitions works" (let [left (rdd/parallelise ["a b c" "d e f g h i"]) right (rdd/parallelise ["j k l m n o" "pqr stu"])] (-> (rdd/zip-partitions left right aot/zip-split-spaces) rdd/collect)) => ["aj" "bk" "cl" "dpqr" "estu"]) (fact "union works" (let [rdd (rdd/parallelise ["abc" "def"])] (rdd/collect (rdd/union rdd rdd)) => ["abc" "def" "abc" "def"])) (fact "intersection works" (let [left (rdd/parallelise ["abc" "def"]) right (rdd/parallelise ["def" "ghi"])] (rdd/collect (rdd/intersection left right)) => ["def"])) (fact "glom works" (-> dummy-rdd rdd/glom rdd/count) => #(< % 126)))
[ { "context": "rones name for any input that someone\n\n;; Input: \"Peter DePaulo\"\n;; Output: \"Peterys DePagaryn\" \"Petobb DePabathe", "end": 194, "score": 0.9998524188995361, "start": 181, "tag": "NAME", "value": "Peter DePaulo" }, { "context": "at someone\n\n;; Input: \"Peter DePaulo\"\n;; Output: \"Peterys DePagaryn\" \"Petobb DePabatheon\" \n\n;; Constants\n(def ", "end": 225, "score": 0.9998531341552734, "start": 208, "tag": "NAME", "value": "Peterys DePagaryn" }, { "context": ": \"Peter DePaulo\"\n;; Output: \"Peterys DePagaryn\" \"Petobb DePabatheon\" \n\n;; Constants\n(def vowels (set (str/spli", "end": 246, "score": 0.9996931552886963, "start": 228, "tag": "NAME", "value": "Petobb DePabatheon" }, { "context": "lify how the list of names works\n(def got-names [\"Petyr Baelish\"\n \"Tyrion\tLannister\"\n ", "end": 402, "score": 0.9997749328613281, "start": 389, "tag": "NAME", "value": "Petyr Baelish" }, { "context": "\n(def got-names [\"Petyr Baelish\"\n \"Tyrion\tLannister\"\n \"Jaime\tLannister\"\n ", "end": 427, "score": 0.9993094801902771, "start": 421, "tag": "NAME", "value": "Tyrion" }, { "context": "t-names [\"Petyr Baelish\"\n \"Tyrion\tLannister\"\n \"Jaime\tLannister\"\n ", "end": 437, "score": 0.9763770699501038, "start": 429, "tag": "NAME", "value": "annister" }, { "context": " \"Tyrion\tLannister\"\n \"Jaime\tLannister\"\n \"Cersei\tLannister\"\n ", "end": 461, "score": 0.9995148181915283, "start": 456, "tag": "NAME", "value": "Jaime" }, { "context": " \"Tyrion\tLannister\"\n \"Jaime\tLannister\"\n \"Cersei\tLannister\"\n ", "end": 471, "score": 0.9765375852584839, "start": 463, "tag": "NAME", "value": "annister" }, { "context": " \"Jaime\tLannister\"\n \"Cersei\tLannister\"\n \"Robob\tStark\"\n ", "end": 496, "score": 0.9996172785758972, "start": 490, "tag": "NAME", "value": "Cersei" }, { "context": " \"Jaime\tLannister\"\n \"Cersei\tLannister\"\n \"Robob\tStark\"\n \"S", "end": 506, "score": 0.8647353649139404, "start": 497, "tag": "NAME", "value": "Lannister" }, { "context": " \"Cersei\tLannister\"\n \"Robob\tStark\"\n \"Sansa\tStark\"\n ", "end": 530, "score": 0.9995583295822144, "start": 525, "tag": "NAME", "value": "Robob" }, { "context": " \"Cersei\tLannister\"\n \"Robob\tStark\"\n \"Sansa\tStark\"\n \"A", "end": 536, "score": 0.7206034660339355, "start": 531, "tag": "NAME", "value": "Stark" }, { "context": "r\"\n \"Robob\tStark\"\n \"Sansa\tStark\"\n \"Arya\tStark\"\n ", "end": 560, "score": 0.9985902309417725, "start": 555, "tag": "NAME", "value": "Sansa" }, { "context": " \"Robob\tStark\"\n \"Sansa\tStark\"\n \"Arya\tStark\"\n \"Br", "end": 566, "score": 0.7111731767654419, "start": 563, "tag": "NAME", "value": "ark" }, { "context": "k\"\n \"Sansa\tStark\"\n \"Arya\tStark\"\n \"Bran\tStark\"\n ", "end": 589, "score": 0.999477207660675, "start": 585, "tag": "NAME", "value": "Arya" }, { "context": "rk\"\n \"Arya\tStark\"\n \"Bran\tStark\"\n \"Ricikon\tStark\"\n ", "end": 618, "score": 0.9986486434936523, "start": 614, "tag": "NAME", "value": "Bran" }, { "context": "rk\"\n \"Bran\tStark\"\n \"Ricikon\tStark\"\n \"Rhaegar\tTargaryen\"\n ", "end": 650, "score": 0.9993590116500854, "start": 643, "tag": "NAME", "value": "Ricikon" }, { "context": "\n \"Ricikon\tStark\"\n \"Rhaegar\tTargaryen\"\n \"Viserys\tTargaryen\"\n ", "end": 682, "score": 0.9158302545547485, "start": 675, "tag": "NAME", "value": "Rhaegar" }, { "context": " \"Ricikon\tStark\"\n \"Rhaegar\tTargaryen\"\n \"Viserys\tTargaryen\"\n ", "end": 692, "score": 0.8989500403404236, "start": 683, "tag": 
"NAME", "value": "Targaryen" }, { "context": " \"Rhaegar\tTargaryen\"\n \"Viserys\tTargaryen\"\n \"Daenerys\tTargaryen\"])\n", "end": 718, "score": 0.8938586711883545, "start": 711, "tag": "NAME", "value": "Viserys" }, { "context": " \"Rhaegar\tTargaryen\"\n \"Viserys\tTargaryen\"\n \"Daenerys\tTargaryen\"])\n\n;; Chanc", "end": 728, "score": 0.8616697192192078, "start": 719, "tag": "NAME", "value": "Targaryen" }, { "context": " \"Viserys\tTargaryen\"\n \"Daenerys\tTargaryen\"])\n\n;; Chance that bastard name will be", "end": 755, "score": 0.9958507418632507, "start": 747, "tag": "NAME", "value": "Daenerys" }, { "context": " \"Viserys\tTargaryen\"\n \"Daenerys\tTargaryen\"])\n\n;; Chance that bastard name will be chosen\n(d", "end": 765, "score": 0.895014226436615, "start": 756, "tag": "NAME", "value": "Targaryen" } ]
src/name_of_thrones/thronesify.cljs, from pndpo/name-of-thrones (1 star)
(ns name-of-thrones.thronesify
  (:require [clojure.string :as str]))

;; Name of Thrones generator
;; Goal: generate a Game of Thrones name for any name that someone provides
;; Input: "Peter DePaulo"
;; Output: "Peterys DePagaryn" "Petobb DePabatheon"

;; Constants
(def vowels (set (str/split "aeiouy" #"")))

;; TODO: Extend and simplify how the list of names works
(def got-names ["Petyr Baelish"
                "Tyrion Lannister"
                "Jaime Lannister"
                "Cersei Lannister"
                "Robob Stark"
                "Sansa Stark"
                "Arya Stark"
                "Bran Stark"
                "Ricikon Stark"
                "Rhaegar Targaryen"
                "Viserys Targaryen"
                "Daenerys Targaryen"])

;; Chance that bastard name will be chosen
(def chance-of-snow 0.1)

(defn check-for-snow []
  (<= (rand) chance-of-snow))

(defn constonant?
  "Checks if a char or single-character string is a consonant"
  [ch]
  (let [str-ch (str ch)]
    (if (> (count str-ch) 1)
      false
      (not (contains? vowels str-ch)))))

(defn find-idx-constonant [nm]
  (loop [str-vec (map-indexed vector (subs nm 1))]
    (if (constonant? (->> str-vec first second))
      (+ 2 (->> str-vec first first)) ;; refactor: magic number; breaks the string *after* the consonant
      (recur (rest str-vec)))))

(defn split-by-constonant [nm]
  (let [idx (find-idx-constonant nm)]
    (vec (map str/join (split-at idx nm)))))

(defn convert-name-vec [nm-vec]
  (vec (map #(split-by-constonant %) nm-vec)))

(defn split-name
  "Splits name by spaces and strips extra spaces"
  [nm]
  (str/split nm #"\s+"))

(defn break-name [nm]
  (convert-name-vec (split-name nm)))

(defn chop-names [names]
  (for [nm names]
    (break-name nm)))

(defn grab-part [thing thing2]
  (str (first thing) (second thing2)))

(defn thronesify
  "Combines the permutation of the names formatted [['n1' 'n2'] ['L1' 'L2']]"
  [got-name nm]
  (let [fname (grab-part (first nm) (first got-name))
        lname (grab-part (second nm) (second got-name))]
    (str fname " " lname)))

(defn grab-da-throne [got-names]
  (-> got-names rand-nth break-name))

(defn convert-name [og-name]
  (let [got-name (grab-da-throne got-names)]
    (if (check-for-snow)
      (let [fname (first (str/split og-name #" "))
            snow ["Snow" "Sand"]]
        (str fname " " (rand-nth snow)))
      (->> og-name
           break-name
           (thronesify got-name)))))
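A quick REPL sketch of how the generator behaves (hypothetical session; output differs on every call because the Game of Thrones name is drawn at random, and roughly one call in ten takes the bastard-name branch instead):

(require '[name-of-thrones.thronesify :as thronesify])

(thronesify/convert-name "Peter DePaulo")
;; => "Petyr DePish" when "Petyr Baelish" happens to be drawn:
;;    "Pet" + "yr" and "DeP" + "ish" are spliced at the first consonant break

(thronesify/convert-name "Peter DePaulo")
;; => "Peter Snow" or "Peter Sand" about 10% of the time (chance-of-snow)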
src/cljs/com/billpiel/guildsman/cytoscape.cljs, from bpiel/guildsman (135 stars)
(ns guildsman.cytoscape (:require [re-frame.core :as rf] [reagent.core :as r] [re-com.core :as rc])) (def c1 (volatile! nil)) (def a1 (atom nil)) (def last-node-click (volatile! [nil 0])) (defn now-ts [] (.getTime (js/Date.))) (defn on-click-node [xc-api evt-js] (def evt-js1 evt-js) (let [{target "target"} (js->clj evt-js) [last-node last-ts] @last-node-click] (if (= last-node target) (let [now (now-ts)] (println (- now last-ts)) (if (< (- now last-ts) 750) (cond (.isExpandable xc-api target) (.expand xc-api target) (.isCollapsible xc-api target) (.collapse xc-api target)) (println "single same"))) (println "different")) (vreset! last-node-click [target (now-ts)]) (rf/dispatch [:node-select (.id target)]))) (defn setup-cyto [cy] (println "START setup-cyto") (let [xc-api (.expandCollapse cy (clj->js {:layoutBy {:name "dagre" :nodeSep 600 :rankSep 100} :fisheye false :animate true :undoable false :cueEnabled false}))] (.collapseAll xc-api) (.on cy "tap" "node" (partial on-click-node xc-api)) (println "DONE setup-cyto"))) (defn cyto-state->cyto-gen-map [{:keys [id value]}] (println "cyto-state->cyto-gen-map") (println id) (clj->js (merge value {:container (.getElementById js/document id)}))) (defn gen-cyto [state'] (println "gen-cyto") (let [c (js/cytoscape (cyto-state->cyto-gen-map state'))] (vreset! c1 c) (println "gen-cyto POST vreset") c)) (defn dist [x1 y1 x2 y2] (let [dx (- x2 x1) dy (- y2 y1)] (Math/sqrt (+ (* dy dy) (* dx dx))))) (defn steeper? [x1 y1 x2 y2 x3 y3] (< (* (- x1 x3) (- y1 y2)) (* (- x1 x2) (- y1 y3)))) (defn inside-box? [x1 y1 x2 y2 xp yp] (and (or (< x1 xp x2) (> x1 xp x2)) (or (< y1 yp y2) (> y1 yp y2)))) (defn find-intersection [x1 y1 x2 y2 x3 y3] (let [dx (- x2 x1) dy (- y2 y1) k (/ (- (* dy (- x3 x1)) (* dx (- y3 y1))) (+ (* dy dy) (* dx dx))) x4 (- x3 (* k dy)) y4 (+ y3 (* k dx))] [x4 y4])) (defn rel-coords [x1 y1 x2 y2 x3 y3] (let [[x4 y4] (find-intersection x1 y1 x2 y2 x3 y3) d12 (dist x1 y1 x2 y2) d14 (dist x1 y1 x4 y4) d34 (dist x3 y3 x4 y4) st (if (steeper? x1 y1 x2 y2 x3 y3) 1 -1)] (when (inside-box? x1 y1 x2 y2 x4 y4) [(* d34 st) (/ d14 d12)]))) #_(defn perp-coords [x1 y1 x2 y2 xp yp] (let [dx (- x2 x1) dy (- y2 y1) k (/ (- (* dy (- xp x1)) (* dx (- yp y1))) (+ (* dy dy) (* dx dx))) x4 (- xp (* k dy)) y4 (+ yp (* k dx)) d (Math/sqrt (+ (* (- y2 y1) (- y2 y1)) (* (- x2 x1) (- x2 x1)))) ypt (Math/sqrt (+ (* (- y4 y1) (- y4 y1)) (* (- x4 x1) (- x4 x1)))) xpt (dist x1 y1 x2 y2 xp yp)] [xpt (/ ypt d)])) (defn js->xy [xy] ((juxt #(get % "x") #(get % "y")) (js->clj xy))) (defn node->xy [n] (-> (.position n) js->xy)) (defn manhattan [x1 y1 x2 y2] (+ (Math/abs (- x1 x2)) (Math/abs (- y1 y2)))) (defn p [x] (when false (println x))) (defn find-nearbys [x1 y1 x2 y2] (keep (fn [n] (let [[xp yp] (node->xy n)] (p "------") (when-let [pc (rel-coords x1 y1 x2 y2 xp yp)] (p pc) (p "------") pc))) (.toArray (.$ @c1 "node")))) #_(def e1 (-> (.$ @c1 "edge[source = 'loss']") .first)) (defn near-edge? [[xp yp]] (< -50. xp 50.)) (defn mk-ctrl-point [[x y]] [(if (<= x 0) (+ 50 x) (- x 50)) #_ (if (<= x 0) 100 -100) #_(if (<= x 0) (- -50 x) (- 50 x)) y]) (defn mk-ctrl-styles [ps] [(clojure.string/join " " (map str (map first ps))) (clojure.string/join " " (map str (map second ps)))]) (defn route-edge [edge] (let [[sx sy] (js->xy (.sourceEndpoint edge)) [dx dy] (js->xy (.targetEndpoint edge)) [cpd cpw] (mk-ctrl-styles (sort-by second (map mk-ctrl-point (filter near-edge? 
(find-nearbys sx sy dx dy)))))] (p [cpd cpw]) (p "===========") (-> edge #_ (.style "curveStyle" "unbundled-bezier") (.style "controlPointDistances" cpd) (.style "controlPointWeights" cpw)))) (defn route-all-edges [] #_(p "route-all-edges") (.map (.$ @c1 "edge") route-edge)) #_(route-all-edges) #_(def in1 (.setInterval js/window route-all-edges 100)) #_(.clearInterval js/window in1) #_(.map (.$ @c1 "edge[source = 'loss']") route-edge) #_(route-edge e1) #_(-> (.$ @c1 "node[") (.map node->xy) js->clj) #_(.fit @c1) #_(-> (.$ @c1 "edge[source = 'loss']") (.style "curveStyle" "unbundled-bezier") (.style "controlPointStepSize" "10") (.style "controlPointWeight" "0.5")) #_(def e1 (-> (.$ @c1 "edge[source = 'loss']") .first)) #_(vreset! c1 (js/cytoscape (clj->js {:container (.getElementById js/document "cyto2") :layout {:name "preset"} :style [{:selector "edge" :style {"curve-style" "unbundled-bezier" "edge-distances" "node-position" :control-point-distances [0] :control-point-weights [0.5]}}] :elements {:nodes [{:data {:id "a"} :position {:x 0 :y 0}} {:data {:id "b"} :position {:x 100 :y 100}} {:data {:id "c"}} {:data {:id "d"}} {:data {:id "e"}} {:data {:id "f"}} ] :edges [{:data {:source "a" :target "b"}} {:data {:source "c" :target "d"}}]}}))) #_(vreset! c1 (js/cytoscape (clj->js {:container (.getElementById js/document "cyto6") :style [{:selector "edge" :style {"curve-style" "unbundled-bezier" "edge-distances" "node-position" :control-point-distances [0] :control-point-weights [0.5]}}] :elements {:nodes [{:data {:id "a"}} {:data {:id "b"}}] :edges [{:data {:source "a" :target "b"}}]}}))) (defn cyto-comp-did-mount [state this] (vswap! state assoc :instance (gen-cyto @state)) (setup-cyto (:instance @state))) (defn cyto-reagent-render [state value] (let [{:keys [id]} @state] (println "cyto-reagent-render") (println id) [:div {:id id :style {:width "100%" :height "100%"}}])) (defn cyto-comp-will-update [state this [_ new-value]] (vswap! state assoc :value new-value)) (defn cyto-comp-did-update [state this [_ {:keys [config data highlighted selected] :as old-val}]] (let [{:keys [value] :as state'} @state] (cond #_(not= config (:config state')) (not= value old-val) (do (println "generate") (vswap! state assoc :instance (gen-cyto state')) (setup-cyto (:instance @state))) #_ ((not= data (:data state')) (do (println "load") (.load instance (clj->js (merge (:data state') {:unload true})))) (not= [highlighted selected] [(:highlighted state') (:selected state')]) (do ;(println "flush") (.flush instance)))))) (defn cytoscape [value] (println "cyto/cyto") (let [state (volatile! {:id (str (gensym "cyto")) :value value})] (r/create-class {:component-did-mount (partial cyto-comp-did-mount state) :component-did-update (partial cyto-comp-did-update state) :component-will-update (partial cyto-comp-will-update state) :reagent-render (partial cyto-reagent-render state)})))
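The cytoscape function above is a Reagent component: it takes one options map, fills in :container from the generated div id, and hands the result to js/cytoscape. A minimal, hypothetical parent view is sketched below; the element data is invented for illustration, and cytoscape.js plus the dagre and expand-collapse extensions used in setup-cyto are assumed to be loaded on the page.

(ns example.graph-panel
  (:require [guildsman.cytoscape :as cyto]))

(defn graph-panel []
  ;; the map below is an ordinary cytoscape.js init map, minus :container
  [cyto/cytoscape
   {:layout   {:name "preset"}
    :style    [{:selector "edge"
                :style {"curve-style" "unbundled-bezier"}}]
    :elements {:nodes [{:data {:id "loss"}}
                       {:data {:id "opt"}}]
               :edges [{:data {:source "loss" :target "opt"}}]}}])

;; Per the component code above, tapping a node dispatches [:node-select <id>]
;; via re-frame, and a second tap on the same node within 750 ms expands or
;; collapses it through the expand-collapse extension.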
src/main/clojure/czlab/cljant/antlib.clj
llnek/cljant
0
;; Licensed under the Apache License, Version 2.0 (the "License"); ;; you may not use this file except in compliance with the License. ;; You may obtain a copy of the License at ;; ;; http://www.apache.org/licenses/LICENSE-2.0 ;; ;; Unless required by applicable law or agreed to in writing, software ;; distributed under the License is distributed on an "AS IS" BASIS, ;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ;; See the License for the specific language governing permissions and ;; limitations under the License. ;; ;; Copyright Β© 2013-2022, Kenneth Leung. All rights reserved. (ns czlab.cljant.antlib "Apache Ant project & task wrappers. The anatomy of an ant task is a xml construct, where the attributes are termed as options and nested elements are treated as vectors inside of a vector." (:import [org.apache.tools.ant.taskdefs.optional.unix Symlink] [org.apache.tools.ant.types AbstractFileSet] [org.apache.tools.ant.taskdefs Delete] [java.beans FeatureDescriptor MethodDescriptor Introspector PropertyDescriptor] [java.lang.reflect Constructor Method] [java.util Map] [java.io File] [org.apache.tools.ant.listener AnsiColorLogger TimestampedLogger] [org.apache.tools.ant IntrospectionHelper ProjectComponent NoBannerLogger Project Target Task] [java.rmi.server UID] [clojure.lang APersistentMap]) (:require [clojure.java.io :as io] [clojure.core :as cc] [clojure.string :as cs]) (:refer-clojure :exclude [apply get sync concat replace])) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;(set! *warn-on-reflection* true) (def ^:private tmpdir (io/file (System/getProperty "java.io.tmpdir"))) (defn uid "Generate an unique identifier." {:tag String :arglists '([])} [] (.replaceAll (str (UID.)) "[:\\-]+" "")) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defmacro ^:private do-with [bindings & more] (assert (== 2 (count bindings))) (let [a (first bindings) b (last bindings)] `(let [~a ~b] ~@more ~a))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (declare cfg-nested) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (def ^:private skipped-tasks #{"ant" "antcall" "import" "include" "copydir" "copyfile" "copypath" "deltree" "execon" "javadoc2" "jlink" "jspc" "mimemail" "rename" "renameext" "filter" "antstructure" "antversion"}) (def ^:private pred-t (constantly true)) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defmacro ^:private nth?? [c p] `(first (drop (dec ~p) ~c))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defmacro ^:private trap! [& xs] `(throw (Exception. ~@xs))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn- ctor! "Create an object from class, and set it to point to the project." [^Class cz ^Project pj] (let [^Constructor c0 (try (->> (make-array Class 0) (.getConstructor cz)) (catch Throwable _)) ^Constructor c1 (if (nil? 
c0) (try (->> [Project] (into-array Class) (.getConstructor cz)) (catch Throwable _)))] (doto (or (some-> c0 (.newInstance (make-array Object 0))) (some-> c1 (.newInstance (into-array Object [pj])))) (some->> (.setProjectReference pj))))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;better colors that are not *dimmed* (def ^:private _ansi-logger_ (let [f (io/file tmpdir "czlab-antlogansi.colors") s (cs/join "\n" ["AnsiColorLogger.ERROR_COLOR=0;31" "AnsiColorLogger.WARNING_COLOR=0;35" "AnsiColorLogger.INFO_COLOR=0;36" "AnsiColorLogger.VERBOSE_COLOR=0;32" "AnsiColorLogger.DEBUG_COLOR=0;34"])] (if-not (.exists f) (spit f s)) (System/setProperty "ant.logger.defaults" (.getCanonicalPath f)) (doto (AnsiColorLogger.) (.setOutputPrintStream System/out) (.setErrorPrintStream System/err) (.setMessageOutputLevel Project/MSG_INFO)))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn- project<> ^Project [] (doto (Project.) .init (.setName "projx"))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn- exec-target [^Target t] (-> (.getProject t) (.executeTarget (.getName t)))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;trick to type hint to avoid reflection warning (defmacro ^:private gfdn [d] `(.getName ~(with-meta d {:tag 'FeatureDescriptor}))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (def ^:private create-opstrs ["addConfigured" "add" "create"]) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (def ^:private create-ops (zipmap create-opstrs (mapv #(count %) create-opstrs))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn- clj-map ([m] (clj-map m pred-t)) ([m pred] (persistent! (reduce #(let [[k v] %2] (if (pred k v) (assoc! %1 k v) %1)) (transient {}) m)))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defonce ^:private beans-cooked? (atom false)) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;create a default project. (defonce ^:private dftprj (atom (project<>))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;Get a list of task definitions. (def ^:private _tasks (clj-map (.getTaskDefinitions ^Project @dftprj) (fn [k v] (not (contains? skipped-tasks k))))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;Get all the type definitions. (def ^:private _types (clj-map (.getDataTypeDefinitions ^Project @dftprj ))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn- tst-spec-ops? "Test for special methods which aggregates nested elements." [^MethodDescriptor d] (let [pms (.. d getMethod getParameterTypes) mn (gfdn d) pc (count pms)] (or (and (cs/starts-with? mn "create") (== 0 pc)) (and (cs/starts-with? mn "add") (== 1 pc)) (and (cs/starts-with? mn "addConfigured") (== 1 pc))))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn- get-creator-info [descs] (persistent! (reduce #(if (tst-spec-ops? %2) (assoc! %1 (cs/lower-case (gfdn %2)) %2) %1) (transient {}) descs))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn- get-binfo [descs] (persistent! (reduce #(assoc! 
%1 (keyword (gfdn %2)) %2) (transient {}) descs))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn- get-bean-info [^Class cz] (let [b (Introspector/getBeanInfo cz)] {:props (get-binfo (.getPropertyDescriptors b)) ;;:ops (get-binfo (.getMethodDescriptors b)) :aggrs (get-creator-info (.getMethodDescriptors b))})) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn- beanie [m] (persistent! (reduce #(let [[_ v] %2] (assoc! %1 v (get-bean-info v))) (transient {}) m))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;cache bean-info of class (if-not @beans-cooked? (do (def ^:private _beans (atom (merge (beanie _tasks) (beanie _types)))) (reset! beans-cooked? true))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn- set-options "Use reflection to invoke setters -> to set options on the pojo: see ref. ant#IntrospectionHelper." [^Project pj pojo options] (let [z (class pojo) h (IntrospectionHelper/getHelper pj z)] (doseq [[k v] (->> (cond (instance? AbstractFileSet pojo) {:erroronmissingdir false} (= z Delete) {:includeemptydirs true}) (merge options))] (.setAttribute h pj pojo (name k) v)))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn- projcomp<> "Configure a project component." {:tag ProjectComponent} ([pj pc options nested] (set-options pj pc options) (cfg-nested pj pc nested) pc) ([pj pc options] (projcomp<> pj pc options nil)) ([pj pc] (projcomp<> pj pc nil nil))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn- nest "Element is like [:fileset {:a b :c d} [[nested ...][nested ...]]]. At the end of this function, the parent would have *added* this element as a child object" [pj par elem aggrs] (let [s (cs/lower-case (name (first elem))) dc (or (cc/get aggrs (str "addconfigured" s)) (cc/get aggrs (str "add" s)) (cc/get aggrs (str "create" s)))] (if-some [md (some-> ^MethodDescriptor dc .getMethod)] (let [rt (.getReturnType md) mn (.getName md) pms (.getParameterTypes md)] (if (cs/starts-with? mn "add") (let [^Class dt (cc/get _types s) ^Class p1 (first pms)] (do-with [co (if (some->> dt (.isAssignableFrom p1)) (ctor! dt pj) (ctor! p1 pj))] (.invoke md par (into-array Object [co])))) (.invoke md par (make-array Object 0)))) (trap! (str "Unknown element " (first elem)))))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn- cfg-nested "*nested* typically is a vector of vectors. Each vector models an xml element. However, a special case is when nested is a string in which case the method addText is called." [pj par nested] (let [pz (class par) ;; if we find a new class, bean it and cache it b (cc/get @_beans pz) {:keys [aggrs] :as B} (if (nil? b) (do-with [m (get-bean-info pz)] (swap! _beans assoc pz m)) b)] (if (nil? B) (trap! (str "no bean info for " pz))) (cond (string? nested) (if-some [dc (cc/get aggrs "addtext")] (-> (.getMethod ^MethodDescriptor dc) (.invoke par (into-array Object [nested]))) (trap! (str "wrong use of text string for " pz))) (or (nil? nested) (coll? nested)) (doseq [p nested :let [p2 (second p) pc (count p) p3 (nth?? p 3) n (nest pj par p aggrs)]] ;; deal with cases where options are skipped (if (and (== 2 pc) (not (map? 
p2))) (projcomp<> pj n nil p2) (projcomp<> pj n p2 p3)))))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn- ctask<> ^Task [^Project p ^String tt ^String tm] (doto (.createTask p tt) (.setTaskName tm))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn- config-task "Reify and configure actual ant tasks." ^Task [^Project pj ^Target target {:keys [tname ttype options nested]}] (do-with [tk (ctask<> pj ttype tname)] (->> (doto tk (.setProject pj) (.setOwningTarget target)) (.addTask target)) (set-options pj tk options) (cfg-nested pj tk nested))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn- proj-ant-tasks "Bind all the tasks to a target and a project." ^Target [^String target tasks] (do-with [tg (Target.)] (let [pj @dftprj] (.setName tg (or target "")) (.addOrReplaceTarget ^Project pj tg) (doseq [t tasks] (config-task pj tg t))))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn- run-target "Run ant target." [target tasks] (-> (proj-ant-tasks target tasks) exec-target)) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn run-target* "Run an ant target." {:arglists '([target & tasks])} [^String target & tasks] (run-target target tasks)) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn run* "Run these ant tasks." {:arglists '([& tasks])} [& tasks] (run-target "" tasks)) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defmacro ^:private ant-task<> "Generate wrapper function for an ant task." [pj sym docstr func] (let [s (str func) tm (cs/lower-case (subs s (+ 1 (or (cs/last-index-of s ".") -1))))] `(defn ~sym ~docstr ;;{:no-doc true} ;; if not options then it could be nested ([~'options] (if-not (map? ~'options) (~sym nil ~'options) (~sym ~'options nil))) ([] (~sym nil nil)) ([~'options ~'nestedElements] (array-map :tname ~tm :ttype ~s :options (or ~'options {}) :nested (or ~'nestedElements [])))))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defmacro ^:private decl-ant-tasks "Introspect the default project and cache all registered ant tasks." [pj] (let [ts (map #(symbol %) (keys _tasks))] `(do ~@(map (fn [a] `(ant-task<> ~pj ~a "" ~a)) ts)))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;this is the key process - extracting task information from ;ant.jar (decl-ant-tasks @dftprj) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn read-properties* "Read all ant properties." {:arglists '([])} [] (let [f (io/file tmpdir (uid)) ps (java.util.Properties.)] (run* (echoproperties {:failonerror false :destfile f})) (with-open [inp (io/input-stream f)] (.load ps inp)) (clj-map ps))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn clean-dir* "Clean an existing dir or create it." {:arglists '([d] [d options])} ([d] (clean-dir* d nil)) ([d {:keys [quiet] :or {quiet true}}] (let [dir (io/file d)] (if (.exists dir) (run* (delete {:removeNotFollowedSymlinks true :quiet quiet} [[:fileset {:followsymlinks false :dir dir} [[:include {:name "**/*"}]]]])) (.mkdirs dir))))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn delete-dir* "Remove a directory." 
{:arglists '([d] [d options])} ([d] (delete-dir* d nil)) ([d {:keys [quiet] :or {quiet true}}] (let [dir (io/file d)] (when (.exists dir) (run* (delete {:removeNotFollowedSymlinks true :quiet quiet} [[:fileset {:followsymlinks false :dir dir}]])))))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn copy-file* "Copy a file to the target folder." {:arglists '([file toDir])} [file toDir] (.mkdirs (io/file toDir)) (run* (copy {:file file :todir toDir}))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn move-file* "Move a file to the target folder." {:arglists '([file toDir])} [file toDir] (.mkdirs (io/file toDir)) (run* (move {:file file :todir toDir}))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn delete-link* "Delete a file system symbolic link." {:arglists '([link])} [link] (run* (symlink {:action "delete" :link link}))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn create-link* "Create a file system symbolic link." {:arglists '([link target] [link target overwrite?])} ([link target] (create-link* link target true)) ([link target overwrite?] (run* (symlink {:overwrite (boolean overwrite?) :action "single" :link link :resource target})))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn disable-ant-logger* "Remove build logger." {:arglists '([])} [] (if (-> (.getBuildListeners ^Project @dftprj) (.contains _ansi-logger_)) (.removeBuildListener ^Project @dftprj _ansi-logger_))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defn enable-ant-logger* "Add build logger." {:arglists '([])} [] (if-not (-> (.getBuildListeners ^Project @dftprj) (.contains _ansi-logger_)) (.addBuildListener ^Project @dftprj _ansi-logger_))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;EOF
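To make the vector DSL described in the namespace docstring concrete, here is a minimal, hypothetical sketch. The mkdir and copy wrappers are simply whatever decl-ant-tasks generated from the local ant.jar, and the directory names are placeholders, not part of the original source:

(require '[czlab.cljant.antlib :as a])

;; An options map plus a vector of nested [:tag {options} [children ...]]
;; elements -- the Clojure analogue of
;; <copy todir="out"><fileset dir="src"><include name="**/*.clj"/></fileset></copy>
(a/run* (a/mkdir {:dir "out"})
        (a/copy {:todir "out"}
                [[:fileset {:dir "src"}
                  [[:include {:name "**/*.clj"}]]]]))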
Clojure/src/chapter_4/chapter_4_q3.clj
Kiandr/crackingcodinginterview
0
(ns ^{:author "Leeor Engel"} chapter-4.chapter-4-q3) (defn list-of-depths-helper [node depth] (if (some? node) (merge-with concat {depth (list (:data node))} (list-of-depths-helper (:left node) (inc depth)) (list-of-depths-helper (:right node) (inc depth))) nil)) (defn list-of-depths [tree] (list-of-depths-helper tree 0))
src/scripts/clojure/taiga/scripts/quantiles/all.clj
wahpenayo/taiga
4
(set! *warn-on-reflection* true)
(set! *unchecked-math* :warn-on-boxed)
(ns ^{:author "wahpenayo at gmail dot com"
      :since "2017-11-10"
      :date "2017-11-15"
      :doc "Complete data generation, train, test."}
    taiga.scripts.quantiles.all
  (:require [zana.api :as z]
            [taiga.scripts.quantiles.defs :as defs]))
;;----------------------------------------------------------------
(println (str *ns*) defs/n defs/nterms)
(z/seconds "data" (defs/generate-data defs/n))
(z/seconds "mean" (defs/mean-regression defs/n))
(z/seconds "measure" (defs/real-probability-measure defs/n))
(doseq [prefix ["mean" "measure" "test"]]
  (z/seconds (print-str "predict" prefix) (defs/predict defs/n prefix)))
(doseq [prefix ["mean" "measure" "test"]]
  (z/seconds (print-str "cost" prefix) (defs/relative-cost defs/n prefix)))
;;----------------------------------------------------------------
webapp/src/cljc/lipas/reports.cljc
lipas-liikuntapaikat/lipas
49
(ns lipas.reports (:require [lipas.utils :as utils] [lipas.data.prop-types :as prop-types])) (defn- all-energy-data-exists? [{:keys [energy-consumption]}] (let [{:keys [electricity-mwh heat-mwh water-m3]} energy-consumption] (and (some? electricity-mwh) (some? heat-mwh) (some? water-m3)))) (defn- get-values [sites field-kw] (->> sites (map (comp field-kw :energy-consumption)) (remove nil?))) (defn- ->data-point [{:keys [lipas-id name energy-consumption]}] (-> energy-consumption (assoc :energy-mwh (+ (:heat-mwh energy-consumption 0) (:electricity-mwh energy-consumption 0))) (assoc :name name) (assoc :lipas-id lipas-id))) (defn energy-report [sites] {:total-count (count sites) :electricity-mwh (utils/simple-stats (get-values sites :electricity-mwh)) :heat-mwh (utils/simple-stats (get-values sites :heat-mwh)) :water-m3 (utils/simple-stats (get-values sites :water-m3)) :data-points (->> sites (filter all-energy-data-exists?) (map ->data-point)) :hall-of-fame (->> sites (filter all-energy-data-exists?) (map #(select-keys % [:lipas-id :name])))}) (defn ->row [fields m] (reduce (fn [res f] (let [v (utils/get-in-path m f)] (conj res (if (coll? v) (utils/join v) v)))) [] fields)) (def basic-fields {"lipas-id" {:fi "Lipas-id" :en "Lipas-id" :se "Lipas-id"} "name" {:fi "Nimi suomeksi" :en "Finnish name" :se "Namn pΓ₯ finska"} "name-localized.se" {:fi "Nimi ruotsiksi" :en "Swedish name" :se "Namn pΓ₯ svenska"} "marketing-name" {:fi "Markkinointinimi" :en "Marketing name" :se "VarumΓ€rkesnamn"} "event-date" {:fi "Muokattu viimeksi" :en "Last modified" :se "Senaste redigerad"} "owner" {:fi "Omistaja" :en "Owner" :se "Γ„gare"} "admin" {:fi "YllΓ€pitΓ€jΓ€" :en "Administrator" :se "AdministratΓΆr"} "construction-year" {:fi "Rakennusvuosi" :en "Construction year" :se "ByggΓ₯r"} "renovation-years" {:fi "Peruskorjausvuodet" :en "Renovation years" :se "RenoveringsΓ₯r"} "phone-number" {:fi "Puhelinnumero" :en "Phone number" :se "Telefonnummer"} "email" {:fi "SΓ€hkΓΆposti" :en "Email" :se "Epost"} "www" {:fi "WWW" :en "WWW" :se "WWW"} "comment" {:fi "Kommentti" :en "Comment" :se "Ytterligare information"} "type.type-code" {:fi "Tyyppikoodi" :en "Type code" :se "Typkod"} "type.type-name" {:fi "Liikuntapaikkatyyppi" :en "Type" :se "Typ"} "location.city.city-code" {:fi "Kuntanumero" :en "City code" :se "Kommunkod"} "location.city.city-name" {:fi "Kunta" :en "City" :se "Stat"} "location.city.neighborhood" {:fi "Kuntaosa" :en "Neighborhood" :se "Kommundel"} "location.address" {:fi "Katuosoite" :en "Address" :se "Adress"} "location.postal-code" {:fi "Postinumero" :en "Postal code" :se "Postnummer"} "location.postal-office" {:fi "Postitoimipaikka" :en "Postal office" :se "Postkontor"}}) (def prop-fields (reduce (fn [res [k v]] (assoc res (str "properties." 
(name k)) (:name v))) {} prop-types/all)) (def meta-fields {"search-meta.location.wgs84-point" {:fi "Koordinaatit (WGS84)" :se "Koordinater (WGS84)" :en "Coordinates (WGS84)"} "search-meta.location.avi-area.name.fi" {:fi "AVI-alue" :en "AVI-area" :se "AVI"} "search-meta.location.province.name.fi" {:fi "Maakunta" :en "Province" :se "Landskap"} "search-meta.type.main-category.name.fi" {:fi "Liikuntapaikkatyypin pÀÀryhmΓ€" :en "Type main-category" :se "Typ huvud kategori"} "search-meta.type.sub-category.name.fi" {:fi "Liikuntapaikkatyypin alaryhmΓ€" :en "Type sub-category" :se "Typ under kategori"}}) (def area-fields (select-keys prop-fields ["properties.area-m2" "properties.area-km2" "properties.pool-water-area-m2"])) (def surface-material-fields (select-keys prop-fields ["properties.surface-material" "properties.surface-material-info" "properties.running-track-surface-material" "properties.training-spot-surface-material" "properties.inruns-material" "properties.skijump-hill-material"])) (def length-fields (select-keys prop-fields ["properties.field-length-m" "properties.hall-length-m" "properties.inner-lane-length-m" "properties.route-length-km" "properties.lit-route-length-km" "properties.pool-length-m" "properties.sprint-track-length-m" "properties.track-length-m" "properties.beach-length-m" "properties.longest-slope-m" "properties.shortest-slope-m"])) (def width-fields (select-keys prop-fields ["properties.field-width-m" "properties.climbing-wall-width-m" "properties.hall-width-m" "properties.pool-width-m" "properties.route-width-m" "properties.track-width-m"])) (def height-fields (select-keys prop-fields ["properties.height-m" "properties.climbing-wall-height-m"])) (def other-measures (select-keys prop-fields ["properties.p-point" "properties.k-point" "properties.altitude-difference"])) (def measure-fields (merge area-fields length-fields width-fields height-fields other-measures)) (def service-fields (select-keys prop-fields ["properties.equipment-rental?" "properties.ski-service?" "properties.kiosk?" "properties.shower?" "properties.parking-place?" "properties.playground?" "properties.pier?" "properties.rest-places-count" "properties.toilet?" "properties.changing-rooms?" "properties.sauna?"])) (def activity-fields (select-keys prop-fields ["properties.rifle-shooting?" "properties.shotgun-shooting?" "properties.pistol-shooting?" "properties.free-rifle-shooting?" "properties.air-gun-shooting?" "properties.shooting-positions-count" "properties.tatamis-count" "properties.badminton-courts-count" "properties.hammer-throw-places-count" "properties.landing-places-count" "properties.weight-lifting-spots-count" "properties.exercise-machines-count" "properties.ice-rinks-count" "properties.futsal-fields-count" "properties.training-wall?" "properties.winter-swimming?" "properties.ski-track-traditional?" "properties.gymnastics-space?" "properties.shotput-count" "properties.fencing-bases-count" "properties.basketball-fields-count" "properties.freestyle-slope?" "properties.throwing-sports-spots-count" "properties.range?" "properties.green?" "properties.longjump-places-count" "properties.holes-count" "properties.boat-places-count" "properties.outdoor-exercise-machines?" "properties.cosmic-bowling?" "properties.spinning-hall?" 
"properties.climbing-routes-count" "properties.handball-fields-count" "properties.javelin-throw-places-count" "properties.lit-slopes-count" "properties.fields-count" "properties.table-tennis-count" "properties.volleyball-fields-count" "properties.gymnastic-routines-count" "properties.boxing-rings-count" "properties.football-fields-count" "properties.polevault-places-count" "properties.climbing-wall?" "properties.archery?" "properties.jumps-count" "properties.discus-throw-places" "properties.wrestling-mats-count" "properties.show-jumping?" "properties.curling-lanes-count" "properties.bowling-lanes-count" "properties.floorball-fields-count" "properties.highjump-places-count" "properties.other-platforms?" "properties.toboggan-run?" "properties.halfpipe-count" "properties.tennis-courts-count" "properties.slopes-count" "properties.snowpark-or-street?" "properties.circular-lanes-count" "properties.boat-launching-spot?" "properties.plastic-outrun?" "properties.ice-climbing?" "properties.squash-courts-count" "properties.group-exercise-rooms-count" "properties.sprint-lanes-count"])) (def other-fields (select-keys prop-fields ["properties.accessibility-info" "properties.basketball-field-type" "properties.summer-usage?" "properties.winter-usage?" "properties.ice-reduction?" "properties.ligthing?" "properties.lifts-count" "properties.school-use?" "properties.skijump-hill-type" "properties.track-type" "properties.covered-stand-person-count" "properties.stand-capacity-person" "properties.eu-beach?" "properties.may-be-shown-in-excursion-map-fi?" "properties.ski-track-freestyle?" "properties.free-use?" "properties.heating?"])) (def competition-fields (select-keys prop-fields ["properties.match-clock?" "properties.automated-timing?" "properties.automated-scoring?" "properties.scoreboard?" "properties.loudspeakers?" 
"properties.finish-line-camera?"])) (def fields (merge basic-fields meta-fields prop-fields)) (def default-fields (select-keys fields ["lipas-id" "name" ;;"marketing-name" "type.type-name" "location.city.city-name" "properties.surface-material" "properties.area-m2"])) (def stats-metrics {"investments" {:fi "Investoinnit" :se "Investeringar" :en "Investments"} "operating-expenses" {:fi "KΓ€yttΓΆkustannukset" :se "Driftskostnader" :en "Operating expenses"} "operating-incomes" {:fi "KΓ€yttΓΆtuotot" :se "DriftsintΓ€kter" :en "Operating incomes"} "subsidies" {:fi "Kunnan myΓΆntΓ€mΓ€t avustukset" :se "UnderstΓΆd och bidrag frΓ₯n kommunen" :en "Subsidies"} "net-costs" {:fi "Nettokustannukset" :se "Nettokostnader" :en "Net costs"}}) (def city-services {"sports-services" {:fi "Liikuntatoimi" :se "IdrottsvΓ€sende" :en "Sports services"} "youth-services" {:fi "Nuorisotoimi" :se "UngdomsvΓ€sende" :en "Youth services"}}) (def stats-units {"1000-euros" {:fi "Tuhatta €" :se "1000 €" :en "€1000"} "euros-per-capita" {:fi "€ / Asukas" :se "€ / InvΓ₯nare" :en "€ / Capita"}}) (def age-structure-groupings {"owner" {:fi "Omistaja" :se "Γ„gare" :en "Owner"} "admin" {:fi "YllΓ€pitΓ€jΓ€" :se "AdministratΓΆr" :en "Administrator"}}) (def sports-stats-groupings {"location.city.city-code" {:fi "Kunta" :se "Kommun" :en "City"} "type.type-code" {:fi "Tyyppi" :se "Typ" :en "Type"}}) (def sports-stats-metrics {"sites-count" {:fi "Liikuntapaikkojen lkm" :se "Antal av platser" :en "Sports facility count"} "sites-count-p1000c" {:fi "Liikuntapaikkojen lkm/1000 asukasta" :se "Antal av platser/1000 invΓ₯nare" :en "Sports facility count/1000 person"} "area-m2-sum" {:fi "Liikuntapinta-ala mΒ²" :se "Idrottsareal mΒ²" :en "Surface area mΒ²"} "area-m2-pc" {:fi "Liikuntapinta-ala mΒ²/asukas" :se "Idrottsareal mΒ²/invΓ₯nare" :en "Surface area mΒ²/capita"} "length-km-sum" {:fi "Reittien pituus km" :se "Idrottsrutters totalt lΓ€ngd km" :en "Routes total length km"} "length-km-pc" {:fi "Reittien pituus km/asukas" :se "Idrottsrutters totalt lΓ€ngd km/invΓ₯nare" :en "Routes total length km/capita"}}) (def finance-stats-groupings {"avi" {:fi "AVI-alue" :se "AVI" :en "AVI-area"} "province" {:fi "Maakunta" :se "Landskap" :en "Province"} "city" {:fi "Kunta" :se "Kommun" :en "City"}}) (def subsidies-groupings (merge finance-stats-groupings {"type" {:fi "Tyyppi" :se "Typ" :en "Type"}})) (def subsidies-issuers {"AVI" {:fi "AVI" :se "AVI" :en "AVI"} "OKM" {:fi "OKM" :se "OKM" :en "OKM"}}) (defn- service-avgs [service year cities] (let [ms (map (comp #(get % service) :services #(get % year) :stats) cities) ks (-> stats-metrics keys (->> (map keyword)))] (reduce (fn [res k] (assoc res k (->> ms (map k) (remove nil?) utils/simple-stats))) {} ks))) (defn calc-avgs [year cities] {:population (->> cities (map (comp :population #(get % year) :stats)) (remove nil?) 
utils/simple-stats) :services {:youth-services (service-avgs :youth-services year cities) :sports-services (service-avgs :sports-services year cities) :youth-services-pc (service-avgs :youth-services-pc year cities) :sports-services-pc (service-avgs :sports-services-pc year cities)}}) (def calc-avgs-memo (memoize calc-avgs)) (defn calc-stats [years cities] (reduce (fn [res year] (assoc res year (calc-avgs-memo year cities))) {} years)) (defn calc-per-capita [population m] (reduce (fn [m [k v]] (assoc m k (/ (* 1000 v) population))) {} m)) (defn finance-report [city-codes all-cities] (let [cities (utils/index-by :city-code all-cities) years (into #{} (mapcat (comp keys :stats)) all-cities)] {:country-averages (calc-stats years all-cities) :data-points (select-keys cities city-codes)})) (defn calculate-stats-by-city [aggs-data pop-data] (reduce (fn [res m] (let [city-code (:key m) population (pop-data city-code) m2-sum (-> m :area_m2_stats :sum) km-sum (-> m :length_km_stats :sum) area-m2-stats (-> m :area_m2_stats (assoc :pc (when (and population m2-sum) (double (/ m2-sum population)))) (utils/->prefix-map "area-m2-")) length-km-stats (-> m :length_km_stats (assoc :pc (when (and population km-sum) (double (/ km-sum population)))) (utils/->prefix-map "length-km-")) sites-count (:doc_count m) entry (merge area-m2-stats length-km-stats {:population population :sites-count sites-count :sites-count-p1000c (when (and population sites-count) (double (/ sites-count (/ population 1000))))})] (assoc res city-code entry))) {} aggs-data)) (defn calculate-stats-by-type [aggs-data pop-data city-codes] (reduce (fn [res m] (let [type-code (:key m) populations (if (empty? city-codes) pop-data ;; all (select-keys pop-data city-codes)) population (->> populations vals (reduce +)) m2-sum (-> m :area_m2_stats :sum) km-sum (-> m :length_km_stats :sum) area-m2-stats (-> m :area_m2_stats (assoc :pc (when (and population m2-sum) (double (/ m2-sum population)))) (utils/->prefix-map "area-m2-")) length-km-stats (-> m :length_km_stats (assoc :pc (when (and population km-sum) (double (/ km-sum population)))) (utils/->prefix-map "length-km-")) sites-count (:doc_count m) entry (merge area-m2-stats length-km-stats {:population population :sites-count sites-count :sites-count-p1000c (when (and population sites-count) (double (/ sites-count (/ population 1000))))})] (assoc res type-code entry))) {} aggs-data))
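;; A minimal, self-contained sketch of the per-capita arithmetic used by the
;; calculate-stats-by-city and calculate-stats-by-type functions above: surface
;; area is divided by population, and facility counts are divided by population
;; expressed in thousands. The population, m2-sum and sites-count values below
;; are made-up illustrative numbers, not real LIPAS data.
(comment
  ;; With these hypothetical inputs the map evaluates to
  ;; {:area-m2-pc 2.5, :sites-count-p1000c 4.0}
  (let [population  10000
        m2-sum      25000.0
        sites-count 40]
    {:area-m2-pc         (double (/ m2-sum population))
     :sites-count-p1000c (double (/ sites-count (/ population 1000)))}))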
test/lucid/aether_test.clj
willcohen/lucidity
(ns lucid.aether-test
  (:use hara.test)
  (:require [lucid.aether :refer :all]))

^{:refer lucid.aether/resolve-hierarchy :added "1.1"}
(fact "shows the dependency hierarchy for all packages"

  (resolve-hierarchy '[midje "1.6.3"])
  => '{[midje/midje "1.6.3"]
       [{[ordered/ordered "1.2.0"] []}
        {[org.clojure/math.combinatorics "0.0.7"] []}
        {[org.clojure/core.unify "0.5.2"] []}
        {[utilize/utilize "0.2.3"]
         [{[org.clojure/tools.macro "0.1.1"] []}
          {[joda-time/joda-time "2.0"] []}
          {[ordered/ordered "1.0.0"] []}]}
        {[colorize/colorize "0.1.1"] []}
        {[org.clojure/tools.macro "0.1.5"] []}
        {[dynapath/dynapath "0.2.0"] []}
        {[swiss-arrows/swiss-arrows "1.0.0"] []}
        {[org.clojure/tools.namespace "0.2.4"] []}
        {[slingshot/slingshot "0.10.3"] []}
        {[commons-codec/commons-codec "1.9"] []}
        {[gui-diff/gui-diff "0.5.0"]
         [{[org.clojars.trptcolin/sjacket "0.1.3"]
           [{[net.cgrand/regex "1.1.0"] []}
            {[net.cgrand/parsley "0.9.1"]
             [{[net.cgrand/regex "1.1.0"] []}]}]}
          {[ordered/ordered "1.2.0"] []}]}
        {[clj-time/clj-time "0.6.0"]
         [{[joda-time/joda-time "2.2"] []}]}]})

^{:refer lucid.aether/resolve-dependencies :added "1.1"}
(fact "resolves maven dependencies for a set of coordinates"

  (resolve-dependencies '[prismatic/schema "1.1.3"])
  => '[[prismatic/schema "1.1.3"]]

  (resolve-dependencies '[midje "1.6.3"])
  => '[[utilize/utilize "0.2.3"]
       [swiss-arrows/swiss-arrows "1.0.0"]
       [slingshot/slingshot "0.10.3"]
       [org.clojure/tools.namespace "0.2.4"]
       [org.clojure/tools.macro "0.1.5"]
       [org.clojure/math.combinatorics "0.0.7"]
       [org.clojure/core.unify "0.5.2"]
       [org.clojars.trptcolin/sjacket "0.1.3"]
       [ordered/ordered "1.2.0"]
       [net.cgrand/regex "1.1.0"]
       [net.cgrand/parsley "0.9.1"]
       [midje/midje "1.6.3"]
       [joda-time/joda-time "2.2"]
       [gui-diff/gui-diff "0.5.0"]
       [dynapath/dynapath "0.2.0"]
       [commons-codec/commons-codec "1.9"]
       [colorize/colorize "0.1.1"]
       [clj-time/clj-time "0.6.0"]])

^{:refer lucid.aether/populate-artifact :added "1.2"}
(fact "allows coordinate to fill rest of values"

  (populate-artifact '[midje "1.6.3"]
                     {:artifacts [{:extension "pom" :file "midje.pom"}
                                  {:extension "jar" :file "midje.jar"}]})
  => {:artifacts [{:extension "pom", :file "midje.pom",
                   :artifact "midje", :group "midje", :version "1.6.3"}
                  {:extension "jar", :file "midje.jar",
                   :artifact "midje", :group "midje", :version "1.6.3"}]})

^{:refer lucid.aether/install-artifact :added "1.2"}
(comment "installs artifacts to the given coordinate"

  (install-artifact '[im.chit/hara.io.classpath "2.4.8"]
                    {:artifacts [{:file "hara_io_classpath-2.4.8.jar"
                                  :extension "jar"}
                                 {:file "hara_io_classpath-2.4.8.pom"
                                  :extension "pom"}]}))

^{:refer lucid.aether/deploy-artifact :added "1.2"}
(comment "deploys artifacts to the given coordinate"

  (deploy-artifact '[im.chit/hara.io.classpath "2.4.8"]
                   {:artifacts  [{:file "hara_io_classpath-2.4.8.jar"
                                  :extension "jar"}
                                 {:file "hara_io_classpath-2.4.8.pom"
                                  :extension "pom"}
                                 {:file "hara_io_classpath-2.4.8.pom.asc"
                                  :extension "pom.asc"}
                                 {:file "hara_io_classpath-2.4.8.jar.asc"
                                  :extension "jar.asc"}]
                    :repository {:id "clojars"
                                 :url "https://clojars.org/repo/"
                                 :authentication {:username "zcaudate"
                                                  :password "hello"}}}))
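;; A small usage sketch built only on the behaviour documented by the facts
;; above: resolve-dependencies returns a flat vector of [group/artifact "version"]
;; coordinates, so the result can be post-processed with ordinary sequence
;; functions. Keeping only the org.clojure artifacts is an illustrative choice,
;; not part of the lucid.aether API.
(comment
  (->> (resolve-dependencies '[midje "1.6.3"])
       (filterv (fn [[artifact _version]]
                  (= "org.clojure" (namespace artifact)))))
  ;; per the resolve-dependencies fact above, this should yield:
  ;; [[org.clojure/tools.namespace "0.2.4"]
  ;;  [org.clojure/tools.macro "0.1.5"]
  ;;  [org.clojure/math.combinatorics "0.0.7"]
  ;;  [org.clojure/core.unify "0.5.2"]]
  )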
1st_week/task01/src/csvdb/core.clj
Seryiza/clojurecourse-soulutions
(ns csvdb.core
  (:require [clojure-csv.core :as csv]))

(defn- parse-int [int-str]
  (Integer/parseInt int-str))

(def student-tbl (csv/parse-csv (slurp "student.csv")))
(def subject-tbl (csv/parse-csv (slurp "subject.csv")))
(def student-subject-tbl (csv/parse-csv (slurp "student_subject.csv")))

;; (table-keys student-tbl)
;; => [:id :surname :year :group_id]
;;
;; Hint: vec, map, keyword, first
(defn table-keys [tbl]
  (vec (map keyword (first tbl))))

;; (key-value-pairs [:id :surname :year :group_id] ["1" "Ivanov" "1996"])
;; => (:id "1" :surname "Ivanov" :year "1996")
;;
;; Hint: flatten, map, list
(defn key-value-pairs [tbl-keys tbl-record]
  (flatten (map list tbl-keys tbl-record)))

;; (data-record [:id :surname :year :group_id] ["1" "Ivanov" "1996"])
;; => {:surname "Ivanov", :year "1996", :id "1"}
;;
;; Hint: apply, hash-map, key-value-pairs
(defn data-record [tbl-keys tbl-record]
  (apply hash-map (key-value-pairs tbl-keys tbl-record)))

;; (data-table student-tbl)
;; => ({:surname "Ivanov", :year "1996", :id "1"}
;;     {:surname "Petrov", :year "1996", :id "2"}
;;     {:surname "Sidorov", :year "1997", :id "3"})
;;
;; Hint: let, map, next, table-keys, data-record
(defn data-table [tbl]
  (let [keys    (table-keys tbl)
        records (next tbl)]
    (map #(data-record keys %) records)))

;; (str-field-to-int :id {:surname "Ivanov", :year "1996", :id "1"})
;; => {:surname "Ivanov", :year "1996", :id 1}
;;
;; Hint: assoc, Integer/parseInt, get
(defn str-field-to-int [field rec]
  (let [parsed (parse-int (get rec field))]
    (assoc rec field parsed)))

(def student
  (->> (data-table student-tbl)
       (map #(str-field-to-int :id %))
       (map #(str-field-to-int :year %))))

(def subject
  (->> (data-table subject-tbl)
       (map #(str-field-to-int :id %))))

(def student-subject
  (->> (data-table student-subject-tbl)
       (map #(str-field-to-int :subject_id %))
       (map #(str-field-to-int :student_id %))))

;; (where* student (fn [rec] (> (:id rec) 1)))
;; => ({:surname "Petrov", :year 1997, :id 2} {:surname "Sidorov", :year 1996, :id 3})
;;
;; Hint: if-not, filter
(defn where* [data condition-func]
  (if-not condition-func
    data
    (filter condition-func data)))

;; (limit* student 1)
;; => ({:surname "Ivanov", :year 1998, :id 1})
;;
;; Hint: if-not, take
(defn limit* [data lim]
  (if-not lim
    data
    (take lim data)))

;; (order-by* student :year)
;; => ({:surname "Sidorov", :year 1996, :id 3} {:surname "Petrov", :year 1997, :id 2} {:surname "Ivanov", :year 1998, :id 1})
;;
;; Hint: if-not, sort-by
(defn order-by* [data column]
  (if-not column
    data
    (sort-by column data)))

;; (join* (join* student-subject :student_id student :id) :subject_id subject :id)
;; => [{:subject "Math", :subject_id 1, :surname "Ivanov", :year 1998, :student_id 1, :id 1}
;;     {:subject "Math", :subject_id 1, :surname "Petrov", :year 1997, :student_id 2, :id 2}
;;     {:subject "CS", :subject_id 2, :surname "Petrov", :year 1997, :student_id 2, :id 2}
;;     {:subject "CS", :subject_id 2, :surname "Sidorov", :year 1996, :student_id 3, :id 3}]
;;
;; Hint: reduce, conj, merge, first, filter, get
;; Here column1 belongs to data1, column2 belongs to data2.
(defn join* [data1 column1 data2 column2]
  ;; 1. Start collecting results from empty collection.
  ;; 2. Go through each element of data1.
  ;; 3. For each element of data1 (lets call it element1) find all elements
  ;;    of data2 (lets call each as element2) where column1 = column2.
  ;; 4. Use function 'merge' and merge element1 with each element2.
  ;; 5. Collect merged elements.
  (reduce (fn [joined element1]
            (apply conj joined
                   (map #(merge % element1)
                        (filter (fn [element2]
                                  (= (get element1 column1)
                                     (get element2 column2)))
                                data2))))
          []
          data1))

;; (perform-joins student-subject [[:student_id student :id] [:subject_id subject :id]])
;; => [{:subject "Math", :subject_id 1, :surname "Ivanov", :year 1998, :student_id 1, :id 1}
;;     {:subject "Math", :subject_id 1, :surname "Petrov", :year 1997, :student_id 2, :id 2}
;;     {:subject "CS", :subject_id 2, :surname "Petrov", :year 1997, :student_id 2, :id 2}
;;     {:subject "CS", :subject_id 2, :surname "Sidorov", :year 1996, :student_id 3, :id 3}]
;;
;; Hint: loop-recur, let, first, next, join*
(defn perform-joins [data joins*]
  (loop [data1 data
         joins joins*]
    (if (empty? joins)
      data1
      (let [[col1 data2 col2] (first joins)]
        (recur (join* data1 col1 data2 col2)
               (next joins))))))

(defn select [data & {:keys [where limit order-by joins]}]
  (-> data
      (perform-joins joins)
      (where* where)
      (order-by* order-by)
      (limit* limit)))

(select student)
;; => [{:id 1, :year 1998, :surname "Ivanov"} {:id 2, :year 1997, :surname "Petrov"} {:id 3, :year 1996, :surname "Sidorov"}]

(select student :order-by :year)
;; => ({:id 3, :year 1996, :surname "Sidorov"} {:id 2, :year 1997, :surname "Petrov"} {:id 1, :year 1998, :surname "Ivanov"})

(select student :where #(> (:id %) 1))
;; => ({:id 2, :year 1997, :surname "Petrov"} {:id 3, :year 1996, :surname "Sidorov"})

(select student :limit 2)
;; => ({:id 1, :year 1998, :surname "Ivanov"} {:id 2, :year 1997, :surname "Petrov"})

(select student :where #(> (:id %) 1) :limit 1)
;; => ({:id 2, :year 1997, :surname "Petrov"})

(select student :where #(> (:id %) 1) :order-by :year :limit 2)
;; => ({:id 3, :year 1996, :surname "Sidorov"} {:id 2, :year 1997, :surname "Petrov"})

(select student-subject :joins [[:student_id student :id] [:subject_id subject :id]])
;; => [{:subject "Math", :subject_id 1, :surname "Ivanov", :year 1998, :student_id 1, :id 1}
;;     {:subject "Math", :subject_id 1, :surname "Petrov", :year 1997, :student_id 2, :id 2}
;;     {:subject "CS", :subject_id 2, :surname "Petrov", :year 1997, :student_id 2, :id 2}
;;     {:subject "CS", :subject_id 2, :surname "Sidorov", :year 1996, :student_id 3, :id 3}]

(select student-subject :limit 2 :joins [[:student_id student :id] [:subject_id subject :id]])
;; => ({:subject "Math", :subject_id 1, :surname "Ivanov", :year 1998, :student_id 1, :id 1}
;;     {:subject "Math", :subject_id 1, :surname "Petrov", :year 1997, :student_id 2, :id 2})
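;; A sketch of an alternative formulation of join* above, using a for
;; comprehension: iterate the left-hand rows, pair each with every matching
;; right-hand row, and merge the left row into the match. join*-for is a
;; hypothetical name used only for this sketch and is not part of the original
;; exercise; with the tables above it should produce the same vector as join*.
(defn join*-for [data1 column1 data2 column2]
  (vec (for [element1 data1
             element2 data2
             :when (= (get element1 column1)
                      (get element2 column2))]
         ;; element1's fields win on key collisions, as in join* above
         (merge element2 element1))))

;; (= (join*-for student-subject :student_id student :id)
;;    (join* student-subject :student_id student :id))
;; => true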
;; resources/leiningen/new/friboo/api_test.clj (from dryewo/friboo-template)
(ns {{namespace}}.api-test
  (:require [clojure.test :refer :all]
            [{{namespace}}.db :as db]
            [{{namespace}}.api :refer :all]
            [midje.sweet :refer :all]))

(deftest can-get-hello
  (is (= (get-hello {:name "Friboo"} nil nil)
         {:status 200
          :headers {"Content-Type" "application/json"}
          :body {:message "Hello Friboo"}})))

(deftest can-delete-greeting-template
  (let [number-of-calls (atom 0)]
    (with-redefs [db/cmd-delete-greeting! (fn [data conn]
                                            (swap! number-of-calls inc)
                                            (is (= data {:id "foo"}))
                                            (is (= conn {:connection "db-conn"})))]
      (is (= (select-keys (delete-greeting-template {:greeting_id "foo"} nil "db-conn") [:status])
             {:status 204}))
      (is (= @number-of-calls 1)))))

(deftest wrap-midje-facts
  (facts "about delete-greeting-template"
    (fact "works"
      (delete-greeting-template {:greeting_id ..greeting-id..} nil ..db..)
      => (contains {:status 204})
      (provided
        (db/cmd-delete-greeting! {:id ..greeting-id..} {:connection ..db..}) => nil :times 1))))
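;; For context, a minimal sketch (an assumption for illustration, not part of
;; the generated template) of a get-hello handler shape that would satisfy the
;; first test above:
(comment
  (defn get-hello [{:keys [name]} _request _db]
    {:status 200
     :headers {"Content-Type" "application/json"}
     :body {:message (str "Hello " name)}}))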
;; src/leiningen/antlr4.clj (from eddingtonross/lein-antlr4)
;; Copyright 2010 Revelytix, Inc.
;;
;; Licensed under the Apache License, Version 2.0 (the "License");
;; you may not use this file except in compliance with the License.
;; You may obtain a copy of the License at
;;
;;     http://www.apache.org/licenses/LICENSE-2.0
;;
;; Unless required by applicable law or agreed to in writing, software
;; distributed under the License is distributed on an "AS IS" BASIS,
;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
;; See the License for the specific language governing permissions and
;; limitations under the License.
;;
;; This is a derivative work, modified by Edward Ross.
;; The modifications are Copyright 2012 Edward Ross, and licensed
;; under the Apache License, Version 2.0.
(ns leiningen.antlr4
  (:use [leiningen.clean :as clean :only (clean delete-file-recursively)]
        [robert.hooke :only (add-hook)])
  (:import [java.io File FileFilter]
           [java.net URI]
           [org.antlr.v4 Tool]))

(defn sub-dirs
  "Recursively find all subdirectories under the given root directory. Also returns the root directory."
  [^File f]
  (if (.isDirectory f)
    (cons f (apply concat
                   (for [d (.listFiles f (proxy [FileFilter] []
                                           (accept [child] (.isDirectory child))))]
                     (sub-dirs d))))
    '()))

(defn has-suffix?
  "Returns true if file f has one of the given suffixes as its filename extension. The suffixes should not include the '.' character."
  [suffixes f]
  (let [n (.getName f)
        idx (.lastIndexOf n ".")
        suffix (if (> idx -1) (.substring n (inc idx)))]
    (contains? suffixes suffix)))

(defn dirs-with-type
  "Recursively find all directories rooted at f that contain a file with one of the given suffixes as a direct child."
  [^File f suffixes]
  (filter (fn [^File d] (some (partial has-suffix? suffixes) (.listFiles d)))
          (sub-dirs f)))

(defn files-of-type
  "List all files that are direct children of the given directory with names ending in one of the given suffixes."
  [^File dir suffixes]
  (seq (.listFiles dir (proxy [FileFilter] []
                         (accept [f] (has-suffix? suffixes f))))))

(defn relative-paths
  "Takes a root directory 'parent' and a seq of children within the directory, and returns a seq of relative file: URIs that give the pathnames of the children relative to the root directory."
  [^File parent children]
  (let [^URI parent-uri (.toURI parent)]
    (for [^File child children]
      (.relativize parent-uri (.toURI child)))))

(defn absolute-files
  "Takes a root directory 'parent' and a seq of relative file: URIs (as generated by relative-paths) and returns a seq of absolute File objects that represent those relative paths resolved against the root directory."
  [^File parent child-paths]
  (let [^URI parent-uri (URI. (str (.toURI parent) "/"))]
    (for [^URI child-path child-paths]
      (File. (.resolve parent-uri child-path)))))

(def ^{:doc "Default options for the ANTLR tool."}
  default-antlr-opts
  {:atn false
   :message-format "antlr"
   :listener true
   :visitor false
   :encoding "UTF-8"
   :package false
   :depend false
   :D {}
   :warn-error false
   :save-lexer false
   :debug-string-template false
   :force-atn false
   :log false
   :verbose-dfa false})

(defn option-text [text]
  (fn [x] (if x [(str "-" text)] [])))

(defn binary-option [text]
  (fn [x] [(str "-" (if x "" "no-") text)]))

(defn argument-option [text]
  (fn [x] (if x [(str "-" text) x] [])))

;; (This bare `for` appears to be a leftover example of the :D option expansion;
;; it is not used at runtime.)
(for [[option value] {"language" "java" "header" "none"}]
  (str "-D" option "=" value))

(def ^{:doc "Mapping of option names to functions mapping the value of the option to a corresponding command line string"}
  opts-to-command
  {:atn (option-text "atn")
   :message-format (argument-option "message-format")
   :listener (binary-option "listener")
   :visitor (binary-option "visitor")
   :encoding (argument-option "encoding")
   :package (argument-option "package")
   :depend (option-text "depend")
   :D #(for [[option value] %] (str "-D" option "=" value))
   :warn-error (option-text "Werror")
   :save-lexer (option-text "Xsave-lexer")
   :debug-string-template (option-text "XdbgST")
   :force-atn (option-text "Xforce-atn")
   :log (option-text "Xlog")
   :verbose-dfa (option-text "Xverbose-dfa")})

(defn output-command [output-dir]
  (list "-o" (str output-dir)))

(defn input-command [input-dir]
  (list "-lib" (str input-dir)))

(defn options-command [options]
  (filter identity
          (apply concat
                 (for [[option value] options]
                   ((opts-to-command option) value)))))

(def ^{:doc "The collection of file extensions that ANTLR accepts (hard-coded in the ANTLR tool)."}
  file-types
  #{"g" "g4"})

(defn antlr-main
  "Implements the ANTLR tool almost as if invoked from the command line."
  [args]
  (let [antlr (Tool. args)
        err (.errMgr antlr)
        log (.logMgr antlr)]
    (try
      (.processGrammarsOnCommandLine antlr)
      (finally
        (if (.log antlr)
          (try
            (println (str "wrote " (.save log)))
            (catch java.io.IOException ioe
              (. err toolError org.antlr.v4.tool.ErrorType/INTERNAL_ERROR ioe))))))
    (if (> (.getNumErrors err) 0)
      (throw (RuntimeException. (str "ANTLR detected " (.getNumErrors err) " grammar errors."))))))

(defn process-antlr-dir
  "Processes ANTLR grammar files in the given input directory to generate output in the given output directory with the given configuration options."
  [^File input-dir ^File output-dir antlr-opts]
  (let [grammar-files (files-of-type input-dir file-types)
        command-array (into-array (concat (input-command input-dir)
                                          (output-command output-dir)
                                          (options-command antlr-opts)
                                          (map str grammar-files)))]
    (println "Compiling ANTLR grammars:" (apply str (interpose " " (map #(.getName %) grammar-files))) "...")
    ;; The ANTLR tool uses static state to track errors -- reset before each run.
    (antlr-main command-array)))

(defn compile-antlr
  "Recursively process all subdirectories within the given top-level source directory that contain ANTLR grammar files to generate output in a corresponding subdirectory of the destination directory, using the given config options."
  ([^File src-dir ^File dest-dir]
   (compile-antlr src-dir dest-dir nil))
  ([^File src-dir ^File dest-dir antlr-opts]
   (let [input-dirs (dirs-with-type src-dir file-types)]
     (if (empty? input-dirs)
       (println "ANTLR source directory" (.getPath src-dir) "is empty.")
       (let [output-dirs (absolute-files dest-dir (relative-paths src-dir input-dirs))]
         (doseq [[input-dir output-dir] (map list input-dirs output-dirs)]
           (process-antlr-dir input-dir output-dir antlr-opts)))))))

(defn antlr-src-dir
  "Determine the ANTLR source directory for the project."
  [project]
  (File. (get project :antlr-src-dir "src/antlr")))

(defn antlr-dest-dir
  "Determine the ANTLR target directory for the project."
  [project]
  (File. (get project :antlr-dest-dir "gen-src")))

(defn antlr-options
  "Determine the ANTLR config options for the project."
  [project]
  (get project :antlr-options))

(defn antlr4
  "Generate Java source from an ANTLR grammar. A typical project configuration will look like:

  (defproject my-project
    ...
    :antlr-src-dir \"src/antlr\"
    :antlr-dest-dir \"gen-src\"
    :antlr-options { ... }
    ...
  )

  See https://github.com/eddingtonross/lein-antlr4 for a full listing of configuration options."
  [project]
  (compile-antlr (antlr-src-dir project)
                 (antlr-dest-dir project)
                 (antlr-options project)))

(defn clean-antlr-hook
  "Clean the ANTLR output directory."
  [f & [project & _ :as args]]
  (apply f args)
  (clean/delete-file-recursively (antlr-dest-dir project) true))

;; Add a hook to the "lein clean" task to clean the ANTLR target directory.
(defn activate []
  (add-hook #'clean/clean clean-antlr-hook))
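;; Illustration (option values chosen here for the example; not the plugin
;; defaults) of how an options map is translated into flat ANTLR command-line
;; arguments by options-command:
(comment
  (options-command {:listener true
                    :package "com.example.parser"
                    :D {"language" "java"}})
  ;; => roughly ("-listener" "-package" "com.example.parser" "-Dlanguage=java")
  )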
;; src/overtone/sc/examples/demand.clj (from rosejn/overtone)
(ns overtone.sc.examples.demand
  (:use [overtone.sc.machinery defexample]
        [overtone.sc ugens]
        [overtone.sc.cgens demand]))

(defexamples dibrown
  (:rand-walk
   "Random whole number walk through freqs with rate determined by mouse-x"
   "Here we use dibrown to create an infinite sequence of values between 0 and max where each successive value is a whole number no more than step plus or minus the last value. This creates a walk-like effect. We then pull out the values from dibrown using the demand ugen, pulling at the rate defined by the impulse - which is between 1 and 40 depending on the mouse-x coord. Therefore, you can use the mouse to speed up and slow down the walk. We poll the current value so you can see the output as well as hear it"
   rate :ar
   [max {:default 15 :doc "Max walk range. Increase to allow for higher freqs."}
    step {:default 1 :doc "Step size. Increase to allow for larger steps (will sound more random)"}]
   "(let [vals (dibrown 0 15 1 INF)
         trig (impulse:kr (mouse-x 1 40 1))
         val  (demand trig 0 vals)
         poll (poll trig val \"dibrown val:\")
         freq (+ 340 (* 30 val))]
     (* 0.1 (sin-osc freq)))"
   contributed-by "Sam Aaron"))

(defexamples dbrown
  (:rand-walk
   "Random floating point number walk through freqs with rate determined by mouse-x"
   "Here we use dbrown to create an infinite sequence of values between 0 and max where each successive value is a float no more than step plus or minus the last value. This creates a walk-like effect. We then pull out the values from dbrown using the demand ugen, pulling at the rate defined by the impulse - which is between 1 and 40 depending on the mouse-x coord. Therefore, you can use the mouse to speed up and slow down the walk. We poll the current value so you can see the output as well as hear it"
   rate :ar
   [max {:default 15 :doc "Max walk range. Increase to allow for higher freqs."}
    step {:default 1 :doc "Step size. Increase to allow for larger steps (will sound more random)"}]
   "(let [vals (dbrown 0 15 1 INF)
         trig (impulse:kr (mouse-x 1 40 1))
         val  (demand trig 0 vals)
         poll (poll trig val \"dbrown val:\")
         freq (+ 340 (* 30 val))]
     (* 0.1 (sin-osc freq)))"
   contributed-by "Sam Aaron"))

(defexamples diwhite
  (:rand-seq
   "Play a random sequence of integers mapped to freqs with rate determined by mouse-x"
   "Here we use diwhite to create an infinite sequence of random integer values between 0 and max. We then pull out the values from diwhite using the demand ugen, pulling at the rate defined by the impulse - which is between 1 and 40 depending on the mouse-x coord. Therefore, you can use the mouse to speed up and slow down the walk. We poll the current value so you can see the output as well as hear it"
   rate :ar
   [max {:default 15 :doc "Max walk range. Increase to allow for higher freqs."}]
   "(let [vals (diwhite 0 15 INF)
         trig (impulse:kr (mouse-x 1 40 1))
         val  (demand:kr trig 0 vals)
         poll (poll trig val \"diwhite val:\")
         freq (+ 340 (* 30 val))]
     (* 0.1 (sin-osc freq)))"
   contributed-by "Sam Aaron"))

(defexamples dwhite
  (:rand-seq
   "Play a random sequence of floats mapped to freqs with rate determined by mouse-x"
   "Here we use dwhite to create an infinite sequence of random floating point values between 0 and max. We then pull out the values from dwhite using the demand ugen, pulling at the rate defined by the impulse - which is between 1 and 40 depending on the mouse-x coord. Therefore, you can use the mouse to speed up and slow down the walk. We poll the current value so you can see the output as well as hear it"
   rate :ar
   [max {:default 15 :doc "Max walk range. Increase to allow for higher freqs."}]
   "(let [vals (dwhite 0 15 INF)
         trig (impulse:kr (mouse-x 1 40 1))
         val  (demand:kr trig 0 vals)
         poll (poll trig val \"dwhite val:\")
         freq (+ 340 (* 30 val))]
     (* 0.1 (sin-osc freq)))"
   contributed-by "Sam Aaron"))
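;; The same demand-rate pattern as a standalone synth sketch (assumes the usual
;; overtone defsynth/out environment is loaded; this is an illustration, not
;; part of the examples file above):
(comment
  (defsynth dwhite-walk-demo []
    (let [vals (dwhite 0 15 INF)
          trig (impulse:kr (mouse-x 1 40 1))
          val  (demand:kr trig 0 vals)
          freq (+ 340 (* 30 val))]
      (out 0 (* 0.1 (sin-osc freq))))))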
;; src/whopper/elasticsearch.clj (from madmacfrosty/whopper)
(ns whopper.elasticsearch
  (:require [clojure.data.json :as json]
            [whopper.generator :as gen]))

(defn batch [n]
  (partition-all n))

(def base {:first-name :jim :second-name :spence :amount 100 :ccy :usd})

(def id (atom 0))

(defn tx-id []
  (str (swap! id inc)))

(defn add-type [m t]
  (map json/write-str [{:_index :acp :_type t :_id (tx-id)} m]))

(defn ->person [{:keys [person]}]
  (when-let [[first-name second-name] person]
    (add-type {:prename first-name :surname second-name} :person)))

(defn ->standing-order [m]
  (when (-> m :transaction-type (= :standing-order))
    (add-type (dissoc m :person) :standing-order)))

(defn ->payment [m]
  (add-type m :payment))

(defn ->* [tx]
  (mapcat #(% tx) [->person ->standing-order ->payment]))

(def xform
  (comp (mapcat ->*)
        (remove nil?)
        (partition-all 10)))

(defn doit []
  (->> (gen/transactions)
       (take 100)
       (transduce xform conj)))
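;; Illustration (hypothetical record) of the Elasticsearch bulk-style
;; action/source pairs that add-type produces: two JSON strings per document,
;; a bulk action header followed by the document source (exact JSON shape
;; depends on how clojure.data.json serializes the keyword values):
(comment
  (add-type {:prename "Jane" :surname "Doe"} :person)
  ;; => roughly ("{\"_index\":\"acp\",\"_type\":\"person\",\"_id\":\"1\"}"
  ;;             "{\"prename\":\"Jane\",\"surname\":\"Doe\"}")
  )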
test/crux/decorators/aggregation_test.clj
keesterbrugge/crux
0
(ns crux.decorators.aggregation-test
  (:require [crux.decorators.aggregation.alpha :as aggr]
            [crux.fixtures :as f :refer [*api*]]
            [crux.api :as api]
            [clojure.test :as t]))

(t/use-fixtures :each f/with-standalone-system)

(t/deftest test-count-aggregation
  (f/transact-entity-maps!
   (:kv-store *api*)
   [{:crux.db/id :a1 :user/name "patrik" :user/post 1 :post/cost 30}
    {:crux.db/id :a2 :user/name "patrik" :user/post 2 :post/cost 35}
    {:crux.db/id :a3 :user/name "patrik" :user/post 3 :post/cost 5}
    {:crux.db/id :a4 :user/name "niclas" :user/post 1 :post/cost 8}])

  (t/testing "with vector syntax"
    (t/is (= [{:user-name "niclas" :post-count 1 :cost-sum 8}
              {:user-name "patrik" :post-count 3 :cost-sum 70}]
             (aggr/q
              (api/db *api*)
              '{:aggr {:partition-by [?user-name]
                       :select {?cost-sum [0 (+ acc ?post-cost)]
                                ?post-count [0 (inc acc) ?e]}}
                :where [[?e :user/name ?user-name]
                        [?e :post/cost ?post-cost]]}))))

  (t/testing "with reducer function syntax"
    (t/is (= [{:user-name "niclas" :post-count 1 :cost-sum 8}
              {:user-name "patrik" :post-count 3 :cost-sum 70}]
             (aggr/q
              (api/db *api*)
              '{:aggr {:partition-by [?user-name]
                       :select {?cost-sum (+ ?post-cost)
                                ?post-count [0 (inc acc) ?e]}}
                :where [[?e :user/name ?user-name]
                        [?e :post/cost ?post-cost]]}))))

  (t/testing "not doing anything to a query without aggr clause"
    (t/is (= #{[:a4 "niclas" 8]
               [:a3 "patrik" 5]
               [:a2 "patrik" 35]
               [:a1 "patrik" 30]}
             (aggr/q
              (api/db *api*)
              '{:find [?e ?user-name ?post-cost]
                :where [[?e :user/name ?user-name]
                        [?e :post/cost ?post-cost]]})))))

(t/deftest test-with-decorator
  (f/transact-entity-maps!
   (:kv-store *api*)
   [{:crux.db/id :a1 :user/name "patrik" :user/post 1 :post/cost 30}
    {:crux.db/id :a2 :user/name "patrik" :user/post 2 :post/cost 35}
    {:crux.db/id :a3 :user/name "patrik" :user/post 3 :post/cost 5}
    {:crux.db/id :a4 :user/name "niclas" :user/post 1 :post/cost 8}])

  (let [decorated (aggr/aggregation-decorator *api*)]
    (t/is (= [{:user-name "niclas" :post-count 1 :cost-sum 8}
              {:user-name "patrik" :post-count 3 :cost-sum 70}]
             (api/q
              (api/db decorated)
              '{:aggr {:partition-by [?user-name]
                       :select {?cost-sum (+ ?post-cost)
                                ?post-count [0 (inc acc) ?e]}}
                :where [[?e :user/name ?user-name]
                        [?e :post/cost ?post-cost]]})))))
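;; A short orientation to the two :select reducer forms exercised above; this is
;; inferred from the tests only (the aggregation API is alpha).
(comment
  ;; vector form:  ?var [init step-expr & extra-group-vars]
  ;;   ?cost-sum   [0 (+ acc ?post-cost)]  ; fold each ?post-cost into acc, starting at 0
  ;;   ?post-count [0 (inc acc) ?e]        ; bump the counter once per ?e binding
  ;; function form: (+ ?post-cost) behaves like the explicit [0 (+ acc ?post-cost)]
  (aggr/q (api/db *api*)
          '{:aggr {:partition-by [?user-name]
                   :select {?cost-sum (+ ?post-cost)}}
            :where [[?e :user/name ?user-name]
                    [?e :post/cost ?post-cost]]}))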
test/pandect/hash_test.clj
projetoeureka/pandect
0
(ns pandect.hash-test
  (:import java.io.File)
  (:require [clojure.test :refer :all]
            [pandect.core :refer :all]
            [clojure.java.io :as io]))

(def test-string "Hello World!")

(let [inputs {:string identity
              :bytes  #(.getBytes ^String % "UTF-8")
              :stream #(java.io.ByteArrayInputStream. (.getBytes ^String % "UTF-8"))}]
  (deftest t-hash-algorithms
    (are [algorithm result]
         (do
           (doseq [[k input] inputs]
             (is (= result (algorithm (input test-string)))
                 (str "input was: " k)))
           true)
         adler32     "1c49043e"
         crc32       "1c291ca3"
         blake2b-160 "e7338d05e5aa2b5e4943389f9475fce2525b92f2"
         blake2b-256 "bf56c0728fd4e9cf64bfaf6dabab81554103298cdee5cc4d580433aa25e98b00"
         blake2b-384 "53fd759520545fe93270e61bac03b243b686af32ed39a4aa635555be47a89004851d6a13ece00d95b7bdf9910cb71071"
         blake2b-512 "54b113f499799d2f3c0711da174e3bc724737ad18f63feb286184f0597e1466436705d6c8e8c7d3d3b88f5a22e83496e0043c44a3c2b1700e0e02259f8ac468e"
         gost        "636a32a952ecb9e8529ea759ecff1c33623945e5d868352a7df5f240ea747ded"
         md2         "315f7c67223f01fb7cab4b95100e872e"
         md4         "b2a5cc34fc21a764ae2fad94d56fadf6"
         md5         "ed076287532e86365e841e92bfc50d8c"
         sha1        "2ef7bde608ce5404e97d5f042f95f89f1c232871"
         sha224      "4575bb4ec129df6380cedde6d71217fe0536f8ffc4e18bca530a7a1b"
         sha256      "7f83b1657ff1fc53b92dc18148a1d65dfc2d4b1fa3d677284addd200126d9069"
         sha384      "bfd76c0ebbd006fee583410547c1887b0292be76d582d96c242d2a792723e3fd6fd061f9d5cfd13b8f961358e6adba4a"
         sha512      "861844d6704e8573fec34d967e20bcfef3d424cf48be04e6dc08f2bd58c729743371015ead891cc3cf1c9d34b49264b510751b1ff9e537937bc46b5d6ff4ecc8"
         sha3-224    "716596afadfa17cd1cb35133829a02b03e4eed398ce029ce78a2161d"
         sha3-256    "d0e47486bbf4c16acac26f8b653592973c1362909f90262877089f9c8a4536af"
         sha3-384    "f324cbd421326a2abaedf6f395d1a51e189d4a71c755f531289e519f079b224664961e385afcc37da348bd859f34fd1c"
         sha3-512    "32400b5e89822de254e8d5d94252c52bdcb27a3562ca593e980364d9848b8041b98eabe16c1a6797484941d2376864a1b0e248b0f7af8b1555a778c336a5bf48"
         keccak-224  "71519a3ec955d57fce5eabf34f64296e80890478eba9e9b36c9c9d5b"
         keccak-256  "3ea2f1d0abf3fc66cf29eebb70cbd4e7fe762ef8a09bcc06c8edf641230afec0"
         keccak-384  "1f93aefa2bf7e59893b2f29e0a21a58a7e9bbc3f3ce21f3ab3f7d41aa49fa27ca62fd1f42dc99f8497c346a505154b7e"
         keccak-512  "75b70545b09569a8d61251b06fc49b520b6ad5322684fd9466836eb143670afdfa25e0403492e0a7dfb7298a9c7e08576bcf26bc9875adfa88e886009cb2fe00"
         ripemd128   "24e23e5c25bc06c8aa43b696c1e11669"
         ripemd160   "8476ee4631b9b30ac2754b0ee0c47e161d3f724c"
         ripemd256   "c298f45ef908ac440513d24bc157efd387948584710236d2b7154b22503bcb51"
         ripemd320   "3735014bedbbee608b1f70cc885681c046be778cd177e5d65d973a95f34a24eb5fa79b28409ac3dd"
         tiger       "93afa8a33159ad5e9a2e818ca3582bb9247c68c581362de8"
         whirlpool   "d4b3ad3619bc70157376c5426b558dbdad30654cf441ab21d7c08e993873256becc80f32448d0218d5b1aab30bf4209e20e3928df002d3cbcfbe501a184680a8")))
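;; For orientation: each var under test is a plain one-argument hash function that
;; accepts strings, byte arrays and input streams alike. A small REPL sketch, with
;; expected values taken from the table above:
(comment
  (sha256 "Hello World!")
  ;; => "7f83b1657ff1fc53b92dc18148a1d65dfc2d4b1fa3d677284addd200126d9069"
  (md5 (.getBytes "Hello World!" "UTF-8"))
  ;; => "ed076287532e86365e841e92bfc50d8c"
  )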
src/cljs/clojagar/core.cljs
lpan/clojagar
0
(ns clojagar.core)

(enable-console-print!)

(set! (.-innerHTML (js/document.getElementById "app"))
      "<h1>Hello Chestnut!</h1>")
test/documentation/lucid_git.clj
willcohen/lucidity
3
(ns documentation.lucid-git
  (:require [lucid.git :as git]))

[[:chapter {:title "Introduction"}]]

"`lucid.git` is used as an interface to [jgit](https://eclipse.org/jgit/). The aims of this project are:

- self-directed exploration of the jgit library
- a simple and easy to use interface
"

[[:section {:title "Installation"}]]

"Add to `project.clj` dependencies:

    [im.chit/lucid.git \"{{PROJECT.version}}\"]

All functionality is in the `lucid.git` namespace:"

(comment
  (require '[lucid.git :refer [git]]))

[[:chapter {:title "Usage"}]]

[[:section {:title "Help"}]]

"To see all commands, there is the `:help` command:"

(comment
  (git :help)
  ;; or (git)
  => [:add :apply :archive :blame :branch :checkout :cherry :clean
      :clone :commit :describe :diff :fetch :gc :init :log :ls :merge
      :name :notes :pull :push :rebase :reflog :remote :reset :revert
      :rm :stash :status :submodule :tag])

"To see a command's sub-commands, just enter the command on its own:"

(comment
  (git :branch)
  => #{:create :delete :rename :list})

[[:section {:title "Basics"}]]

[[:subsection {:title ":init"}]]

"The `:init` command initialises a directory:"

(comment
  (git :init :directory "/tmp/git-example")
  => "/tmp/git-example/.git")

[[:subsection {:title ":status"}]]

"The `:status` command checks the status of our new repository:"

(comment
  (git "/tmp/git-example" :status)
  => {:clean? true :uncommitted-changes! false})

"Having a directory as the first parameter sets the default directory so that next time `:status` is called, it doesn't need to be set:"

(comment
  (git :status)
  => {:clean? true :uncommitted-changes! false})

"We can add a file and check the repository status:"

(comment
  (spit "/tmp/git-example/hello.txt" "hello there")

  (git :status)
  => {:clean? false, :uncommitted-changes! false, :untracked #{"hello.txt"}})

[[:subsection {:title ":cd"}]]

"`:cd` sets the default directory:"

(comment
  (git :cd "/tmp/git-example")

  (git :status)
  => {:clean? false, :uncommitted-changes! false, :untracked #{"hello.txt"}})

[[:subsection {:title ":pwd"}]]

"`:pwd` returns the current working directory:"

(comment
  (git :pwd)
  => "/tmp/git-example")

[[:section {:title "Exploration"}]]

"Help is available at any time by using `:?` or `:help` after the first command:"

(comment
  (git :init :?)
  => {:bare boolean, :directory java.lang.String, :git-dir java.lang.String})

"Additional parameters may be put in:"

(comment
  (git :init :bare false
       :directory "/tmp/git-example"
       :git-dir "/tmp/git-example/.mercurial")
  => "/tmp/git-example/.mercurial")

"Let's take a closer look at `:status`:"

(comment
  (git :status :?)
  => {:working-tree-it org.eclipse.jgit.treewalk.WorkingTreeIterator,
      :progress-monitor org.eclipse.jgit.lib.ProgressMonitor,
      :ignore-submodules #{"NONE" "UNTRACKED" "DIRTY" "ALL"},
      :path [java.lang.String]})

"We can decode the representation of the options needed:

- `:working-tree-it` an input of type `org.eclipse.jgit.treewalk.WorkingTreeIterator`
- `:progress-monitor` an input of type `org.eclipse.jgit.lib.ProgressMonitor`
- `:ignore-submodules` an input of the following options `#{\"NONE\" \"UNTRACKED\" \"DIRTY\" \"ALL\"}`
- `:path`, either a single or a vector input of type `java.lang.String`"

[[:section {:title "Local"}]]

[[:subsection {:title ":add"}]]

"Options that `:add` takes are:"

(comment
  (git :add :?)
  => {:filepattern [java.lang.String]
      :update boolean
      :working-tree-iterator org.eclipse.jgit.treewalk.WorkingTreeIterator})

"We can now create three files and call `:add` on `hello.txt`:"

(comment
  (git :init :directory "/tmp/git-example")
  (git :cd "/tmp/git-example")

  (do (spit "/tmp/git-example/hello.txt" "hello")
      (spit "/tmp/git-example/world.txt" "world")
      (spit "/tmp/git-example/again.txt" "again"))

  (git :add :filepattern ["hello.txt"])
  => {"hello.txt" #{:merged}})

[[:subsection {:title ":commit"}]]

"Options that `:commit` takes are:"

(comment
  (git :commit :?)
  => {:all boolean
      :allow-empty boolean
      :amend boolean
      :author java.lang.String
      :committer java.lang.String
      :hook-output-stream java.lang.String
      :insert-change-id boolean
      :message java.lang.String
      :no-verify boolean
      :only java.lang.String
      :reflog-comment java.lang.String})

"The revision can now be committed:"

(comment
  (git :commit :message "added hello.txt")
  => {:commit-time 1487219645,
      :author-ident {:email-address "[email protected]",
                     :name "Chris Zheng",
                     :time-zone-offset 660,
                     :when #inst "2017-02-16T04:34:05.000-00:00"},
      :full-message "added hello.txt",
      :name "c115771e38cfd22954cfbc0c1a5c0b7e7890b09f"})

"`:commit` works like the shell `git commit` command, such as when the message needs to be amended:"

(comment
  (git :commit :message "added hello.txt with fix" :amend true)
  => {:commit-time 1487219780,
      :author-ident {:email-address "[email protected]",
                     :name "Chris Zheng",
                     :time-zone-offset 660,
                     :when #inst "2017-02-16T04:34:05.000-00:00"},
      :full-message "added hello.txt with fix",
      :name "fa705232c13d19d3ef4b4b6cfd6993593615a55d"})

[[:subsection {:title ":log"}]]

"`:log` shows the commit history:"

(comment
  (git :log)
  => [{:commit-time 1487219780,
       :author-ident {:email-address "[email protected]",
                      :name "Chris Zheng",
                      :time-zone-offset 660,
                      :when #inst "2017-02-16T04:34:05.000-00:00"},
       :full-message "added hello.txt with fix",
       :name "fa705232c13d19d3ef4b4b6cfd6993593615a55d"}])

[[:subsection {:title ":rm"}]]

"`:rm` removes files from git:"

(comment
  (git :rm :?)
  => {:filepattern [java.lang.String], :cached boolean})

"`hello.txt` is removed and the revision committed:"

(comment
  (git :rm :filepattern ["hello.txt"])

  (git :add :filepattern ["again.txt"])
  => {"again.txt" #{:merged}}

  (git :commit :message "added again.txt, removed hello.txt")
  => {:commit-time 1487219780,
      :author-ident {:email-address "[email protected]",
                     :name "Chris Zheng",
                     :time-zone-offset 660,
                     :when #inst "2017-02-16T04:34:05.000-00:00"},
      :full-message "added hello.txt with fix",
      :name "fa705232c13d19d3ef4b4b6cfd6993593615a55d"})

[[:subsection {:title "workflow"}]]

"`:log` and `:status` show more information about the revision, whilst more changes can be committed:"

(comment
  (git :log)
  => [{:commit-time 1487223636,
       :author-ident {:email-address "[email protected]",
                      :name "Chris Zheng",
                      :time-zone-offset 660,
                      :when #inst "2017-02-16T05:40:36.000-00:00"},
       :full-message "added again.txt, removed hello.txt",
       :name "f00dae4b70f00daf90816ece100d49678b0f9271"}
      {:commit-time 1487219780,
       :author-ident {:email-address "[email protected]",
                      :name "Chris Zheng",
                      :time-zone-offset 660,
                      :when #inst "2017-02-16T04:34:05.000-00:00"},
       :full-message "added hello.txt with fix",
       :name "fa705232c13d19d3ef4b4b6cfd6993593615a55d"}]

  (git :status)
  => {:untracked ["world.txt"] :clean? false}

  (git :add :filepattern ["."])
  => {"again.txt" #{:merged}, "world.txt" #{:merged}}

  (git :commit :message "added ALL files" :amend true)
  => {:commit-time 1487224441,
      :author-ident {:email-address "[email protected]",
                     :name "Chris Zheng",
                     :time-zone-offset 660,
                     :when #inst "2017-02-16T05:52:34.000-00:00"},
      :full-message "added ALL files",
      :name "02cecb2b8c08c1599ba95c165e08b633698cad99"}

  (git :status)
  => {:clean? true})

[[:section {:title "Raw Objects"}]]

"When `:&` is used in the parameters, the raw result of the command call is returned instead of being converted into a corresponding map/string:"

(comment
  (git :status :&)
  ;; => #status{:conflicting []
  ;;            :untracked-folders []
  ;;            :missing []
  ;;            :removed []
  ;;            :clean? true}

  (type (git :status :&))
  => org.eclipse.jgit.api.Status

  (type (git :log :&))
  => org.eclipse.jgit.revwalk.RevWalk)

[[:section {:title "Branching"}]]

[[:subsection {:title ":branch"}]]

"The subcommands for `:branch` are:"

(comment
  (git :branch)
  => #{:create :delete :rename :list})

"Branches for the repository are shown with the `:list` subcommand:"

(comment
  (git :branch :list)
  => [{:name "refs/heads/master",
       :object-id "02cecb2b8c08c1599ba95c165e08b633698cad99",
       :storage "LOOSE",
       :peeled? false,
       :symbolic? false}])

"New branches are created through the `:create` subcommand:"

(comment
  (git :branch :create :name "dev")
  => {:name "refs/heads/dev",
      :object-id "02cecb2b8c08c1599ba95c165e08b633698cad99",
      :storage "LOOSE",
      :peeled? false,
      :symbolic? false}

  (git :branch :list)
  => [{:name "refs/heads/dev",
       :object-id "02cecb2b8c08c1599ba95c165e08b633698cad99",
       :storage "LOOSE",
       :peeled? false,
       :symbolic? false}
      {:name "refs/heads/master",
       :object-id "02cecb2b8c08c1599ba95c165e08b633698cad99",
       :storage "LOOSE",
       :peeled? false,
       :symbolic? false}])

"Branches are renamed through the `:rename` subcommand:"

(comment
  (git :branch :rename :?)
  => {:new-name java.lang.String, :old-name java.lang.String}

  (git :branch :rename :new-name "development" :old-name "dev")
  => {:name "refs/heads/development",
      :object-id "02cecb2b8c08c1599ba95c165e08b633698cad99",
      :storage "LOOSE",
      :peeled? false,
      :symbolic? false})

"Branches are deleted through the `:delete` subcommand:"

(comment
  (git :branch :delete :?)
  => {:branch-names [java.lang.String] :force boolean}

  (git :branch :delete :branch-names (into-array ["development"]))
  => ["refs/heads/development"])

[[:subsection {:title ":checkout"}]]

"Branches of the repository can be accessed through `:checkout`:"

(comment
  (git :checkout :?)
  => {:path [java.lang.String],
      :start-point java.lang.String,
      :create-branch boolean,
      :stage #{"BASE" "THEIRS" "OURS"},
      :force boolean,
      :name java.lang.String,
      :paths [java.util.List],
      :upstream-mode #{"SET_UPSTREAM" "NOTRACK" "TRACK"},
      :all-paths boolean,
      :orphan boolean}

  (git :branch :create :name "dev")

  (git :checkout :name "dev")
  => {:name "refs/heads/dev",
      :object-id "02cecb2b8c08c1599ba95c165e08b633698cad99",
      :storage "LOOSE",
      :peeled? false,
      :symbolic? false})
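"Putting the pieces together, a small hypothetical helper (directory and message are illustrative only) built from the commands documented above:"

(comment
  (defn snapshot!
    "Stages everything under `dir` and commits it with `message`."
    [dir message]
    (git :cd dir)
    (git :add :filepattern ["."])
    (git :commit :message message)
    (git :status)))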
src/hooks/consul.clj
celestial-ops/core
1
(comment
  re-core, Copyright 2012 Ronen Narkis, narkisr.com
  Licensed under the Apache License,
  Version 2.0 (the "License") you may not use this file except in compliance with the License.
  You may obtain a copy of the License at
  http://www.apache.org/licenses/LICENSE-2.0

  Unless required by applicable law or agreed to in writing, software
  distributed under the License is distributed on an "AS IS" BASIS,
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License.)

(ns hooks.consul
  (:require [re-core.persistency.systems :as s]
            [conjul.catalog :refer (register de-register)]
            [re-core.common :refer (import-logging)]))

(import-logging)

(defn add-node [{:keys [system-id consul] :as args}]
  (let [{:keys [machine env]} (s/get-system system-id)
        {:keys [dc host] :as c} (consul env)]
    (when c
      (register host (machine :hostname) (machine :ip) dc)
      (debug "registered node in consul host" host "dc" dc))))

(defn remove-node [{:keys [env machine consul] :as args}]
  (when-let [{:keys [dc host]} (consul env)]
    (de-register host (machine :hostname) dc)
    (debug "removed node from consul host" host "dc" dc)))

(def actions
  {:reload  {:success add-node}
   :create  {:success add-node}
   :destroy {:success remove-node :error remove-node}
   :stage   {:success add-node}})

(defn with-defaults
  "Add an empty consul if not defined"
  [args]
  (merge {:consul {}} args))

(defn update-node [{:keys [event workflow] :as args}]
  (when-let [action (get-in actions [workflow event])]
    (action (with-defaults args))))
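;; A minimal sketch of how these hooks are driven. The argument shape is inferred
;; from the destructuring above; the system id, env and consul settings below are
;; illustrative only.
(comment
  (update-node
   {:workflow :create
    :event :success
    :system-id 1
    :consul (fn [env] ({:dev {:host "consul.local" :dc "dc1"}} env))})
  ;; :create/:success resolves to add-node, which looks the system up,
  ;; derives {:dc :host} from (consul env) and registers the machine.
  )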
src/integration/clojure/com/xebia/visualreview/itest/run_test.clj
andstepanuk/VisualReview
290
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
; Copyright 2015 Xebia B.V.
;
; Licensed under the Apache License, Version 2.0 (the "License")
; you may not use this file except in compliance with the License.
; You may obtain a copy of the License at
;
;     http://www.apache.org/licenses/LICENSE-2.0
;
; Unless required by applicable law or agreed to in writing, software
; distributed under the License is distributed on an "AS IS" BASIS,
; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
; See the License for the specific language governing permissions and
; limitations under the License.
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

(ns com.xebia.visualreview.itest.run-test
  (:require [clojure.test :refer :all]
            [com.xebia.visualreview.mock :as mock]
            [com.xebia.visualreview.service.project :as project]
            [com.xebia.visualreview.service.suite :as suite]
            [com.xebia.visualreview.service.run :as run]))

(use-fixtures :each mock/logging-fixture mock/rebind-db-spec-fixture mock/setup-db-fixture)

(deftest run-service-store
  (testing "stores and retrieves a run"
    (let [project-id (project/create-project! mock/*conn* "some project")
          suite-id   (suite/create-suite-for-project! mock/*conn* "some project" "some suite")
          run-id     (run/create-run! mock/*conn* suite-id)
          run        (run/get-run mock/*conn* run-id)]
      (is (= (:id run) run-id))
      (is (= (:project-id run) project-id))
      (is (= (:suite-id run) suite-id))
      (is (= (:status run) "running"))
      (is (= (:end-time run) nil))
      (is (not (nil? (:start-time run)))))))

(deftest run-service-list
  (testing "retrieves a list of runs"
    (let [project-id (project/create-project! mock/*conn* "some project")
          suite-id   (suite/create-suite-for-project! mock/*conn* "some project" "some suite")
          run-1      (run/get-run mock/*conn* (run/create-run! mock/*conn* suite-id))
          run-2      (run/get-run mock/*conn* (run/create-run! mock/*conn* suite-id))
          run-3      (run/get-run mock/*conn* (run/create-run! mock/*conn* suite-id))
          runs       (run/get-runs mock/*conn* project-id suite-id)]
      (is (= (count runs) 3))
      (is (= (nth runs 0) run-3))
      (is (= (nth runs 1) run-2))
      (is (= (nth runs 2) run-1)))))

(deftest run-service-delete
  (testing "deletes runs"
    (let [project-id           (project/create-project! mock/*conn* "project name")
          created-project      (project/get-project-by-id mock/*conn* project-id)
          suite-id             (suite/create-suite-for-project! mock/*conn* "project name" "suite name")
          suite                (suite/get-suite-by-id mock/*conn* suite-id)
          run-id-1             (run/create-run! mock/*conn* suite-id)
          run-1                (run/get-run mock/*conn* run-id-1)
          run-id-2             (run/create-run! mock/*conn* suite-id)
          run-2                (run/get-run mock/*conn* run-id-2)
          deleted-run          (run/delete-run! mock/*conn* run-id-1)
          run-1-after-deletion (run/get-run mock/*conn* run-id-1)
          run-2-after-deletion (run/get-run mock/*conn* run-id-2)
          suite-after-deletion (suite/get-suite-by-id mock/*conn* suite-id)]
      (is (= deleted-run true))
      ;; sanity checks
      (is (not (nil? created-project)))
      (is (not (nil? suite)))
      (is (not (nil? run-1)))
      (is (not (nil? run-2)))
      (is (nil? run-1-after-deletion))
      (is (not (nil? run-2-after-deletion)))
      (is (not (nil? suite-after-deletion)))))) ; tests if cascade deletes don't delete too much
resources/languages/sv/corpus/time.clj
guivn/duckling
( ; Context map ; Tuesday Feb 12, 2013 at 4:30am is the "now" for the tests {:reference-time (time/t -2 2013 2 12 4 30 0) :min (time/t -2 1900) :max (time/t -2 2100)} "nu" "just nu" (datetime 2013 2 12 4 30 00) "idag" (datetime 2013 2 12) "igΓ₯r" (datetime 2013 2 11) "imorgon" (datetime 2013 2 13) "mΓ₯ndag" "mΓ₯n" "pΓ₯ mΓ₯ndag" (datetime 2013 2 18 :day-of-week 1) "MΓ₯ndag den 18 februari" "MΓ₯n, 18 februari" (datetime 2013 2 18 :day-of-week 1 :day 18 :month 2) "tisdag" (datetime 2013 2 19) "torsdag" "tors" "tors." (datetime 2013 2 14) "fredag" "fre" "fre." (datetime 2013 2 15) "lΓΆrdag" "lΓΆr" "lΓΆr." (datetime 2013 2 16) "sΓΆndag" "sΓΆn" "sΓΆn." (datetime 2013 2 17) "Den fΓΆrste mars" "Den fΓΆrsta mars" "1. mars" "Den 1. mars" (datetime 2013 3 1 :day 1 :month 3) "3 mars" "den tredje mars" "den 3. mars" (datetime 2013 3 3 :day 3 :month 3) "3 mars 2015" "tredje mars 2015" "3. mars 2015" "3-3-2015" "03-03-2015" "3/3/2015" "3/3/15" "2015-3-3" "2015-03-03" (datetime 2015 3 3 :day 3 :month 3 :year 2015) "PΓ₯ den 15." "PΓ₯ den 15" "Den 15." "Den 15" (datetime 2013 2 15 :day 15) "den 15. februari" "15. februari" "februari 15" "15-02" "15/02" (datetime 2013 2 15 :day 15 :month 2) "8 Aug" (datetime 2013 8 8 :day 8 :month 8) "Oktober 2014" (datetime 2014 10 :year 2014 :month 10) "31/10/1974" "31/10/74" "31-10-74" (datetime 1974 10 31 :day 31 :month 10 :year 1974) "14april 2015" "April 14, 2015" "fjortonde April 15" (datetime 2015 4 14 :day 14 :month 4 :years 2015) "nΓ€sta fredag igen" (datetime 2013 2 22 :day-of-week 2) "nΓ€sta mars" (datetime 2013 3) "nΓ€sta mars igen" (datetime 2014 3) "SΓΆndag, 10 feb" "SΓΆndag 10 Feb" (datetime 2013 2 10 :day-of-week 7 :day 10 :month 2) "Ons, Feb13" "Ons feb13" (datetime 2013 2 13 :day-of-week 3 :day 13 :month 2) "MΓ₯ndag, Feb 18" "MΓ₯n, februari 18" (datetime 2013 2 18 :day-of-week 1 :day 18 :month 2) ; ;; Cycles "denna vecka" (datetime 2013 2 11 :grain :week) "fΓΆrra vecka" (datetime 2013 2 4 :grain :week) "nΓ€sta vecka" (datetime 2013 2 18 :grain :week) "fΓΆrra mΓ₯nad" (datetime 2013 1) "nΓ€sta mΓ₯nad" (datetime 2013 3) "detta kvartal" (datetime 2013 1 1 :grain :quarter) "nΓ€sta kvartal" (datetime 2013 4 1 :grain :quarter) "tredje kvartalet" "3. kvartal" "3 kvartal" (datetime 2013 7 1 :grain :quarter) "4. 
kvartal 2018" "fjΓ€rde kvartalet 2018" (datetime 2018 10 1 :grain :quarter) "fΓΆrra Γ₯r" (datetime 2012) "i fjol" (datetime 2012) "i Γ₯r" "detta Γ₯r" (datetime 2013) "nΓ€sta Γ₯r" (datetime 2014) "fΓΆrra sΓΆndag" "sΓΆndag i fΓΆrra veckan" "sΓΆndag fΓΆrra veckan" (datetime 2013 2 10 :day-of-week 7) "fΓΆrra tisdag" (datetime 2013 2 5 :day-of-week 2) "nΓ€sta tisdag" ; when today is Tuesday, "nΓ€sta tirsdag" (next tuesday) is a week from now (datetime 2013 2 19 :day-of-week 2) "nΓ€sta onsdag" ; when today is Tuesday, "nΓ€sta onsdag" (next wednesday) is tomorrow (datetime 2013 2 13 :day-of-week 3) "onsdag i nΓ€sta vecka" "onsdag nΓ€sta vecka" "nΓ€sta onsdag igen" (datetime 2013 2 20 :day-of-week 3) "nΓ€sta fredag igen" (datetime 2013 2 22 :day-of-week 5) "mΓ₯ndag denna veckan" (datetime 2013 2 11 :day-of-week 1) "tisdag denna vecka" (datetime 2013 2 12 :day-of-week 2) "onsdag denna vecka" (datetime 2013 2 13 :day-of-week 3) "i ΓΆverimorgon" (datetime 2013 2 14) "i fΓΆrrgΓ₯r" (datetime 2013 2 10) "sista mΓ₯ndag i mars" (datetime 2013 3 25 :day-of-week 1) "sista sΓΆndag i mars 2014" (datetime 2014 3 30 :day-of-week 7) "tredje dagen i oktober" "tredje dagen i Oktober" (datetime 2013 10 3) "fΓΆrsta veckan i oktober 2014" "fΓΆrsta veckan i Oktober 2014" (datetime 2014 10 6 :grain :week) ;"the week of october 6th" ;"the week of october 7th" ;(datetime 2013 10 7 :grain :week) "sista dagen i oktober 2015" "sista dagen i Oktober 2015" (datetime 2015 10 31) "sista veckan i september 2014" "sista veckan i September 2014" (datetime 2014 9 22 :grain :week) ;; nth of "fΓΆrsta tisdag i oktober" "fΓΆrsta tisdagen i Oktober" (datetime 2013 10 1) "tredje tisdagen i september 2014" "tredje tisdagen i September 2014" (datetime 2014 9 16) "fΓΆrsta onsdagen i oktober 2014" "fΓΆrsta onsdagen i Oktober 2014" (datetime 2014 10 1) "andra onsdagen i oktober 2014" "andra onsdagen i Oktober 2014" (datetime 2014 10 8) ;; Hours "klockan 3" "kl. 3" (datetime 2013 2 13 3) "3:18" (datetime 2013 2 13 3 18) "klockan 15" "kl. 15" "15h" (datetime 2013 2 12 15 :hour 3 :meridiem :pm) "ca. kl. 15" ;; FIXME pm overrides precision "cirka kl. 15" "omkring klockan 15" (datetime 2013 2 12 15 :hour 3 :meridiem :pm) ;; :precision "approximate" "imorgon klockan 17 exakt" ;; FIXME precision is lost "imorgon kl. 17 precis" (datetime 2013 2 13 17 :hour 5 :meridiem :pm) ;; :precision "exact" "kvart ΓΆver 15" "15:15" (datetime 2013 2 12 15 15 :hour 3 :minute 15 :meridiem :pm) "kl. 20 ΓΆver 15" "klockan 20 ΓΆver 15" "tjugo ΓΆver 15" "kl. 15:20" "15:20" (datetime 2013 2 12 15 20 :hour 3 :minute 20 :meridiem :pm) "15:30" (datetime 2013 2 12 15 30 :hour 3 :minute 30 :meridiem :pm) "15:23:24" (datetime 2013 2 12 15 23 24 :hour 15 :minute 23 :second 24) "kvart i 12" "kvart i tolv" "11:45" (datetime 2013 2 12 11 45 :hour 11 :minute 45) ;; Mixing date and time "klockan 9 pΓ₯ lΓΆrdag" (datetime 2013 2 16 9 :day-of-week 6 :hour 9 :meridiem :am) "Fre, Jul 18, 2014 19:00" (datetime 2014 7 18 19 0 :day-of-week 5 :hour 7 :meridiem :pm) "kl. 
19:30, LΓΆr, 20 sep" (datetime 2014 9 20 19 30 :day-of-week 6 :hour 7 :minute 30 :meridiem :pm) ; ;; Involving periods "om 1 sekund" "om en sekund" "en sekund frΓ₯n nu" (datetime 2013 2 12 4 30 1) "om 1 minut" "om en minut" (datetime 2013 2 12 4 31 0) "om 2 minuter" "om tvΓ₯ minuter" "om 2 minuter mer" "om tvΓ₯ minuter mer" "2 minuter frΓ₯n nu" "tvΓ₯ minuter frΓ₯n nu" (datetime 2013 2 12 4 32 0) "om 60 minuter" (datetime 2013 2 12 5 30 0) "om en halv timme" (datetime 2013 2 12 5 0 0) "om 2,5 timme" "om 2 och en halv timme" "om tvΓ₯ och en halv timme" (datetime 2013 2 12 7 0 0) "om en timme" "om 1 timme" "om 1t" (datetime 2013 2 12 5 30) "om ett par timmar" (datetime 2013 2 12 6 30) "om 24 timmar" (datetime 2013 2 13 4 30) "om en dag" (datetime 2013 2 13 4) "3 Γ₯r frΓ₯n idag" (datetime 2016 2) "om 7 dagar" (datetime 2013 2 19 4) "om en vecka" (datetime 2013 2 19) "om ca. en halv timme" ;; FIXME precision is lost "om cirka en halv timme" (datetime 2013 2 12 5 0 0) ;; :precision "approximate" "7 dagar sedan" "sju dagar sedan" (datetime 2013 2 5 4) "14 dagar sedan" "fjorton dagar sedan" (datetime 2013 1 29 4) "en vecka sedan" "1 vecka sedan" (datetime 2013 2 5) "3 veckor sedan" "tre veckor sedan" (datetime 2013 1 22) "3 mΓ₯nader sedan" "tre mΓ₯nader sedan" (datetime 2012 11 12) "tvΓ₯ Γ₯r sedan" "2 Γ₯r sedan" (datetime 2011 2) "1954" (datetime 1954) ; "1 Γ₯r efter julafton" ; "ett Γ₯r efter julafton" ; (datetime 2013 12) ; resolves as after last Xmas... ; Seasons "denna sommaren" "den hΓ€r sommaren" (datetime-interval [2013 6 21] [2013 9 24]) "denna vintern" "den hΓ€r vintern" (datetime-interval [2012 12 21] [2013 3 21]) ; US holidays (http://www.timeanddate.com/holidays/us/) "juldagen" (datetime 2013 12 25) "nyΓ₯rsafton" (datetime 2013 12 31) "nyΓ₯rsdagen" "nyΓ₯rsdag" (datetime 2014 1 1) ; Part of day (morning, afternoon...) 
"ikvΓ€ll" (datetime-interval [2013 2 12 18] [2013 2 13 00]) "fΓΆrra helg" (datetime-interval [2013 2 8 18] [2013 2 11 00]) "imorgon kvΓ€ll" (datetime-interval [2013 2 13 18] [2013 2 14 00]) "imorgon lunch" (datetime-interval [2013 2 13 12] [2013 2 13 14]) "igΓ₯r kvΓ€ll" (datetime-interval [2013 2 11 18] [2013 2 12 00]) "denna helgen" "denna helg" "i helgen" (datetime-interval [2013 2 15 18] [2013 2 18 00]) "mΓ₯ndag morgon" (datetime-interval [2013 2 18 4] [2013 2 18 12]) ; Intervals involving cycles "senaste 2 sekunder" "senaste tvΓ₯ sekunderna" (datetime-interval [2013 2 12 4 29 58] [2013 2 12 4 30 00]) "nΓ€sta 3 sekunder" "nΓ€sta tre sekunder" (datetime-interval [2013 2 12 4 30 01] [2013 2 12 4 30 04]) "senaste 2 minuter" "senaste tvΓ₯ minuter" (datetime-interval [2013 2 12 4 28] [2013 2 12 4 30]) "nΓ€sta 3 minuter" "nΓ€sta tre minuter" (datetime-interval [2013 2 12 4 31] [2013 2 12 4 34]) "senaste 1 timme" ; "senaste timme" (datetime-interval [2013 2 12 3] [2013 2 12 4]) "nΓ€sta 3 timmar" "nΓ€sta tre timmar" (datetime-interval [2013 2 12 5] [2013 2 12 8]) "senaste 2 dagar" "senaste tvΓ₯ dagar" "senaste 2 dagar" (datetime-interval [2013 2 10] [2013 2 12]) "nΓ€sta 3 dagar" "nΓ€sta tre dagar" (datetime-interval [2013 2 13] [2013 2 16]) "senaste 2 veckor" "senaste tvΓ₯ veckorna" "senaste tvΓ₯ veckor" (datetime-interval [2013 1 28 :grain :week] [2013 2 11 :grain :week]) "nΓ€sta 3 veckor" "nΓ€sta tre veckorna" (datetime-interval [2013 2 18 :grain :week] [2013 3 11 :grain :week]) "senaste 2 mΓ₯nader" "senaste tvΓ₯ mΓ₯nader" "senaste tvΓ₯ mΓ₯nader" (datetime-interval [2012 12] [2013 02]) "nΓ€sta 3 mΓ₯nader" "nΓ€sta tre mΓ₯nader" (datetime-interval [2013 3] [2013 6]) "senaste 2 Γ₯r" "senaste tvΓ₯ Γ₯r" "senaste 2 Γ₯r" (datetime-interval [2011] [2013]) "nΓ€sta 3 Γ₯r" "nΓ€sta tre Γ₯r" (datetime-interval [2014] [2017]) ; Explicit intervals "13-15 juli" "13-15 Juli" "13 till 15 Juli" "13 juli till 15 juli" (datetime-interval [2013 7 13] [2013 7 16]) "8 Aug - 12 Aug" "8 Aug - 12 aug" "8 aug - 12 aug" "8 augusti - 12 augusti" (datetime-interval [2013 8 8] [2013 8 13]) "9:30 - 11:00" "9:30 till 11:00" (datetime-interval [2013 2 12 9 30] [2013 2 12 11 1]) "frΓ₯n 9:30 - 11:00 pΓ₯ torsdag" "frΓ₯n 9:30 till 11:00 pΓ₯ torsdag" "mellan 9:30 och 11:00 pΓ₯ torsdag" "9:30 - 11:00 pΓ₯ torsdag" "9:30 till 11:00 pΓ₯ torsdag" "efter 9:30 men fΓΆre 11:00 pΓ₯ torsdag" "torsdag frΓ₯n 9:30 till 11:00" "torsdag mellan 9:30 och 11:00" "frΓ₯n 9:30 till 11:00 pΓ₯ torsdag" (datetime-interval [2013 2 14 9 30] [2013 2 14 11 1]) "torsdag frΓ₯n 9 till 11" (datetime-interval [2013 2 14 9] [2013 2 14 12]) "11:30-13:30" ; go train this rule! "11:30-13:30" "11:30-13:30" "11:30-13:30" "11:30-13:30" "11:30-13:30" (datetime-interval [2013 2 12 11 30] [2013 2 12 13 31]) "inom 2 veckor" (datetime-interval [2013 2 12 4 30 0] [2013 2 26]) "innan kl. 14" "innan klockan 14" (datetime 2013 2 12 14 :direction :before) ; Timezones "16 CET" "kl. 16 CET" "klockan 16 CET" (datetime 2013 2 12 16 :hour 4 :meridiem :pm :timezone "CET") "torsdag kl. 8:00 GMT" "torsdag klockan 8:00 GMT" "torsdag 08:00 GMT" (datetime 2013 2 14 8 00 :timezone "GMT") ;; Bookface tests "idag kl. 14" "idag klockan 14" "kl. 14" "klockan 14" (datetime 2013 2 12 14) "25/4 kl. 16:00" "25/4 klockan 16:00" "25-04 klockan 16:00" "25-4 kl. 16:00" (datetime 2013 4 25 16 0) "15:00 imorgon" "kl. 15:00 imorgon" "klockan 15:00 imorgon" (datetime 2013 2 13 15 0) "efter kl. 
14" "efter klockan 14" (datetime 2013 2 12 14 :direction :after) "efter 5 dagar" "efter fem dagar" (datetime 2013 2 17 4 :direction :after) "om 5 dagar" "om fem dagar" (datetime 2013 2 17 4) "efter imorgon kl. 14" "efter imorgon klockan 14" "imorgon efter kl. 14" ;; FIXME this is actually not ambiguous it's 2pm - midnight. "imorgon efter klockan 14" (datetime 2013 2 13 14 :direction :after) "fΓΆre kl. 11" "fΓΆre klockan 11" (datetime 2013 2 12 11 :direction :before) "imorgon fΓΆre kl. 11" ;; FIXME this is actually not ambiguous. it's midnight to 11 am "imorgon fΓΆre klockan 11" (datetime 2013 2 13 11 :direction :before) "under eftermiddagen" (datetime-interval [2013 2 12 12] [2013 2 12 19]) "kl. 13:30" "klockan 13:30" (datetime 2013 2 12 13 30) "om 15 minuter" (datetime 2013 2 12 4 45 0) "efter lunch" (datetime-interval [2013 2 12 13] [2013 2 12 17]) "10:30" (datetime 2013 2 12 10 30) "morgon" ;; how should we deal with fb mornings? (datetime-interval [2013 2 12 4] [2013 2 12 12]) "nΓ€sta mΓ₯ndag" (datetime 2013 2 18 :day-of-week 1) )
src/com/viooh/kafka_ssl_helper/core.clj
VIOOH/kafka-ssl-helper
;;
;; Copyright 2019-2020 VIOOH Ltd
;;
;; Licensed under the Apache License, Version 2.0 (the "License");
;; you may not use this file except in compliance with the License.
;; You may obtain a copy of the License at
;;
;;     http://www.apache.org/licenses/LICENSE-2.0
;;
;; Unless required by applicable law or agreed to in writing, software
;; distributed under the License is distributed on an "AS IS" BASIS,
;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
;; See the License for the specific language governing permissions and
;; limitations under the License.
;;
(ns com.viooh.kafka-ssl-helper.core
  (:import [java.security KeyStore]
           [java.security.cert Certificate CertificateFactory]
           [java.security SecureRandom])
  (:require [pem-reader.core :as pem]
            [clojure.string :as str]
            [clojure.java.io :as io]))

(defn- random-password
  "Returns a random hex-encoded password built from n random integers."
  [n]
  (let [rdm (SecureRandom.)]
    (->> (repeatedly #(.nextInt rdm))
         (take n)
         (map (partial format "%x"))
         (str/join ""))))

(defn- ^KeyStore empty-keystore
  "Generates an empty PKCS12 keystore protected by the given password."
  [pass]
  (doto (KeyStore/getInstance "PKCS12")
    (.load nil (char-array pass))))

(defn- certificate-factory
  []
  (CertificateFactory/getInstance "X509"))

(defn- load-key
  [^String key]
  (-> key
      (.getBytes "UTF-8")
      io/input-stream
      pem/read
      pem/private-key))

(defn- set-pk+cert
  [^KeyStore ks ks-pass ^String private-key ^String cert-pem]
  (let [^CertificateFactory cert-factory (certificate-factory)
        key   (load-key private-key)
        certs (.generateCertificates cert-factory (io/input-stream (.getBytes cert-pem "UTF-8")))]
    (.setKeyEntry ks "private" key (char-array ks-pass) (into-array Certificate certs)))
  ks)

(defn- set-rootca-cert
  [^KeyStore ks ks-pass ^String cert-pem]
  (let [^CertificateFactory cert-factory (certificate-factory)
        cert (.generateCertificate cert-factory (io/input-stream (.getBytes cert-pem "UTF-8")))]
    (.setCertificateEntry ks "CARoot" cert))
  ks)

(defn- persist-ks
  [^KeyStore ks ^String dest ks-pass]
  (with-open [f (java.io.FileOutputStream. dest)]
    (.store ks f (char-array ks-pass))))

(defn- tmp-file
  [name extension]
  (let [tmp-file (java.io.File/createTempFile name extension)]
    (.deleteOnExit tmp-file)
    (.getAbsolutePath tmp-file)))

(defn- persist-temp-ks
  [^KeyStore ks name ks-pass]
  (let [tmp-file (tmp-file name ".jks")]
    (persist-ks ks tmp-file ks-pass)
    tmp-file))

(defn- truststore-ssl-opts
  [ca-cert-pem]
  (when ca-cert-pem
    (let [ts-pass (random-password 4)
          ;; insert the ca cert in the truststore
          ts (-> (empty-keystore ts-pass)
                 (set-rootca-cert ts-pass ca-cert-pem))]
      {:ssl.truststore.location (persist-temp-ks ts "temp_truststore" ts-pass)
       :ssl.truststore.password ts-pass})))

(defn keystore-ssl-opts
  "Returns valid configuration options for a Kafka consumer using SSL auth."
  [{:keys [cert-pem ca-cert-pem private-key] :as opts}]
  (when (and cert-pem private-key)
    (let [ks-pass (random-password 4)
          ;; insert the pk and the cert in the keystore
          ks (-> (empty-keystore ks-pass)
                 (set-pk+cert ks-pass private-key cert-pem))]
      (merge
       (truststore-ssl-opts ca-cert-pem)
       {:ssl.keystore.location (persist-temp-ks ks "temp_keystore" ks-pass)
        :ssl.keystore.password ks-pass}))))

(defn ssl-opts
  "Returns valid configuration options for a Kafka consumer using SSL auth."
  [{:keys [cert-pem ca-cert-pem private-key] :as opts}]
  (merge
   (truststore-ssl-opts ca-cert-pem)
   (keystore-ssl-opts opts)
   {:security.protocol "SSL"}))
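The map returned by ssl-opts is meant to be merged straight into an ordinary Kafka client configuration. Below is a minimal usage sketch, not part of the library source; the broker address, group id and PEM file paths are placeholder assumptions.

(comment
  ;; Usage sketch (hypothetical values): build an SSL-enabled consumer config
  ;; by merging the truststore/keystore options produced by ssl-opts into the
  ;; usual Kafka properties.
  (require '[com.viooh.kafka-ssl-helper.core :as ssl-helper])

  (def consumer-config
    (merge {:bootstrap.servers "broker-1.example.com:9093" ; placeholder broker
            :group.id          "example-consumer-group"}   ; placeholder group id
           (ssl-helper/ssl-opts
            {:cert-pem    (slurp "client-cert.pem") ; client certificate as a PEM string
             :private-key (slurp "client-key.pem")  ; client private key as a PEM string
             :ca-cert-pem (slurp "ca-cert.pem")})))) ; CA certificate as a PEM string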

;; -----------------------------------------------------------------------------
;; src/clj_ml/ui.clj (from the bmabey/clj-ml repository)
;; -----------------------------------------------------------------------------

;;
;; User interface utilities
;; @author Antonio Garrote
;;

(ns #^{:author "Antonio Garrote <[email protected]>"}
  clj-ml.ui
  "Namespace containing functions for plotting classifiers, clusterers and data sets."
  (:use (clj-ml data utils clusterers)
        (incanter core stats charts))
  (:import (weka.clusterers ClusterEvaluation SimpleKMeans)))

(defn visualize-plot
  "Prepare a plot to be displayed"
  [plot]
  (do (clear-background plot)
      (view plot)
      plot))

(defmulti display-object
  "Displays some kind of clj-ml object"
  (fn [kind chart data opts] [kind chart]))

(defmethod display-object [:dataset :boxplot]
  ([kind chart dataset-opts display-opts]
     (let [dataset (get dataset-opts :dataset)
           dataseq (dataset-seq dataset)
           cols (get dataset-opts :cols)
           cols-names (dataset-format dataset)
           vals-map (reduce (fn [acum col]
                              (let [name (name (nth cols-names col))
                                    vals (map #(nth (instance-to-vector %1) col) dataseq)]
                                (conj acum {name vals})))
                            {}
                            cols)
           title (or (get display-opts :title)
                     (str "Dataset '" (dataset-name dataset) "' Box Plot"))
           legend (if (nil? (get display-opts :legend)) true (get display-opts :legend))
           should-display (get display-opts :visualize)]
       (loop [plot nil
              ks (keys vals-map)]
         (if (empty? ks)
           (if should-display (visualize-plot plot) plot)
           (let [this-val (get vals-map (first ks))
                 the-plot (if (nil? plot)
                            (box-plot this-val :title title :legend legend :series-label (name (first ks)))
                            (do (add-box-plot plot this-val :series-label (name (first ks)))
                                plot))]
             (recur the-plot (rest ks))))))))

(defmethod display-object [:dataset :scatter-plot]
  ([kind chart dataset-opts display-opts]
     (let [dataset (get dataset-opts :dataset)
           dataseq (dataset-seq dataset)
           cols (get dataset-opts :cols)
           col-0 (nth cols 0)
           col-1 (nth cols 1)
           group-by (get dataset-opts :group-by)
           cols-names (dataset-format dataset)
           group-vals (if (nil? group-by)
                        {:no-group-by :no-class}
                        (dataset-values-at dataset group-by))
           acum-map (reduce (fn [acum group-val]
                              (conj acum {(first group-val)
                                          (reduce (fn [acum x] (conj acum {x []})) {} cols)}))
                            {}
                            group-vals)
           folded-points (reduce (fn [acum instance]
                                   (let [inst (instance-to-vector instance)
                                         val-0 (nth inst col-0)
                                         val-1 (nth inst col-1)
                                         class (if (nil? group-by) :no-group-by (nth inst group-by))]
                                     (merge-with (fn [a b]
                                                   {col-0 (conj (get a col-0) (get b col-0))
                                                    col-1 (conj (get a col-1) (get b col-1))})
                                                 acum
                                                 {class {col-0 val-0 col-1 val-1}})))
                                 acum-map
                                 dataseq)
           title (or (get display-opts :title)
                     (str "Dataset '" (dataset-name dataset) "' Scatter Plot ("
                          (name (nth cols-names col-0)) " vs " (name (nth cols-names col-1)) ")"))
           legend (if (nil? (get display-opts :legend)) true (get display-opts :legend))
           should-display (get display-opts :visualize)]
       (loop [plot nil
              ks (keys folded-points)]
         (if (empty? ks)
           (if should-display (visualize-plot plot) plot)
           (let [this-vals (get folded-points (first ks))
                 this-val-0 (get this-vals col-0)
                 this-val-1 (get this-vals col-1)
                 the-plot (if (nil? plot)
                            (scatter-plot this-val-0 this-val-1
                                          :title title
                                          :x-label (name (nth cols-names col-0))
                                          :y-label (name (nth cols-names col-1))
                                          :series-label (name (first ks))
                                          :legend legend)
                            (do (add-points plot this-val-0 this-val-1 :series-label (name (first ks)))
                                plot))]
             (recur the-plot (rest ks))))))))

;; visualization of different objects

(defn dataset-display-numeric-attributes
  "Displays the provided attributes into a box plot"
  [dataset attributes & visualization-options]
  (let [attr (map #(if (keyword? %1) (dataset-index-attr dataset %1) %1) attributes)
        options-pre (first-or-default visualization-options {})
        options (if (nil? (:visualize options-pre))
                  (conj options-pre {:visualize true})
                  options-pre)]
    (display-object :dataset :boxplot {:dataset dataset :cols attr} options)))

(defn dataset-display-class-for-attributes
  "Displays how a pair of attributes are distributed for each class"
  [dataset attribute-x attribute-y & visualization-options]
  (let [attr-x (if (keyword? attribute-x) (dataset-index-attr dataset attribute-x) attribute-x)
        attr-y (if (keyword? attribute-y) (dataset-index-attr dataset attribute-y) attribute-y)
        options-pre (first-or-default visualization-options {})
        opts (if (nil? (:visualize options-pre))
               (conj options-pre {:visualize true})
               options-pre)
        class-index (dataset-get-class dataset)]
    (display-object :dataset :scatter-plot
                    {:dataset dataset :cols [attr-x attr-y] :group-by class-index}
                    opts)))

(defn dataset-display-attributes
  "Displays the distribution of a set of attributes for a dataset"
  [dataset attribute-x attribute-y & visualization-options]
  (let [attr-x (if (keyword? attribute-x) (dataset-index-attr dataset attribute-x) attribute-x)
        attr-y (if (keyword? attribute-y) (dataset-index-attr dataset attribute-y) attribute-y)
        options-pre (first-or-default visualization-options {})
        opts (if (nil? (:visualize options-pre))
               (conj options-pre {:visualize true})
               options-pre)
        class-index (dataset-get-class dataset)]
    (display-object :dataset :scatter-plot {:dataset dataset :cols [attr-x attr-y]} opts)))

;; visualization

(defmulti clusterer-display-for-attributes
  (fn [clusterer dataset attribute-x attribute-y] (class clusterer)))

(defmethod clusterer-display-for-attributes SimpleKMeans
  ([clusterer dataset attribute-x attribute-y & visualization-options]
     (let [attr-x (if (keyword? attribute-x) (dataset-index-attr dataset attribute-x) attribute-x)
           attr-y (if (keyword? attribute-y) (dataset-index-attr dataset attribute-y) attribute-y)
           opts (first-or-default visualization-options {})
           display? (if (= (get visualization-options :visualize) false) false true)
           true-opts (conj opts {:visualize false})
           plot (dataset-display-class-for-attributes dataset attribute-x attribute-y true-opts)
           info (clusterer-info clusterer)
           centroids (:centroids info)]
       (do
         (loop [ks (keys centroids)]
           (if (empty? ks)
             (if display? (visualize-plot plot) plot)
             (let [k (first ks)
                   centroid (get centroids k)
                   val-x (instance-value-at centroid attr-x)
                   val-y (instance-value-at centroid attr-y)]
               (add-pointer plot val-x val-y :text (str "centroid " k " (" (float val-x) "," (float val-y) ")"))
               (recur (rest ks)))))))))

;; Things to load to test this from slime
;(defn load-test-from-slime []
;  (do
;    (add-classpath "file:///Users/antonio.garrote/Development/old/clj-ml/lib/joda-time-1.6.jar")
;    (add-classpath "file:///Users/antonio.garrote/Development/old/clj-ml/lib/opencsv-2.0.1.jar")
;    (add-classpath "file:///Users/antonio.garrote/Development/old/clj-ml/classes/")
;    (add-classpath "file:///Applications/weka-3-6-2/weka.jar")
;    (add-classpath "file:///Users/antonio.garrote/Development/old/clj-ml/src/")
;    (add-classpath "file:///Users/antonio.garrote/Development/old/clj-ml/lib/incanter-charts-1.0-master-SNAPSHOT.jar")
;    (add-classpath "file:///Users/antonio.garrote/Development/old/clj-ml/lib/incanter-core-1.0-master-SNAPSHOT.jar")
;    (add-classpath "file:///Users/antonio.garrote/Development/old/clj-ml/lib/incanter-io-1.0-master-SNAPSHOT.jar")
;    (add-classpath "file:///Users/antonio.garrote/Development/old/clj-ml/lib/incanter-processing-1.0-master-SNAPSHOT.jar")
;    (add-classpath "file:///Users/antonio.garrote/Development/old/clj-ml/lib/incanter-chrono-1.0-master-SNAPSHOT.jar")
;    (add-classpath "file:///Users/antonio.garrote/Development/old/clj-ml/lib/incanter-full-1.0-master-SNAPSHOT.jar")
;    (add-classpath "file:///Users/antonio.garrote/Development/old/clj-ml/lib/incanter-mongodb-1.0-master-SNAPSHOT.jar")
;    (add-classpath "file:///Users/antonio.garrote/Development/old/clj-ml/lib/jfreechart-1.0.13.jar")
;    (add-classpath "file:///Users/antonio.garrote/Development/old/clj-ml/lib/parallelcolt-0.7.2.jar")
;    (add-classpath "file:///Users/antonio.garrote/Development/old/clj-ml/lib/arpack-combo-0.1.jar")
;    (add-classpath "file:///Users/antonio.garrote/Development/old/clj-ml/lib/gnujaxp-1.jar")
;    (add-classpath "file:///Users/antonio.garrote/Development/old/clj-ml/lib/clojure-json-1.1-20091229.021828-4.jar")
;    (add-classpath "file:///Users/antonio.garrote/Development/old/clj-ml/lib/clojure-db-object-0.1.1-20091229.021828-2.jar")
;    (add-classpath "file:///Users/antonio.garrote/Development/old/clj-ml/lib/jcommon-1.0.16.jar")
;    (add-classpath "file:///Users/antonio.garrote/Development/old/clj-ml/lib/netlib-java-0.9.1.jar")
;    (add-classpath "file:///Users/antonio.garrote/Development/old/clj-ml/lib/processing-core-1.jar")
;    (add-classpath "file:///Users/antonio.garrote/Development/old/clj-ml/lib/congomongo-0.1.1-20091229.021828-1.jar")
;    (add-classpath "file:///Users/antonio.garrote/Development/old/clj-ml/lib/mongo-1.0.jar")
;    (add-classpath "file:///Users/antonio.garrote/Development/old/clj-ml/lib/mongo-java-driver-1.1.0-20091229.021828-3.jar")
;    ))
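
;; A rough REPL sketch of how these plotting helpers might be called; the
;; dataset-construction calls below (make-dataset, dataset-set-class from
;; clj-ml.data) are assumed from the rest of the library, and the attribute
;; names and values are made up for illustration only.
(comment
  (def ds (make-dataset "iris-like"
                        [:length :width {:kind [:setosa :versicolor]}]
                        [[1.2 3.4 :setosa]
                         [2.4 5.3 :versicolor]]))
  (dataset-set-class ds 2)
  ;; box plot of the numeric attributes
  (dataset-display-numeric-attributes ds [:length :width])
  ;; scatter plot of two attributes grouped by the class attribute
  (dataset-display-class-for-attributes ds :length :width))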

;; -----------------------------------------------------------------------------
;; src/lwb/nd/swap/ltl.clj (from the esb-lwb/lwb repository)
;; -----------------------------------------------------------------------------

; lwb Logic WorkBench -- Natural deduction, check for unify in ltl

; Copyright (c) 2016 Burkhardt Renz, THM. All rights reserved.
; The use and distribution terms for this software are covered by the
; Eclipse Public License 1.0 (http://opensource.org/licenses/eclipse-1.0.php).
; By using this software in any fashion, you are agreeing to be bound by
; the terms of this license.

(ns lwb.nd.swap.ltl
  (:require [lwb.nd.swap.common :refer :all]
            [lwb.ltl :refer :all]
            [lwb.nd.error :refer :all]
            [lwb.nd.proof :as proof]
            [clojure.spec.alpha :as s]))

;; # Checking constraints of ltl

;; ## Helper functions

(defn- body-type
  "In ltl we can have three possibilities for the type of a body:
   (1) :state the statement is a proposition at a certain state, e.g. `(at [i] A)`
   (2) :rel   a relational statement, e.g. `(<= i j)` or `(succ i j)`
   (3) :alone the statement is not relational and has no state, e.g. `V2`.
   Given a body, returns the type."
  [body]
  (cond
    (symbol? body) :alone            ; order of conds relevant!!
    (= 'at (first body)) :state
    :else :rel))

(defn- separate-state
  "Splits a non-relational proposition into the state and the proposition in that state,
   e.g. `(at [i] (and A B))` -> `[i (and A B)]`
   Requires: `body` is not a relational statement."
  [body]
  [(first (second body)) (nth body 2)])

(defn- swap-type
  "In ltl we can have four possibilities:
   (1) :alone `old` occurs alone in a body
   (2) :state `old` occurs in at and only there
   (3) :rel   `old` occurs in a relational statement
   (4) :prop  `old` occurs in the proposition at a certain state
   Given a vector with indexed bodies, returns this type.
   Requires: the vector of indexed bodies is not empty."
  [proof old]
  (let [ib (involved-bodies proof old)
        bts (set (map #(body-type (second %)) ib))]
    (cond
      (= #{:alone} bts) :alone
      (contains? bts :rel) :rel
      (= #{:state} bts) (let [[state _] (separate-state (second (first ib)))]
                          (if (= old state) :state :prop)))))

(defn- check-alone
  "`new` must be a wellformed ltl formula at a certain state. Exception otherwise."
  [new]
  (if-not (s/valid? :lwb.ltl/at-fml new)
    (throw (ex-error (format "'%s' is not a valid ltl formula at a certain state." new)))))

(defn- check-state
  "`new` must be a symbol named with a single small character or a small character
   followed by `'`. Exception otherwise."
  [new]
  (if-not (and (symbol? new) (re-matches #"[a-z]'*" (name new)))
    (throw (ex-error (format "'%s' is not a valid state symbol." new)))))

(defn- find-rel-plno
  "Find the pline in `proof` with a relational expression containing `old`.
   Requires: the swap-type is `:rel`.
   We expect that there is exactly one such `plno`."
  [proof old]
  (let [ib (involved-bodies proof old)
        rel-fn (fn [[_ body]] (not= 'at (first body)))   ; filter bodies without state
        ib' (filter rel-fn ib)
        rel-fn' (fn [[_ body]] (and (seq? body)
                                    (= 3 (count body))
                                    (or (= old (second body)) (= old (nth body 2)))))
        ib'' (filter rel-fn' ib')]
    (first (map first ib''))))

(defn- assumption?
  "Is the pline at `plno` in `proof` an assumption?
   Requires: `plno` is valid."
  [proof plno]
  (= :assumption (:roth (proof/pline-at-plno proof plno))))

(defn- fresh?
  "The state `new` is not already in scope."
  [proof plno new]
  (let [curr-scope (proof/scope proof (proof/pline-at-plno proof plno))
        plid (proof/plno->plid proof plno)
        ; we must consider only plines above the pline at plno
        curr-scope' (take-while #(not= plid (:plid %)) curr-scope)
        rel-fn (fn [body] (and (seq? body) (= 'at (first body))))
        curr-states (set (map #(first (second %)) (filter rel-fn (map :body curr-scope'))))]
    (not (contains? curr-states new))))

(defn- succ?
  "The relational expression at `plno` of `proof` is `succ`.
   Requires: `plno` is valid and has a relational expression as body."
  [proof plno]
  (= 'succ (first (:body (proof/pline-at-plno proof plno)))))

(defn- fresh-in-succ?
  "The new state `new` is fresh in the expression `body` with operator `succ`.
   Requires: `body` is such an expression."
  [body old new]
  (let [arg1 (second body)
        arg2 (nth body 2)]
    (if (= arg1 old) (not= arg2 new) (not= arg1 new))))

(defn- fresh-in-succ2?
  "The new state `new` at the second argument of `succ` is fresh in the proof.
   Requires: `plno` is a `succ` expression."
  [proof plno old new]
  (let [arg2 (nth (:body (proof/pline-at-plno proof plno)) 2)]
    (if (= arg2 old) (fresh? proof plno new) true)))

(defn- check-rel
  "We have to consider the following cases:
   - expression `succ`: the `new` state must be different from the other one
   - expression `succ`: the `new` state at the second argument of `succ` must be fresh,
     unless the mode is :checked, which means that the user has checked the validity of the swap.
   - expression `<=` and `:roth` = `:assumption`: the `new` state must be a fresh one"
  [proof old new mode]
  ; new is a symbol for a state
  (check-state new)
  (let [rel-plno (find-rel-plno proof old)]
    (cond
      (succ? proof rel-plno)
      (cond
        (not (fresh-in-succ? (:body (proof/pline-at-plno proof rel-plno)) old new))
        (throw (ex-error (format "'%s' must differ from the other argument in a succ expression." new)))

        (and (not (fresh-in-succ2? proof rel-plno old new)) (= mode :unchecked))
        (throw (ex-warning (format "State '%s' is already in scope - if you are sure use swap with :checked." new))))

      (assumption? proof rel-plno)
      (if (not (fresh? proof rel-plno new))
        (throw (ex-error (format "'%s' must be a fresh state, '%s' is not." old new)))))))

(defn- check-prop
  "`new` must be a wellformed ltl formula. Exception otherwise."
  [new]
  (if-not (s/valid? :lwb.ltl/fml new)
    (throw (ex-error (format "'%s' is not a valid ltl formula." new)))))

;; ## Checking the constraints for ltl in swap

(defn check-swap
  "Check whether `old` and `new` can be swapped in `proof`.
   Throws exception if not."
  [proof old new mode]
  (let [st (swap-type proof old)]
    (try
      (case st
        :alone (check-alone new)
        :state (check-state new)
        :rel   (check-rel proof old new mode)
        :prop  (check-prop new))
      (catch IllegalArgumentException _
        (throw (ex-error (format "There is no '%s' in the proof." old))))
      (catch Exception e
        (throw e)))))

;; -----------------------------------------------------------------------------
;; resources/public/js/out/cljs/core/async/impl/buffers.cljs
;; (from the yuppieghost/soul-talk repository)
;; -----------------------------------------------------------------------------

;; Copyright (c) Rich Hickey and contributors. All rights reserved.
;; The use and distribution terms for this software are covered by the
;; Eclipse Public License 1.0 (http://opensource.org/licenses/eclipse-1.0.php)
;; which can be found in the file epl-v10.html at the root of this distribution.
;; By using this software in any fashion, you are agreeing to be bound by
;; the terms of this license.
;; You must not remove this notice, or any other, from this software.

(ns cljs.core.async.impl.buffers
  (:require [cljs.core.async.impl.protocols :as impl]))

;; -----------------------------------------------------------------------------
;; DO NOT USE, this is internal buffer representation

(defn acopy [src src-start dest dest-start len]
  (loop [cnt 0]
    (when (< cnt len)
      (aset dest (+ dest-start cnt) (aget src (+ src-start cnt)))
      (recur (inc cnt)))))

(deftype RingBuffer [^:mutable head ^:mutable tail ^:mutable length ^:mutable arr]
  Object
  (pop [_]
    (when-not (zero? length)
      (let [x (aget arr tail)]
        (aset arr tail nil)
        (set! tail (js-mod (inc tail) (alength arr)))
        (set! length (dec length))
        x)))

  (unshift [_ x]
    (aset arr head x)
    (set! head (js-mod (inc head) (alength arr)))
    (set! length (inc length))
    nil)

  (unbounded-unshift [this x]
    (if (== (inc length) (alength arr))
      (.resize this))
    (.unshift this x))

  ;; Doubles the size of the buffer while retaining all the existing values
  (resize [_]
    (let [new-arr-size (* (alength arr) 2)
          new-arr (make-array new-arr-size)]
      (cond
        (< tail head)
        (do (acopy arr tail new-arr 0 length)
            (set! tail 0)
            (set! head length)
            (set! arr new-arr))

        (> tail head)
        (do (acopy arr tail new-arr 0 (- (alength arr) tail))
            (acopy arr 0 new-arr (- (alength arr) tail) head)
            (set! tail 0)
            (set! head length)
            (set! arr new-arr))

        (== tail head)
        (do (set! tail 0)
            (set! head 0)
            (set! arr new-arr)))))

  (cleanup [this keep?]
    (dotimes [x length]
      (let [v (.pop this)]
        (when ^boolean (keep? v)
          (.unshift this v))))))

(defn ring-buffer [n]
  (assert (> n 0) "Can't create a ring buffer of size 0")
  (RingBuffer. 0 0 0 (make-array n)))

;; -----------------------------------------------------------------------------

(deftype FixedBuffer [buf n]
  impl/Buffer
  (full? [this]
    (== (.-length buf) n))
  (remove! [this]
    (.pop buf))
  (add!* [this itm]
    (.unbounded-unshift buf itm)
    this)
  (close-buf! [this])
  cljs.core/ICounted
  (-count [this]
    (.-length buf)))

(defn fixed-buffer [n]
  (FixedBuffer. (ring-buffer n) n))

(deftype DroppingBuffer [buf n]
  impl/UnblockingBuffer
  impl/Buffer
  (full? [this] false)
  (remove! [this]
    (.pop buf))
  (add!* [this itm]
    (when-not (== (.-length buf) n)
      (.unshift buf itm))
    this)
  (close-buf! [this])
  cljs.core/ICounted
  (-count [this]
    (.-length buf)))

(defn dropping-buffer [n]
  (DroppingBuffer. (ring-buffer n) n))

(deftype SlidingBuffer [buf n]
  impl/UnblockingBuffer
  impl/Buffer
  (full? [this] false)
  (remove! [this]
    (.pop buf))
  (add!* [this itm]
    (when (== (.-length buf) n)
      (impl/remove! this))
    (.unshift buf itm)
    this)
  (close-buf! [this])
  cljs.core/ICounted
  (-count [this]
    (.-length buf)))

(defn sliding-buffer [n]
  (SlidingBuffer. (ring-buffer n) n))

(defonce ^:private NO-VAL (js/Object.))

(defn- undelivered? [val]
  (identical? NO-VAL val))

(deftype PromiseBuffer [^:mutable val]
  impl/UnblockingBuffer
  impl/Buffer
  (full? [_] false)
  (remove! [_] val)
  (add!* [this itm]
    (when (undelivered? val)
      (set! val itm))
    this)
  (close-buf! [_]
    (when (undelivered? val)
      (set! val nil)))
  cljs.core/ICounted
  (-count [_]
    (if (undelivered? val) 0 1)))

(defn promise-buffer []
  (PromiseBuffer. NO-VAL))
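
;; A minimal sketch of how these buffer types are normally reached through the
;; public cljs.core.async API rather than used directly; buffer sizes below are
;; arbitrary, and [cljs.core.async :as async] is assumed to be required in the
;; consuming namespace.
(comment
  ;; fixed buffer: puts park once 3 values are buffered
  (def c1 (async/chan (async/buffer 3)))
  ;; dropping buffer: new values are discarded once the buffer is full
  (def c2 (async/chan (async/dropping-buffer 3)))
  ;; sliding buffer: the oldest buffered value is dropped once the buffer is full
  (def c3 (async/chan (async/sliding-buffer 3))))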
;; Source: Chapter 02 Code/src/chapter02/accessing.clj (repo: PacktPublishing/Clojure-Programming-Cookbook, 14 stars)
(ns chapter02.accessing)

(nth [1 2 3 4 5] 1)
;;=> 2
(nth '("a" "b" "c" "d" "e") 3)
;;=> "d"
(nth [1 2 3] 3)
;;=> IndexOutOfBoundsException clojure.lang.PersistentVector.arrayFor (PersistentVector.java:153)
(nth [1 2 3] 3 nil)
;;=> nil
(get {:a 1 :b 2 :c 3 :d 4 :e 5} :c)
;;=> 3
(get {:a 1 :b 2 :c 3 :d 4 :e 5} :f)
;;=> nil
(get {:a 1 :b 2 :c 3 :d 4 :e 5} :f :not-found)
;;=> :not-found
(get #{:a :b :c} :c)
;;=> :c
(get #{:a :b :c} :d)
;;=> nil
(get #{:a :b :c} :d :not-found)
;;=> :not-found
({:a 1 :b 2 :c 3 :d 4 :e 5} :c)
;;=> 3
({:a 1 :b 2 :c 3 :d 4 :e 5} :f)
;;=> nil
({:a 1 :b 2 :c 3 :d 4 :e 5} :f :not-found)
;;=> :not-found
(:c {:a 1 :b 2 :c 3 :d 4 :e 5})
;;=> 3
(:f {:a 1 :b 2 :c 3 :d 4 :e 5})
;;=> nil
(:f {:a 1 :b 2 :c 3 :d 4 :e 5} :not-found)
;;=> :not-found
(get #{:banana :apple :strawberry :orange :melon} :orange)
;;=> :orange
(get #{:banana :apple :strawberry :orange :melon} :grape)
;;=> nil
(get #{:banana :apple :strawberry :orange :melon} :grape :not-found)
;;=> :not-found
(#{:banana :apple :strawberry :orange :melon} :orange)
;;=> :orange
(#{:banana :apple :strawberry :orange :melon} :grape)
;;=> nil
(:orange #{:banana :apple :strawberry :orange :melon})
;;=> :orange
(:grape #{:banana :apple :strawberry :orange :melon})
;;=> nil
(second [1 2 3 4 5])
;;=> 2
(second '())
;;=> nil
(next [1])
;;=> nil
(rest [1])
;;=> ()
(ffirst [[1 2 3] 4 [3 5 6]])
;;=> 1
(ffirst {:a 1 :b 2})
;;=> :a
(first (first [[1 2 3] 4 [3 5 6]]))
;;=> 1
(first (first {:a 1 :b 2}))
;;=> :a
(update {:a 1 :b 2 :c 3} :a inc)
;;=> {:a 2, :b 2, :c 3}
(ifn? +)
;;=> true
(ifn? [])
;;=> true
(ifn? {})
;;=> true
(ifn? #{})
;;=> true
(ifn? :a)
;;=> true
(ifn? '())
;;=> false
(ifn? 1)
;;=> false
(get ["a" "b" "c" "d" "e"] 3)
;;=> "d"
(get ["a" "b" "c" "d" "e"] 5)
;;=> nil
(get ["a" "b" "c" "d" "e"] 5 :not-found)
;;=> :not-found
(["a" "b" "c" "d" "e"] 3)
;;=> "d"
(["a" "b" "c" "d" "e"] 5)
;;=> IndexOutOfBoundsException clojure.lang.PersistentVector.arrayFor (PersistentVector.java:153)
(def location
  {{:x 1 :y 1} "Nico" {:x 1 :y 2} "John" {:x 2 :y 1} "Makoto"
   {:x 2 :y 2} "Tony"})
;;=> #'chapter02.accessing/location
(location {:x 2 :y 2})
;;=> "Tony"
(def biography-of-konan-doyle
  {:name "Arthur Ignatius Conan Doyle"
   :born "22-May-1859"
   :died "7-July-1930"
   :occupation ["novelist" "short story writer" "poet" "physician"]
   :nationality "scotish"
   :citizenship "United Kingdom"
   :genre ["Detective fiction", "fantasy", "science fiction", "historical novels", "non-fiction"]
   :notable-works ["Stories of Sherlock Holmes" "The Lost World"]
   :spouse ["Louisa Hawkins" "Jean Leckie"]
   :no-of-children 5})
;;=> #'chapter02.accessing/biography-of-konan-doyle
(get-in biography-of-konan-doyle [:genre 2])
;;=> "science fiction"
;;=> ["Detective fiction" "fantasy" "science fiction" "historical novels" "non-fiction"]
(get (get biography-of-konan-doyle :genre) 2)
;;=> "science fiction"
(assoc-in {:a {:b 1 :c 2} :d 3} [:a :c] 1)
;;=> {:a {:b 1, :c 1}, :d 3}
(assoc-in {:a {:b 1 :c 2} :d 3} [:a :d] 1)
;;=> {:a {:b 1, :c 2, :d 1}, :d 3}
(update-in {:a {:b 1 :c 2} :d 3} [:a :c] inc)
;;=> {:a {:b 1, :c 3}, :d 3}
(update-in {:a {:b 1 :c 2} :d 3} [:a :c] (constantly 10))
;;=> {:a {:b 1, :c 10}, :d 3}
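;; Added sketch (not part of the original cookbook file): `get-in` also takes a
;; default value, which avoids nesting two `get` calls as done above. The first
;; map literal is an assumed example value; the second call reuses the
;; biography map defined in this file.
(get-in {:a {:b 1 :c 2} :d 3} [:a :e] :not-found)
;;=> :not-found
(get-in biography-of-konan-doyle [:spouse 1])
;;=> "Jean Leckie"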
;; Source: PROJECTNAMESPACE.PROJECTNAME/src/PROJECTNAMESPACE/PROJECTNAME/frontend/app/views.cljs (repo: armincerf/clojure-template, 1 star)
(ns PROJECTNAMESPACE.PROJECTNAME.frontend.app.views (:require [re-frame.core :as rf] [reagent.core :as r] [fork.re-frame :as fork] [PROJECTNAMESPACE.PROJECTNAME.frontend.common :as common] [PROJECTNAMESPACE.PROJECTNAME.common :as common-shared] [PROJECTNAMESPACE.PROJECTNAME.frontend.app.components :as components] [PROJECTNAMESPACE.PROJECTNAME.frontend.app.subscriptions :as sub] [PROJECTNAMESPACE.PROJECTNAME.frontend.global-messages :as messages] [PROJECTNAMESPACE.PROJECTNAME.frontend.http :as http] [clojure.string :as str])) (defn type->icon-class [type] (str "asset-icon " (common-shared/keyword->string type) "-icon")) (defn type-selector [selected-type] (let [type-keyword @selected-type] [:div.asset-types (for [asset-type [:email :phone-number :password] :let [active? (= type-keyword asset-type)]] ^{:key asset-type} [:button.card.asset-type {:class (when active? "asset-type--active") :on-click #(reset! selected-type asset-type)} (common-shared/keyword->readable-string asset-type)])])) (defn data-input [{:keys [handle-change handle-blur submitting?]} options] [:input.form-control.w-input (merge {:disabled submitting? :on-change handle-change :on-blur handle-blur} options)]) (defn text-input [{:keys [values] :as props} key] [data-input props {:name key :type "text" :value (values key)}]) (defn form-input [label input] [:div.margin-bottom-sm [:label.form-label.form__label label] [:div.form-control-wrapper input]]) (defn asset-profile [] (let [{:keys [id asset/name asset/description asset/data asset/type asset/breaches] :as asset} @(rf/subscribe [::sub/current-asset]) type-str (common-shared/keyword->readable-string type) last-scan (:valid-time breaches) breaches (:breach/data breaches)] [:div {:class "page-content w-clearfix"} [:h1 {:class "page-title"} [components/breadcrumb [{:label "Assets" :href (common/route->url :app/homepage)} {:label name}]]] [:div {:class "topdashbar"} [:div {:class "stathold"} [:div {:class "statrow w-row"} [:div {:class "leftcolstat w-col w-col-4 w-col-stack"} [:div {:class "circlewicon nomeetingsicon"}]] [:div {:class "column-7 w-col w-col-8 w-col-stack"} [:div {:class "topdashbartext"} "Risk score"] [:div {:class "topdashbartext number"} "36"] [:div {:class "topdashbartext number desctext"} "/100"]]]] [:div {:class "stathold"} [:div {:class "statrow w-row"} [:div {:class "w-col w-col-4 w-col-stack"} [:div {:class "circlewicon rescheduleicon"}]] [:div {:class "column-8 w-col w-col-8 w-col-stack"} [:div {:class "topdashbartext"} name] [:div {:class "topdashbartext number"} "2"] [:div {:class "topdashbartext number desctext"} "This Month"]]]] [:div {:class "stathold laststat bottomlaststat"} [:div {:class "statrow statlastbottom w-row"} [:div {:class "w-col w-col-4 w-col-stack"} [:div {:class "circlewicon cancelledmeetings"}]] [:div {:class "column-9 w-col w-col-8 w-col-stack"} [:div {:class "topdashbartext"} "Total scans"] [:div {:class "topdashbartext number"} "5"] [:div {:class "topdashbartext number desctext"} "This Month"]]]]] [:div.asset-details.card [:div.asset-details__icon.circlewicon {:class (type->icon-class type)}] [:h3 "Asset Details"] [:div.asset-details__label name] [:div.asset__data data] (when description [:div.asset-details__item [:div.asset-details__label "Description:"] [:div.asset__description description]]) [:div.asset-details__item [:div.asset-details__label "Last scan:"] [:div.asset__description (common-shared/format-tx-time last-scan)]] [:h3 "Breaches"] [:div.asset-details__breaches (if (seq breaches) [:div.breaches [:p "Oh jeez... 
looks like the following websites/companies have been hacked and leaked your private information! Click on a name to show more details about the breach."] (for [{:keys [Name]} breaches] ^{:key Name} [:a {:on-click #(rf/dispatch [:asset/show-breach-detail Name])} Name])] [:p "Good news! Your data has not been found in any data breaches or hacks"]) [:button {:on-click #(rf/dispatch [:asset/search-breaches id])} "Search for new breaches"]]]])) (defn add-data [] (let [selected-type (r/atom :email)] (fn [] [:div {:class "page-content w-clearfix"} [:h1.page-title "Add data"] [type-selector selected-type] [fork/form {:path :new-asset :form-id "asset" :prevent-default? true :clean-on-unmount? true :on-submit #(rf/dispatch [:asset/create @selected-type %]) :initial-values {:asset/name "" :asset/description "" :asset/data ""}} (fn [{:keys [values form-id handle-submit submitting? reset] :as props}] [:div.form [:form.profile-component__form {:id form-id :on-submit handle-submit} [form-input "Name" [text-input props "asset/name"]] [form-input "Description" [text-input props "asset/description"]] [form-input (common-shared/keyword->readable-string @selected-type) [data-input props {:type (if (= :password @selected-type) "password" "text") :name "asset/data" :value (values "asset/data")}] ] [:button.btn.btn--md.form__submit {:type "submit" :disabled submitting?} "Submit"]]])]]))) (defn account-settings [] [:div "settings"]) (defn my-assets [] (let [assets @(rf/subscribe [::sub/assets]) loading? @(rf/subscribe [:loading?])] [:div.assets (if loading? [common-shared/loader] (for [{:keys [id asset/data asset/name asset/type]} assets :let [last-scan "Last scanned - 10:30 AM"]] ^{:key id} [:div {:class "meetingdetailcontain float-left"} [:div {:class "meetingdetailtopdiv"} [:div {:class "w-row"} [:div {:class "column-10 w-col w-col-5"} [:div {:class (type->icon-class type)}]] [:div {:class "w-col w-col-7"} [:div {:class "meetingtitle"} (common-shared/keyword->readable-string type)] [:div {:class "meetingtitle meetingemail"} (if (= :password type) "*********" data)] [:div {:class "meetingtitle meetingemail meetingtime w-hidden-small w-hidden-tiny"} last-scan]]]] [:div {:class "meetingdetailmiddlediv"} [:div {:class "w-row"} [:div {:class "w-col w-col-6"} [:div {:class "alert-text"} (rand-nth ["No" "2" "3"]) " New Alerts"]] [:div {:class "w-col w-col-6"} [:div {:class "alert-text resolved-alert"} (rand-nth ["1" "3"]) " Resolved Alerts"]]]] [:div {:class "meetingtitle meetingemail only-mobile"} last-scan] [:a {:href (common/id-route :app/asset-profile {:asset id}) :class "bottommorelink bottommoreright w-inline-block"} [:div {:class "detailscallinktext"} "View Details"]]]))])) (defn overview [] [:div {:class "page-content w-clearfix"} [:h1 {:class "page-title"} "Assets"] [:div {:class "topdashbar"} [:div {:class "stathold"} [:div {:class "statrow w-row"} [:div {:class "leftcolstat w-col w-col-4 w-col-stack"} [:div {:class "circlewicon nomeetingsicon"}]] [:div {:class "column-7 w-col w-col-8 w-col-stack"} [:div {:class "topdashbartext"} "Data Protected"] [:div {:class "topdashbartext number"} "36"] [:div {:class "topdashbartext number desctext"} "Items"]]]] [:div {:class "stathold"} [:div {:class "statrow w-row"} [:div {:class "w-col w-col-4 w-col-stack"} [:div {:class "circlewicon rescheduleicon"}]] [:div {:class "column-8 w-col w-col-8 w-col-stack"} [:div {:class "topdashbartext"} "Alerts"] [:div {:class "topdashbartext number"} "14"] [:div {:class "topdashbartext number desctext"} "This Month"]]]] [:div 
{:class "stathold laststat bottomlaststat"} [:div {:class "statrow statlastbottom w-row"} [:div {:class "w-col w-col-4 w-col-stack"} [:div {:class "circlewicon cancelledmeetings"}]] [:div {:class "column-9 w-col w-col-8 w-col-stack"} [:div {:class "topdashbartext"} "Total scans"] [:div {:class "topdashbartext number"} "20"] [:div {:class "topdashbartext number desctext"} "This Month"]]]]] [:div {:class "numberofmeetings"} "My Assets" [my-assets]]]) (defn home [] ) (defn inbox [show-inbox?] (fn [] [:div {:class "inboxcontain" :style {:display (if @show-inbox? "block" "none")}} [:div {:class "iteminboxdiv"} [:div {:class "iteminboxdetaildiv"} [:div {:class "div-block-8"}] [:div {:class "iteminboxdetailtext"} "01/01/2020 10:30"]] [:div [:div "Email found in Yahoo data leak!"] [:div {:class "inboxnumbertext"} "[email protected]"]]] [:div {:class "iteminboxdiv"} [:div {:class "iteminboxdetaildiv"} [:div {:class "div-block-8"}] [:div {:class "iteminboxdetailtext"} "01/05/2020 12:30"]] [:div [:div "Email found in Hotmail data leak!"] [:div {:class "inboxnumbertext"} "[email protected]"]]] [:div {:class "iteminboxdiv itembuttondiv"} [:a {:class "inboxbutton leftinboxbutton w-inline-block"} [:div "Refresh"]] [:a {:data-ix "new-interaction-3" :class "inboxbutton w-inline-block" :on-click #(do (.preventDefault %) (reset! show-inbox? false))} [:div "Close"]]] (when-not show-inbox? [:div {:class "iteminboxdiv bottombelldiv w-clearfix"} [:div {:class "meetingrightdiv"}] [:div {:class "w-row"} [:div {:class "w-col w-col-4"} [:div {:class "calimginbox bottombellimg"}]] [:div {:class "w-col w-col-8"} [:div "Inbox"] [:div {:class "inboxdate w-embed w-script"}]]]])])) (defn views [] (let [page @(rf/subscribe [::sub/page]) show-inbox? (r/atom false)] [:<> [components/top-nav] [:div {:class "section"} [:a {:data-ix "new-interaction-2" :class "inboxdiv w-hidden-small w-hidden-tiny w-inline-block" :on-click #(do (.preventDefault %) (swap! show-inbox? not))} [:div {:class "bell"} "H"]] [:div {:class "pagecontain"} [:div {:class "div-block-6"} [components/left-menu page] (case page :app/homepage [overview] :app/add-data [add-data] :app/asset-profile [asset-profile] :app/account-settings [account-settings] [:p.font-italic (str "No content for page " page)])]] [inbox show-inbox?]] [components/modal] [messages/toast]]))
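;; Added illustrative sketch (hypothetical, not from the template repo): the views
;; above deref subscriptions such as ::sub/assets and ::sub/current-asset. A minimal
;; registration for the assets subscription could look like the form below; the real
;; one lives in the .subscriptions namespace and may read a different db path.
(comment
  (rf/reg-sub
   ::sub/assets
   (fn [db _]
     ;; assumes assets are stored under :assets in app-db
     (:assets db))))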
;; Source: project.clj (repo: xsc/claro.circuit-breaker, 3 stars)
(defproject claro/circuit-breaker "0.1.1-SNAPSHOT"
  :description "Circuit-breaker middleware for claro execution engines."
  :url "https://github.com/xsc/claro.circuit-breaker"
  :license {:name "MIT License"
            :url "https://opensource.org/licenses/MIT"
            :author "Yannick Scherer"
            :year 2017
            :key "mit"}
  :dependencies [[org.clojure/clojure "1.8.0"]
                 [claro "0.2.20" :scope "provided"]
                 [io.github.resilience4j/resilience4j-circuitbreaker "0.10.1"]]
  :profiles {:dev {:dependencies [[org.clojure/test.check "0.9.0"]
                                  [ch.qos.logback/logback-classic "1.2.3"]
                                  [org.slf4j/slf4j-api "1.7.25"]]}
             :codox {:dependencies [[org.clojure/tools.reader "1.1.0"]
                                    [codox-theme-rdash "0.1.2"]]
                     :plugins [[lein-codox "0.10.3"]]
                     :codox {:project {:name "claro/circuit-breaker"}
                             :metadata {:doc/format :markdown}
                             :themes [:rdash]
                             :source-paths ["src"]
                             :source-uri "https://github.com/xsc/claro.circuit-breaker/blob/master/{filepath}#L{line}"
                             :namespaces [claro.middleware.circuit-breaker]}}
             :coverage {:plugins [[lein-cloverage "1.0.9"]]
                        :dependencies [[org.clojure/tools.reader "1.1.0"]
                                       [riddley "0.1.14"]]}}
  :aliases {"codox"     ["with-profile" "codox,dev" "codox"]
            "cloverage" ["with-profile" "+coverage" "cloverage"]}
  :pedantic? :abort)
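;; Added usage note (assumption, based on the :aliases above): documentation and
;; coverage are meant to be run through the aliases, i.e. `lein codox` and
;; `lein cloverage`, rather than the bare plugin tasks. Consumers of the published
;; artifact would require the namespace listed under :namespaces, e.g.:
(comment
  (require '[claro.middleware.circuit-breaker :as circuit-breaker]))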
;; Source: c#-metabase/test/metabase/test/data/h2.clj (repo: hanakhry/Crime_Admin, 0 stars)
(ns metabase.test.data.h2
  "Code for creating / destroying an H2 database from a `DatabaseDefinition`."
  (:require [clojure.string :as str]
            [metabase.db :as mdb]
            [metabase.db.spec :as dbspec]
            [metabase.driver.sql.util :as sql.u]
            [metabase.models.database :refer [Database]]
            [metabase.test.data.impl :as data.impl]
            [metabase.test.data.interface :as tx]
            [metabase.test.data.sql :as sql.tx]
            [metabase.test.data.sql-jdbc :as sql-jdbc.tx]
            [metabase.test.data.sql-jdbc.execute :as execute]
            [metabase.test.data.sql-jdbc.load-data :as load-data]
            [metabase.test.data.sql-jdbc.spec :as spec]
            [toucan.db :as db]))

(sql-jdbc.tx/add-test-extensions! :h2)

(defonce ^:private h2-test-dbs-created-by-this-instance (atom #{}))

(defn- destroy-test-database-if-created-by-another-instance!
  "For H2, test databases are all in-memory, which don't work if they're saved from a different REPL session or the
  like. So delete any 'stale' in-mem DBs from the application DB when someone calls `get-or-create-database!` as
  needed."
  [database-name]
  (when-not (contains? @h2-test-dbs-created-by-this-instance database-name)
    (locking h2-test-dbs-created-by-this-instance
      (when-not (contains? @h2-test-dbs-created-by-this-instance database-name)
        (mdb/setup-db!) ; if not already setup
        (db/delete! Database :engine "h2", :name database-name)
        (swap! h2-test-dbs-created-by-this-instance conj database-name)))))

(defmethod data.impl/get-or-create-database! :h2
  [driver dbdef]
  (let [{:keys [database-name], :as dbdef} (tx/get-dataset-definition dbdef)]
    (destroy-test-database-if-created-by-another-instance! database-name)
    ((get-method data.impl/get-or-create-database! :default) driver dbdef)))

(doseq [[base-type database-type] {:type/BigInteger     "BIGINT"
                                   :type/Boolean        "BOOL"
                                   :type/Date           "DATE"
                                   :type/DateTime       "DATETIME"
                                   :type/DateTimeWithTZ "TIMESTAMP WITH TIME ZONE"
                                   :type/Decimal        "DECIMAL"
                                   :type/Float          "FLOAT"
                                   :type/Integer        "INTEGER"
                                   :type/Text           "VARCHAR"
                                   :type/Time           "TIME"}]
  (defmethod sql.tx/field-base-type->sql-type [:h2 base-type] [_ _] database-type))

(defmethod tx/dbdef->connection-details :h2
  [_ context dbdef]
  {:db (str "mem:" (tx/escaped-database-name dbdef)
            (when (= context :db)
              ;; Return details with the GUEST user added so SQL queries are allowed.
              ";USER=GUEST;PASSWORD=guest"))})

(defmethod sql.tx/pk-sql-type :h2 [_] "BIGINT AUTO_INCREMENT")

(defmethod sql.tx/pk-field-name :h2 [_] "ID")

(defmethod sql.tx/drop-db-if-exists-sql :h2 [& _] nil)

(defmethod sql.tx/create-db-sql :h2
  [& _]
  (str
   ;; We don't need to actually do anything to create a database here. Just disable the undo log (i.e., transactions)
   ;; for this DB session because the bulk operations to load data don't need to be atomic
   "SET UNDO_LOG = 0;\n"
   ;; Create a non-admin account 'GUEST' which will be used from here on out
   "CREATE USER IF NOT EXISTS GUEST PASSWORD 'guest';\n"
   ;; Set DB_CLOSE_DELAY here because only admins are allowed to do it, so we can't set it via the connection string.
   ;; Set it to -1 (no automatic closing)
   "SET DB_CLOSE_DELAY -1;"))

(defmethod sql.tx/create-table-sql :h2
  [driver dbdef {:keys [table-name], :as tabledef}]
  (str
   ((get-method sql.tx/create-table-sql :sql-jdbc/test-extensions) driver dbdef tabledef)
   ";\n"
   ;; Grant the GUEST account r/w permissions for this table
   (format "GRANT ALL ON %s TO GUEST;" (sql.u/quote-name driver :table (tx/format-name driver table-name)))))

(defmethod tx/has-questionable-timezone-support? :h2 [_] true)

(defmethod tx/format-name :h2 [_ s] (str/upper-case s))

(defmethod tx/id-field-type :h2 [_] :type/BigInteger)

(defmethod tx/aggregate-column-info :h2
  ([driver ag-type]
   ((get-method tx/aggregate-column-info ::tx/test-extensions) driver ag-type))

  ([driver ag-type field]
   (merge
    ((get-method tx/aggregate-column-info ::tx/test-extensions) driver ag-type field)
    (when (= ag-type :sum)
      {:base_type :type/BigInteger}))))

(defmethod execute/execute-sql! :h2
  [driver _ dbdef sql]
  ;; we always want to use the 'server' context when execute-sql! is called (never try to connect as GUEST, since
  ;; we're not giving them privileges to create tables / etc.)
  ((get-method execute/execute-sql! :sql-jdbc/test-extensions) driver :server dbdef sql))

;; Don't use the h2 driver implementation, which makes the connection string read-only & if-exists only
(defmethod spec/dbdef->spec :h2
  [driver context dbdef]
  (dbspec/h2 (tx/dbdef->connection-details driver context dbdef)))

(defmethod load-data/load-data! :h2 [& args]
  (apply load-data/load-data-all-at-once! args))

(defmethod sql.tx/inline-column-comment-sql :h2 [& args]
  (apply sql.tx/standard-inline-column-comment-sql args))

(defmethod sql.tx/standalone-table-comment-sql :h2 [& args]
  (apply sql.tx/standard-standalone-table-comment-sql args))
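;; Added REPL sketch (hypothetical values, not part of the original file): what the
;; connection details produced by `tx/dbdef->connection-details` look like for the :db
;; context. `some-dbdef` is a placeholder; the exact string depends on
;; `tx/escaped-database-name`, so this is illustrative only.
(comment
  (tx/dbdef->connection-details :h2 :db some-dbdef)
  ;; => {:db "mem:test-data;USER=GUEST;PASSWORD=guest"}
  )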
37787
(ns metabase.test.data.h2 "Code for creating / destroying an H2 database from a `DatabaseDefinition`." (:require [clojure.string :as str] [metabase.db :as mdb] [metabase.db.spec :as dbspec] [metabase.driver.sql.util :as sql.u] [metabase.models.database :refer [Database]] [metabase.test.data.impl :as data.impl] [metabase.test.data.interface :as tx] [metabase.test.data.sql :as sql.tx] [metabase.test.data.sql-jdbc :as sql-jdbc.tx] [metabase.test.data.sql-jdbc.execute :as execute] [metabase.test.data.sql-jdbc.load-data :as load-data] [metabase.test.data.sql-jdbc.spec :as spec] [toucan.db :as db])) (sql-jdbc.tx/add-test-extensions! :h2) (defonce ^:private h2-test-dbs-created-by-this-instance (atom #{})) (defn- destroy-test-database-if-created-by-another-instance! "For H2, test databases are all in-memory, which don't work if they're saved from a different REPL session or the like. So delete any 'stale' in-mem DBs from the application DB when someone calls `get-or-create-database!` as needed." [database-name] (when-not (contains? @h2-test-dbs-created-by-this-instance database-name) (locking h2-test-dbs-created-by-this-instance (when-not (contains? @h2-test-dbs-created-by-this-instance database-name) (mdb/setup-db!) ; if not already setup (db/delete! Database :engine "h2", :name database-name) (swap! h2-test-dbs-created-by-this-instance conj database-name))))) (defmethod data.impl/get-or-create-database! :h2 [driver dbdef] (let [{:keys [database-name], :as dbdef} (tx/get-dataset-definition dbdef)] (destroy-test-database-if-created-by-another-instance! database-name) ((get-method data.impl/get-or-create-database! :default) driver dbdef))) (doseq [[base-type database-type] {:type/BigInteger "BIGINT" :type/Boolean "BOOL" :type/Date "DATE" :type/DateTime "DATETIME" :type/DateTimeWithTZ "TIMESTAMP WITH TIME ZONE" :type/Decimal "DECIMAL" :type/Float "FLOAT" :type/Integer "INTEGER" :type/Text "VARCHAR" :type/Time "TIME"}] (defmethod sql.tx/field-base-type->sql-type [:h2 base-type] [_ _] database-type)) (defmethod tx/dbdef->connection-details :h2 [_ context dbdef] {:db (str "mem:" (tx/escaped-database-name dbdef) (when (= context :db) ;; Return details with the GUEST user added so SQL queries are ;; allowed. ";USER=GUEST;PASSWORD=<PASSWORD>"))}) (defmethod sql.tx/pk-sql-type :h2 [_] "BIGINT AUTO_INCREMENT") (defmethod sql.tx/pk-field-name :h2 [_] "ID") (defmethod sql.tx/drop-db-if-exists-sql :h2 [& _] nil) (defmethod sql.tx/create-db-sql :h2 [& _] (str ;; We don't need to actually do anything to create a database here. Just disable the undo ;; log (i.e., transactions) for this DB session because the bulk operations to load data don't need to be atomic "SET UNDO_LOG = 0;\n" ;; Create a non-admin account 'GUEST' which will be used from here on out "CREATE USER IF NOT EXISTS GUEST PASSWORD '<PASSWORD>';\n" ;; Set DB_CLOSE_DELAY here because only admins are allowed to do it, so we can't set it via the connection string. ;; Set it to to -1 (no automatic closing) "SET DB_CLOSE_DELAY -1;")) (defmethod sql.tx/create-table-sql :h2 [driver dbdef {:keys [table-name], :as tabledef}] (str ((get-method sql.tx/create-table-sql :sql-jdbc/test-extensions) driver dbdef tabledef) ";\n" ;; Grant the GUEST account r/w permissions for this table (format "GRANT ALL ON %s TO GUEST;" (sql.u/quote-name driver :table (tx/format-name driver table-name))))) (defmethod tx/has-questionable-timezone-support? 
:h2 [_] true) (defmethod tx/format-name :h2 [_ s] (str/upper-case s)) (defmethod tx/id-field-type :h2 [_] :type/BigInteger) (defmethod tx/aggregate-column-info :h2 ([driver ag-type] ((get-method tx/aggregate-column-info ::tx/test-extensions) driver ag-type)) ([driver ag-type field] (merge ((get-method tx/aggregate-column-info ::tx/test-extensions) driver ag-type field) (when (= ag-type :sum) {:base_type :type/BigInteger})))) (defmethod execute/execute-sql! :h2 [driver _ dbdef sql] ;; we always want to use 'server' context when execute-sql! is called (never ;; try connect as GUEST, since we're not giving them priviledges to create ;; tables / etc) ((get-method execute/execute-sql! :sql-jdbc/test-extensions) driver :server dbdef sql)) ;; Don't use the h2 driver implementation, which makes the connection string read-only & if-exists only (defmethod spec/dbdef->spec :h2 [driver context dbdef] (dbspec/h2 (tx/dbdef->connection-details driver context dbdef))) (defmethod load-data/load-data! :h2 [& args] (apply load-data/load-data-all-at-once! args)) (defmethod sql.tx/inline-column-comment-sql :h2 [& args] (apply sql.tx/standard-inline-column-comment-sql args)) (defmethod sql.tx/standalone-table-comment-sql :h2 [& args] (apply sql.tx/standard-standalone-table-comment-sql args))
true
(ns metabase.test.data.h2 "Code for creating / destroying an H2 database from a `DatabaseDefinition`." (:require [clojure.string :as str] [metabase.db :as mdb] [metabase.db.spec :as dbspec] [metabase.driver.sql.util :as sql.u] [metabase.models.database :refer [Database]] [metabase.test.data.impl :as data.impl] [metabase.test.data.interface :as tx] [metabase.test.data.sql :as sql.tx] [metabase.test.data.sql-jdbc :as sql-jdbc.tx] [metabase.test.data.sql-jdbc.execute :as execute] [metabase.test.data.sql-jdbc.load-data :as load-data] [metabase.test.data.sql-jdbc.spec :as spec] [toucan.db :as db])) (sql-jdbc.tx/add-test-extensions! :h2) (defonce ^:private h2-test-dbs-created-by-this-instance (atom #{})) (defn- destroy-test-database-if-created-by-another-instance! "For H2, test databases are all in-memory, which don't work if they're saved from a different REPL session or the like. So delete any 'stale' in-mem DBs from the application DB when someone calls `get-or-create-database!` as needed." [database-name] (when-not (contains? @h2-test-dbs-created-by-this-instance database-name) (locking h2-test-dbs-created-by-this-instance (when-not (contains? @h2-test-dbs-created-by-this-instance database-name) (mdb/setup-db!) ; if not already setup (db/delete! Database :engine "h2", :name database-name) (swap! h2-test-dbs-created-by-this-instance conj database-name))))) (defmethod data.impl/get-or-create-database! :h2 [driver dbdef] (let [{:keys [database-name], :as dbdef} (tx/get-dataset-definition dbdef)] (destroy-test-database-if-created-by-another-instance! database-name) ((get-method data.impl/get-or-create-database! :default) driver dbdef))) (doseq [[base-type database-type] {:type/BigInteger "BIGINT" :type/Boolean "BOOL" :type/Date "DATE" :type/DateTime "DATETIME" :type/DateTimeWithTZ "TIMESTAMP WITH TIME ZONE" :type/Decimal "DECIMAL" :type/Float "FLOAT" :type/Integer "INTEGER" :type/Text "VARCHAR" :type/Time "TIME"}] (defmethod sql.tx/field-base-type->sql-type [:h2 base-type] [_ _] database-type)) (defmethod tx/dbdef->connection-details :h2 [_ context dbdef] {:db (str "mem:" (tx/escaped-database-name dbdef) (when (= context :db) ;; Return details with the GUEST user added so SQL queries are ;; allowed. ";USER=GUEST;PASSWORD=PI:PASSWORD:<PASSWORD>END_PI"))}) (defmethod sql.tx/pk-sql-type :h2 [_] "BIGINT AUTO_INCREMENT") (defmethod sql.tx/pk-field-name :h2 [_] "ID") (defmethod sql.tx/drop-db-if-exists-sql :h2 [& _] nil) (defmethod sql.tx/create-db-sql :h2 [& _] (str ;; We don't need to actually do anything to create a database here. Just disable the undo ;; log (i.e., transactions) for this DB session because the bulk operations to load data don't need to be atomic "SET UNDO_LOG = 0;\n" ;; Create a non-admin account 'GUEST' which will be used from here on out "CREATE USER IF NOT EXISTS GUEST PASSWORD 'PI:PASSWORD:<PASSWORD>END_PI';\n" ;; Set DB_CLOSE_DELAY here because only admins are allowed to do it, so we can't set it via the connection string. ;; Set it to to -1 (no automatic closing) "SET DB_CLOSE_DELAY -1;")) (defmethod sql.tx/create-table-sql :h2 [driver dbdef {:keys [table-name], :as tabledef}] (str ((get-method sql.tx/create-table-sql :sql-jdbc/test-extensions) driver dbdef tabledef) ";\n" ;; Grant the GUEST account r/w permissions for this table (format "GRANT ALL ON %s TO GUEST;" (sql.u/quote-name driver :table (tx/format-name driver table-name))))) (defmethod tx/has-questionable-timezone-support? 
:h2 [_] true) (defmethod tx/format-name :h2 [_ s] (str/upper-case s)) (defmethod tx/id-field-type :h2 [_] :type/BigInteger) (defmethod tx/aggregate-column-info :h2 ([driver ag-type] ((get-method tx/aggregate-column-info ::tx/test-extensions) driver ag-type)) ([driver ag-type field] (merge ((get-method tx/aggregate-column-info ::tx/test-extensions) driver ag-type field) (when (= ag-type :sum) {:base_type :type/BigInteger})))) (defmethod execute/execute-sql! :h2 [driver _ dbdef sql] ;; we always want to use 'server' context when execute-sql! is called (never ;; try connect as GUEST, since we're not giving them priviledges to create ;; tables / etc) ((get-method execute/execute-sql! :sql-jdbc/test-extensions) driver :server dbdef sql)) ;; Don't use the h2 driver implementation, which makes the connection string read-only & if-exists only (defmethod spec/dbdef->spec :h2 [driver context dbdef] (dbspec/h2 (tx/dbdef->connection-details driver context dbdef))) (defmethod load-data/load-data! :h2 [& args] (apply load-data/load-data-all-at-once! args)) (defmethod sql.tx/inline-column-comment-sql :h2 [& args] (apply sql.tx/standard-inline-column-comment-sql args)) (defmethod sql.tx/standalone-table-comment-sql :h2 [& args] (apply sql.tx/standard-standalone-table-comment-sql args))
[ { "context": "creen name\")\n (control password-field :pass \"Password\")\n (control password-field :pass1 \"Retype Pa", "end": 995, "score": 0.9947616457939148, "start": 987, "tag": "PASSWORD", "value": "Password" }, { "context": "\"Password\")\n (control password-field :pass1 \"Retype Password\")\n (submit-button \"Create Account\"))))\n\n(def", "end": 1051, "score": 0.9954484105110168, "start": 1036, "tag": "PASSWORD", "value": "Retype Password" }, { "context": "-success \"To login enter username: admin password: admin\"]\n (cond\n (has-value? error)\n [", "end": 1683, "score": 0.9953172206878662, "start": 1678, "tag": "PASSWORD", "value": "admin" }, { "context": "ating-label-form-group.controls\n [:label \"Username\"]\n [:input.form-control {:type \"text\" :id", "end": 1948, "score": 0.9991254806518555, "start": 1940, "tag": "USERNAME", "value": "Username" }, { "context": "\n [:input.form-control {:type \"text\" :id \"username\" :name \"id\" :placeholder \"Username\" :required \"re", "end": 2008, "score": 0.9994188547134399, "start": 2000, "tag": "USERNAME", "value": "username" }, { "context": "pe \"text\" :id \"username\" :name \"id\" :placeholder \"Username\" :required \"required\" :data-validation-required-m", "end": 2043, "score": 0.9990816116333008, "start": 2035, "tag": "USERNAME", "value": "Username" }, { "context": "ata-validation-required-message \"Please enter your username.\"}]\n [:p.help-block.text-danger]]]\n ", "end": 2127, "score": 0.955468475818634, "start": 2119, "tag": "USERNAME", "value": "username" }, { "context": "ssword\" :id \"password\" :name \"pass\" :placeholder \"Password\" :required \"required\" :data-validation-required-m", "end": 2399, "score": 0.8268811702728271, "start": 2391, "tag": "PASSWORD", "value": "Password" }, { "context": "ired\"])\n (rule (has-value? pass)\n [:pass \"password is required\"])\n (rule (and user (crypt/compare pass (:pass", "end": 2904, "score": 0.85701984167099, "start": 2884, "tag": "PASSWORD", "value": "password is required" }, { "context": " (crypt/compare pass (:pass user)))\n [:pass \"invalid password\"])\n (if (errors? :id :pass)\n (login-page)\n ", "end": 2993, "score": 0.9937808513641357, "start": 2977, "tag": "PASSWORD", "value": "invalid password" } ]
src/blog/routes/auth.clj
VukTheBeast/Blog
(ns blog.routes.auth
  (:require [compojure.core :refer [defroutes GET POST]]
            [blog.views.admin :as admin]
            [blog.views.layout :as layout]
            [hiccup.form :refer [form-to label text-field password-field submit-button]]
            [noir.response :refer [redirect]]
            [noir.session :as session]
            [blog.routes.helper :as helper]
            [noir.validation :refer [rule errors? has-value? on-error]]
            [noir.util.crypt :as crypt]
            [blog.models.db :as db]))

(defn format-error
  "Format error text as an HTML error paragraph."
  [[error]]
  [:p.error error])

(defn control
  "Format a form field and attach its error notification."
  [field name text]
  (list (on-error name format-error)
        (label name text)
        (field name)
        [:br]))

(defn registration-page []
  (admin/common
   (form-to [:post "/register"]
            (control text-field :id "screen name")
            (control password-field :pass "Password")
            (control password-field :pass1 "Retype Password")
            (submit-button "Create Account"))))

(defn handle-registration [id pass pass1]
  (rule (= pass pass1)
        [:pass "password was not retyped correctly"])
  (if (errors? :pass)
    (registration-page)
    (do
      (db/add-user-record {:id id :pass (crypt/encrypt pass)})
      (redirect "/login"))))

(defn login-page [& [error]]
  (admin/common
   (helper/header "Log In" "Please enter your username and password for the admin panel." "img/login-bg.jpg")
   [:div.container
    [:div.row
     [:div.col-lg-8.col-lg-offset-2.col-md-10.col-md-offset-1
      [:div.alert.alert-success "To login enter username: admin password: admin"]
      (cond
        (has-value? error)
        [:div.alert.alert-danger error])
      [:form#LogInForm {:action "login" :method "POST"}
       [:div.row.control-group
        [:div.form-group.col-xs-12.floating-label-form-group.controls
         [:label "Username"]
         [:input.form-control {:type "text" :id "username" :name "id" :placeholder "Username" :required "required" :data-validation-required-message "Please enter your username."}]
         [:p.help-block.text-danger]]]
       [:div.row.control-group
        [:div.form-group.col-xs-12.floating-label-form-group.controls
         [:label "Password"]
         [:input.form-control {:type "password" :id "password" :name "pass" :placeholder "Password" :required "required" :data-validation-required-message "Please enter your password."}]
         [:p.help-block.text-danger]]]
       [:br]
       [:div#success]
       [:div.row
        [:div.form-group.col-xs-12.floating-label-form-group.controls
         [:button.btn.btn-default.submit "Login"]]]]]]]))

(comment
  (defn handle-login [id pass]
    (let [user (db/get-user id)]
      (rule (has-value? id)
            [:id "screen name is required"])
      (rule (has-value? pass)
            [:pass "password is required"])
      (rule (and user (crypt/compare pass (:pass user)))
            [:pass "invalid password"])
      (if (errors? :id :pass)
        (login-page)
        (do
          (session/put! :user id)
          (redirect "/"))))))

(defn handle-login [id pass]
  (cond
    (empty? id) (login-page "screen name is required")
    (empty? pass) (login-page "password is required")
    (and (= "admin" id) (= "admin" pass)) (do (session/put! :user id)
                                              (redirect "/admin"))
    :else (login-page "authentication failed")))

(defroutes auth-routes
  (GET "/register" [_] (registration-page))
  (POST "/register" [id pass pass1] (handle-registration id pass pass1))
  (GET "/login" [] (login-page))
  (POST "/login" [id pass] (handle-login id pass)))
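;; A minimal REPL sketch of the credential handling above, assuming
;; noir.util.crypt wraps bcrypt-style hashing (encrypt returns a salted hash,
;; compare checks a candidate against it). The values are illustrative only;
;; the `rule`/`errors?` calls in the handlers additionally expect to run inside
;; noir's validation middleware, not at a bare REPL.
(comment
  (let [hashed (crypt/encrypt "s3cret")]
    [(crypt/compare "s3cret" hashed)    ;=> true
     (crypt/compare "wrong" hashed)])   ;=> false

  ;; A successful registration stores {:id "alice" :pass <hash>} via
  ;; db/add-user-record and then redirects to "/login".
  )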
[ { "context": "\"+egg +bacon -spam\" \"id:\\\"2019/12\\\"\" \"applicant:\\\"[email protected]\\\"\"\n \"resource:\\\"urn:fi:abcd\\\"\"]\n (", "end": 2415, "score": 0.9999078512191772, "start": 2398, "tag": "EMAIL", "value": "[email protected]" } ]
src/cljs/rems/search.cljs
ossilva/rems
(ns rems.search (:require [reagent.core :as r] [rems.atoms :refer [close-symbol]] [rems.spinner :as spinner] [rems.text :refer [text]] [rems.util :refer [fetch focus-when-collapse-opened]])) (defn search-field [{:keys [id on-search searching? info]}] (let [input-value (r/atom "") input-element (atom nil) collapse-id "application-search-tips-collapse"] (fn [{:keys [id on-search searching? info]}] [:<> [:div.search-field [:label.mr-1 {:for id} (text :t.search/search)] [:div.input-group.mr-2.w-50 [:input.form-control {:id id :type :text :value @input-value :ref (fn [element] (reset! input-element element)) :on-change (fn [event] (let [value (-> event .-target .-value)] (reset! input-value value) (on-search value)))}] (when-not (= "" @input-value) [:div.input-group-append [:button.btn.btn-outline-secondary {:id (str id "-clear") :type :button :aria-label (text :t.search/clear-search) ;; override the custom font-size from .btn which breaks .input-group :style {:font-size "inherit"} :on-click (fn [] (reset! input-value "") (on-search "") (.focus @input-element))} [close-symbol]]])] (when info [:a.application-search-tips.btn.btn-link.collapsed {:data-toggle "collapse" :href (str "#" collapse-id) :aria-label (text :t.search/example-searches) :aria-expanded "false" :aria-controls collapse-id} [:i.fa.fa-question-circle]]) (when searching? [spinner/small])] (when info [:div.search-tips.collapse {:id collapse-id :ref focus-when-collapse-opened :tab-index "-1"} info])]))) (defn- application-search-info [] ; TODO: this should probably be almost completely in localized text [:span (text :t.search/example-searches) ": " (->> ["supercalifra*" "+egg +bacon -spam" "id:\"2019/12\"" "applicant:\"[email protected]\"" "resource:\"urn:fi:abcd\""] (map (fn [example] [:tt.example-search example])) (interpose ", ") (into [:<>])) " " [:a {:href "https://github.com/CSCfi/rems/blob/master/docs/search.md"} (text :t.search/learn-more)]]) (defn application-search-field [opts] [search-field (assoc opts :info [application-search-info])])
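;; A minimal usage sketch for the component above. The parent view name and the
;; on-search handler are hypothetical; a real caller would typically dispatch a
;; re-frame event from :on-search and drive :searching? from a subscription.
(comment
  (defn applications-page []
    [:div
     [application-search-field
      {:id "application-search"
       :searching? false
       :on-search (fn [query]
                    (js/console.log "search query:" query))}]]))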
[ { "context": " \"User-Agent\" \"CrossRefDOICheckerBot ([email protected])\"}}))]\n (let [doi-urls (set (conj (-> @resul", "end": 7152, "score": 0.9998480677604675, "start": 7135, "tag": "EMAIL", "value": "[email protected]" }, { "context": " \"User-Agent\" \"CrossrefEventDataBot ([email protected])\"}\n depth 0\n url url]\n (if (> ", "end": 9361, "score": 0.9998601078987122, "start": 9344, "tag": "EMAIL", "value": "[email protected]" } ]
src/event_data_reverse/lookup.clj
VladimirAlexiev/event-data-reverse
(ns event-data-reverse.lookup (:require [event-data-reverse.structured-extraction :as structured-extraction] [event-data-reverse.unstructured-extraction :as unstructured-extraction]) (:require [crossref.util.doi :as crdoi]) (:require [clojure.string :as string]) (:require [clojure.tools.logging :refer [info]]) (:require [net.cgrand.enlive-html :as html] [cemerick.url :as cemerick-url] [robert.bruce :refer [try-try-again]] [org.httpkit.client :as http] [clojure.data.json :as json]) (:import [java.net URL URI URLEncoder URLDecoder])) (def whole-doi-re #"^10\.\d{4,9}/[^\s]+$") (def doi-re #"(10\.\d{4,9}/[^\s]+)") (def doi-encoded-re #"10\.\d{4,9}%2[fF][^\s]+") ; TODO not entirely sure what the grammar of ShortDOI is but this seems to fit. ; Unfortunately this also matches the first half of a DOI. ; Match the shortcut URL (e.g. "doi.org/aabbe") or the handle (e.g. "10/aabbe"). ; Locate a shortDOI in its natural habitat. (def shortdoi-find-re #"(?:(?:(?:dx.)?doi.org/)|10/)(?:info:doi/|urn:|doi:)?([a-zA-Z0-9]+)") ; The shortDOI itself is just an alphanumeric string, which isn't particularly disinctive. (def shortdoi-re #"[a-zA-Z0-9]+") ; https://en.wikipedia.org/wiki/Publisher_Item_Identifier ; Used by Elsevier and others. (def pii-re #"[SB][0-9XB]{16}") ; Helpers (defn try-url "Try to construct a URL." [text] (try (new URL text) (catch Exception _ nil))) (defn try-hostname "Try to get a hostname from a URL string." [text] (try (.getHost (new URL text)) (catch Exception e nil))) (defn doi-from-url "If a URL is a DOI, return the non-URL version of the DOI." [text] (when-let [url (try-url text)] (when (#{"doi.org" "dx.doi.org"} (.getHost url)) (.substring (or (.getPath url) "") 1)))) (defn matches-doi? "Does this look like a DOI?" [input] (and (not (string/blank? input)) (re-matches whole-doi-re input))) (defn remove-doi-colon-prefix "Turn 'doi:10.5555/12346789' into '10.5555/12345678'" [input] (when-let [match (re-matches #"^[a-zA-Z ]+: ?(10\.\d+/.*)$" input)] (.toLowerCase (second match)))) (defn resolve-doi "Resolve a DOI or ShortDOI, expressed as not-URL form. May or may not be URLEscaped. Return the DOI." [doi] (let [response @(try-try-again {:sleep 500 :tries 2} #(http/get (str "http://doi.org/" doi) {:follow-redirects false})) status (:status response) redirect-header (-> response :headers :location)] (cond (:error response) nil ; If it's a shortDOI it will redirect to the real one. Use this. (= (try-hostname redirect-header) "doi.org") (crdoi/non-url-doi redirect-header) ; If it's a real DOI it will return a 30x. (= (quot status 100) 3) (crdoi/non-url-doi doi) ; If it's not anything then don't return anything. :default nil))) (defn resolve-doi-maybe-escaped "Try to `resolve-doi`, escaped and unescaped." [doi] (if-let [unescaped (resolve-doi doi)] unescaped (when-let [escaped (resolve-doi (URLEncoder/encode doi "UTF-8"))] (URLDecoder/decode escaped "UTF-8")))) (def max-drops 5) (defn validate-doi "For a given suspected DOI or shortDOI, validate that it exists against the API, possibly modifying it to get there." [doi] (loop [i 0 doi doi] ; Terminate if we're at the end of clipping things off or the DOI no longer looks like an DOI. ; The API will return 200 for e.g. "10.", so don't try and feed it things like that. (if (or (= i max-drops) (nil? doi) (< (.length doi) i) ; The shortDOI regular expression is rather liberal, but it is what it is. (not (or (re-matches doi-re doi) (re-matches shortdoi-re doi)))) ; Stop recursion. nil ; Or try this substring. 
(if-let [clean-doi (resolve-doi-maybe-escaped doi)] ; resolve-doi may alter the DOI it returns, e.g. resolving a shortDOI to a real DOI or lower-casing. ; We have a working DOI! ; Just check it does't contain a sneaky question mark which would still resolve e.g. http://www.tandfonline.com/doi/full/10.1080/00325481.2016.1186487?platform=hootsuite ; If there is a question mark, try removing it to see if it still works. (if (.contains clean-doi "?") (let [before-q (first (.split clean-doi "\\?"))] (if (resolve-doi before-q) before-q clean-doi)) clean-doi) (recur (inc i) (.substring doi 0 (- (.length doi) 1))))))) (defn first-valid "Return the first valid, possibly cleaned, DOI" [dois] ; using `keep` on a chunked seq would waste time evaluating whole chunk rather than each in sequence. (loop [[doi & tail] dois] (if-let [validated (validate-doi doi)] validated (when (not-empty tail) (recur tail))))) (defn validate-pii "Validate a PII and return the DOI if it's been used as an alternative ID." [pii] (let [result (try-try-again {:sleep 500 :tries 2} #(http/get "http://api.crossref.org/v1/works" {:query-params {:filter (str "alternative-id:" pii)}})) body (-> @result :body json/read-str) items (get-in body ["message" "items"])] ; Only return when there's exactly one match. (when (= 1 (count items)) (get (first items) "DOI")))) (defn strip-extras-from-url "Remove the query string and fragment from a URL" [url] (new URL (.getProtocol url) (.getHost url) (.getPort url) (.getPath url))) (defn url-in-set? "Fairly liberal test for if needle is in set of URLs. Disregards query string and fragment: only useful in context of checking something we suspect to be true." [needle-url haystack-urls] (let [base-needle (strip-extras-from-url needle-url)] (loop [urls haystack-urls] (cond ; Are they the same file? Removes fragment. (.sameFile needle-url (first urls)) true ; Try removing the query string. (= base-needle (strip-extras-from-url (first urls))) true (not-empty (rest urls)) (recur (rest urls)) :default nil)))) (defn url-matches-doi? "Does the given DOI resolve to the given URL? Return DOI if so." [url doi] (info "Check " url " for " doi) (when-let [real-url (try-url url)] (when-let [; URL may have a query string on the end. So construct some candidates. result (try-try-again {:sleep 500 :tries 2} #(http/get (str "http://doi.org/" doi) {:follow-redirects true :throw-exceptions true :socket-timeout 5000 :conn-timeout 5000 :headers {"Referer" "chronograph.crossref.org" "User-Agent" "CrossRefDOICheckerBot ([email protected])"}}))] (let [doi-urls (set (conj (-> @result :trace-redirects) (-> @result :opts :url))) doi-real-urls (keep try-url doi-urls) ; Now we have a java.net.URL that we're concerned with and a set of java.net.URLs that we're trying to match. url-match (url-in-set? real-url doi-real-urls)] (when url-match doi))))) (defn extract-text-fragments-from-html "Extract all text from an HTML document." [input] (string/join " " (-> input (html/html-snippet) (html/select [:body html/text-node]) (html/transform [:script] nil) (html/texts)))) ; DOI Extraction ; Extract things that look like DOIs. Don't validate them yet. (defn extract-doi-from-get-params "If there's a DOI in a get parameter of a URL, find it" [url] (try (let [params (-> url cemerick-url/query->map clojure.walk/keywordize-keys) doi-like (keep (fn [[k v]] (when (re-matches whole-doi-re v) v)) params)] (first doi-like)) ; Some things look like URLs but turn out not to be. 
(catch IllegalArgumentException _ nil))) (defn extract-doi-in-a-hrefs-from-html "Extract all <a href> links from an HTML document. DEPRECATED" [input] (let [links (html/select (html/html-snippet input) [:a]) hrefs (keep #(-> % :attrs :href) links) dois (keep doi-from-url hrefs)] (distinct dois))) (defn extract-potential-dois-from-text "Extract potential DOIs from arbitrary text, including URL-encoded ones which will be unencoded." [text] ; doi-re and short-doi-find-re have a capture group for the actual value we want to find, hence `second`. (let [matches (map second (concat (re-seq doi-re text) (re-seq shortdoi-find-re text))) encoded-matches (map #(URLDecoder/decode %) (re-seq doi-encoded-re text))] (distinct (concat encoded-matches matches)))) (defn extract-potential-piis-from-text [text] (let [matches (re-seq pii-re text)] (distinct matches))) (defn fetch "Fetch the content at a URL, following redirects and accepting cookies." [url] (loop [headers {"Referer" "eventdata.crossref.org" "User-Agent" "CrossrefEventDataBot ([email protected])"} depth 0 url url] (if (> depth 4) nil (let [result @(org.httpkit.client/get url {:follow-redirects false :headers headers :as :text}) cookie (-> result :headers :set-cookie) new-headers (merge headers (when cookie {"Cookie" cookie}))] (condp = (:status result) 200 result ; Weirdly some Nature pages return 401 with the content. http://www.nature.com/nrendo/journal/v10/n9/full/nrendo.2014.114.html 401 result 302 (recur new-headers (inc depth) (-> result :headers :location)) nil))))) (def recognised-content-types "Content types we'll allow ourselves to inspect. Notable in its absence is PDF, for now." #{"text/plain" "text/html"}) (defn resolve-doi-from-url "Take a URL and try to resolve it to find what valid DOI it corresponds to." [url] (info "Attempt resolve-doi-from-url: " url) ; Check if we want to bother with this URL. (when-let [result (try-try-again {:sleep 500 :tries 2} #(fetch url))] (when (recognised-content-types (.getBaseType (new javax.mail.internet.ContentType (.toLowerCase (get-in result [:headers :content-type] "unknown/unknown"))))) (let [body (:body result) doi-from-structured (structured-extraction/from-tags body) doi-from-unstructured (unstructured-extraction/from-webpage body url) ; DOI candidates in order of likelihood candidates (distinct [doi-from-structured doi-from-unstructured]) ; Validate ones that exist. The regular expression might be a bit greedy, so this may chop bits off the end to make it work. valid-doi (first-valid candidates) ; NB not using url-maches-doi, maybe reintroduce. ] (info "Found from structured HTML:", doi-from-structured) (info "Found from unstructured text:" doi-from-unstructured) (info "Valid DOI: " valid-doi) valid-doi)))) ; Combined methods. ; Combine extraction methods and validate. (defn get-embedded-doi-from-string "Get valid DOI that's embedded in a URL (or an arbitrary string) by a number of methods." [url] (info "Attempt get-embedded-doi-from-string") ; First see if cleanly represented it's in the GET params. (if-let [doi (-> url extract-doi-from-get-params validate-doi)] doi ; Next try extracting DOIs and/or PII with regular expressions. (let [potential-dois (extract-potential-dois-from-text url) validated-doi (first-valid potential-dois) potential-alternative-ids (extract-potential-piis-from-text url) validated-pii-doi (->> potential-alternative-ids (keep validate-pii) first)] (if (or validated-doi validated-pii-doi) (or validated-doi validated-pii-doi) ; We may need to do extra things. 
; Try splitting in various places. (let [; e.g. nomos-elibrary.de last-slash (map #(clojure.string/replace % #"^(10\.\d+/(.*))/.*$" "$1") potential-dois) ; e.g. ijorcs.org first-slash (map #(clojure.string/replace % #"^(10\.\d+/(.*?))/.*$" "$1") potential-dois) ; e.g. SICIs semicolon (map #(clojure.string/replace % #"^(10\.\d+/(.*));.*$" "$1") potential-dois) ; eg. JSOR hashchar (map #(clojure.string/replace % #"^(10\.\d+/(.*?))#.*$" "$1") potential-dois) ; e.g. biomedcentral question-mark (map #(clojure.string/replace % #"^(10\.\d+/(.*?))\?.*$" "$1") potential-dois) ; e.g. citeseerx amp-mark (map #(clojure.string/replace % #"^(10\.\d+/(.*?))&.*$" "$1") potential-dois) candidates (distinct (concat first-slash last-slash semicolon hashchar question-mark amp-mark)) ; Lots of these produce duplicates. distinct-candidates (distinct candidates) ; Now take the first one that we could validate. doi (first-valid distinct-candidates)] doi))))) (defn cleanup-doi "Take a URL or DOI or something that could be a DOI, return the valid DOI if it is one." [potential-doi] (info "Attempt cleanup-doi") (when (or (re-matches whole-doi-re potential-doi) (re-find doi-re potential-doi) (re-find doi-encoded-re potential-doi) (re-matches shortdoi-find-re potential-doi)) (let [normalized-doi (crdoi/non-url-doi potential-doi) doi-colon-prefixed-doi (remove-doi-colon-prefix potential-doi)] ; Find the first operation that produces an output that looks like a DOI. (first-valid [potential-doi normalized-doi doi-colon-prefixed-doi])))) ; External functions. (defn lookup "Lookup a DOI from an input. Return only valid DOI." [input ] ; Try to treat it as a DOI in a particular encoding. (if-let [cleaned-valid-doi (cleanup-doi input)] [:cleaned cleaned-valid-doi] ; Try to treat it as a Publisher URL that has a DOI in the URL, or a string with a DOI in it somehow. (if-let [embedded-valid-doi (validate-doi (get-embedded-doi-from-string input))] [:embedded embedded-valid-doi] ; Try to treat it as a Publisher URL that must be fetched to extract its DOI. (if-let [resolved-valid-doi (when-let [url (try-url input)] (resolve-doi-from-url input))] [:resolved resolved-valid-doi] nil))))
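;; A minimal REPL sketch of the public entry point above. The inputs are
;; illustrative; actual results depend on live doi.org and api.crossref.org
;; lookups, so the indicated return values only show the shapes to expect.
(comment
  ;; A bare or prefixed DOI is cleaned and validated directly.
  (lookup "10.5555/12345678")      ;=> [:cleaned "10.5555/12345678"] if it resolves, else nil
  (lookup "doi:10.5555/12345678")  ;=> [:cleaned "10.5555/12345678"] if it resolves, else nil

  ;; A publisher URL with the DOI embedded in the query string or path.
  (lookup "http://example.com/article?doi=10.5555%2F12345678")  ;=> [:embedded ...] or nil

  ;; A landing page with no DOI in the URL is fetched and scraped.
  (lookup "http://example.com/some-landing-page")                ;=> [:resolved ...] or nil
  )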
[ { "context": " kandan-client (kandan-api/make-client api-key \"http://localhost:3000/remote/faye\")\n channels (:kandan-channels uti", "end": 3565, "score": 0.9385260343551636, "start": 3532, "tag": "KEY", "value": "http://localhost:3000/remote/faye" } ]
src/omchaya/core.cljs
sgrove/omchaya
66
(ns omchaya.core (:require [cljs.core.async :as async :refer [>! <! alts! chan sliding-buffer put! close!]] [clojure.string :as string] [dommy.core :as dommy] [omchaya.api.mock :as api] [omchaya.components.app :as app] [omchaya.controllers.controls :as controls-con] [omchaya.controllers.post-controls :as controls-pcon] [omchaya.controllers.api :as api-con] [omchaya.controllers.post-api :as api-pcon] [omchaya.datetime :as dt] [omchaya.api.kandan :as kandan-api] [omchaya.mock-data :as mock-data] [omchaya.routes :as routes] [omchaya.useful :as useful :refer [ffilter]] [omchaya.utils :as utils :refer [mprint]] [om.core :as om :include-macros true] [om.dom :as dom :include-macros true]) (:require-macros [cljs.core.async.macros :as am :refer [go go-loop alt!]]) (:use-macros [dommy.macros :only [sel sel1]])) (enable-console-print!) (def controls-ch (chan)) (def api-ch (chan)) (def app-state (atom (mock-data/initial-state {:controls controls-ch :api api-ch}))) ;; :state-history is a vector of vectors, where the inner ;; vector is the same shape as the messages played (def history (atom [])) (defn filtered-message? [message] (get #{:credit-card-updated} message)) (defn update-history! [history channel message] (let [m (first message) record (if (filtered-message? m) m message)] (swap! history conj [channel record]))) (defn main [target state] (let [comms (:comms @state) history (or history (atom []))] (routes/define-routes! state (.getElementById js/document "history-container")) (om/root app/app state {:target target :opts {:comms comms}}) (go (while true (alt! (:controls comms) ([v] (when (:log-channels? utils/initial-player-state) (mprint "Controls Verbose: " (pr-str v))) (let [previous-state @state] (update-history! history :controls v) (swap! state (partial controls-con/control-event target (first v) (second v))) (controls-pcon/post-control-event! target (first v) (second v) previous-state @state))) (:api comms) ([v] (when (:log-channels? utils/initial-player-state) (mprint "API Verbose: " (pr-str v))) (let [previous-state @state] (update-history! history :api v) (swap! state (partial api-con/api-event target (first v) (second v))) (api-pcon/post-api-event! target (first v) (second v) previous-state @state))) ;; Capture the current history for playback in the absence ;; of a server to store it (async/timeout 30000) (mprint (pr-str @history))))))) (defn setup! [] (let [comms (:comms @app-state)] (main (. js/document (getElementById "app")) app-state) (when (:restore-state? utils/initial-query-map) (put! (:controls comms) [:state-restored])) (when (:kandan-client? utils/initial-query-map) (let [api-key (:kandan-api-key utils/initial-query-map) kandan-client (kandan-api/make-client api-key "http://localhost:3000/remote/faye") channels (:kandan-channels utils/initial-query-map)] (put! (:controls comms) [:api-key-updated api-key]) (doseq [channel channels] (kandan-api/subscribe! kandan-client (str "/channels/" channel) (:api comms))))))) (set! (.-onload js/window) setup!) ;; Local dev tooling (defn ^:export send-async-message [ch-name message data] (put! (get-in @app-state [:comms (keyword ch-name)]) [(keyword message) (js->clj data :keywordize-keys true)])) (defn ^:export remove-channel! [channel-id] (put! (get-in @app-state [:comms :controls]) [:channel-remotely-destroyed channel-id])) (comment ;; Uncomment to have random messages send (js/setInterval #(api/random-message (get-in @app-state [:comms :api]) (rand-nth (keys (:channels @app-state)))) 2500))
45608
(ns omchaya.core (:require [cljs.core.async :as async :refer [>! <! alts! chan sliding-buffer put! close!]] [clojure.string :as string] [dommy.core :as dommy] [omchaya.api.mock :as api] [omchaya.components.app :as app] [omchaya.controllers.controls :as controls-con] [omchaya.controllers.post-controls :as controls-pcon] [omchaya.controllers.api :as api-con] [omchaya.controllers.post-api :as api-pcon] [omchaya.datetime :as dt] [omchaya.api.kandan :as kandan-api] [omchaya.mock-data :as mock-data] [omchaya.routes :as routes] [omchaya.useful :as useful :refer [ffilter]] [omchaya.utils :as utils :refer [mprint]] [om.core :as om :include-macros true] [om.dom :as dom :include-macros true]) (:require-macros [cljs.core.async.macros :as am :refer [go go-loop alt!]]) (:use-macros [dommy.macros :only [sel sel1]])) (enable-console-print!) (def controls-ch (chan)) (def api-ch (chan)) (def app-state (atom (mock-data/initial-state {:controls controls-ch :api api-ch}))) ;; :state-history is a vector of vectors, where the inner ;; vector is the same shape as the messages played (def history (atom [])) (defn filtered-message? [message] (get #{:credit-card-updated} message)) (defn update-history! [history channel message] (let [m (first message) record (if (filtered-message? m) m message)] (swap! history conj [channel record]))) (defn main [target state] (let [comms (:comms @state) history (or history (atom []))] (routes/define-routes! state (.getElementById js/document "history-container")) (om/root app/app state {:target target :opts {:comms comms}}) (go (while true (alt! (:controls comms) ([v] (when (:log-channels? utils/initial-player-state) (mprint "Controls Verbose: " (pr-str v))) (let [previous-state @state] (update-history! history :controls v) (swap! state (partial controls-con/control-event target (first v) (second v))) (controls-pcon/post-control-event! target (first v) (second v) previous-state @state))) (:api comms) ([v] (when (:log-channels? utils/initial-player-state) (mprint "API Verbose: " (pr-str v))) (let [previous-state @state] (update-history! history :api v) (swap! state (partial api-con/api-event target (first v) (second v))) (api-pcon/post-api-event! target (first v) (second v) previous-state @state))) ;; Capture the current history for playback in the absence ;; of a server to store it (async/timeout 30000) (mprint (pr-str @history))))))) (defn setup! [] (let [comms (:comms @app-state)] (main (. js/document (getElementById "app")) app-state) (when (:restore-state? utils/initial-query-map) (put! (:controls comms) [:state-restored])) (when (:kandan-client? utils/initial-query-map) (let [api-key (:kandan-api-key utils/initial-query-map) kandan-client (kandan-api/make-client api-key "<KEY>") channels (:kandan-channels utils/initial-query-map)] (put! (:controls comms) [:api-key-updated api-key]) (doseq [channel channels] (kandan-api/subscribe! kandan-client (str "/channels/" channel) (:api comms))))))) (set! (.-onload js/window) setup!) ;; Local dev tooling (defn ^:export send-async-message [ch-name message data] (put! (get-in @app-state [:comms (keyword ch-name)]) [(keyword message) (js->clj data :keywordize-keys true)])) (defn ^:export remove-channel! [channel-id] (put! (get-in @app-state [:comms :controls]) [:channel-remotely-destroyed channel-id])) (comment ;; Uncomment to have random messages send (js/setInterval #(api/random-message (get-in @app-state [:comms :api]) (rand-nth (keys (:channels @app-state)))) 2500))
true
(ns omchaya.core (:require [cljs.core.async :as async :refer [>! <! alts! chan sliding-buffer put! close!]] [clojure.string :as string] [dommy.core :as dommy] [omchaya.api.mock :as api] [omchaya.components.app :as app] [omchaya.controllers.controls :as controls-con] [omchaya.controllers.post-controls :as controls-pcon] [omchaya.controllers.api :as api-con] [omchaya.controllers.post-api :as api-pcon] [omchaya.datetime :as dt] [omchaya.api.kandan :as kandan-api] [omchaya.mock-data :as mock-data] [omchaya.routes :as routes] [omchaya.useful :as useful :refer [ffilter]] [omchaya.utils :as utils :refer [mprint]] [om.core :as om :include-macros true] [om.dom :as dom :include-macros true]) (:require-macros [cljs.core.async.macros :as am :refer [go go-loop alt!]]) (:use-macros [dommy.macros :only [sel sel1]])) (enable-console-print!) (def controls-ch (chan)) (def api-ch (chan)) (def app-state (atom (mock-data/initial-state {:controls controls-ch :api api-ch}))) ;; :state-history is a vector of vectors, where the inner ;; vector is the same shape as the messages played (def history (atom [])) (defn filtered-message? [message] (get #{:credit-card-updated} message)) (defn update-history! [history channel message] (let [m (first message) record (if (filtered-message? m) m message)] (swap! history conj [channel record]))) (defn main [target state] (let [comms (:comms @state) history (or history (atom []))] (routes/define-routes! state (.getElementById js/document "history-container")) (om/root app/app state {:target target :opts {:comms comms}}) (go (while true (alt! (:controls comms) ([v] (when (:log-channels? utils/initial-player-state) (mprint "Controls Verbose: " (pr-str v))) (let [previous-state @state] (update-history! history :controls v) (swap! state (partial controls-con/control-event target (first v) (second v))) (controls-pcon/post-control-event! target (first v) (second v) previous-state @state))) (:api comms) ([v] (when (:log-channels? utils/initial-player-state) (mprint "API Verbose: " (pr-str v))) (let [previous-state @state] (update-history! history :api v) (swap! state (partial api-con/api-event target (first v) (second v))) (api-pcon/post-api-event! target (first v) (second v) previous-state @state))) ;; Capture the current history for playback in the absence ;; of a server to store it (async/timeout 30000) (mprint (pr-str @history))))))) (defn setup! [] (let [comms (:comms @app-state)] (main (. js/document (getElementById "app")) app-state) (when (:restore-state? utils/initial-query-map) (put! (:controls comms) [:state-restored])) (when (:kandan-client? utils/initial-query-map) (let [api-key (:kandan-api-key utils/initial-query-map) kandan-client (kandan-api/make-client api-key "PI:KEY:<KEY>END_PI") channels (:kandan-channels utils/initial-query-map)] (put! (:controls comms) [:api-key-updated api-key]) (doseq [channel channels] (kandan-api/subscribe! kandan-client (str "/channels/" channel) (:api comms))))))) (set! (.-onload js/window) setup!) ;; Local dev tooling (defn ^:export send-async-message [ch-name message data] (put! (get-in @app-state [:comms (keyword ch-name)]) [(keyword message) (js->clj data :keywordize-keys true)])) (defn ^:export remove-channel! [channel-id] (put! (get-in @app-state [:comms :controls]) [:channel-remotely-destroyed channel-id])) (comment ;; Uncomment to have random messages send (js/setInterval #(api/random-message (get-in @app-state [:comms :api]) (rand-nth (keys (:channels @app-state)))) 2500))
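;; Illustrative sketch (not part of the original source): the go-loop in main
;; above follows a two-channel dispatch pattern - a pure state transition via
;; swap!, then a side-effecting post handler that sees the previous and new
;; state. A stripped-down JVM Clojure version, assuming org.clojure/core.async
;; is on the classpath; every name below is a placeholder, and the signatures
;; are simplified (event fns take [state msg data], post fns take
;; [msg data prev-state new-state]).
(ns example.dispatch
  (:require [clojure.core.async :refer [go-loop alt!]]))

(defn start-dispatch!
  [state controls-ch api-ch {:keys [control-event post-control! api-event post-api!]}]
  (go-loop []
    (alt!
      controls-ch ([v]
                   (let [prev @state]
                     (swap! state control-event (first v) (second v))
                     (post-control! (first v) (second v) prev @state)))
      api-ch ([v]
              (let [prev @state]
                (swap! state api-event (first v) (second v))
                (post-api! (first v) (second v) prev @state))))
    (recur)))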
[ { "context": "))\n\n(def sample-input (str \"SSL_KEYSTORE_PASSWORD=keypass1234\\n\"\n \"SSL_TRUSTSTORE_PASSWORD=", "end": 250, "score": 0.9982037544250488, "start": 237, "tag": "PASSWORD", "value": "keypass1234\\n" }, { "context": "\"\n \"SSL_TRUSTSTORE_PASSWORD=trustpass1234\"))\n\n(deftest prefixed?\n\n (is (secure/prefixed? \"", "end": 313, "score": 0.9992325305938721, "start": 300, "tag": "PASSWORD", "value": "trustpass1234" }, { "context": "ntException\n (secure/decrypted nil \"ARDuFSOqVc5l8dPe2l8jLnRvf2Y2/ZnhWNtkuZuoP1Updxo4cFAsFr+eM4WVcH/yIogK3ypO4sLp7sSXjkXv3L5Ci/5poJG2U/+No5ySBR1BhDjcV3mkO3TBYp4nQu65mpA=\")))\n\n (is (thrown? IllegalArgumentException\n ", "end": 986, "score": 0.9996664524078369, "start": 853, "tag": "KEY", "value": "ARDuFSOqVc5l8dPe2l8jLnRvf2Y2/ZnhWNtkuZuoP1Updxo4cFAsFr+eM4WVcH/yIogK3ypO4sLp7sSXjkXv3L5Ci/5poJG2U/+No5ySBR1BhDjcV3mkO3TBYp4nQu65mpA=\"" }, { "context": "entException\n (secure/decrypted \"\" \"ARDuFSOqVc5l8dPe2l8jLnRvf2Y2/ZnhWNtkuZuoP1Updxo4cFAsFr+eM4WVcH/yIogK3ypO4sLp7sSXjkXv3L5Ci/5poJG2U/+No5ySBR1BhDjcV3mkO3TBYp4nQu65mpA=\")))\n\n (is (thrown? IllegalArgumentException\n ", "end": 1201, "score": 0.9996753931045532, "start": 1068, "tag": "KEY", "value": "ARDuFSOqVc5l8dPe2l8jLnRvf2Y2/ZnhWNtkuZuoP1Updxo4cFAsFr+eM4WVcH/yIogK3ypO4sLp7sSXjkXv3L5Ci/5poJG2U/+No5ySBR1BhDjcV3mkO3TBYp4nQu65mpA=\"" }, { "context": "and-variants\n\n (let [secret-key (key/secret-key \"aquickredfox\" \"some-salt\")]\n\n (is (= sample-input\n ", "end": 1908, "score": 0.995222270488739, "start": 1896, "tag": "KEY", "value": "aquickredfox" }, { "context": "t [secret-key (key/secret-key \"aquickredfox\" \"some-salt\")]\n\n (is (= sample-input\n (->> (", "end": 1915, "score": 0.6761475801467896, "start": 1915, "tag": "KEY", "value": "" }, { "context": "7pM+mevjifZPrm7YE2+rRloG1E15zzjR88=\"\n \"ARDuFSOqVc5l8dPe2l8jLnRvf2Y2/ZnhWNtkuZuoP1Updxo4cFAsFr+eM4WVcH/yIogK3ypO4sLp7sSXjkXv3L5Ci/5poJG2U/+No5ySBR1BhDjcV3mkO3TBYp4nQu65mpA=\")))\n\n (is (= sample-input\n (->> (secu", "end": 2388, "score": 0.995662271976471, "start": 2255, "tag": "KEY", "value": "ARDuFSOqVc5l8dPe2l8jLnRvf2Y2/ZnhWNtkuZuoP1Updxo4cFAsFr+eM4WVcH/yIogK3ypO4sLp7sSXjkXv3L5Ci/5poJG2U/+No5ySBR1BhDjcV3mkO3TBYp4nQu65mpA=\"" }, { "context": " (secure/decoded-text (key/import-key \"//iQh9KYe7pM+mevjifZPrm7YE2+rRloG1E15zzjR88=\")))))\n\n (is (= sample-input\n (->> (se", "end": 2723, "score": 0.9994999170303345, "start": 2677, "tag": "KEY", "value": "\"//iQh9KYe7pM+mevjifZPrm7YE2+rRloG1E15zzjR88=\"" }, { "context": " (->> (secure/encoded-payload (key/import-key \"//iQh9KYe7pM+mevjifZPrm7YE2+rRloG1E15zzjR88=\") sample-input)\n (secure/decoded-", "end": 2854, "score": 0.9993749856948853, "start": 2810, "tag": "KEY", "value": "\"//iQh9KYe7pM+mevjifZPrm7YE2+rRloG1E15zzjR88" }, { "context": " (->> (secure/encoded-payload (key/import-key \"//iQh9KYe7pM+mevjifZPrm7YE2+rRloG1E15zzjR88=\") sample-input)\n (secure/decoded-tex", "end": 3052, "score": 0.9971234798431396, "start": 3005, "tag": "KEY", "value": "\"//iQh9KYe7pM+mevjifZPrm7YE2+rRloG1E15zzjR88=\")" }, { "context": " (secure/decoded-text (key/import-key \"//iQh9KYe7pM+mevjifZPrm7YE2+rRloG1E15zzjR88=\")))))\n\n ;; the random IV guarantees (well almos", "end": 3166, "score": 0.9994267821311951, "start": 3120, "tag": "KEY", "value": "\"//iQh9KYe7pM+mevjifZPrm7YE2+rRloG1E15zzjR88=\"" }, { "context": "terpretation\n\n (let [secret-key (key/secret-key \"aquickredfox\" \"some-salt\")]\n\n (is (= {\"SSL_KEYSTORE_PASSWOR", "end": 3495, "score": 0.9615728855133057, "start": 3483, "tag": "KEY", 
"value": "aquickredfox" }, { "context": "e-salt\")]\n\n (is (= {\"SSL_KEYSTORE_PASSWORD\" \"keypass1234\"\n \"SSL_TRUSTSTORE_PASSWORD\" \"trustpass", "end": 3562, "score": 0.9520242810249329, "start": 3551, "tag": "PASSWORD", "value": "keypass1234" }, { "context": "ypass1234\"\n \"SSL_TRUSTSTORE_PASSWORD\" \"trustpass1234\"}\n (->> (secure/encoded-payload secret-", "end": 3616, "score": 0.9993129372596741, "start": 3603, "tag": "PASSWORD", "value": "trustpass1234" }, { "context": "LoginModule required username=\\\"kpow\\\" password=\\\"kpow-secret\\\";\"\n \"SASL_MECHANISM\" \"PLAIN\"", "end": 3912, "score": 0.999494731426239, "start": 3901, "tag": "PASSWORD", "value": "kpow-secret" }, { "context": "store.jks\"\n \"SSL_TRUSTSTORE_PASSWORD\" \"password1234\"}\n (-> (secure/decrypted (slurp \"dev-re", "end": 4129, "score": 0.9995133280754089, "start": 4117, "tag": "PASSWORD", "value": "password1234" }, { "context": "curity.plain.PlainLoginModule required username=\\\"kpow\\\" password=\\\"kpow-secret\\\";\"\n \"sasl.me", "end": 4411, "score": 0.9988213777542114, "start": 4407, "tag": "USERNAME", "value": "kpow" }, { "context": "LoginModule required username=\\\"kpow\\\" password=\\\"kpow-secret\\\";\"\n \"sasl.mechanism\" \"PLAIN\"", "end": 4436, "score": 0.999525249004364, "start": 4425, "tag": "PASSWORD", "value": "kpow-secret" }, { "context": "store.jks\"\n \"ssl.truststore.password\" \"1234\"}\n (-> (secure/decrypted (slurp \"dev-re", "end": 4645, "score": 0.9994926452636719, "start": 4641, "tag": "PASSWORD", "value": "1234" }, { "context": "curity.plain.PlainLoginModule required username=\\\"kpow\\\" password=\\\"kpow-secret\\\";\"\n \"SASL_", "end": 4952, "score": 0.9989416599273682, "start": 4948, "tag": "USERNAME", "value": "kpow" }, { "context": "LoginModule required username=\\\"kpow\\\" password=\\\"kpow-secret\\\";\"\n \"SASL_MECHANISM\" \"PLAI", "end": 4977, "score": 0.9995129108428955, "start": 4966, "tag": "PASSWORD", "value": "kpow-secret" }, { "context": "ore.jks\"\n \"SSL_TRUSTSTORE_PASSWORD\" \"password1234\"}\n (into {} (Decoder/properties (slur", "end": 5202, "score": 0.9995008707046509, "start": 5190, "tag": "PASSWORD", "value": "password1234" }, { "context": "l-characters\n\n (let [secret-key (key/secret-key \"aquickredfox\" \"some-salt\")]\n\n ;; OWASP special characters l", "end": 5691, "score": 0.998610258102417, "start": 5679, "tag": "KEY", "value": "aquickredfox" }, { "context": "t [secret-key (key/secret-key \"aquickredfox\" \"some-salt\")]\n\n ;; OWASP special characters list: https:/", "end": 5703, "score": 0.9288309812545776, "start": 5699, "tag": "KEY", "value": "salt" }, { "context": "{|}~\"\n (secure/decoded-text secret-key \"ARBU3lQltrp0+i4IHmIsYhA1X/4kbXxlHllPBlmzdYcNeJtEzv9SseIMbwJ+wuxldZWrjo1qNFRg71ysTX7YZBvU\")))\n\n ;; Mixed characters pass generated by Ke", "end": 6242, "score": 0.9997818470001221, "start": 6154, "tag": "KEY", "value": "ARBU3lQltrp0+i4IHmIsYhA1X/4kbXxlHllPBlmzdYcNeJtEzv9SseIMbwJ+wuxldZWrjo1qNFRg71ysTX7YZBvU" } ]
test/kpow/secure_test.clj
operatr-io/kpow-secure
3
(ns kpow.secure-test (:require [clojure.test :refer [deftest is testing]] [kpow.secure :as secure] [kpow.secure.key :as key]) (:import (io.kpow.secure Decoder))) (def sample-input (str "SSL_KEYSTORE_PASSWORD=keypass1234\n" "SSL_TRUSTSTORE_PASSWORD=trustpass1234")) (deftest prefixed? (is (secure/prefixed? "AES:abc")) (is (not (secure/prefixed? "aes:abc"))) (is (not (secure/prefixed? "aesabc"))) (is (not (secure/prefixed? "abc"))) (is (not (secure/prefixed? ""))) (is (not (secure/prefixed? nil)))) (deftest error-cases (is (thrown? IllegalArgumentException (secure/encrypted nil "some-text"))) (is (thrown? IllegalArgumentException (secure/encrypted "" "some-text"))) (is (thrown? IllegalArgumentException (secure/decrypted nil "ARDuFSOqVc5l8dPe2l8jLnRvf2Y2/ZnhWNtkuZuoP1Updxo4cFAsFr+eM4WVcH/yIogK3ypO4sLp7sSXjkXv3L5Ci/5poJG2U/+No5ySBR1BhDjcV3mkO3TBYp4nQu65mpA="))) (is (thrown? IllegalArgumentException (secure/decrypted "" "ARDuFSOqVc5l8dPe2l8jLnRvf2Y2/ZnhWNtkuZuoP1Updxo4cFAsFr+eM4WVcH/yIogK3ypO4sLp7sSXjkXv3L5Ci/5poJG2U/+No5ySBR1BhDjcV3mkO3TBYp4nQu65mpA="))) (is (thrown? IllegalArgumentException (secure/decrypted "//iQh9KYe7pM+mevjifZPrm7YE2+rRloG1E15zzjR88=" nil))) (is (thrown? IllegalArgumentException (secure/decrypted "//iQh9KYe7pM+mevjifZPrm7YE2+rRloG1E15zzjR88=" ""))) (is (thrown? IllegalArgumentException (secure/encrypted "//iQh9KYe7pM+mevjifZPrm7YE2+rRloG1E15zzjR88=" nil))) ;; you can encrypt a blank string (just not nil, above) (is (= "" (->> (secure/encrypted "//iQh9KYe7pM+mevjifZPrm7YE2+rRloG1E15zzjR88=" "") (secure/decrypted "//iQh9KYe7pM+mevjifZPrm7YE2+rRloG1E15zzjR88="))))) (deftest full-trip-and-variants (let [secret-key (key/secret-key "aquickredfox" "some-salt")] (is (= sample-input (->> (secure/encrypted "//iQh9KYe7pM+mevjifZPrm7YE2+rRloG1E15zzjR88=" sample-input) (secure/decrypted "//iQh9KYe7pM+mevjifZPrm7YE2+rRloG1E15zzjR88=")))) (is (= sample-input (secure/decrypted "//iQh9KYe7pM+mevjifZPrm7YE2+rRloG1E15zzjR88=" "ARDuFSOqVc5l8dPe2l8jLnRvf2Y2/ZnhWNtkuZuoP1Updxo4cFAsFr+eM4WVcH/yIogK3ypO4sLp7sSXjkXv3L5Ci/5poJG2U/+No5ySBR1BhDjcV3mkO3TBYp4nQu65mpA="))) (is (= sample-input (->> (secure/encoded-payload secret-key sample-input) (secure/decoded-text secret-key)))) (is (= sample-input (->> (secure/encoded-payload secret-key sample-input) (secure/decoded-text (key/import-key "//iQh9KYe7pM+mevjifZPrm7YE2+rRloG1E15zzjR88="))))) (is (= sample-input (->> (secure/encoded-payload (key/import-key "//iQh9KYe7pM+mevjifZPrm7YE2+rRloG1E15zzjR88=") sample-input) (secure/decoded-text secret-key)))) (is (= sample-input (->> (secure/encoded-payload (key/import-key "//iQh9KYe7pM+mevjifZPrm7YE2+rRloG1E15zzjR88=") sample-input) (secure/decoded-text (key/import-key "//iQh9KYe7pM+mevjifZPrm7YE2+rRloG1E15zzjR88="))))) ;; the random IV guarantees (well almost) different payload even with the same key when encoded for a second time (is (not= (secure/encoded-payload secret-key sample-input) (secure/encoded-payload secret-key sample-input))))) (deftest interpretation (let [secret-key (key/secret-key "aquickredfox" "some-salt")] (is (= {"SSL_KEYSTORE_PASSWORD" "keypass1234" "SSL_TRUSTSTORE_PASSWORD" "trustpass1234"} (->> (secure/encoded-payload secret-key sample-input) (secure/decoded-text secret-key) (secure/->map)))) (is (= {"SASL_JAAS_CONFIG" "org.apache.kafka.common.security.plain.PlainLoginModule required username=\"kpow\" password=\"kpow-secret\";" "SASL_MECHANISM" "PLAIN" "SECURITY_PROTOCOL" "SASL_PLAINTEXT" "SSL_TRUSTSTORE_LOCATION" "/ssl/truststore.jks" "SSL_TRUSTSTORE_PASSWORD" "password1234"} (-> 
(secure/decrypted (slurp "dev-resources/secure/passphrase.key") (slurp "dev-resources/secure/config.env.aes")) (secure/->map)))) (is (= {"sasl.jaas.config" "org.apache.kafka.common.security.plain.PlainLoginModule required username=\"kpow\" password=\"kpow-secret\";" "sasl.mechanism" "PLAIN" "security.protocol" "SASL_PLAINTEXT" "ssl.truststore.location" "/ssl/truststore.jks" "ssl.truststore.password" "1234"} (-> (secure/decrypted (slurp "dev-resources/secure/passphrase.key") (slurp "dev-resources/secure/props.env.aes")) (secure/->map)))) (testing "interop" (is (= {"SASL_JAAS_CONFIG" "org.apache.kafka.common.security.plain.PlainLoginModule required username=\"kpow\" password=\"kpow-secret\";" "SASL_MECHANISM" "PLAIN" "SECURITY_PROTOCOL" "SASL_PLAINTEXT" "SSL_TRUSTSTORE_LOCATION" "/ssl/truststore.jks" "SSL_TRUSTSTORE_PASSWORD" "password1234"} (into {} (Decoder/properties (slurp "dev-resources/secure/passphrase.key") (slurp "dev-resources/secure/config.env.aes"))))) (is (= sample-input (Decoder/text "//iQh9KYe7pM+mevjifZPrm7YE2+rRloG1E15zzjR88=" "ARDuFSOqVc5l8dPe2l8jLnRvf2Y2/ZnhWNtkuZuoP1Updxo4cFAsFr+eM4WVcH/yIogK3ypO4sLp7sSXjkXv3L5Ci/5poJG2U/+No5ySBR1BhDjcV3mkO3TBYp4nQu65mpA=")))))) (deftest special-characters (let [secret-key (key/secret-key "aquickredfox" "some-salt")] ;; OWASP special characters list: https://owasp.org/www-community/password-special-characters (is (= " !\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~" (->> (secure/encrypted "//iQh9KYe7pM+mevjifZPrm7YE2+rRloG1E15zzjR88=" " !\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~") (secure/decrypted "//iQh9KYe7pM+mevjifZPrm7YE2+rRloG1E15zzjR88=")))) (is (= " !\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~" (secure/decoded-text secret-key "ARBU3lQltrp0+i4IHmIsYhA1X/4kbXxlHllPBlmzdYcNeJtEzv9SseIMbwJ+wuxldZWrjo1qNFRg71ysTX7YZBvU"))) ;; Mixed characters pass generated by KeepassXC (is (= "w;7MQ{&{o[F(H&^Fv[aN^y{7J@z+K7#" (->> (secure/encrypted "//iQh9KYe7pM+mevjifZPrm7YE2+rRloG1E15zzjR88=" "w;7MQ{&{o[F(H&^Fv[aN^y{7J@z+K7#") (secure/decrypted "//iQh9KYe7pM+mevjifZPrm7YE2+rRloG1E15zzjR88="))))))
100142
(ns kpow.secure-test (:require [clojure.test :refer [deftest is testing]] [kpow.secure :as secure] [kpow.secure.key :as key]) (:import (io.kpow.secure Decoder))) (def sample-input (str "SSL_KEYSTORE_PASSWORD=<PASSWORD>" "SSL_TRUSTSTORE_PASSWORD=<PASSWORD>")) (deftest prefixed? (is (secure/prefixed? "AES:abc")) (is (not (secure/prefixed? "aes:abc"))) (is (not (secure/prefixed? "aesabc"))) (is (not (secure/prefixed? "abc"))) (is (not (secure/prefixed? ""))) (is (not (secure/prefixed? nil)))) (deftest error-cases (is (thrown? IllegalArgumentException (secure/encrypted nil "some-text"))) (is (thrown? IllegalArgumentException (secure/encrypted "" "some-text"))) (is (thrown? IllegalArgumentException (secure/decrypted nil "<KEY>))) (is (thrown? IllegalArgumentException (secure/decrypted "" "<KEY>))) (is (thrown? IllegalArgumentException (secure/decrypted "//iQh9KYe7pM+mevjifZPrm7YE2+rRloG1E15zzjR88=" nil))) (is (thrown? IllegalArgumentException (secure/decrypted "//iQh9KYe7pM+mevjifZPrm7YE2+rRloG1E15zzjR88=" ""))) (is (thrown? IllegalArgumentException (secure/encrypted "//iQh9KYe7pM+mevjifZPrm7YE2+rRloG1E15zzjR88=" nil))) ;; you can encrypt a blank string (just not nil, above) (is (= "" (->> (secure/encrypted "//iQh9KYe7pM+mevjifZPrm7YE2+rRloG1E15zzjR88=" "") (secure/decrypted "//iQh9KYe7pM+mevjifZPrm7YE2+rRloG1E15zzjR88="))))) (deftest full-trip-and-variants (let [secret-key (key/secret-key "<KEY>" "some<KEY>-salt")] (is (= sample-input (->> (secure/encrypted "//iQh9KYe7pM+mevjifZPrm7YE2+rRloG1E15zzjR88=" sample-input) (secure/decrypted "//iQh9KYe7pM+mevjifZPrm7YE2+rRloG1E15zzjR88=")))) (is (= sample-input (secure/decrypted "//iQh9KYe7pM+mevjifZPrm7YE2+rRloG1E15zzjR88=" "<KEY>))) (is (= sample-input (->> (secure/encoded-payload secret-key sample-input) (secure/decoded-text secret-key)))) (is (= sample-input (->> (secure/encoded-payload secret-key sample-input) (secure/decoded-text (key/import-key <KEY>))))) (is (= sample-input (->> (secure/encoded-payload (key/import-key <KEY>=") sample-input) (secure/decoded-text secret-key)))) (is (= sample-input (->> (secure/encoded-payload (key/import-key <KEY> sample-input) (secure/decoded-text (key/import-key <KEY>))))) ;; the random IV guarantees (well almost) different payload even with the same key when encoded for a second time (is (not= (secure/encoded-payload secret-key sample-input) (secure/encoded-payload secret-key sample-input))))) (deftest interpretation (let [secret-key (key/secret-key "<KEY>" "some-salt")] (is (= {"SSL_KEYSTORE_PASSWORD" "<PASSWORD>" "SSL_TRUSTSTORE_PASSWORD" "<PASSWORD>"} (->> (secure/encoded-payload secret-key sample-input) (secure/decoded-text secret-key) (secure/->map)))) (is (= {"SASL_JAAS_CONFIG" "org.apache.kafka.common.security.plain.PlainLoginModule required username=\"kpow\" password=\"<PASSWORD>\";" "SASL_MECHANISM" "PLAIN" "SECURITY_PROTOCOL" "SASL_PLAINTEXT" "SSL_TRUSTSTORE_LOCATION" "/ssl/truststore.jks" "SSL_TRUSTSTORE_PASSWORD" "<PASSWORD>"} (-> (secure/decrypted (slurp "dev-resources/secure/passphrase.key") (slurp "dev-resources/secure/config.env.aes")) (secure/->map)))) (is (= {"sasl.jaas.config" "org.apache.kafka.common.security.plain.PlainLoginModule required username=\"kpow\" password=\"<PASSWORD>\";" "sasl.mechanism" "PLAIN" "security.protocol" "SASL_PLAINTEXT" "ssl.truststore.location" "/ssl/truststore.jks" "ssl.truststore.password" "<PASSWORD>"} (-> (secure/decrypted (slurp "dev-resources/secure/passphrase.key") (slurp "dev-resources/secure/props.env.aes")) (secure/->map)))) (testing "interop" (is (= 
{"SASL_JAAS_CONFIG" "org.apache.kafka.common.security.plain.PlainLoginModule required username=\"kpow\" password=\"<PASSWORD>\";" "SASL_MECHANISM" "PLAIN" "SECURITY_PROTOCOL" "SASL_PLAINTEXT" "SSL_TRUSTSTORE_LOCATION" "/ssl/truststore.jks" "SSL_TRUSTSTORE_PASSWORD" "<PASSWORD>"} (into {} (Decoder/properties (slurp "dev-resources/secure/passphrase.key") (slurp "dev-resources/secure/config.env.aes"))))) (is (= sample-input (Decoder/text "//iQh9KYe7pM+mevjifZPrm7YE2+rRloG1E15zzjR88=" "ARDuFSOqVc5l8dPe2l8jLnRvf2Y2/ZnhWNtkuZuoP1Updxo4cFAsFr+eM4WVcH/yIogK3ypO4sLp7sSXjkXv3L5Ci/5poJG2U/+No5ySBR1BhDjcV3mkO3TBYp4nQu65mpA=")))))) (deftest special-characters (let [secret-key (key/secret-key "<KEY>" "some-<KEY>")] ;; OWASP special characters list: https://owasp.org/www-community/password-special-characters (is (= " !\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~" (->> (secure/encrypted "//iQh9KYe7pM+mevjifZPrm7YE2+rRloG1E15zzjR88=" " !\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~") (secure/decrypted "//iQh9KYe7pM+mevjifZPrm7YE2+rRloG1E15zzjR88=")))) (is (= " !\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~" (secure/decoded-text secret-key "<KEY>"))) ;; Mixed characters pass generated by KeepassXC (is (= "w;7MQ{&{o[F(H&^Fv[aN^y{7J@z+K7#" (->> (secure/encrypted "//iQh9KYe7pM+mevjifZPrm7YE2+rRloG1E15zzjR88=" "w;7MQ{&{o[F(H&^Fv[aN^y{7J@z+K7#") (secure/decrypted "//iQh9KYe7pM+mevjifZPrm7YE2+rRloG1E15zzjR88="))))))
true
(ns kpow.secure-test (:require [clojure.test :refer [deftest is testing]] [kpow.secure :as secure] [kpow.secure.key :as key]) (:import (io.kpow.secure Decoder))) (def sample-input (str "SSL_KEYSTORE_PASSWORD=PI:PASSWORD:<PASSWORD>END_PI" "SSL_TRUSTSTORE_PASSWORD=PI:PASSWORD:<PASSWORD>END_PI")) (deftest prefixed? (is (secure/prefixed? "AES:abc")) (is (not (secure/prefixed? "aes:abc"))) (is (not (secure/prefixed? "aesabc"))) (is (not (secure/prefixed? "abc"))) (is (not (secure/prefixed? ""))) (is (not (secure/prefixed? nil)))) (deftest error-cases (is (thrown? IllegalArgumentException (secure/encrypted nil "some-text"))) (is (thrown? IllegalArgumentException (secure/encrypted "" "some-text"))) (is (thrown? IllegalArgumentException (secure/decrypted nil "PI:KEY:<KEY>END_PI))) (is (thrown? IllegalArgumentException (secure/decrypted "" "PI:KEY:<KEY>END_PI))) (is (thrown? IllegalArgumentException (secure/decrypted "//iQh9KYe7pM+mevjifZPrm7YE2+rRloG1E15zzjR88=" nil))) (is (thrown? IllegalArgumentException (secure/decrypted "//iQh9KYe7pM+mevjifZPrm7YE2+rRloG1E15zzjR88=" ""))) (is (thrown? IllegalArgumentException (secure/encrypted "//iQh9KYe7pM+mevjifZPrm7YE2+rRloG1E15zzjR88=" nil))) ;; you can encrypt a blank string (just not nil, above) (is (= "" (->> (secure/encrypted "//iQh9KYe7pM+mevjifZPrm7YE2+rRloG1E15zzjR88=" "") (secure/decrypted "//iQh9KYe7pM+mevjifZPrm7YE2+rRloG1E15zzjR88="))))) (deftest full-trip-and-variants (let [secret-key (key/secret-key "PI:KEY:<KEY>END_PI" "somePI:KEY:<KEY>END_PI-salt")] (is (= sample-input (->> (secure/encrypted "//iQh9KYe7pM+mevjifZPrm7YE2+rRloG1E15zzjR88=" sample-input) (secure/decrypted "//iQh9KYe7pM+mevjifZPrm7YE2+rRloG1E15zzjR88=")))) (is (= sample-input (secure/decrypted "//iQh9KYe7pM+mevjifZPrm7YE2+rRloG1E15zzjR88=" "PI:KEY:<KEY>END_PI))) (is (= sample-input (->> (secure/encoded-payload secret-key sample-input) (secure/decoded-text secret-key)))) (is (= sample-input (->> (secure/encoded-payload secret-key sample-input) (secure/decoded-text (key/import-key PI:KEY:<KEY>END_PI))))) (is (= sample-input (->> (secure/encoded-payload (key/import-key PI:KEY:<KEY>END_PI=") sample-input) (secure/decoded-text secret-key)))) (is (= sample-input (->> (secure/encoded-payload (key/import-key PI:KEY:<KEY>END_PI sample-input) (secure/decoded-text (key/import-key PI:KEY:<KEY>END_PI))))) ;; the random IV guarantees (well almost) different payload even with the same key when encoded for a second time (is (not= (secure/encoded-payload secret-key sample-input) (secure/encoded-payload secret-key sample-input))))) (deftest interpretation (let [secret-key (key/secret-key "PI:KEY:<KEY>END_PI" "some-salt")] (is (= {"SSL_KEYSTORE_PASSWORD" "PI:PASSWORD:<PASSWORD>END_PI" "SSL_TRUSTSTORE_PASSWORD" "PI:PASSWORD:<PASSWORD>END_PI"} (->> (secure/encoded-payload secret-key sample-input) (secure/decoded-text secret-key) (secure/->map)))) (is (= {"SASL_JAAS_CONFIG" "org.apache.kafka.common.security.plain.PlainLoginModule required username=\"kpow\" password=\"PI:PASSWORD:<PASSWORD>END_PI\";" "SASL_MECHANISM" "PLAIN" "SECURITY_PROTOCOL" "SASL_PLAINTEXT" "SSL_TRUSTSTORE_LOCATION" "/ssl/truststore.jks" "SSL_TRUSTSTORE_PASSWORD" "PI:PASSWORD:<PASSWORD>END_PI"} (-> (secure/decrypted (slurp "dev-resources/secure/passphrase.key") (slurp "dev-resources/secure/config.env.aes")) (secure/->map)))) (is (= {"sasl.jaas.config" "org.apache.kafka.common.security.plain.PlainLoginModule required username=\"kpow\" password=\"PI:PASSWORD:<PASSWORD>END_PI\";" "sasl.mechanism" "PLAIN" "security.protocol" 
"SASL_PLAINTEXT" "ssl.truststore.location" "/ssl/truststore.jks" "ssl.truststore.password" "PI:PASSWORD:<PASSWORD>END_PI"} (-> (secure/decrypted (slurp "dev-resources/secure/passphrase.key") (slurp "dev-resources/secure/props.env.aes")) (secure/->map)))) (testing "interop" (is (= {"SASL_JAAS_CONFIG" "org.apache.kafka.common.security.plain.PlainLoginModule required username=\"kpow\" password=\"PI:PASSWORD:<PASSWORD>END_PI\";" "SASL_MECHANISM" "PLAIN" "SECURITY_PROTOCOL" "SASL_PLAINTEXT" "SSL_TRUSTSTORE_LOCATION" "/ssl/truststore.jks" "SSL_TRUSTSTORE_PASSWORD" "PI:PASSWORD:<PASSWORD>END_PI"} (into {} (Decoder/properties (slurp "dev-resources/secure/passphrase.key") (slurp "dev-resources/secure/config.env.aes"))))) (is (= sample-input (Decoder/text "//iQh9KYe7pM+mevjifZPrm7YE2+rRloG1E15zzjR88=" "ARDuFSOqVc5l8dPe2l8jLnRvf2Y2/ZnhWNtkuZuoP1Updxo4cFAsFr+eM4WVcH/yIogK3ypO4sLp7sSXjkXv3L5Ci/5poJG2U/+No5ySBR1BhDjcV3mkO3TBYp4nQu65mpA=")))))) (deftest special-characters (let [secret-key (key/secret-key "PI:KEY:<KEY>END_PI" "some-PI:KEY:<KEY>END_PI")] ;; OWASP special characters list: https://owasp.org/www-community/password-special-characters (is (= " !\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~" (->> (secure/encrypted "//iQh9KYe7pM+mevjifZPrm7YE2+rRloG1E15zzjR88=" " !\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~") (secure/decrypted "//iQh9KYe7pM+mevjifZPrm7YE2+rRloG1E15zzjR88=")))) (is (= " !\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~" (secure/decoded-text secret-key "PI:KEY:<KEY>END_PI"))) ;; Mixed characters pass generated by KeepassXC (is (= "w;7MQ{&{o[F(H&^Fv[aN^y{7J@z+K7#" (->> (secure/encrypted "//iQh9KYe7pM+mevjifZPrm7YE2+rRloG1E15zzjR88=" "w;7MQ{&{o[F(H&^Fv[aN^y{7J@z+K7#") (secure/decrypted "//iQh9KYe7pM+mevjifZPrm7YE2+rRloG1E15zzjR88="))))))
[ { "context": "rmat-simple-type-edn holiday))\n\n\n;; Copyright 2018 Frederic Merizen\n;;\n;; Licensed under the Apache License, Version ", "end": 17094, "score": 0.9998658299446106, "start": 17078, "tag": "NAME", "value": "Frederic Merizen" } ]
src/ferje/config/edn.clj
chourave/clojyday
0
;; Copyright and license information at end of file (ns ferje.config.edn (:require [clojure.spec.alpha :as s] [ferje.config.core :as config] [ferje.config.raw :as raw] [ferje.place :as place]) (:import (de.jollyday.datasource ConfigurationDataSource) (de.jollyday.parameter BaseManagerParameter CalendarPartManagerParameter) (ferje.config.raw ConfigSource) (de.jollyday.util ResourceUtil))) (place/add-format :edn) (defn tag "" [tag] (let [tag (name tag)] #{tag (keyword tag) (symbol tag)})) (defn fluff "" [fluff] (s/? (tag fluff))) (defn named-as "" [values] (let [values (into #{} (mapcat tag) values)] (s/conformer #(or (some-> % values name keyword) ::s/invalid)))) (s/def ::substitution (s/cat :substitute (named-as config/weekdays) :fluff (fluff :with) :with (named-as config/withs) :weekday (named-as config/weekdays))) (s/def ::moving-conditions (s/* (s/cat :tag (tag :substitute) :substitutions (s/+ ::substitution)))) (s/def ::month (named-as config/months)) (s/def ::weekday (named-as config/weekdays)) (s/def ::when (named-as config/whens)) (s/def ::which (named-as config/whichs)) (s/def ::every (s/alt :canonical (named-as config/everys) :piecemeal (s/cat :prefix (s/? (s/or :number #{2 4 5 6} :parity (named-as #{:odd :even}))) :suffix (named-as #{:year :years})))) (s/def ::description-key (s/conformer #(if (config/named? %) (-> % name keyword) ::s/invalid))) (s/def ::chronology (named-as config/chronologies)) (s/def ::christian-type (named-as config/christian-types)) (s/def ::islamic-type (named-as config/islamic-types)) (s/def ::hindu-type (named-as config/hindu-types)) (s/def ::hebrew-type (named-as config/hebrew-types)) (s/def ::ethiopian-orthodox-type (named-as config/ethiopian-orthodox-types)) (s/def ::fixed (s/cat :month ::month :day ::config/day :moving-conditions ::moving-conditions)) (s/def ::fixed-weekday (s/cat :which ::which :weekday ::weekday :fluff (fluff :of) :month ::month)) (s/def ::holiday (s/or :christian-holiday ::christian-type :islamic-holiday ::islamic-type :hindu-holiday ::hindu-type :hebrew-holiday ::hebrew-type :ethiopian-orthodox-holiday ::ethiopian-orthodox-type :composite (s/cat :definition (s/alt :fixed ::fixed :relative-to-fixed (s/cat :offset (s/alt :days (s/cat :days ::config/days :fluff (fluff :days)) :weekday ::weekday) :when ::when :reference ::fixed) :fixed-weekday-between-fixed (s/cat :weekday ::weekday :tag (tag :between) :from ::fixed :fluff (fluff :and) :to ::fixed) :fixed-weekday ::fixed-weekday :relative-to-weekday-in-month (s/cat :weekday ::weekday :when ::when :reference ::fixed-weekday) :fixed-weekday-relative-to-fixed (s/cat :which ::which :weekday ::weekday :when ::when :reference ::fixed) :christian-holiday (s/cat :chronology (s/? ::chronology) :type ::christian-type :moving-conditions ::moving-conditions) :relative-to-easter-sunday (s/cat :days int? :fluff (fluff :days) :when (s/? ::when) :chronology (s/? 
::chronology) :tag (tag :easter)) :islamic-holiday ::islamic-type :hindu-holiday ::hindu-type :hebrew-holiday ::hebrew-type :ethiopian-orthodox-holiday ::ethiopian-orthodox-type) :options (s/* (s/alt :valid-from (s/cat :tag (s/alt :one (tag :valid-from) :two (s/cat :valid (tag :valid) :from (tag :from))) :value ::config/valid-from) :valid-to (s/cat :tag (s/alt :one (tag :valid-to) :two (s/cat :valid (tag :valid) :to (tag :to))) :value ::config/valid-to) :valid (s/cat :tag (tag :valid) :from-tag (tag :from) :from ::config/valid-from :to-tag (tag :to) :to ::config/valid-to) :every (s/cat :tag (tag :every) :value ::every) :description-key (s/cat :tag (tag :description-key) :value ::description-key) :localized-type (named-as #{:official :unofficial :inofficial})))))) (s/def ::holidays (s/coll-of ::holiday)) (s/def ::sub-configurations (s/coll-of ::configuration)) (defn keywordize-keys [m] (into {} (map (fn [[k v]] [(-> k name keyword) v])) m)) (s/def ::hierarchy (s/and config/named? (s/conformer #(-> % name keyword)))) (s/def ::configuration (s/and (s/map-of config/named? any?) (s/conformer keywordize-keys) (s/keys :req-un [::config/description ::hierarchy ::holidays] :opt-un [::sub-configurations]))) (defn holiday-type "" [conformed-holiday] (cond (not (map-entry? conformed-holiday)) nil (= :composite (key conformed-holiday)) (-> conformed-holiday val :definition key) :else (key conformed-holiday))) (defmulti -edn->holiday "" holiday-type) (defmulti parse-option "" key) (defmethod parse-option :valid-from [[k v]] {k (:value v)}) (defmethod parse-option :valid-to [[k v]] {k (:value v)}) (defmethod parse-option :valid [[_ {:keys [from to]}]] {:valid-from from, :valid-to to}) (defmethod parse-option :every [[k {:keys [value]}]] {k (case (key value) :canonical (val value) :piecemeal ({nil :every-year 2 :2-years 4 :4-years 5 :5-years 6 :6-years :odd :odd-years :even :even-years} (some-> value val :prefix val)))}) (defmethod parse-option :description-key [[k v]] {k (:value v)}) (defmethod parse-option :localized-type [[k v]] {k ({:inofficial :unofficial-holiday :unofficial :unofficial-holiday :official :official-holiday} v)}) (defn parse-common-options "" [conformed-holiday] (into {} (mapcat parse-option) (-> conformed-holiday val :options))) (defn edn->holiday "" [holiday] (let [conformed (s/conform ::holiday holiday) type (holiday-type conformed)] (-> conformed -edn->holiday (assoc :holiday type) (merge (parse-common-options conformed))))) (s/fdef edn->holiday :args (s/cat :holiday ::holiday) :ret `config/holiday) (defn edn->configuration "" [configuration] (let [configuration (keywordize-keys configuration)] (-> configuration (update :holidays #(map edn->holiday %)) (update :hierarchy #(-> % name keyword)) (into (for [[k v] (select-keys configuration [:sub-configurations])] [k (map edn->configuration v)]))))) (s/fdef edn->configuration :args (s/cat :configuration ::configuration) :ret ::config/configuration) (defmethod place/configuration-data-source :edn [_] (reify ConfigurationDataSource (getConfiguration [_ parameters] (-> parameters raw/manager-parameter->config edn->configuration config/->Configuration)))) (defmethod place/-create-manager-parameters [String :edn] [calendar-part _] (proxy [CalendarPartManagerParameter] [(place/normalized-calendar-part calendar-part) nil] (createResourceUrl [] (->> calendar-part raw/cal-edn-path str (.getResource (ResourceUtil.)))))) (defmethod place/-create-manager-parameters [clojure.lang.IPersistentMap :edn] [config _] (proxy [BaseManagerParameter 
ConfigSource] [nil] (createCacheKey [] (-> config hash str)) (get_config [] config))) (defmulti -holiday->edn "" :holiday) (defmulti format-common-options "" (fn [style _] style)) (defmethod format-common-options :code [_ holiday] (mapcat (fn [[_ v :as x]] (when v x)) (select-keys holiday [:valid-from :valid-to :every :description-key]))) (defmethod format-common-options :english [_ {:keys [valid-from valid-to every description-key]}] (concat (cond (and valid-from valid-to) [:valid :from valid-from :to valid-to] valid-from [:valid :from valid-from] valid-to [:valid :to valid-to]) (when every [:every every]) (when description-key [:description-key description-key]))) (defn format-official-marker "" [holiday] (when (= :unofficial-holiday (:localized-type holiday)) [:unofficial])) (defn simplify-edn "" [holiday] (if (next holiday) holiday (first holiday))) (s/def ::edn-style #{:code :english}) (defmulti apply-style "" #(-> %2)) (defmethod apply-style :code [holiday _] holiday) (defmethod apply-style :english [holiday _] (into [] (map #(if (keyword? %) (-> % name symbol) %)) holiday)) (defn holiday->edn "" [style holiday] (-> (-holiday->edn holiday) (into (format-common-options style holiday)) (into (format-official-marker holiday)) (apply-style style) simplify-edn)) (s/fdef holiday->edn :args (s/cat :style ::edn-style, :holiday `config/holiday) :ret ::holiday) (defmulti format-configuration "" #(-> %2)) (defmethod format-configuration :code [configuration _] configuration) (defmethod format-configuration :english [configuration _] (into {} (map (fn [[k v]] [(-> k name symbol) v])) configuration)) (defn format-keyword "" [style keyword] (case style :english (-> keyword name symbol) :code keyword)) (defn configuration->edn "" [style configuration] (-> configuration (update :holidays #(for [h %] (holiday->edn style h))) (update :hierarchy #(format-keyword style %)) (into (for [[k v] (select-keys configuration [:sub-configurations])] [k (for [c v] (configuration->edn style c))])) (format-configuration style))) (s/fdef configuration->edn :args (s/cat :style ::edn-style, :configuration ::config/configuration) :ret ::configuration) (defn composite-definition "" [holiday] (some-> holiday val :definition val)) (defn parse-moving-conditions "" [holiday] (when-let [conditions (:moving-conditions holiday)] {:moving-conditions (into [] (mapcat #(for [s (:substitutions %)] (select-keys s [:substitute :with :weekday]))) conditions)})) (defn parse-fixed-edn "" [fixed] (merge (select-keys fixed [:month :day]) (parse-moving-conditions fixed))) (defmethod -edn->holiday :fixed [holiday] (parse-fixed-edn (composite-definition holiday))) (defn format-moving-conditions "" [holiday] (when-let [moving-conditions (:moving-conditions holiday)] (into [:substitute] (mapcat (juxt :substitute (constantly :with) :with :weekday)) moving-conditions))) (defn format-fixed-edn "" [fixed] (let [{:keys [month day]} fixed] (into [month day] (format-moving-conditions fixed)))) (defmethod -holiday->edn :fixed [holiday] (format-fixed-edn holiday)) (defmethod -edn->holiday :relative-to-fixed [holiday] (let [{:keys [offset when reference]} (composite-definition holiday) offset-type (key offset)] {:when when :date (parse-fixed-edn reference) offset-type ((case offset-type :days :days :weekday identity) (val offset))})) (defmethod -holiday->edn :relative-to-fixed [holiday] (let [{:keys [when days weekday date]} holiday offset (if days [days :days] [weekday])] (into offset (cons when (format-fixed-edn date))))) (defmethod -edn->holiday 
:fixed-weekday-between-fixed [holiday] (let [{:keys [from to weekday]} (composite-definition holiday)] {:from (parse-fixed-edn from) :to (parse-fixed-edn to) :weekday weekday})) (defmethod -holiday->edn :fixed-weekday-between-fixed [holiday] (let [{:keys [from to weekday]} holiday] (into [weekday :between] (concat (format-fixed-edn from) [:and] (format-fixed-edn to))))) (defn parse-fixed-weekday-edn "" [fixed-weekday] (select-keys fixed-weekday [:month :which :weekday])) (defmethod -edn->holiday :fixed-weekday [holiday] (parse-fixed-weekday-edn (composite-definition holiday))) (defmethod -holiday->edn :fixed-weekday [holiday] (let [{:keys [month which weekday]} holiday] [which weekday :of month])) (defmethod -edn->holiday :relative-to-weekday-in-month [holiday] (let [definition (composite-definition holiday)] (assoc (select-keys definition [:weekday :when]) :fixed-weekday (parse-fixed-weekday-edn (:reference definition))))) (defmethod -holiday->edn :relative-to-weekday-in-month [holiday] (let [{:keys [weekday when fixed-weekday]} holiday {:keys [month which] ref-weekday :weekday} fixed-weekday] [weekday when which ref-weekday :of month])) (defmethod -edn->holiday :fixed-weekday-relative-to-fixed [holiday] (let [definition (composite-definition holiday)] (assoc (select-keys definition [:which :weekday :when]) :date (parse-fixed-edn (:reference definition))))) (defmethod -holiday->edn :fixed-weekday-relative-to-fixed [holiday] (let [{:keys [which weekday when date]} holiday] (into [which weekday when] (format-fixed-edn date)))) (defmethod -edn->holiday :christian-holiday [holiday] (if-let [definition (composite-definition holiday)] (let [{:keys [chronology]} definition] (cond-> (select-keys definition [:type]) chronology (assoc :chronology chronology) :alwas (merge (parse-moving-conditions definition)))) {:type (val holiday)})) (defmethod -holiday->edn :christian-holiday [holiday] (let [{:keys [type chronology]} holiday] (into [] (keep identity) (concat [chronology type] (format-moving-conditions holiday))))) (defmethod -edn->holiday :relative-to-easter-sunday [holiday] (let [{:keys [days when chronology] :or {when :after, chronology :gregorian}} (composite-definition holiday) sign ({:before -1, :after 1} when)] {:days (* sign days) :chronology chronology})) (defmethod -holiday->edn :relative-to-easter-sunday [holiday] (let [{:keys [days chronology]} holiday] (if (neg? days) [(- days) :days :before chronology :easter] [days :days :after chronology :easter]))) (defn parse-simple-type-edn "" [holiday] {:type (or (composite-definition holiday) (val holiday))}) (defmethod -edn->holiday :islamic-holiday [holiday] (parse-simple-type-edn holiday)) (defn format-simple-type-edn "" [holiday] [(:type holiday)]) (defmethod -holiday->edn :islamic-holiday [holiday] (format-simple-type-edn holiday)) (defmethod -edn->holiday :hindu-holiday [holiday] (parse-simple-type-edn holiday)) (defmethod -holiday->edn :hindu-holiday [holiday] (format-simple-type-edn holiday)) (defmethod -edn->holiday :hebrew-holiday [holiday] (parse-simple-type-edn holiday)) (defmethod -holiday->edn :hebrew-holiday [holiday] (format-simple-type-edn holiday)) (defmethod -edn->holiday :ethiopian-orthodox-holiday [holiday] (parse-simple-type-edn holiday)) (defmethod -holiday->edn :ethiopian-orthodox-holiday [holiday] (format-simple-type-edn holiday)) ;; Copyright 2018 Frederic Merizen ;; ;; Licensed under the Apache License, Version 2.0 (the "License"); ;; you may not use this file except in compliance with the License. 
;; You may obtain a copy of the License at ;; ;; http://www.apache.org/licenses/LICENSE-2.0 ;; ;; Unless required by applicable law or agreed to in writing, software ;; distributed under the License is distributed on an "AS IS" BASIS, ;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express ;; or implied. See the License for the specific language governing ;; permissions and limitations under the License.
48773
;; Copyright and license information at end of file (ns ferje.config.edn (:require [clojure.spec.alpha :as s] [ferje.config.core :as config] [ferje.config.raw :as raw] [ferje.place :as place]) (:import (de.jollyday.datasource ConfigurationDataSource) (de.jollyday.parameter BaseManagerParameter CalendarPartManagerParameter) (ferje.config.raw ConfigSource) (de.jollyday.util ResourceUtil))) (place/add-format :edn) (defn tag "" [tag] (let [tag (name tag)] #{tag (keyword tag) (symbol tag)})) (defn fluff "" [fluff] (s/? (tag fluff))) (defn named-as "" [values] (let [values (into #{} (mapcat tag) values)] (s/conformer #(or (some-> % values name keyword) ::s/invalid)))) (s/def ::substitution (s/cat :substitute (named-as config/weekdays) :fluff (fluff :with) :with (named-as config/withs) :weekday (named-as config/weekdays))) (s/def ::moving-conditions (s/* (s/cat :tag (tag :substitute) :substitutions (s/+ ::substitution)))) (s/def ::month (named-as config/months)) (s/def ::weekday (named-as config/weekdays)) (s/def ::when (named-as config/whens)) (s/def ::which (named-as config/whichs)) (s/def ::every (s/alt :canonical (named-as config/everys) :piecemeal (s/cat :prefix (s/? (s/or :number #{2 4 5 6} :parity (named-as #{:odd :even}))) :suffix (named-as #{:year :years})))) (s/def ::description-key (s/conformer #(if (config/named? %) (-> % name keyword) ::s/invalid))) (s/def ::chronology (named-as config/chronologies)) (s/def ::christian-type (named-as config/christian-types)) (s/def ::islamic-type (named-as config/islamic-types)) (s/def ::hindu-type (named-as config/hindu-types)) (s/def ::hebrew-type (named-as config/hebrew-types)) (s/def ::ethiopian-orthodox-type (named-as config/ethiopian-orthodox-types)) (s/def ::fixed (s/cat :month ::month :day ::config/day :moving-conditions ::moving-conditions)) (s/def ::fixed-weekday (s/cat :which ::which :weekday ::weekday :fluff (fluff :of) :month ::month)) (s/def ::holiday (s/or :christian-holiday ::christian-type :islamic-holiday ::islamic-type :hindu-holiday ::hindu-type :hebrew-holiday ::hebrew-type :ethiopian-orthodox-holiday ::ethiopian-orthodox-type :composite (s/cat :definition (s/alt :fixed ::fixed :relative-to-fixed (s/cat :offset (s/alt :days (s/cat :days ::config/days :fluff (fluff :days)) :weekday ::weekday) :when ::when :reference ::fixed) :fixed-weekday-between-fixed (s/cat :weekday ::weekday :tag (tag :between) :from ::fixed :fluff (fluff :and) :to ::fixed) :fixed-weekday ::fixed-weekday :relative-to-weekday-in-month (s/cat :weekday ::weekday :when ::when :reference ::fixed-weekday) :fixed-weekday-relative-to-fixed (s/cat :which ::which :weekday ::weekday :when ::when :reference ::fixed) :christian-holiday (s/cat :chronology (s/? ::chronology) :type ::christian-type :moving-conditions ::moving-conditions) :relative-to-easter-sunday (s/cat :days int? :fluff (fluff :days) :when (s/? ::when) :chronology (s/? 
::chronology) :tag (tag :easter)) :islamic-holiday ::islamic-type :hindu-holiday ::hindu-type :hebrew-holiday ::hebrew-type :ethiopian-orthodox-holiday ::ethiopian-orthodox-type) :options (s/* (s/alt :valid-from (s/cat :tag (s/alt :one (tag :valid-from) :two (s/cat :valid (tag :valid) :from (tag :from))) :value ::config/valid-from) :valid-to (s/cat :tag (s/alt :one (tag :valid-to) :two (s/cat :valid (tag :valid) :to (tag :to))) :value ::config/valid-to) :valid (s/cat :tag (tag :valid) :from-tag (tag :from) :from ::config/valid-from :to-tag (tag :to) :to ::config/valid-to) :every (s/cat :tag (tag :every) :value ::every) :description-key (s/cat :tag (tag :description-key) :value ::description-key) :localized-type (named-as #{:official :unofficial :inofficial})))))) (s/def ::holidays (s/coll-of ::holiday)) (s/def ::sub-configurations (s/coll-of ::configuration)) (defn keywordize-keys [m] (into {} (map (fn [[k v]] [(-> k name keyword) v])) m)) (s/def ::hierarchy (s/and config/named? (s/conformer #(-> % name keyword)))) (s/def ::configuration (s/and (s/map-of config/named? any?) (s/conformer keywordize-keys) (s/keys :req-un [::config/description ::hierarchy ::holidays] :opt-un [::sub-configurations]))) (defn holiday-type "" [conformed-holiday] (cond (not (map-entry? conformed-holiday)) nil (= :composite (key conformed-holiday)) (-> conformed-holiday val :definition key) :else (key conformed-holiday))) (defmulti -edn->holiday "" holiday-type) (defmulti parse-option "" key) (defmethod parse-option :valid-from [[k v]] {k (:value v)}) (defmethod parse-option :valid-to [[k v]] {k (:value v)}) (defmethod parse-option :valid [[_ {:keys [from to]}]] {:valid-from from, :valid-to to}) (defmethod parse-option :every [[k {:keys [value]}]] {k (case (key value) :canonical (val value) :piecemeal ({nil :every-year 2 :2-years 4 :4-years 5 :5-years 6 :6-years :odd :odd-years :even :even-years} (some-> value val :prefix val)))}) (defmethod parse-option :description-key [[k v]] {k (:value v)}) (defmethod parse-option :localized-type [[k v]] {k ({:inofficial :unofficial-holiday :unofficial :unofficial-holiday :official :official-holiday} v)}) (defn parse-common-options "" [conformed-holiday] (into {} (mapcat parse-option) (-> conformed-holiday val :options))) (defn edn->holiday "" [holiday] (let [conformed (s/conform ::holiday holiday) type (holiday-type conformed)] (-> conformed -edn->holiday (assoc :holiday type) (merge (parse-common-options conformed))))) (s/fdef edn->holiday :args (s/cat :holiday ::holiday) :ret `config/holiday) (defn edn->configuration "" [configuration] (let [configuration (keywordize-keys configuration)] (-> configuration (update :holidays #(map edn->holiday %)) (update :hierarchy #(-> % name keyword)) (into (for [[k v] (select-keys configuration [:sub-configurations])] [k (map edn->configuration v)]))))) (s/fdef edn->configuration :args (s/cat :configuration ::configuration) :ret ::config/configuration) (defmethod place/configuration-data-source :edn [_] (reify ConfigurationDataSource (getConfiguration [_ parameters] (-> parameters raw/manager-parameter->config edn->configuration config/->Configuration)))) (defmethod place/-create-manager-parameters [String :edn] [calendar-part _] (proxy [CalendarPartManagerParameter] [(place/normalized-calendar-part calendar-part) nil] (createResourceUrl [] (->> calendar-part raw/cal-edn-path str (.getResource (ResourceUtil.)))))) (defmethod place/-create-manager-parameters [clojure.lang.IPersistentMap :edn] [config _] (proxy [BaseManagerParameter 
ConfigSource] [nil] (createCacheKey [] (-> config hash str)) (get_config [] config))) (defmulti -holiday->edn "" :holiday) (defmulti format-common-options "" (fn [style _] style)) (defmethod format-common-options :code [_ holiday] (mapcat (fn [[_ v :as x]] (when v x)) (select-keys holiday [:valid-from :valid-to :every :description-key]))) (defmethod format-common-options :english [_ {:keys [valid-from valid-to every description-key]}] (concat (cond (and valid-from valid-to) [:valid :from valid-from :to valid-to] valid-from [:valid :from valid-from] valid-to [:valid :to valid-to]) (when every [:every every]) (when description-key [:description-key description-key]))) (defn format-official-marker "" [holiday] (when (= :unofficial-holiday (:localized-type holiday)) [:unofficial])) (defn simplify-edn "" [holiday] (if (next holiday) holiday (first holiday))) (s/def ::edn-style #{:code :english}) (defmulti apply-style "" #(-> %2)) (defmethod apply-style :code [holiday _] holiday) (defmethod apply-style :english [holiday _] (into [] (map #(if (keyword? %) (-> % name symbol) %)) holiday)) (defn holiday->edn "" [style holiday] (-> (-holiday->edn holiday) (into (format-common-options style holiday)) (into (format-official-marker holiday)) (apply-style style) simplify-edn)) (s/fdef holiday->edn :args (s/cat :style ::edn-style, :holiday `config/holiday) :ret ::holiday) (defmulti format-configuration "" #(-> %2)) (defmethod format-configuration :code [configuration _] configuration) (defmethod format-configuration :english [configuration _] (into {} (map (fn [[k v]] [(-> k name symbol) v])) configuration)) (defn format-keyword "" [style keyword] (case style :english (-> keyword name symbol) :code keyword)) (defn configuration->edn "" [style configuration] (-> configuration (update :holidays #(for [h %] (holiday->edn style h))) (update :hierarchy #(format-keyword style %)) (into (for [[k v] (select-keys configuration [:sub-configurations])] [k (for [c v] (configuration->edn style c))])) (format-configuration style))) (s/fdef configuration->edn :args (s/cat :style ::edn-style, :configuration ::config/configuration) :ret ::configuration) (defn composite-definition "" [holiday] (some-> holiday val :definition val)) (defn parse-moving-conditions "" [holiday] (when-let [conditions (:moving-conditions holiday)] {:moving-conditions (into [] (mapcat #(for [s (:substitutions %)] (select-keys s [:substitute :with :weekday]))) conditions)})) (defn parse-fixed-edn "" [fixed] (merge (select-keys fixed [:month :day]) (parse-moving-conditions fixed))) (defmethod -edn->holiday :fixed [holiday] (parse-fixed-edn (composite-definition holiday))) (defn format-moving-conditions "" [holiday] (when-let [moving-conditions (:moving-conditions holiday)] (into [:substitute] (mapcat (juxt :substitute (constantly :with) :with :weekday)) moving-conditions))) (defn format-fixed-edn "" [fixed] (let [{:keys [month day]} fixed] (into [month day] (format-moving-conditions fixed)))) (defmethod -holiday->edn :fixed [holiday] (format-fixed-edn holiday)) (defmethod -edn->holiday :relative-to-fixed [holiday] (let [{:keys [offset when reference]} (composite-definition holiday) offset-type (key offset)] {:when when :date (parse-fixed-edn reference) offset-type ((case offset-type :days :days :weekday identity) (val offset))})) (defmethod -holiday->edn :relative-to-fixed [holiday] (let [{:keys [when days weekday date]} holiday offset (if days [days :days] [weekday])] (into offset (cons when (format-fixed-edn date))))) (defmethod -edn->holiday 
:fixed-weekday-between-fixed [holiday] (let [{:keys [from to weekday]} (composite-definition holiday)] {:from (parse-fixed-edn from) :to (parse-fixed-edn to) :weekday weekday})) (defmethod -holiday->edn :fixed-weekday-between-fixed [holiday] (let [{:keys [from to weekday]} holiday] (into [weekday :between] (concat (format-fixed-edn from) [:and] (format-fixed-edn to))))) (defn parse-fixed-weekday-edn "" [fixed-weekday] (select-keys fixed-weekday [:month :which :weekday])) (defmethod -edn->holiday :fixed-weekday [holiday] (parse-fixed-weekday-edn (composite-definition holiday))) (defmethod -holiday->edn :fixed-weekday [holiday] (let [{:keys [month which weekday]} holiday] [which weekday :of month])) (defmethod -edn->holiday :relative-to-weekday-in-month [holiday] (let [definition (composite-definition holiday)] (assoc (select-keys definition [:weekday :when]) :fixed-weekday (parse-fixed-weekday-edn (:reference definition))))) (defmethod -holiday->edn :relative-to-weekday-in-month [holiday] (let [{:keys [weekday when fixed-weekday]} holiday {:keys [month which] ref-weekday :weekday} fixed-weekday] [weekday when which ref-weekday :of month])) (defmethod -edn->holiday :fixed-weekday-relative-to-fixed [holiday] (let [definition (composite-definition holiday)] (assoc (select-keys definition [:which :weekday :when]) :date (parse-fixed-edn (:reference definition))))) (defmethod -holiday->edn :fixed-weekday-relative-to-fixed [holiday] (let [{:keys [which weekday when date]} holiday] (into [which weekday when] (format-fixed-edn date)))) (defmethod -edn->holiday :christian-holiday [holiday] (if-let [definition (composite-definition holiday)] (let [{:keys [chronology]} definition] (cond-> (select-keys definition [:type]) chronology (assoc :chronology chronology) :alwas (merge (parse-moving-conditions definition)))) {:type (val holiday)})) (defmethod -holiday->edn :christian-holiday [holiday] (let [{:keys [type chronology]} holiday] (into [] (keep identity) (concat [chronology type] (format-moving-conditions holiday))))) (defmethod -edn->holiday :relative-to-easter-sunday [holiday] (let [{:keys [days when chronology] :or {when :after, chronology :gregorian}} (composite-definition holiday) sign ({:before -1, :after 1} when)] {:days (* sign days) :chronology chronology})) (defmethod -holiday->edn :relative-to-easter-sunday [holiday] (let [{:keys [days chronology]} holiday] (if (neg? days) [(- days) :days :before chronology :easter] [days :days :after chronology :easter]))) (defn parse-simple-type-edn "" [holiday] {:type (or (composite-definition holiday) (val holiday))}) (defmethod -edn->holiday :islamic-holiday [holiday] (parse-simple-type-edn holiday)) (defn format-simple-type-edn "" [holiday] [(:type holiday)]) (defmethod -holiday->edn :islamic-holiday [holiday] (format-simple-type-edn holiday)) (defmethod -edn->holiday :hindu-holiday [holiday] (parse-simple-type-edn holiday)) (defmethod -holiday->edn :hindu-holiday [holiday] (format-simple-type-edn holiday)) (defmethod -edn->holiday :hebrew-holiday [holiday] (parse-simple-type-edn holiday)) (defmethod -holiday->edn :hebrew-holiday [holiday] (format-simple-type-edn holiday)) (defmethod -edn->holiday :ethiopian-orthodox-holiday [holiday] (parse-simple-type-edn holiday)) (defmethod -holiday->edn :ethiopian-orthodox-holiday [holiday] (format-simple-type-edn holiday)) ;; Copyright 2018 <NAME> ;; ;; Licensed under the Apache License, Version 2.0 (the "License"); ;; you may not use this file except in compliance with the License. 
;; You may obtain a copy of the License at ;; ;; http://www.apache.org/licenses/LICENSE-2.0 ;; ;; Unless required by applicable law or agreed to in writing, software ;; distributed under the License is distributed on an "AS IS" BASIS, ;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express ;; or implied. See the License for the specific language governing ;; permissions and limitations under the License.
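;; ---------------------------------------------------------------------------
;; A minimal REPL sketch (not part of the original source) showing how the EDN
;; configuration functions above might be exercised. The concrete holiday form
;; and the expected results are assumptions based on the specs and multimethods
;; defined earlier (e.g. that `config/months` contains :january and that
;; ::config/valid-from accepts a plain year).
(comment
  ;; Parse an EDN holiday form into the internal map representation.
  (edn->holiday '[january 1 valid from 1967])
  ;; => should yield something like
  ;;    {:holiday :fixed, :month :january, :day 1, :valid-from 1967}

  ;; Format an internal holiday map back out as EDN, keyword (:code) style.
  (holiday->edn :code {:holiday :fixed, :month :january, :day 1})
  ;; => [:january 1]
  )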
;; crux-rdf/test/crux/sparql_test.clj (from the neuromantik33/crux repository)
(ns crux.sparql-test (:require [clojure.test :as t] [clojure.java.io :as io] [crux.io :as cio] [crux.rdf :as rdf] [crux.sparql :as sparql])) (t/deftest test-can-parse-sparql-to-datalog (t/testing "Apacha Jena Tutorial" (t/is (= (rdf/with-prefix {:vcard "http://www.w3.org/2001/vcard-rdf/3.0#"} '{:find [?x] :where [[?x :vcard/FN "John Smith"]]}) (sparql/sparql->datalog " SELECT ?x WHERE { ?x <http://www.w3.org/2001/vcard-rdf/3.0#FN> \"John Smith\" }"))) (t/is (= (rdf/with-prefix {:vcard "http://www.w3.org/2001/vcard-rdf/3.0#"} '{:find [?y ?givenName] :where [[?y :vcard/Family "Smith"] [?y :vcard/Given ?givenName]]}) (sparql/sparql->datalog " SELECT ?y ?givenName WHERE { ?y <http://www.w3.org/2001/vcard-rdf/3.0#Family> \"Smith\" . ?y <http://www.w3.org/2001/vcard-rdf/3.0#Given> ?givenName . }"))) (t/is (= (cio/pr-edn-str (rdf/with-prefix {:vcard "http://www.w3.org/2001/vcard-rdf/3.0#"} '{:find [?g] :where [[?y :vcard/Given ?g] [(re-find #"(?i)r" ?g)]]})) (cio/pr-edn-str (sparql/sparql->datalog " PREFIX vcard: <http://www.w3.org/2001/vcard-rdf/3.0#> SELECT ?g WHERE { ?y vcard:Given ?g . FILTER regex(?g, \"r\", \"i\") }")))) (t/is (= (rdf/with-prefix {:info "http://somewhere/peopleInfo#"} '{:find [?resource] :where [[?resource :info/age ?age] [(>= ?age 24)]]}) (sparql/sparql->datalog " PREFIX info: <http://somewhere/peopleInfo#> SELECT ?resource WHERE { ?resource info:age ?age . FILTER (?age >= 24) }"))) (t/is (= (rdf/with-prefix {:foaf "http://xmlns.com/foaf/0.1/" :vcard "http://www.w3.org/2001/vcard-rdf/3.0#"} '{:find [?name] :where [(or [?_anon_1 :foaf/name ?name] [?_anon_2 :vcard/FN ?name])]}) (sparql/sparql->datalog " PREFIX foaf: <http://xmlns.com/foaf/0.1/> PREFIX vCard: <http://www.w3.org/2001/vcard-rdf/3.0#> SELECT ?name WHERE { { [] foaf:name ?name } UNION { [] vCard:FN ?name } }"))) (t/is (= '{:find [?name], :where [[?x :http://xmlns.com/foaf/0.1/givenName ?name] [?x :http://xmlns.com/foaf/0.1/knows ?y] [(== ?y #{:http://example.org/A :http://example.org/B})]]} (sparql/sparql->datalog " PREFIX foaf: <http://xmlns.com/foaf/0.1/> SELECT ?name WHERE { ?x foaf:givenName ?name . ?x foaf:knows ?y . FILTER(?y IN (<http://example.org/A>, <http://example.org/B>)) }"))) (t/is (= '{:find [?name], :where [[?x :http://xmlns.com/foaf/0.1/givenName ?name] [?x :http://xmlns.com/foaf/0.1/knows ?y] [(!= ?y :http://example.org/A)] [(!= ?y :http://example.org/B)]]} (sparql/sparql->datalog " PREFIX foaf: <http://xmlns.com/foaf/0.1/> SELECT ?name WHERE { ?x foaf:givenName ?name . ?x foaf:knows ?y . FILTER(?y NOT IN (<http://example.org/A>, <http://example.org/B>)) }")))) ;; https://www.w3.org/TR/2013/REC-sparql11-query-20130321 (t/testing "SPARQL 1.1" (t/is (= '{:find [?title], :where [[:http://example.org/book/book1 :http://purl.org/dc/elements/1.1/title ?title]]} (sparql/sparql->datalog "SELECT ?title WHERE { <http://example.org/book/book1> <http://purl.org/dc/elements/1.1/title> ?title . }"))) (t/is (= '{:find [?name ?mbox], :where [[?x :http://xmlns.com/foaf/0.1/name ?name] [?x :http://xmlns.com/foaf/0.1/mbox ?mbox]]} (sparql/sparql->datalog "PREFIX foaf: <http://xmlns.com/foaf/0.1/> SELECT ?name ?mbox WHERE { ?x foaf:name ?name . ?x foaf:mbox ?mbox }"))) (t/is (thrown-with-msg? 
UnsupportedOperationException #"Does not support variables in predicate position: \?p" (sparql/sparql->datalog "SELECT ?v WHERE { ?v ?p \"cat\"@en }"))) (t/is (= '{:find [?v], :where [[?v :http://xmlns.com/foaf/0.1/givenName "cat"]]} (sparql/sparql->datalog "SELECT ?v WHERE { ?v <http://xmlns.com/foaf/0.1/givenName> \"cat\"@en }"))) (t/is (= '{:find [?name], :where [[?P :http://xmlns.com/foaf/0.1/givenName ?G] [?P :http://xmlns.com/foaf/0.1/surname ?S] [(http://www.w3.org/2005/xpath-functions#concat ?G " " ?S) ?name]]} (sparql/sparql->datalog " PREFIX foaf: <http://xmlns.com/foaf/0.1/> SELECT ?name WHERE { ?P foaf:givenName ?G ; foaf:surname ?S BIND(CONCAT(?G, \" \", ?S) AS ?name) }"))) (t/is (= '{:find [?name], :where [[?P :http://xmlns.com/foaf/0.1/givenName ?G] [?P :http://xmlns.com/foaf/0.1/surname ?S] [(http://www.w3.org/2005/xpath-functions#concat ?G " " ?S) ?name]]} (sparql/sparql->datalog " PREFIX foaf: <http://xmlns.com/foaf/0.1/> SELECT ( CONCAT(?G, \" \", ?S) AS ?name ) WHERE { ?P foaf:givenName ?G ; foaf:surname ?S } "))) (t/is (= (cio/pr-edn-str '{:find [?title], :where [[?x :http://purl.org/dc/elements/1.1/title ?title] [(re-find #"^SPARQL" ?title)]]}) (cio/pr-edn-str (sparql/sparql->datalog " PREFIX dc: <http://purl.org/dc/elements/1.1/> SELECT ?title WHERE { ?x dc:title ?title FILTER regex(?title, \"^SPARQL\") }")))) (t/is (= (cio/pr-edn-str '{:find [?title], :where [[?x :http://purl.org/dc/elements/1.1/title ?title] [(re-find #"(?i)web" ?title)]]}) (cio/pr-edn-str (sparql/sparql->datalog " PREFIX dc: <http://purl.org/dc/elements/1.1/> SELECT ?title WHERE { ?x dc:title ?title FILTER regex(?title, \"web\", \"i\" ) }")))) (t/is (= '{:find [?title ?price], :where [[?x :http://example.org/ns#price ?price] [?x :http://purl.org/dc/elements/1.1/title ?title] [(< ?price 30.5M)]]} (sparql/sparql->datalog " PREFIX dc: <http://purl.org/dc/elements/1.1/> PREFIX ns: <http://example.org/ns#> SELECT ?title ?price WHERE { ?x ns:price ?price . FILTER (?price < 30.5) ?x dc:title ?title . }"))) (t/is (= '{:find [?name ?mbox], :where [[?x :http://xmlns.com/foaf/0.1/name ?name] (or-join [?mbox ?x] [?x :http://xmlns.com/foaf/0.1/mbox ?mbox] (and [(identity :crux.sparql/optional) ?mbox] (not [?x :http://xmlns.com/foaf/0.1/mbox])))]} (sparql/sparql->datalog " PREFIX foaf: <http://xmlns.com/foaf/0.1/> SELECT ?name ?mbox WHERE { ?x foaf:name ?name . OPTIONAL { ?x foaf:mbox ?mbox } }"))) (t/is (= '{:find [?title ?price], :where [[?x :http://purl.org/dc/elements/1.1/title ?title] (or-join [?x ?price] (and [?x :http://example.org/ns#price ?price] [(< ?price 30)]) (and (not [?x :http://example.org/ns#price]) [(identity :crux.sparql/optional) ?price]))]} (sparql/sparql->datalog " PREFIX dc: <http://purl.org/dc/elements/1.1/> PREFIX ns: <http://example.org/ns#> SELECT ?title ?price WHERE { ?x dc:title ?title . OPTIONAL { ?x ns:price ?price . 
FILTER (?price < 30) } }"))) (t/is (= '{:find [?title], :where [(or [?book :http://purl.org/dc/elements/1.0/title ?title] [?book :http://purl.org/dc/elements/1.1/title ?title])]} (sparql/sparql->datalog " PREFIX dc10: <http://purl.org/dc/elements/1.0/> PREFIX dc11: <http://purl.org/dc/elements/1.1/> SELECT ?title WHERE { { ?book dc10:title ?title } UNION { ?book dc11:title ?title } }"))) ;; TODO: this should really be working like optional and select ;; both ?x and ?y and not ?book (t/is (= '{:find [?book], :where [(or-join [?book] [?book :http://purl.org/dc/elements/1.0/title ?x] [?book :http://purl.org/dc/elements/1.1/title ?y])]} (sparql/sparql->datalog " PREFIX dc10: <http://purl.org/dc/elements/1.0/> PREFIX dc11: <http://purl.org/dc/elements/1.1/> SELECT ?book WHERE { { ?book dc10:title ?x } UNION { ?book dc11:title ?y } }"))) (t/is (= '{:find [?title ?author], :where [(or (and [?book :http://purl.org/dc/elements/1.0/title ?title] [?book :http://purl.org/dc/elements/1.0/creator ?author]) (and [?book :http://purl.org/dc/elements/1.1/title ?title] [?book :http://purl.org/dc/elements/1.1/creator ?author]))]} (sparql/sparql->datalog " PREFIX dc10: <http://purl.org/dc/elements/1.0/> PREFIX dc11: <http://purl.org/dc/elements/1.1/> SELECT ?title ?author WHERE { { ?book dc10:title ?title . ?book dc10:creator ?author } UNION { ?book dc11:title ?title . ?book dc11:creator ?author } }"))) (t/is (= (rdf/with-prefix '{:find [?person], :where [[?person :rdf/type :http://xmlns.com/foaf/0.1/Person] (not-join [?person] [?person :http://xmlns.com/foaf/0.1/name ?name])]}) (sparql/sparql->datalog " PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> PREFIX foaf: <http://xmlns.com/foaf/0.1/> SELECT ?person WHERE { ?person rdf:type foaf:Person . FILTER NOT EXISTS { ?person foaf:name ?name } }"))) (t/is (= (rdf/with-prefix '{:find [?person], :where [[?person :rdf/type :http://xmlns.com/foaf/0.1/Person] [?person :http://xmlns.com/foaf/0.1/name ?name]]}) (sparql/sparql->datalog " PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> PREFIX foaf: <http://xmlns.com/foaf/0.1/> SELECT ?person WHERE { ?person rdf:type foaf:Person . FILTER EXISTS { ?person foaf:name ?name } }"))) ;; NOTE: original has DISTINCT in select and ?p as predicate. (t/is (thrown-with-msg? UnsupportedOperationException #"MINUS not supported, use NOT EXISTS." (sparql/sparql->datalog " PREFIX : <http://example/> PREFIX foaf: <http://xmlns.com/foaf/0.1/> SELECT ?s WHERE { ?s foaf:givenName ?o . MINUS { ?s foaf:givenName \"Bob\" . } }"))) ;; NOTE: Adapted to remove first rdf:type/ part of the path which ;; simply expands to a blank node with a random id. 
(t/is (= '{:find [?x ?type], :where [(http://www.w3.org/2000/01/rdf-schema#subClassOf-STAR ?x ?type)] :rules [[(http://www.w3.org/2000/01/rdf-schema#subClassOf-STAR ?s ?o) [?s :http://www.w3.org/2000/01/rdf-schema#subClassOf ?o]] [(http://www.w3.org/2000/01/rdf-schema#subClassOf-STAR ?s ?o) [?s :http://www.w3.org/2000/01/rdf-schema#subClassOf ?t] (http://www.w3.org/2000/01/rdf-schema#subClassOf-STAR ?t ?o)] [(http://www.w3.org/2000/01/rdf-schema#subClassOf-STAR ?s ?o) [?s :crux.db/id] [(identity :crux.sparql/zero-matches) ?o]]]} (sparql/sparql->datalog " PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> SELECT ?x ?type { ?x rdfs:subClassOf* ?type }"))) (t/is (= '{:find [?person], :where [(http://xmlns.com/foaf/0.1/knows-PLUS :http://example/x ?person)] :rules [[(http://xmlns.com/foaf/0.1/knows-PLUS ?s ?o) [?s :http://xmlns.com/foaf/0.1/knows ?o]] [(http://xmlns.com/foaf/0.1/knows-PLUS ?s ?o) [?s :http://xmlns.com/foaf/0.1/knows ?t] (http://xmlns.com/foaf/0.1/knows-PLUS ?t ?o)]]} (sparql/sparql->datalog " PREFIX foaf: <http://xmlns.com/foaf/0.1/> PREFIX : <http://example/> SELECT ?person { :x foaf:knows+ ?person }"))) ;; NOTE: Adapted from above example for zero-or-one. ;; Parses to distinct and ZeroLengthPath in a union. (t/is (= '{:find [?person], :where [(or-join [?person] (and [:http://example/x :crux.db/id] [(identity :crux.sparql/zero-matches) ?person]) [:http://example/x :http://xmlns.com/foaf/0.1/knows ?person])]} (sparql/sparql->datalog " PREFIX foaf: <http://xmlns.com/foaf/0.1/> PREFIX : <http://example/> SELECT ?person { :x foaf:knows? ?person }"))) (t/is (thrown-with-msg? UnsupportedOperationException #"Nested expressions are not supported." (sparql/sparql->datalog " PREFIX dc: <http://purl.org/dc/elements/1.1/> PREFIX ns: <http://example.org/ns#> SELECT ?title ?price { { ?x ns:price ?p . ?x ns:discount ?discount BIND (?p*(1-?discount) AS ?price) } {?x dc:title ?title . } FILTER(?price < 20) } "))) (t/is (= '{:find [?book ?title ?price], :where [[?book :http://purl.org/dc/elements/1.1/title ?title] [?book :http://example.org/ns#price ?price]], :args [{?book :http://example.org/book/book1} {?book :http://example.org/book/book3}]} (sparql/sparql->datalog " PREFIX dc: <http://purl.org/dc/elements/1.1/> PREFIX : <http://example.org/book/> PREFIX ns: <http://example.org/ns#> SELECT ?book ?title ?price { VALUES ?book { :book1 :book3 } ?book dc:title ?title ; ns:price ?price . }"))) (t/is (= '{:find [?book ?title ?price], :where [[?book :http://purl.org/dc/elements/1.1/title ?title] [?book :http://example.org/ns#price ?price]], :args [{?book :crux.sparql/undefined, ?title "SPARQL Tutorial"} {?book :http://example.org/book/book2, ?title :crux.sparql/undefined}]} (sparql/sparql->datalog " PREFIX dc: <http://purl.org/dc/elements/1.1/> PREFIX : <http://example.org/book/> PREFIX ns: <http://example.org/ns#> SELECT ?book ?title ?price { ?book dc:title ?title ; ns:price ?price . 
VALUES (?book ?title) { (UNDEF \"SPARQL Tutorial\") (:book2 UNDEF) } }"))) (t/is (= '{:find [?book ?title ?price], :where [[?book :http://purl.org/dc/elements/1.1/title ?title] [?book :http://example.org/ns#price ?price]], :args [{?book :crux.sparql/undefined, ?title "SPARQL Tutorial"} {?book :http://example.org/book/book2, ?title :crux.sparql/undefined}]} (sparql/sparql->datalog " PREFIX dc: <http://purl.org/dc/elements/1.1/> PREFIX : <http://example.org/book/> PREFIX ns: <http://example.org/ns#> SELECT ?book ?title ?price { ?book dc:title ?title ; ns:price ?price . } VALUES (?book ?title) { (UNDEF \"SPARQL Tutorial\") (:book2 UNDEF) }"))) (t/is (= '{:find [?name], :where [[?x :http://xmlns.com/foaf/0.1/name ?name]] :limit 20 :order-by [[?name :asc]]} (sparql/sparql->datalog " PREFIX foaf: <http://xmlns.com/foaf/0.1/> SELECT ?name WHERE { ?x foaf:name ?name } ORDER BY ?name LIMIT 20 "))) (t/is (= (rdf/with-prefix {:wsdbm "http://db.uwaterloo.ca/~galuc/wsdbm/"} '{:find [?v0 ?v1 ?v5 ?v2 ?v3] :where [[?v0 :wsdbm/gender :wsdbm/Gender1] [?v0 :http://purl.org/dc/terms/Location ?v1] [?v0 :wsdbm/follows ?v0] [?v0 :wsdbm/userId ?v5] [?v1 :http://www.geonames.org/ontology#parentCountry ?v2] [?v3 :http://purl.org/ontology/mo/performed_in ?v1]]}) (sparql/sparql->datalog " SELECT * WHERE { ?v0 <http://db.uwaterloo.ca/~galuc/wsdbm/gender> <http://db.uwaterloo.ca/~galuc/wsdbm/Gender1> . ?v0 <http://purl.org/dc/terms/Location> ?v1 . ?v0 <http://db.uwaterloo.ca/~galuc/wsdbm/follows> ?v0 . ?v0 <http://db.uwaterloo.ca/~galuc/wsdbm/userId> ?v5 . ?v1 <http://www.geonames.org/ontology#parentCountry> ?v2 . ?v3 <http://purl.org/ontology/mo/performed_in> ?v1 . }")))))
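;; ---------------------------------------------------------------------------
;; A hedged sketch (not from the original tests) of how a translated query
;; might be run end-to-end. It assumes the `crux.api` namespace and an
;; in-memory standalone node started with empty options, neither of which is
;; exercised by this test file.
(comment
  (require '[crux.api :as crux])
  (with-open [node (crux/start-node {})]
    (crux/submit-tx node [[:crux.tx/put {:crux.db/id :alice
                                         :http://xmlns.com/foaf/0.1/name "Alice"}]])
    (crux/sync node)
    (crux/q (crux/db node)
            (sparql/sparql->datalog
             "PREFIX foaf: <http://xmlns.com/foaf/0.1/>
              SELECT ?name WHERE { ?x foaf:name ?name }"))))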
(ns crux.sparql-test (:require [clojure.test :as t] [clojure.java.io :as io] [crux.io :as cio] [crux.rdf :as rdf] [crux.sparql :as sparql])) (t/deftest test-can-parse-sparql-to-datalog (t/testing "Apacha Jena Tutorial" (t/is (= (rdf/with-prefix {:vcard "http://www.w3.org/2001/vcard-rdf/3.0#"} '{:find [?x] :where [[?x :vcard/FN "PI:NAME:<NAME>END_PI"]]}) (sparql/sparql->datalog " SELECT ?x WHERE { ?x <http://www.w3.org/2001/vcard-rdf/3.0#FN> \"PI:NAME:<NAME>END_PI\" }"))) (t/is (= (rdf/with-prefix {:vcard "http://www.w3.org/2001/vcard-rdf/3.0#"} '{:find [?y ?givenName] :where [[?y :vcard/Family "PI:NAME:<NAME>END_PI"] [?y :vcard/Given ?givenName]]}) (sparql/sparql->datalog " SELECT ?y ?givenName WHERE { ?y <http://www.w3.org/2001/vcard-rdf/3.0#Family> \"Smith\" . ?y <http://www.w3.org/2001/vcard-rdf/3.0#Given> ?givenName . }"))) (t/is (= (cio/pr-edn-str (rdf/with-prefix {:vcard "http://www.w3.org/2001/vcard-rdf/3.0#"} '{:find [?g] :where [[?y :vcard/Given ?g] [(re-find #"(?i)r" ?g)]]})) (cio/pr-edn-str (sparql/sparql->datalog " PREFIX vcard: <http://www.w3.org/2001/vcard-rdf/3.0#> SELECT ?g WHERE { ?y vcard:Given ?g . FILTER regex(?g, \"r\", \"i\") }")))) (t/is (= (rdf/with-prefix {:info "http://somewhere/peopleInfo#"} '{:find [?resource] :where [[?resource :info/age ?age] [(>= ?age 24)]]}) (sparql/sparql->datalog " PREFIX info: <http://somewhere/peopleInfo#> SELECT ?resource WHERE { ?resource info:age ?age . FILTER (?age >= 24) }"))) (t/is (= (rdf/with-prefix {:foaf "http://xmlns.com/foaf/0.1/" :vcard "http://www.w3.org/2001/vcard-rdf/3.0#"} '{:find [?name] :where [(or [?_anon_1 :foaf/name ?name] [?_anon_2 :vcard/FN ?name])]}) (sparql/sparql->datalog " PREFIX foaf: <http://xmlns.com/foaf/0.1/> PREFIX vCard: <http://www.w3.org/2001/vcard-rdf/3.0#> SELECT ?name WHERE { { [] foaf:name ?name } UNION { [] vCard:FN ?name } }"))) (t/is (= '{:find [?name], :where [[?x :http://xmlns.com/foaf/0.1/givenName ?name] [?x :http://xmlns.com/foaf/0.1/knows ?y] [(== ?y #{:http://example.org/A :http://example.org/B})]]} (sparql/sparql->datalog " PREFIX foaf: <http://xmlns.com/foaf/0.1/> SELECT ?name WHERE { ?x foaf:givenName ?name . ?x foaf:knows ?y . FILTER(?y IN (<http://example.org/A>, <http://example.org/B>)) }"))) (t/is (= '{:find [?name], :where [[?x :http://xmlns.com/foaf/0.1/givenName ?name] [?x :http://xmlns.com/foaf/0.1/knows ?y] [(!= ?y :http://example.org/A)] [(!= ?y :http://example.org/B)]]} (sparql/sparql->datalog " PREFIX foaf: <http://xmlns.com/foaf/0.1/> SELECT ?name WHERE { ?x foaf:givenName ?name . ?x foaf:knows ?y . FILTER(?y NOT IN (<http://example.org/A>, <http://example.org/B>)) }")))) ;; https://www.w3.org/TR/2013/REC-sparql11-query-20130321 (t/testing "SPARQL 1.1" (t/is (= '{:find [?title], :where [[:http://example.org/book/book1 :http://purl.org/dc/elements/1.1/title ?title]]} (sparql/sparql->datalog "SELECT ?title WHERE { <http://example.org/book/book1> <http://purl.org/dc/elements/1.1/title> ?title . }"))) (t/is (= '{:find [?name ?mbox], :where [[?x :http://xmlns.com/foaf/0.1/name ?name] [?x :http://xmlns.com/foaf/0.1/mbox ?mbox]]} (sparql/sparql->datalog "PREFIX foaf: <http://xmlns.com/foaf/0.1/> SELECT ?name ?mbox WHERE { ?x foaf:name ?name . ?x foaf:mbox ?mbox }"))) (t/is (thrown-with-msg? 
UnsupportedOperationException #"Does not support variables in predicate position: \?p" (sparql/sparql->datalog "SELECT ?v WHERE { ?v ?p \"cat\"@en }"))) (t/is (= '{:find [?v], :where [[?v :http://xmlns.com/foaf/0.1/givenName "cat"]]} (sparql/sparql->datalog "SELECT ?v WHERE { ?v <http://xmlns.com/foaf/0.1/givenName> \"cat\"@en }"))) (t/is (= '{:find [?name], :where [[?P :http://xmlns.com/foaf/0.1/givenName ?G] [?P :http://xmlns.com/foaf/0.1/surname ?S] [(http://www.w3.org/2005/xpath-functions#concat ?G " " ?S) ?name]]} (sparql/sparql->datalog " PREFIX foaf: <http://xmlns.com/foaf/0.1/> SELECT ?name WHERE { ?P foaf:givenName ?G ; foaf:surname ?S BIND(CONCAT(?G, \" \", ?S) AS ?name) }"))) (t/is (= '{:find [?name], :where [[?P :http://xmlns.com/foaf/0.1/givenName ?G] [?P :http://xmlns.com/foaf/0.1/surname ?S] [(http://www.w3.org/2005/xpath-functions#concat ?G " " ?S) ?name]]} (sparql/sparql->datalog " PREFIX foaf: <http://xmlns.com/foaf/0.1/> SELECT ( CONCAT(?G, \" \", ?S) AS ?name ) WHERE { ?P foaf:givenName ?G ; foaf:surname ?S } "))) (t/is (= (cio/pr-edn-str '{:find [?title], :where [[?x :http://purl.org/dc/elements/1.1/title ?title] [(re-find #"^SPARQL" ?title)]]}) (cio/pr-edn-str (sparql/sparql->datalog " PREFIX dc: <http://purl.org/dc/elements/1.1/> SELECT ?title WHERE { ?x dc:title ?title FILTER regex(?title, \"^SPARQL\") }")))) (t/is (= (cio/pr-edn-str '{:find [?title], :where [[?x :http://purl.org/dc/elements/1.1/title ?title] [(re-find #"(?i)web" ?title)]]}) (cio/pr-edn-str (sparql/sparql->datalog " PREFIX dc: <http://purl.org/dc/elements/1.1/> SELECT ?title WHERE { ?x dc:title ?title FILTER regex(?title, \"web\", \"i\" ) }")))) (t/is (= '{:find [?title ?price], :where [[?x :http://example.org/ns#price ?price] [?x :http://purl.org/dc/elements/1.1/title ?title] [(< ?price 30.5M)]]} (sparql/sparql->datalog " PREFIX dc: <http://purl.org/dc/elements/1.1/> PREFIX ns: <http://example.org/ns#> SELECT ?title ?price WHERE { ?x ns:price ?price . FILTER (?price < 30.5) ?x dc:title ?title . }"))) (t/is (= '{:find [?name ?mbox], :where [[?x :http://xmlns.com/foaf/0.1/name ?name] (or-join [?mbox ?x] [?x :http://xmlns.com/foaf/0.1/mbox ?mbox] (and [(identity :crux.sparql/optional) ?mbox] (not [?x :http://xmlns.com/foaf/0.1/mbox])))]} (sparql/sparql->datalog " PREFIX foaf: <http://xmlns.com/foaf/0.1/> SELECT ?name ?mbox WHERE { ?x foaf:name ?name . OPTIONAL { ?x foaf:mbox ?mbox } }"))) (t/is (= '{:find [?title ?price], :where [[?x :http://purl.org/dc/elements/1.1/title ?title] (or-join [?x ?price] (and [?x :http://example.org/ns#price ?price] [(< ?price 30)]) (and (not [?x :http://example.org/ns#price]) [(identity :crux.sparql/optional) ?price]))]} (sparql/sparql->datalog " PREFIX dc: <http://purl.org/dc/elements/1.1/> PREFIX ns: <http://example.org/ns#> SELECT ?title ?price WHERE { ?x dc:title ?title . OPTIONAL { ?x ns:price ?price . 
FILTER (?price < 30) } }"))) (t/is (= '{:find [?title], :where [(or [?book :http://purl.org/dc/elements/1.0/title ?title] [?book :http://purl.org/dc/elements/1.1/title ?title])]} (sparql/sparql->datalog " PREFIX dc10: <http://purl.org/dc/elements/1.0/> PREFIX dc11: <http://purl.org/dc/elements/1.1/> SELECT ?title WHERE { { ?book dc10:title ?title } UNION { ?book dc11:title ?title } }"))) ;; TODO: this should really be working like optional and select ;; both ?x and ?y and not ?book (t/is (= '{:find [?book], :where [(or-join [?book] [?book :http://purl.org/dc/elements/1.0/title ?x] [?book :http://purl.org/dc/elements/1.1/title ?y])]} (sparql/sparql->datalog " PREFIX dc10: <http://purl.org/dc/elements/1.0/> PREFIX dc11: <http://purl.org/dc/elements/1.1/> SELECT ?book WHERE { { ?book dc10:title ?x } UNION { ?book dc11:title ?y } }"))) (t/is (= '{:find [?title ?author], :where [(or (and [?book :http://purl.org/dc/elements/1.0/title ?title] [?book :http://purl.org/dc/elements/1.0/creator ?author]) (and [?book :http://purl.org/dc/elements/1.1/title ?title] [?book :http://purl.org/dc/elements/1.1/creator ?author]))]} (sparql/sparql->datalog " PREFIX dc10: <http://purl.org/dc/elements/1.0/> PREFIX dc11: <http://purl.org/dc/elements/1.1/> SELECT ?title ?author WHERE { { ?book dc10:title ?title . ?book dc10:creator ?author } UNION { ?book dc11:title ?title . ?book dc11:creator ?author } }"))) (t/is (= (rdf/with-prefix '{:find [?person], :where [[?person :rdf/type :http://xmlns.com/foaf/0.1/Person] (not-join [?person] [?person :http://xmlns.com/foaf/0.1/name ?name])]}) (sparql/sparql->datalog " PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> PREFIX foaf: <http://xmlns.com/foaf/0.1/> SELECT ?person WHERE { ?person rdf:type foaf:Person . FILTER NOT EXISTS { ?person foaf:name ?name } }"))) (t/is (= (rdf/with-prefix '{:find [?person], :where [[?person :rdf/type :http://xmlns.com/foaf/0.1/Person] [?person :http://xmlns.com/foaf/0.1/name ?name]]}) (sparql/sparql->datalog " PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> PREFIX foaf: <http://xmlns.com/foaf/0.1/> SELECT ?person WHERE { ?person rdf:type foaf:Person . FILTER EXISTS { ?person foaf:name ?name } }"))) ;; NOTE: original has DISTINCT in select and ?p as predicate. (t/is (thrown-with-msg? UnsupportedOperationException #"MINUS not supported, use NOT EXISTS." (sparql/sparql->datalog " PREFIX : <http://example/> PREFIX foaf: <http://xmlns.com/foaf/0.1/> SELECT ?s WHERE { ?s foaf:givenName ?o . MINUS { ?s foaf:givenName \"PI:NAME:<NAME>END_PI\" . } }"))) ;; NOTE: Adapted to remove first rdf:type/ part of the path which ;; simply expands to a blank node with a random id. 
(t/is (= '{:find [?x ?type], :where [(http://www.w3.org/2000/01/rdf-schema#subClassOf-STAR ?x ?type)] :rules [[(http://www.w3.org/2000/01/rdf-schema#subClassOf-STAR ?s ?o) [?s :http://www.w3.org/2000/01/rdf-schema#subClassOf ?o]] [(http://www.w3.org/2000/01/rdf-schema#subClassOf-STAR ?s ?o) [?s :http://www.w3.org/2000/01/rdf-schema#subClassOf ?t] (http://www.w3.org/2000/01/rdf-schema#subClassOf-STAR ?t ?o)] [(http://www.w3.org/2000/01/rdf-schema#subClassOf-STAR ?s ?o) [?s :crux.db/id] [(identity :crux.sparql/zero-matches) ?o]]]} (sparql/sparql->datalog " PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> SELECT ?x ?type { ?x rdfs:subClassOf* ?type }"))) (t/is (= '{:find [?person], :where [(http://xmlns.com/foaf/0.1/knows-PLUS :http://example/x ?person)] :rules [[(http://xmlns.com/foaf/0.1/knows-PLUS ?s ?o) [?s :http://xmlns.com/foaf/0.1/knows ?o]] [(http://xmlns.com/foaf/0.1/knows-PLUS ?s ?o) [?s :http://xmlns.com/foaf/0.1/knows ?t] (http://xmlns.com/foaf/0.1/knows-PLUS ?t ?o)]]} (sparql/sparql->datalog " PREFIX foaf: <http://xmlns.com/foaf/0.1/> PREFIX : <http://example/> SELECT ?person { :x foaf:knows+ ?person }"))) ;; NOTE: Adapted from above example for zero-or-one. ;; Parses to distinct and ZeroLengthPath in a union. (t/is (= '{:find [?person], :where [(or-join [?person] (and [:http://example/x :crux.db/id] [(identity :crux.sparql/zero-matches) ?person]) [:http://example/x :http://xmlns.com/foaf/0.1/knows ?person])]} (sparql/sparql->datalog " PREFIX foaf: <http://xmlns.com/foaf/0.1/> PREFIX : <http://example/> SELECT ?person { :x foaf:knows? ?person }"))) (t/is (thrown-with-msg? UnsupportedOperationException #"Nested expressions are not supported." (sparql/sparql->datalog " PREFIX dc: <http://purl.org/dc/elements/1.1/> PREFIX ns: <http://example.org/ns#> SELECT ?title ?price { { ?x ns:price ?p . ?x ns:discount ?discount BIND (?p*(1-?discount) AS ?price) } {?x dc:title ?title . } FILTER(?price < 20) } "))) (t/is (= '{:find [?book ?title ?price], :where [[?book :http://purl.org/dc/elements/1.1/title ?title] [?book :http://example.org/ns#price ?price]], :args [{?book :http://example.org/book/book1} {?book :http://example.org/book/book3}]} (sparql/sparql->datalog " PREFIX dc: <http://purl.org/dc/elements/1.1/> PREFIX : <http://example.org/book/> PREFIX ns: <http://example.org/ns#> SELECT ?book ?title ?price { VALUES ?book { :book1 :book3 } ?book dc:title ?title ; ns:price ?price . }"))) (t/is (= '{:find [?book ?title ?price], :where [[?book :http://purl.org/dc/elements/1.1/title ?title] [?book :http://example.org/ns#price ?price]], :args [{?book :crux.sparql/undefined, ?title "SPARQL Tutorial"} {?book :http://example.org/book/book2, ?title :crux.sparql/undefined}]} (sparql/sparql->datalog " PREFIX dc: <http://purl.org/dc/elements/1.1/> PREFIX : <http://example.org/book/> PREFIX ns: <http://example.org/ns#> SELECT ?book ?title ?price { ?book dc:title ?title ; ns:price ?price . 
VALUES (?book ?title) { (UNDEF \"SPARQL Tutorial\") (:book2 UNDEF) } }"))) (t/is (= '{:find [?book ?title ?price], :where [[?book :http://purl.org/dc/elements/1.1/title ?title] [?book :http://example.org/ns#price ?price]], :args [{?book :crux.sparql/undefined, ?title "SPARQL Tutorial"} {?book :http://example.org/book/book2, ?title :crux.sparql/undefined}]} (sparql/sparql->datalog " PREFIX dc: <http://purl.org/dc/elements/1.1/> PREFIX : <http://example.org/book/> PREFIX ns: <http://example.org/ns#> SELECT ?book ?title ?price { ?book dc:title ?title ; ns:price ?price . } VALUES (?book ?title) { (UNDEF \"SPARQL Tutorial\") (:book2 UNDEF) }"))) (t/is (= '{:find [?name], :where [[?x :http://xmlns.com/foaf/0.1/name ?name]] :limit 20 :order-by [[?name :asc]]} (sparql/sparql->datalog " PREFIX foaf: <http://xmlns.com/foaf/0.1/> SELECT ?name WHERE { ?x foaf:name ?name } ORDER BY ?name LIMIT 20 "))) (t/is (= (rdf/with-prefix {:wsdbm "http://db.uwaterloo.ca/~galuc/wsdbm/"} '{:find [?v0 ?v1 ?v5 ?v2 ?v3] :where [[?v0 :wsdbm/gender :wsdbm/Gender1] [?v0 :http://purl.org/dc/terms/Location ?v1] [?v0 :wsdbm/follows ?v0] [?v0 :wsdbm/userId ?v5] [?v1 :http://www.geonames.org/ontology#parentCountry ?v2] [?v3 :http://purl.org/ontology/mo/performed_in ?v1]]}) (sparql/sparql->datalog " SELECT * WHERE { ?v0 <http://db.uwaterloo.ca/~galuc/wsdbm/gender> <http://db.uwaterloo.ca/~galuc/wsdbm/Gender1> . ?v0 <http://purl.org/dc/terms/Location> ?v1 . ?v0 <http://db.uwaterloo.ca/~galuc/wsdbm/follows> ?v0 . ?v0 <http://db.uwaterloo.ca/~galuc/wsdbm/userId> ?v5 . ?v1 <http://www.geonames.org/ontology#parentCountry> ?v2 . ?v3 <http://purl.org/ontology/mo/performed_in> ?v1 . }")))))
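The expectations in the row above all pass a raw SPARQL string through sparql->datalog and compare the resulting Datalog map (the :crux.sparql/undefined and :crux.sparql/zero-matches keywords indicate the crux.sparql namespace). As a minimal sketch, not taken from the dataset, the translated query could be run directly against a Crux database value, assuming the standard Crux 1.x query API (crux.api/db, crux.api/q); node construction and configuration are omitted here.

;; Minimal sketch, assuming the Crux 1.x API and the crux.sparql namespace
;; exercised by the test above; how `node` is started is left out.
(require '[crux.api :as crux]
         '[crux.sparql :as sparql])

(defn run-sparql
  "Translates a SPARQL string to Datalog and runs it against the current db value."
  [node sparql-string]
  (crux/q (crux/db node)
          (sparql/sparql->datalog sparql-string)))

(comment
  ;; e.g. (run-sparql node "SELECT ?name WHERE { ?x <http://xmlns.com/foaf/0.1/name> ?name }")
  )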
[ { "context": " :private-key-password \"Password\"\n :agent-forwa", "end": 1725, "score": 0.9989977478981018, "start": 1717, "tag": "PASSWORD", "value": "Password" } ]
test/org/zalando/stups/even/ssh_test.clj
zalando-stups/even
43
(ns org.zalando.stups.even.ssh-test (:require [clojure.test :refer :all] [org.zalando.stups.even.ssh :refer :all] [clj-ssh.ssh :refer :all] [clojure.java.io :as io] [clojure.string :as str]) (:import (org.testcontainers.images.builder ImageFromDockerfile) (org.testcontainers.containers GenericContainer BindMode) (org.testcontainers.containers.wait HostPortWaitStrategy) (java.time Duration) (java.time.temporal ChronoUnit))) (defn load-key [key-id] (-> key-id io/resource slurp str/trim)) (def key-files ["key1.pem" "key2.pem"]) (def all-keys (str/join "\n" (map load-key key-files))) (defn test-with-pubkey [pub-key-file] (let [image (-> (ImageFromDockerfile.) (.withFileFromClasspath "Dockerfile", "dockerfile.sshd") (.withFileFromClasspath "entrypoint.sh", "entrypoint.sh")) container (doto (GenericContainer. image) (.addExposedPort (int 22)) (.addFileSystemBind pub-key-file "/authorized_keys" BindMode/READ_ONLY) (.setWaitStrategy (-> (HostPortWaitStrategy.) (.withStartupTimeout (Duration/of 60 ChronoUnit/SECONDS)))) (.start))] (try (is (= {:exit 0, :out "foobar", :err ""} (execute-ssh (.getContainerIpAddress container) "echo -n foobar" {:config {:user "root" :port (.getMappedPort container 22) :private-keys all-keys :private-key-password "Password" :agent-forwarding true :timeout 30}}))) (finally (.close container))))) (deftest test-execute-ssh (doseq [key key-files] (test-with-pubkey (format "dev-resources/%s.pub" key))))
103674
(ns org.zalando.stups.even.ssh-test (:require [clojure.test :refer :all] [org.zalando.stups.even.ssh :refer :all] [clj-ssh.ssh :refer :all] [clojure.java.io :as io] [clojure.string :as str]) (:import (org.testcontainers.images.builder ImageFromDockerfile) (org.testcontainers.containers GenericContainer BindMode) (org.testcontainers.containers.wait HostPortWaitStrategy) (java.time Duration) (java.time.temporal ChronoUnit))) (defn load-key [key-id] (-> key-id io/resource slurp str/trim)) (def key-files ["key1.pem" "key2.pem"]) (def all-keys (str/join "\n" (map load-key key-files))) (defn test-with-pubkey [pub-key-file] (let [image (-> (ImageFromDockerfile.) (.withFileFromClasspath "Dockerfile", "dockerfile.sshd") (.withFileFromClasspath "entrypoint.sh", "entrypoint.sh")) container (doto (GenericContainer. image) (.addExposedPort (int 22)) (.addFileSystemBind pub-key-file "/authorized_keys" BindMode/READ_ONLY) (.setWaitStrategy (-> (HostPortWaitStrategy.) (.withStartupTimeout (Duration/of 60 ChronoUnit/SECONDS)))) (.start))] (try (is (= {:exit 0, :out "foobar", :err ""} (execute-ssh (.getContainerIpAddress container) "echo -n foobar" {:config {:user "root" :port (.getMappedPort container 22) :private-keys all-keys :private-key-password "<PASSWORD>" :agent-forwarding true :timeout 30}}))) (finally (.close container))))) (deftest test-execute-ssh (doseq [key key-files] (test-with-pubkey (format "dev-resources/%s.pub" key))))
true
(ns org.zalando.stups.even.ssh-test (:require [clojure.test :refer :all] [org.zalando.stups.even.ssh :refer :all] [clj-ssh.ssh :refer :all] [clojure.java.io :as io] [clojure.string :as str]) (:import (org.testcontainers.images.builder ImageFromDockerfile) (org.testcontainers.containers GenericContainer BindMode) (org.testcontainers.containers.wait HostPortWaitStrategy) (java.time Duration) (java.time.temporal ChronoUnit))) (defn load-key [key-id] (-> key-id io/resource slurp str/trim)) (def key-files ["key1.pem" "key2.pem"]) (def all-keys (str/join "\n" (map load-key key-files))) (defn test-with-pubkey [pub-key-file] (let [image (-> (ImageFromDockerfile.) (.withFileFromClasspath "Dockerfile", "dockerfile.sshd") (.withFileFromClasspath "entrypoint.sh", "entrypoint.sh")) container (doto (GenericContainer. image) (.addExposedPort (int 22)) (.addFileSystemBind pub-key-file "/authorized_keys" BindMode/READ_ONLY) (.setWaitStrategy (-> (HostPortWaitStrategy.) (.withStartupTimeout (Duration/of 60 ChronoUnit/SECONDS)))) (.start))] (try (is (= {:exit 0, :out "foobar", :err ""} (execute-ssh (.getContainerIpAddress container) "echo -n foobar" {:config {:user "root" :port (.getMappedPort container 22) :private-keys all-keys :private-key-password "PI:PASSWORD:<PASSWORD>END_PI" :agent-forwarding true :timeout 30}}))) (finally (.close container))))) (deftest test-execute-ssh (doseq [key key-files] (test-with-pubkey (format "dev-resources/%s.pub" key))))
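The three large cells in the row above show the same SSH test file in three redaction states: content keeps the original literal ("Password"), new_content substitutes bare placeholders ("<PASSWORD>"), and references wraps each span in delimited markers ("PI:PASSWORD:<PASSWORD>END_PI"). As a minimal sketch, not part of the dataset, the delimited form can be collapsed into the bare form with a single regex; the helper name and pattern below are my own and assume markers never nest.

;; Minimal sketch: collapse PI:<TAG>:...END_PI markers (references column)
;; into bare <TAG> placeholders (new_content column).
(require '[clojure.string :as str])

(defn collapse-pi-markers
  "Replaces each PI:<TAG>:...END_PI span in s with <TAG>."
  [s]
  (str/replace s #"PI:([A-Z_]+):.*?END_PI" "<$1>"))

(comment
  (collapse-pi-markers ":private-key-password \"PI:PASSWORD:<PASSWORD>END_PI\"")
  ;; => ":private-key-password \"<PASSWORD>\""
  )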
[ { "context": "all]))\n\n\n(defn setup\n []\n (h/create-test-user! \"[email protected]\" \"Test\" #{:customer :admin})\n (h/create-test-use", "end": 315, "score": 0.9999278783798218, "start": 282, "tag": "EMAIL", "value": "[email protected]" }, { "context": "est\" #{:customer :admin})\n (h/create-test-user! \"[email protected]\")\n (h/create-test-user! \"[email protected]", "end": 402, "score": 0.9999290108680725, "start": 369, "tag": "EMAIL", "value": "[email protected]" }, { "context": "simulator.amazonses.com\")\n (h/create-test-user! \"[email protected]\" \"Test\" #{:customer :admin})\n (h/delete-test-use", "end": 462, "score": 0.9999294877052307, "start": 429, "tag": "EMAIL", "value": "[email protected]" }, { "context": "est\" #{:customer :admin})\n (h/delete-test-user! \"[email protected]\"))\n\n(defn fixture [test]\n (h/ensure-empty-table)", "end": 549, "score": 0.9999269843101501, "start": 516, "tag": "EMAIL", "value": "[email protected]" }, { "context": " :user/email-address \"[email protected]\"\n :user/name \"Test\"", "end": 1026, "score": 0.9999290704727173, "start": 993, "tag": "EMAIL", "value": "[email protected]" }, { "context": "s.com\"\n :user/name \"Test\"\n :user/roles #{:cu", "end": 1075, "score": 0.8905960321426392, "start": 1071, "tag": "NAME", "value": "Test" }, { "context": "oles #{:customer}}}})\n user-id (user/id \"[email protected]\")\n user (user/fetch user-id)\n {", "end": 1198, "score": 0.9999285340309143, "start": 1165, "tag": "EMAIL", "value": "[email protected]" }, { "context": " :user/email-address \"[email protected]\"\n :user/name \"Test\"", "end": 1957, "score": 0.9999276995658875, "start": 1924, "tag": "EMAIL", "value": "[email protected]" }, { "context": "s.com\"\n :user/name \"Test\"\n :user/roles #{:cu", "end": 2006, "score": 0.9303197860717773, "start": 2002, "tag": "USERNAME", "value": "Test" }, { "context": "oles #{:customer}}}})\n user-id (user/id \"[email protected]\")\n user (user/fetch user-id)\n {", "end": 2129, "score": 0.9999247193336487, "start": 2096, "tag": "EMAIL", "value": "[email protected]" }, { "context": "[request (h/request\n {:session \"[email protected]\"\n :command {:add-user\n ", "end": 2786, "score": 0.9999268054962158, "start": 2753, "tag": "EMAIL", "value": "[email protected]" }, { "context": " :user/email-address \"[email protected]\"\n :user/name \"Test\"", "end": 2998, "score": 0.9999275207519531, "start": 2965, "tag": "EMAIL", "value": "[email protected]" }, { "context": "s.com\"\n :user/name \"Test\"\n :user/roles #{:cu", "end": 3047, "score": 0.7374323606491089, "start": 3043, "tag": "USERNAME", "value": "Test" }, { "context": "oles #{:customer}}}})\n user-id (user/id \"[email protected]\")\n user (user/fetch user-id)\n {", "end": 3170, "score": 0.9999228119850159, "start": 3137, "tag": "EMAIL", "value": "[email protected]" }, { "context": " :session {:current-user-id (user/id \"[email protected]\")}}\n (h/decode :transit body)))\n ", "end": 3512, "score": 0.9999198913574219, "start": 3479, "tag": "EMAIL", "value": "[email protected]" }, { "context": "[request (h/request\n {:session \"[email protected]\"\n :command {:add-user\n ", "end": 3872, "score": 0.9999261498451233, "start": 3839, "tag": "EMAIL", "value": "[email protected]" }, { "context": " :user/email-address \"[email protected]\"\n :user/name \"Test\"", "end": 4084, "score": 0.9999291896820068, "start": 4051, "tag": "EMAIL", "value": "[email protected]" }, { "context": "s.com\"\n :user/name \"Test\"\n :user/roles #{:cu", "end": 4133, "score": 
0.9260504841804504, "start": 4129, "tag": "NAME", "value": "Test" }, { "context": "oles #{:customer}}}})\n user-id (user/id \"[email protected]\")\n user (user/fetch user-id)\n {", "end": 4256, "score": 0.9999251961708069, "start": 4223, "tag": "EMAIL", "value": "[email protected]" }, { "context": " :session {:current-user-id (user/id \"[email protected]\")}}\n (h/decode :transit body)))\n ", "end": 4598, "score": 0.9999263882637024, "start": 4565, "tag": "EMAIL", "value": "[email protected]" }, { "context": "[request (h/request\n {:session \"[email protected]\"\n :command {:add-user\n ", "end": 5027, "score": 0.999921977519989, "start": 4994, "tag": "EMAIL", "value": "[email protected]" }, { "context": " :user/email-address \"[email protected]\"\n :user/name \"Test\"", "end": 5239, "score": 0.9999299645423889, "start": 5206, "tag": "EMAIL", "value": "[email protected]" }, { "context": "s.com\"\n :user/name \"Test\"\n :user/roles #{:cu", "end": 5288, "score": 0.9359478950500488, "start": 5284, "tag": "NAME", "value": "Test" }, { "context": "oles #{:customer}}}})\n user-id (user/id \"[email protected]\")\n user (user/fetch user-id)\n {", "end": 5411, "score": 0.9999243021011353, "start": 5378, "tag": "EMAIL", "value": "[email protected]" }, { "context": " :session {:current-user-id (user/id \"[email protected]\")}}\n (h/decode :transit body)))\n ", "end": 5847, "score": 0.9999286532402039, "start": 5814, "tag": "EMAIL", "value": "[email protected]" }, { "context": "[request (h/request\n {:session \"[email protected]\"\n :command {:add-user\n ", "end": 6226, "score": 0.9999279379844666, "start": 6193, "tag": "EMAIL", "value": "[email protected]" }, { "context": " :user/email-address \"[email protected]\"\n :user/name \"Test\"", "end": 6438, "score": 0.9999273419380188, "start": 6405, "tag": "EMAIL", "value": "[email protected]" }, { "context": "s.com\"\n :user/name \"Test\"\n :user/roles #{:cu", "end": 6487, "score": 0.576518177986145, "start": 6483, "tag": "USERNAME", "value": "Test" }, { "context": "oles #{:customer}}}})\n user-id (user/id \"[email protected]\")\n user (user/fetch user-id)\n {", "end": 6610, "score": 0.9999212622642517, "start": 6577, "tag": "EMAIL", "value": "[email protected]" }, { "context": " :session {:current-user-id (user/id \"[email protected]\")}}\n (h/decode :transit body)))\n ", "end": 7046, "score": 0.9999281167984009, "start": 7013, "tag": "EMAIL", "value": "[email protected]" } ]
api/test/feature/flow/command/add_user_test.clj
kgxsz/flow
0
(ns flow.command.add-user-test (:require [flow.core :refer :all] [flow.entity.authorisation :as authorisation] [flow.entity.user :as user] [flow.helpers :as h] [clojure.test :refer :all])) (defn setup [] (h/create-test-user! "[email protected]" "Test" #{:customer :admin}) (h/create-test-user! "[email protected]") (h/create-test-user! "[email protected]" "Test" #{:customer :admin}) (h/delete-test-user! "[email protected]")) (defn fixture [test] (h/ensure-empty-table) (setup) (test) (h/ensure-empty-table)) (use-fixtures :each fixture) (deftest test-add-user (testing "The handler negotiates the add-user command when no session is provided." (let [request (h/request {:command {:add-user {:user/id #uuid "00000000-0000-0000-0000-000000000000" :user/email-address "[email protected]" :user/name "Test" :user/roles #{:customer}}}}) user-id (user/id "[email protected]") user (user/fetch user-id) {:keys [status headers body] :as response} (handler request) user' (user/fetch user-id)] (is (= 200 status)) (is (= {:users {} :authorisations {} :metadata {} :session {:current-user-id nil}} (h/decode :transit body))) (is (= nil user user')))) (testing "The handler negotiates the add-user command when an unauthorised session is provided." (let [request (h/request {:session :unauthorised :command {:add-user {:user/id #uuid "00000000-0000-0000-0000-000000000000" :user/email-address "[email protected]" :user/name "Test" :user/roles #{:customer}}}}) user-id (user/id "[email protected]") user (user/fetch user-id) {:keys [status headers body] :as response} (handler request) user' (user/fetch user-id)] (is (= 200 status)) (is (= {:users {} :authorisations {} :metadata {} :session {:current-user-id nil}} (h/decode :transit body))) (is (= nil user user')))) (testing "The handler negotiates the add-user command when the command is being made for an exiting user and a session authorised to a user with an admin role is provided." (let [request (h/request {:session "[email protected]" :command {:add-user {:user/id #uuid "00000000-0000-0000-0000-000000000000" :user/email-address "[email protected]" :user/name "Test" :user/roles #{:customer}}}}) user-id (user/id "[email protected]") user (user/fetch user-id) {:keys [status headers body] :as response} (handler request) user' (user/fetch user-id)] (is (= 200 status)) (is (= {:users {} :authorisations {} :metadata {} :session {:current-user-id (user/id "[email protected]")}} (h/decode :transit body))) (is (= user user')))) (testing "The handler negotiates the add-user command when the command is being made for a non-existent user and a session authorised to a user without an admin role is provided." (let [request (h/request {:session "[email protected]" :command {:add-user {:user/id #uuid "00000000-0000-0000-0000-000000000000" :user/email-address "[email protected]" :user/name "Test" :user/roles #{:customer}}}}) user-id (user/id "[email protected]") user (user/fetch user-id) {:keys [status headers body] :as response} (handler request) user' (user/fetch user-id)] (is (= 200 status)) (is (= {:users {} :authorisations {} :metadata {} :session {:current-user-id (user/id "[email protected]")}} (h/decode :transit body))) (is (= nil user user')))) (testing "The handler negotiates the add-user command when the command is being made for a non-existent user and a session authorised to a user with an admin role is provided, where that authorised user has previously been deleted." 
(let [request (h/request {:session "[email protected]" :command {:add-user {:user/id #uuid "00000000-0000-0000-0000-000000000000" :user/email-address "[email protected]" :user/name "Test" :user/roles #{:customer}}}}) user-id (user/id "[email protected]") user (user/fetch user-id) {:keys [status headers body] :as response} (handler request) user' (user/fetch user-id)] (is (= 200 status)) (is (= {:users {} :authorisations {} :metadata {:id-resolution {#uuid "00000000-0000-0000-0000-000000000000" user-id}} :session {:current-user-id (user/id "[email protected]")}} (h/decode :transit body))) (is (nil? user)) (is (some? user')))) (testing "The handler negotiates the add-user command when the command is being made for a non-existent user and a session authorised to a user with an admin role is provided." (let [request (h/request {:session "[email protected]" :command {:add-user {:user/id #uuid "00000000-0000-0000-0000-000000000000" :user/email-address "[email protected]" :user/name "Test" :user/roles #{:customer}}}}) user-id (user/id "[email protected]") user (user/fetch user-id) {:keys [status headers body] :as response} (handler request) user' (user/fetch user-id)] (is (= 200 status)) (is (= {:users {} :authorisations {} :metadata {:id-resolution {#uuid "00000000-0000-0000-0000-000000000000" user-id}} :session {:current-user-id (user/id "[email protected]")}} (h/decode :transit body))) (is (nil? user)) (is (some? user')))))
28320
(ns flow.command.add-user-test (:require [flow.core :refer :all] [flow.entity.authorisation :as authorisation] [flow.entity.user :as user] [flow.helpers :as h] [clojure.test :refer :all])) (defn setup [] (h/create-test-user! "<EMAIL>" "Test" #{:customer :admin}) (h/create-test-user! "<EMAIL>") (h/create-test-user! "<EMAIL>" "Test" #{:customer :admin}) (h/delete-test-user! "<EMAIL>")) (defn fixture [test] (h/ensure-empty-table) (setup) (test) (h/ensure-empty-table)) (use-fixtures :each fixture) (deftest test-add-user (testing "The handler negotiates the add-user command when no session is provided." (let [request (h/request {:command {:add-user {:user/id #uuid "00000000-0000-0000-0000-000000000000" :user/email-address "<EMAIL>" :user/name "<NAME>" :user/roles #{:customer}}}}) user-id (user/id "<EMAIL>") user (user/fetch user-id) {:keys [status headers body] :as response} (handler request) user' (user/fetch user-id)] (is (= 200 status)) (is (= {:users {} :authorisations {} :metadata {} :session {:current-user-id nil}} (h/decode :transit body))) (is (= nil user user')))) (testing "The handler negotiates the add-user command when an unauthorised session is provided." (let [request (h/request {:session :unauthorised :command {:add-user {:user/id #uuid "00000000-0000-0000-0000-000000000000" :user/email-address "<EMAIL>" :user/name "Test" :user/roles #{:customer}}}}) user-id (user/id "<EMAIL>") user (user/fetch user-id) {:keys [status headers body] :as response} (handler request) user' (user/fetch user-id)] (is (= 200 status)) (is (= {:users {} :authorisations {} :metadata {} :session {:current-user-id nil}} (h/decode :transit body))) (is (= nil user user')))) (testing "The handler negotiates the add-user command when the command is being made for an exiting user and a session authorised to a user with an admin role is provided." (let [request (h/request {:session "<EMAIL>" :command {:add-user {:user/id #uuid "00000000-0000-0000-0000-000000000000" :user/email-address "<EMAIL>" :user/name "Test" :user/roles #{:customer}}}}) user-id (user/id "<EMAIL>") user (user/fetch user-id) {:keys [status headers body] :as response} (handler request) user' (user/fetch user-id)] (is (= 200 status)) (is (= {:users {} :authorisations {} :metadata {} :session {:current-user-id (user/id "<EMAIL>")}} (h/decode :transit body))) (is (= user user')))) (testing "The handler negotiates the add-user command when the command is being made for a non-existent user and a session authorised to a user without an admin role is provided." (let [request (h/request {:session "<EMAIL>" :command {:add-user {:user/id #uuid "00000000-0000-0000-0000-000000000000" :user/email-address "<EMAIL>" :user/name "<NAME>" :user/roles #{:customer}}}}) user-id (user/id "<EMAIL>") user (user/fetch user-id) {:keys [status headers body] :as response} (handler request) user' (user/fetch user-id)] (is (= 200 status)) (is (= {:users {} :authorisations {} :metadata {} :session {:current-user-id (user/id "<EMAIL>")}} (h/decode :transit body))) (is (= nil user user')))) (testing "The handler negotiates the add-user command when the command is being made for a non-existent user and a session authorised to a user with an admin role is provided, where that authorised user has previously been deleted." 
(let [request (h/request {:session "<EMAIL>" :command {:add-user {:user/id #uuid "00000000-0000-0000-0000-000000000000" :user/email-address "<EMAIL>" :user/name "<NAME>" :user/roles #{:customer}}}}) user-id (user/id "<EMAIL>") user (user/fetch user-id) {:keys [status headers body] :as response} (handler request) user' (user/fetch user-id)] (is (= 200 status)) (is (= {:users {} :authorisations {} :metadata {:id-resolution {#uuid "00000000-0000-0000-0000-000000000000" user-id}} :session {:current-user-id (user/id "<EMAIL>")}} (h/decode :transit body))) (is (nil? user)) (is (some? user')))) (testing "The handler negotiates the add-user command when the command is being made for a non-existent user and a session authorised to a user with an admin role is provided." (let [request (h/request {:session "<EMAIL>" :command {:add-user {:user/id #uuid "00000000-0000-0000-0000-000000000000" :user/email-address "<EMAIL>" :user/name "Test" :user/roles #{:customer}}}}) user-id (user/id "<EMAIL>") user (user/fetch user-id) {:keys [status headers body] :as response} (handler request) user' (user/fetch user-id)] (is (= 200 status)) (is (= {:users {} :authorisations {} :metadata {:id-resolution {#uuid "00000000-0000-0000-0000-000000000000" user-id}} :session {:current-user-id (user/id "<EMAIL>")}} (h/decode :transit body))) (is (nil? user)) (is (some? user')))))
true
(ns flow.command.add-user-test (:require [flow.core :refer :all] [flow.entity.authorisation :as authorisation] [flow.entity.user :as user] [flow.helpers :as h] [clojure.test :refer :all])) (defn setup [] (h/create-test-user! "PI:EMAIL:<EMAIL>END_PI" "Test" #{:customer :admin}) (h/create-test-user! "PI:EMAIL:<EMAIL>END_PI") (h/create-test-user! "PI:EMAIL:<EMAIL>END_PI" "Test" #{:customer :admin}) (h/delete-test-user! "PI:EMAIL:<EMAIL>END_PI")) (defn fixture [test] (h/ensure-empty-table) (setup) (test) (h/ensure-empty-table)) (use-fixtures :each fixture) (deftest test-add-user (testing "The handler negotiates the add-user command when no session is provided." (let [request (h/request {:command {:add-user {:user/id #uuid "00000000-0000-0000-0000-000000000000" :user/email-address "PI:EMAIL:<EMAIL>END_PI" :user/name "PI:NAME:<NAME>END_PI" :user/roles #{:customer}}}}) user-id (user/id "PI:EMAIL:<EMAIL>END_PI") user (user/fetch user-id) {:keys [status headers body] :as response} (handler request) user' (user/fetch user-id)] (is (= 200 status)) (is (= {:users {} :authorisations {} :metadata {} :session {:current-user-id nil}} (h/decode :transit body))) (is (= nil user user')))) (testing "The handler negotiates the add-user command when an unauthorised session is provided." (let [request (h/request {:session :unauthorised :command {:add-user {:user/id #uuid "00000000-0000-0000-0000-000000000000" :user/email-address "PI:EMAIL:<EMAIL>END_PI" :user/name "Test" :user/roles #{:customer}}}}) user-id (user/id "PI:EMAIL:<EMAIL>END_PI") user (user/fetch user-id) {:keys [status headers body] :as response} (handler request) user' (user/fetch user-id)] (is (= 200 status)) (is (= {:users {} :authorisations {} :metadata {} :session {:current-user-id nil}} (h/decode :transit body))) (is (= nil user user')))) (testing "The handler negotiates the add-user command when the command is being made for an exiting user and a session authorised to a user with an admin role is provided." (let [request (h/request {:session "PI:EMAIL:<EMAIL>END_PI" :command {:add-user {:user/id #uuid "00000000-0000-0000-0000-000000000000" :user/email-address "PI:EMAIL:<EMAIL>END_PI" :user/name "Test" :user/roles #{:customer}}}}) user-id (user/id "PI:EMAIL:<EMAIL>END_PI") user (user/fetch user-id) {:keys [status headers body] :as response} (handler request) user' (user/fetch user-id)] (is (= 200 status)) (is (= {:users {} :authorisations {} :metadata {} :session {:current-user-id (user/id "PI:EMAIL:<EMAIL>END_PI")}} (h/decode :transit body))) (is (= user user')))) (testing "The handler negotiates the add-user command when the command is being made for a non-existent user and a session authorised to a user without an admin role is provided." 
(let [request (h/request {:session "PI:EMAIL:<EMAIL>END_PI" :command {:add-user {:user/id #uuid "00000000-0000-0000-0000-000000000000" :user/email-address "PI:EMAIL:<EMAIL>END_PI" :user/name "PI:NAME:<NAME>END_PI" :user/roles #{:customer}}}}) user-id (user/id "PI:EMAIL:<EMAIL>END_PI") user (user/fetch user-id) {:keys [status headers body] :as response} (handler request) user' (user/fetch user-id)] (is (= 200 status)) (is (= {:users {} :authorisations {} :metadata {} :session {:current-user-id (user/id "PI:EMAIL:<EMAIL>END_PI")}} (h/decode :transit body))) (is (= nil user user')))) (testing "The handler negotiates the add-user command when the command is being made for a non-existent user and a session authorised to a user with an admin role is provided, where that authorised user has previously been deleted." (let [request (h/request {:session "PI:EMAIL:<EMAIL>END_PI" :command {:add-user {:user/id #uuid "00000000-0000-0000-0000-000000000000" :user/email-address "PI:EMAIL:<EMAIL>END_PI" :user/name "PI:NAME:<NAME>END_PI" :user/roles #{:customer}}}}) user-id (user/id "PI:EMAIL:<EMAIL>END_PI") user (user/fetch user-id) {:keys [status headers body] :as response} (handler request) user' (user/fetch user-id)] (is (= 200 status)) (is (= {:users {} :authorisations {} :metadata {:id-resolution {#uuid "00000000-0000-0000-0000-000000000000" user-id}} :session {:current-user-id (user/id "PI:EMAIL:<EMAIL>END_PI")}} (h/decode :transit body))) (is (nil? user)) (is (some? user')))) (testing "The handler negotiates the add-user command when the command is being made for a non-existent user and a session authorised to a user with an admin role is provided." (let [request (h/request {:session "PI:EMAIL:<EMAIL>END_PI" :command {:add-user {:user/id #uuid "00000000-0000-0000-0000-000000000000" :user/email-address "PI:EMAIL:<EMAIL>END_PI" :user/name "Test" :user/roles #{:customer}}}}) user-id (user/id "PI:EMAIL:<EMAIL>END_PI") user (user/fetch user-id) {:keys [status headers body] :as response} (handler request) user' (user/fetch user-id)] (is (= 200 status)) (is (= {:users {} :authorisations {} :metadata {:id-resolution {#uuid "00000000-0000-0000-0000-000000000000" user-id}} :session {:current-user-id (user/id "PI:EMAIL:<EMAIL>END_PI")}} (h/decode :transit body))) (is (nil? user)) (is (some? user')))))
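Each row's entities cell (the JSON array at the top of the row) records, per detected span, a surrounding context snippet, start and end character offsets into content, a tag such as EMAIL or NAME, and the matched value. As a minimal sketch, not prescribed by the dataset, those offsets can be applied to content to produce a redacted string in the style of new_content; clojure.data.json is an assumed dependency and the function name is my own.

;; Minimal sketch: apply entity offsets to redact a content string.
;; Spans are applied from the highest start offset backwards so that
;; earlier offsets remain valid; end offsets are treated as exclusive.
(require '[clojure.data.json :as json])

(defn redact
  "Replaces each [start, end) span named in the entities JSON with <TAG>."
  [content entities-json]
  (let [entities (json/read-str entities-json :key-fn keyword)]
    (reduce (fn [s {:keys [start end tag]}]
              (str (subs s 0 start) "<" tag ">" (subs s end)))
            content
            (sort-by :start > entities))))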