entities
listlengths 1
44.6k
| max_stars_repo_path
stringlengths 6
160
| max_stars_repo_name
stringlengths 6
66
| max_stars_count
int64 0
47.9k
| content
stringlengths 18
1.04M
| id
stringlengths 1
6
| new_content
stringlengths 18
1.04M
| modified
bool 1
class | references
stringlengths 32
1.52M
|
---|---|---|---|---|---|---|---|---|
[
{
"context": "e bones simple webapp\"\n :url \"https://github.com/kwrooijen/lein-web-lite\"\n :license {:name \"MIT\"}\n :eval-i",
"end": 148,
"score": 0.999676525592804,
"start": 139,
"tag": "USERNAME",
"value": "kwrooijen"
},
{
"context": "er\n :password :env/clojars_pass\n :sign-release",
"end": 410,
"score": 0.9050725102424622,
"start": 394,
"tag": "PASSWORD",
"value": "env/clojars_pass"
}
] |
project.clj
|
kwrooijen/lein-web-lite
| 0 |
(defproject web-lite/lein-template "0.0.3"
:description "A Leiningen template for a bare bones simple webapp"
:url "https://github.com/kwrooijen/lein-web-lite"
:license {:name "MIT"}
:eval-in-leiningen true
:deploy-repositories [["clojars" {:url "https://clojars.org/repo"
:username :env/clojars_user
:password :env/clojars_pass
:sign-releases false}]])
|
47961
|
(defproject web-lite/lein-template "0.0.3"
:description "A Leiningen template for a bare bones simple webapp"
:url "https://github.com/kwrooijen/lein-web-lite"
:license {:name "MIT"}
:eval-in-leiningen true
:deploy-repositories [["clojars" {:url "https://clojars.org/repo"
:username :env/clojars_user
:password :<PASSWORD>
:sign-releases false}]])
| true |
(defproject web-lite/lein-template "0.0.3"
:description "A Leiningen template for a bare bones simple webapp"
:url "https://github.com/kwrooijen/lein-web-lite"
:license {:name "MIT"}
:eval-in-leiningen true
:deploy-repositories [["clojars" {:url "https://clojars.org/repo"
:username :env/clojars_user
:password :PI:PASSWORD:<PASSWORD>END_PI
:sign-releases false}]])
|
[
{
"context": "(comment\n re-core, Copyright 2012 Ronen Narkis, narkisr.com\n Licensed under the Apache License",
"end": 48,
"score": 0.9998793601989746,
"start": 36,
"tag": "NAME",
"value": "Ronen Narkis"
},
{
"context": "(comment\n re-core, Copyright 2012 Ronen Narkis, narkisr.com\n Licensed under the Apache License,\n Version ",
"end": 61,
"score": 0.8009606599807739,
"start": 50,
"tag": "EMAIL",
"value": "narkisr.com"
}
] |
src/es/jobs.clj
|
celestial-ops/core
| 1 |
(comment
re-core, Copyright 2012 Ronen Narkis, narkisr.com
Licensed under the Apache License,
Version 2.0 (the "License") you may not use this file except in compliance with the License.
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.)
(ns es.jobs
"Jobs ES persistency"
(:refer-clojure :exclude [get])
(:require
[es.node :as node :refer (ES)]
[es.common :refer (flush- index)]
[clojurewerkz.elastisch.native.document :as doc]
[re-core.common :refer (envs import-logging)]))
(import-logging)
(defn put
"Add/Update a jobs into ES"
[{:keys [tid queue status] :as job} ttl & {:keys [flush?]}]
(doc/put @ES index "jobs" tid (merge job {:queue (name queue) :status (name status)}) {:ttl ttl})
(when flush? (flush-)))
(defn delete
"delete a system from ES"
[id]
(doc/delete @ES index "jobs" id))
(defn get
"Grabs a system by an id"
[id]
(doc/get @ES index "jobs" id))
(defn query-envs
"maps envs to query form terms"
[envs]
(map (fn [e] {:term {:env (name e)}}) envs))
(defn paginate
"basic query string"
[from size envs]
(let [q {:bool {:minimum_should_match 1 :should (query-envs envs)}}]
(:hits
(doc/search @ES index "jobs" {
:from from :size size :query q :sort {:end "desc"}
}))))
|
21692
|
(comment
re-core, Copyright 2012 <NAME>, <EMAIL>
Licensed under the Apache License,
Version 2.0 (the "License") you may not use this file except in compliance with the License.
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.)
(ns es.jobs
"Jobs ES persistency"
(:refer-clojure :exclude [get])
(:require
[es.node :as node :refer (ES)]
[es.common :refer (flush- index)]
[clojurewerkz.elastisch.native.document :as doc]
[re-core.common :refer (envs import-logging)]))
(import-logging)
(defn put
"Add/Update a jobs into ES"
[{:keys [tid queue status] :as job} ttl & {:keys [flush?]}]
(doc/put @ES index "jobs" tid (merge job {:queue (name queue) :status (name status)}) {:ttl ttl})
(when flush? (flush-)))
(defn delete
"delete a system from ES"
[id]
(doc/delete @ES index "jobs" id))
(defn get
"Grabs a system by an id"
[id]
(doc/get @ES index "jobs" id))
(defn query-envs
"maps envs to query form terms"
[envs]
(map (fn [e] {:term {:env (name e)}}) envs))
(defn paginate
"basic query string"
[from size envs]
(let [q {:bool {:minimum_should_match 1 :should (query-envs envs)}}]
(:hits
(doc/search @ES index "jobs" {
:from from :size size :query q :sort {:end "desc"}
}))))
| true |
(comment
re-core, Copyright 2012 PI:NAME:<NAME>END_PI, PI:EMAIL:<EMAIL>END_PI
Licensed under the Apache License,
Version 2.0 (the "License") you may not use this file except in compliance with the License.
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.)
(ns es.jobs
"Jobs ES persistency"
(:refer-clojure :exclude [get])
(:require
[es.node :as node :refer (ES)]
[es.common :refer (flush- index)]
[clojurewerkz.elastisch.native.document :as doc]
[re-core.common :refer (envs import-logging)]))
(import-logging)
(defn put
"Add/Update a jobs into ES"
[{:keys [tid queue status] :as job} ttl & {:keys [flush?]}]
(doc/put @ES index "jobs" tid (merge job {:queue (name queue) :status (name status)}) {:ttl ttl})
(when flush? (flush-)))
(defn delete
"delete a system from ES"
[id]
(doc/delete @ES index "jobs" id))
(defn get
"Grabs a system by an id"
[id]
(doc/get @ES index "jobs" id))
(defn query-envs
"maps envs to query form terms"
[envs]
(map (fn [e] {:term {:env (name e)}}) envs))
(defn paginate
"basic query string"
[from size envs]
(let [q {:bool {:minimum_should_match 1 :should (query-envs envs)}}]
(:hits
(doc/search @ES index "jobs" {
:from from :size size :query q :sort {:end "desc"}
}))))
|
[
{
"context": ";; Copyright (c) 2015 Andrey Antukh <[email protected]>\n;; All rights reserved.\n;;\n;; Redi",
"end": 35,
"score": 0.9998799562454224,
"start": 22,
"tag": "NAME",
"value": "Andrey Antukh"
},
{
"context": ";; Copyright (c) 2015 Andrey Antukh <[email protected]>\n;; All rights reserved.\n;;\n;; Redistribution and",
"end": 49,
"score": 0.9999337196350098,
"start": 37,
"tag": "EMAIL",
"value": "[email protected]"
}
] |
src/clojure/catacumba/handlers/misc.clj
|
source-c/catacumba
| 212 |
;; Copyright (c) 2015 Andrey Antukh <[email protected]>
;; All rights reserved.
;;
;; Redistribution and use in source and binary forms, with or without
;; modification, are permitted provided that the following conditions are met:
;;
;; * Redistributions of source code must retain the above copyright notice, this
;; list of conditions and the following disclaimer.
;;
;; * Redistributions in binary form must reproduce the above copyright notice,
;; this list of conditions and the following disclaimer in the documentation
;; and/or other materials provided with the distribution.
;;
;; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
;; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
;; IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
;; DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
;; FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
;; DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
;; SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
;; CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
;; OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
;; OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
(ns catacumba.handlers.misc
(:require [cuerdas.core :as str]
[ns-tracker.core :refer [ns-tracker]]
[catacumba.core :refer [on-close]]
[catacumba.impl.routing :as routing]
[catacumba.impl.context :as ct]
[catacumba.impl.handlers :as hs])
(:import ratpack.handling.RequestLogger
ratpack.handling.RequestOutcome
ratpack.handling.Chain
ratpack.handling.Context
ratpack.handling.Handler
ratpack.exec.Execution
ratpack.http.Status
ratpack.func.Block
ratpack.exec.ExecInterceptor
ratpack.exec.ExecInterceptor$ExecType))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; CORS
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn- allow-origin?
[value {:keys [origin]}]
(cond
(nil? value) value
(= origin "*") origin
(set? origin) (origin value)
(= origin value) origin))
(defn- normalize-headers
[headers]
(->> (map (comp str/lower name) headers)
(str/join ",")))
(defn- normalize-methods
[methods]
(->> (map (comp str/upper name) methods)
(str/join ",")))
(defn- handle-preflight
[context headers {:keys [allow-methods allow-headers max-age allow-credentials]
:or {allow-methods #{:get :post :put :delete}}
:as opts}]
(let [^String origin (get headers :origin)]
(when-let [origin (allow-origin? origin opts)]
(ct/set-headers! context
{:access-control-allow-origin origin
:access-control-allow-methods (normalize-methods allow-methods)})
(when allow-credentials
(ct/set-headers! context {:access-control-allow-credentials true}))
(when max-age
(ct/set-headers! context {:access-control-max-age max-age}))
(when allow-headers
(ct/set-headers! context {:access-control-allow-headers (normalize-headers allow-headers)})))
(hs/send! context "")))
(defn- handle-response
[context headers {:keys [allow-headers expose-headers origin allow-credentials] :as opts}]
(let [^String origin (get headers :origin)]
(when-let [origin (allow-origin? origin opts)]
(ct/set-headers! context {:access-control-allow-origin origin})
(when allow-credentials
(ct/set-headers! context {:access-control-allow-credentials true}))
(when allow-headers
(ct/set-headers! context {:access-control-allow-headers (normalize-headers allow-headers)}))
(when expose-headers
(ct/set-headers! context {:access-control-expose-headers (normalize-headers expose-headers)})))
(ct/delegate)))
(defn- cors-preflight?
[context headers]
(and (= (:method context) :options)
(contains? headers :origin)
(contains? headers :access-control-request-method)))
(defn cors
"A chain handler that handles cors related headers."
[{:keys [origin] :as opts}]
(fn [context]
(let [headers (:headers context)]
(if (cors-preflight? context headers)
(handle-preflight context headers opts)
(handle-response context headers opts)))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Autorealoader
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn autoreloader
([] (autoreloader {}))
([{:keys [dirs] :or {dirs ["src"]}}]
(let [tracker (ns-tracker dirs)]
(fn [context]
(doseq [ns-sym (tracker)]
(println "=> reload:" ns-sym)
(require ns-sym :reload))
(ct/delegate)))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Logging
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn- status->map [^Status status]
{:code (.getCode status)
:message (.getMessage status)})
(defn- outcome->map [^RequestOutcome outcome]
(let [response (.getResponse outcome)]
{:headers (ct/headers->map
(.. response getHeaders asMultiValueMap)
true)
:status (status->map (.getStatus response))
:sent-at (.getSentAt outcome)
:duration (.getDuration outcome)}))
(defn log
([] (RequestLogger/ncsa))
([log-fn]
(fn [context]
(on-close context #(log-fn context (outcome->map %)))
(ct/delegate))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Interceptors
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn- exec-interceptor
[interceptor]
(reify ExecInterceptor
(^void intercept [_ ^Execution exc ^ExecInterceptor$ExecType t ^Block b]
(let [continuation #(.execute b)
exectype (if (= t ExecInterceptor$ExecType/BLOCKING)
:blocking
:compute)]
(interceptor exc exectype continuation)))))
(defn interceptor
"Start interceptor from current context.
It wraps the rest of route chain the execution. It receive a
continuation (as a cloure function) that must be called in
order for processing to proceed."
[callback]
(fn [context]
(let [^Context ctx (:catacumba/context context)
^Execution exec (.getExecution ctx)]
(.addInterceptor exec
(exec-interceptor callback)
(reify Block
(^void execute [_]
(.next ctx)))))))
|
52031
|
;; Copyright (c) 2015 <NAME> <<EMAIL>>
;; All rights reserved.
;;
;; Redistribution and use in source and binary forms, with or without
;; modification, are permitted provided that the following conditions are met:
;;
;; * Redistributions of source code must retain the above copyright notice, this
;; list of conditions and the following disclaimer.
;;
;; * Redistributions in binary form must reproduce the above copyright notice,
;; this list of conditions and the following disclaimer in the documentation
;; and/or other materials provided with the distribution.
;;
;; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
;; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
;; IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
;; DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
;; FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
;; DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
;; SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
;; CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
;; OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
;; OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
(ns catacumba.handlers.misc
(:require [cuerdas.core :as str]
[ns-tracker.core :refer [ns-tracker]]
[catacumba.core :refer [on-close]]
[catacumba.impl.routing :as routing]
[catacumba.impl.context :as ct]
[catacumba.impl.handlers :as hs])
(:import ratpack.handling.RequestLogger
ratpack.handling.RequestOutcome
ratpack.handling.Chain
ratpack.handling.Context
ratpack.handling.Handler
ratpack.exec.Execution
ratpack.http.Status
ratpack.func.Block
ratpack.exec.ExecInterceptor
ratpack.exec.ExecInterceptor$ExecType))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; CORS
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn- allow-origin?
[value {:keys [origin]}]
(cond
(nil? value) value
(= origin "*") origin
(set? origin) (origin value)
(= origin value) origin))
(defn- normalize-headers
[headers]
(->> (map (comp str/lower name) headers)
(str/join ",")))
(defn- normalize-methods
[methods]
(->> (map (comp str/upper name) methods)
(str/join ",")))
(defn- handle-preflight
[context headers {:keys [allow-methods allow-headers max-age allow-credentials]
:or {allow-methods #{:get :post :put :delete}}
:as opts}]
(let [^String origin (get headers :origin)]
(when-let [origin (allow-origin? origin opts)]
(ct/set-headers! context
{:access-control-allow-origin origin
:access-control-allow-methods (normalize-methods allow-methods)})
(when allow-credentials
(ct/set-headers! context {:access-control-allow-credentials true}))
(when max-age
(ct/set-headers! context {:access-control-max-age max-age}))
(when allow-headers
(ct/set-headers! context {:access-control-allow-headers (normalize-headers allow-headers)})))
(hs/send! context "")))
(defn- handle-response
[context headers {:keys [allow-headers expose-headers origin allow-credentials] :as opts}]
(let [^String origin (get headers :origin)]
(when-let [origin (allow-origin? origin opts)]
(ct/set-headers! context {:access-control-allow-origin origin})
(when allow-credentials
(ct/set-headers! context {:access-control-allow-credentials true}))
(when allow-headers
(ct/set-headers! context {:access-control-allow-headers (normalize-headers allow-headers)}))
(when expose-headers
(ct/set-headers! context {:access-control-expose-headers (normalize-headers expose-headers)})))
(ct/delegate)))
(defn- cors-preflight?
[context headers]
(and (= (:method context) :options)
(contains? headers :origin)
(contains? headers :access-control-request-method)))
(defn cors
"A chain handler that handles cors related headers."
[{:keys [origin] :as opts}]
(fn [context]
(let [headers (:headers context)]
(if (cors-preflight? context headers)
(handle-preflight context headers opts)
(handle-response context headers opts)))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Autorealoader
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn autoreloader
([] (autoreloader {}))
([{:keys [dirs] :or {dirs ["src"]}}]
(let [tracker (ns-tracker dirs)]
(fn [context]
(doseq [ns-sym (tracker)]
(println "=> reload:" ns-sym)
(require ns-sym :reload))
(ct/delegate)))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Logging
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn- status->map [^Status status]
{:code (.getCode status)
:message (.getMessage status)})
(defn- outcome->map [^RequestOutcome outcome]
(let [response (.getResponse outcome)]
{:headers (ct/headers->map
(.. response getHeaders asMultiValueMap)
true)
:status (status->map (.getStatus response))
:sent-at (.getSentAt outcome)
:duration (.getDuration outcome)}))
(defn log
([] (RequestLogger/ncsa))
([log-fn]
(fn [context]
(on-close context #(log-fn context (outcome->map %)))
(ct/delegate))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Interceptors
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn- exec-interceptor
[interceptor]
(reify ExecInterceptor
(^void intercept [_ ^Execution exc ^ExecInterceptor$ExecType t ^Block b]
(let [continuation #(.execute b)
exectype (if (= t ExecInterceptor$ExecType/BLOCKING)
:blocking
:compute)]
(interceptor exc exectype continuation)))))
(defn interceptor
"Start interceptor from current context.
It wraps the rest of route chain the execution. It receive a
continuation (as a cloure function) that must be called in
order for processing to proceed."
[callback]
(fn [context]
(let [^Context ctx (:catacumba/context context)
^Execution exec (.getExecution ctx)]
(.addInterceptor exec
(exec-interceptor callback)
(reify Block
(^void execute [_]
(.next ctx)))))))
| true |
;; Copyright (c) 2015 PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
;; All rights reserved.
;;
;; Redistribution and use in source and binary forms, with or without
;; modification, are permitted provided that the following conditions are met:
;;
;; * Redistributions of source code must retain the above copyright notice, this
;; list of conditions and the following disclaimer.
;;
;; * Redistributions in binary form must reproduce the above copyright notice,
;; this list of conditions and the following disclaimer in the documentation
;; and/or other materials provided with the distribution.
;;
;; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
;; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
;; IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
;; DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
;; FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
;; DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
;; SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
;; CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
;; OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
;; OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
(ns catacumba.handlers.misc
(:require [cuerdas.core :as str]
[ns-tracker.core :refer [ns-tracker]]
[catacumba.core :refer [on-close]]
[catacumba.impl.routing :as routing]
[catacumba.impl.context :as ct]
[catacumba.impl.handlers :as hs])
(:import ratpack.handling.RequestLogger
ratpack.handling.RequestOutcome
ratpack.handling.Chain
ratpack.handling.Context
ratpack.handling.Handler
ratpack.exec.Execution
ratpack.http.Status
ratpack.func.Block
ratpack.exec.ExecInterceptor
ratpack.exec.ExecInterceptor$ExecType))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; CORS
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn- allow-origin?
[value {:keys [origin]}]
(cond
(nil? value) value
(= origin "*") origin
(set? origin) (origin value)
(= origin value) origin))
(defn- normalize-headers
[headers]
(->> (map (comp str/lower name) headers)
(str/join ",")))
(defn- normalize-methods
[methods]
(->> (map (comp str/upper name) methods)
(str/join ",")))
(defn- handle-preflight
[context headers {:keys [allow-methods allow-headers max-age allow-credentials]
:or {allow-methods #{:get :post :put :delete}}
:as opts}]
(let [^String origin (get headers :origin)]
(when-let [origin (allow-origin? origin opts)]
(ct/set-headers! context
{:access-control-allow-origin origin
:access-control-allow-methods (normalize-methods allow-methods)})
(when allow-credentials
(ct/set-headers! context {:access-control-allow-credentials true}))
(when max-age
(ct/set-headers! context {:access-control-max-age max-age}))
(when allow-headers
(ct/set-headers! context {:access-control-allow-headers (normalize-headers allow-headers)})))
(hs/send! context "")))
(defn- handle-response
[context headers {:keys [allow-headers expose-headers origin allow-credentials] :as opts}]
(let [^String origin (get headers :origin)]
(when-let [origin (allow-origin? origin opts)]
(ct/set-headers! context {:access-control-allow-origin origin})
(when allow-credentials
(ct/set-headers! context {:access-control-allow-credentials true}))
(when allow-headers
(ct/set-headers! context {:access-control-allow-headers (normalize-headers allow-headers)}))
(when expose-headers
(ct/set-headers! context {:access-control-expose-headers (normalize-headers expose-headers)})))
(ct/delegate)))
(defn- cors-preflight?
[context headers]
(and (= (:method context) :options)
(contains? headers :origin)
(contains? headers :access-control-request-method)))
(defn cors
"A chain handler that handles cors related headers."
[{:keys [origin] :as opts}]
(fn [context]
(let [headers (:headers context)]
(if (cors-preflight? context headers)
(handle-preflight context headers opts)
(handle-response context headers opts)))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Autorealoader
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn autoreloader
([] (autoreloader {}))
([{:keys [dirs] :or {dirs ["src"]}}]
(let [tracker (ns-tracker dirs)]
(fn [context]
(doseq [ns-sym (tracker)]
(println "=> reload:" ns-sym)
(require ns-sym :reload))
(ct/delegate)))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Logging
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn- status->map [^Status status]
{:code (.getCode status)
:message (.getMessage status)})
(defn- outcome->map [^RequestOutcome outcome]
(let [response (.getResponse outcome)]
{:headers (ct/headers->map
(.. response getHeaders asMultiValueMap)
true)
:status (status->map (.getStatus response))
:sent-at (.getSentAt outcome)
:duration (.getDuration outcome)}))
(defn log
([] (RequestLogger/ncsa))
([log-fn]
(fn [context]
(on-close context #(log-fn context (outcome->map %)))
(ct/delegate))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Interceptors
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn- exec-interceptor
[interceptor]
(reify ExecInterceptor
(^void intercept [_ ^Execution exc ^ExecInterceptor$ExecType t ^Block b]
(let [continuation #(.execute b)
exectype (if (= t ExecInterceptor$ExecType/BLOCKING)
:blocking
:compute)]
(interceptor exc exectype continuation)))))
(defn interceptor
"Start interceptor from current context.
It wraps the rest of route chain the execution. It receive a
continuation (as a cloure function) that must be called in
order for processing to proceed."
[callback]
(fn [context]
(let [^Context ctx (:catacumba/context context)
^Execution exec (.getExecution ctx)]
(.addInterceptor exec
(exec-interceptor callback)
(reify Block
(^void execute [_]
(.next ctx)))))))
|
[
{
"context": " [:input#username.input {:placeholder \"Username\" :type \"text\"}]]]\n\n [:div.field\n ",
"end": 536,
"score": 0.9546480774879456,
"start": 528,
"tag": "USERNAME",
"value": "Username"
},
{
"context": " [:input#password.input {:placeholder \"Password\" :type \"password\"}]]]\n\n [:div.field\n ",
"end": 661,
"score": 0.9912500381469727,
"start": 653,
"tag": "PASSWORD",
"value": "Password"
},
{
"context": "ut#password.input {:placeholder \"Password\" :type \"password\"}]]]\n\n [:div.field\n [:div.cont",
"end": 678,
"score": 0.9637899994850159,
"start": 670,
"tag": "PASSWORD",
"value": "password"
}
] |
src/cljs/barebones/login/views.cljs
|
burhanloey/barebones
| 0 |
(ns barebones.login.views
(:require [barebones.login.events :as login-events]
[barebones.login.subs :as login-subs]
[re-frame.core :as rf]))
(defn login-page []
(let [show-error? (rf/subscribe [::login-subs/show-error])]
[:div.container
[:section.section
[:div.columns
[:div.column.is-4.is-offset-4
[:div.box
[:p.title.is-6.is-center "Sign in"]
[:form
[:div.field
[:div.control
[:input#username.input {:placeholder "Username" :type "text"}]]]
[:div.field
[:div.control
[:input#password.input {:placeholder "Password" :type "password"}]]]
[:div.field
[:div.control
[:label.checkbox
[:input#remember {:type "checkbox"}]
" Remember me"]]]
[:div.field
[:div.control
[:button.button.is-primary.is-fullwidth
{:on-click (fn [evt]
(.preventDefault evt)
(rf/dispatch [::login-events/login]))}
"Login"]]]]]
(when @show-error?
[:div.notification.is-warning.has-text-centered
"Wrong username or password." [:br] "Guess which one was wrong!"])]]]]))
|
101878
|
(ns barebones.login.views
(:require [barebones.login.events :as login-events]
[barebones.login.subs :as login-subs]
[re-frame.core :as rf]))
(defn login-page []
(let [show-error? (rf/subscribe [::login-subs/show-error])]
[:div.container
[:section.section
[:div.columns
[:div.column.is-4.is-offset-4
[:div.box
[:p.title.is-6.is-center "Sign in"]
[:form
[:div.field
[:div.control
[:input#username.input {:placeholder "Username" :type "text"}]]]
[:div.field
[:div.control
[:input#password.input {:placeholder "<PASSWORD>" :type "<PASSWORD>"}]]]
[:div.field
[:div.control
[:label.checkbox
[:input#remember {:type "checkbox"}]
" Remember me"]]]
[:div.field
[:div.control
[:button.button.is-primary.is-fullwidth
{:on-click (fn [evt]
(.preventDefault evt)
(rf/dispatch [::login-events/login]))}
"Login"]]]]]
(when @show-error?
[:div.notification.is-warning.has-text-centered
"Wrong username or password." [:br] "Guess which one was wrong!"])]]]]))
| true |
(ns barebones.login.views
(:require [barebones.login.events :as login-events]
[barebones.login.subs :as login-subs]
[re-frame.core :as rf]))
(defn login-page []
(let [show-error? (rf/subscribe [::login-subs/show-error])]
[:div.container
[:section.section
[:div.columns
[:div.column.is-4.is-offset-4
[:div.box
[:p.title.is-6.is-center "Sign in"]
[:form
[:div.field
[:div.control
[:input#username.input {:placeholder "Username" :type "text"}]]]
[:div.field
[:div.control
[:input#password.input {:placeholder "PI:PASSWORD:<PASSWORD>END_PI" :type "PI:PASSWORD:<PASSWORD>END_PI"}]]]
[:div.field
[:div.control
[:label.checkbox
[:input#remember {:type "checkbox"}]
" Remember me"]]]
[:div.field
[:div.control
[:button.button.is-primary.is-fullwidth
{:on-click (fn [evt]
(.preventDefault evt)
(rf/dispatch [::login-events/login]))}
"Login"]]]]]
(when @show-error?
[:div.notification.is-warning.has-text-centered
"Wrong username or password." [:br] "Guess which one was wrong!"])]]]]))
|
[
{
"context": "\n (let [prev-user (user-dao/get-user-by-email \"[email protected]\")]\n (user-dao/delete-user! (:id prev-user)))",
"end": 644,
"score": 0.9999184012413025,
"start": 615,
"tag": "EMAIL",
"value": "[email protected]"
},
{
"context": "user)))\n (user-dao/create-user! \"Test\" \"Test\" \"[email protected]\" \"test\")\n (let [user (user-dao/get-user-by-ema",
"end": 766,
"score": 0.9999166131019592,
"start": 737,
"tag": "EMAIL",
"value": "[email protected]"
},
{
"context": "est\")\n (let [user (user-dao/get-user-by-email \"[email protected]\")]\n (is (= 1 (prediction-dao/create-predicti",
"end": 849,
"score": 0.9999120235443115,
"start": 820,
"tag": "EMAIL",
"value": "[email protected]"
},
{
"context": "\n (let [prev-user (user-dao/get-user-by-email \"[email protected]\")]\n (user-dao/delete-user! (:id prev-user)))",
"end": 1096,
"score": 0.9999160766601562,
"start": 1066,
"tag": "EMAIL",
"value": "[email protected]"
},
{
"context": "user)))\n (user-dao/create-user! \"Test\" \"Test\" \"[email protected]\" \"test\")\n (let [user (user-dao/get-user-by-ema",
"end": 1219,
"score": 0.9999211430549622,
"start": 1189,
"tag": "EMAIL",
"value": "[email protected]"
},
{
"context": "est\")\n (let [user (user-dao/get-user-by-email \"[email protected]\")\n user-id (:id user)]\n (prediction",
"end": 1303,
"score": 0.9999207258224487,
"start": 1273,
"tag": "EMAIL",
"value": "[email protected]"
}
] |
test/clj/landizer/test/dao/prediction_dao.clj
|
MilanSusa/Landizer
| 0 |
(ns landizer.test.dao.prediction-dao
(:require
[java-time.pre-java8]
[luminus-migrations.core :as migrations]
[clojure.test :refer :all]
[landizer.config :refer [env]]
[mount.core :as mount]
[landizer.dao.user-dao :as user-dao]
[landizer.dao.prediction-dao :as prediction-dao]))
(use-fixtures
:once
(fn [f]
(mount/start
#'landizer.config/env
#'landizer.db.core/*db*)
(migrations/migrate ["migrate"] (select-keys env [:database-url]))
(f)))
(deftest test-prediction-dao
(testing "prediction dao creation"
(let [prev-user (user-dao/get-user-by-email "[email protected]")]
(user-dao/delete-user! (:id prev-user)))
(user-dao/create-user! "Test" "Test" "[email protected]" "test")
(let [user (user-dao/get-user-by-email "[email protected]")]
(is (= 1 (prediction-dao/create-prediction! "Test" 100 "https://test-pred-dao-creation.com" (:id user))))))
(testing "prediction dao retrieval by user id"
(let [prev-user (user-dao/get-user-by-email "[email protected]")]
(user-dao/delete-user! (:id prev-user)))
(user-dao/create-user! "Test" "Test" "[email protected]" "test")
(let [user (user-dao/get-user-by-email "[email protected]")
user-id (:id user)]
(prediction-dao/create-prediction! "Test" 100 "https://test-pred-dao-retrieval.com" user-id)
(is (= false
(empty? (prediction-dao/get-predictions-for-user user-id)))))))
|
45060
|
(ns landizer.test.dao.prediction-dao
(:require
[java-time.pre-java8]
[luminus-migrations.core :as migrations]
[clojure.test :refer :all]
[landizer.config :refer [env]]
[mount.core :as mount]
[landizer.dao.user-dao :as user-dao]
[landizer.dao.prediction-dao :as prediction-dao]))
(use-fixtures
:once
(fn [f]
(mount/start
#'landizer.config/env
#'landizer.db.core/*db*)
(migrations/migrate ["migrate"] (select-keys env [:database-url]))
(f)))
(deftest test-prediction-dao
(testing "prediction dao creation"
(let [prev-user (user-dao/get-user-by-email "<EMAIL>")]
(user-dao/delete-user! (:id prev-user)))
(user-dao/create-user! "Test" "Test" "<EMAIL>" "test")
(let [user (user-dao/get-user-by-email "<EMAIL>")]
(is (= 1 (prediction-dao/create-prediction! "Test" 100 "https://test-pred-dao-creation.com" (:id user))))))
(testing "prediction dao retrieval by user id"
(let [prev-user (user-dao/get-user-by-email "<EMAIL>")]
(user-dao/delete-user! (:id prev-user)))
(user-dao/create-user! "Test" "Test" "<EMAIL>" "test")
(let [user (user-dao/get-user-by-email "<EMAIL>")
user-id (:id user)]
(prediction-dao/create-prediction! "Test" 100 "https://test-pred-dao-retrieval.com" user-id)
(is (= false
(empty? (prediction-dao/get-predictions-for-user user-id)))))))
| true |
(ns landizer.test.dao.prediction-dao
(:require
[java-time.pre-java8]
[luminus-migrations.core :as migrations]
[clojure.test :refer :all]
[landizer.config :refer [env]]
[mount.core :as mount]
[landizer.dao.user-dao :as user-dao]
[landizer.dao.prediction-dao :as prediction-dao]))
(use-fixtures
:once
(fn [f]
(mount/start
#'landizer.config/env
#'landizer.db.core/*db*)
(migrations/migrate ["migrate"] (select-keys env [:database-url]))
(f)))
(deftest test-prediction-dao
(testing "prediction dao creation"
(let [prev-user (user-dao/get-user-by-email "PI:EMAIL:<EMAIL>END_PI")]
(user-dao/delete-user! (:id prev-user)))
(user-dao/create-user! "Test" "Test" "PI:EMAIL:<EMAIL>END_PI" "test")
(let [user (user-dao/get-user-by-email "PI:EMAIL:<EMAIL>END_PI")]
(is (= 1 (prediction-dao/create-prediction! "Test" 100 "https://test-pred-dao-creation.com" (:id user))))))
(testing "prediction dao retrieval by user id"
(let [prev-user (user-dao/get-user-by-email "PI:EMAIL:<EMAIL>END_PI")]
(user-dao/delete-user! (:id prev-user)))
(user-dao/create-user! "Test" "Test" "PI:EMAIL:<EMAIL>END_PI" "test")
(let [user (user-dao/get-user-by-email "PI:EMAIL:<EMAIL>END_PI")
user-id (:id user)]
(prediction-dao/create-prediction! "Test" 100 "https://test-pred-dao-retrieval.com" user-id)
(is (= false
(empty? (prediction-dao/get-predictions-for-user user-id)))))))
|
[
{
"context": "deftest test-create-user\r\n (let [test-user-name \"blah\"\r\n user-id (create-user test-user-name (.t",
"end": 1477,
"score": 0.8947024941444397,
"start": 1473,
"tag": "USERNAME",
"value": "blah"
},
{
"context": "et [test-user-name \"blah\"\r\n test-password \"password\"\r\n test-password-chars (.toCharArray \"pass",
"end": 2526,
"score": 0.9993856549263,
"start": 2518,
"tag": "PASSWORD",
"value": "password"
},
{
"context": "word\"\r\n test-password-chars (.toCharArray \"password\")\r\n user-id (create-user test-user-name te",
"end": 2580,
"score": 0.9901096820831299,
"start": 2572,
"tag": "PASSWORD",
"value": "password"
},
{
"context": "p key-pair)\r\n test-password (.toCharArray \"password\")\r\n key-bytes (:bytes (:private-key key-pa",
"end": 3984,
"score": 0.8395447731018066,
"start": 3976,
"tag": "PASSWORD",
"value": "password"
}
] |
test/test/darkexchange/model/user.clj
|
cryptocurrent/Dark-Exchange
| 36 |
(ns test.darkexchange.model.user
(:require [test.fixtures.util :as fixtures-util]
[darkexchange.model.security :as security])
(:use clojure.contrib.test-is
test.fixtures.user
darkexchange.model.user))
(def model "user")
(fixtures-util/use-fixture-maps :once fixture-map)
(deftest test-first-record
(is (get-record 1)))
(deftest test-validate-user-name
(is (validate-user-name "blah") "User with unique name does not validate.")
(is (not (validate-user-name nil)) "Nil user incorrectly validates.")
(is (not (validate-user-name "")) "Empty string user incorrectly validates.")
(is (not (validate-user-name "test-user")) "Non-unique user incorrectly validates."))
(defn to-byte-array [nums]
(byte-array (map byte nums)))
(deftest test-validate-passwords
(is (validate-passwords (byte-array []) (byte-array []))
"Empty string passwords do not validate.")
(is (validate-passwords (to-byte-array [1]) (to-byte-array [1]))
"One byte passwords do not validate.")
(is (validate-passwords (to-byte-array [1 2]) (to-byte-array [1 2]))
"Two byte passwords do not validate.")
(is (validate-passwords (to-byte-array [1 2 3]) (to-byte-array [1 2 3]))
"Three byte passwords do not validate.")
(is (not (validate-passwords (to-byte-array [1 2 3]) (to-byte-array [1 2 4])))
"Unmatched passwords incorrectly validate."))
(deftest test-create-user
(let [test-user-name "blah"
user-id (create-user test-user-name (.toCharArray "password"))]
(is user-id "User id not returned.")
(let [user (get-record user-id)]
(is user (str "User with the id: " user-id " could not be found."))
(is (= (:name user) test-user-name) "The user name does not match the test user name.")
(is (not (contains? user :password)) "The password was not removed before saving.")
(is (:encrypted_password user) "The encrypted password was not set.")
(is (:salt user) "The salt was not set.")
(is (:public_key user) "The public key was not set.")
(is (:public_key_algorithm user) "The public key algorithm was not set.")
(is (:private_key user) "The private key was not set.")
(is (:private_key_algorithm user) "The private key algorithm was not set."))
(when user-id
(destroy-record { :id user-id }))))
(deftest test-login
(is (nil? (current-user)) "Started test with a logged in user.")
(let [test-user-name "blah"
test-password "password"
test-password-chars (.toCharArray "password")
user-id (create-user test-user-name test-password-chars)]
(is (login test-user-name test-password-chars) "The user failed to log in.")
(let [test-current-user (current-user)]
(is test-current-user "The user was not logged in.")
(is (= (:name test-current-user) "blah") "The wrong user was logged in.")
(is (:encrypted_password test-current-user) "No encrypted password was loaded.")
(is (:salt test-current-user) "No salt was loaded.")
(is (:password test-current-user) "The password was not saved.")
(is (:public_key test-current-user) "The public key was not saved.")
(is (:public_key_algorithm test-current-user) "The public key algorithm was not saved.")
(is (:private_key test-current-user) "The private key was not saved.")
(is (:private_key_algorithm test-current-user) "The private key algorithm was not saved."))
(when user-id
(destroy-record { :id user-id })))
(logout)
(is (nil? (current-user)) "The user failed to logout."))
(defn test-bytes [byte-array1 byte-array2]
(when (= (count byte-array1) (count byte-array2))
(not (some identity (map #(not (= %1 %2)) byte-array1 byte-array2)))))
(deftest test-private-key-bytes
(let [key-pair (security/generate-key-pair)
key-pair-map (security/get-key-pair-map key-pair)
test-password (.toCharArray "password")
key-bytes (:bytes (:private-key key-pair-map))
private-key-str (encrypt-private-key test-password key-bytes)
decrypted-bytes (private-key-bytes { :password test-password :private_key private-key-str
:private_key_encryption_algorithm security/default-symmetrical-algorithm })]
(is (= (count key-bytes) (count decrypted-bytes)))
(is (test-bytes key-bytes decrypted-bytes))))
|
121408
|
(ns test.darkexchange.model.user
(:require [test.fixtures.util :as fixtures-util]
[darkexchange.model.security :as security])
(:use clojure.contrib.test-is
test.fixtures.user
darkexchange.model.user))
(def model "user")
(fixtures-util/use-fixture-maps :once fixture-map)
(deftest test-first-record
(is (get-record 1)))
(deftest test-validate-user-name
(is (validate-user-name "blah") "User with unique name does not validate.")
(is (not (validate-user-name nil)) "Nil user incorrectly validates.")
(is (not (validate-user-name "")) "Empty string user incorrectly validates.")
(is (not (validate-user-name "test-user")) "Non-unique user incorrectly validates."))
(defn to-byte-array [nums]
(byte-array (map byte nums)))
(deftest test-validate-passwords
(is (validate-passwords (byte-array []) (byte-array []))
"Empty string passwords do not validate.")
(is (validate-passwords (to-byte-array [1]) (to-byte-array [1]))
"One byte passwords do not validate.")
(is (validate-passwords (to-byte-array [1 2]) (to-byte-array [1 2]))
"Two byte passwords do not validate.")
(is (validate-passwords (to-byte-array [1 2 3]) (to-byte-array [1 2 3]))
"Three byte passwords do not validate.")
(is (not (validate-passwords (to-byte-array [1 2 3]) (to-byte-array [1 2 4])))
"Unmatched passwords incorrectly validate."))
(deftest test-create-user
(let [test-user-name "blah"
user-id (create-user test-user-name (.toCharArray "password"))]
(is user-id "User id not returned.")
(let [user (get-record user-id)]
(is user (str "User with the id: " user-id " could not be found."))
(is (= (:name user) test-user-name) "The user name does not match the test user name.")
(is (not (contains? user :password)) "The password was not removed before saving.")
(is (:encrypted_password user) "The encrypted password was not set.")
(is (:salt user) "The salt was not set.")
(is (:public_key user) "The public key was not set.")
(is (:public_key_algorithm user) "The public key algorithm was not set.")
(is (:private_key user) "The private key was not set.")
(is (:private_key_algorithm user) "The private key algorithm was not set."))
(when user-id
(destroy-record { :id user-id }))))
(deftest test-login
(is (nil? (current-user)) "Started test with a logged in user.")
(let [test-user-name "blah"
test-password "<PASSWORD>"
test-password-chars (.toCharArray "<PASSWORD>")
user-id (create-user test-user-name test-password-chars)]
(is (login test-user-name test-password-chars) "The user failed to log in.")
(let [test-current-user (current-user)]
(is test-current-user "The user was not logged in.")
(is (= (:name test-current-user) "blah") "The wrong user was logged in.")
(is (:encrypted_password test-current-user) "No encrypted password was loaded.")
(is (:salt test-current-user) "No salt was loaded.")
(is (:password test-current-user) "The password was not saved.")
(is (:public_key test-current-user) "The public key was not saved.")
(is (:public_key_algorithm test-current-user) "The public key algorithm was not saved.")
(is (:private_key test-current-user) "The private key was not saved.")
(is (:private_key_algorithm test-current-user) "The private key algorithm was not saved."))
(when user-id
(destroy-record { :id user-id })))
(logout)
(is (nil? (current-user)) "The user failed to logout."))
(defn test-bytes [byte-array1 byte-array2]
(when (= (count byte-array1) (count byte-array2))
(not (some identity (map #(not (= %1 %2)) byte-array1 byte-array2)))))
(deftest test-private-key-bytes
(let [key-pair (security/generate-key-pair)
key-pair-map (security/get-key-pair-map key-pair)
test-password (.toCharArray "<PASSWORD>")
key-bytes (:bytes (:private-key key-pair-map))
private-key-str (encrypt-private-key test-password key-bytes)
decrypted-bytes (private-key-bytes { :password test-password :private_key private-key-str
:private_key_encryption_algorithm security/default-symmetrical-algorithm })]
(is (= (count key-bytes) (count decrypted-bytes)))
(is (test-bytes key-bytes decrypted-bytes))))
| true |
(ns test.darkexchange.model.user
(:require [test.fixtures.util :as fixtures-util]
[darkexchange.model.security :as security])
(:use clojure.contrib.test-is
test.fixtures.user
darkexchange.model.user))
(def model "user")
(fixtures-util/use-fixture-maps :once fixture-map)
(deftest test-first-record
(is (get-record 1)))
(deftest test-validate-user-name
(is (validate-user-name "blah") "User with unique name does not validate.")
(is (not (validate-user-name nil)) "Nil user incorrectly validates.")
(is (not (validate-user-name "")) "Empty string user incorrectly validates.")
(is (not (validate-user-name "test-user")) "Non-unique user incorrectly validates."))
(defn to-byte-array [nums]
(byte-array (map byte nums)))
(deftest test-validate-passwords
(is (validate-passwords (byte-array []) (byte-array []))
"Empty string passwords do not validate.")
(is (validate-passwords (to-byte-array [1]) (to-byte-array [1]))
"One byte passwords do not validate.")
(is (validate-passwords (to-byte-array [1 2]) (to-byte-array [1 2]))
"Two byte passwords do not validate.")
(is (validate-passwords (to-byte-array [1 2 3]) (to-byte-array [1 2 3]))
"Three byte passwords do not validate.")
(is (not (validate-passwords (to-byte-array [1 2 3]) (to-byte-array [1 2 4])))
"Unmatched passwords incorrectly validate."))
(deftest test-create-user
(let [test-user-name "blah"
user-id (create-user test-user-name (.toCharArray "password"))]
(is user-id "User id not returned.")
(let [user (get-record user-id)]
(is user (str "User with the id: " user-id " could not be found."))
(is (= (:name user) test-user-name) "The user name does not match the test user name.")
(is (not (contains? user :password)) "The password was not removed before saving.")
(is (:encrypted_password user) "The encrypted password was not set.")
(is (:salt user) "The salt was not set.")
(is (:public_key user) "The public key was not set.")
(is (:public_key_algorithm user) "The public key algorithm was not set.")
(is (:private_key user) "The private key was not set.")
(is (:private_key_algorithm user) "The private key algorithm was not set."))
(when user-id
(destroy-record { :id user-id }))))
(deftest test-login
(is (nil? (current-user)) "Started test with a logged in user.")
(let [test-user-name "blah"
test-password "PI:PASSWORD:<PASSWORD>END_PI"
test-password-chars (.toCharArray "PI:PASSWORD:<PASSWORD>END_PI")
user-id (create-user test-user-name test-password-chars)]
(is (login test-user-name test-password-chars) "The user failed to log in.")
(let [test-current-user (current-user)]
(is test-current-user "The user was not logged in.")
(is (= (:name test-current-user) "blah") "The wrong user was logged in.")
(is (:encrypted_password test-current-user) "No encrypted password was loaded.")
(is (:salt test-current-user) "No salt was loaded.")
(is (:password test-current-user) "The password was not saved.")
(is (:public_key test-current-user) "The public key was not saved.")
(is (:public_key_algorithm test-current-user) "The public key algorithm was not saved.")
(is (:private_key test-current-user) "The private key was not saved.")
(is (:private_key_algorithm test-current-user) "The private key algorithm was not saved."))
(when user-id
(destroy-record { :id user-id })))
(logout)
(is (nil? (current-user)) "The user failed to logout."))
(defn test-bytes [byte-array1 byte-array2]
(when (= (count byte-array1) (count byte-array2))
(not (some identity (map #(not (= %1 %2)) byte-array1 byte-array2)))))
(deftest test-private-key-bytes
(let [key-pair (security/generate-key-pair)
key-pair-map (security/get-key-pair-map key-pair)
test-password (.toCharArray "PI:PASSWORD:<PASSWORD>END_PI")
key-bytes (:bytes (:private-key key-pair-map))
private-key-str (encrypt-private-key test-password key-bytes)
decrypted-bytes (private-key-bytes { :password test-password :private_key private-key-str
:private_key_encryption_algorithm security/default-symmetrical-algorithm })]
(is (= (count key-bytes) (count decrypted-bytes)))
(is (test-bytes key-bytes decrypted-bytes))))
|
[
{
"context": "ss\"\n (t/is (map? (sut/signup! {:player/name \"foo\",\n :player/avatar \"A",
"end": 316,
"score": 0.9335814118385315,
"start": 313,
"tag": "NAME",
"value": "foo"
},
{
"context": " \"AVATAR\"})))\n (t/is\n (= {:player/name \"foo\",\n :player/avatar \"AVATAR\",\n :pl",
"end": 416,
"score": 0.578253448009491,
"start": 413,
"tag": "NAME",
"value": "foo"
},
{
"context": "esting \"success\"\n (t/is (= {:player/name \"foo\",\n :player/avatar \"AVATAR\",\n ",
"end": 1150,
"score": 0.9799219369888306,
"start": 1147,
"tag": "NAME",
"value": "foo"
}
] |
test/clj/poker/account_test.clj
|
DogLooksGood/holdem
| 98 |
(ns poker.account-test
(:require
[poker.account :as sut]
[clojure.test :as t]
[poker.utils.test-system :refer [wrap-test-system]])
(:import clojure.lang.ExceptionInfo))
(t/use-fixtures :each wrap-test-system)
(t/deftest signup!
(t/testing "success"
(t/is (map? (sut/signup! {:player/name "foo",
:player/avatar "AVATAR"})))
(t/is
(= {:player/name "foo",
:player/avatar "AVATAR",
:player/balance 10000}
(sut/get-player [:player/name :player/avatar :player/balance]
{:player/name "foo"}))))
(t/testing "disallow duplicate names"
(t/is (thrown-with-msg? ExceptionInfo #"Player name not available"
(sut/signup! {:player/name "foo",
:player/avatar "AVATAR"})))))
(t/deftest auth-player-by-token!
(let [{:player/keys [token]} (sut/signup! {:player/name "foo",
:player/avatar
"AVATAR"})]
(t/testing "prepare token" (t/is (uuid? token)))
(t/testing "success"
(t/is (= {:player/name "foo",
:player/avatar "AVATAR",
:player/id {:player/name "foo"},
:crux.db/id {:player/name "foo"},
:player/token token,
:player/balance 10000}
(sut/auth-player-by-token! '[*] token))))
(t/testing "failed"
(t/is
(thrown-with-msg? ExceptionInfo #"Player token invalid"
(sut/auth-player-by-token! [:*]
(java.util.UUID/randomUUID)))))))
|
49601
|
(ns poker.account-test
(:require
[poker.account :as sut]
[clojure.test :as t]
[poker.utils.test-system :refer [wrap-test-system]])
(:import clojure.lang.ExceptionInfo))
(t/use-fixtures :each wrap-test-system)
(t/deftest signup!
(t/testing "success"
(t/is (map? (sut/signup! {:player/name "<NAME>",
:player/avatar "AVATAR"})))
(t/is
(= {:player/name "<NAME>",
:player/avatar "AVATAR",
:player/balance 10000}
(sut/get-player [:player/name :player/avatar :player/balance]
{:player/name "foo"}))))
(t/testing "disallow duplicate names"
(t/is (thrown-with-msg? ExceptionInfo #"Player name not available"
(sut/signup! {:player/name "foo",
:player/avatar "AVATAR"})))))
(t/deftest auth-player-by-token!
(let [{:player/keys [token]} (sut/signup! {:player/name "foo",
:player/avatar
"AVATAR"})]
(t/testing "prepare token" (t/is (uuid? token)))
(t/testing "success"
(t/is (= {:player/name "<NAME>",
:player/avatar "AVATAR",
:player/id {:player/name "foo"},
:crux.db/id {:player/name "foo"},
:player/token token,
:player/balance 10000}
(sut/auth-player-by-token! '[*] token))))
(t/testing "failed"
(t/is
(thrown-with-msg? ExceptionInfo #"Player token invalid"
(sut/auth-player-by-token! [:*]
(java.util.UUID/randomUUID)))))))
| true |
(ns poker.account-test
(:require
[poker.account :as sut]
[clojure.test :as t]
[poker.utils.test-system :refer [wrap-test-system]])
(:import clojure.lang.ExceptionInfo))
(t/use-fixtures :each wrap-test-system)
(t/deftest signup!
(t/testing "success"
(t/is (map? (sut/signup! {:player/name "PI:NAME:<NAME>END_PI",
:player/avatar "AVATAR"})))
(t/is
(= {:player/name "PI:NAME:<NAME>END_PI",
:player/avatar "AVATAR",
:player/balance 10000}
(sut/get-player [:player/name :player/avatar :player/balance]
{:player/name "foo"}))))
(t/testing "disallow duplicate names"
(t/is (thrown-with-msg? ExceptionInfo #"Player name not available"
(sut/signup! {:player/name "foo",
:player/avatar "AVATAR"})))))
(t/deftest auth-player-by-token!
(let [{:player/keys [token]} (sut/signup! {:player/name "foo",
:player/avatar
"AVATAR"})]
(t/testing "prepare token" (t/is (uuid? token)))
(t/testing "success"
(t/is (= {:player/name "PI:NAME:<NAME>END_PI",
:player/avatar "AVATAR",
:player/id {:player/name "foo"},
:crux.db/id {:player/name "foo"},
:player/token token,
:player/balance 10000}
(sut/auth-player-by-token! '[*] token))))
(t/testing "failed"
(t/is
(thrown-with-msg? ExceptionInfo #"Player token invalid"
(sut/auth-player-by-token! [:*]
(java.util.UUID/randomUUID)))))))
|
[
{
"context": "r [C def- P p]]))\n\n(def- g-somedata (R {:some-key 42, :state {:another-key 99} :buffer_paths []}))\n(de",
"end": 485,
"score": 0.5926619172096252,
"start": 483,
"tag": "KEY",
"value": "42"
},
{
"context": "-somedata (R {:some-key 42, :state {:another-key 99} :buffer_paths []}))\n(def- g-std-name (shg/g-name",
"end": 510,
"score": 0.5173286199569702,
"start": 509,
"tag": "KEY",
"value": "9"
}
] |
test/clojupyter/messages_generators_test.clj
|
nighcoder/clojupyter
| 2 |
(ns clojupyter.messages-generators-test
(:require [clojupyter]
[clojupyter.messages :as msgs]
[clojupyter.messages-specs :as msp]
[clojupyter.test-shared :as ts]
[clojupyter.test-shared-generators :as shg :refer [R]]
[clojure.spec.alpha :as s]
[clojure.string :as str]
[clojure.test.check.generators :as gen]
[io.simplect.compose :refer [C def- P p]]))
(def- g-somedata (R {:some-key 42, :state {:another-key 99} :buffer_paths []}))
(def- g-std-name (shg/g-name 2 10))
(def- g-std-exe-count (gen/choose 1 1000))
(def- g-reply-status (gen/frequency [[8 (gen/elements ["ok"])]
[1 (gen/elements ["error"])]]))
;;; ------------------------------------------------------------------------------------------------------------------------
;;; MESSAGE TYPE GENERATOR
;;; ------------------------------------------------------------------------------------------------------------------------
(def g-msgtype
(gen/elements [msgs/COMM-CLOSE
msgs/COMM-INFO-REPLY
msgs/COMM-INFO-REQUEST
msgs/COMM-MSG
msgs/COMM-OPEN
msgs/COMPLETE-REPLY
msgs/COMPLETE-REQUEST
msgs/ERROR
msgs/EXECUTE-INPUT
msgs/EXECUTE-REPLY
msgs/EXECUTE-REQUEST
msgs/EXECUTE-RESULT
msgs/HISTORY-REPLY
msgs/HISTORY-REQUEST
msgs/INPUT-REQUEST
msgs/INPUT-REPLY
msgs/INSPECT-REPLY
msgs/INSPECT-REQUEST
msgs/INTERRUPT-REPLY
msgs/INTERRUPT-REQUEST
msgs/IS-COMPLETE-REPLY
msgs/IS-COMPLETE-REQUEST
msgs/KERNEL-INFO-REPLY
msgs/KERNEL-INFO-REQUEST
msgs/SHUTDOWN-REPLY
msgs/SHUTDOWN-REQUEST
msgs/STATUS
msgs/STREAM]))
;;; ------------------------------------------------------------------------------------------------------------------------
;;; MESSAGE HEADER GENERATOR
;;; ------------------------------------------------------------------------------------------------------------------------
(defn g-message-header
[msgtype]
(gen/let [message-id shg/g-uuid
username g-std-name
session shg/g-uuid
date (gen/fmap #(str "DATE: " %) g-std-name)
version shg/g-version]
(R (msgs/make-jupmsg-header message-id msgtype username session date version))))
;;; ------------------------------------------------------------------------------------------------------------------------
;;; MESSAGE GENERATORS
;;; ------------------------------------------------------------------------------------------------------------------------
(def g-comm-close-content
(gen/let [id shg/g-uuid
data g-somedata]
(R {:msgtype msgs/COMM-CLOSE
:content (->> (msgs/comm-close-content id data)
(s/assert ::msp/comm-close-content))})))
(def g-comm-info-reply-content
(gen/let [n (gen/choose 0 10)
ids (gen/vector shg/g-uuid n)
nms (gen/vector g-std-name n)]
(R {:msgtype msgs/COMM-INFO-REPLY
:content (->> (msgs/comm-info-reply-content (zipmap ids nms))
(s/assert ::msp/comm-info-reply-content))})))
(def g-comm-info-request-content
(gen/let [nm g-std-name]
(R {:msgtype msgs/COMM-INFO-REQUEST
:content (->> (msgs/comm-info-request-content nm)
(s/assert ::msp/comm-info-request-content))})))
(def g-comm-message-content
(gen/let [id shg/g-uuid
data g-somedata]
(R {:msgtype msgs/COMM-MSG
:content (->> (msgs/comm-msg-content id data)
(s/assert ::msp/comm-message-content))})))
(def g-comm-open-content
(gen/let [id shg/g-uuid
modnm (shg/g-name 2 10)
tgtnm (shg/g-name 2 10)
data g-somedata]
(R {:msgtype msgs/COMM-OPEN
:content (->> (msgs/comm-open-content id data {:target_module modnm, :target_name tgtnm})
(s/assert ::msp/comm-open-content))})))
(def g-complete-reply-content
(gen/let [matches (gen/such-that (C count (p >)) (gen/vector g-std-name))
minlen (R (if (-> matches count zero?) 0 (apply min (map count matches))))
maxlen (R (if (-> matches count zero?) 0 (apply max (map count matches))))
cursor-end (gen/choose 0 maxlen)
cursor-start (gen/choose 0 cursor-end)]
(R {:msgtype msgs/COMPLETE-REPLY
:content (->> (msgs/complete-reply-content matches cursor-start cursor-end)
(s/assert ::msp/complete-reply-content))})))
(def g-complete-request-content
(gen/let [codestr (gen/frequency [[5 (gen/elements ["(println )"])]
[1 shg/g-safe-code-string]])
pos (gen/choose 0 10)]
(R {:msgtype msgs/COMPLETE-REQUEST
:content (->> (msgs/complete-request-content codestr pos)
(s/assert ::msp/complete-request-content))})))
(def g-error-message-content
(gen/let [n g-std-exe-count]
(R {:msgtype msgs/ERROR
:content (->> (msgs/error-message-content n)
(s/assert ::msp/error-message-content))})))
(def g-execute-input-message-content
(gen/let [n g-std-exe-count
codestr shg/g-safe-code-string]
(R {:msgtype msgs/EXECUTE-INPUT
:content (->> (msgs/execute-input-msg-content n codestr)
(s/assert ::msp/execute-input-content))})))
(def g-execute-reply-content
(gen/let [status g-reply-status
n g-std-exe-count
ename (gen/elements [nil {:ename "ENAME-HERE"}])
evalue (if ename
(gen/elements [{:evalue "EVALUE-HERE"}])
(gen/elements [{}]))
traceback (if ename
(gen/elements [{:traceback "TRACEBACK-HERE"}])
(gen/elements [{}]))]
(R {:msgtype msgs/EXECUTE-REPLY
:content (->> (msgs/execute-reply-content status n
(merge {} ename evalue traceback))
(s/assert ::msp/execute-reply-content))})))
(def g-execute-request-content
(gen/let [allow-stdin? gen/boolean
silent? gen/boolean
stop-on-error? gen/boolean
store-history? gen/boolean
code shg/g-safe-code-string]
(R {:msgtype msgs/EXECUTE-REQUEST
:content (->> (msgs/execute-request-content code allow-stdin? silent? stop-on-error? store-history?)
(s/assert ::msp/execute-request-content))})))
(def g-execute-result-content
(gen/let [data g-somedata
n g-std-exe-count]
(R {:msgtype msgs/EXECUTE-RESULT
:content (->> (msgs/execute-result-content data n)
(s/assert ::msp/execute-result-content))})))
(def g-history-reply-content
(let [histmaps '({:session 1, :line 1, :source "(list 1 2 3)"}
{:session 1, :line 2, :source "(list 4 5 6)"}
{:session 1, :line 3, :source "(println :ok)"}
{:session 1, :line 4, :source "(* 999 888 77)"})]
(R {:msgtype msgs/HISTORY-REPLY
:content (->> (msgs/history-reply-content histmaps)
(s/assert ::msp/history-reply-content))})))
(def g-history-request-content
(R {:msgtype msgs/HISTORY-REQUEST
:content (msgs/history-request-content)}))
(def g-input-reply-content
(gen/let [v g-std-name]
(R {:msgtype msgs/INPUT-REPLY
:content (msgs/input-reply-content v)})))
(def g-input-request-content
(gen/let [prompt (gen/fmap (P str ":") g-std-name)
password gen/boolean]
(R {:msgtype msgs/INPUT-REQUEST
:content (->> (msgs/input-request-content prompt password)
(s/assert ::msp/input-request-content))})))
(def g-inspect-reply-content
(gen/let [code-str shg/g-safe-code-string
result-str (gen/elements ["RESULT-HERE"])]
(R {:msgtype msgs/INSPECT-REPLY
:content (->> (msgs/inspect-reply-content code-str result-str)
(s/assert ::msp/inspect-reply-content))})))
(def g-inspect-request-content
(gen/let [code shg/g-safe-code-string
pos (gen/choose 0 (count code))]
(R {:msgtype msgs/INSPECT-REQUEST
:content (->> (msgs/inspect-request-content code pos)
(s/assert ::msp/inspect-request-content))})))
;;(def g-interrupt-reply-content) ;; NOT IMPLEMENTED
;;(def g-interrupt-request-content) ;; NOT IMPLEMENTED
(def g-is-complete-reply-content
(gen/let [status g-reply-status]
(R {:msgtype msgs/IS-COMPLETE-REPLY
:content (->> (msgs/is-complete-reply-content status)
(s/assert ::msp/is-complete-reply-content))})))
(def g-is-complete-request-content
(gen/let [codestr shg/g-safe-code-string
len (gen/one-of [(gen/elements [(count codestr)])
(gen/choose 0 (dec (count codestr)))])
codesubstr (R (subs codestr 0 len))]
(R {:msgtype msgs/IS-COMPLETE-REQUEST
:content (->> (msgs/is-complete-request-content codesubstr)
(s/assert ::msp/is-complete-request-content))})))
(def g-kernel-info-reply-content
(gen/let [banner (shg/g-nilable g-std-name)
clj-ver (shg/g-nilable (gen/elements ["1.2.3"]))
impl (shg/g-nilable (gen/elements ["some-other-impl"]))
proto-ver (shg/g-nilable (gen/frequency [[10 (gen/elements [msgs/PROTOCOL-VERSION])]
[1 (gen/elements ["5.1" "5.2" "5.3 " "4.0" "5.8" "7.0" "99.1"])]]))
version-str (shg/g-nilable (gen/frequency [[10 (gen/elements [clojupyter/version])]
[1 (gen/elements ["0.0.0" "1.2.3" "2.3.4" "5.6.0"])]]))]
(R {:msgtype msgs/KERNEL-INFO-REPLY
:content (->> (msgs/kernel-info-reply-content msgs/PROTOCOL-VERSION
{:banner banner
:clj-ver clj-ver
:implementation impl
:protocol-version proto-ver
:version-string version-str})
(s/assert ::msp/kernel-info-reply-content))})))
(def g-kernel-info-request-content
(R {:msgtype msgs/KERNEL-INFO-REQUEST
:content (->> (msgs/kernel-info-request-content)
(s/assert ::msp/kernel-info-request-content))}))
(def g-shutdown-reply
(gen/let [restart? gen/boolean]
(R {:msgtype msgs/SHUTDOWN-REPLY
:content (->> (msgs/shutdown-reply-content restart?)
(s/assert ::msp/shutdown-reply-content))})))
(def g-shutdown-request
(gen/let [restart? gen/boolean]
(R {:msgtype msgs/SHUTDOWN-REQUEST
:content (->> (msgs/shutdown-request-content restart?)
(s/assert ::msp/shutdown-request-content))})))
(def g-status-message-content
(gen/let [state (gen/elements ["busy" "idle" "starting"])]
(R {:msgtype msgs/STATUS
:content (->> (msgs/status-message-content state)
(s/assert ::msp/status-message-content))})))
(def g-stream-message-content
(gen/let [nm g-std-name
text g-std-name]
(R {:msgtype msgs/STREAM
:content (->> (msgs/stream-message-content nm text)
(s/assert ::msp/stream-message-content))})))
;;; ------------------------------------------------------------------------------------------------------------------------
;;; MULTI-MESSAGE GENERATORS
;;; ------------------------------------------------------------------------------------------------------------------------
(def g-jupmsg-content-any
(gen/one-of [
g-comm-close-content
g-comm-info-reply-content
g-comm-info-request-content
g-comm-message-content
g-comm-open-content
g-complete-reply-content
g-complete-request-content
g-error-message-content
g-execute-input-message-content
g-execute-reply-content
g-execute-request-content
g-execute-result-content
g-history-reply-content
g-history-request-content
g-input-request-content
g-inspect-reply-content
g-inspect-request-content
g-is-complete-reply-content
g-is-complete-request-content
g-kernel-info-reply-content
g-kernel-info-request-content
g-shutdown-reply
g-shutdown-request
g-status-message-content
g-stream-message-content
]))
(def g-jupmsg-any
(gen/let [{:keys [content msgtype]} g-jupmsg-content-any
envelope (shg/g-byte-arrays 0 0 1 5)
signature (shg/g-byte-array 10 20)
hdr (g-message-header msgtype)
phdr (g-message-header msgtype)
metadata (R {})
buffers (R [])]
(R (msgs/make-jupmsg envelope signature hdr phdr metadata content buffers ))))
|
88817
|
(ns clojupyter.messages-generators-test
(:require [clojupyter]
[clojupyter.messages :as msgs]
[clojupyter.messages-specs :as msp]
[clojupyter.test-shared :as ts]
[clojupyter.test-shared-generators :as shg :refer [R]]
[clojure.spec.alpha :as s]
[clojure.string :as str]
[clojure.test.check.generators :as gen]
[io.simplect.compose :refer [C def- P p]]))
;; Constant generator yielding a small, representative comm-message data payload.
;; NOTE(review): the original literal was corrupted by a redaction pass
;; (`<KEY>` placeholders made it unreadable EDN); restored here with neutral
;; placeholder values of the same shape (map with :some-key, nested :state,
;; empty :buffer_paths vector).
(def- g-somedata (R {:some-key "some-value", :state {:another-key 9} :buffer_paths []}))
(def- g-std-name (shg/g-name 2 10))
(def- g-std-exe-count (gen/choose 1 1000))
(def- g-reply-status (gen/frequency [[8 (gen/elements ["ok"])]
[1 (gen/elements ["error"])]]))
;;; ------------------------------------------------------------------------------------------------------------------------
;;; MESSAGE TYPE GENERATOR
;;; ------------------------------------------------------------------------------------------------------------------------
;; Generator yielding one of the Jupyter message-type constants from
;; `clojupyter.messages`.  Note that INTERRUPT-REPLY/INTERRUPT-REQUEST are
;; listed here even though no content generators exist for them below
;; (marked NOT IMPLEMENTED further down in this file).
(def g-msgtype
  (gen/elements [msgs/COMM-CLOSE
                 msgs/COMM-INFO-REPLY
                 msgs/COMM-INFO-REQUEST
                 msgs/COMM-MSG
                 msgs/COMM-OPEN
                 msgs/COMPLETE-REPLY
                 msgs/COMPLETE-REQUEST
                 msgs/ERROR
                 msgs/EXECUTE-INPUT
                 msgs/EXECUTE-REPLY
                 msgs/EXECUTE-REQUEST
                 msgs/EXECUTE-RESULT
                 msgs/HISTORY-REPLY
                 msgs/HISTORY-REQUEST
                 msgs/INPUT-REQUEST
                 msgs/INPUT-REPLY
                 msgs/INSPECT-REPLY
                 msgs/INSPECT-REQUEST
                 msgs/INTERRUPT-REPLY
                 msgs/INTERRUPT-REQUEST
                 msgs/IS-COMPLETE-REPLY
                 msgs/IS-COMPLETE-REQUEST
                 msgs/KERNEL-INFO-REPLY
                 msgs/KERNEL-INFO-REQUEST
                 msgs/SHUTDOWN-REPLY
                 msgs/SHUTDOWN-REQUEST
                 msgs/STATUS
                 msgs/STREAM]))
;;; ------------------------------------------------------------------------------------------------------------------------
;;; MESSAGE HEADER GENERATOR
;;; ------------------------------------------------------------------------------------------------------------------------
;; Returns a generator of Jupyter message headers for the given `msgtype`.
;; Generated fields: random UUIDs for message-id and session, a short random
;; username, a synthetic "DATE: <name>" string (deliberately not a real
;; timestamp), and a random version string.
(defn g-message-header
  [msgtype]
  (gen/let [message-id shg/g-uuid
            username g-std-name
            session shg/g-uuid
            date (gen/fmap #(str "DATE: " %) g-std-name)
            version shg/g-version]
    (R (msgs/make-jupmsg-header message-id msgtype username session date version))))
;;; ------------------------------------------------------------------------------------------------------------------------
;;; MESSAGE GENERATORS
;;; ------------------------------------------------------------------------------------------------------------------------
(def g-comm-close-content
(gen/let [id shg/g-uuid
data g-somedata]
(R {:msgtype msgs/COMM-CLOSE
:content (->> (msgs/comm-close-content id data)
(s/assert ::msp/comm-close-content))})))
(def g-comm-info-reply-content
(gen/let [n (gen/choose 0 10)
ids (gen/vector shg/g-uuid n)
nms (gen/vector g-std-name n)]
(R {:msgtype msgs/COMM-INFO-REPLY
:content (->> (msgs/comm-info-reply-content (zipmap ids nms))
(s/assert ::msp/comm-info-reply-content))})))
(def g-comm-info-request-content
(gen/let [nm g-std-name]
(R {:msgtype msgs/COMM-INFO-REQUEST
:content (->> (msgs/comm-info-request-content nm)
(s/assert ::msp/comm-info-request-content))})))
(def g-comm-message-content
(gen/let [id shg/g-uuid
data g-somedata]
(R {:msgtype msgs/COMM-MSG
:content (->> (msgs/comm-msg-content id data)
(s/assert ::msp/comm-message-content))})))
(def g-comm-open-content
(gen/let [id shg/g-uuid
modnm (shg/g-name 2 10)
tgtnm (shg/g-name 2 10)
data g-somedata]
(R {:msgtype msgs/COMM-OPEN
:content (->> (msgs/comm-open-content id data {:target_module modnm, :target_name tgtnm})
(s/assert ::msp/comm-open-content))})))
(def g-complete-reply-content
(gen/let [matches (gen/such-that (C count (p >)) (gen/vector g-std-name))
minlen (R (if (-> matches count zero?) 0 (apply min (map count matches))))
maxlen (R (if (-> matches count zero?) 0 (apply max (map count matches))))
cursor-end (gen/choose 0 maxlen)
cursor-start (gen/choose 0 cursor-end)]
(R {:msgtype msgs/COMPLETE-REPLY
:content (->> (msgs/complete-reply-content matches cursor-start cursor-end)
(s/assert ::msp/complete-reply-content))})))
(def g-complete-request-content
(gen/let [codestr (gen/frequency [[5 (gen/elements ["(println )"])]
[1 shg/g-safe-code-string]])
pos (gen/choose 0 10)]
(R {:msgtype msgs/COMPLETE-REQUEST
:content (->> (msgs/complete-request-content codestr pos)
(s/assert ::msp/complete-request-content))})))
(def g-error-message-content
(gen/let [n g-std-exe-count]
(R {:msgtype msgs/ERROR
:content (->> (msgs/error-message-content n)
(s/assert ::msp/error-message-content))})))
(def g-execute-input-message-content
(gen/let [n g-std-exe-count
codestr shg/g-safe-code-string]
(R {:msgtype msgs/EXECUTE-INPUT
:content (->> (msgs/execute-input-msg-content n codestr)
(s/assert ::msp/execute-input-content))})))
(def g-execute-reply-content
(gen/let [status g-reply-status
n g-std-exe-count
ename (gen/elements [nil {:ename "ENAME-HERE"}])
evalue (if ename
(gen/elements [{:evalue "EVALUE-HERE"}])
(gen/elements [{}]))
traceback (if ename
(gen/elements [{:traceback "TRACEBACK-HERE"}])
(gen/elements [{}]))]
(R {:msgtype msgs/EXECUTE-REPLY
:content (->> (msgs/execute-reply-content status n
(merge {} ename evalue traceback))
(s/assert ::msp/execute-reply-content))})))
(def g-execute-request-content
(gen/let [allow-stdin? gen/boolean
silent? gen/boolean
stop-on-error? gen/boolean
store-history? gen/boolean
code shg/g-safe-code-string]
(R {:msgtype msgs/EXECUTE-REQUEST
:content (->> (msgs/execute-request-content code allow-stdin? silent? stop-on-error? store-history?)
(s/assert ::msp/execute-request-content))})))
(def g-execute-result-content
(gen/let [data g-somedata
n g-std-exe-count]
(R {:msgtype msgs/EXECUTE-RESULT
:content (->> (msgs/execute-result-content data n)
(s/assert ::msp/execute-result-content))})))
;; History-reply generator.  Unlike the other generators in this file the
;; history entries are a fixed, hard-coded list — only the surrounding
;; message wrapper is generated, so every sample has identical content.
(def g-history-reply-content
  (let [histmaps '({:session 1, :line 1, :source "(list 1 2 3)"}
                   {:session 1, :line 2, :source "(list 4 5 6)"}
                   {:session 1, :line 3, :source "(println :ok)"}
                   {:session 1, :line 4, :source "(* 999 888 77)"})]
    (R {:msgtype msgs/HISTORY-REPLY
        :content (->> (msgs/history-reply-content histmaps)
                      (s/assert ::msp/history-reply-content))})))
(def g-history-request-content
(R {:msgtype msgs/HISTORY-REQUEST
:content (msgs/history-request-content)}))
(def g-input-reply-content
(gen/let [v g-std-name]
(R {:msgtype msgs/INPUT-REPLY
:content (msgs/input-reply-content v)})))
(def g-input-request-content
(gen/let [prompt (gen/fmap (P str ":") g-std-name)
password gen/boolean]
(R {:msgtype msgs/INPUT-REQUEST
:content (->> (msgs/input-request-content prompt password)
(s/assert ::msp/input-request-content))})))
(def g-inspect-reply-content
(gen/let [code-str shg/g-safe-code-string
result-str (gen/elements ["RESULT-HERE"])]
(R {:msgtype msgs/INSPECT-REPLY
:content (->> (msgs/inspect-reply-content code-str result-str)
(s/assert ::msp/inspect-reply-content))})))
(def g-inspect-request-content
(gen/let [code shg/g-safe-code-string
pos (gen/choose 0 (count code))]
(R {:msgtype msgs/INSPECT-REQUEST
:content (->> (msgs/inspect-request-content code pos)
(s/assert ::msp/inspect-request-content))})))
;;(def g-interrupt-reply-content) ;; NOT IMPLEMENTED
;;(def g-interrupt-request-content) ;; NOT IMPLEMENTED
(def g-is-complete-reply-content
(gen/let [status g-reply-status]
(R {:msgtype msgs/IS-COMPLETE-REPLY
:content (->> (msgs/is-complete-reply-content status)
(s/assert ::msp/is-complete-reply-content))})))
(def g-is-complete-request-content
(gen/let [codestr shg/g-safe-code-string
len (gen/one-of [(gen/elements [(count codestr)])
(gen/choose 0 (dec (count codestr)))])
codesubstr (R (subs codestr 0 len))]
(R {:msgtype msgs/IS-COMPLETE-REQUEST
:content (->> (msgs/is-complete-request-content codesubstr)
(s/assert ::msp/is-complete-request-content))})))
;; Kernel-info reply generator.  Every field is nilable so the reply builder's
;; defaulting is exercised.  The frequency weights make most samples use the
;; real protocol/clojupyter versions, with occasional mismatched version
;; strings (weight 10 vs 1) to cover the unexpected-version paths.
(def g-kernel-info-reply-content
  (gen/let [banner (shg/g-nilable g-std-name)
            clj-ver (shg/g-nilable (gen/elements ["1.2.3"]))
            impl (shg/g-nilable (gen/elements ["some-other-impl"]))
            proto-ver (shg/g-nilable (gen/frequency [[10 (gen/elements [msgs/PROTOCOL-VERSION])]
                                                     [1 (gen/elements ["5.1" "5.2" "5.3 " "4.0" "5.8" "7.0" "99.1"])]]))
            version-str (shg/g-nilable (gen/frequency [[10 (gen/elements [clojupyter/version])]
                                                       [1 (gen/elements ["0.0.0" "1.2.3" "2.3.4" "5.6.0"])]]))]
    (R {:msgtype msgs/KERNEL-INFO-REPLY
        :content (->> (msgs/kernel-info-reply-content msgs/PROTOCOL-VERSION
                                                      {:banner banner
                                                       :clj-ver clj-ver
                                                       :implementation impl
                                                       :protocol-version proto-ver
                                                       :version-string version-str})
                      (s/assert ::msp/kernel-info-reply-content))})))
(def g-kernel-info-request-content
(R {:msgtype msgs/KERNEL-INFO-REQUEST
:content (->> (msgs/kernel-info-request-content)
(s/assert ::msp/kernel-info-request-content))}))
(def g-shutdown-reply
(gen/let [restart? gen/boolean]
(R {:msgtype msgs/SHUTDOWN-REPLY
:content (->> (msgs/shutdown-reply-content restart?)
(s/assert ::msp/shutdown-reply-content))})))
(def g-shutdown-request
(gen/let [restart? gen/boolean]
(R {:msgtype msgs/SHUTDOWN-REQUEST
:content (->> (msgs/shutdown-request-content restart?)
(s/assert ::msp/shutdown-request-content))})))
;; Generates a kernel status message carrying one of the three valid
;; execution states; content is spec-checked before being returned.
(def g-status-message-content
  (gen/fmap (fn [execution-state]
              {:msgtype msgs/STATUS
               :content (s/assert ::msp/status-message-content
                                  (msgs/status-message-content execution-state))})
            (gen/elements ["busy" "idle" "starting"])))
;; Generates a stream (stdout/stderr-style) message with a random stream
;; name and random text; content is spec-checked before being returned.
(def g-stream-message-content
  (gen/fmap (fn [[stream-name stream-text]]
              {:msgtype msgs/STREAM
               :content (s/assert ::msp/stream-message-content
                                  (msgs/stream-message-content stream-name stream-text))})
            (gen/tuple g-std-name g-std-name)))
;;; ------------------------------------------------------------------------------------------------------------------------
;;; MULTI-MESSAGE GENERATORS
;;; ------------------------------------------------------------------------------------------------------------------------
;; Generator producing the content (plus its msgtype) of any one of the
;; implemented message kinds, chosen uniformly.
;; NOTE(review): `g-input-reply-content` is defined above but not included
;; in this one-of, and the interrupt messages have no generators at all —
;; confirm these omissions are intentional.
(def g-jupmsg-content-any
  (gen/one-of [
               g-comm-close-content
               g-comm-info-reply-content
               g-comm-info-request-content
               g-comm-message-content
               g-comm-open-content
               g-complete-reply-content
               g-complete-request-content
               g-error-message-content
               g-execute-input-message-content
               g-execute-reply-content
               g-execute-request-content
               g-execute-result-content
               g-history-reply-content
               g-history-request-content
               g-input-request-content
               g-inspect-reply-content
               g-inspect-request-content
               g-is-complete-reply-content
               g-is-complete-request-content
               g-kernel-info-reply-content
               g-kernel-info-request-content
               g-shutdown-reply
               g-shutdown-request
               g-status-message-content
               g-stream-message-content
               ]))
;; Generator for a complete Jupyter message: random envelope/signature byte
;; arrays, headers matching the generated content's msgtype, empty metadata
;; map and buffers vector.
;; NOTE(review): header and parent header are generated independently, so a
;; sample's parent-header ids bear no relation to its header's — confirm
;; downstream tests do not rely on a parent/child relationship.
(def g-jupmsg-any
  (gen/let [{:keys [content msgtype]} g-jupmsg-content-any
            envelope (shg/g-byte-arrays 0 0 1 5)
            signature (shg/g-byte-array 10 20)
            hdr (g-message-header msgtype)
            phdr (g-message-header msgtype)
            metadata (R {})
            buffers (R [])]
    (R (msgs/make-jupmsg envelope signature hdr phdr metadata content buffers ))))
| true |
(ns clojupyter.messages-generators-test
(:require [clojupyter]
[clojupyter.messages :as msgs]
[clojupyter.messages-specs :as msp]
[clojupyter.test-shared :as ts]
[clojupyter.test-shared-generators :as shg :refer [R]]
[clojure.spec.alpha :as s]
[clojure.string :as str]
[clojure.test.check.generators :as gen]
[io.simplect.compose :refer [C def- P p]]))
;; Constant generator yielding a small, representative comm-message data payload.
;; NOTE(review): the original literal was corrupted by a redaction pass
;; (placeholder markers made it unreadable EDN); restored here with neutral
;; placeholder values of the same shape (map with :some-key, nested :state,
;; empty :buffer_paths vector).
(def- g-somedata (R {:some-key "some-value", :state {:another-key 9} :buffer_paths []}))
(def- g-std-name (shg/g-name 2 10))
(def- g-std-exe-count (gen/choose 1 1000))
(def- g-reply-status (gen/frequency [[8 (gen/elements ["ok"])]
[1 (gen/elements ["error"])]]))
;;; ------------------------------------------------------------------------------------------------------------------------
;;; MESSAGE TYPE GENERATOR
;;; ------------------------------------------------------------------------------------------------------------------------
(def g-msgtype
(gen/elements [msgs/COMM-CLOSE
msgs/COMM-INFO-REPLY
msgs/COMM-INFO-REQUEST
msgs/COMM-MSG
msgs/COMM-OPEN
msgs/COMPLETE-REPLY
msgs/COMPLETE-REQUEST
msgs/ERROR
msgs/EXECUTE-INPUT
msgs/EXECUTE-REPLY
msgs/EXECUTE-REQUEST
msgs/EXECUTE-RESULT
msgs/HISTORY-REPLY
msgs/HISTORY-REQUEST
msgs/INPUT-REQUEST
msgs/INPUT-REPLY
msgs/INSPECT-REPLY
msgs/INSPECT-REQUEST
msgs/INTERRUPT-REPLY
msgs/INTERRUPT-REQUEST
msgs/IS-COMPLETE-REPLY
msgs/IS-COMPLETE-REQUEST
msgs/KERNEL-INFO-REPLY
msgs/KERNEL-INFO-REQUEST
msgs/SHUTDOWN-REPLY
msgs/SHUTDOWN-REQUEST
msgs/STATUS
msgs/STREAM]))
;;; ------------------------------------------------------------------------------------------------------------------------
;;; MESSAGE HEADER GENERATOR
;;; ------------------------------------------------------------------------------------------------------------------------
(defn g-message-header
[msgtype]
(gen/let [message-id shg/g-uuid
username g-std-name
session shg/g-uuid
date (gen/fmap #(str "DATE: " %) g-std-name)
version shg/g-version]
(R (msgs/make-jupmsg-header message-id msgtype username session date version))))
;;; ------------------------------------------------------------------------------------------------------------------------
;;; MESSAGE GENERATORS
;;; ------------------------------------------------------------------------------------------------------------------------
(def g-comm-close-content
(gen/let [id shg/g-uuid
data g-somedata]
(R {:msgtype msgs/COMM-CLOSE
:content (->> (msgs/comm-close-content id data)
(s/assert ::msp/comm-close-content))})))
(def g-comm-info-reply-content
(gen/let [n (gen/choose 0 10)
ids (gen/vector shg/g-uuid n)
nms (gen/vector g-std-name n)]
(R {:msgtype msgs/COMM-INFO-REPLY
:content (->> (msgs/comm-info-reply-content (zipmap ids nms))
(s/assert ::msp/comm-info-reply-content))})))
(def g-comm-info-request-content
(gen/let [nm g-std-name]
(R {:msgtype msgs/COMM-INFO-REQUEST
:content (->> (msgs/comm-info-request-content nm)
(s/assert ::msp/comm-info-request-content))})))
(def g-comm-message-content
(gen/let [id shg/g-uuid
data g-somedata]
(R {:msgtype msgs/COMM-MSG
:content (->> (msgs/comm-msg-content id data)
(s/assert ::msp/comm-message-content))})))
(def g-comm-open-content
(gen/let [id shg/g-uuid
modnm (shg/g-name 2 10)
tgtnm (shg/g-name 2 10)
data g-somedata]
(R {:msgtype msgs/COMM-OPEN
:content (->> (msgs/comm-open-content id data {:target_module modnm, :target_name tgtnm})
(s/assert ::msp/comm-open-content))})))
(def g-complete-reply-content
(gen/let [matches (gen/such-that (C count (p >)) (gen/vector g-std-name))
minlen (R (if (-> matches count zero?) 0 (apply min (map count matches))))
maxlen (R (if (-> matches count zero?) 0 (apply max (map count matches))))
cursor-end (gen/choose 0 maxlen)
cursor-start (gen/choose 0 cursor-end)]
(R {:msgtype msgs/COMPLETE-REPLY
:content (->> (msgs/complete-reply-content matches cursor-start cursor-end)
(s/assert ::msp/complete-reply-content))})))
(def g-complete-request-content
(gen/let [codestr (gen/frequency [[5 (gen/elements ["(println )"])]
[1 shg/g-safe-code-string]])
pos (gen/choose 0 10)]
(R {:msgtype msgs/COMPLETE-REQUEST
:content (->> (msgs/complete-request-content codestr pos)
(s/assert ::msp/complete-request-content))})))
(def g-error-message-content
(gen/let [n g-std-exe-count]
(R {:msgtype msgs/ERROR
:content (->> (msgs/error-message-content n)
(s/assert ::msp/error-message-content))})))
(def g-execute-input-message-content
(gen/let [n g-std-exe-count
codestr shg/g-safe-code-string]
(R {:msgtype msgs/EXECUTE-INPUT
:content (->> (msgs/execute-input-msg-content n codestr)
(s/assert ::msp/execute-input-content))})))
(def g-execute-reply-content
(gen/let [status g-reply-status
n g-std-exe-count
ename (gen/elements [nil {:ename "ENAME-HERE"}])
evalue (if ename
(gen/elements [{:evalue "EVALUE-HERE"}])
(gen/elements [{}]))
traceback (if ename
(gen/elements [{:traceback "TRACEBACK-HERE"}])
(gen/elements [{}]))]
(R {:msgtype msgs/EXECUTE-REPLY
:content (->> (msgs/execute-reply-content status n
(merge {} ename evalue traceback))
(s/assert ::msp/execute-reply-content))})))
(def g-execute-request-content
(gen/let [allow-stdin? gen/boolean
silent? gen/boolean
stop-on-error? gen/boolean
store-history? gen/boolean
code shg/g-safe-code-string]
(R {:msgtype msgs/EXECUTE-REQUEST
:content (->> (msgs/execute-request-content code allow-stdin? silent? stop-on-error? store-history?)
(s/assert ::msp/execute-request-content))})))
(def g-execute-result-content
(gen/let [data g-somedata
n g-std-exe-count]
(R {:msgtype msgs/EXECUTE-RESULT
:content (->> (msgs/execute-result-content data n)
(s/assert ::msp/execute-result-content))})))
(def g-history-reply-content
(let [histmaps '({:session 1, :line 1, :source "(list 1 2 3)"}
{:session 1, :line 2, :source "(list 4 5 6)"}
{:session 1, :line 3, :source "(println :ok)"}
{:session 1, :line 4, :source "(* 999 888 77)"})]
(R {:msgtype msgs/HISTORY-REPLY
:content (->> (msgs/history-reply-content histmaps)
(s/assert ::msp/history-reply-content))})))
(def g-history-request-content
(R {:msgtype msgs/HISTORY-REQUEST
:content (msgs/history-request-content)}))
(def g-input-reply-content
(gen/let [v g-std-name]
(R {:msgtype msgs/INPUT-REPLY
:content (msgs/input-reply-content v)})))
(def g-input-request-content
(gen/let [prompt (gen/fmap (P str ":") g-std-name)
password gen/boolean]
(R {:msgtype msgs/INPUT-REQUEST
:content (->> (msgs/input-request-content prompt password)
(s/assert ::msp/input-request-content))})))
(def g-inspect-reply-content
(gen/let [code-str shg/g-safe-code-string
result-str (gen/elements ["RESULT-HERE"])]
(R {:msgtype msgs/INSPECT-REPLY
:content (->> (msgs/inspect-reply-content code-str result-str)
(s/assert ::msp/inspect-reply-content))})))
(def g-inspect-request-content
(gen/let [code shg/g-safe-code-string
pos (gen/choose 0 (count code))]
(R {:msgtype msgs/INSPECT-REQUEST
:content (->> (msgs/inspect-request-content code pos)
(s/assert ::msp/inspect-request-content))})))
;;(def g-interrupt-reply-content) ;; NOT IMPLEMENTED
;;(def g-interrupt-request-content) ;; NOT IMPLEMENTED
(def g-is-complete-reply-content
(gen/let [status g-reply-status]
(R {:msgtype msgs/IS-COMPLETE-REPLY
:content (->> (msgs/is-complete-reply-content status)
(s/assert ::msp/is-complete-reply-content))})))
(def g-is-complete-request-content
(gen/let [codestr shg/g-safe-code-string
len (gen/one-of [(gen/elements [(count codestr)])
(gen/choose 0 (dec (count codestr)))])
codesubstr (R (subs codestr 0 len))]
(R {:msgtype msgs/IS-COMPLETE-REQUEST
:content (->> (msgs/is-complete-request-content codesubstr)
(s/assert ::msp/is-complete-request-content))})))
(def g-kernel-info-reply-content
(gen/let [banner (shg/g-nilable g-std-name)
clj-ver (shg/g-nilable (gen/elements ["1.2.3"]))
impl (shg/g-nilable (gen/elements ["some-other-impl"]))
proto-ver (shg/g-nilable (gen/frequency [[10 (gen/elements [msgs/PROTOCOL-VERSION])]
[1 (gen/elements ["5.1" "5.2" "5.3 " "4.0" "5.8" "7.0" "99.1"])]]))
version-str (shg/g-nilable (gen/frequency [[10 (gen/elements [clojupyter/version])]
[1 (gen/elements ["0.0.0" "1.2.3" "2.3.4" "5.6.0"])]]))]
(R {:msgtype msgs/KERNEL-INFO-REPLY
:content (->> (msgs/kernel-info-reply-content msgs/PROTOCOL-VERSION
{:banner banner
:clj-ver clj-ver
:implementation impl
:protocol-version proto-ver
:version-string version-str})
(s/assert ::msp/kernel-info-reply-content))})))
(def g-kernel-info-request-content
(R {:msgtype msgs/KERNEL-INFO-REQUEST
:content (->> (msgs/kernel-info-request-content)
(s/assert ::msp/kernel-info-request-content))}))
(def g-shutdown-reply
(gen/let [restart? gen/boolean]
(R {:msgtype msgs/SHUTDOWN-REPLY
:content (->> (msgs/shutdown-reply-content restart?)
(s/assert ::msp/shutdown-reply-content))})))
(def g-shutdown-request
(gen/let [restart? gen/boolean]
(R {:msgtype msgs/SHUTDOWN-REQUEST
:content (->> (msgs/shutdown-request-content restart?)
(s/assert ::msp/shutdown-request-content))})))
(def g-status-message-content
(gen/let [state (gen/elements ["busy" "idle" "starting"])]
(R {:msgtype msgs/STATUS
:content (->> (msgs/status-message-content state)
(s/assert ::msp/status-message-content))})))
(def g-stream-message-content
(gen/let [nm g-std-name
text g-std-name]
(R {:msgtype msgs/STREAM
:content (->> (msgs/stream-message-content nm text)
(s/assert ::msp/stream-message-content))})))
;;; ------------------------------------------------------------------------------------------------------------------------
;;; MULTI-MESSAGE GENERATORS
;;; ------------------------------------------------------------------------------------------------------------------------
(def g-jupmsg-content-any
(gen/one-of [
g-comm-close-content
g-comm-info-reply-content
g-comm-info-request-content
g-comm-message-content
g-comm-open-content
g-complete-reply-content
g-complete-request-content
g-error-message-content
g-execute-input-message-content
g-execute-reply-content
g-execute-request-content
g-execute-result-content
g-history-reply-content
g-history-request-content
g-input-request-content
g-inspect-reply-content
g-inspect-request-content
g-is-complete-reply-content
g-is-complete-request-content
g-kernel-info-reply-content
g-kernel-info-request-content
g-shutdown-reply
g-shutdown-request
g-status-message-content
g-stream-message-content
]))
(def g-jupmsg-any
(gen/let [{:keys [content msgtype]} g-jupmsg-content-any
envelope (shg/g-byte-arrays 0 0 1 5)
signature (shg/g-byte-array 10 20)
hdr (g-message-header msgtype)
phdr (g-message-header msgtype)
metadata (R {})
buffers (R [])]
(R (msgs/make-jupmsg envelope signature hdr phdr metadata content buffers ))))
|
[
{
"context": " (b/button {:key \"upload-button\" :className \"btn-fill\" :kind :info\n ",
"end": 7481,
"score": 0.764278769493103,
"start": 7475,
"tag": "KEY",
"value": "button"
},
{
"context": "ofileList :show-profiles :device-profile [[:name \"Name\"] [:description \"Description\"]\n ",
"end": 9216,
"score": 0.6835894584655762,
"start": 9212,
"tag": "NAME",
"value": "Name"
}
] |
src/main/org/edgexfoundry/ui/manager/ui/profiles.cljs
|
jpwhitemn/edgex-ui-clojure
| 0 |
;;; Copyright (c) 2018
;;; IoTech Ltd
;;; SPDX-License-Identifier: Apache-2.0
(ns org.edgexfoundry.ui.manager.ui.profiles
(:require [fulcro.client.primitives :as prim :refer [defui defsc]]
[fulcro.i18n :refer [tr]]
[fulcro.client.dom :as dom]
[fulcro.client.data-fetch :as df :refer [load-field-action]]
[fulcro.client.routing :as r]
[fulcro.ui.bootstrap3 :as b]
[fulcro.ui.forms :as f]
[fulcro.ui.file-upload :refer [FileUploadInput file-upload-input file-upload-networking cropped-name]]
[fulcro.client.mutations :as m :refer [defmutation]]
[org.edgexfoundry.ui.manager.ui.table :as t :refer [deftable]]
[org.edgexfoundry.ui.manager.ui.common :as co]
[org.edgexfoundry.ui.manager.api.mutations :as mu]
[org.edgexfoundry.ui.manager.ui.routing :as routing]
[org.edgexfoundry.ui.manager.ui.dialogs :as d]
[goog.object :as gobj]
[cljsjs.highlight]
[cljsjs.highlight.langs.yaml]))
(declare ProfileListEntry)
(declare ProfileYAMLFile)
;; Mutation clearing any previously selected files from the :pr-name file
;; upload control, so the add-profile dialog always opens empty.
(defmutation prepare-add-profile [_]
  (action [{:keys [state]}]
          (swap! state assoc-in
                 [:fulcro.ui.file-upload/by-id :pr-name :file-upload/files]
                 [])))
;; Opens the add-profile dialog: resets the upload control, points the modal
;; router at the add-profile screen, then shows the modal.  The trailing
;; :add-profile keyword is a follow-on read that refreshes dependent UI.
(defn show-add-profile-modal [comp]
  (prim/transact! comp `[(prepare-add-profile)
                         (r/set-route {:router :root/modal-router :target [:add-profile-modal :singleton]})
                         (b/show-modal {:id :add-profile-modal})
                         :add-profile]))
;; Hides the add-profile dialog, uploads the selected file (identified by
;; `upload-file-id`) to the server, then reloads the profile list into both
;; the profile table and the new-device wizard's profile choices.
(defn upload-profile [comp upload-file-id]
  (prim/transact! comp `[(b/hide-modal {:id :add-profile-modal})
                         (mu/upload-profile {:file-id ~upload-file-id})])
  (df/load comp :q/edgex-profiles ProfileListEntry {:target (df/multiple-targets
                                                              (conj co/profile-list-ident :content)
                                                              (conj co/new-device-ident :profiles))}))
;; Mutation fetching the YAML source of profile `id` into the profile-yaml
;; view, and recording the profile's name (read from the local :device-profile
;; table) so the view can show it as a title.
;; NOTE(review): assumes `id` is a keyword usable as a map lookup into the
;; :device-profile table — confirm against the table's ident function.
(defmutation load-profile-yaml
  [{:keys [id]}]
  (action [{:keys [state] :as env}]
          (df/load-action env :q/edgex-profile-yaml ProfileYAMLFile
                          {:target (conj co/profile-yaml-ident :profile-yaml)
                           :params {:id id}})
          (swap! state (fn [s] (-> s
                                   (assoc-in (conj co/profile-yaml-ident :ui/name) (-> s :device-profile id :name))))))
  ;; remote-load is required so the load-action queued above is actually sent.
  (remote [env] (df/remote-load env)))
;; Renders a labelled form field as a bootstrap row: label in the left third
;; (xs 4), the form field itself in the remaining two thirds (xs 8).  Extra
;; `params` are passed straight through to `f/form-field`.
(defn table-entry
  [comp form name label & params]
  (b/row {}
         (b/col {:xs 4 :htmlFor name} label)
         (b/col {:xs 8} (apply f/form-field comp form name params))))
;; Form component wrapping a single file-upload input for the device-profile
;; YAML file.  Accepts application/x-yaml, single file only, with custom
;; renderers for the selected-file row and the "add file" control.
(defui ^:once AddProfileForm
  static prim/InitialAppState
  (initial-state [this params] (f/build-form this {:db/id 2 :profile-file (prim/get-initial-state FileUploadInput {:id :pr-name})}))
  static f/IForm
  (form-spec [this] [(f/id-field :db/id)
                     (file-upload-input :profile-file)])
  static prim/IQuery
  (query [this] [f/form-root-key f/form-key :db/id
                 {:profile-file (prim/get-query FileUploadInput)}])
  static prim/Ident
  (ident [this props] co/add-profile-ident)
  Object
  (render [this]
    (let [{:keys [db/id] :as props} (prim/props this)]
      (dom/div {:className "content"}
               (table-entry this props :profile-file "Select Profile YAML File" :accept "application/x-yaml"
                            :multiple? false
                            ;; Row renderer for each selected file: truncated name,
                            ;; size, a cancel glyph, and a progress bar while the
                            ;; upload is in flight.
                            :renderFile (fn [file-component]
                                          (let [onCancel (prim/get-computed file-component :onCancel)
                                                {:keys [file/id file/name file/size file/progress
                                                        file/status] :as props} (prim/props file-component)
                                                label (cropped-name name 20)]
                                            (dom/li {:style #js {:listStyleType "none"} :key (str "file-" id)}
                                                    (str label " (" size " bytes) ")
                                                    (b/glyphicon {:size "14pt" :onClick #(onCancel id)} :remove-circle)
                                                    (dom/br nil)
                                                    (case status
                                                      :failed (dom/span nil "FAILED!")
                                                      :done ""
                                                      (b/progress-bar {:current progress})))))
                            ;; Control renderer: a hidden <input type="file"> driven
                            ;; by a styled label, so the browser's default file
                            ;; widget is replaced by a "+" button.
                            :renderControl (fn [onChange accept multiple?]
                                             (let [control-id (str "add-control-" id)
                                                   attrs (cond-> {:onChange (fn [evt] (onChange evt))
                                                                  :id control-id
                                                                  :style #js {:display "none"}
                                                                  :value ""
                                                                  :type "file"}
                                                           accept (assoc :accept accept)
                                                           multiple? (assoc :multiple "multiple")
                                                           :always clj->js)]
                                               (dom/label {:htmlFor control-id} (b/glyphicon {:className "btn btn-primary"} :plus)
                                                          (dom/input attrs)))))))))
;; Element factory for AddProfileForm; React key taken from the form's :db/id.
(def ui-add-profile-form (prim/factory AddProfileForm {:keyfn :db/id}))
;; Modal dialog hosting the AddProfileForm plus Upload/Cancel buttons.
;; It also queries the :pr-name upload control directly so the Upload button
;; can pass the selected file's upload id to `upload-profile`.
(defsc AddProfileModal [this {:keys [profile-form modal modal/page] :as props}]
  {:initial-state (fn [p] {:profile-form (prim/get-initial-state AddProfileForm {:db/id 2})
                           :modal (prim/get-initial-state b/Modal {:id :add-profile-modal :backdrop true})
                           :modal/page :add-profile-modal})
   :ident (fn [] [:add-profile-modal :singleton])
   :query [{:profile-form (prim/get-query AddProfileForm)}
           {:modal (prim/get-query b/Modal)}
           {[:fulcro.ui.file-upload/by-id :pr-name] [:file-upload/files]}
           :modal/page]}
  ;; NOTE(review): `(-> files first second)` assumes each entry in
  ;; :file-upload/files is a pair whose second element is the upload id —
  ;; confirm against fulcro.ui.file-upload's state layout.
  (let [files (get-in props [[:fulcro.ui.file-upload/by-id :pr-name] :file-upload/files])
        upload-file-id (if (empty? files)
                         nil
                         (-> files first second))]
    (b/ui-modal modal
                (b/ui-modal-title nil
                                  (dom/div #js {:key "title"
                                                :style #js {:fontSize "22px"}} "Upload Device Profile"))
                (b/ui-modal-body nil
                                 (ui-add-profile-form profile-form))
                (b/ui-modal-footer nil
                                   (b/button {:key "upload-button" :className "btn-fill" :kind :info
                                              :onClick #(upload-profile this upload-file-id)}
                                             "Upload")
                                   (b/button {:key "cancel-button" :className "btn-fill" :kind :danger
                                              :onClick #(prim/transact! this `[(b/hide-modal {:id :add-profile-modal})])}
                                             "Cancel")))))
;; Query-only component: supplies the ident/query used to load a profile's
;; YAML text; it is never rendered directly (no render body).
(defsc ProfileYAMLFile [this {:keys [yaml]}]
  {:ident (fn [] [:yaml-file :singleton])
   :query [:yaml]})
;; Read-only view of a profile's YAML source with a back button.  On mount it
;; runs highlight.js over the <code id="yaml"> element to syntax-highlight
;; the YAML.
;; NOTE(review): `(-> profile-yaml first :yaml)` treats the :profile-yaml
;; join as a sequence — confirm the load target actually stores a collection
;; here rather than a single entity map.
(defsc ProfileYAML
  [this {:keys [ui/name ui/show-profile-yaml profile-yaml] :as props}]
  {:initial-state (fn [p] {:ui/name "" :ui/show-profile-yaml true})
   :ident (fn [] co/profile-yaml-ident)
   :query [:ui/name :ui/show-profile-yaml
           {:profile-yaml (prim/get-query ProfileYAMLFile)}]
   :componentDidMount (fn [] (let [code (.call (goog.object/get js/document "getElementById") js/document "yaml")]
                               (js/hljs.highlightBlock code)))}
  (dom/div nil
           (dom/h5 nil (str "Profile " name))
           (b/button
             {:onClick #(routing/nav-to! this :profile)}
             (dom/i #js {:className "glyphicon fa fa-caret-square-o-left"}))
           (dom/pre nil
                    (dom/code {:id "yaml"} (-> profile-yaml first :yaml)))))
;; Table-action callback: loads the YAML for profile `id` and navigates to
;; the YAML view.  `type` is supplied by the table action protocol but is
;; unused here.
(defn show-profile [this type id]
  (prim/transact! this `[(load-profile-yaml {:id ~id})])
  (routing/nav-to! this :profile-yaml))
;; Deletes profile `id` on the server and resets the profile table back to
;; its first page (the row count may have shrunk below the current page).
(defn do-delete-profile [this id]
  (prim/transact! this `[(mu/delete-profile {:id ~id})
                         (t/reset-table-page {:id :show-profiles})]))
;; Profile table: name/description/last-modified columns, toolbar buttons for
;; add (opens the upload modal) and refresh, a delete-confirmation modal, and
;; per-row view/delete actions.
;; NOTE(review): the view action's title reads "View Device" although it
;; opens the profile YAML view — probably should be "View Profile"; confirm
;; and fix.
(deftable ProfileList :show-profiles :device-profile [[:name "Name"] [:description "Description"]
                                                      [:modified "Last Modified" #(co/conv-time %2)]]
  [{:onClick #(show-add-profile-modal this) :icon "plus"}
   {:onClick #(df/refresh! this {:fallback `d/show-error}) :icon "refresh"}]
  :name-row-symbol ProfileListEntry
  :modals [{:modal d/DeleteModal :params {:modal-id :dp-modal} :callbacks {:onDelete do-delete-profile}}]
  :actions [{:title "View Device" :action-class :info :symbol "info" :onClick show-profile}
            {:title "Delete Profile" :action-class :danger :symbol "times" :onClick (d/mk-show-modal :dp-modal)}])
|
58100
|
;;; Copyright (c) 2018
;;; IoTech Ltd
;;; SPDX-License-Identifier: Apache-2.0
(ns org.edgexfoundry.ui.manager.ui.profiles
(:require [fulcro.client.primitives :as prim :refer [defui defsc]]
[fulcro.i18n :refer [tr]]
[fulcro.client.dom :as dom]
[fulcro.client.data-fetch :as df :refer [load-field-action]]
[fulcro.client.routing :as r]
[fulcro.ui.bootstrap3 :as b]
[fulcro.ui.forms :as f]
[fulcro.ui.file-upload :refer [FileUploadInput file-upload-input file-upload-networking cropped-name]]
[fulcro.client.mutations :as m :refer [defmutation]]
[org.edgexfoundry.ui.manager.ui.table :as t :refer [deftable]]
[org.edgexfoundry.ui.manager.ui.common :as co]
[org.edgexfoundry.ui.manager.api.mutations :as mu]
[org.edgexfoundry.ui.manager.ui.routing :as routing]
[org.edgexfoundry.ui.manager.ui.dialogs :as d]
[goog.object :as gobj]
[cljsjs.highlight]
[cljsjs.highlight.langs.yaml]))
(declare ProfileListEntry)
(declare ProfileYAMLFile)
(defmutation prepare-add-profile [nonparams]
(action [{:keys [state]}]
(swap! state (fn [s]
(-> s
(assoc-in [:fulcro.ui.file-upload/by-id :pr-name :file-upload/files] []))))))
(defn show-add-profile-modal [comp]
(prim/transact! comp `[(prepare-add-profile)
(r/set-route {:router :root/modal-router :target [:add-profile-modal :singleton]})
(b/show-modal {:id :add-profile-modal})
:add-profile]))
(defn upload-profile [comp upload-file-id]
(prim/transact! comp `[(b/hide-modal {:id :add-profile-modal})
(mu/upload-profile {:file-id ~upload-file-id})])
(df/load comp :q/edgex-profiles ProfileListEntry {:target (df/multiple-targets
(conj co/profile-list-ident :content)
(conj co/new-device-ident :profiles))}))
(defmutation load-profile-yaml
[{:keys [id]}]
(action [{:keys [state] :as env}]
(df/load-action env :q/edgex-profile-yaml ProfileYAMLFile
{:target (conj co/profile-yaml-ident :profile-yaml)
:params {:id id}})
(swap! state (fn [s] (-> s
(assoc-in (conj co/profile-yaml-ident :ui/name) (-> s :device-profile id :name))))))
(remote [env] (df/remote-load env)))
(defn table-entry
[comp form name label & params]
(b/row {}
(b/col {:xs 4 :htmlFor name} label)
(b/col {:xs 8} (apply f/form-field comp form name params))))
;; Form component for the Add Profile dialog. It wraps a single
;; FileUploadInput (:pr-name) and customizes both the per-file row rendering
;; (name, size, cancel glyph, progress bar) and the file-picker control
;; (hidden <input type=file> behind a styled label).
(defui ^:once AddProfileForm
  static prim/InitialAppState
  (initial-state [this params] (f/build-form this {:db/id 2 :profile-file (prim/get-initial-state FileUploadInput {:id :pr-name})}))
  static f/IForm
  (form-spec [this] [(f/id-field :db/id)
                     (file-upload-input :profile-file)])
  static prim/IQuery
  (query [this] [f/form-root-key f/form-key :db/id
                 {:profile-file (prim/get-query FileUploadInput)}])
  static prim/Ident
  (ident [this props] co/add-profile-ident)
  Object
  (render [this]
    (let [{:keys [db/id] :as props} (prim/props this)]
      (dom/div {:className "content"}
               (table-entry this props :profile-file "Select Profile YAML File" :accept "application/x-yaml"
                            :multiple? false
                            ;; Custom row for each selected file: cropped name,
                            ;; byte size, a cancel icon, and a progress bar
                            ;; unless the upload is done or failed.
                            :renderFile (fn [file-component]
                                          (let [onCancel (prim/get-computed file-component :onCancel)
                                                {:keys [file/id file/name file/size file/progress
                                                        file/status] :as props} (prim/props file-component)
                                                label (cropped-name name 20)]
                                            (dom/li {:style #js {:listStyleType "none"} :key (str "file-" id)}
                                                    (str label " (" size " bytes) ")
                                                    (b/glyphicon {:size "14pt" :onClick #(onCancel id)} :remove-circle)
                                                    (dom/br nil)
                                                    (case status
                                                      :failed (dom/span nil "FAILED!")
                                                      :done ""
                                                      (b/progress-bar {:current progress})))))
                            ;; The native file input is hidden; the visible
                            ;; "+" button is the label tied to it by id.
                            :renderControl (fn [onChange accept multiple?]
                                             (let [control-id (str "add-control-" id)
                                                   attrs (cond-> {:onChange (fn [evt] (onChange evt))
                                                                  :id control-id
                                                                  :style #js {:display "none"}
                                                                  :value ""
                                                                  :type "file"}
                                                           accept (assoc :accept accept)
                                                           multiple? (assoc :multiple "multiple")
                                                           :always clj->js)]
                                               (dom/label {:htmlFor control-id} (b/glyphicon {:className "btn btn-primary"} :plus)
                                                          (dom/input attrs)))))))))
;; Element factory for AddProfileForm, keyed by :db/id.
(def ui-add-profile-form (prim/factory AddProfileForm {:keyfn :db/id}))
;; Modal dialog for uploading a new device profile YAML file. Hosts
;; AddProfileForm plus Upload/Cancel buttons; the Upload click sends the id
;; of the first file currently selected in the :pr-name upload control.
(defsc AddProfileModal [this {:keys [profile-form modal modal/page] :as props}]
  {:initial-state (fn [p] {:profile-form (prim/get-initial-state AddProfileForm {:db/id 2})
                           :modal (prim/get-initial-state b/Modal {:id :add-profile-modal :backdrop true})
                           :modal/page :add-profile-modal})
   :ident (fn [] [:add-profile-modal :singleton])
   :query [{:profile-form (prim/get-query AddProfileForm)}
           {:modal (prim/get-query b/Modal)}
           {[:fulcro.ui.file-upload/by-id :pr-name] [:file-upload/files]}
           :modal/page]}
  (let [files (get-in props [[:fulcro.ui.file-upload/by-id :pr-name] :file-upload/files])
        ;; presumably each entry is an ident-like pair whose second element is
        ;; the file id — TODO confirm against FileUploadInput's normalization
        upload-file-id (if (empty? files)
                         nil
                         (-> files first second))]
    (b/ui-modal modal
                (b/ui-modal-title nil
                                  (dom/div #js {:key "title"
                                                :style #js {:fontSize "22px"}} "Upload Device Profile"))
                (b/ui-modal-body nil
                                 (ui-add-profile-form profile-form))
                (b/ui-modal-footer nil
                                   ;; Fixed corrupted React key ("upload-<KEY>")
                                   ;; to a stable key matching "cancel-button".
                                   (b/button {:key "upload-button" :className "btn-fill" :kind :info
                                              :onClick #(upload-profile this upload-file-id)}
                                             "Upload")
                                   (b/button {:key "cancel-button" :className "btn-fill" :kind :danger
                                              :onClick #(prim/transact! this `[(b/hide-modal {:id :add-profile-modal})])}
                                             "Cancel")))))
;; Query-only component: the server-returned YAML text for one profile.
;; It has no render body; it exists to normalize and query :yaml.
(defsc ProfileYAMLFile [this {:keys [yaml]}]
  {:ident (fn [] [:yaml-file :singleton])
   :query [:yaml]})
;; Read-only view of a device profile's YAML source, syntax-highlighted via
;; highlight.js after mount, with a back button to the profile list.
(defsc ProfileYAML
  [this {:keys [ui/name ui/show-profile-yaml profile-yaml] :as props}]
  {:initial-state (fn [p] {:ui/name "" :ui/show-profile-yaml true})
   :ident (fn [] co/profile-yaml-ident)
   :query [:ui/name :ui/show-profile-yaml
           {:profile-yaml (prim/get-query ProfileYAMLFile)}]
   ;; NOTE(review): getElementById is looked up indirectly via goog.object/get
   ;; — presumably to dodge advanced-compilation renaming; confirm a plain
   ;; (.getElementById js/document "yaml") would also work here.
   :componentDidMount (fn [] (let [code (.call (goog.object/get js/document "getElementById") js/document "yaml")]
                               (js/hljs.highlightBlock code)))}
  (dom/div nil
           (dom/h5 nil (str "Profile " name))
           (b/button
             {:onClick #(routing/nav-to! this :profile)}
             (dom/i #js {:className "glyphicon fa fa-caret-square-o-left"}))
           (dom/pre nil
                    (dom/code {:id "yaml"} (-> profile-yaml first :yaml)))))
;; Table-action handler: load the YAML for profile `id` and navigate to the
;; YAML view. `type` is supplied by the deftable action protocol but unused.
(defn show-profile [this type id]
  (prim/transact! this `[(load-profile-yaml {:id ~id})])
  (routing/nav-to! this :profile-yaml))
;; Delete profile `id` on the server and reset the profile table to its
;; first page so stale pagination is not shown.
(defn do-delete-profile [this id]
  (prim/transact! this `[(mu/delete-profile {:id ~id})
                         (t/reset-table-page {:id :show-profiles})]))
;; Profile table: name/description/last-modified columns, add+refresh toolbar
;; buttons, a delete-confirmation modal, and per-row view/delete actions.
;; Fixed corrupted column header string ("<NAME>") back to "Name".
(deftable ProfileList :show-profiles :device-profile [[:name "Name"] [:description "Description"]
                                                      [:modified "Last Modified" #(co/conv-time %2)]]
  [{:onClick #(show-add-profile-modal this) :icon "plus"}
   {:onClick #(df/refresh! this {:fallback `d/show-error}) :icon "refresh"}]
  :name-row-symbol ProfileListEntry
  :modals [{:modal d/DeleteModal :params {:modal-id :dp-modal} :callbacks {:onDelete do-delete-profile}}]
  ;; NOTE(review): "View Device" title looks like a copy-paste from the device
  ;; table — should probably read "View Profile"; confirm before changing.
  :actions [{:title "View Device" :action-class :info :symbol "info" :onClick show-profile}
            {:title "Delete Profile" :action-class :danger :symbol "times" :onClick (d/mk-show-modal :dp-modal)}])
| true |
;;; Copyright (c) 2018
;;; IoTech Ltd
;;; SPDX-License-Identifier: Apache-2.0
(ns org.edgexfoundry.ui.manager.ui.profiles
(:require [fulcro.client.primitives :as prim :refer [defui defsc]]
[fulcro.i18n :refer [tr]]
[fulcro.client.dom :as dom]
[fulcro.client.data-fetch :as df :refer [load-field-action]]
[fulcro.client.routing :as r]
[fulcro.ui.bootstrap3 :as b]
[fulcro.ui.forms :as f]
[fulcro.ui.file-upload :refer [FileUploadInput file-upload-input file-upload-networking cropped-name]]
[fulcro.client.mutations :as m :refer [defmutation]]
[org.edgexfoundry.ui.manager.ui.table :as t :refer [deftable]]
[org.edgexfoundry.ui.manager.ui.common :as co]
[org.edgexfoundry.ui.manager.api.mutations :as mu]
[org.edgexfoundry.ui.manager.ui.routing :as routing]
[org.edgexfoundry.ui.manager.ui.dialogs :as d]
[goog.object :as gobj]
[cljsjs.highlight]
[cljsjs.highlight.langs.yaml]))
(declare ProfileListEntry)
(declare ProfileYAMLFile)
(defmutation prepare-add-profile [nonparams]
(action [{:keys [state]}]
(swap! state (fn [s]
(-> s
(assoc-in [:fulcro.ui.file-upload/by-id :pr-name :file-upload/files] []))))))
(defn show-add-profile-modal [comp]
(prim/transact! comp `[(prepare-add-profile)
(r/set-route {:router :root/modal-router :target [:add-profile-modal :singleton]})
(b/show-modal {:id :add-profile-modal})
:add-profile]))
(defn upload-profile [comp upload-file-id]
(prim/transact! comp `[(b/hide-modal {:id :add-profile-modal})
(mu/upload-profile {:file-id ~upload-file-id})])
(df/load comp :q/edgex-profiles ProfileListEntry {:target (df/multiple-targets
(conj co/profile-list-ident :content)
(conj co/new-device-ident :profiles))}))
(defmutation load-profile-yaml
[{:keys [id]}]
(action [{:keys [state] :as env}]
(df/load-action env :q/edgex-profile-yaml ProfileYAMLFile
{:target (conj co/profile-yaml-ident :profile-yaml)
:params {:id id}})
(swap! state (fn [s] (-> s
(assoc-in (conj co/profile-yaml-ident :ui/name) (-> s :device-profile id :name))))))
(remote [env] (df/remote-load env)))
(defn table-entry
[comp form name label & params]
(b/row {}
(b/col {:xs 4 :htmlFor name} label)
(b/col {:xs 8} (apply f/form-field comp form name params))))
(defui ^:once AddProfileForm
static prim/InitialAppState
(initial-state [this params] (f/build-form this {:db/id 2 :profile-file (prim/get-initial-state FileUploadInput {:id :pr-name})}))
static f/IForm
(form-spec [this] [(f/id-field :db/id)
(file-upload-input :profile-file)])
static prim/IQuery
(query [this] [f/form-root-key f/form-key :db/id
{:profile-file (prim/get-query FileUploadInput)}])
static prim/Ident
(ident [this props] co/add-profile-ident)
Object
(render [this]
(let [{:keys [db/id] :as props} (prim/props this)]
(dom/div {:className "content"}
(table-entry this props :profile-file "Select Profile YAML File" :accept "application/x-yaml"
:multiple? false
:renderFile (fn [file-component]
(let [onCancel (prim/get-computed file-component :onCancel)
{:keys [file/id file/name file/size file/progress
file/status] :as props} (prim/props file-component)
label (cropped-name name 20)]
(dom/li {:style #js {:listStyleType "none"} :key (str "file-" id)}
(str label " (" size " bytes) ")
(b/glyphicon {:size "14pt" :onClick #(onCancel id)} :remove-circle)
(dom/br nil)
(case status
:failed (dom/span nil "FAILED!")
:done ""
(b/progress-bar {:current progress})))))
:renderControl (fn [onChange accept multiple?]
(let [control-id (str "add-control-" id)
attrs (cond-> {:onChange (fn [evt] (onChange evt))
:id control-id
:style #js {:display "none"}
:value ""
:type "file"}
accept (assoc :accept accept)
multiple? (assoc :multiple "multiple")
:always clj->js)]
(dom/label {:htmlFor control-id} (b/glyphicon {:className "btn btn-primary"} :plus)
(dom/input attrs)))))))))
(def ui-add-profile-form (prim/factory AddProfileForm {:keyfn :db/id}))
(defsc AddProfileModal [this {:keys [profile-form modal modal/page] :as props}]
{:initial-state (fn [p] {:profile-form (prim/get-initial-state AddProfileForm {:db/id 2})
:modal (prim/get-initial-state b/Modal {:id :add-profile-modal :backdrop true})
:modal/page :add-profile-modal})
:ident (fn [] [:add-profile-modal :singleton])
:query [{:profile-form (prim/get-query AddProfileForm)}
{:modal (prim/get-query b/Modal)}
{[:fulcro.ui.file-upload/by-id :pr-name] [:file-upload/files]}
:modal/page]}
(let [files (get-in props [[:fulcro.ui.file-upload/by-id :pr-name] :file-upload/files])
upload-file-id (if (empty? files)
nil
(-> files first second))]
(b/ui-modal modal
(b/ui-modal-title nil
(dom/div #js {:key "title"
:style #js {:fontSize "22px"}} "Upload Device Profile"))
(b/ui-modal-body nil
(ui-add-profile-form profile-form))
(b/ui-modal-footer nil
(b/button {:key "upload-PI:KEY:<KEY>END_PI" :className "btn-fill" :kind :info
:onClick #(upload-profile this upload-file-id)}
"Upload")
(b/button {:key "cancel-button" :className "btn-fill" :kind :danger
:onClick #(prim/transact! this `[(b/hide-modal {:id :add-profile-modal})])}
"Cancel")))))
(defsc ProfileYAMLFile [this {:keys [yaml]}]
{:ident (fn [] [:yaml-file :singleton])
:query [:yaml]})
(defsc ProfileYAML
[this {:keys [ui/name ui/show-profile-yaml profile-yaml] :as props}]
{:initial-state (fn [p] {:ui/name "" :ui/show-profile-yaml true})
:ident (fn [] co/profile-yaml-ident)
:query [:ui/name :ui/show-profile-yaml
{:profile-yaml (prim/get-query ProfileYAMLFile)}]
:componentDidMount (fn [] (let [code (.call (goog.object/get js/document "getElementById") js/document "yaml")]
(js/hljs.highlightBlock code)))}
(dom/div nil
(dom/h5 nil (str "Profile " name))
(b/button
{:onClick #(routing/nav-to! this :profile)}
(dom/i #js {:className "glyphicon fa fa-caret-square-o-left"}))
(dom/pre nil
(dom/code {:id "yaml"} (-> profile-yaml first :yaml)))))
(defn show-profile [this type id]
(prim/transact! this `[(load-profile-yaml {:id ~id})])
(routing/nav-to! this :profile-yaml))
(defn do-delete-profile [this id]
(prim/transact! this `[(mu/delete-profile {:id ~id})
(t/reset-table-page {:id :show-profiles})]))
(deftable ProfileList :show-profiles :device-profile [[:name "PI:NAME:<NAME>END_PI"] [:description "Description"]
[:modified "Last Modified" #(co/conv-time %2)]]
[{:onClick #(show-add-profile-modal this) :icon "plus"}
{:onClick #(df/refresh! this {:fallback `d/show-error}) :icon "refresh"}]
:name-row-symbol ProfileListEntry
:modals [{:modal d/DeleteModal :params {:modal-id :dp-modal} :callbacks {:onDelete do-delete-profile}}]
:actions [{:title "View Device" :action-class :info :symbol "info" :onClick show-profile}
{:title "Delete Profile" :action-class :danger :symbol "times" :onClick (d/mk-show-modal :dp-modal)}])
|
[
{
"context": "l email\n :password pass}}\n ok err))\n\n(s/defn save*\n \"Does",
"end": 2871,
"score": 0.5058358311653137,
"start": 2867,
"tag": "PASSWORD",
"value": "pass"
}
] |
src-ui/jsk/net/api.cljs
|
e85th/jsk
| 0 |
(ns jsk.net.api
(:require [e85th.ui.net.rpc :as rpc]
[taoensso.timbre :as log]
[jsk.common.data :as data]
[re-frame.core :as rf]
[schema.core :as s]
[e85th.ui.rf.macros :refer-macros [defevent-fx]]
[e85th.ui.browser :as browser]
[e85th.ui.util :as u]))
;; Pluggable handler invoked on 401/403 responses; defaults to a no-op that
;; returns an empty fx map.
(def auth-error-handler (atom (constantly {})))
(defn set-auth-err-handler!
  "f receives the response when there is an auth error. It should return
   an empty map or a map that re-frame's fx event handling can process."
  [f]
  (reset! auth-error-handler f))
;; HTTP status codes treated as authentication/authorization failures.
(def auth-err-status #{401 403})
;; Build an absolute API URL by prefixing host + "/api" onto a path.
(s/defn ^:private full-url
  [url-path]
  (str (data/api-host) "/api" url-path))
;; Absolute URL of the search-suggest endpoint (used directly by typeahead
;; widgets rather than through new-request).
(defn suggest-url
  []
  (full-url "/v1/search/suggest"))
;; Interceptor event for failed requests: auth errors (401/403) go to the
;; registered auth-error-handler; anything else re-dispatches the caller's
;; original error vector with the response appended.
(defevent-fx request-err
  [_ [_ orig-err-vector {:keys [status] :as response}]]
  ;; if the response code is 401/403 then send to login
  ;; otherwise dispatch the original event
  (if (auth-err-status status)
    (@auth-error-handler response)
    {:dispatch (conj orig-err-vector response)}))
;; Build a re-frame request map for `method`/`url` with EDN format and a
;; bearer token attached. Does NOT add the shared auth-error interception —
;; see new-request for that.
(s/defn new-request*
  ([method url ok err]
   (new-request* method url {} ok err))
  ([method url params ok err]
   (-> (rpc/new-re-frame-request method (full-url url) params ok err)
       rpc/with-edn-format
       (rpc/with-bearer-auth (data/jsk-token)))))
(s/defn new-request
  "Same as new-request* but wraps the error handler so auth errors (401/403)
   are routed through request-err before the caller's err handler."
  ([method url ok err]
   (new-request method url {} ok err))
  ([method url params ok err]
   (new-request* method url params ok [request-err err])))
(s/defn call!
  "For testing really. Use effects to actually make calls.
   re-frame handling. ok and err can be either keywords or a vector.
   If vector then the first should be a keyword to conform to re-frame dispatch
   semantics."
  ([method url params ok err]
   (call! (new-request method url params ok err)))
  ([req]
   ;; Vector handlers become re-frame dispatch callbacks: the response is
   ;; conj'd onto the event vector before dispatching.
   (let [ensure-handler-fn (fn [{:keys [handler error-handler] :as r}]
                             (cond-> r
                               (vector? handler) (assoc :handler #(rf/dispatch (conj handler %)))
                               (vector? error-handler) (assoc :error-handler #(rf/dispatch (conj error-handler %)))))]
     ;; (comp ensure-handler-fn) was a single-arity no-op wrapper; call the
     ;; normalizer directly.
     (rpc/call (ensure-handler-fn req)))))
;; POST an authentication body. Built with new-request* (not new-request) so
;; a 401 here reaches the caller's err handler instead of the global
;; auth-error interceptor.
(defn- authenticate
  [body ok err]
  ;; uses new-request* to make sure the general auth errors are not handled
  ;; by the interceptor, but by the passed in err handler
  (new-request* :post "/v1/users/actions/authenticate" body ok err))
(s/defn auth-google
  "Generates a request map that can be executed by call!"
  [token :- s/Str ok err]
  (authenticate {:with-google {:token token}} ok err))
;; Email/password authentication request map.
(s/defn auth-password
  [email :- s/Str pass :- s/Str ok err]
  (authenticate {:with-password {:email email
                                 :password pass}}
                ok err))
(s/defn save*
  "Create or update an entity: when `id-kw` is present in `data`, PUT to
   base-url/<id>; otherwise POST to base-url."
  [base-url id-kw data ok err]
  (if-let [entity-id (id-kw data)]
    (new-request :put (str base-url "/" entity-id) data ok err)
    (new-request :post base-url data ok err)))
;; -- Agents: CRUD wrappers over /v1/agents --
(s/defn fetch-agent-list
  [ok err]
  (new-request :get "/v1/agents" ok err))
;; Create or update depending on presence of :db/id.
(s/defn save-agent
  [agent ok err]
  (save* "/v1/agents" :db/id agent ok err))
(s/defn rm-agent
  [id ok err]
  (new-request :delete (str "/v1/agents/" id) ok err))
(s/defn fetch-agent
  [agent-id ok err]
  (new-request :get (str "/v1/agents/" agent-id) ok err))
;; -- Alerts: CRUD wrappers over /v1/alerts --
(s/defn fetch-alert-list
  [ok err]
  (new-request :get "/v1/alerts" ok err))
;; Create or update depending on presence of :db/id.
(s/defn save-alert
  [alert ok err]
  (save* "/v1/alerts" :db/id alert ok err))
(s/defn rm-alert
  [id ok err]
  (new-request :delete (str "/v1/alerts/" id) ok err))
(s/defn fetch-alert
  [alert-id ok err]
  (new-request :get (str "/v1/alerts/" alert-id) ok err))
;; -- Schedules: CRUD wrappers over /v1/schedules --
(s/defn fetch-schedule-list
  [ok err]
  (new-request :get "/v1/schedules" ok err))
;; Create or update depending on presence of :db/id.
(s/defn save-schedule
  [schedule ok err]
  (save* "/v1/schedules" :db/id schedule ok err))
(s/defn rm-schedule
  [id ok err]
  (new-request :delete (str "/v1/schedules/" id) ok err))
(s/defn fetch-schedule
  [schedule-id ok err]
  (new-request :get (str "/v1/schedules/" schedule-id) ok err))
;; -- Workflows: CRUD wrappers over /v1/workflows --
(s/defn fetch-workflow-list
  [ok err]
  (new-request :get "/v1/workflows" ok err))
;; Create or update depending on presence of :db/id.
(s/defn save-workflow
  [workflow ok err]
  (save* "/v1/workflows" :db/id workflow ok err))
(s/defn rm-workflow
  [id ok err]
  (new-request :delete (str "/v1/workflows/" id) ok err))
(s/defn fetch-workflow
  [workflow-id ok err]
  (new-request :get (str "/v1/workflows/" workflow-id) ok err))
;; -- Jobs: CRUD wrappers over /v1/jobs --
(s/defn fetch-job-list
  [ok err]
  (new-request :get "/v1/jobs" ok err))
;; Create or update depending on presence of :db/id.
(s/defn save-job
  [job ok err]
  (save* "/v1/jobs" :db/id job ok err))
(s/defn rm-job
  [id ok err]
  (new-request :delete (str "/v1/jobs/" id) ok err))
(s/defn fetch-job
  [job-id ok err]
  (new-request :get (str "/v1/jobs/" job-id) ok err))
;; List of available job types (read-only reference data).
(s/defn fetch-job-types
  [ok err]
  (new-request :get "/v1/job-types" ok err))
;; Typeahead suggestions for channels matching `text`.
(s/defn suggest-channels
  [text ok err]
  (new-request :get "/v1/search/suggest" {:q text} ok err))
;; -- Alert/channel associations --
(s/defn assoc-alert-channels
  [alert-id :- s/Int channel-ids :- [s/Int] ok err]
  (new-request :post "/v1/alerts/actions/assoc-channels"
               {:alert/id alert-id :channel/ids channel-ids}
               ok err))
(s/defn dissoc-alert-channels
  [alert-id :- s/Int channel-ids :- [s/Int] ok err]
  (new-request :post "/v1/alerts/actions/dissoc-channels"
               {:alert/id alert-id :channel/ids channel-ids}
               ok err))
;; -- Explorer tree nodes --
(s/defn fetch-explorer-nodes
  [type :- s/Str ok err]
  (new-request :get "/v1/explorer" {:type type} ok err))
(s/defn create-explorer-node
  [type :- s/Str ok err]
  (new-request :post "/v1/explorer/actions/create-node" {:type type} ok err))
(s/defn rm-explorer-node
  [type :- s/Str id ok err]
  (new-request :post "/v1/explorer/actions/rm-node" {:type type
                                                     :id id} ok err))
;; -- Job/schedule and job/alert associations --
(s/defn assoc-job-schedules
  [job-id :- s/Int schedule-ids :- [s/Int] ok err]
  (new-request :post "/v1/jobs/actions/assoc-schedules"
               {:job/id job-id :schedule/ids schedule-ids}
               ok err))
(s/defn dissoc-job-schedules
  [job-id :- s/Int schedule-ids :- [s/Int] ok err]
  (new-request :post "/v1/jobs/actions/dissoc-schedules"
               {:job/id job-id :schedule/ids schedule-ids}
               ok err))
(s/defn assoc-job-alerts
  [job-id :- s/Int alert-ids :- [s/Int] ok err]
  (new-request :post "/v1/jobs/actions/assoc-alerts"
               {:job/id job-id :alert/ids alert-ids}
               ok err))
(s/defn dissoc-job-alerts
  [job-id :- s/Int alert-ids :- [s/Int] ok err]
  (new-request :post "/v1/jobs/actions/dissoc-alerts"
               {:job/id job-id :alert/ids alert-ids}
               ok err))
;; -- Workflow/schedule and workflow/alert associations --
(s/defn assoc-workflow-schedules
  [workflow-id :- s/Int schedule-ids :- [s/Int] ok err]
  (new-request :post "/v1/workflows/actions/assoc-schedules"
               {:workflow/id workflow-id :schedule/ids schedule-ids}
               ok err))
(s/defn dissoc-workflow-schedules
  [workflow-id :- s/Int schedule-ids :- [s/Int] ok err]
  (new-request :post "/v1/workflows/actions/dissoc-schedules"
               {:workflow/id workflow-id :schedule/ids schedule-ids}
               ok err))
(s/defn assoc-workflow-alerts
  [workflow-id :- s/Int alert-ids :- [s/Int] ok err]
  (new-request :post "/v1/workflows/actions/assoc-alerts"
               {:workflow/id workflow-id :alert/ids alert-ids}
               ok err))
(s/defn dissoc-workflow-alerts
  [workflow-id :- s/Int alert-ids :- [s/Int] ok err]
  (new-request :post "/v1/workflows/actions/dissoc-alerts"
               {:workflow/id workflow-id :alert/ids alert-ids}
               ok err))
;; Current authenticated user (server derives identity from the bearer token).
(s/defn fetch-current-user
  [ok err]
  (new-request :get "/v1/users" ok err))
|
24923
|
(ns jsk.net.api
(:require [e85th.ui.net.rpc :as rpc]
[taoensso.timbre :as log]
[jsk.common.data :as data]
[re-frame.core :as rf]
[schema.core :as s]
[e85th.ui.rf.macros :refer-macros [defevent-fx]]
[e85th.ui.browser :as browser]
[e85th.ui.util :as u]))
(def auth-error-handler (atom (constantly {})))
(defn set-auth-err-handler!
"f receives the response when there is an auth error. It should return
an empty map or a map that re-frame's fx event handling can process."
[f]
(reset! auth-error-handler f))
(def auth-err-status #{401 403})
(s/defn ^:private full-url
[url-path]
(str (data/api-host) "/api" url-path))
(defn suggest-url
[]
(full-url "/v1/search/suggest"))
(defevent-fx request-err
[_ [_ orig-err-vector {:keys [status] :as response}]]
;; if the response code is 401/403 then send to login
;; otherwise dispatch the original event
(if (auth-err-status status)
(@auth-error-handler response)
{:dispatch (conj orig-err-vector response)}))
(s/defn new-request*
([method url ok err]
(new-request* method url {} ok err))
([method url params ok err]
(-> (rpc/new-re-frame-request method (full-url url) params ok err)
rpc/with-edn-format
(rpc/with-bearer-auth (data/jsk-token)))))
(s/defn new-request
"Same as new-request but with handling of auth errors by request-err"
([method url ok err]
(new-request method url {} ok err))
([method url params ok err]
(new-request* method url params ok [request-err err])))
(s/defn call!
"For testing really. Use effects to actually make calls.
re-frame handling. ok and err can be either keywords or a vector.
If vector then the first should be a keyword to conform to re-frame dispatch
semantics."
([method url params ok err]
(call! (new-request method url params ok err)))
([req]
(let [ensure-handler-fn (fn [{:keys [handler error-handler] :as r}]
(cond-> r
(vector? handler) (assoc :handler #(rf/dispatch (conj handler %)))
(vector? error-handler) (assoc :error-handler #(rf/dispatch (conj error-handler %)))))
normalize (comp ensure-handler-fn)]
(rpc/call (normalize req)))))
(defn- authenticate
[body ok err]
;; uses new-request* to make sure the general auth errors are not handled
;; by the interceptor, but by the passed in err handler
(new-request* :post "/v1/users/actions/authenticate" body ok err))
(s/defn auth-google
"Generates a request map that can be executed by call!"
[token :- s/Str ok err]
(authenticate {:with-google {:token token}} ok err))
(s/defn auth-password
[email :- s/Str pass :- s/Str ok err]
(authenticate {:with-password {:email email
:password <PASSWORD>}}
ok err))
(s/defn save*
"Does either a post or put."
[base-url id-kw data ok err]
(let [entity-id (id-kw data)
[method url] (if entity-id
[:put (str base-url "/" entity-id)]
[:post base-url])]
(new-request method url data ok err)))
;; -- Agents
(s/defn fetch-agent-list
[ok err]
(new-request :get "/v1/agents" ok err))
(s/defn save-agent
[agent ok err]
(save* "/v1/agents" :db/id agent ok err))
(s/defn rm-agent
[id ok err]
(new-request :delete (str "/v1/agents/" id) ok err))
(s/defn fetch-agent
[agent-id ok err]
(new-request :get (str "/v1/agents/" agent-id) ok err))
;; -- Alerts
(s/defn fetch-alert-list
[ok err]
(new-request :get "/v1/alerts" ok err))
(s/defn save-alert
[alert ok err]
(save* "/v1/alerts" :db/id alert ok err))
(s/defn rm-alert
[id ok err]
(new-request :delete (str "/v1/alerts/" id) ok err))
(s/defn fetch-alert
[alert-id ok err]
(new-request :get (str "/v1/alerts/" alert-id) ok err))
;; -- Schedules
(s/defn fetch-schedule-list
[ok err]
(new-request :get "/v1/schedules" ok err))
(s/defn save-schedule
[schedule ok err]
(save* "/v1/schedules" :db/id schedule ok err))
(s/defn rm-schedule
[id ok err]
(new-request :delete (str "/v1/schedules/" id) ok err))
(s/defn fetch-schedule
[schedule-id ok err]
(new-request :get (str "/v1/schedules/" schedule-id) ok err))
;; -- Workflows
(s/defn fetch-workflow-list
[ok err]
(new-request :get "/v1/workflows" ok err))
(s/defn save-workflow
[workflow ok err]
(save* "/v1/workflows" :db/id workflow ok err))
(s/defn rm-workflow
[id ok err]
(new-request :delete (str "/v1/workflows/" id) ok err))
(s/defn fetch-workflow
[workflow-id ok err]
(new-request :get (str "/v1/workflows/" workflow-id) ok err))
;; -- Jobs
(s/defn fetch-job-list
[ok err]
(new-request :get "/v1/jobs" ok err))
(s/defn save-job
[job ok err]
(save* "/v1/jobs" :db/id job ok err))
(s/defn rm-job
[id ok err]
(new-request :delete (str "/v1/jobs/" id) ok err))
(s/defn fetch-job
[job-id ok err]
(new-request :get (str "/v1/jobs/" job-id) ok err))
(s/defn fetch-job-types
[ok err]
(new-request :get "/v1/job-types" ok err))
(s/defn suggest-channels
[text ok err]
(new-request :get "/v1/search/suggest" {:q text} ok err))
(s/defn assoc-alert-channels
[alert-id :- s/Int channel-ids :- [s/Int] ok err]
(new-request :post "/v1/alerts/actions/assoc-channels"
{:alert/id alert-id :channel/ids channel-ids}
ok err))
(s/defn dissoc-alert-channels
[alert-id :- s/Int channel-ids :- [s/Int] ok err]
(new-request :post "/v1/alerts/actions/dissoc-channels"
{:alert/id alert-id :channel/ids channel-ids}
ok err))
(s/defn fetch-explorer-nodes
[type :- s/Str ok err]
(new-request :get "/v1/explorer" {:type type} ok err))
(s/defn create-explorer-node
[type :- s/Str ok err]
(new-request :post "/v1/explorer/actions/create-node" {:type type} ok err))
(s/defn rm-explorer-node
[type :- s/Str id ok err]
(new-request :post "/v1/explorer/actions/rm-node" {:type type
:id id} ok err))
(s/defn assoc-job-schedules
[job-id :- s/Int schedule-ids :- [s/Int] ok err]
(new-request :post "/v1/jobs/actions/assoc-schedules"
{:job/id job-id :schedule/ids schedule-ids}
ok err))
(s/defn dissoc-job-schedules
[job-id :- s/Int schedule-ids :- [s/Int] ok err]
(new-request :post "/v1/jobs/actions/dissoc-schedules"
{:job/id job-id :schedule/ids schedule-ids}
ok err))
(s/defn assoc-job-alerts
[job-id :- s/Int alert-ids :- [s/Int] ok err]
(new-request :post "/v1/jobs/actions/assoc-alerts"
{:job/id job-id :alert/ids alert-ids}
ok err))
(s/defn dissoc-job-alerts
[job-id :- s/Int alert-ids :- [s/Int] ok err]
(new-request :post "/v1/jobs/actions/dissoc-alerts"
{:job/id job-id :alert/ids alert-ids}
ok err))
(s/defn assoc-workflow-schedules
[workflow-id :- s/Int schedule-ids :- [s/Int] ok err]
(new-request :post "/v1/workflows/actions/assoc-schedules"
{:workflow/id workflow-id :schedule/ids schedule-ids}
ok err))
(s/defn dissoc-workflow-schedules
[workflow-id :- s/Int schedule-ids :- [s/Int] ok err]
(new-request :post "/v1/workflows/actions/dissoc-schedules"
{:workflow/id workflow-id :schedule/ids schedule-ids}
ok err))
(s/defn assoc-workflow-alerts
[workflow-id :- s/Int alert-ids :- [s/Int] ok err]
(new-request :post "/v1/workflows/actions/assoc-alerts"
{:workflow/id workflow-id :alert/ids alert-ids}
ok err))
(s/defn dissoc-workflow-alerts
[workflow-id :- s/Int alert-ids :- [s/Int] ok err]
(new-request :post "/v1/workflows/actions/dissoc-alerts"
{:workflow/id workflow-id :alert/ids alert-ids}
ok err))
(s/defn fetch-current-user
[ok err]
(new-request :get "/v1/users" ok err))
| true |
(ns jsk.net.api
(:require [e85th.ui.net.rpc :as rpc]
[taoensso.timbre :as log]
[jsk.common.data :as data]
[re-frame.core :as rf]
[schema.core :as s]
[e85th.ui.rf.macros :refer-macros [defevent-fx]]
[e85th.ui.browser :as browser]
[e85th.ui.util :as u]))
(def auth-error-handler (atom (constantly {})))
(defn set-auth-err-handler!
"f receives the response when there is an auth error. It should return
an empty map or a map that re-frame's fx event handling can process."
[f]
(reset! auth-error-handler f))
(def auth-err-status #{401 403})
(s/defn ^:private full-url
[url-path]
(str (data/api-host) "/api" url-path))
(defn suggest-url
[]
(full-url "/v1/search/suggest"))
(defevent-fx request-err
[_ [_ orig-err-vector {:keys [status] :as response}]]
;; if the response code is 401/403 then send to login
;; otherwise dispatch the original event
(if (auth-err-status status)
(@auth-error-handler response)
{:dispatch (conj orig-err-vector response)}))
(s/defn new-request*
([method url ok err]
(new-request* method url {} ok err))
([method url params ok err]
(-> (rpc/new-re-frame-request method (full-url url) params ok err)
rpc/with-edn-format
(rpc/with-bearer-auth (data/jsk-token)))))
(s/defn new-request
"Same as new-request but with handling of auth errors by request-err"
([method url ok err]
(new-request method url {} ok err))
([method url params ok err]
(new-request* method url params ok [request-err err])))
(s/defn call!
"For testing really. Use effects to actually make calls.
re-frame handling. ok and err can be either keywords or a vector.
If vector then the first should be a keyword to conform to re-frame dispatch
semantics."
([method url params ok err]
(call! (new-request method url params ok err)))
([req]
(let [ensure-handler-fn (fn [{:keys [handler error-handler] :as r}]
(cond-> r
(vector? handler) (assoc :handler #(rf/dispatch (conj handler %)))
(vector? error-handler) (assoc :error-handler #(rf/dispatch (conj error-handler %)))))
normalize (comp ensure-handler-fn)]
(rpc/call (normalize req)))))
(defn- authenticate
[body ok err]
;; uses new-request* to make sure the general auth errors are not handled
;; by the interceptor, but by the passed in err handler
(new-request* :post "/v1/users/actions/authenticate" body ok err))
(s/defn auth-google
"Generates a request map that can be executed by call!"
[token :- s/Str ok err]
(authenticate {:with-google {:token token}} ok err))
(s/defn auth-password
[email :- s/Str pass :- s/Str ok err]
(authenticate {:with-password {:email email
:password PI:PASSWORD:<PASSWORD>END_PI}}
ok err))
(s/defn save*
"Does either a post or put."
[base-url id-kw data ok err]
(let [entity-id (id-kw data)
[method url] (if entity-id
[:put (str base-url "/" entity-id)]
[:post base-url])]
(new-request method url data ok err)))
;; -- Agents
(s/defn fetch-agent-list
[ok err]
(new-request :get "/v1/agents" ok err))
(s/defn save-agent
[agent ok err]
(save* "/v1/agents" :db/id agent ok err))
(s/defn rm-agent
[id ok err]
(new-request :delete (str "/v1/agents/" id) ok err))
(s/defn fetch-agent
[agent-id ok err]
(new-request :get (str "/v1/agents/" agent-id) ok err))
;; -- Alerts
(s/defn fetch-alert-list
[ok err]
(new-request :get "/v1/alerts" ok err))
(s/defn save-alert
[alert ok err]
(save* "/v1/alerts" :db/id alert ok err))
(s/defn rm-alert
[id ok err]
(new-request :delete (str "/v1/alerts/" id) ok err))
(s/defn fetch-alert
[alert-id ok err]
(new-request :get (str "/v1/alerts/" alert-id) ok err))
;; -- Schedules
(s/defn fetch-schedule-list
[ok err]
(new-request :get "/v1/schedules" ok err))
(s/defn save-schedule
[schedule ok err]
(save* "/v1/schedules" :db/id schedule ok err))
(s/defn rm-schedule
[id ok err]
(new-request :delete (str "/v1/schedules/" id) ok err))
(s/defn fetch-schedule
[schedule-id ok err]
(new-request :get (str "/v1/schedules/" schedule-id) ok err))
;; -- Workflows
(s/defn fetch-workflow-list
[ok err]
(new-request :get "/v1/workflows" ok err))
(s/defn save-workflow
[workflow ok err]
(save* "/v1/workflows" :db/id workflow ok err))
(s/defn rm-workflow
[id ok err]
(new-request :delete (str "/v1/workflows/" id) ok err))
(s/defn fetch-workflow
[workflow-id ok err]
(new-request :get (str "/v1/workflows/" workflow-id) ok err))
;; -- Jobs
(s/defn fetch-job-list
[ok err]
(new-request :get "/v1/jobs" ok err))
(s/defn save-job
[job ok err]
(save* "/v1/jobs" :db/id job ok err))
(s/defn rm-job
[id ok err]
(new-request :delete (str "/v1/jobs/" id) ok err))
(s/defn fetch-job
[job-id ok err]
(new-request :get (str "/v1/jobs/" job-id) ok err))
(s/defn fetch-job-types
[ok err]
(new-request :get "/v1/job-types" ok err))
(s/defn suggest-channels
[text ok err]
(new-request :get "/v1/search/suggest" {:q text} ok err))
(s/defn assoc-alert-channels
[alert-id :- s/Int channel-ids :- [s/Int] ok err]
(new-request :post "/v1/alerts/actions/assoc-channels"
{:alert/id alert-id :channel/ids channel-ids}
ok err))
(s/defn dissoc-alert-channels
[alert-id :- s/Int channel-ids :- [s/Int] ok err]
(new-request :post "/v1/alerts/actions/dissoc-channels"
{:alert/id alert-id :channel/ids channel-ids}
ok err))
(s/defn fetch-explorer-nodes
[type :- s/Str ok err]
(new-request :get "/v1/explorer" {:type type} ok err))
(s/defn create-explorer-node
[type :- s/Str ok err]
(new-request :post "/v1/explorer/actions/create-node" {:type type} ok err))
(s/defn rm-explorer-node
[type :- s/Str id ok err]
(new-request :post "/v1/explorer/actions/rm-node" {:type type
:id id} ok err))
(s/defn assoc-job-schedules
[job-id :- s/Int schedule-ids :- [s/Int] ok err]
(new-request :post "/v1/jobs/actions/assoc-schedules"
{:job/id job-id :schedule/ids schedule-ids}
ok err))
(s/defn dissoc-job-schedules
[job-id :- s/Int schedule-ids :- [s/Int] ok err]
(new-request :post "/v1/jobs/actions/dissoc-schedules"
{:job/id job-id :schedule/ids schedule-ids}
ok err))
(s/defn assoc-job-alerts
[job-id :- s/Int alert-ids :- [s/Int] ok err]
(new-request :post "/v1/jobs/actions/assoc-alerts"
{:job/id job-id :alert/ids alert-ids}
ok err))
(s/defn dissoc-job-alerts
[job-id :- s/Int alert-ids :- [s/Int] ok err]
(new-request :post "/v1/jobs/actions/dissoc-alerts"
{:job/id job-id :alert/ids alert-ids}
ok err))
(s/defn assoc-workflow-schedules
[workflow-id :- s/Int schedule-ids :- [s/Int] ok err]
(new-request :post "/v1/workflows/actions/assoc-schedules"
{:workflow/id workflow-id :schedule/ids schedule-ids}
ok err))
(s/defn dissoc-workflow-schedules
[workflow-id :- s/Int schedule-ids :- [s/Int] ok err]
(new-request :post "/v1/workflows/actions/dissoc-schedules"
{:workflow/id workflow-id :schedule/ids schedule-ids}
ok err))
(s/defn assoc-workflow-alerts
[workflow-id :- s/Int alert-ids :- [s/Int] ok err]
(new-request :post "/v1/workflows/actions/assoc-alerts"
{:workflow/id workflow-id :alert/ids alert-ids}
ok err))
(s/defn dissoc-workflow-alerts
[workflow-id :- s/Int alert-ids :- [s/Int] ok err]
(new-request :post "/v1/workflows/actions/dissoc-alerts"
{:workflow/id workflow-id :alert/ids alert-ids}
ok err))
(s/defn fetch-current-user
[ok err]
(new-request :get "/v1/users" ok err))
|
[
{
"context": ";; Copyright 2018 Chris Rink\n;;\n;; Licensed under the Apache License, Version ",
"end": 28,
"score": 0.9998577237129211,
"start": 18,
"tag": "NAME",
"value": "Chris Rink"
}
] |
env/dev/src/dev.clj
|
chrisrink10/repopreview
| 0 |
;; Copyright 2018 Chris Rink
;;
;; Licensed under the Apache License, Version 2.0 (the "License");
;; you may not use this file except in compliance with the License.
;; You may obtain a copy of the License at
;;
;; http://www.apache.org/licenses/LICENSE-2.0
;;
;; Unless required by applicable law or agreed to in writing, software
;; distributed under the License is distributed on an "AS IS" BASIS,
;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
;; See the License for the specific language governing permissions and
;; limitations under the License.
(ns dev
(:require
[clojure.test :as test]
[clojure.tools.namespace.repl :as repl]
[mount.core :as mount]
[repopreview.config]
[repopreview.logging]
[repopreview.web-server]))
(defn start
[]
(mount/start)
:ready)
(defn stop
[]
(mount/stop))
(defn reset
[]
(stop)
(repl/refresh :after 'dev/start))
(defn run-all-tests
[]
(repl/refresh)
(test/run-all-tests #"repopreview.*-test"))
|
105866
|
;; Copyright 2018 <NAME>
;;
;; Licensed under the Apache License, Version 2.0 (the "License");
;; you may not use this file except in compliance with the License.
;; You may obtain a copy of the License at
;;
;; http://www.apache.org/licenses/LICENSE-2.0
;;
;; Unless required by applicable law or agreed to in writing, software
;; distributed under the License is distributed on an "AS IS" BASIS,
;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
;; See the License for the specific language governing permissions and
;; limitations under the License.
(ns dev
(:require
[clojure.test :as test]
[clojure.tools.namespace.repl :as repl]
[mount.core :as mount]
[repopreview.config]
[repopreview.logging]
[repopreview.web-server]))
(defn start
[]
(mount/start)
:ready)
(defn stop
[]
(mount/stop))
(defn reset
[]
(stop)
(repl/refresh :after 'dev/start))
(defn run-all-tests
[]
(repl/refresh)
(test/run-all-tests #"repopreview.*-test"))
| true |
;; Copyright 2018 PI:NAME:<NAME>END_PI
;;
;; Licensed under the Apache License, Version 2.0 (the "License");
;; you may not use this file except in compliance with the License.
;; You may obtain a copy of the License at
;;
;; http://www.apache.org/licenses/LICENSE-2.0
;;
;; Unless required by applicable law or agreed to in writing, software
;; distributed under the License is distributed on an "AS IS" BASIS,
;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
;; See the License for the specific language governing permissions and
;; limitations under the License.
(ns dev
(:require
[clojure.test :as test]
[clojure.tools.namespace.repl :as repl]
[mount.core :as mount]
[repopreview.config]
[repopreview.logging]
[repopreview.web-server]))
(defn start
[]
(mount/start)
:ready)
(defn stop
[]
(mount/stop))
(defn reset
[]
(stop)
(repl/refresh :after 'dev/start))
(defn run-all-tests
[]
(repl/refresh)
(test/run-all-tests #"repopreview.*-test"))
|
[
{
"context": "ion was provided\n on the clojure mailing list by Alistair Roche.)\"\n [& {:as kw-args}]\n `(s/merge (s/keys ~@(app",
"end": 2128,
"score": 0.9998849034309387,
"start": 2114,
"tag": "NAME",
"value": "Alistair Roche"
}
] |
cimi/src/com/sixsq/slipstream/ssclj/util/spec.cljc
|
slipstream/cimi-mf2c
| 0 |
(ns com.sixsq.slipstream.ssclj.util.spec
"Utilities that provide common spec definition patterns that aren't
supported directly by the core spec functions and macros."
(:require
[clojure.set :as set]
[clojure.spec.alpha :as s]
[clojure.spec.gen.alpha :as gen]
[clojure.string :as str]))
(def ^:private all-ascii-chars (map str (map char (range 0 256))))
(defn- regex-chars
"Provides a list of ASCII characters that satisfy the regex pattern."
[pattern]
(set (filter #(re-matches pattern %) all-ascii-chars)))
(defn merge-kw-lists
"Merges two lists (or seqs) of namespaced keywords. The results will
be a sorted vector with duplicates removed."
[kws1 kws2]
(vec (sort (set/union (set kws1) (set kws2)))))
(defn merge-keys-specs
"Merges the given clojure.spec/keys specs provided as a list of maps.
All the arguments are eval'ed and must evaluate to map constants."
[map-specs]
(->> map-specs
(map eval)
(apply merge-with merge-kw-lists)))
(defn unnamespaced-kws
"Removes the namespaces from the provided list of keywords
and returns the resulting set."
[kws]
(set (map (comp keyword name) kws)))
(defn allowed-keys
"Returns a set of all the allowed keys from a clojure.spec/keys
specification provided as a map."
[{:keys [req req-un opt opt-un]}]
(set (concat req
(unnamespaced-kws req-un)
opt
(unnamespaced-kws opt-un))))
(defmacro regex-string
"Creates a string spec that matches the given regex with a generator
that randomly selects from the ASCII characters identified by the
char-pattern."
[char-pattern regex]
(let [allowed-chars (regex-chars char-pattern)]
`(s/with-gen (s/and string? #(re-matches ~regex %))
(constantly (gen/fmap str/join (gen/vector (s/gen ~allowed-chars)))))))
(defmacro only-keys
"Creates a closed map definition where only the defined keys are
permitted. The arguments must be literals, using the same function
signature as clojure.spec/keys. (This implementation was provided
on the clojure mailing list by Alistair Roche.)"
[& {:as kw-args}]
`(s/merge (s/keys ~@(apply concat (vec kw-args)))
(s/map-of ~(allowed-keys kw-args) any?)))
(defmacro only-keys-maps
"Creates a closed map definition from one or more maps that contain
key specifications as for clojure.spec/keys. All of the arguments
are eval'ed, so they may be vars containing the definition(s). All
of the arguments must evaluate to compile-time map constants."
[& map-specs]
(let [map-spec (merge-keys-specs map-specs)]
`(s/merge (s/keys ~@(apply concat (vec map-spec)))
(s/map-of ~(allowed-keys map-spec) any?))))
(defmacro constrained-map
"Creates an open map spec using the supplied keys specs with the
additional constraint that all unspecified entries must match the
given key and value specs. The keys specs will be evaluated."
[key-spec value-spec & map-specs]
(let [map-spec (merge-keys-specs map-specs)]
`(s/merge
(s/every (s/or :attrs (s/tuple ~(allowed-keys map-spec) any?)
:link (s/tuple ~key-spec ~value-spec)))
(s/keys ~@(apply concat (vec map-spec))))))
|
63022
|
(ns com.sixsq.slipstream.ssclj.util.spec
"Utilities that provide common spec definition patterns that aren't
supported directly by the core spec functions and macros."
(:require
[clojure.set :as set]
[clojure.spec.alpha :as s]
[clojure.spec.gen.alpha :as gen]
[clojure.string :as str]))
(def ^:private all-ascii-chars (map str (map char (range 0 256))))
(defn- regex-chars
"Provides a list of ASCII characters that satisfy the regex pattern."
[pattern]
(set (filter #(re-matches pattern %) all-ascii-chars)))
(defn merge-kw-lists
"Merges two lists (or seqs) of namespaced keywords. The results will
be a sorted vector with duplicates removed."
[kws1 kws2]
(vec (sort (set/union (set kws1) (set kws2)))))
(defn merge-keys-specs
"Merges the given clojure.spec/keys specs provided as a list of maps.
All the arguments are eval'ed and must evaluate to map constants."
[map-specs]
(->> map-specs
(map eval)
(apply merge-with merge-kw-lists)))
(defn unnamespaced-kws
"Removes the namespaces from the provided list of keywords
and returns the resulting set."
[kws]
(set (map (comp keyword name) kws)))
(defn allowed-keys
"Returns a set of all the allowed keys from a clojure.spec/keys
specification provided as a map."
[{:keys [req req-un opt opt-un]}]
(set (concat req
(unnamespaced-kws req-un)
opt
(unnamespaced-kws opt-un))))
(defmacro regex-string
"Creates a string spec that matches the given regex with a generator
that randomly selects from the ASCII characters identified by the
char-pattern."
[char-pattern regex]
(let [allowed-chars (regex-chars char-pattern)]
`(s/with-gen (s/and string? #(re-matches ~regex %))
(constantly (gen/fmap str/join (gen/vector (s/gen ~allowed-chars)))))))
(defmacro only-keys
"Creates a closed map definition where only the defined keys are
permitted. The arguments must be literals, using the same function
signature as clojure.spec/keys. (This implementation was provided
on the clojure mailing list by <NAME>.)"
[& {:as kw-args}]
`(s/merge (s/keys ~@(apply concat (vec kw-args)))
(s/map-of ~(allowed-keys kw-args) any?)))
(defmacro only-keys-maps
"Creates a closed map definition from one or more maps that contain
key specifications as for clojure.spec/keys. All of the arguments
are eval'ed, so they may be vars containing the definition(s). All
of the arguments must evaluate to compile-time map constants."
[& map-specs]
(let [map-spec (merge-keys-specs map-specs)]
`(s/merge (s/keys ~@(apply concat (vec map-spec)))
(s/map-of ~(allowed-keys map-spec) any?))))
(defmacro constrained-map
"Creates an open map spec using the supplied keys specs with the
additional constraint that all unspecified entries must match the
given key and value specs. The keys specs will be evaluated."
[key-spec value-spec & map-specs]
(let [map-spec (merge-keys-specs map-specs)]
`(s/merge
(s/every (s/or :attrs (s/tuple ~(allowed-keys map-spec) any?)
:link (s/tuple ~key-spec ~value-spec)))
(s/keys ~@(apply concat (vec map-spec))))))
| true |
(ns com.sixsq.slipstream.ssclj.util.spec
"Utilities that provide common spec definition patterns that aren't
supported directly by the core spec functions and macros."
(:require
[clojure.set :as set]
[clojure.spec.alpha :as s]
[clojure.spec.gen.alpha :as gen]
[clojure.string :as str]))
(def ^:private all-ascii-chars (map str (map char (range 0 256))))
(defn- regex-chars
"Provides a list of ASCII characters that satisfy the regex pattern."
[pattern]
(set (filter #(re-matches pattern %) all-ascii-chars)))
(defn merge-kw-lists
"Merges two lists (or seqs) of namespaced keywords. The results will
be a sorted vector with duplicates removed."
[kws1 kws2]
(vec (sort (set/union (set kws1) (set kws2)))))
(defn merge-keys-specs
"Merges the given clojure.spec/keys specs provided as a list of maps.
All the arguments are eval'ed and must evaluate to map constants."
[map-specs]
(->> map-specs
(map eval)
(apply merge-with merge-kw-lists)))
(defn unnamespaced-kws
"Removes the namespaces from the provided list of keywords
and returns the resulting set."
[kws]
(set (map (comp keyword name) kws)))
(defn allowed-keys
"Returns a set of all the allowed keys from a clojure.spec/keys
specification provided as a map."
[{:keys [req req-un opt opt-un]}]
(set (concat req
(unnamespaced-kws req-un)
opt
(unnamespaced-kws opt-un))))
(defmacro regex-string
"Creates a string spec that matches the given regex with a generator
that randomly selects from the ASCII characters identified by the
char-pattern."
[char-pattern regex]
(let [allowed-chars (regex-chars char-pattern)]
`(s/with-gen (s/and string? #(re-matches ~regex %))
(constantly (gen/fmap str/join (gen/vector (s/gen ~allowed-chars)))))))
(defmacro only-keys
"Creates a closed map definition where only the defined keys are
permitted. The arguments must be literals, using the same function
signature as clojure.spec/keys. (This implementation was provided
on the clojure mailing list by PI:NAME:<NAME>END_PI.)"
[& {:as kw-args}]
`(s/merge (s/keys ~@(apply concat (vec kw-args)))
(s/map-of ~(allowed-keys kw-args) any?)))
(defmacro only-keys-maps
"Creates a closed map definition from one or more maps that contain
key specifications as for clojure.spec/keys. All of the arguments
are eval'ed, so they may be vars containing the definition(s). All
of the arguments must evaluate to compile-time map constants."
[& map-specs]
(let [map-spec (merge-keys-specs map-specs)]
`(s/merge (s/keys ~@(apply concat (vec map-spec)))
(s/map-of ~(allowed-keys map-spec) any?))))
(defmacro constrained-map
"Creates an open map spec using the supplied keys specs with the
additional constraint that all unspecified entries must match the
given key and value specs. The keys specs will be evaluated."
[key-spec value-spec & map-specs]
(let [map-spec (merge-keys-specs map-specs)]
`(s/merge
(s/every (s/or :attrs (s/tuple ~(allowed-keys map-spec) any?)
:link (s/tuple ~key-spec ~value-spec)))
(s/keys ~@(apply concat (vec map-spec))))))
|
[
{
"context": "ributed DAP/MAP commodity market.\"\n :author \"Anna Shchiptsova\"}\n phosphorus-markets.core\n (:require [clojure.j",
"end": 582,
"score": 0.9998780488967896,
"start": 566,
"tag": "NAME",
"value": "Anna Shchiptsova"
}
] |
clj/phosphorus-markets/src/phosphorus_markets/core.clj
|
shchipts/phosphorus-affordability
| 0 |
; Copyright (c) 2020 International Institute for Applied Systems Analysis.
; All rights reserved. The use and distribution terms for this software
; are covered by the MIT License (http://opensource.org/licenses/MIT)
; which can be found in the file LICENSE at the root of this distribution.
; By using this software in any fashion, you are agreeing to be bound by
; the terms of this license.
; You must not remove this notice, or any other, from this software.
(ns ^{:doc "Equilibrium prices of distributed DAP/MAP commodity market."
:author "Anna Shchiptsova"}
phosphorus-markets.core
(:require [clojure.java.io :as io]
[phosphorus-markets.provider :as provider]
[phosphorus-markets.simulator :as sim]
[utilities-clj.cmd :as cmd]
[utilities-clj.reader :as reader]
[utilities-clj.writer :as writer])
(:gen-class))
(def ^{:private true} cli-args
"Command line arguments."
{:required
[["settings-path" "Path to the file with settings"]]})
(def ^:private cli-options
"Command line options."
[["-k" "--number-of-chunks K" "Chunk auctions into K-sized slices"
:id :k
:default 10
:parse-fn #(Integer/parseInt %)]
["-s" "--save PATH" "PATH to folder for output writing"
:id :save
:default "bin"]])
(defn- write
"Wraps saving data to disk."
[{folder :save} i id m]
(writer/csv-file folder
(str (name id)
" ("
i
").csv")
(provider/to m)))
(defn -main
"Determines competitive equilibrium prices of distributed DAP/MAP
commodity market.
The algorithm runs the generalized English auction for differentiated
DAP/MAP markets. Executes price adjustment procedure for different model
parameterization provided in input files."
[& args]
(cmd/terminal
{:short-desc
"Equilibrium prices of distributed DAP/MAP commodity market."
:args args
:args-desc cli-args
:options cli-options
:execute
(fn [[pars] options]
(time
(->> (io/file pars)
.getParent
(provider/from (reader/load-edn pars)
reader/read-csv)
((fn [coll]
(println (str "Total number of auctions: " (count coll)))
(sim/prun coll options)))
(map-indexed #(do
(doseq [m %2]
(apply write options m))
(println (str "Chunk "
(inc %1)
" of "
(:k options)
" auctions executed."))))
doall
((fn [_]
(println (str "Results saved to: \""
(:save options)
"\"")))))))}))
|
109891
|
; Copyright (c) 2020 International Institute for Applied Systems Analysis.
; All rights reserved. The use and distribution terms for this software
; are covered by the MIT License (http://opensource.org/licenses/MIT)
; which can be found in the file LICENSE at the root of this distribution.
; By using this software in any fashion, you are agreeing to be bound by
; the terms of this license.
; You must not remove this notice, or any other, from this software.
(ns ^{:doc "Equilibrium prices of distributed DAP/MAP commodity market."
:author "<NAME>"}
phosphorus-markets.core
(:require [clojure.java.io :as io]
[phosphorus-markets.provider :as provider]
[phosphorus-markets.simulator :as sim]
[utilities-clj.cmd :as cmd]
[utilities-clj.reader :as reader]
[utilities-clj.writer :as writer])
(:gen-class))
(def ^{:private true} cli-args
"Command line arguments."
{:required
[["settings-path" "Path to the file with settings"]]})
(def ^:private cli-options
"Command line options."
[["-k" "--number-of-chunks K" "Chunk auctions into K-sized slices"
:id :k
:default 10
:parse-fn #(Integer/parseInt %)]
["-s" "--save PATH" "PATH to folder for output writing"
:id :save
:default "bin"]])
(defn- write
"Wraps saving data to disk."
[{folder :save} i id m]
(writer/csv-file folder
(str (name id)
" ("
i
").csv")
(provider/to m)))
(defn -main
"Determines competitive equilibrium prices of distributed DAP/MAP
commodity market.
The algorithm runs the generalized English auction for differentiated
DAP/MAP markets. Executes price adjustment procedure for different model
parameterization provided in input files."
[& args]
(cmd/terminal
{:short-desc
"Equilibrium prices of distributed DAP/MAP commodity market."
:args args
:args-desc cli-args
:options cli-options
:execute
(fn [[pars] options]
(time
(->> (io/file pars)
.getParent
(provider/from (reader/load-edn pars)
reader/read-csv)
((fn [coll]
(println (str "Total number of auctions: " (count coll)))
(sim/prun coll options)))
(map-indexed #(do
(doseq [m %2]
(apply write options m))
(println (str "Chunk "
(inc %1)
" of "
(:k options)
" auctions executed."))))
doall
((fn [_]
(println (str "Results saved to: \""
(:save options)
"\"")))))))}))
| true |
; Copyright (c) 2020 International Institute for Applied Systems Analysis.
; All rights reserved. The use and distribution terms for this software
; are covered by the MIT License (http://opensource.org/licenses/MIT)
; which can be found in the file LICENSE at the root of this distribution.
; By using this software in any fashion, you are agreeing to be bound by
; the terms of this license.
; You must not remove this notice, or any other, from this software.
(ns ^{:doc "Equilibrium prices of distributed DAP/MAP commodity market."
:author "PI:NAME:<NAME>END_PI"}
phosphorus-markets.core
(:require [clojure.java.io :as io]
[phosphorus-markets.provider :as provider]
[phosphorus-markets.simulator :as sim]
[utilities-clj.cmd :as cmd]
[utilities-clj.reader :as reader]
[utilities-clj.writer :as writer])
(:gen-class))
(def ^{:private true} cli-args
"Command line arguments."
{:required
[["settings-path" "Path to the file with settings"]]})
(def ^:private cli-options
"Command line options."
[["-k" "--number-of-chunks K" "Chunk auctions into K-sized slices"
:id :k
:default 10
:parse-fn #(Integer/parseInt %)]
["-s" "--save PATH" "PATH to folder for output writing"
:id :save
:default "bin"]])
(defn- write
"Wraps saving data to disk."
[{folder :save} i id m]
(writer/csv-file folder
(str (name id)
" ("
i
").csv")
(provider/to m)))
(defn -main
"Determines competitive equilibrium prices of distributed DAP/MAP
commodity market.
The algorithm runs the generalized English auction for differentiated
DAP/MAP markets. Executes price adjustment procedure for different model
parameterization provided in input files."
[& args]
(cmd/terminal
{:short-desc
"Equilibrium prices of distributed DAP/MAP commodity market."
:args args
:args-desc cli-args
:options cli-options
:execute
(fn [[pars] options]
(time
(->> (io/file pars)
.getParent
(provider/from (reader/load-edn pars)
reader/read-csv)
((fn [coll]
(println (str "Total number of auctions: " (count coll)))
(sim/prun coll options)))
(map-indexed #(do
(doseq [m %2]
(apply write options m))
(println (str "Chunk "
(inc %1)
" of "
(:k options)
" auctions executed."))))
doall
((fn [_]
(println (str "Results saved to: \""
(:save options)
"\"")))))))}))
|
[
{
"context": "n't compile macros because I'm not serving them. -adrian\")\n\n (if (= path \"membrane/macros\")\n {:lang :c",
"end": 1215,
"score": 0.9241909980773926,
"start": 1209,
"tag": "NAME",
"value": "adrian"
}
] |
src/membrane/eval.cljs
|
rgkirch/membrane
| 287 |
(ns membrane.eval
(:require [cljs.js :as cljs]
[cljs.core.async :refer [put! chan <! timeout dropping-buffer promise-chan]
:as async])
(:import [goog.net XhrIo]
goog.string))
(defn wrap-js-eval [resource]
(try
;; (println (:source resource))
;; {:value (cljs/js-eval resource)}
(cljs/js-eval resource)
(catch js/Object e
;; (.log js/console e)
{:error e})))
(let [cache (atom {})]
(defn get-file [url cb]
(if (contains? @cache url)
(cb (get @cache url))
(.send XhrIo url
(fn [e]
(let [response (.. e -target getResponseText)]
(swap! cache assoc url response)
(cb response)))))))
(def fake-spec
"
(ns com.rpl.specter)
(declare ATOM ALL FIRST LAST MAP-VALS META)
")
(def macros-clj
"(ns membrane.macros)
(defmacro test-macro [& body]
`(do
(prn \"comp\" 42 ~@body)
[42 ~@body]))
"
)
(defmacro test-macro [& body] `(do (prn "comp" 42 ~@body) [42 ~@body]))
(defn default-load-fn [{:keys [name macros path] :as m} cb]
(prn "trying to load" m)
;; (assert (not macros) "can't compile macros because I'm not serving them. -adrian")
(if (= path "membrane/macros")
{:lang :clj
:cache true
:source macros-clj}
(if (not= (.indexOf path "spec") -1)
(do
(cb {:lang :clj
:cache true
:source fake-spec}))
(if-let [path ({"goog/string" "goog/string/string.js"
"goog/string/StringBuffer" "goog/string/stringbuffer.js"
"com/rpl/specter/util_macros" "/util_macros.clj"
"goog/object" "goog/object/object.js"
"goog/array" "goog/array/array.js"} path)]
(let [url (if (.startsWith path "/" )
path
(str "/js/compiled/out.autouitest/" path))]
(get-file url
(fn [source]
(println "got url" url)
(cb {:lang (if (.endsWith url ".js")
:js
:clj)
:cache true
:source source}))))
(let [macro-map {"cljs/tools/reader/reader_types" "reader_types.clj"
"cljs/reader" "reader.clj"
"cljs/env/macros" "macros.clj"
"cljs/analyzer/macros" "macros.clj.1"}]
(if (and macros
(macro-map path))
(let [url (str "/"
(macro-map path))]
(get-file url
(fn [source]
(println "got url" url)
(cb {:lang (if (.endsWith url ".js")
:js
:clj)
:cache true
:source source}))))
(if (or macros
(#{"com/rpl/specter"
"com/rpl/specter/protocols"
"com/rpl/specter/impl"
"com/rpl/specter/navs"
"cljs/analyzer/api"
"cljs/analyzer"
"cljs/env"
"cljs/tagged_literals"
"membrane/ui"
"membrane/component"
} path))
(let [url (str "/js/compiled/out.autouitest/" path ".cljc")]
(get-file url
(fn [source]
(println "got url" url)
(cb {:lang :clj
:cache true
:source source}))))
(let [url (str "/js/compiled/out.autouitest/" path ".cljs")]
(get-file url
(fn [source]
(println "got url" url)
(cb {:lang :clj
:cache true
:source source}))))
)))
)))
)
(def default-compiler-options {:source-map true
:ns 'membrane.autoui
;; :context :statement
;; :verbose false
:load default-load-fn
:def-emits-var true
:eval wrap-js-eval})
(defn eval-async
([form]
(eval-async (cljs/empty-state) form
default-compiler-options))
([state form]
(let [ch (promise-chan)]
(try
(cljs/eval state form
default-compiler-options
#(put! ch %))
(catch js/Object e
(put! ch {:error e})))
ch))
([state form opts]
(let [ch (promise-chan)]
(try
(cljs/eval state form opts
#(put! ch %))
(catch js/Object e
(put! ch {:error e})))
ch)))
|
74609
|
(ns membrane.eval
(:require [cljs.js :as cljs]
[cljs.core.async :refer [put! chan <! timeout dropping-buffer promise-chan]
:as async])
(:import [goog.net XhrIo]
goog.string))
(defn wrap-js-eval [resource]
(try
;; (println (:source resource))
;; {:value (cljs/js-eval resource)}
(cljs/js-eval resource)
(catch js/Object e
;; (.log js/console e)
{:error e})))
(let [cache (atom {})]
(defn get-file [url cb]
(if (contains? @cache url)
(cb (get @cache url))
(.send XhrIo url
(fn [e]
(let [response (.. e -target getResponseText)]
(swap! cache assoc url response)
(cb response)))))))
(def fake-spec
"
(ns com.rpl.specter)
(declare ATOM ALL FIRST LAST MAP-VALS META)
")
(def macros-clj
"(ns membrane.macros)
(defmacro test-macro [& body]
`(do
(prn \"comp\" 42 ~@body)
[42 ~@body]))
"
)
(defmacro test-macro [& body] `(do (prn "comp" 42 ~@body) [42 ~@body]))
(defn default-load-fn [{:keys [name macros path] :as m} cb]
(prn "trying to load" m)
;; (assert (not macros) "can't compile macros because I'm not serving them. -<NAME>")
(if (= path "membrane/macros")
{:lang :clj
:cache true
:source macros-clj}
(if (not= (.indexOf path "spec") -1)
(do
(cb {:lang :clj
:cache true
:source fake-spec}))
(if-let [path ({"goog/string" "goog/string/string.js"
"goog/string/StringBuffer" "goog/string/stringbuffer.js"
"com/rpl/specter/util_macros" "/util_macros.clj"
"goog/object" "goog/object/object.js"
"goog/array" "goog/array/array.js"} path)]
(let [url (if (.startsWith path "/" )
path
(str "/js/compiled/out.autouitest/" path))]
(get-file url
(fn [source]
(println "got url" url)
(cb {:lang (if (.endsWith url ".js")
:js
:clj)
:cache true
:source source}))))
(let [macro-map {"cljs/tools/reader/reader_types" "reader_types.clj"
"cljs/reader" "reader.clj"
"cljs/env/macros" "macros.clj"
"cljs/analyzer/macros" "macros.clj.1"}]
(if (and macros
(macro-map path))
(let [url (str "/"
(macro-map path))]
(get-file url
(fn [source]
(println "got url" url)
(cb {:lang (if (.endsWith url ".js")
:js
:clj)
:cache true
:source source}))))
(if (or macros
(#{"com/rpl/specter"
"com/rpl/specter/protocols"
"com/rpl/specter/impl"
"com/rpl/specter/navs"
"cljs/analyzer/api"
"cljs/analyzer"
"cljs/env"
"cljs/tagged_literals"
"membrane/ui"
"membrane/component"
} path))
(let [url (str "/js/compiled/out.autouitest/" path ".cljc")]
(get-file url
(fn [source]
(println "got url" url)
(cb {:lang :clj
:cache true
:source source}))))
(let [url (str "/js/compiled/out.autouitest/" path ".cljs")]
(get-file url
(fn [source]
(println "got url" url)
(cb {:lang :clj
:cache true
:source source}))))
)))
)))
)
(def default-compiler-options {:source-map true
:ns 'membrane.autoui
;; :context :statement
;; :verbose false
:load default-load-fn
:def-emits-var true
:eval wrap-js-eval})
(defn eval-async
([form]
(eval-async (cljs/empty-state) form
default-compiler-options))
([state form]
(let [ch (promise-chan)]
(try
(cljs/eval state form
default-compiler-options
#(put! ch %))
(catch js/Object e
(put! ch {:error e})))
ch))
([state form opts]
(let [ch (promise-chan)]
(try
(cljs/eval state form opts
#(put! ch %))
(catch js/Object e
(put! ch {:error e})))
ch)))
| true |
(ns membrane.eval
(:require [cljs.js :as cljs]
[cljs.core.async :refer [put! chan <! timeout dropping-buffer promise-chan]
:as async])
(:import [goog.net XhrIo]
goog.string))
(defn wrap-js-eval [resource]
(try
;; (println (:source resource))
;; {:value (cljs/js-eval resource)}
(cljs/js-eval resource)
(catch js/Object e
;; (.log js/console e)
{:error e})))
(let [cache (atom {})]
(defn get-file [url cb]
(if (contains? @cache url)
(cb (get @cache url))
(.send XhrIo url
(fn [e]
(let [response (.. e -target getResponseText)]
(swap! cache assoc url response)
(cb response)))))))
(def fake-spec
"
(ns com.rpl.specter)
(declare ATOM ALL FIRST LAST MAP-VALS META)
")
(def macros-clj
"(ns membrane.macros)
(defmacro test-macro [& body]
`(do
(prn \"comp\" 42 ~@body)
[42 ~@body]))
"
)
(defmacro test-macro [& body] `(do (prn "comp" 42 ~@body) [42 ~@body]))
(defn default-load-fn [{:keys [name macros path] :as m} cb]
(prn "trying to load" m)
;; (assert (not macros) "can't compile macros because I'm not serving them. -PI:NAME:<NAME>END_PI")
(if (= path "membrane/macros")
{:lang :clj
:cache true
:source macros-clj}
(if (not= (.indexOf path "spec") -1)
(do
(cb {:lang :clj
:cache true
:source fake-spec}))
(if-let [path ({"goog/string" "goog/string/string.js"
"goog/string/StringBuffer" "goog/string/stringbuffer.js"
"com/rpl/specter/util_macros" "/util_macros.clj"
"goog/object" "goog/object/object.js"
"goog/array" "goog/array/array.js"} path)]
(let [url (if (.startsWith path "/" )
path
(str "/js/compiled/out.autouitest/" path))]
(get-file url
(fn [source]
(println "got url" url)
(cb {:lang (if (.endsWith url ".js")
:js
:clj)
:cache true
:source source}))))
(let [macro-map {"cljs/tools/reader/reader_types" "reader_types.clj"
"cljs/reader" "reader.clj"
"cljs/env/macros" "macros.clj"
"cljs/analyzer/macros" "macros.clj.1"}]
(if (and macros
(macro-map path))
(let [url (str "/"
(macro-map path))]
(get-file url
(fn [source]
(println "got url" url)
(cb {:lang (if (.endsWith url ".js")
:js
:clj)
:cache true
:source source}))))
(if (or macros
(#{"com/rpl/specter"
"com/rpl/specter/protocols"
"com/rpl/specter/impl"
"com/rpl/specter/navs"
"cljs/analyzer/api"
"cljs/analyzer"
"cljs/env"
"cljs/tagged_literals"
"membrane/ui"
"membrane/component"
} path))
(let [url (str "/js/compiled/out.autouitest/" path ".cljc")]
(get-file url
(fn [source]
(println "got url" url)
(cb {:lang :clj
:cache true
:source source}))))
(let [url (str "/js/compiled/out.autouitest/" path ".cljs")]
(get-file url
(fn [source]
(println "got url" url)
(cb {:lang :clj
:cache true
:source source}))))
)))
)))
)
(def default-compiler-options {:source-map true
:ns 'membrane.autoui
;; :context :statement
;; :verbose false
:load default-load-fn
:def-emits-var true
:eval wrap-js-eval})
(defn eval-async
([form]
(eval-async (cljs/empty-state) form
default-compiler-options))
([state form]
(let [ch (promise-chan)]
(try
(cljs/eval state form
default-compiler-options
#(put! ch %))
(catch js/Object e
(put! ch {:error e})))
ch))
([state form opts]
(let [ch (promise-chan)]
(try
(cljs/eval state form opts
#(put! ch %))
(catch js/Object e
(put! ch {:error e})))
ch)))
|
[
{
"context": "(ns ^{:author \"James McClain <[email protected]>\"}\n worldtree.ser",
"end": 28,
"score": 0.9998575448989868,
"start": 15,
"tag": "NAME",
"value": "James McClain"
},
{
"context": "(ns ^{:author \"James McClain <[email protected]>\"}\n worldtree.series\n (:require [clojure.set :a",
"end": 59,
"score": 0.9999328851699829,
"start": 30,
"tag": "EMAIL",
"value": "[email protected]"
}
] |
src/worldtree/series.clj
|
jamesmcclain/WorldTree
| 1 |
(ns ^{:author "James McClain <[email protected]>"}
worldtree.series
(:require [clojure.set :as set]
[clojure.java.io :as io]
[clojure.core.memoize :as memo])
(:use [gloss.core]
[gloss.io]))
(defstruct segment :ymin :ymax :m :b :i)
(defstruct node :type :y :left :middle :right)
(defstruct leaf :type :segments)
(defstruct intersection :T+t :i :j)
(defstruct change :T+t :ij)
(defstruct timestep :chunk-list :change-list)
(def change-frame (compile-frame (struct change :float64 [:uint16 :uint16]))) ; n ≤ 2^16 - 1
(def timestep-frame (compile-frame (struct timestep (repeated :uint16) (repeated change-frame))))
;; ------------------------- SEGMENTS AND RANKINGS -------------------------
;; Comparator for segments
(defn segment< [one two]
(let [{m1 :m b1 :b i1 :i} one
{m2 :m b2 :b i2 :i} two]
(cond (and (== b1 b2) (== m1 m2)) (< i1 i2)
(== b1 b2) (> m1 m2)
:else (> b1 b2))))
(defmacro snapshot
([dataset t]
`((get ~dataset :snapshot) ~t))
([dataset t i]
`(nth ((get ~dataset :snapshot) ~t) ~i)))
;; Compute all of the segments at the given time.
(defn compute-segments [dataset time]
(letfn [(compute-segment [dataset ^long t ^long i]
(let [y0 (snapshot dataset t i)
y1 (snapshot dataset (inc t) i)
m (- y1 y0)
b y0
b+m (+ b m)]
(struct segment (min b b+m) (max b b+m) m b i)))]
(map #(compute-segment dataset time %) (range (dec (:n dataset))))))
;; ------------------------- SEGMENT TREE -------------------------
;; Take a sorted list of segments and return a segment tree suitable
;; for finding overlapping segments.
(defn build-segment-tree [segments]
  (if (not (empty? segments))
    ;; Split around a randomly chosen pivot segment's vertical midpoint;
    ;; the tree's shape is therefore nondeterministic from run to run,
    ;; though query results are not affected.
    (let [median (rand-nth segments)
          y (/ (+ (:ymin median) (:ymax median)) 2)]
      (letfn [(left? [segment] (< (:ymax segment) y))   ; entirely below y
              (right? [segment] (> (:ymin segment) y))] ; entirely above y
        (let [[left middle right]
              (loop [left [] middle [] right [] segments segments]
                (if (empty? segments)
                  [left middle right] ; if done, return the lists
                  (let [segment (first segments)
                        segments (rest segments)]
                    (cond (left? segment) (recur (conj left segment) middle right segments)
                          (right? segment) (recur left middle (conj right segment) segments)
                          ;; segments straddling y stay at this node
                          :else (recur left (conj middle segment) right segments)))))]
          ;; At this point, left, middle, and right are bound to the
          ;; vectors computed in the loop.
          (if (and (empty? left) (empty? right))
            ;; Everything straddles y, so `segments` == `middle` here;
            ;; stop subdividing and store them all in a leaf.
            (struct leaf :leaf segments)
            (struct node :node y (build-segment-tree left) (build-segment-tree middle) (build-segment-tree right))))))))
;; Query the segment tree for segments that intersect q after time
;; T+t. Only the first such segment is returned.
;; Args: T is the integer tick whose unit interval is being scanned,
;; q the query segment, T+t the absolute time after which crossings
;; count. Returns the earliest qualifying intersection, or nil.
(defn- query-segment-tree [T tree q T+t]
  (letfn [(compute-intersection [T segment1 segment2]
            ;; Solve m1*t + b1 = m2*t + b2 for t; report a crossing only
            ;; when it lies strictly inside the unit interval (0,1).
            (let [{m1 :m b1 :b} segment1
                  {m2 :m b2 :b} segment2]
              (if (not (== m1 m2))
                (let [t (/ (- b2 b1) (- m1 m2))
                      i (:i segment1)
                      j (:i segment2)
                      [i j] [(min i j) (max i j)]] ; canonical order: i <= j
                  (if (and (< 0.0 t) (< t 1.0))
                    (struct intersection (+ t T) i j))))))
          (after-time? [inter] ; after time T+t?
            (if (not (nil? inter)) (> (:T+t inter) T+t)))]
    (cond (= (:type tree) :node) ; node
          ;; Always search the middle subtree (segments straddling the
          ;; split); descend left/right only when q's vertical extent can
          ;; overlap segments stored there.
          (let [y (:y tree)
                ymin (:ymin q)
                ymax (:ymax q)
                inters (list (query-segment-tree T (:middle tree) q T+t)
                             (if (<= ymin y) (query-segment-tree T (:left tree) q T+t))
                             (if (>= ymax y) (query-segment-tree T (:right tree) q T+t)))
                inters (remove nil? inters)]
            (if (not (empty? inters))
              ;; the earliest intersection wins
              (reduce (partial min-key :T+t) inters)))
          (= (:type tree) :leaf) ; leaf
          ;; Brute-force the (small) set of segments stored at the leaf.
          (let [inters (map #(compute-intersection T q %) (:segments tree))
                inters (filter after-time? inters)]
            (if (not (empty? inters))
              (reduce (partial min-key :T+t) inters))))))
;; ------------------------- CHUNKS AND TIMESTEPS -------------------------
;; Find all of the intersections that change the composition of the
;; chunk for times between (T,T+1).
;; Walks the lower envelope starting from `starting-segment` (the
;; chunk's current bottom), repeatedly asking the tree for the next
;; crossing after the current time until none remain in [tick, tick+1).
;; Returns a vector of `change` records, oldest first.
(defn- compute-chunk-intersections [tick segments tree starting-segment]
  (loop [current-segment starting-segment
         current-time (+ tick 0.0) ; coerce to double so times compare uniformly
         trace []]
    (let [inter (query-segment-tree tick tree current-segment current-time)]
      (if (nil? inter)
        trace ; no more intersections in this timestep, return trace
        (let [next-index (if (== (:i inter) (:i current-segment))
                           (:j inter) (:i inter)) ; the index of the intersecting segment
              next-segment (nth segments next-index) ; the intersecting segment
              next-time (:T+t inter)
              change (struct change next-time (list (:i current-segment) next-index))]
          (if (not (segment< next-segment current-segment))
            ;; If next-segment has a worse rank than the
            ;; current-segment, then the former's intersection with
            ;; the latter changes the chunk's composition, so that
            ;; intersection must be recorded.
            (recur next-segment next-time (conj trace change))
            ;; Otherwise, if next-segment has a better rank, that
            ;; means that it is intersecting the current segment from
            ;; a superior position. The fact that next-segment is the
            ;; new bottom of the chunk needs to be remembered, but the
            ;; intersection itself does not.
            (recur next-segment next-time trace)))))))
;; Find all of the action in [T,T+1) and record it.
;; For each chunk size in `chunks`, record the chunk's membership at
;; `tick` plus every composition change within [tick, tick+1), and
;; serialize it to dir/<chunk>/<tick>. `sorted-a` is assumed to be the
;; segments ranked best-first per segment< at time tick — TODO confirm
;; against the caller.
(defn compute-and-store-timestep [dir chunks tick segments tree sorted-a]
  (doseq [chunk chunks]
    (let [chunk-list (map :i (take chunk sorted-a)) ; indices of the top `chunk` segments
          change-list (compute-chunk-intersections tick segments tree (nth sorted-a (dec chunk)))
          timestep (struct timestep chunk-list change-list)
          filename (str dir "/" chunk "/" tick)] ; one file per (chunk, tick)
      (with-open [out (io/output-stream filename)]
        (encode-to-stream timestep-frame out (list timestep))))))
;; Fetch timestep [T,T+1).
;; Fetch timestep [T,T+1).
(defn fetch-timestep
  "Read the persisted timestep record for `chunk` at time `tick` from
  `dir` and return a timestep struct whose :chunk-list is a set (for
  fast membership tests)."
  [dir chunk tick]
  (let [file (io/file (str dir "/" chunk "/" tick))
        length (.length file)
        buffer (byte-array length)]
    (with-open [in (io/input-stream file)]
      ;; InputStream.read may return fewer bytes than requested, so a
      ;; single call is not guaranteed to fill the buffer; loop until the
      ;; buffer is full or the stream ends.
      (loop [off 0]
        (when (< off length)
          (let [n (.read in buffer off (- length off))]
            (when (pos? n)
              (recur (+ off n)))))))
    (let [step (decode timestep-frame buffer)]
      (struct timestep (set (:chunk-list step)) (:change-list step)))))
(def fetch-timestep-memo (memo/fifo fetch-timestep :fifo/threshold (* 12 1024)))
|
123898
|
(ns ^{:author "<NAME> <<EMAIL>>"}
worldtree.series
(:require [clojure.set :as set]
[clojure.java.io :as io]
[clojure.core.memoize :as memo])
(:use [gloss.core]
[gloss.io]))
(defstruct segment :ymin :ymax :m :b :i)
(defstruct node :type :y :left :middle :right)
(defstruct leaf :type :segments)
(defstruct intersection :T+t :i :j)
(defstruct change :T+t :ij)
(defstruct timestep :chunk-list :change-list)
(def change-frame (compile-frame (struct change :float64 [:uint16 :uint16]))) ; n ≤ 2^16 - 1
(def timestep-frame (compile-frame (struct timestep (repeated :uint16) (repeated change-frame))))
;; ------------------------- SEGMENTS AND RANKINGS -------------------------
;; Comparator for segments
(defn segment< [one two]
(let [{m1 :m b1 :b i1 :i} one
{m2 :m b2 :b i2 :i} two]
(cond (and (== b1 b2) (== m1 m2)) (< i1 i2)
(== b1 b2) (> m1 m2)
:else (> b1 b2))))
(defmacro snapshot
([dataset t]
`((get ~dataset :snapshot) ~t))
([dataset t i]
`(nth ((get ~dataset :snapshot) ~t) ~i)))
;; Compute all of the segments at the given time.
(defn compute-segments [dataset time]
(letfn [(compute-segment [dataset ^long t ^long i]
(let [y0 (snapshot dataset t i)
y1 (snapshot dataset (inc t) i)
m (- y1 y0)
b y0
b+m (+ b m)]
(struct segment (min b b+m) (max b b+m) m b i)))]
(map #(compute-segment dataset time %) (range (dec (:n dataset))))))
;; ------------------------- SEGMENT TREE -------------------------
;; Take a sorted list of segments and return a segment tree suitable
;; for finding overlapping segments.
(defn build-segment-tree [segments]
(if (not (empty? segments))
(let [median (rand-nth segments)
y (/ (+ (:ymin median) (:ymax median)) 2)]
(letfn [(left? [segment] (< (:ymax segment) y))
(right? [segment] (> (:ymin segment) y))]
(let [[left middle right]
(loop [left [] middle [] right [] segments segments]
(if (empty? segments)
[left middle right] ; if done, return the lists
(let [segment (first segments)
segments (rest segments)]
(cond (left? segment) (recur (conj left segment) middle right segments)
(right? segment) (recur left middle (conj right segment) segments)
:else (recur left (conj middle segment) right segments)))))]
;; At this point, left, middle, and right are bound to the
;; vectors computed in the loop.
(if (and (empty? left) (empty? right))
(struct leaf :leaf segments)
(struct node :node y (build-segment-tree left) (build-segment-tree middle) (build-segment-tree right))))))))
;; Query the segment tree for segments that intersect q after time
;; T+t. Only the first such segment is returned.
(defn- query-segment-tree [T tree q T+t]
(letfn [(compute-intersection [T segment1 segment2]
(let [{m1 :m b1 :b} segment1
{m2 :m b2 :b} segment2]
(if (not (== m1 m2))
(let [t (/ (- b2 b1) (- m1 m2))
i (:i segment1)
j (:i segment2)
[i j] [(min i j) (max i j)]]
(if (and (< 0.0 t) (< t 1.0))
(struct intersection (+ t T) i j))))))
(after-time? [inter] ; after time T+t?
(if (not (nil? inter)) (> (:T+t inter) T+t)))]
(cond (= (:type tree) :node) ; node
(let [y (:y tree)
ymin (:ymin q)
ymax (:ymax q)
inters (list (query-segment-tree T (:middle tree) q T+t)
(if (<= ymin y) (query-segment-tree T (:left tree) q T+t))
(if (>= ymax y) (query-segment-tree T (:right tree) q T+t)))
inters (remove nil? inters)]
(if (not (empty? inters))
(reduce (partial min-key :T+t) inters)))
(= (:type tree) :leaf) ; leaf
(let [inters (map #(compute-intersection T q %) (:segments tree))
inters (filter after-time? inters)]
(if (not (empty? inters))
(reduce (partial min-key :T+t) inters))))))
;; ------------------------- CHUNKS AND TIMESTEPS -------------------------
;; Find all of the intersections that change the composition of the
;; chunk for times between (T,T+1).
(defn- compute-chunk-intersections [tick segments tree starting-segment]
(loop [current-segment starting-segment
current-time (+ tick 0.0)
trace []]
(let [inter (query-segment-tree tick tree current-segment current-time)]
(if (nil? inter)
trace ; no more intersections in this timestep, return trace
(let [next-index (if (== (:i inter) (:i current-segment))
(:j inter) (:i inter)) ; the index of the intersecting segment
next-segment (nth segments next-index) ; the intersecting segment
next-time (:T+t inter)
change (struct change next-time (list (:i current-segment) next-index))]
(if (not (segment< next-segment current-segment))
;; If next-segment has a worse rank than the
;; current-segment, then the former's intersection with
;; the latter changes the chunk's composition, so that
;; intersection must be recorded.
(recur next-segment next-time (conj trace change))
;; Otherwise, if next-segment has a better rank, that
;; means that it is intersecting the current segment from
;; a superior position. The fact that next-segment is the
;; new bottom of the chunk needs to be remembered, but the
;; intersection itself does not.
(recur next-segment next-time trace)))))))
;; Find all of the action in [T,T+1) and record it.
(defn compute-and-store-timestep [dir chunks tick segments tree sorted-a]
(doseq [chunk chunks]
(let [chunk-list (map :i (take chunk sorted-a))
change-list (compute-chunk-intersections tick segments tree (nth sorted-a (dec chunk)))
timestep (struct timestep chunk-list change-list)
filename (str dir "/" chunk "/" tick)]
(with-open [out (io/output-stream filename)]
(encode-to-stream timestep-frame out (list timestep))))))
;; Fetch timestep [T,T+1).
(defn fetch-timestep [dir chunk tick]
(let [file (io/file (str dir "/" chunk "/" tick))]
(with-open [in (io/input-stream file)]
(let [buffer (byte-array (.length file))]
(.read in buffer)
(let [step (decode timestep-frame buffer)]
(struct timestep (set (:chunk-list step)) (:change-list step)))))))
(def fetch-timestep-memo (memo/fifo fetch-timestep :fifo/threshold (* 12 1024)))
| true |
(ns ^{:author "PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>"}
worldtree.series
(:require [clojure.set :as set]
[clojure.java.io :as io]
[clojure.core.memoize :as memo])
(:use [gloss.core]
[gloss.io]))
(defstruct segment :ymin :ymax :m :b :i)
(defstruct node :type :y :left :middle :right)
(defstruct leaf :type :segments)
(defstruct intersection :T+t :i :j)
(defstruct change :T+t :ij)
(defstruct timestep :chunk-list :change-list)
(def change-frame (compile-frame (struct change :float64 [:uint16 :uint16]))) ; n ≤ 2^16 - 1
(def timestep-frame (compile-frame (struct timestep (repeated :uint16) (repeated change-frame))))
;; ------------------------- SEGMENTS AND RANKINGS -------------------------
;; Comparator for segments
(defn segment< [one two]
(let [{m1 :m b1 :b i1 :i} one
{m2 :m b2 :b i2 :i} two]
(cond (and (== b1 b2) (== m1 m2)) (< i1 i2)
(== b1 b2) (> m1 m2)
:else (> b1 b2))))
(defmacro snapshot
([dataset t]
`((get ~dataset :snapshot) ~t))
([dataset t i]
`(nth ((get ~dataset :snapshot) ~t) ~i)))
;; Compute all of the segments at the given time.
(defn compute-segments [dataset time]
(letfn [(compute-segment [dataset ^long t ^long i]
(let [y0 (snapshot dataset t i)
y1 (snapshot dataset (inc t) i)
m (- y1 y0)
b y0
b+m (+ b m)]
(struct segment (min b b+m) (max b b+m) m b i)))]
(map #(compute-segment dataset time %) (range (dec (:n dataset))))))
;; ------------------------- SEGMENT TREE -------------------------
;; Take a sorted list of segments and return a segment tree suitable
;; for finding overlapping segments.
(defn build-segment-tree [segments]
(if (not (empty? segments))
(let [median (rand-nth segments)
y (/ (+ (:ymin median) (:ymax median)) 2)]
(letfn [(left? [segment] (< (:ymax segment) y))
(right? [segment] (> (:ymin segment) y))]
(let [[left middle right]
(loop [left [] middle [] right [] segments segments]
(if (empty? segments)
[left middle right] ; if done, return the lists
(let [segment (first segments)
segments (rest segments)]
(cond (left? segment) (recur (conj left segment) middle right segments)
(right? segment) (recur left middle (conj right segment) segments)
:else (recur left (conj middle segment) right segments)))))]
;; At this point, left, middle, and right are bound to the
;; vectors computed in the loop.
(if (and (empty? left) (empty? right))
(struct leaf :leaf segments)
(struct node :node y (build-segment-tree left) (build-segment-tree middle) (build-segment-tree right))))))))
;; Query the segment tree for segments that intersect q after time
;; T+t. Only the first such segment is returned.
(defn- query-segment-tree [T tree q T+t]
(letfn [(compute-intersection [T segment1 segment2]
(let [{m1 :m b1 :b} segment1
{m2 :m b2 :b} segment2]
(if (not (== m1 m2))
(let [t (/ (- b2 b1) (- m1 m2))
i (:i segment1)
j (:i segment2)
[i j] [(min i j) (max i j)]]
(if (and (< 0.0 t) (< t 1.0))
(struct intersection (+ t T) i j))))))
(after-time? [inter] ; after time T+t?
(if (not (nil? inter)) (> (:T+t inter) T+t)))]
(cond (= (:type tree) :node) ; node
(let [y (:y tree)
ymin (:ymin q)
ymax (:ymax q)
inters (list (query-segment-tree T (:middle tree) q T+t)
(if (<= ymin y) (query-segment-tree T (:left tree) q T+t))
(if (>= ymax y) (query-segment-tree T (:right tree) q T+t)))
inters (remove nil? inters)]
(if (not (empty? inters))
(reduce (partial min-key :T+t) inters)))
(= (:type tree) :leaf) ; leaf
(let [inters (map #(compute-intersection T q %) (:segments tree))
inters (filter after-time? inters)]
(if (not (empty? inters))
(reduce (partial min-key :T+t) inters))))))
;; ------------------------- CHUNKS AND TIMESTEPS -------------------------
;; Find all of the intersections that change the composition of the
;; chunk for times between (T,T+1).
(defn- compute-chunk-intersections [tick segments tree starting-segment]
(loop [current-segment starting-segment
current-time (+ tick 0.0)
trace []]
(let [inter (query-segment-tree tick tree current-segment current-time)]
(if (nil? inter)
trace ; no more intersections in this timestep, return trace
(let [next-index (if (== (:i inter) (:i current-segment))
(:j inter) (:i inter)) ; the index of the intersecting segment
next-segment (nth segments next-index) ; the intersecting segment
next-time (:T+t inter)
change (struct change next-time (list (:i current-segment) next-index))]
(if (not (segment< next-segment current-segment))
;; If next-segment has a worse rank than the
;; current-segment, then the former's intersection with
;; the latter changes the chunk's composition, so that
;; intersection must be recorded.
(recur next-segment next-time (conj trace change))
;; Otherwise, if next-segment has a better rank, that
;; means that it is intersecting the current segment from
;; a superior position. The fact that next-segment is the
;; new bottom of the chunk needs to be remembered, but the
;; intersection itself does not.
(recur next-segment next-time trace)))))))
;; Find all of the action in [T,T+1) and record it.
(defn compute-and-store-timestep [dir chunks tick segments tree sorted-a]
(doseq [chunk chunks]
(let [chunk-list (map :i (take chunk sorted-a))
change-list (compute-chunk-intersections tick segments tree (nth sorted-a (dec chunk)))
timestep (struct timestep chunk-list change-list)
filename (str dir "/" chunk "/" tick)]
(with-open [out (io/output-stream filename)]
(encode-to-stream timestep-frame out (list timestep))))))
;; Fetch timestep [T,T+1).
(defn fetch-timestep [dir chunk tick]
(let [file (io/file (str dir "/" chunk "/" tick))]
(with-open [in (io/input-stream file)]
(let [buffer (byte-array (.length file))]
(.read in buffer)
(let [step (decode timestep-frame buffer)]
(struct timestep (set (:chunk-list step)) (:change-list step)))))))
(def fetch-timestep-memo (memo/fifo fetch-timestep :fifo/threshold (* 12 1024)))
|
[
{
"context": "ndler (with-req (with-cmd :user/get {:user/email \"[email protected]\"})))))\n\n (is (same? 201 :user/created\n (s",
"end": 1809,
"score": 0.999910831451416,
"start": 1795,
"tag": "EMAIL",
"value": "[email protected]"
},
{
"context": "er (with-req (with-cmd :user/create {:user/email \"[email protected]\"})))))\n\n (is (same? 200 :user/found\n (sut",
"end": 1935,
"score": 0.9999072551727295,
"start": 1921,
"tag": "EMAIL",
"value": "[email protected]"
},
{
"context": "ndler (with-req (with-cmd :user/get {:user/email \"[email protected]\"})))))\n\n (is (same? 204 :user/deleted\n (s",
"end": 2056,
"score": 0.9999061226844788,
"start": 2042,
"tag": "EMAIL",
"value": "[email protected]"
},
{
"context": "er (with-req (with-cmd :user/delete {:user/email \"[email protected]\"}))))))\n",
"end": 2182,
"score": 0.9999043345451355,
"start": 2168,
"tag": "EMAIL",
"value": "[email protected]"
}
] |
test/src/example/web_test.cljc
|
just-sultanov/clj-unifier
| 27 |
(ns example.web-test
(:require
#?(:clj [clojure.test :refer [deftest is]]
:cljs [cljs.test :refer-macros [deftest is]])
[unifier.response :as r]
[example.helpers :as helpers]
[example.data :as data]
[example.web :as sut]))
;;;;
;; Test helpers
;;;
;; Wrap a command map with request-scoped metadata: fresh request /
;; CSRF / session ids, the API version (default :v1), and the i18n
;; language (default :en). The shorter arities delegate to the full one.
(defn- with-req
  ([cmd]
   (with-req :en cmd))
  ([language cmd]
   (with-req :v1 language cmd))
  ([version language cmd]
   (merge cmd
          {:request/id (helpers/new-id)
           :request/csrf-token (helpers/new-id)
           :session/id (helpers/new-id)
           :api/version version
           :i18n/language language})))
;; Build a command map for the handler under test. The command version
;; defaults to :v1 and the context to nil.
(defn- with-cmd
  ([name] (with-cmd name :v1 nil))
  ([name context] (with-cmd name :v1 context))
  ([name version context]
   (hash-map :cmd/name name
             :cmd/version version
             :cmd/context context)))
;; True when the response `x` carries the expected HTTP status and the
;; expected :type inside its body.
(defn same? [status type x]
  (let [actual-status (:status x)
        actual-type (get-in x [:body :type])]
    (and (= status actual-status)
         (= type actual-type))))
;;;;
;; Tests
;;;;
;; End-to-end checks of the command handler's status codes and response
;; types across the user API.
(deftest web-test
  ;; unknown command name -> unsupported
  (is (same? 405 ::r/unsupported
        (sut/cmd-handler (with-req (with-cmd ::unknown)))))
  ;; known command but unsupported API version -> unsupported
  (is (same? 405 ::r/unsupported
        (sut/cmd-handler (with-req :v2 :en (with-cmd :user/get-all)))))
  ;; listing and fetching seeded users succeeds
  (is (same? 200 :users/found
        (sut/cmd-handler (with-req (with-cmd :users/get-all)))))
  (is (same? 200 :user/found
        (sut/cmd-handler (with-req (with-cmd :user/get data/user1)))))
  (is (same? 200 :user/found
        (sut/cmd-handler (with-req (with-cmd :user/get data/user2)))))
  ;; creating a user that already exists -> conflict
  (is (same? 409 :user/not-created
        (sut/cmd-handler (with-req (with-cmd :user/create data/user1)))))
  (is (same? 409 :user/not-created
        (sut/cmd-handler (with-req (with-cmd :user/create data/user2)))))
  ;; lifecycle of a fresh user: missing -> created -> found -> deleted
  (is (same? 404 :user/not-found
        (sut/cmd-handler (with-req (with-cmd :user/get {:user/email "[email protected]"})))))
  (is (same? 201 :user/created
        (sut/cmd-handler (with-req (with-cmd :user/create {:user/email "[email protected]"})))))
  (is (same? 200 :user/found
        (sut/cmd-handler (with-req (with-cmd :user/get {:user/email "[email protected]"})))))
  (is (same? 204 :user/deleted
        (sut/cmd-handler (with-req (with-cmd :user/delete {:user/email "[email protected]"}))))))
|
81370
|
(ns example.web-test
(:require
#?(:clj [clojure.test :refer [deftest is]]
:cljs [cljs.test :refer-macros [deftest is]])
[unifier.response :as r]
[example.helpers :as helpers]
[example.data :as data]
[example.web :as sut]))
;;;;
;; Test helpers
;;;
(defn- with-req
([cmd]
(with-req :en cmd))
([language cmd]
(with-req :v1 language cmd))
([version language cmd]
(merge cmd
{:request/id (helpers/new-id)
:request/csrf-token (helpers/new-id)
:session/id (helpers/new-id)
:api/version version
:i18n/language language})))
(defn- with-cmd
([name]
(with-cmd name nil))
([name context]
(with-cmd name :v1 context))
([name version context]
{:cmd/name name
:cmd/version version
:cmd/context context}))
(defn same? [status type x]
(and
(= status (:status x))
(= type (get-in x [:body :type]))
true))
;;;;
;; Tests
;;;;
(deftest web-test
(is (same? 405 ::r/unsupported
(sut/cmd-handler (with-req (with-cmd ::unknown)))))
(is (same? 405 ::r/unsupported
(sut/cmd-handler (with-req :v2 :en (with-cmd :user/get-all)))))
(is (same? 200 :users/found
(sut/cmd-handler (with-req (with-cmd :users/get-all)))))
(is (same? 200 :user/found
(sut/cmd-handler (with-req (with-cmd :user/get data/user1)))))
(is (same? 200 :user/found
(sut/cmd-handler (with-req (with-cmd :user/get data/user2)))))
(is (same? 409 :user/not-created
(sut/cmd-handler (with-req (with-cmd :user/create data/user1)))))
(is (same? 409 :user/not-created
(sut/cmd-handler (with-req (with-cmd :user/create data/user2)))))
(is (same? 404 :user/not-found
(sut/cmd-handler (with-req (with-cmd :user/get {:user/email "<EMAIL>"})))))
(is (same? 201 :user/created
(sut/cmd-handler (with-req (with-cmd :user/create {:user/email "<EMAIL>"})))))
(is (same? 200 :user/found
(sut/cmd-handler (with-req (with-cmd :user/get {:user/email "<EMAIL>"})))))
(is (same? 204 :user/deleted
(sut/cmd-handler (with-req (with-cmd :user/delete {:user/email "<EMAIL>"}))))))
| true |
(ns example.web-test
(:require
#?(:clj [clojure.test :refer [deftest is]]
:cljs [cljs.test :refer-macros [deftest is]])
[unifier.response :as r]
[example.helpers :as helpers]
[example.data :as data]
[example.web :as sut]))
;;;;
;; Test helpers
;;;
(defn- with-req
([cmd]
(with-req :en cmd))
([language cmd]
(with-req :v1 language cmd))
([version language cmd]
(merge cmd
{:request/id (helpers/new-id)
:request/csrf-token (helpers/new-id)
:session/id (helpers/new-id)
:api/version version
:i18n/language language})))
(defn- with-cmd
([name]
(with-cmd name nil))
([name context]
(with-cmd name :v1 context))
([name version context]
{:cmd/name name
:cmd/version version
:cmd/context context}))
(defn same? [status type x]
(and
(= status (:status x))
(= type (get-in x [:body :type]))
true))
;;;;
;; Tests
;;;;
(deftest web-test
(is (same? 405 ::r/unsupported
(sut/cmd-handler (with-req (with-cmd ::unknown)))))
(is (same? 405 ::r/unsupported
(sut/cmd-handler (with-req :v2 :en (with-cmd :user/get-all)))))
(is (same? 200 :users/found
(sut/cmd-handler (with-req (with-cmd :users/get-all)))))
(is (same? 200 :user/found
(sut/cmd-handler (with-req (with-cmd :user/get data/user1)))))
(is (same? 200 :user/found
(sut/cmd-handler (with-req (with-cmd :user/get data/user2)))))
(is (same? 409 :user/not-created
(sut/cmd-handler (with-req (with-cmd :user/create data/user1)))))
(is (same? 409 :user/not-created
(sut/cmd-handler (with-req (with-cmd :user/create data/user2)))))
(is (same? 404 :user/not-found
(sut/cmd-handler (with-req (with-cmd :user/get {:user/email "PI:EMAIL:<EMAIL>END_PI"})))))
(is (same? 201 :user/created
(sut/cmd-handler (with-req (with-cmd :user/create {:user/email "PI:EMAIL:<EMAIL>END_PI"})))))
(is (same? 200 :user/found
(sut/cmd-handler (with-req (with-cmd :user/get {:user/email "PI:EMAIL:<EMAIL>END_PI"})))))
(is (same? 204 :user/deleted
(sut/cmd-handler (with-req (with-cmd :user/delete {:user/email "PI:EMAIL:<EMAIL>END_PI"}))))))
|
[
{
"context": "(ns ^{:author \"Adam Berger\"} ulvm.scopes.nodejs.write-file\n \"Utility to wri",
"end": 26,
"score": 0.9998739361763,
"start": 15,
"tag": "NAME",
"value": "Adam Berger"
}
] |
examples/scopes/nodejs-scope/src/ulvm/scopes/nodejs/write_file.clj
|
abrgr/ulvm
| 0 |
(ns ^{:author "Adam Berger"} ulvm.scopes.nodejs.write-file
"Utility to write files"
(:require [clojure.string :as string]
[amazonica.aws.s3 :as s3]))
(defn w
  "Write `contents` (a string, stored UTF-8) to `path` as an object in
  the scope's S3-backed filesystem. The bucket name comes from the
  SCOPE_NAME environment variable; credentials and endpoint come from
  SECRET_KEY and FS_BASE_URI."
  [path contents]
  (let [stream (java.io.ByteArrayInputStream. (.getBytes contents "utf-8"))
        ; s3 keys do not start with a slash
        k (string/replace-first path #"^[/]" "")
        bucket (System/getenv "SCOPE_NAME")]
    (s3/put-object
      ;; NOTE(review): the access key is the scope/bucket name itself —
      ;; looks intentional for a per-scope endpoint, but confirm.
      {:access-key bucket
       :secret-key (System/getenv "SECRET_KEY")
       :endpoint (System/getenv "FS_BASE_URI")}
      :key k
      :bucket-name bucket
      :input-stream stream)))
|
249
|
(ns ^{:author "<NAME>"} ulvm.scopes.nodejs.write-file
"Utility to write files"
(:require [clojure.string :as string]
[amazonica.aws.s3 :as s3]))
(defn w
"Write a file"
[path contents]
(let [stream (java.io.ByteArrayInputStream. (.getBytes contents "utf-8"))
; s3 keys do not start with a slash
k (string/replace-first path #"^[/]" "")
bucket (System/getenv "SCOPE_NAME")]
(s3/put-object
{:access-key bucket
:secret-key (System/getenv "SECRET_KEY")
:endpoint (System/getenv "FS_BASE_URI")}
:key k
:bucket-name bucket
:input-stream stream)))
| true |
(ns ^{:author "PI:NAME:<NAME>END_PI"} ulvm.scopes.nodejs.write-file
"Utility to write files"
(:require [clojure.string :as string]
[amazonica.aws.s3 :as s3]))
(defn w
"Write a file"
[path contents]
(let [stream (java.io.ByteArrayInputStream. (.getBytes contents "utf-8"))
; s3 keys do not start with a slash
k (string/replace-first path #"^[/]" "")
bucket (System/getenv "SCOPE_NAME")]
(s3/put-object
{:access-key bucket
:secret-key (System/getenv "SECRET_KEY")
:endpoint (System/getenv "FS_BASE_URI")}
:key k
:bucket-name bucket
:input-stream stream)))
|
[
{
"context": "\n;;6)URI metadata are ignores for example <mailto:[email protected]> is \"[email protected]\" for Louna\n\n\n;;query 2.4\n#_(q",
"end": 987,
"score": 0.9998966455459595,
"start": 973,
"tag": "EMAIL",
"value": "[email protected]"
},
{
"context": "e ignores for example <mailto:[email protected]> is \"[email protected]\" for Louna\n\n\n;;query 2.4\n#_(q {:q-out [\"print\"]}\n",
"end": 1007,
"score": 0.9999009370803833,
"start": 993,
"tag": "EMAIL",
"value": "[email protected]"
},
{
"context": " {:q-out [\"print\"]}\n [?mbox]\n (:foaf.name ?x \"Johnny Lee Outlaw\")\n (:foaf.mbox ?mbox)\n (c (:rdf \"web2.5.3\"",
"end": 1214,
"score": 0.995976984500885,
"start": 1197,
"tag": "NAME",
"value": "Johnny Lee Outlaw"
},
{
"context": "rdf.object ?t ?title)\n (:empty.saidBy ?t \"Bob\")\n (c {:c-out [\"print\"]}\n (:rdf \"web2.9\"))",
"end": 1772,
"score": 0.9921889901161194,
"start": 1769,
"tag": "NAME",
"value": "Bob"
},
{
"context": "or no\" questions\n#_(println (? (q (:foaf.name ?x \"Alice\")\n (c (:rdf \"web10.5\")))))\n\n#_(prin",
"end": 4623,
"score": 0.9995666742324829,
"start": 4618,
"tag": "NAME",
"value": "Alice"
},
{
"context": " \"web10.5\")))))\n\n#_(println (? (q (:foaf.name ?x \"Alice\")\n (:foaf.mbox ?x \"[email protected]",
"end": 4704,
"score": 0.9996088147163391,
"start": 4699,
"tag": "NAME",
"value": "Alice"
},
{
"context": ".name ?x \"Alice\")\n (:foaf.mbox ?x \"[email protected]\")\n (c (:rdf \"web10.5\")))))\n\n;;query",
"end": 4757,
"score": 0.9999033212661743,
"start": 4739,
"tag": "EMAIL",
"value": "[email protected]"
}
] |
test/web_qc/q.clj
|
tkaryadis/louna-local
| 0 |
(ns web-qc.q
(:use louna.louna
louna.louna-util))
;;examples from https://www.w3.org/2001/sw/DataAccess/rq23/examples.html
;;Some differences
;;1)Louna internally stores URIs as :prefix.name; if the user has given the prefix-URI pair
;;  (for example in a .ttl file) it will use the user's prefix; if not, it will auto-generate one,
;;  for example p1, p2, ...
;;  We never use URIs in louna queries. We use the notation :prefix.name.
;;  To see the mappings, look at the ns file that louna generates, but we generally don't have to look at it
;;  except for prefixes that we haven't used in the .ttl file and that are louna's auto-generated ones.
;;2)Louna converts the empty namespace to :empty.name, for example :book1 = :empty.book1 inside louna
;;3)the language tag is ignored by louna: "cat"@en for louna is just "cat"
;;4)special datatypes, for example "abc"^^dt:specialDatatype, for louna are only "abc"
;;5)dates are treated like strings
;;6)URI metadata is ignored, for example <mailto:[email protected]> is "[email protected]" for Louna
;;query 2.4
#_(q {:q-out ["print"]}
[?book ?title]
(:dc.title ?book ?title)
(c (:rdf "web2.4")))
;;query 2.5.3
#_(q {:q-out ["print"]}
[?mbox]
(:foaf.name ?x "Johnny Lee Outlaw")
(:foaf.mbox ?mbox)
(c (:rdf "web2.5.3")))
;;query 2.6
#_(q {:q-out ["print"]}
[?name ?mbox]
(:foaf.name ?x ?name)
(:foaf.mbox ?x ?mbox)
(c (:rdf "web2.6")))
;;query 2.7
#_(q {:q-out ["print"]}
[?x ?name]
(:foaf.name ?x ?name)
(c (:rdf "web2.7")))
;;2.9
#_(q {:q-out ["print"]}
[?book ?title]
(:dc.title ?book ?title)
(c (:rdf "web2.9")))
;;2.9 q1
#_(q {:q-out ["print"]}
[?book ?title]
(:rdf.subject ?t ?book)
(:rdf.predicate ?t :dc.title)
(:rdf.object ?t ?title)
(:empty.saidBy ?t "Bob")
(c {:c-out ["print"]}
(:rdf "web2.9")))
;;3.1
#_(q {:q-out ["print"]}
[?v]
(?p ?v 42)
(c (:rdf "web3.1")))
;;3.1
#_(q {:q-out ["print"]}
[?x]
(?p ?x "cat")
(c (:rdf "web3.1")))
;;3.2
#_(q {:q-out ["print"]}
[?title ?price]
(:ns.price ?x ?price)
(< ?price 30)
(:dc.title ?x ?title)
(c (:rdf "web3.2")))
;;4.1(its on web2.6 data)
#_(q {:q-out ["print"]}
[?name ?mbox]
(:foaf.name ?x ?name)
(:foaf.mbox ?x ?mbox)
(c (:rdf "web2.6")))
;;query 4.1-q1
#_(q {:q-out ["print"]}
[?name ?mbox]
((:foaf.name ?x ?name))
((:foaf.mbox ?x ?mbox))
(c {:c-out ["print"]}
(:rdf "web2.6")))
;;query 5.1
#_(q {:q-out ["print"]}
[?name ?mbox]
(:foaf.name ?x ?name)
(:if (:foaf.mbox ?x ?mbox))
(c (:rdf "web5.1")))
;;query 5.2
#_(q {:q-out ["print"]}
[?title ?price]
(:dc.title ?x ?title)
(:if (:ns.price ?x ?price)
(< ?price 30))
(c (:rdf "web5.2")))
;;query 5.3
#_(q {:q-out ["print"]}
[?name ?mbox ?hpage]
(:foaf.name ?x ?name)
(:if (:foaf.mbox ?x ?mbox))
(:if (:foaf.homepage ?x ?hpage))
(c (:rdf "web5.3")))
;;query 5.5
#_(q {:q-out ["print"]}
[?foafName ?mbox ?gname ?fname]
(:foaf.name ?x ?foafName)
(:if (:foaf.mbox ?x ?mbox))
(:if (:vcard.N ?x ?vc)
(:vcard.Given ?vc ?gname)
(:if (:vcard.Family ?vc ?fname)))
(c (:rdf "web5.5")))
;;query 6.1
#_(q {:q-out ["print"]}
[?title]
(:dc10.title ?book ?title)
(:add (:dc11.title ?book ?title))
(c (:rdf "web6.1")))
;;query 6.1-q1
#_(q {:q-out ["print"]}
[?x ?y]
(:dc10.title ?book ?x)
(:add (:dc11.title ?book ?y))
(c (:rdf "web6.1")))
;;query 6.1-q2
;;typo in the webpage,look at data
;;alice wrote "SPARQL Query Language Tutorial"
#_(q {:q-out ["print"]}
[?title ?author]
(:dc11.title ?book ?title)
(:dc11.creator ?book ?author)
(:add (:dc10.title ?book ?title)
(:dc10.creator ?book ?author))
(c (:rdf "web6.1")))
;;7-8-9 use named graphs, which louna doesn't support
;;query 10.1.2 DISTINCT
#_(q {:q-out ["print"]}
[distinct ?name]
(:foaf.name ?x ?name)
(c (:rdf "web10")))
;;query 10.1.3 ORDER BY
#_(q {:q-out ["print"]}
[?name]
(:foaf.name ?x ?name)
(:sort-by ?name)
(c (:rdf "web10")))
;;query 10.1.3-q1,query 10.1.3-q2 (skipped,they dont give the data)
;;query 10.1.4 LIMIT
#_(q {:q-out ["print"]}
[?name]
(:foaf.name ?x ?name)
(:limit 1)
(c (:rdf "web10")))
;;query 10.1.5 OFFSET (not implemented in louna, but it's easy to implement)
;;query 10.2 Selecting Variables
#_(q {:q-out ["print"]}
[?nameX ?nameY ?nickY]
(:foaf.knows ?x ?y)
(:foaf.name ?x ?nameX)
(:foaf.name ?y ?nameY)
(:if (:foaf.nick ?y ?nickY))
(c (:rdf "web10.2")))
;;query 10.5 Asking "yes or no" questions
#_(println (? (q (:foaf.name ?x "Alice")
(c (:rdf "web10.5")))))
#_(println (? (q (:foaf.name ?x "Alice")
(:foaf.mbox ?x "[email protected]")
(c (:rdf "web10.5")))))
;;query 11.4.1 bound
#_(q {:q-out ["print"]}
[?givenName]
(:foaf.givenName ?x ?givenName)
(:if (:dc.date ?x ?date))
(not (nil? ?date))
(c (:rdf "web11.4")))
;;query 11.4.1-q1
#_(q {:q-out ["print"]}
[?givenName]
(:foaf.givenName ?x ?givenName)
(:if (:dc.date ?x ?date))
(nil? ?date)
(c (:rdf "web11.4")))
;;louna doesn't support metadata URIs like
;;<mail:to..>
;;so the next queries are a bit different
;;query 11.4.2
#_(q {:q-out ["print"]}
[?name ?mbox]
(:foaf.name ?x ?name)
(:foaf.mbox ?x ?mbox)
(not (prefixed? ?mbox))
(c (:rdf "web11.4.2")))
;;query 11.4.3 isBlank
#_(q {:q-out ["print"]}
[?given ?family]
(:a.annotates ?annot :sp.rdf-sparql-query)
(:dc.creator ?annot ?c)
(:if (:foaf.given ?c ?given)
(:foaf.family ?family))
(blank? ?c)
(c (:rdf "web11.4.3")))
;;query 11.4.5
#_(q {:q-out ["print"]}
[?name ?mbox]
(:foaf.name ?x ?name)
(:foaf.mbox ?x ?mbox)
(clojure.string/includes? ?mbox "@work.example")
(c (:rdf "web11.4.5")))
;;query 11.4.7
#_(q {:q-out ["print"]}
[?name ?shoeSize]
(:foaf.name ?x ?name)
(:eg.shoeSize ?x ?shoeSize)
(integer? ?shoeSize)
(c (:rdf "web11.4.7")))
;;query 11.4.10
#_(q {:q-out ["print"]}
[?name1 ?name2]
(:foaf.name ?x ?name1)
(:foaf.mbox ?x ?mbox1)
(:foaf.name ?y ?name2)
(:foaf.mbox ?y ?mbox2)
(and (= ?mbox1 ?mbox2)
(not= ?name1 ?name2))
(c (:rdf "web11.4.10")))
;;query 11.4.12
#_(q {:q-out ["print"]}
[?name]
(:foaf.name ?x ?name)
(re-matches #"[^a]li.+" ?name)
(c (:rdf "web11.4.12")))
|
45688
|
(ns web-qc.q
(:use louna.louna
louna.louna-util))
;;examples from https://www.w3.org/2001/sw/DataAccess/rq23/examples.html
;;Some differences
;;1)Louna inside stores URI'S as :prefix.name,if the user has give the prefix-URI pair
;; for example in a .ttl file,it will use the user's prefix,if not it will auto-generate a prefix
;; for exampl p1,p2 ....
;; We never use URI's in louna queries.We use the notation :prefix.name
;; To see the mathings we see at ns file that louna generates,but we generally dont have to look at it
;; except for prefixes that we havent used in the .ttl file and its louna's auto-generated
;;2)Louna will convert the empty namespace to :empty.name for example :book1 = :empty.book1 inside louna
;;3)language tag is ignored by louna "cat"@en for louna is just "cat"
;;4)special datatype for example "abc"^^dt:specialDatatype for louna its only "abc"
;;5)dates are like strings
;;6)URI metadata are ignores for example <mailto:<EMAIL>> is "<EMAIL>" for Louna
;;query 2.4
#_(q {:q-out ["print"]}
[?book ?title]
(:dc.title ?book ?title)
(c (:rdf "web2.4")))
;;query 2.5.3
#_(q {:q-out ["print"]}
[?mbox]
(:foaf.name ?x "<NAME>")
(:foaf.mbox ?mbox)
(c (:rdf "web2.5.3")))
;;query 2.6
#_(q {:q-out ["print"]}
[?name ?mbox]
(:foaf.name ?x ?name)
(:foaf.mbox ?x ?mbox)
(c (:rdf "web2.6")))
;;query 2.7
#_(q {:q-out ["print"]}
[?x ?name]
(:foaf.name ?x ?name)
(c (:rdf "web2.7")))
;;2.9
#_(q {:q-out ["print"]}
[?book ?title]
(:dc.title ?book ?title)
(c (:rdf "web2.9")))
;;2.9 q1
#_(q {:q-out ["print"]}
[?book ?title]
(:rdf.subject ?t ?book)
(:rdf.predicate ?t :dc.title)
(:rdf.object ?t ?title)
(:empty.saidBy ?t "<NAME>")
(c {:c-out ["print"]}
(:rdf "web2.9")))
;;3.1
#_(q {:q-out ["print"]}
[?v]
(?p ?v 42)
(c (:rdf "web3.1")))
;;3.1
#_(q {:q-out ["print"]}
[?x]
(?p ?x "cat")
(c (:rdf "web3.1")))
;;3.2
#_(q {:q-out ["print"]}
[?title ?price]
(:ns.price ?x ?price)
(< ?price 30)
(:dc.title ?x ?title)
(c (:rdf "web3.2")))
;;4.1(its on web2.6 data)
#_(q {:q-out ["print"]}
[?name ?mbox]
(:foaf.name ?x ?name)
(:foaf.mbox ?x ?mbox)
(c (:rdf "web2.6")))
;;query 4.1-q1
#_(q {:q-out ["print"]}
[?name ?mbox]
((:foaf.name ?x ?name))
((:foaf.mbox ?x ?mbox))
(c {:c-out ["print"]}
(:rdf "web2.6")))
;;query 5.1
#_(q {:q-out ["print"]}
[?name ?mbox]
(:foaf.name ?x ?name)
(:if (:foaf.mbox ?x ?mbox))
(c (:rdf "web5.1")))
;;query 5.2
#_(q {:q-out ["print"]}
[?title ?price]
(:dc.title ?x ?title)
(:if (:ns.price ?x ?price)
(< ?price 30))
(c (:rdf "web5.2")))
;;query 5.3
#_(q {:q-out ["print"]}
[?name ?mbox ?hpage]
(:foaf.name ?x ?name)
(:if (:foaf.mbox ?x ?mbox))
(:if (:foaf.homepage ?x ?hpage))
(c (:rdf "web5.3")))
;;query 5.5
#_(q {:q-out ["print"]}
[?foafName ?mbox ?gname ?fname]
(:foaf.name ?x ?foafName)
(:if (:foaf.mbox ?x ?mbox))
(:if (:vcard.N ?x ?vc)
(:vcard.Given ?vc ?gname)
(:if (:vcard.Family ?vc ?fname)))
(c (:rdf "web5.5")))
;;query 6.1
#_(q {:q-out ["print"]}
[?title]
(:dc10.title ?book ?title)
(:add (:dc11.title ?book ?title))
(c (:rdf "web6.1")))
;;query 6.1-q1
#_(q {:q-out ["print"]}
[?x ?y]
(:dc10.title ?book ?x)
(:add (:dc11.title ?book ?y))
(c (:rdf "web6.1")))
;;query 6.1-q2
;;typo in the webpage,look at data
;;alice wrote "SPARQL Query Language Tutorial"
#_(q {:q-out ["print"]}
[?title ?author]
(:dc11.title ?book ?title)
(:dc11.creator ?book ?author)
(:add (:dc10.title ?book ?title)
(:dc10.creator ?book ?author))
(c (:rdf "web6.1")))
;;7-8-9 are using named graphs ,that louna don't support
;;query 10.1.2 DISTINCT
#_(q {:q-out ["print"]}
[distinct ?name]
(:foaf.name ?x ?name)
(c (:rdf "web10")))
;;query 10.1.3 ORDER BY
#_(q {:q-out ["print"]}
[?name]
(:foaf.name ?x ?name)
(:sort-by ?name)
(c (:rdf "web10")))
;;query 10.1.3-q1,query 10.1.3-q2 (skipped,they dont give the data)
;;query 10.1.4 LIMIT
#_(q {:q-out ["print"]}
[?name]
(:foaf.name ?x ?name)
(:limit 1)
(c (:rdf "web10")))
;;query 10.1.5 OFFSET (not implemented in louna but its easy to implemement)
;;query 10.2 Selecting Variables
#_(q {:q-out ["print"]}
[?nameX ?nameY ?nickY]
(:foaf.knows ?x ?y)
(:foaf.name ?x ?nameX)
(:foaf.name ?y ?nameY)
(:if (:foaf.nick ?y ?nickY))
(c (:rdf "web10.2")))
;;query 10.5 Asking "yes or no" questions
#_(println (? (q (:foaf.name ?x "<NAME>")
(c (:rdf "web10.5")))))
#_(println (? (q (:foaf.name ?x "<NAME>")
(:foaf.mbox ?x "<EMAIL>")
(c (:rdf "web10.5")))))
;;query 11.4.1 bound
#_(q {:q-out ["print"]}
[?givenName]
(:foaf.givenName ?x ?givenName)
(:if (:dc.date ?x ?date))
(not (nil? ?date))
(c (:rdf "web11.4")))
;;query 11.4.1-q1
#_(q {:q-out ["print"]}
[?givenName]
(:foaf.givenName ?x ?givenName)
(:if (:dc.date ?x ?date))
(nil? ?date)
(c (:rdf "web11.4")))
;;louna dont support metadata URIs like
;;<mail:to..>
;;so the next queries a bit different
;;query 11.4.2
#_(q {:q-out ["print"]}
[?name ?mbox]
(:foaf.name ?x ?name)
(:foaf.mbox ?x ?mbox)
(not (prefixed? ?mbox))
(c (:rdf "web11.4.2")))
;;query 11.4.3 isBlank
#_(q {:q-out ["print"]}
[?given ?family]
(:a.annotates ?annot :sp.rdf-sparql-query)
(:dc.creator ?annot ?c)
(:if (:foaf.given ?c ?given)
(:foaf.family ?family))
(blank? ?c)
(c (:rdf "web11.4.3")))
;;query 11.4.5
#_(q {:q-out ["print"]}
[?name ?mbox]
(:foaf.name ?x ?name)
(:foaf.mbox ?x ?mbox)
(clojure.string/includes? ?mbox "@work.example")
(c (:rdf "web11.4.5")))
;;query 11.4.7
#_(q {:q-out ["print"]}
[?name ?shoeSize]
(:foaf.name ?x ?name)
(:eg.shoeSize ?x ?shoeSize)
(integer? ?shoeSize)
(c (:rdf "web11.4.7")))
;;query 11.4.10
#_(q {:q-out ["print"]}
[?name1 ?name2]
(:foaf.name ?x ?name1)
(:foaf.mbox ?x ?mbox1)
(:foaf.name ?y ?name2)
(:foaf.mbox ?y ?mbox2)
(and (= ?mbox1 ?mbox2)
(not= ?name1 ?name2))
(c (:rdf "web11.4.10")))
;;query 11.4.12
#_(q {:q-out ["print"]}
[?name]
(:foaf.name ?x ?name)
(re-matches #"[^a]li.+" ?name)
(c (:rdf "web11.4.12")))
| true |
(ns web-qc.q
(:use louna.louna
louna.louna-util))
;;examples from https://www.w3.org/2001/sw/DataAccess/rq23/examples.html
;;Some differences
;;1)Louna inside stores URI'S as :prefix.name,if the user has give the prefix-URI pair
;; for example in a .ttl file,it will use the user's prefix,if not it will auto-generate a prefix
;; for exampl p1,p2 ....
;; We never use URI's in louna queries.We use the notation :prefix.name
;; To see the mathings we see at ns file that louna generates,but we generally dont have to look at it
;; except for prefixes that we havent used in the .ttl file and its louna's auto-generated
;;2)Louna will convert the empty namespace to :empty.name for example :book1 = :empty.book1 inside louna
;;3)language tag is ignored by louna "cat"@en for louna is just "cat"
;;4)special datatype for example "abc"^^dt:specialDatatype for louna its only "abc"
;;5)dates are like strings
;;6)URI metadata are ignores for example <mailto:PI:EMAIL:<EMAIL>END_PI> is "PI:EMAIL:<EMAIL>END_PI" for Louna
;;query 2.4
#_(q {:q-out ["print"]}
[?book ?title]
(:dc.title ?book ?title)
(c (:rdf "web2.4")))
;;query 2.5.3
#_(q {:q-out ["print"]}
[?mbox]
(:foaf.name ?x "PI:NAME:<NAME>END_PI")
(:foaf.mbox ?mbox)
(c (:rdf "web2.5.3")))
;;query 2.6
#_(q {:q-out ["print"]}
[?name ?mbox]
(:foaf.name ?x ?name)
(:foaf.mbox ?x ?mbox)
(c (:rdf "web2.6")))
;;query 2.7
#_(q {:q-out ["print"]}
[?x ?name]
(:foaf.name ?x ?name)
(c (:rdf "web2.7")))
;;2.9
#_(q {:q-out ["print"]}
[?book ?title]
(:dc.title ?book ?title)
(c (:rdf "web2.9")))
;;2.9 q1
#_(q {:q-out ["print"]}
[?book ?title]
(:rdf.subject ?t ?book)
(:rdf.predicate ?t :dc.title)
(:rdf.object ?t ?title)
(:empty.saidBy ?t "PI:NAME:<NAME>END_PI")
(c {:c-out ["print"]}
(:rdf "web2.9")))
;;3.1
#_(q {:q-out ["print"]}
[?v]
(?p ?v 42)
(c (:rdf "web3.1")))
;;3.1
#_(q {:q-out ["print"]}
[?x]
(?p ?x "cat")
(c (:rdf "web3.1")))
;;3.2
#_(q {:q-out ["print"]}
[?title ?price]
(:ns.price ?x ?price)
(< ?price 30)
(:dc.title ?x ?title)
(c (:rdf "web3.2")))
;;4.1(its on web2.6 data)
#_(q {:q-out ["print"]}
[?name ?mbox]
(:foaf.name ?x ?name)
(:foaf.mbox ?x ?mbox)
(c (:rdf "web2.6")))
;;query 4.1-q1
#_(q {:q-out ["print"]}
[?name ?mbox]
((:foaf.name ?x ?name))
((:foaf.mbox ?x ?mbox))
(c {:c-out ["print"]}
(:rdf "web2.6")))
;;query 5.1
#_(q {:q-out ["print"]}
[?name ?mbox]
(:foaf.name ?x ?name)
(:if (:foaf.mbox ?x ?mbox))
(c (:rdf "web5.1")))
;;query 5.2
#_(q {:q-out ["print"]}
[?title ?price]
(:dc.title ?x ?title)
(:if (:ns.price ?x ?price)
(< ?price 30))
(c (:rdf "web5.2")))
;;query 5.3
#_(q {:q-out ["print"]}
[?name ?mbox ?hpage]
(:foaf.name ?x ?name)
(:if (:foaf.mbox ?x ?mbox))
(:if (:foaf.homepage ?x ?hpage))
(c (:rdf "web5.3")))
;;query 5.5
#_(q {:q-out ["print"]}
[?foafName ?mbox ?gname ?fname]
(:foaf.name ?x ?foafName)
(:if (:foaf.mbox ?x ?mbox))
(:if (:vcard.N ?x ?vc)
(:vcard.Given ?vc ?gname)
(:if (:vcard.Family ?vc ?fname)))
(c (:rdf "web5.5")))
;;query 6.1
#_(q {:q-out ["print"]}
[?title]
(:dc10.title ?book ?title)
(:add (:dc11.title ?book ?title))
(c (:rdf "web6.1")))
;;query 6.1-q1
#_(q {:q-out ["print"]}
[?x ?y]
(:dc10.title ?book ?x)
(:add (:dc11.title ?book ?y))
(c (:rdf "web6.1")))
;;query 6.1-q2
;;typo in the webpage,look at data
;;alice wrote "SPARQL Query Language Tutorial"
#_(q {:q-out ["print"]}
[?title ?author]
(:dc11.title ?book ?title)
(:dc11.creator ?book ?author)
(:add (:dc10.title ?book ?title)
(:dc10.creator ?book ?author))
(c (:rdf "web6.1")))
;;7-8-9 are using named graphs ,that louna don't support
;;query 10.1.2 DISTINCT
#_(q {:q-out ["print"]}
[distinct ?name]
(:foaf.name ?x ?name)
(c (:rdf "web10")))
;;query 10.1.3 ORDER BY
#_(q {:q-out ["print"]}
[?name]
(:foaf.name ?x ?name)
(:sort-by ?name)
(c (:rdf "web10")))
;;query 10.1.3-q1,query 10.1.3-q2 (skipped,they dont give the data)
;;query 10.1.4 LIMIT
#_(q {:q-out ["print"]}
[?name]
(:foaf.name ?x ?name)
(:limit 1)
(c (:rdf "web10")))
;;query 10.1.5 OFFSET (not implemented in louna but its easy to implemement)
;;query 10.2 Selecting Variables
#_(q {:q-out ["print"]}
[?nameX ?nameY ?nickY]
(:foaf.knows ?x ?y)
(:foaf.name ?x ?nameX)
(:foaf.name ?y ?nameY)
(:if (:foaf.nick ?y ?nickY))
(c (:rdf "web10.2")))
;;query 10.5 Asking "yes or no" questions
#_(println (? (q (:foaf.name ?x "PI:NAME:<NAME>END_PI")
(c (:rdf "web10.5")))))
#_(println (? (q (:foaf.name ?x "PI:NAME:<NAME>END_PI")
(:foaf.mbox ?x "PI:EMAIL:<EMAIL>END_PI")
(c (:rdf "web10.5")))))
;;query 11.4.1 bound
#_(q {:q-out ["print"]}
[?givenName]
(:foaf.givenName ?x ?givenName)
(:if (:dc.date ?x ?date))
(not (nil? ?date))
(c (:rdf "web11.4")))
;;query 11.4.1-q1
#_(q {:q-out ["print"]}
[?givenName]
(:foaf.givenName ?x ?givenName)
(:if (:dc.date ?x ?date))
(nil? ?date)
(c (:rdf "web11.4")))
;;louna dont support metadata URIs like
;;<mail:to..>
;;so the next queries a bit different
;;query 11.4.2
#_(q {:q-out ["print"]}
[?name ?mbox]
(:foaf.name ?x ?name)
(:foaf.mbox ?x ?mbox)
(not (prefixed? ?mbox))
(c (:rdf "web11.4.2")))
;;query 11.4.3 isBlank
#_(q {:q-out ["print"]}
[?given ?family]
(:a.annotates ?annot :sp.rdf-sparql-query)
(:dc.creator ?annot ?c)
(:if (:foaf.given ?c ?given)
(:foaf.family ?family))
(blank? ?c)
(c (:rdf "web11.4.3")))
;;query 11.4.5
#_(q {:q-out ["print"]}
[?name ?mbox]
(:foaf.name ?x ?name)
(:foaf.mbox ?x ?mbox)
(clojure.string/includes? ?mbox "@work.example")
(c (:rdf "web11.4.5")))
;;query 11.4.7
#_(q {:q-out ["print"]}
[?name ?shoeSize]
(:foaf.name ?x ?name)
(:eg.shoeSize ?x ?shoeSize)
(integer? ?shoeSize)
(c (:rdf "web11.4.7")))
;;query 11.4.10
#_(q {:q-out ["print"]}
[?name1 ?name2]
(:foaf.name ?x ?name1)
(:foaf.mbox ?x ?mbox1)
(:foaf.name ?y ?name2)
(:foaf.mbox ?y ?mbox2)
(and (= ?mbox1 ?mbox2)
(not= ?name1 ?name2))
(c (:rdf "web11.4.10")))
;;query 11.4.12
#_(q {:q-out ["print"]}
[?name]
(:foaf.name ?x ?name)
(re-matches #"[^a]li.+" ?name)
(c (:rdf "web11.4.12")))
|
[
{
"context": ";; Copyright (c) Stuart Sierra, 2012. All rights reserved. The use and\n;; distri",
"end": 30,
"score": 0.9998827576637268,
"start": 17,
"tag": "NAME",
"value": "Stuart Sierra"
},
{
"context": "or any other, from this software.\n\n(ns ^{:author \"Stuart Sierra\"\n :doc \"Read and track namespace information",
"end": 493,
"score": 0.9998875856399536,
"start": 480,
"tag": "NAME",
"value": "Stuart Sierra"
}
] |
server/target/clojure/tools/namespace/file.clj
|
OctavioBR/healthcheck
| 0 |
;; Copyright (c) Stuart Sierra, 2012. All rights reserved. The use and
;; distribution terms for this software are covered by the Eclipse
;; Public License 1.0 (http://opensource.org/licenses/eclipse-1.0.php)
;; which can be found in the file epl-v10.html at the root of this
;; distribution. By using this software in any fashion, you are
;; agreeing to be bound by the terms of this license. You must not
;; remove this notice, or any other, from this software.
(ns ^{:author "Stuart Sierra"
:doc "Read and track namespace information from files"}
clojure.tools.namespace.file
(:require [clojure.java.io :as io]
[clojure.tools.namespace.parse :as parse]
[clojure.tools.namespace.track :as track])
(:import (java.io PushbackReader)))
(defn read-file-ns-decl
"Attempts to read a (ns ...) declaration from file, and returns the
unevaluated form. Returns nil if read fails, or if the first form
is not a ns declaration."
[file]
(with-open [rdr (PushbackReader. (io/reader file))]
(parse/read-ns-decl rdr)))
(defn clojure-file?
"Returns true if the java.io.File represents a normal Clojure source
file."
[^java.io.File file]
(and (.isFile file)
(or
(.endsWith (.getName file) ".clj")
(.endsWith (.getName file) ".cljc"))))
;;; Dependency tracker
(defn- files-and-deps [files]
(reduce (fn [m file]
(if-let [decl (read-file-ns-decl file)]
(let [deps (parse/deps-from-ns-decl decl)
name (second decl)]
(-> m
(assoc-in [:depmap name] deps)
(assoc-in [:filemap file] name)))
m))
{} files))
(def ^:private merge-map (fnil merge {}))
(defn add-files
"Reads ns declarations from files; returns an updated dependency
tracker with those files added."
[tracker files]
(let [{:keys [depmap filemap]} (files-and-deps files)]
(-> tracker
(track/add depmap)
(update-in [::filemap] merge-map filemap))))
(defn remove-files
"Returns an updated dependency tracker with files removed. The files
must have been previously added with add-files."
[tracker files]
(-> tracker
(track/remove (keep (::filemap tracker {}) files))
(update-in [::filemap] #(apply dissoc % files))))
|
17914
|
;; Copyright (c) <NAME>, 2012. All rights reserved. The use and
;; distribution terms for this software are covered by the Eclipse
;; Public License 1.0 (http://opensource.org/licenses/eclipse-1.0.php)
;; which can be found in the file epl-v10.html at the root of this
;; distribution. By using this software in any fashion, you are
;; agreeing to be bound by the terms of this license. You must not
;; remove this notice, or any other, from this software.
(ns ^{:author "<NAME>"
:doc "Read and track namespace information from files"}
clojure.tools.namespace.file
(:require [clojure.java.io :as io]
[clojure.tools.namespace.parse :as parse]
[clojure.tools.namespace.track :as track])
(:import (java.io PushbackReader)))
(defn read-file-ns-decl
"Attempts to read a (ns ...) declaration from file, and returns the
unevaluated form. Returns nil if read fails, or if the first form
is not a ns declaration."
[file]
(with-open [rdr (PushbackReader. (io/reader file))]
(parse/read-ns-decl rdr)))
(defn clojure-file?
"Returns true if the java.io.File represents a normal Clojure source
file."
[^java.io.File file]
(and (.isFile file)
(or
(.endsWith (.getName file) ".clj")
(.endsWith (.getName file) ".cljc"))))
;;; Dependency tracker
(defn- files-and-deps [files]
(reduce (fn [m file]
(if-let [decl (read-file-ns-decl file)]
(let [deps (parse/deps-from-ns-decl decl)
name (second decl)]
(-> m
(assoc-in [:depmap name] deps)
(assoc-in [:filemap file] name)))
m))
{} files))
(def ^:private merge-map (fnil merge {}))
(defn add-files
"Reads ns declarations from files; returns an updated dependency
tracker with those files added."
[tracker files]
(let [{:keys [depmap filemap]} (files-and-deps files)]
(-> tracker
(track/add depmap)
(update-in [::filemap] merge-map filemap))))
(defn remove-files
"Returns an updated dependency tracker with files removed. The files
must have been previously added with add-files."
[tracker files]
(-> tracker
(track/remove (keep (::filemap tracker {}) files))
(update-in [::filemap] #(apply dissoc % files))))
| true |
;; Copyright (c) PI:NAME:<NAME>END_PI, 2012. All rights reserved. The use and
;; distribution terms for this software are covered by the Eclipse
;; Public License 1.0 (http://opensource.org/licenses/eclipse-1.0.php)
;; which can be found in the file epl-v10.html at the root of this
;; distribution. By using this software in any fashion, you are
;; agreeing to be bound by the terms of this license. You must not
;; remove this notice, or any other, from this software.
(ns ^{:author "PI:NAME:<NAME>END_PI"
:doc "Read and track namespace information from files"}
clojure.tools.namespace.file
(:require [clojure.java.io :as io]
[clojure.tools.namespace.parse :as parse]
[clojure.tools.namespace.track :as track])
(:import (java.io PushbackReader)))
(defn read-file-ns-decl
"Attempts to read a (ns ...) declaration from file, and returns the
unevaluated form. Returns nil if read fails, or if the first form
is not a ns declaration."
[file]
(with-open [rdr (PushbackReader. (io/reader file))]
(parse/read-ns-decl rdr)))
(defn clojure-file?
"Returns true if the java.io.File represents a normal Clojure source
file."
[^java.io.File file]
(and (.isFile file)
(or
(.endsWith (.getName file) ".clj")
(.endsWith (.getName file) ".cljc"))))
;;; Dependency tracker
(defn- files-and-deps [files]
(reduce (fn [m file]
(if-let [decl (read-file-ns-decl file)]
(let [deps (parse/deps-from-ns-decl decl)
name (second decl)]
(-> m
(assoc-in [:depmap name] deps)
(assoc-in [:filemap file] name)))
m))
{} files))
(def ^:private merge-map (fnil merge {}))
(defn add-files
"Reads ns declarations from files; returns an updated dependency
tracker with those files added."
[tracker files]
(let [{:keys [depmap filemap]} (files-and-deps files)]
(-> tracker
(track/add depmap)
(update-in [::filemap] merge-map filemap))))
(defn remove-files
"Returns an updated dependency tracker with files removed. The files
must have been previously added with add-files."
[tracker files]
(-> tracker
(track/remove (keep (::filemap tracker {}) files))
(update-in [::filemap] #(apply dissoc % files))))
|
[
{
"context": "tly loaded)\n#re/foo|bar/\n;=> (re \"foo|bar\")\n\n#nme(Will Richardson)\n#nme(Sarang Love Leehan)\n\n#(func 1 2 3)\n\nfoobar\n",
"end": 768,
"score": 0.999830424785614,
"start": 753,
"tag": "NAME",
"value": "Will Richardson"
},
{
"context": "r/\n;=> (re \"foo|bar\")\n\n#nme(Will Richardson)\n#nme(Sarang Love Leehan)\n\n#(func 1 2 3)\n\nfoobar\n\n/.*?/\n\n%foobar\n;=> (gens",
"end": 793,
"score": 0.9998788833618164,
"start": 775,
"tag": "NAME",
"value": "Sarang Love Leehan"
}
] |
example.clj
|
javanut13/slang
| 5 |
(module Main)
(require others/files/*)
(SomeModule.do-things-with [1 2 3])
(type Thing
name
limit
size)
(Thing "Something" 56 98)
(Thing name: "Something" limit: 12 size: 1)
(defn do-foo [a b & other *stuff]
; other is list of arguments
; stuff is map of keyword arguments that don't match a or b
)
; Implement the iterable protocol for the Thing type
(impl Iterable Thing map [it callback]
(map (:name it) callback))
; Implement many things at once
(impls Iterable Thing
(map [it callback] ...)
(reverse [it] ...))
; we can use map on my-thing because it now implements the protocol
(map my-thing println)
; will call the w macro (it must be currently loaded)
#re/foo|bar/
;=> (re "foo|bar")
#nme(Will Richardson)
#nme(Sarang Love Leehan)
#(func 1 2 3)
foobar
/.*?/
%foobar
;=> (gensym foobar)
(defmacro w [input]
`~(String.split input " "))
; TODO which separators are allowed?
#w<word word word word>
;=> ["word" "word" "word" "word"]
; For shortcut functions?
$(things %1)
; macro methods available:
read
eval
compile-only ; do something only at compile time (like define a binding)
module
require
; useful functions
Iterable.reverse, each, map, filter, reduce, reject
Accessable.get, get?, has-key?, keys, values, assoc
set!, swap! ; and friends?
|
55639
|
(module Main)
(require others/files/*)
(SomeModule.do-things-with [1 2 3])
(type Thing
name
limit
size)
(Thing "Something" 56 98)
(Thing name: "Something" limit: 12 size: 1)
(defn do-foo [a b & other *stuff]
; other is list of arguments
; stuff is map of keyword arguments that don't match a or b
)
; Implement the iterable protocol for the Thing type
(impl Iterable Thing map [it callback]
(map (:name it) callback))
; Implement many things at once
(impls Iterable Thing
(map [it callback] ...)
(reverse [it] ...))
; we can use map on my-thing because it now implements the protocol
(map my-thing println)
; will call the w macro (it must be currently loaded)
#re/foo|bar/
;=> (re "foo|bar")
#nme(<NAME>)
#nme(<NAME>)
#(func 1 2 3)
foobar
/.*?/
%foobar
;=> (gensym foobar)
(defmacro w [input]
`~(String.split input " "))
; TODO which separators are allowed?
#w<word word word word>
;=> ["word" "word" "word" "word"]
; For shortcut functions?
$(things %1)
; macro methods available:
read
eval
compile-only ; do something only at compile time (like define a binding)
module
require
; useful functions
Iterable.reverse, each, map, filter, reduce, reject
Accessable.get, get?, has-key?, keys, values, assoc
set!, swap! ; and friends?
| true |
(module Main)
(require others/files/*)
(SomeModule.do-things-with [1 2 3])
(type Thing
name
limit
size)
(Thing "Something" 56 98)
(Thing name: "Something" limit: 12 size: 1)
(defn do-foo [a b & other *stuff]
; other is list of arguments
; stuff is map of keyword arguments that don't match a or b
)
; Implement the iterable protocol for the Thing type
(impl Iterable Thing map [it callback]
(map (:name it) callback))
; Implement many things at once
(impls Iterable Thing
(map [it callback] ...)
(reverse [it] ...))
; we can use map on my-thing because it now implements the protocol
(map my-thing println)
; will call the w macro (it must be currently loaded)
#re/foo|bar/
;=> (re "foo|bar")
#nme(PI:NAME:<NAME>END_PI)
#nme(PI:NAME:<NAME>END_PI)
#(func 1 2 3)
foobar
/.*?/
%foobar
;=> (gensym foobar)
(defmacro w [input]
`~(String.split input " "))
; TODO which separators are allowed?
#w<word word word word>
;=> ["word" "word" "word" "word"]
; For shortcut functions?
$(things %1)
; macro methods available:
read
eval
compile-only ; do something only at compile time (like define a binding)
module
require
; useful functions
Iterable.reverse, each, map, filter, reduce, reject
Accessable.get, get?, has-key?, keys, values, assoc
set!, swap! ; and friends?
|
[
{
"context": "ns under the License.\n;;\n;; Copyright © 2013-2022, Kenneth Leung. All rights reserved.\n\n(ns czlab.bixby.demo.flows",
"end": 597,
"score": 0.999852180480957,
"start": 584,
"tag": "NAME",
"value": "Kenneth Leung"
}
] |
src/main/clojure/czlab/bixby/demo/flows/core.clj
|
llnek/skaro
| 0 |
;; Licensed under the Apache License, Version 2.0 (the "License");
;; you may not use this file except in compliance with the License.
;; You may obtain a copy of the License at
;;
;; http://www.apache.org/licenses/LICENSE-2.0
;;
;; Unless required by applicable law or agreed to in writing, software
;; distributed under the License is distributed on an "AS IS" BASIS,
;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
;; See the License for the specific language governing permissions and
;; limitations under the License.
;;
;; Copyright © 2013-2022, Kenneth Leung. All rights reserved.
(ns czlab.bixby.demo.flows.core
(:require [czlab.flux.core :as wf]
[czlab.basal.core :as c]
[czlab.bixby.core :as b]))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;(set! *warn-on-reflection* true)
;; What this example demostrates is a webservice which takes in some user info, authenticate the
;; user, then exec some EC2 operations such as granting permission to access an AMI, and
;; permission to access/snapshot a given volume. When all is done, a reply will be sent back
;; to the user.
;;
;; This flow showcases the use of conditional activities such a choices & decision.
;; Shows how to loop using wloop, and how to use fork & join.
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn- perf-auth-mtd
[t]
(case t
"facebook" #(c/do->nil %2 (c/prn!! "-> use facebook"))
"google+" #(c/do->nil %2 (c/prn!! "-> use google+"))
"openid" #(c/do->nil % (c/prn!! "-> use open-id"))
#(c/do->nil % (c/prn!! "-> use internal db"))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;step1. choose a method to authenticate the user
;;here, we'll use choice<> to pick which method
(defn- auth-user
[]
;; hard code to use facebook in this example, but you
;; could check some data from the job,
;; such as URI/Query params
;; and decide on which value to switch on
(wf/choice<>
#(let [_ %]
(c/prn!! "step(1): choose an auth-method") "facebook")
"facebook" (perf-auth-mtd "facebook")
"google+" (perf-auth-mtd "google+")
"openid" (perf-auth-mtd "openid")
:default
(perf-auth-mtd "db")))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;step2
(c/def- get-profile
#(c/do->nil %2 (c/prn!! "step(2): get user profile\n%s" "->user is superuser")))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;step3 we are going to dummy up a retry of 2 times to simulate network/operation
;;issues encountered with EC2 while trying to grant permission
;;so here , we are using a wloop to do that
(c/def- prov-ami
(wf/while<>
#(let [job %
v (c/getv job :ami_count)
c (if (some? v) (inc v) 0)]
(c/setv job :ami_count c)
(< c 3))
#(c/do->nil
(let [job %2
v (c/getv job :ami_count)
c (if (some? v) v 0)]
(if (== 2 c)
(c/prn!! "step(3): granted permission for user %s"
"to launch this ami(id)")
(c/prn!! "step(3): failed to contact %s%s%s"
"ami- server, will retry again (" c ")"))))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;step3'. we are going to dummy up a retry of 2 times to simulate network/operation
;;issues encountered with EC2 while trying to grant volume permission
;;so here , we are using a wloop to do that
(c/def- prov-vol
(wf/while<>
#(let [job %
v (c/getv job :vol_count)
c (if (some? v) (inc v) 0)]
(c/setv job :vol_count c)
(< c 3))
#(c/do->nil
(let [job %2
v (c/getv job :vol_count)
c (if (some? v) v 0)]
(if (== c 2)
(c/prn!! "step(3'): granted permission for user %s"
"to access/snapshot this volume(id)")
(c/prn!! "step(3'): failed to contact vol- server, %s%s%s"
"will retry again (" c ")"))))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;step4. pretend to write stuff to db. again, we are going to dummy up the case
;;where the db write fails a couple of times
;;so again , we are using a wloop to do that
(c/def- save-sdb
(wf/while<>
#(let [job %
v (c/getv job :wdb_count)
c (if (some? v) (inc v) 0)]
(c/setv job :wdb_count c)
(< c 3))
#(c/do->nil
(let [job %2
v (c/getv job :wdb_count)
c (if (some? v) v 0)]
(if (== c 2)
(c/prn!! "step(4): wrote stuff to database successfully")
(c/prn!! "step(4): failed to contact db- server, %s%s%s"
"will retry again (" c ")"))))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;this is the step where it will do the provisioning of the AMI and the EBS volume
;;in parallel. To do that, we use a split-we want to fork off both tasks in parallel. Since
;;we don't want to continue until both provisioning tasks are done. we use a AndJoin to hold/freeze
;;the workflow
(c/def- provision
(wf/group<> (wf/split-join<> [:type :and] prov-ami prov-vol) save-sdb))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; this is the final step, after all the work are done, reply back to the caller.
;; like, returning a 200-OK
(c/def- reply-user
#(c/do->nil
(let [job %2]
(c/prn!! "step(5): we'd probably return a 200 OK %s"
"back to caller here"))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(c/def- error-user
#(c/do->nil
(let [job %2]
(c/prn!! "step(5): we'd probably return a 200 OK %s"
"but with errors"))))
;; do a final test to see what sort of response should we send back to the user.
(c/def- final-test
(wf/decision<> #(c/do->true %) reply-user error-user))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn demo
[evt]
;; this workflow is a small (4 step) workflow, with the 3rd step (Provision) being
;; a split, which forks off more steps in parallel.
(let [p (c/parent evt)
s (c/parent p)
c (b/scheduler s)]
(wf/exec (wf/workflow*
(wf/group<> (auth-user)
get-profile provision final-test)) (wf/job<> c nil evt))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;EOF
|
28518
|
;; Licensed under the Apache License, Version 2.0 (the "License");
;; you may not use this file except in compliance with the License.
;; You may obtain a copy of the License at
;;
;; http://www.apache.org/licenses/LICENSE-2.0
;;
;; Unless required by applicable law or agreed to in writing, software
;; distributed under the License is distributed on an "AS IS" BASIS,
;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
;; See the License for the specific language governing permissions and
;; limitations under the License.
;;
;; Copyright © 2013-2022, <NAME>. All rights reserved.
(ns czlab.bixby.demo.flows.core
(:require [czlab.flux.core :as wf]
[czlab.basal.core :as c]
[czlab.bixby.core :as b]))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;(set! *warn-on-reflection* true)
;; What this example demostrates is a webservice which takes in some user info, authenticate the
;; user, then exec some EC2 operations such as granting permission to access an AMI, and
;; permission to access/snapshot a given volume. When all is done, a reply will be sent back
;; to the user.
;;
;; This flow showcases the use of conditional activities such a choices & decision.
;; Shows how to loop using wloop, and how to use fork & join.
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn- perf-auth-mtd
[t]
(case t
"facebook" #(c/do->nil %2 (c/prn!! "-> use facebook"))
"google+" #(c/do->nil %2 (c/prn!! "-> use google+"))
"openid" #(c/do->nil % (c/prn!! "-> use open-id"))
#(c/do->nil % (c/prn!! "-> use internal db"))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;step1. choose a method to authenticate the user
;;here, we'll use choice<> to pick which method
(defn- auth-user
[]
;; hard code to use facebook in this example, but you
;; could check some data from the job,
;; such as URI/Query params
;; and decide on which value to switch on
(wf/choice<>
#(let [_ %]
(c/prn!! "step(1): choose an auth-method") "facebook")
"facebook" (perf-auth-mtd "facebook")
"google+" (perf-auth-mtd "google+")
"openid" (perf-auth-mtd "openid")
:default
(perf-auth-mtd "db")))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;step2
(c/def- get-profile
#(c/do->nil %2 (c/prn!! "step(2): get user profile\n%s" "->user is superuser")))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;step3 we are going to dummy up a retry of 2 times to simulate network/operation
;;issues encountered with EC2 while trying to grant permission
;;so here , we are using a wloop to do that
(c/def- prov-ami
(wf/while<>
#(let [job %
v (c/getv job :ami_count)
c (if (some? v) (inc v) 0)]
(c/setv job :ami_count c)
(< c 3))
#(c/do->nil
(let [job %2
v (c/getv job :ami_count)
c (if (some? v) v 0)]
(if (== 2 c)
(c/prn!! "step(3): granted permission for user %s"
"to launch this ami(id)")
(c/prn!! "step(3): failed to contact %s%s%s"
"ami- server, will retry again (" c ")"))))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;step3'. we are going to dummy up a retry of 2 times to simulate network/operation
;;issues encountered with EC2 while trying to grant volume permission
;;so here , we are using a wloop to do that
(c/def- prov-vol
(wf/while<>
#(let [job %
v (c/getv job :vol_count)
c (if (some? v) (inc v) 0)]
(c/setv job :vol_count c)
(< c 3))
#(c/do->nil
(let [job %2
v (c/getv job :vol_count)
c (if (some? v) v 0)]
(if (== c 2)
(c/prn!! "step(3'): granted permission for user %s"
"to access/snapshot this volume(id)")
(c/prn!! "step(3'): failed to contact vol- server, %s%s%s"
"will retry again (" c ")"))))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;step4. pretend to write stuff to db. again, we are going to dummy up the case
;;where the db write fails a couple of times
;;so again , we are using a wloop to do that
(c/def- save-sdb
(wf/while<>
#(let [job %
v (c/getv job :wdb_count)
c (if (some? v) (inc v) 0)]
(c/setv job :wdb_count c)
(< c 3))
#(c/do->nil
(let [job %2
v (c/getv job :wdb_count)
c (if (some? v) v 0)]
(if (== c 2)
(c/prn!! "step(4): wrote stuff to database successfully")
(c/prn!! "step(4): failed to contact db- server, %s%s%s"
"will retry again (" c ")"))))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;this is the step where it will do the provisioning of the AMI and the EBS volume
;;in parallel. To do that, we use a split-we want to fork off both tasks in parallel. Since
;;we don't want to continue until both provisioning tasks are done. we use a AndJoin to hold/freeze
;;the workflow
(c/def- provision
(wf/group<> (wf/split-join<> [:type :and] prov-ami prov-vol) save-sdb))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; this is the final step, after all the work are done, reply back to the caller.
;; like, returning a 200-OK
(c/def- reply-user
#(c/do->nil
(let [job %2]
(c/prn!! "step(5): we'd probably return a 200 OK %s"
"back to caller here"))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(c/def- error-user
#(c/do->nil
(let [job %2]
(c/prn!! "step(5): we'd probably return a 200 OK %s"
"but with errors"))))
;; do a final test to see what sort of response should we send back to the user.
(c/def- final-test
(wf/decision<> #(c/do->true %) reply-user error-user))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn demo
[evt]
;; this workflow is a small (4 step) workflow, with the 3rd step (Provision) being
;; a split, which forks off more steps in parallel.
(let [p (c/parent evt)
s (c/parent p)
c (b/scheduler s)]
(wf/exec (wf/workflow*
(wf/group<> (auth-user)
get-profile provision final-test)) (wf/job<> c nil evt))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;EOF
| true |
;; Licensed under the Apache License, Version 2.0 (the "License");
;; you may not use this file except in compliance with the License.
;; You may obtain a copy of the License at
;;
;; http://www.apache.org/licenses/LICENSE-2.0
;;
;; Unless required by applicable law or agreed to in writing, software
;; distributed under the License is distributed on an "AS IS" BASIS,
;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
;; See the License for the specific language governing permissions and
;; limitations under the License.
;;
;; Copyright © 2013-2022, PI:NAME:<NAME>END_PI. All rights reserved.
(ns czlab.bixby.demo.flows.core
(:require [czlab.flux.core :as wf]
[czlab.basal.core :as c]
[czlab.bixby.core :as b]))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;(set! *warn-on-reflection* true)
;; What this example demostrates is a webservice which takes in some user info, authenticate the
;; user, then exec some EC2 operations such as granting permission to access an AMI, and
;; permission to access/snapshot a given volume. When all is done, a reply will be sent back
;; to the user.
;;
;; This flow showcases the use of conditional activities such a choices & decision.
;; Shows how to loop using wloop, and how to use fork & join.
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn- perf-auth-mtd
[t]
(case t
"facebook" #(c/do->nil %2 (c/prn!! "-> use facebook"))
"google+" #(c/do->nil %2 (c/prn!! "-> use google+"))
"openid" #(c/do->nil % (c/prn!! "-> use open-id"))
#(c/do->nil % (c/prn!! "-> use internal db"))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;step1. choose a method to authenticate the user
;;here, we'll use choice<> to pick which method
(defn- auth-user
[]
;; hard code to use facebook in this example, but you
;; could check some data from the job,
;; such as URI/Query params
;; and decide on which value to switch on
(wf/choice<>
#(let [_ %]
(c/prn!! "step(1): choose an auth-method") "facebook")
"facebook" (perf-auth-mtd "facebook")
"google+" (perf-auth-mtd "google+")
"openid" (perf-auth-mtd "openid")
:default
(perf-auth-mtd "db")))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;step2
(c/def- get-profile
#(c/do->nil %2 (c/prn!! "step(2): get user profile\n%s" "->user is superuser")))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;step3 we are going to dummy up a retry of 2 times to simulate network/operation
;;issues encountered with EC2 while trying to grant permission
;;so here , we are using a wloop to do that
(c/def- prov-ami
(wf/while<>
#(let [job %
v (c/getv job :ami_count)
c (if (some? v) (inc v) 0)]
(c/setv job :ami_count c)
(< c 3))
#(c/do->nil
(let [job %2
v (c/getv job :ami_count)
c (if (some? v) v 0)]
(if (== 2 c)
(c/prn!! "step(3): granted permission for user %s"
"to launch this ami(id)")
(c/prn!! "step(3): failed to contact %s%s%s"
"ami- server, will retry again (" c ")"))))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;step3'. we are going to dummy up a retry of 2 times to simulate network/operation
;;issues encountered with EC2 while trying to grant volume permission
;;so here , we are using a wloop to do that
(c/def- prov-vol
(wf/while<>
#(let [job %
v (c/getv job :vol_count)
c (if (some? v) (inc v) 0)]
(c/setv job :vol_count c)
(< c 3))
#(c/do->nil
(let [job %2
v (c/getv job :vol_count)
c (if (some? v) v 0)]
(if (== c 2)
(c/prn!! "step(3'): granted permission for user %s"
"to access/snapshot this volume(id)")
(c/prn!! "step(3'): failed to contact vol- server, %s%s%s"
"will retry again (" c ")"))))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;step4. pretend to write stuff to db. again, we are going to dummy up the case
;;where the db write fails a couple of times
;;so again , we are using a wloop to do that
(c/def- save-sdb
(wf/while<>
#(let [job %
v (c/getv job :wdb_count)
c (if (some? v) (inc v) 0)]
(c/setv job :wdb_count c)
(< c 3))
#(c/do->nil
(let [job %2
v (c/getv job :wdb_count)
c (if (some? v) v 0)]
(if (== c 2)
(c/prn!! "step(4): wrote stuff to database successfully")
(c/prn!! "step(4): failed to contact db- server, %s%s%s"
"will retry again (" c ")"))))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;this is the step where it will do the provisioning of the AMI and the EBS volume
;;in parallel. To do that, we use a split-we want to fork off both tasks in parallel. Since
;;we don't want to continue until both provisioning tasks are done. we use a AndJoin to hold/freeze
;;the workflow
(c/def- provision
(wf/group<> (wf/split-join<> [:type :and] prov-ami prov-vol) save-sdb))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; this is the final step, after all the work are done, reply back to the caller.
;; like, returning a 200-OK
(c/def- reply-user
#(c/do->nil
(let [job %2]
(c/prn!! "step(5): we'd probably return a 200 OK %s"
"back to caller here"))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(c/def- error-user
#(c/do->nil
(let [job %2]
(c/prn!! "step(5): we'd probably return a 200 OK %s"
"but with errors"))))
;; do a final test to see what sort of response should we send back to the user.
(c/def- final-test
(wf/decision<> #(c/do->true %) reply-user error-user))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn demo
[evt]
;; this workflow is a small (4 step) workflow, with the 3rd step (Provision) being
;; a split, which forks off more steps in parallel.
(let [p (c/parent evt)
s (c/parent p)
c (b/scheduler s)]
(wf/exec (wf/workflow*
(wf/group<> (auth-user)
get-profile provision final-test)) (wf/job<> c nil evt))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;EOF
|
[
{
"context": ";;;; Copyright 2015 Peter Stephens. All Rights Reserved.\r\n;;;;\r\n;;;; Licensed unde",
"end": 36,
"score": 0.9996947050094604,
"start": 22,
"tag": "NAME",
"value": "Peter Stephens"
}
] |
src/common/bible/io.cljs
|
pstephens/kingjames.bible
| 23 |
;;;; Copyright 2015 Peter Stephens. All Rights Reserved.
;;;;
;;;; Licensed under the Apache License, Version 2.0 (the "License");
;;;; you may not use this file except in compliance with the License.
;;;; You may obtain a copy of the License at
;;;;
;;;; http://www.apache.org/licenses/LICENSE-2.0
;;;;
;;;; Unless required by applicable law or agreed to in writing, software
;;;; distributed under the License is distributed on an "AS IS" BASIS,
;;;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
;;;; See the License for the specific language governing permissions and
;;;; limitations under the License.
(ns common.bible.io)
(def verse-partition-size 781)
(defn- get-chapter-count-for-books [m]
(->> m
(map :chapters)
(map count)
(vec)))
(defn- get-verse-count-for-chapters [m]
(->> m
(mapcat :chapters)
(map :verses)
(map count)
(vec)))
(defn- filtered-chapter-indexes-to-set [m f]
(->> m
(mapcat :chapters)
(keep-indexed #(if (f %2) %1))
(set)))
(defn normalized->persisted-bible [m]
{:books (get-chapter-count-for-books m)
:chapters (get-verse-count-for-chapters m)
:subtitle (filtered-chapter-indexes-to-set m :subtitle)
:postscript (filtered-chapter-indexes-to-set m :postscript)
:partition-size verse-partition-size})
(defn normalized->persisted-verses [m]
(->> m
(mapcat :chapters)
(mapcat :verses)
(vec)))
|
12987
|
;;;; Copyright 2015 <NAME>. All Rights Reserved.
;;;;
;;;; Licensed under the Apache License, Version 2.0 (the "License");
;;;; you may not use this file except in compliance with the License.
;;;; You may obtain a copy of the License at
;;;;
;;;; http://www.apache.org/licenses/LICENSE-2.0
;;;;
;;;; Unless required by applicable law or agreed to in writing, software
;;;; distributed under the License is distributed on an "AS IS" BASIS,
;;;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
;;;; See the License for the specific language governing permissions and
;;;; limitations under the License.
(ns common.bible.io)
(def verse-partition-size 781)
(defn- get-chapter-count-for-books [m]
(->> m
(map :chapters)
(map count)
(vec)))
(defn- get-verse-count-for-chapters [m]
(->> m
(mapcat :chapters)
(map :verses)
(map count)
(vec)))
(defn- filtered-chapter-indexes-to-set [m f]
(->> m
(mapcat :chapters)
(keep-indexed #(if (f %2) %1))
(set)))
(defn normalized->persisted-bible [m]
{:books (get-chapter-count-for-books m)
:chapters (get-verse-count-for-chapters m)
:subtitle (filtered-chapter-indexes-to-set m :subtitle)
:postscript (filtered-chapter-indexes-to-set m :postscript)
:partition-size verse-partition-size})
(defn normalized->persisted-verses [m]
(->> m
(mapcat :chapters)
(mapcat :verses)
(vec)))
| true |
;;;; Copyright 2015 PI:NAME:<NAME>END_PI. All Rights Reserved.
;;;;
;;;; Licensed under the Apache License, Version 2.0 (the "License");
;;;; you may not use this file except in compliance with the License.
;;;; You may obtain a copy of the License at
;;;;
;;;; http://www.apache.org/licenses/LICENSE-2.0
;;;;
;;;; Unless required by applicable law or agreed to in writing, software
;;;; distributed under the License is distributed on an "AS IS" BASIS,
;;;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
;;;; See the License for the specific language governing permissions and
;;;; limitations under the License.
(ns common.bible.io)
(def verse-partition-size 781)
(defn- get-chapter-count-for-books [m]
(->> m
(map :chapters)
(map count)
(vec)))
(defn- get-verse-count-for-chapters [m]
(->> m
(mapcat :chapters)
(map :verses)
(map count)
(vec)))
(defn- filtered-chapter-indexes-to-set [m f]
(->> m
(mapcat :chapters)
(keep-indexed #(if (f %2) %1))
(set)))
(defn normalized->persisted-bible [m]
{:books (get-chapter-count-for-books m)
:chapters (get-verse-count-for-chapters m)
:subtitle (filtered-chapter-indexes-to-set m :subtitle)
:postscript (filtered-chapter-indexes-to-set m :postscript)
:partition-size verse-partition-size})
(defn normalized->persisted-verses [m]
(->> m
(mapcat :chapters)
(mapcat :verses)
(vec)))
|
[
{
"context": "]\n (t/is (= 48 (count picasso)))\n (t/is (= \"Pablo\" (:http://xmlns.com/foaf/0.1/givenName picasso)))",
"end": 15020,
"score": 0.9929696321487427,
"start": 15015,
"tag": "NAME",
"value": "Pablo"
},
{
"context": "ect-store)\n ivan {:crux.db/id :ivan :name \"Ivan\"}\n\n v1-ivan (assoc ivan :version 1)\n ",
"end": 15846,
"score": 0.9968289136886597,
"start": 15842,
"tag": "NAME",
"value": "Ivan"
},
{
"context": "og f/*kv*)\n ivan {:crux.db/id :ivan :name \"Ivan\"}\n start-valid-time #inst \"2019\"\n n",
"end": 20671,
"score": 0.9959139823913574,
"start": 20667,
"tag": "NAME",
"value": "Ivan"
},
{
"context": "og f/*kv*)\n ivan {:crux.db/id :ivan :name \"Ivan\"}\n\n tx1-ivan (assoc ivan :version 1)\n ",
"end": 22297,
"score": 0.999322772026062,
"start": 22293,
"tag": "NAME",
"value": "Ivan"
},
{
"context": "ion 2)\n tx2-petr {:crux.db/id :petr :name \"Petr\"}\n tx2-valid-time #inst \"2018-11-27\"\n ",
"end": 22615,
"score": 0.9993245005607605,
"start": 22611,
"tag": "NAME",
"value": "Petr"
}
] |
test/crux/tx_test.clj
|
souenzzo/crux
| 0 |
(ns crux.tx-test
(:require [clojure.test :as t]
[clojure.test.check.clojure-test :as tcct]
[clojure.test.check.generators :as gen]
[clojure.test.check.properties :as prop]
[clojure.java.io :as io]
[clojure.spec.alpha :as s]
[clojure.set :as set]
[crux.byte-utils :as bu]
[crux.codec :as c]
[crux.db :as db]
[crux.index :as idx]
[crux.fixtures :as f]
[crux.tx :as tx]
[crux.kv :as kv]
[crux.lru :as lru]
[crux.memory :as mem]
[crux.morton :as morton]
[crux.rdf :as rdf]
[crux.query :as q]
[taoensso.nippy :as nippy]
[crux.bootstrap :as b])
(:import java.util.Date))
(t/use-fixtures :each f/with-each-kv-store-implementation f/with-kv-store f/with-silent-test-check)
(defn load-ntriples-example [resource]
(with-open [in (io/input-stream (io/resource resource))]
(->> (rdf/ntriples-seq in)
(rdf/statements->maps)
(map #(rdf/use-default-language % :en))
(#(rdf/maps-by-id %)))))
;; TODO: This is a large, useful, test that exercises many parts, but
;; might be better split up.
(t/deftest test-can-index-tx-ops-acceptance-test
(let [object-store (f/kv-object-store-w-cache f/*kv*)
tx-log (f/kv-tx-log f/*kv* object-store)
indexer (tx/->KvIndexer f/*kv* tx-log object-store)
picasso (-> (load-ntriples-example "crux/Pablo_Picasso.ntriples")
:http://dbpedia.org/resource/Pablo_Picasso)
content-hash (c/new-id picasso)
valid-time #inst "2018-05-21"
eid (c/new-id :http://dbpedia.org/resource/Pablo_Picasso)
{:crux.tx/keys [tx-time tx-id]}
@(db/submit-tx tx-log [[:crux.tx/put picasso valid-time]])
expected-entities [(c/map->EntityTx {:eid eid
:content-hash content-hash
:vt valid-time
:tt tx-time
:tx-id tx-id})]]
(with-open [snapshot (kv/new-snapshot f/*kv*)]
(t/testing "can see entity at transact and valid time"
(t/is (= expected-entities
(idx/entities-at snapshot [:http://dbpedia.org/resource/Pablo_Picasso] tx-time tx-time)))
(t/is (= expected-entities
(idx/all-entities snapshot tx-time tx-time))))
(t/testing "cannot see entity before valid or transact time"
(t/is (empty? (idx/entities-at snapshot [:http://dbpedia.org/resource/Pablo_Picasso] #inst "2018-05-20" tx-time)))
(t/is (empty? (idx/entities-at snapshot [:http://dbpedia.org/resource/Pablo_Picasso] tx-time #inst "2018-05-20")))
(t/is (empty? (idx/all-entities snapshot #inst "2018-05-20" tx-time)))
(t/is (empty? (idx/all-entities snapshot tx-time #inst "2018-05-20"))))
(t/testing "can see entity after valid or transact time"
(t/is (some? (idx/entities-at snapshot [:http://dbpedia.org/resource/Pablo_Picasso] #inst "2018-05-22" tx-time)))
(t/is (some? (idx/entities-at snapshot [:http://dbpedia.org/resource/Pablo_Picasso] tx-time tx-time))))
(t/testing "can see entity history"
(t/is (= [(c/map->EntityTx {:eid eid
:content-hash content-hash
:vt valid-time
:tt tx-time
:tx-id tx-id})]
(idx/entity-history snapshot :http://dbpedia.org/resource/Pablo_Picasso)))))
(t/testing "add new version of entity in the past"
(let [new-picasso (assoc picasso :foo :bar)
new-content-hash (c/new-id new-picasso)
new-valid-time #inst "2018-05-20"
{new-tx-time :crux.tx/tx-time
new-tx-id :crux.tx/tx-id}
@(db/submit-tx tx-log [[:crux.tx/put new-picasso new-valid-time]])]
(with-open [snapshot (kv/new-snapshot f/*kv*)]
(t/is (= [(c/map->EntityTx {:eid eid
:content-hash new-content-hash
:vt new-valid-time
:tt new-tx-time
:tx-id new-tx-id})]
(idx/entities-at snapshot [:http://dbpedia.org/resource/Pablo_Picasso] new-valid-time new-tx-time)))
(t/is (= [(c/map->EntityTx {:eid eid
:content-hash new-content-hash
:vt new-valid-time
:tt new-tx-time
:tx-id new-tx-id})] (idx/all-entities snapshot new-valid-time new-tx-time)))
(t/is (empty? (idx/entities-at snapshot [:http://dbpedia.org/resource/Pablo_Picasso] #inst "2018-05-20" #inst "2018-05-21"))))))
(t/testing "add new version of entity in the future"
(let [new-picasso (assoc picasso :baz :boz)
new-content-hash (c/new-id new-picasso)
new-valid-time #inst "2018-05-22"
{new-tx-time :crux.tx/tx-time
new-tx-id :crux.tx/tx-id}
@(db/submit-tx tx-log [[:crux.tx/put new-picasso new-valid-time]])]
(with-open [snapshot (kv/new-snapshot f/*kv*)]
(t/is (= [(c/map->EntityTx {:eid eid
:content-hash new-content-hash
:vt new-valid-time
:tt new-tx-time
:tx-id new-tx-id})]
(idx/entities-at snapshot [:http://dbpedia.org/resource/Pablo_Picasso] new-valid-time new-tx-time)))
(t/is (= [(c/map->EntityTx {:eid eid
:content-hash content-hash
:vt valid-time
:tt tx-time
:tx-id tx-id})]
(idx/entities-at snapshot [:http://dbpedia.org/resource/Pablo_Picasso] new-valid-time tx-time)))
(t/is (= [(c/map->EntityTx {:eid eid
:content-hash new-content-hash
:vt new-valid-time
:tt new-tx-time
:tx-id new-tx-id})] (idx/all-entities snapshot new-valid-time new-tx-time))))
(t/testing "can correct entity at earlier valid time"
(let [new-picasso (assoc picasso :bar :foo)
new-content-hash (c/new-id new-picasso)
prev-tx-time new-tx-time
prev-tx-id new-tx-id
new-valid-time #inst "2018-05-22"
{new-tx-time :crux.tx/tx-time
new-tx-id :crux.tx/tx-id}
@(db/submit-tx tx-log [[:crux.tx/put new-picasso new-valid-time]])]
(with-open [snapshot (kv/new-snapshot f/*kv*)]
(t/is (= [(c/map->EntityTx {:eid eid
:content-hash new-content-hash
:vt new-valid-time
:tt new-tx-time
:tx-id new-tx-id})]
(idx/entities-at snapshot [:http://dbpedia.org/resource/Pablo_Picasso] new-valid-time new-tx-time)))
(t/is (= [(c/map->EntityTx {:eid eid
:content-hash new-content-hash
:vt new-valid-time
:tt new-tx-time
:tx-id new-tx-id})] (idx/all-entities snapshot new-valid-time new-tx-time)))
(t/is (= prev-tx-id (-> (idx/entities-at snapshot [:http://dbpedia.org/resource/Pablo_Picasso] prev-tx-time prev-tx-time)
(first)
:tx-id))))
(t/testing "compare and set does nothing with wrong content hash"
(let [old-picasso (assoc picasso :baz :boz)
{cas-failure-tx-time :crux.tx/tx-time}
@(db/submit-tx tx-log [[:crux.tx/cas old-picasso new-picasso new-valid-time]])]
(t/is (= cas-failure-tx-time (tx/await-tx-time indexer cas-failure-tx-time {:crux.tx-log/await-tx-timeout 1000})))
(with-open [snapshot (kv/new-snapshot f/*kv*)]
(t/is (= [(c/map->EntityTx {:eid eid
:content-hash new-content-hash
:vt new-valid-time
:tt new-tx-time
:tx-id new-tx-id})]
(idx/entities-at snapshot [:http://dbpedia.org/resource/Pablo_Picasso] new-valid-time cas-failure-tx-time))))))
(t/testing "compare and set updates with correct content hash"
(let [old-picasso new-picasso
new-picasso (assoc old-picasso :baz :boz)
new-content-hash (c/new-id new-picasso)
{new-tx-time :crux.tx/tx-time
new-tx-id :crux.tx/tx-id}
@(db/submit-tx tx-log [[:crux.tx/cas old-picasso new-picasso new-valid-time]])]
(t/is (= new-tx-time (tx/await-tx-time indexer new-tx-time {:crux.tx-log/await-tx-timeout 1000})))
(with-open [snapshot (kv/new-snapshot f/*kv*)]
(t/is (= [(c/map->EntityTx {:eid eid
:content-hash new-content-hash
:vt new-valid-time
:tt new-tx-time
:tx-id new-tx-id})]
(idx/entities-at snapshot [:http://dbpedia.org/resource/Pablo_Picasso] new-valid-time new-tx-time))))))
(t/testing "compare and set can update non existing nil entity"
(let [new-eid (c/new-id :http://dbpedia.org/resource/Pablo2)
new-picasso (assoc new-picasso :crux.db/id :http://dbpedia.org/resource/Pablo2)
new-content-hash (c/new-id new-picasso)
{new-tx-time :crux.tx/tx-time
new-tx-id :crux.tx/tx-id}
@(db/submit-tx tx-log [[:crux.tx/cas nil new-picasso new-valid-time]])]
(t/is (= new-tx-time (tx/await-tx-time indexer new-tx-time {:crux.tx-log/await-tx-timeout 1000})))
(with-open [snapshot (kv/new-snapshot f/*kv*)]
(t/is (= [(c/map->EntityTx {:eid new-eid
:content-hash new-content-hash
:vt new-valid-time
:tt new-tx-time
:tx-id new-tx-id})]
(idx/entities-at snapshot [:http://dbpedia.org/resource/Pablo2] new-valid-time new-tx-time))))))))
(t/testing "can delete entity"
(let [new-valid-time #inst "2018-05-23"
{new-tx-time :crux.tx/tx-time
new-tx-id :crux.tx/tx-id}
@(db/submit-tx tx-log [[:crux.tx/delete :http://dbpedia.org/resource/Pablo_Picasso new-valid-time]])]
(with-open [snapshot (kv/new-snapshot f/*kv*)]
(t/is (empty? (idx/entities-at snapshot [:http://dbpedia.org/resource/Pablo_Picasso] new-valid-time new-tx-time)))
(t/testing "first version of entity is still visible in the past"
(t/is (= tx-id (-> (idx/entities-at snapshot [:http://dbpedia.org/resource/Pablo_Picasso] valid-time new-tx-time)
(first)
:tx-id)))))))))
(t/testing "can retrieve history of entity"
(with-open [snapshot (kv/new-snapshot f/*kv*)]
(let [picasso-history (idx/entity-history snapshot :http://dbpedia.org/resource/Pablo_Picasso)]
(t/is (= 6 (count (map :content-hash picasso-history))))
(with-open [i (kv/new-iterator snapshot)]
(doseq [{:keys [content-hash]} picasso-history
:when (not (= (c/new-id nil) content-hash))
:let [version-k (c/encode-attribute+entity+content-hash+value-key-to
nil
(c/->id-buffer :http://xmlns.com/foaf/0.1/givenName)
(c/->id-buffer :http://dbpedia.org/resource/Pablo_Picasso)
(c/->id-buffer content-hash)
(c/->value-buffer "Pablo"))]]
(t/is (kv/get-value snapshot version-k)))))))
(t/testing "can evict entity"
(let [new-valid-time #inst "2018-05-23"
; read documents before transaction to populate the cache
_ (with-open [snapshot (kv/new-snapshot f/*kv*)]
(let [picasso-history (idx/entity-history snapshot :http://dbpedia.org/resource/Pablo_Picasso)]
(db/get-objects object-store snapshot (keep :content-hash picasso-history))))
{new-tx-time :crux.tx/tx-time
new-tx-id :crux.tx/tx-id}
@(db/submit-tx tx-log [[:crux.tx/evict :http://dbpedia.org/resource/Pablo_Picasso #inst "1970" new-valid-time]])]
(with-open [snapshot (kv/new-snapshot f/*kv*)]
(t/is (empty? (idx/entities-at snapshot [:http://dbpedia.org/resource/Pablo_Picasso] new-valid-time new-tx-time)))
(t/testing "eviction keeps tx history"
(let [picasso-history (idx/entity-history snapshot :http://dbpedia.org/resource/Pablo_Picasso)]
(t/is (= 6 (count (map :content-hash picasso-history))))
(t/testing "eviction removes docs"
(t/is (empty? (db/get-objects object-store snapshot (keep :content-hash picasso-history))))))))))))
(t/deftest test-can-store-doc
(let [object-store (idx/->KvObjectStore f/*kv*)
tx-log (f/kv-tx-log f/*kv*)
picasso (-> (load-ntriples-example "crux/Pablo_Picasso.ntriples")
:http://dbpedia.org/resource/Pablo_Picasso)
content-hash (c/new-id picasso)]
(t/is (= 48 (count picasso)))
(t/is (= "Pablo" (:http://xmlns.com/foaf/0.1/givenName picasso)))
(db/submit-doc tx-log content-hash picasso)
(with-open [snapshot (kv/new-snapshot f/*kv*)]
(t/is (= {content-hash picasso}
(db/get-objects object-store snapshot [content-hash])))
(t/testing "non existent docs are ignored"
(t/is (= {content-hash picasso}
(db/get-objects object-store
snapshot
[content-hash
"090622a35d4b579d2fcfebf823821298711d3867"])))
(t/is (empty? (db/get-objects object-store snapshot [])))))))
(t/deftest test-can-correct-ranges-in-the-past
(let [object-store (idx/->KvObjectStore f/*kv*)
tx-log (f/kv-tx-log f/*kv* object-store)
ivan {:crux.db/id :ivan :name "Ivan"}
v1-ivan (assoc ivan :version 1)
v1-valid-time #inst "2018-11-26"
{v1-tx-time :crux.tx/tx-time
v1-tx-id :crux.tx/tx-id}
@(db/submit-tx tx-log [[:crux.tx/put v1-ivan v1-valid-time]])
v2-ivan (assoc ivan :version 2)
v2-valid-time #inst "2018-11-27"
{v2-tx-time :crux.tx/tx-time
v2-tx-id :crux.tx/tx-id}
@(db/submit-tx tx-log [[:crux.tx/put v2-ivan v2-valid-time]])
v3-ivan (assoc ivan :version 3)
v3-valid-time #inst "2018-11-28"
{v3-tx-time :crux.tx/tx-time
v3-tx-id :crux.tx/tx-id}
@(db/submit-tx tx-log [[:crux.tx/put v3-ivan v3-valid-time]])]
(with-open [snapshot (kv/new-snapshot f/*kv*)]
(t/testing "first version of entity is visible"
(t/is (= v1-tx-id (-> (idx/entities-at snapshot [:ivan] v1-valid-time v3-tx-time)
(first)
:tx-id))))
(t/testing "second version of entity is visible"
(t/is (= v2-tx-id (-> (idx/entities-at snapshot [:ivan] v2-valid-time v3-tx-time)
(first)
:tx-id))))
(t/testing "third version of entity is visible"
(t/is (= v3-tx-id (-> (idx/entities-at snapshot [:ivan] v3-valid-time v3-tx-time)
(first)
:tx-id)))))
(let [corrected-ivan (assoc ivan :version 4)
corrected-start-valid-time #inst "2018-11-27"
corrected-end-valid-time #inst "2018-11-29"
{corrected-tx-time :crux.tx/tx-time
corrected-tx-id :crux.tx/tx-id}
@(db/submit-tx tx-log [[:crux.tx/put corrected-ivan corrected-start-valid-time corrected-end-valid-time]])]
(with-open [snapshot (kv/new-snapshot f/*kv*)]
(t/testing "first version of entity is still there"
(t/is (= v1-tx-id (-> (idx/entities-at snapshot [:ivan] v1-valid-time corrected-tx-time)
(first)
:tx-id))))
(t/testing "second version of entity was corrected"
(t/is (= {:content-hash (c/new-id corrected-ivan)
:tx-id corrected-tx-id}
(-> (idx/entities-at snapshot [:ivan] v2-valid-time corrected-tx-time)
(first)
(select-keys [:tx-id :content-hash])))))
(t/testing "third version of entity was corrected"
(t/is (= {:content-hash (c/new-id corrected-ivan)
:tx-id corrected-tx-id}
(-> (idx/entities-at snapshot [:ivan] v3-valid-time corrected-tx-time)
(first)
(select-keys [:tx-id :content-hash]))))))
(let [deleted-start-valid-time #inst "2018-11-25"
deleted-end-valid-time #inst "2018-11-28"
{deleted-tx-time :crux.tx/tx-time
deleted-tx-id :crux.tx/tx-id}
@(db/submit-tx tx-log [[:crux.tx/delete :ivan deleted-start-valid-time deleted-end-valid-time]])]
(with-open [snapshot (kv/new-snapshot f/*kv*)]
(t/testing "first version of entity was deleted"
(t/is (empty? (idx/entities-at snapshot [:ivan] v1-valid-time deleted-tx-time))))
(t/testing "second version of entity was deleted"
(t/is (empty? (idx/entities-at snapshot [:ivan] v2-valid-time deleted-tx-time))))
(t/testing "third version of entity is still there"
(t/is (= {:content-hash (c/new-id corrected-ivan)
:tx-id corrected-tx-id}
(-> (idx/entities-at snapshot [:ivan] v3-valid-time deleted-tx-time)
(first)
(select-keys [:tx-id :content-hash])))))))
(t/testing "end range is exclusive"
(let [{deleted-tx-time :crux.tx/tx-time
deleted-tx-id :crux.tx/tx-id}
@(db/submit-tx tx-log [[:crux.tx/delete :ivan v3-valid-time v3-valid-time]])]
(with-open [snapshot (kv/new-snapshot f/*kv*)]
(t/testing "third version of entity is still there"
(t/is (= {:content-hash (c/new-id corrected-ivan)
:tx-id corrected-tx-id}
(-> (idx/entities-at snapshot [:ivan] v3-valid-time deleted-tx-time)
(first)
(select-keys [:tx-id :content-hash])))))))))))
;; TODO: This test just shows that this is an issue, if we fix the
;; underlying issue this test should start failing. We can then change
;; the second assertion if we want to keep it around to ensure it
;; keeps working.
(t/deftest test-corrections-in-the-past-slowes-down-bitemp-144
(let [tx-log (f/kv-tx-log f/*kv*)
ivan {:crux.db/id :ivan :name "Ivan"}
start-valid-time #inst "2019"
number-of-versions 1000]
@(db/submit-tx tx-log (vec (for [n (range number-of-versions)]
[:crux.tx/put (assoc ivan :verison n) (Date. (+ (.getTime start-valid-time) (inc (long n))))])))
(with-open [snapshot (kv/new-snapshot f/*kv*)]
(let [baseline-time (let [start-time (System/nanoTime)
valid-time (Date. (+ (.getTime start-valid-time) number-of-versions))]
(t/testing "last version of entity is visible at now"
(t/is (= valid-time (-> (idx/entities-at snapshot [:ivan] valid-time (Date.))
(first)
:vt))))
(- (System/nanoTime) start-time))]
(let [start-time (System/nanoTime)
valid-time (Date. (+ (.getTime start-valid-time) number-of-versions))]
(t/testing "no version is visible before transactions"
(t/is (nil? (idx/entities-at snapshot [:ivan] valid-time valid-time)))
(let [corrections-time (- (System/nanoTime) start-time)]
;; TODO: This can be a bit flaky. This assertion was
;; mainly there to prove the opposite, but it has been
;; fixed. Can be added back to sanity check when
;; changing indexes.
#_(t/is (>= baseline-time corrections-time)))))))))
(t/deftest test-can-read-kv-tx-log
(let [tx-log (f/kv-tx-log f/*kv*)
ivan {:crux.db/id :ivan :name "Ivan"}
tx1-ivan (assoc ivan :version 1)
tx1-valid-time #inst "2018-11-26"
{tx1-id :crux.tx/tx-id
tx1-tx-time :crux.tx/tx-time}
@(db/submit-tx tx-log [[:crux.tx/put tx1-ivan tx1-valid-time]])
tx2-ivan (assoc ivan :version 2)
tx2-petr {:crux.db/id :petr :name "Petr"}
tx2-valid-time #inst "2018-11-27"
{tx2-id :crux.tx/tx-id
tx2-tx-time :crux.tx/tx-time}
@(db/submit-tx tx-log [[:crux.tx/put tx2-ivan tx2-valid-time]
[:crux.tx/put tx2-petr tx2-valid-time]])]
(with-open [tx-log-context (db/new-tx-log-context tx-log)]
(let [log (db/tx-log tx-log tx-log-context nil)]
(t/is (not (realized? log)))
(t/is (= [{:crux.tx/tx-id tx1-id
:crux.tx/tx-time tx1-tx-time
:crux.api/tx-ops [[:crux.tx/put (c/new-id :ivan) (c/new-id tx1-ivan) tx1-valid-time]]}
{:crux.tx/tx-id tx2-id
:crux.tx/tx-time tx2-tx-time
:crux.api/tx-ops [[:crux.tx/put (c/new-id :ivan) (c/new-id tx2-ivan) tx2-valid-time]
[:crux.tx/put (c/new-id :petr) (c/new-id tx2-petr) tx2-valid-time]]}]
log))))))
|
94900
|
(ns crux.tx-test
(:require [clojure.test :as t]
[clojure.test.check.clojure-test :as tcct]
[clojure.test.check.generators :as gen]
[clojure.test.check.properties :as prop]
[clojure.java.io :as io]
[clojure.spec.alpha :as s]
[clojure.set :as set]
[crux.byte-utils :as bu]
[crux.codec :as c]
[crux.db :as db]
[crux.index :as idx]
[crux.fixtures :as f]
[crux.tx :as tx]
[crux.kv :as kv]
[crux.lru :as lru]
[crux.memory :as mem]
[crux.morton :as morton]
[crux.rdf :as rdf]
[crux.query :as q]
[taoensso.nippy :as nippy]
[crux.bootstrap :as b])
(:import java.util.Date))
(t/use-fixtures :each f/with-each-kv-store-implementation f/with-kv-store f/with-silent-test-check)
(defn load-ntriples-example [resource]
(with-open [in (io/input-stream (io/resource resource))]
(->> (rdf/ntriples-seq in)
(rdf/statements->maps)
(map #(rdf/use-default-language % :en))
(#(rdf/maps-by-id %)))))
;; TODO: This is a large, useful, test that exercises many parts, but
;; might be better split up.
(t/deftest test-can-index-tx-ops-acceptance-test
(let [object-store (f/kv-object-store-w-cache f/*kv*)
tx-log (f/kv-tx-log f/*kv* object-store)
indexer (tx/->KvIndexer f/*kv* tx-log object-store)
picasso (-> (load-ntriples-example "crux/Pablo_Picasso.ntriples")
:http://dbpedia.org/resource/Pablo_Picasso)
content-hash (c/new-id picasso)
valid-time #inst "2018-05-21"
eid (c/new-id :http://dbpedia.org/resource/Pablo_Picasso)
{:crux.tx/keys [tx-time tx-id]}
@(db/submit-tx tx-log [[:crux.tx/put picasso valid-time]])
expected-entities [(c/map->EntityTx {:eid eid
:content-hash content-hash
:vt valid-time
:tt tx-time
:tx-id tx-id})]]
(with-open [snapshot (kv/new-snapshot f/*kv*)]
(t/testing "can see entity at transact and valid time"
(t/is (= expected-entities
(idx/entities-at snapshot [:http://dbpedia.org/resource/Pablo_Picasso] tx-time tx-time)))
(t/is (= expected-entities
(idx/all-entities snapshot tx-time tx-time))))
(t/testing "cannot see entity before valid or transact time"
(t/is (empty? (idx/entities-at snapshot [:http://dbpedia.org/resource/Pablo_Picasso] #inst "2018-05-20" tx-time)))
(t/is (empty? (idx/entities-at snapshot [:http://dbpedia.org/resource/Pablo_Picasso] tx-time #inst "2018-05-20")))
(t/is (empty? (idx/all-entities snapshot #inst "2018-05-20" tx-time)))
(t/is (empty? (idx/all-entities snapshot tx-time #inst "2018-05-20"))))
(t/testing "can see entity after valid or transact time"
(t/is (some? (idx/entities-at snapshot [:http://dbpedia.org/resource/Pablo_Picasso] #inst "2018-05-22" tx-time)))
(t/is (some? (idx/entities-at snapshot [:http://dbpedia.org/resource/Pablo_Picasso] tx-time tx-time))))
(t/testing "can see entity history"
(t/is (= [(c/map->EntityTx {:eid eid
:content-hash content-hash
:vt valid-time
:tt tx-time
:tx-id tx-id})]
(idx/entity-history snapshot :http://dbpedia.org/resource/Pablo_Picasso)))))
(t/testing "add new version of entity in the past"
(let [new-picasso (assoc picasso :foo :bar)
new-content-hash (c/new-id new-picasso)
new-valid-time #inst "2018-05-20"
{new-tx-time :crux.tx/tx-time
new-tx-id :crux.tx/tx-id}
@(db/submit-tx tx-log [[:crux.tx/put new-picasso new-valid-time]])]
(with-open [snapshot (kv/new-snapshot f/*kv*)]
(t/is (= [(c/map->EntityTx {:eid eid
:content-hash new-content-hash
:vt new-valid-time
:tt new-tx-time
:tx-id new-tx-id})]
(idx/entities-at snapshot [:http://dbpedia.org/resource/Pablo_Picasso] new-valid-time new-tx-time)))
(t/is (= [(c/map->EntityTx {:eid eid
:content-hash new-content-hash
:vt new-valid-time
:tt new-tx-time
:tx-id new-tx-id})] (idx/all-entities snapshot new-valid-time new-tx-time)))
(t/is (empty? (idx/entities-at snapshot [:http://dbpedia.org/resource/Pablo_Picasso] #inst "2018-05-20" #inst "2018-05-21"))))))
(t/testing "add new version of entity in the future"
(let [new-picasso (assoc picasso :baz :boz)
new-content-hash (c/new-id new-picasso)
new-valid-time #inst "2018-05-22"
{new-tx-time :crux.tx/tx-time
new-tx-id :crux.tx/tx-id}
@(db/submit-tx tx-log [[:crux.tx/put new-picasso new-valid-time]])]
(with-open [snapshot (kv/new-snapshot f/*kv*)]
(t/is (= [(c/map->EntityTx {:eid eid
:content-hash new-content-hash
:vt new-valid-time
:tt new-tx-time
:tx-id new-tx-id})]
(idx/entities-at snapshot [:http://dbpedia.org/resource/Pablo_Picasso] new-valid-time new-tx-time)))
(t/is (= [(c/map->EntityTx {:eid eid
:content-hash content-hash
:vt valid-time
:tt tx-time
:tx-id tx-id})]
(idx/entities-at snapshot [:http://dbpedia.org/resource/Pablo_Picasso] new-valid-time tx-time)))
(t/is (= [(c/map->EntityTx {:eid eid
:content-hash new-content-hash
:vt new-valid-time
:tt new-tx-time
:tx-id new-tx-id})] (idx/all-entities snapshot new-valid-time new-tx-time))))
(t/testing "can correct entity at earlier valid time"
(let [new-picasso (assoc picasso :bar :foo)
new-content-hash (c/new-id new-picasso)
prev-tx-time new-tx-time
prev-tx-id new-tx-id
new-valid-time #inst "2018-05-22"
{new-tx-time :crux.tx/tx-time
new-tx-id :crux.tx/tx-id}
@(db/submit-tx tx-log [[:crux.tx/put new-picasso new-valid-time]])]
(with-open [snapshot (kv/new-snapshot f/*kv*)]
(t/is (= [(c/map->EntityTx {:eid eid
:content-hash new-content-hash
:vt new-valid-time
:tt new-tx-time
:tx-id new-tx-id})]
(idx/entities-at snapshot [:http://dbpedia.org/resource/Pablo_Picasso] new-valid-time new-tx-time)))
(t/is (= [(c/map->EntityTx {:eid eid
:content-hash new-content-hash
:vt new-valid-time
:tt new-tx-time
:tx-id new-tx-id})] (idx/all-entities snapshot new-valid-time new-tx-time)))
(t/is (= prev-tx-id (-> (idx/entities-at snapshot [:http://dbpedia.org/resource/Pablo_Picasso] prev-tx-time prev-tx-time)
(first)
:tx-id))))
(t/testing "compare and set does nothing with wrong content hash"
(let [old-picasso (assoc picasso :baz :boz)
{cas-failure-tx-time :crux.tx/tx-time}
@(db/submit-tx tx-log [[:crux.tx/cas old-picasso new-picasso new-valid-time]])]
(t/is (= cas-failure-tx-time (tx/await-tx-time indexer cas-failure-tx-time {:crux.tx-log/await-tx-timeout 1000})))
(with-open [snapshot (kv/new-snapshot f/*kv*)]
(t/is (= [(c/map->EntityTx {:eid eid
:content-hash new-content-hash
:vt new-valid-time
:tt new-tx-time
:tx-id new-tx-id})]
(idx/entities-at snapshot [:http://dbpedia.org/resource/Pablo_Picasso] new-valid-time cas-failure-tx-time))))))
(t/testing "compare and set updates with correct content hash"
(let [old-picasso new-picasso
new-picasso (assoc old-picasso :baz :boz)
new-content-hash (c/new-id new-picasso)
{new-tx-time :crux.tx/tx-time
new-tx-id :crux.tx/tx-id}
@(db/submit-tx tx-log [[:crux.tx/cas old-picasso new-picasso new-valid-time]])]
(t/is (= new-tx-time (tx/await-tx-time indexer new-tx-time {:crux.tx-log/await-tx-timeout 1000})))
(with-open [snapshot (kv/new-snapshot f/*kv*)]
(t/is (= [(c/map->EntityTx {:eid eid
:content-hash new-content-hash
:vt new-valid-time
:tt new-tx-time
:tx-id new-tx-id})]
(idx/entities-at snapshot [:http://dbpedia.org/resource/Pablo_Picasso] new-valid-time new-tx-time))))))
(t/testing "compare and set can update non existing nil entity"
(let [new-eid (c/new-id :http://dbpedia.org/resource/Pablo2)
new-picasso (assoc new-picasso :crux.db/id :http://dbpedia.org/resource/Pablo2)
new-content-hash (c/new-id new-picasso)
{new-tx-time :crux.tx/tx-time
new-tx-id :crux.tx/tx-id}
@(db/submit-tx tx-log [[:crux.tx/cas nil new-picasso new-valid-time]])]
(t/is (= new-tx-time (tx/await-tx-time indexer new-tx-time {:crux.tx-log/await-tx-timeout 1000})))
(with-open [snapshot (kv/new-snapshot f/*kv*)]
(t/is (= [(c/map->EntityTx {:eid new-eid
:content-hash new-content-hash
:vt new-valid-time
:tt new-tx-time
:tx-id new-tx-id})]
(idx/entities-at snapshot [:http://dbpedia.org/resource/Pablo2] new-valid-time new-tx-time))))))))
(t/testing "can delete entity"
(let [new-valid-time #inst "2018-05-23"
{new-tx-time :crux.tx/tx-time
new-tx-id :crux.tx/tx-id}
@(db/submit-tx tx-log [[:crux.tx/delete :http://dbpedia.org/resource/Pablo_Picasso new-valid-time]])]
(with-open [snapshot (kv/new-snapshot f/*kv*)]
(t/is (empty? (idx/entities-at snapshot [:http://dbpedia.org/resource/Pablo_Picasso] new-valid-time new-tx-time)))
(t/testing "first version of entity is still visible in the past"
(t/is (= tx-id (-> (idx/entities-at snapshot [:http://dbpedia.org/resource/Pablo_Picasso] valid-time new-tx-time)
(first)
:tx-id)))))))))
(t/testing "can retrieve history of entity"
(with-open [snapshot (kv/new-snapshot f/*kv*)]
(let [picasso-history (idx/entity-history snapshot :http://dbpedia.org/resource/Pablo_Picasso)]
(t/is (= 6 (count (map :content-hash picasso-history))))
(with-open [i (kv/new-iterator snapshot)]
(doseq [{:keys [content-hash]} picasso-history
:when (not (= (c/new-id nil) content-hash))
:let [version-k (c/encode-attribute+entity+content-hash+value-key-to
nil
(c/->id-buffer :http://xmlns.com/foaf/0.1/givenName)
(c/->id-buffer :http://dbpedia.org/resource/Pablo_Picasso)
(c/->id-buffer content-hash)
(c/->value-buffer "Pablo"))]]
(t/is (kv/get-value snapshot version-k)))))))
(t/testing "can evict entity"
(let [new-valid-time #inst "2018-05-23"
; read documents before transaction to populate the cache
_ (with-open [snapshot (kv/new-snapshot f/*kv*)]
(let [picasso-history (idx/entity-history snapshot :http://dbpedia.org/resource/Pablo_Picasso)]
(db/get-objects object-store snapshot (keep :content-hash picasso-history))))
{new-tx-time :crux.tx/tx-time
new-tx-id :crux.tx/tx-id}
@(db/submit-tx tx-log [[:crux.tx/evict :http://dbpedia.org/resource/Pablo_Picasso #inst "1970" new-valid-time]])]
(with-open [snapshot (kv/new-snapshot f/*kv*)]
(t/is (empty? (idx/entities-at snapshot [:http://dbpedia.org/resource/Pablo_Picasso] new-valid-time new-tx-time)))
(t/testing "eviction keeps tx history"
(let [picasso-history (idx/entity-history snapshot :http://dbpedia.org/resource/Pablo_Picasso)]
(t/is (= 6 (count (map :content-hash picasso-history))))
(t/testing "eviction removes docs"
(t/is (empty? (db/get-objects object-store snapshot (keep :content-hash picasso-history))))))))))))
(t/deftest test-can-store-doc
  ;; Submitting a document to the tx-log should make it retrievable from
  ;; the object store by its content hash; unknown hashes in a lookup are
  ;; silently skipped rather than erroring.
  (let [store (idx/->KvObjectStore f/*kv*)
        log (f/kv-tx-log f/*kv*)
        doc (:http://dbpedia.org/resource/Pablo_Picasso
             (load-ntriples-example "crux/Pablo_Picasso.ntriples"))
        doc-hash (c/new-id doc)
        ;; A syntactically valid content hash that was never submitted.
        missing-hash "090622a35d4b579d2fcfebf823821298711d3867"]
    ;; Sanity-check the fixture document before storing it.
    (t/is (= 48 (count doc)))
    (t/is (= "<NAME>" (:http://xmlns.com/foaf/0.1/givenName doc)))
    (db/submit-doc log doc-hash doc)
    (with-open [snapshot (kv/new-snapshot f/*kv*)]
      (t/is (= {doc-hash doc}
               (db/get-objects store snapshot [doc-hash])))
      (t/testing "non existent docs are ignored"
        (t/is (= {doc-hash doc}
                 (db/get-objects store snapshot [doc-hash missing-hash])))
        (t/is (empty? (db/get-objects store snapshot [])))))))
(t/deftest test-can-correct-ranges-in-the-past
  ;; Exercises puts/deletes that carry an explicit valid-time *range*
  ;; (start + end): a ranged correction replaces only the versions whose
  ;; valid time falls inside the range, versions outside it stay visible,
  ;; and the end of the range is exclusive.
  (let [object-store (idx/->KvObjectStore f/*kv*)
        tx-log (f/kv-tx-log f/*kv* object-store)
        ivan {:crux.db/id :ivan :name "<NAME>"}
        ;; Seed three point-in-time versions on consecutive days.
        v1-ivan (assoc ivan :version 1)
        v1-valid-time #inst "2018-11-26"
        {v1-tx-time :crux.tx/tx-time
         v1-tx-id :crux.tx/tx-id}
        @(db/submit-tx tx-log [[:crux.tx/put v1-ivan v1-valid-time]])
        v2-ivan (assoc ivan :version 2)
        v2-valid-time #inst "2018-11-27"
        {v2-tx-time :crux.tx/tx-time
         v2-tx-id :crux.tx/tx-id}
        @(db/submit-tx tx-log [[:crux.tx/put v2-ivan v2-valid-time]])
        v3-ivan (assoc ivan :version 3)
        v3-valid-time #inst "2018-11-28"
        {v3-tx-time :crux.tx/tx-time
         v3-tx-id :crux.tx/tx-id}
        @(db/submit-tx tx-log [[:crux.tx/put v3-ivan v3-valid-time]])]
    ;; Baseline: each version resolves at its own valid time.
    (with-open [snapshot (kv/new-snapshot f/*kv*)]
      (t/testing "first version of entity is visible"
        (t/is (= v1-tx-id (-> (idx/entities-at snapshot [:ivan] v1-valid-time v3-tx-time)
                              (first)
                              :tx-id))))
      (t/testing "second version of entity is visible"
        (t/is (= v2-tx-id (-> (idx/entities-at snapshot [:ivan] v2-valid-time v3-tx-time)
                              (first)
                              :tx-id))))
      (t/testing "third version of entity is visible"
        (t/is (= v3-tx-id (-> (idx/entities-at snapshot [:ivan] v3-valid-time v3-tx-time)
                              (first)
                              :tx-id)))))
    ;; Correct v2 and v3 with one ranged put over [2018-11-27, 2018-11-29).
    (let [corrected-ivan (assoc ivan :version 4)
          corrected-start-valid-time #inst "2018-11-27"
          corrected-end-valid-time #inst "2018-11-29"
          {corrected-tx-time :crux.tx/tx-time
           corrected-tx-id :crux.tx/tx-id}
          @(db/submit-tx tx-log [[:crux.tx/put corrected-ivan corrected-start-valid-time corrected-end-valid-time]])]
      (with-open [snapshot (kv/new-snapshot f/*kv*)]
        (t/testing "first version of entity is still there"
          (t/is (= v1-tx-id (-> (idx/entities-at snapshot [:ivan] v1-valid-time corrected-tx-time)
                                (first)
                                :tx-id))))
        (t/testing "second version of entity was corrected"
          (t/is (= {:content-hash (c/new-id corrected-ivan)
                    :tx-id corrected-tx-id}
                   (-> (idx/entities-at snapshot [:ivan] v2-valid-time corrected-tx-time)
                       (first)
                       (select-keys [:tx-id :content-hash])))))
        (t/testing "third version of entity was corrected"
          (t/is (= {:content-hash (c/new-id corrected-ivan)
                    :tx-id corrected-tx-id}
                   (-> (idx/entities-at snapshot [:ivan] v3-valid-time corrected-tx-time)
                       (first)
                       (select-keys [:tx-id :content-hash]))))))
      ;; Ranged delete over [2018-11-25, 2018-11-28) hides v1 and the
      ;; corrected v2, but not the corrected v3 (exclusive end).
      (let [deleted-start-valid-time #inst "2018-11-25"
            deleted-end-valid-time #inst "2018-11-28"
            {deleted-tx-time :crux.tx/tx-time
             deleted-tx-id :crux.tx/tx-id}
            @(db/submit-tx tx-log [[:crux.tx/delete :ivan deleted-start-valid-time deleted-end-valid-time]])]
        (with-open [snapshot (kv/new-snapshot f/*kv*)]
          (t/testing "first version of entity was deleted"
            (t/is (empty? (idx/entities-at snapshot [:ivan] v1-valid-time deleted-tx-time))))
          (t/testing "second version of entity was deleted"
            (t/is (empty? (idx/entities-at snapshot [:ivan] v2-valid-time deleted-tx-time))))
          (t/testing "third version of entity is still there"
            (t/is (= {:content-hash (c/new-id corrected-ivan)
                      :tx-id corrected-tx-id}
                     (-> (idx/entities-at snapshot [:ivan] v3-valid-time deleted-tx-time)
                         (first)
                         (select-keys [:tx-id :content-hash])))))))
      (t/testing "end range is exclusive"
        ;; Deleting over the empty range [v3, v3) must delete nothing.
        ;; NOTE(review): deleted-tx-id is bound but unused here.
        (let [{deleted-tx-time :crux.tx/tx-time
               deleted-tx-id :crux.tx/tx-id}
              @(db/submit-tx tx-log [[:crux.tx/delete :ivan v3-valid-time v3-valid-time]])]
          (with-open [snapshot (kv/new-snapshot f/*kv*)]
            (t/testing "third version of entity is still there"
              (t/is (= {:content-hash (c/new-id corrected-ivan)
                        :tx-id corrected-tx-id}
                       (-> (idx/entities-at snapshot [:ivan] v3-valid-time deleted-tx-time)
                           (first)
                           (select-keys [:tx-id :content-hash])))))))))))
;; TODO: This test just shows that this is an issue, if we fix the
;; underlying issue this test should start failing. We can then change
;; the second assertion if we want to keep it around to ensure it
;; keeps working.
(t/deftest test-corrections-in-the-past-slowes-down-bitemp-144
  ;; Regression check for issue #144: after submitting many valid-time
  ;; versions of one entity, a lookup before the first transaction should
  ;; not be disproportionately slower than a lookup at "now".
  ;; Fix: the version key was misspelled :verison; use :version for
  ;; consistency with the other tests in this file (the key's spelling
  ;; does not affect the visibility/timing assertions below).
  (let [tx-log (f/kv-tx-log f/*kv*)
        ivan {:crux.db/id :ivan :name "<NAME>"}
        start-valid-time #inst "2019"
        number-of-versions 1000]
    ;; One transaction holding a distinct version of :ivan for each of the
    ;; first `number-of-versions` milliseconds after start-valid-time.
    @(db/submit-tx tx-log (vec (for [n (range number-of-versions)]
                                 [:crux.tx/put
                                  (assoc ivan :version n)
                                  (Date. (+ (.getTime start-valid-time) (inc (long n))))])))
    (with-open [snapshot (kv/new-snapshot f/*kv*)]
      (let [baseline-time (let [start-time (System/nanoTime)
                                valid-time (Date. (+ (.getTime start-valid-time) number-of-versions))]
                            (t/testing "last version of entity is visible at now"
                              (t/is (= valid-time (-> (idx/entities-at snapshot [:ivan] valid-time (Date.))
                                                      (first)
                                                      :vt))))
                            (- (System/nanoTime) start-time))]
        (let [start-time (System/nanoTime)
              valid-time (Date. (+ (.getTime start-valid-time) number-of-versions))]
          (t/testing "no version is visible before transactions"
            ;; tx-time here predates the actual submission, so nothing
            ;; should be visible at all.
            (t/is (nil? (idx/entities-at snapshot [:ivan] valid-time valid-time)))
            (let [corrections-time (- (System/nanoTime) start-time)]
              ;; TODO: This can be a bit flaky. This assertion was
              ;; mainly there to prove the opposite, but it has been
              ;; fixed. Can be added back to sanity check when
              ;; changing indexes.
              #_(t/is (>= baseline-time corrections-time)))))))))
(t/deftest test-can-read-kv-tx-log
  ;; The KV-backed tx-log must replay submitted transactions lazily, in
  ;; submission order, with every operation resolved to entity-id /
  ;; content-hash form.
  (let [tx-log (f/kv-tx-log f/*kv*)
        ivan {:crux.db/id :ivan :name "<NAME>"}
        ivan-v1 (assoc ivan :version 1)
        valid-time-1 #inst "2018-11-26"
        {tx1-id :crux.tx/tx-id
         tx1-tx-time :crux.tx/tx-time}
        @(db/submit-tx tx-log [[:crux.tx/put ivan-v1 valid-time-1]])
        ivan-v2 (assoc ivan :version 2)
        petr {:crux.db/id :petr :name "<NAME>"}
        valid-time-2 #inst "2018-11-27"
        {tx2-id :crux.tx/tx-id
         tx2-tx-time :crux.tx/tx-time}
        @(db/submit-tx tx-log [[:crux.tx/put ivan-v2 valid-time-2]
                               [:crux.tx/put petr valid-time-2]])
        ;; What the replayed log should look like, op-by-op.
        expected-log [{:crux.tx/tx-id tx1-id
                       :crux.tx/tx-time tx1-tx-time
                       :crux.api/tx-ops [[:crux.tx/put (c/new-id :ivan) (c/new-id ivan-v1) valid-time-1]]}
                      {:crux.tx/tx-id tx2-id
                       :crux.tx/tx-time tx2-tx-time
                       :crux.api/tx-ops [[:crux.tx/put (c/new-id :ivan) (c/new-id ivan-v2) valid-time-2]
                                         [:crux.tx/put (c/new-id :petr) (c/new-id petr) valid-time-2]]}]]
    (with-open [log-context (db/new-tx-log-context tx-log)]
      (let [log (db/tx-log tx-log log-context nil)]
        ;; The log sequence is lazy: nothing realized until consumed.
        (t/is (not (realized? log)))
        (t/is (= expected-log log))))))
| true |
(ns crux.tx-test
(:require [clojure.test :as t]
[clojure.test.check.clojure-test :as tcct]
[clojure.test.check.generators :as gen]
[clojure.test.check.properties :as prop]
[clojure.java.io :as io]
[clojure.spec.alpha :as s]
[clojure.set :as set]
[crux.byte-utils :as bu]
[crux.codec :as c]
[crux.db :as db]
[crux.index :as idx]
[crux.fixtures :as f]
[crux.tx :as tx]
[crux.kv :as kv]
[crux.lru :as lru]
[crux.memory :as mem]
[crux.morton :as morton]
[crux.rdf :as rdf]
[crux.query :as q]
[taoensso.nippy :as nippy]
[crux.bootstrap :as b])
(:import java.util.Date))
(t/use-fixtures :each f/with-each-kv-store-implementation f/with-kv-store f/with-silent-test-check)
(defn load-ntriples-example [resource]
(with-open [in (io/input-stream (io/resource resource))]
(->> (rdf/ntriples-seq in)
(rdf/statements->maps)
(map #(rdf/use-default-language % :en))
(#(rdf/maps-by-id %)))))
;; TODO: This is a large, useful, test that exercises many parts, but
;; might be better split up.
(t/deftest test-can-index-tx-ops-acceptance-test
(let [object-store (f/kv-object-store-w-cache f/*kv*)
tx-log (f/kv-tx-log f/*kv* object-store)
indexer (tx/->KvIndexer f/*kv* tx-log object-store)
picasso (-> (load-ntriples-example "crux/Pablo_Picasso.ntriples")
:http://dbpedia.org/resource/Pablo_Picasso)
content-hash (c/new-id picasso)
valid-time #inst "2018-05-21"
eid (c/new-id :http://dbpedia.org/resource/Pablo_Picasso)
{:crux.tx/keys [tx-time tx-id]}
@(db/submit-tx tx-log [[:crux.tx/put picasso valid-time]])
expected-entities [(c/map->EntityTx {:eid eid
:content-hash content-hash
:vt valid-time
:tt tx-time
:tx-id tx-id})]]
(with-open [snapshot (kv/new-snapshot f/*kv*)]
(t/testing "can see entity at transact and valid time"
(t/is (= expected-entities
(idx/entities-at snapshot [:http://dbpedia.org/resource/Pablo_Picasso] tx-time tx-time)))
(t/is (= expected-entities
(idx/all-entities snapshot tx-time tx-time))))
(t/testing "cannot see entity before valid or transact time"
(t/is (empty? (idx/entities-at snapshot [:http://dbpedia.org/resource/Pablo_Picasso] #inst "2018-05-20" tx-time)))
(t/is (empty? (idx/entities-at snapshot [:http://dbpedia.org/resource/Pablo_Picasso] tx-time #inst "2018-05-20")))
(t/is (empty? (idx/all-entities snapshot #inst "2018-05-20" tx-time)))
(t/is (empty? (idx/all-entities snapshot tx-time #inst "2018-05-20"))))
(t/testing "can see entity after valid or transact time"
(t/is (some? (idx/entities-at snapshot [:http://dbpedia.org/resource/Pablo_Picasso] #inst "2018-05-22" tx-time)))
(t/is (some? (idx/entities-at snapshot [:http://dbpedia.org/resource/Pablo_Picasso] tx-time tx-time))))
(t/testing "can see entity history"
(t/is (= [(c/map->EntityTx {:eid eid
:content-hash content-hash
:vt valid-time
:tt tx-time
:tx-id tx-id})]
(idx/entity-history snapshot :http://dbpedia.org/resource/Pablo_Picasso)))))
(t/testing "add new version of entity in the past"
(let [new-picasso (assoc picasso :foo :bar)
new-content-hash (c/new-id new-picasso)
new-valid-time #inst "2018-05-20"
{new-tx-time :crux.tx/tx-time
new-tx-id :crux.tx/tx-id}
@(db/submit-tx tx-log [[:crux.tx/put new-picasso new-valid-time]])]
(with-open [snapshot (kv/new-snapshot f/*kv*)]
(t/is (= [(c/map->EntityTx {:eid eid
:content-hash new-content-hash
:vt new-valid-time
:tt new-tx-time
:tx-id new-tx-id})]
(idx/entities-at snapshot [:http://dbpedia.org/resource/Pablo_Picasso] new-valid-time new-tx-time)))
(t/is (= [(c/map->EntityTx {:eid eid
:content-hash new-content-hash
:vt new-valid-time
:tt new-tx-time
:tx-id new-tx-id})] (idx/all-entities snapshot new-valid-time new-tx-time)))
(t/is (empty? (idx/entities-at snapshot [:http://dbpedia.org/resource/Pablo_Picasso] #inst "2018-05-20" #inst "2018-05-21"))))))
(t/testing "add new version of entity in the future"
(let [new-picasso (assoc picasso :baz :boz)
new-content-hash (c/new-id new-picasso)
new-valid-time #inst "2018-05-22"
{new-tx-time :crux.tx/tx-time
new-tx-id :crux.tx/tx-id}
@(db/submit-tx tx-log [[:crux.tx/put new-picasso new-valid-time]])]
(with-open [snapshot (kv/new-snapshot f/*kv*)]
(t/is (= [(c/map->EntityTx {:eid eid
:content-hash new-content-hash
:vt new-valid-time
:tt new-tx-time
:tx-id new-tx-id})]
(idx/entities-at snapshot [:http://dbpedia.org/resource/Pablo_Picasso] new-valid-time new-tx-time)))
(t/is (= [(c/map->EntityTx {:eid eid
:content-hash content-hash
:vt valid-time
:tt tx-time
:tx-id tx-id})]
(idx/entities-at snapshot [:http://dbpedia.org/resource/Pablo_Picasso] new-valid-time tx-time)))
(t/is (= [(c/map->EntityTx {:eid eid
:content-hash new-content-hash
:vt new-valid-time
:tt new-tx-time
:tx-id new-tx-id})] (idx/all-entities snapshot new-valid-time new-tx-time))))
(t/testing "can correct entity at earlier valid time"
(let [new-picasso (assoc picasso :bar :foo)
new-content-hash (c/new-id new-picasso)
prev-tx-time new-tx-time
prev-tx-id new-tx-id
new-valid-time #inst "2018-05-22"
{new-tx-time :crux.tx/tx-time
new-tx-id :crux.tx/tx-id}
@(db/submit-tx tx-log [[:crux.tx/put new-picasso new-valid-time]])]
(with-open [snapshot (kv/new-snapshot f/*kv*)]
(t/is (= [(c/map->EntityTx {:eid eid
:content-hash new-content-hash
:vt new-valid-time
:tt new-tx-time
:tx-id new-tx-id})]
(idx/entities-at snapshot [:http://dbpedia.org/resource/Pablo_Picasso] new-valid-time new-tx-time)))
(t/is (= [(c/map->EntityTx {:eid eid
:content-hash new-content-hash
:vt new-valid-time
:tt new-tx-time
:tx-id new-tx-id})] (idx/all-entities snapshot new-valid-time new-tx-time)))
(t/is (= prev-tx-id (-> (idx/entities-at snapshot [:http://dbpedia.org/resource/Pablo_Picasso] prev-tx-time prev-tx-time)
(first)
:tx-id))))
(t/testing "compare and set does nothing with wrong content hash"
(let [old-picasso (assoc picasso :baz :boz)
{cas-failure-tx-time :crux.tx/tx-time}
@(db/submit-tx tx-log [[:crux.tx/cas old-picasso new-picasso new-valid-time]])]
(t/is (= cas-failure-tx-time (tx/await-tx-time indexer cas-failure-tx-time {:crux.tx-log/await-tx-timeout 1000})))
(with-open [snapshot (kv/new-snapshot f/*kv*)]
(t/is (= [(c/map->EntityTx {:eid eid
:content-hash new-content-hash
:vt new-valid-time
:tt new-tx-time
:tx-id new-tx-id})]
(idx/entities-at snapshot [:http://dbpedia.org/resource/Pablo_Picasso] new-valid-time cas-failure-tx-time))))))
(t/testing "compare and set updates with correct content hash"
(let [old-picasso new-picasso
new-picasso (assoc old-picasso :baz :boz)
new-content-hash (c/new-id new-picasso)
{new-tx-time :crux.tx/tx-time
new-tx-id :crux.tx/tx-id}
@(db/submit-tx tx-log [[:crux.tx/cas old-picasso new-picasso new-valid-time]])]
(t/is (= new-tx-time (tx/await-tx-time indexer new-tx-time {:crux.tx-log/await-tx-timeout 1000})))
(with-open [snapshot (kv/new-snapshot f/*kv*)]
(t/is (= [(c/map->EntityTx {:eid eid
:content-hash new-content-hash
:vt new-valid-time
:tt new-tx-time
:tx-id new-tx-id})]
(idx/entities-at snapshot [:http://dbpedia.org/resource/Pablo_Picasso] new-valid-time new-tx-time))))))
(t/testing "compare and set can update non existing nil entity"
(let [new-eid (c/new-id :http://dbpedia.org/resource/Pablo2)
new-picasso (assoc new-picasso :crux.db/id :http://dbpedia.org/resource/Pablo2)
new-content-hash (c/new-id new-picasso)
{new-tx-time :crux.tx/tx-time
new-tx-id :crux.tx/tx-id}
@(db/submit-tx tx-log [[:crux.tx/cas nil new-picasso new-valid-time]])]
(t/is (= new-tx-time (tx/await-tx-time indexer new-tx-time {:crux.tx-log/await-tx-timeout 1000})))
(with-open [snapshot (kv/new-snapshot f/*kv*)]
(t/is (= [(c/map->EntityTx {:eid new-eid
:content-hash new-content-hash
:vt new-valid-time
:tt new-tx-time
:tx-id new-tx-id})]
(idx/entities-at snapshot [:http://dbpedia.org/resource/Pablo2] new-valid-time new-tx-time))))))))
(t/testing "can delete entity"
(let [new-valid-time #inst "2018-05-23"
{new-tx-time :crux.tx/tx-time
new-tx-id :crux.tx/tx-id}
@(db/submit-tx tx-log [[:crux.tx/delete :http://dbpedia.org/resource/Pablo_Picasso new-valid-time]])]
(with-open [snapshot (kv/new-snapshot f/*kv*)]
(t/is (empty? (idx/entities-at snapshot [:http://dbpedia.org/resource/Pablo_Picasso] new-valid-time new-tx-time)))
(t/testing "first version of entity is still visible in the past"
(t/is (= tx-id (-> (idx/entities-at snapshot [:http://dbpedia.org/resource/Pablo_Picasso] valid-time new-tx-time)
(first)
:tx-id)))))))))
(t/testing "can retrieve history of entity"
(with-open [snapshot (kv/new-snapshot f/*kv*)]
(let [picasso-history (idx/entity-history snapshot :http://dbpedia.org/resource/Pablo_Picasso)]
(t/is (= 6 (count (map :content-hash picasso-history))))
(with-open [i (kv/new-iterator snapshot)]
(doseq [{:keys [content-hash]} picasso-history
:when (not (= (c/new-id nil) content-hash))
:let [version-k (c/encode-attribute+entity+content-hash+value-key-to
nil
(c/->id-buffer :http://xmlns.com/foaf/0.1/givenName)
(c/->id-buffer :http://dbpedia.org/resource/Pablo_Picasso)
(c/->id-buffer content-hash)
(c/->value-buffer "Pablo"))]]
(t/is (kv/get-value snapshot version-k)))))))
(t/testing "can evict entity"
(let [new-valid-time #inst "2018-05-23"
; read documents before transaction to populate the cache
_ (with-open [snapshot (kv/new-snapshot f/*kv*)]
(let [picasso-history (idx/entity-history snapshot :http://dbpedia.org/resource/Pablo_Picasso)]
(db/get-objects object-store snapshot (keep :content-hash picasso-history))))
{new-tx-time :crux.tx/tx-time
new-tx-id :crux.tx/tx-id}
@(db/submit-tx tx-log [[:crux.tx/evict :http://dbpedia.org/resource/Pablo_Picasso #inst "1970" new-valid-time]])]
(with-open [snapshot (kv/new-snapshot f/*kv*)]
(t/is (empty? (idx/entities-at snapshot [:http://dbpedia.org/resource/Pablo_Picasso] new-valid-time new-tx-time)))
(t/testing "eviction keeps tx history"
(let [picasso-history (idx/entity-history snapshot :http://dbpedia.org/resource/Pablo_Picasso)]
(t/is (= 6 (count (map :content-hash picasso-history))))
(t/testing "eviction removes docs"
(t/is (empty? (db/get-objects object-store snapshot (keep :content-hash picasso-history))))))))))))
(t/deftest test-can-store-doc
(let [object-store (idx/->KvObjectStore f/*kv*)
tx-log (f/kv-tx-log f/*kv*)
picasso (-> (load-ntriples-example "crux/Pablo_Picasso.ntriples")
:http://dbpedia.org/resource/Pablo_Picasso)
content-hash (c/new-id picasso)]
(t/is (= 48 (count picasso)))
(t/is (= "PI:NAME:<NAME>END_PI" (:http://xmlns.com/foaf/0.1/givenName picasso)))
(db/submit-doc tx-log content-hash picasso)
(with-open [snapshot (kv/new-snapshot f/*kv*)]
(t/is (= {content-hash picasso}
(db/get-objects object-store snapshot [content-hash])))
(t/testing "non existent docs are ignored"
(t/is (= {content-hash picasso}
(db/get-objects object-store
snapshot
[content-hash
"090622a35d4b579d2fcfebf823821298711d3867"])))
(t/is (empty? (db/get-objects object-store snapshot [])))))))
(t/deftest test-can-correct-ranges-in-the-past
(let [object-store (idx/->KvObjectStore f/*kv*)
tx-log (f/kv-tx-log f/*kv* object-store)
ivan {:crux.db/id :ivan :name "PI:NAME:<NAME>END_PI"}
v1-ivan (assoc ivan :version 1)
v1-valid-time #inst "2018-11-26"
{v1-tx-time :crux.tx/tx-time
v1-tx-id :crux.tx/tx-id}
@(db/submit-tx tx-log [[:crux.tx/put v1-ivan v1-valid-time]])
v2-ivan (assoc ivan :version 2)
v2-valid-time #inst "2018-11-27"
{v2-tx-time :crux.tx/tx-time
v2-tx-id :crux.tx/tx-id}
@(db/submit-tx tx-log [[:crux.tx/put v2-ivan v2-valid-time]])
v3-ivan (assoc ivan :version 3)
v3-valid-time #inst "2018-11-28"
{v3-tx-time :crux.tx/tx-time
v3-tx-id :crux.tx/tx-id}
@(db/submit-tx tx-log [[:crux.tx/put v3-ivan v3-valid-time]])]
(with-open [snapshot (kv/new-snapshot f/*kv*)]
(t/testing "first version of entity is visible"
(t/is (= v1-tx-id (-> (idx/entities-at snapshot [:ivan] v1-valid-time v3-tx-time)
(first)
:tx-id))))
(t/testing "second version of entity is visible"
(t/is (= v2-tx-id (-> (idx/entities-at snapshot [:ivan] v2-valid-time v3-tx-time)
(first)
:tx-id))))
(t/testing "third version of entity is visible"
(t/is (= v3-tx-id (-> (idx/entities-at snapshot [:ivan] v3-valid-time v3-tx-time)
(first)
:tx-id)))))
(let [corrected-ivan (assoc ivan :version 4)
corrected-start-valid-time #inst "2018-11-27"
corrected-end-valid-time #inst "2018-11-29"
{corrected-tx-time :crux.tx/tx-time
corrected-tx-id :crux.tx/tx-id}
@(db/submit-tx tx-log [[:crux.tx/put corrected-ivan corrected-start-valid-time corrected-end-valid-time]])]
(with-open [snapshot (kv/new-snapshot f/*kv*)]
(t/testing "first version of entity is still there"
(t/is (= v1-tx-id (-> (idx/entities-at snapshot [:ivan] v1-valid-time corrected-tx-time)
(first)
:tx-id))))
(t/testing "second version of entity was corrected"
(t/is (= {:content-hash (c/new-id corrected-ivan)
:tx-id corrected-tx-id}
(-> (idx/entities-at snapshot [:ivan] v2-valid-time corrected-tx-time)
(first)
(select-keys [:tx-id :content-hash])))))
(t/testing "third version of entity was corrected"
(t/is (= {:content-hash (c/new-id corrected-ivan)
:tx-id corrected-tx-id}
(-> (idx/entities-at snapshot [:ivan] v3-valid-time corrected-tx-time)
(first)
(select-keys [:tx-id :content-hash]))))))
(let [deleted-start-valid-time #inst "2018-11-25"
deleted-end-valid-time #inst "2018-11-28"
{deleted-tx-time :crux.tx/tx-time
deleted-tx-id :crux.tx/tx-id}
@(db/submit-tx tx-log [[:crux.tx/delete :ivan deleted-start-valid-time deleted-end-valid-time]])]
(with-open [snapshot (kv/new-snapshot f/*kv*)]
(t/testing "first version of entity was deleted"
(t/is (empty? (idx/entities-at snapshot [:ivan] v1-valid-time deleted-tx-time))))
(t/testing "second version of entity was deleted"
(t/is (empty? (idx/entities-at snapshot [:ivan] v2-valid-time deleted-tx-time))))
(t/testing "third version of entity is still there"
(t/is (= {:content-hash (c/new-id corrected-ivan)
:tx-id corrected-tx-id}
(-> (idx/entities-at snapshot [:ivan] v3-valid-time deleted-tx-time)
(first)
(select-keys [:tx-id :content-hash])))))))
(t/testing "end range is exclusive"
(let [{deleted-tx-time :crux.tx/tx-time
deleted-tx-id :crux.tx/tx-id}
@(db/submit-tx tx-log [[:crux.tx/delete :ivan v3-valid-time v3-valid-time]])]
(with-open [snapshot (kv/new-snapshot f/*kv*)]
(t/testing "third version of entity is still there"
(t/is (= {:content-hash (c/new-id corrected-ivan)
:tx-id corrected-tx-id}
(-> (idx/entities-at snapshot [:ivan] v3-valid-time deleted-tx-time)
(first)
(select-keys [:tx-id :content-hash])))))))))))
;; TODO: This test just shows that this is an issue, if we fix the
;; underlying issue this test should start failing. We can then change
;; the second assertion if we want to keep it around to ensure it
;; keeps working.
(t/deftest test-corrections-in-the-past-slowes-down-bitemp-144
(let [tx-log (f/kv-tx-log f/*kv*)
ivan {:crux.db/id :ivan :name "PI:NAME:<NAME>END_PI"}
start-valid-time #inst "2019"
number-of-versions 1000]
@(db/submit-tx tx-log (vec (for [n (range number-of-versions)]
[:crux.tx/put (assoc ivan :verison n) (Date. (+ (.getTime start-valid-time) (inc (long n))))])))
(with-open [snapshot (kv/new-snapshot f/*kv*)]
(let [baseline-time (let [start-time (System/nanoTime)
valid-time (Date. (+ (.getTime start-valid-time) number-of-versions))]
(t/testing "last version of entity is visible at now"
(t/is (= valid-time (-> (idx/entities-at snapshot [:ivan] valid-time (Date.))
(first)
:vt))))
(- (System/nanoTime) start-time))]
(let [start-time (System/nanoTime)
valid-time (Date. (+ (.getTime start-valid-time) number-of-versions))]
(t/testing "no version is visible before transactions"
(t/is (nil? (idx/entities-at snapshot [:ivan] valid-time valid-time)))
(let [corrections-time (- (System/nanoTime) start-time)]
;; TODO: This can be a bit flaky. This assertion was
;; mainly there to prove the opposite, but it has been
;; fixed. Can be added back to sanity check when
;; changing indexes.
#_(t/is (>= baseline-time corrections-time)))))))))
(t/deftest test-can-read-kv-tx-log
  ;; Submits two transactions and checks that reading the tx-log back
  ;; yields them in order, with the documents replaced by their ids and
  ;; content hashes (c/new-id), and that the log is returned lazily.
  (let [tx-log (f/kv-tx-log f/*kv*)
        ivan {:crux.db/id :ivan :name "Ivan"}
        tx1-ivan (assoc ivan :version 1)
        tx1-valid-time #inst "2018-11-26"
        {tx1-id :crux.tx/tx-id
         tx1-tx-time :crux.tx/tx-time}
        @(db/submit-tx tx-log [[:crux.tx/put tx1-ivan tx1-valid-time]])
        tx2-ivan (assoc ivan :version 2)
        tx2-petr {:crux.db/id :petr :name "Petr"}
        tx2-valid-time #inst "2018-11-27"
        {tx2-id :crux.tx/tx-id
         tx2-tx-time :crux.tx/tx-time}
        @(db/submit-tx tx-log [[:crux.tx/put tx2-ivan tx2-valid-time]
                               [:crux.tx/put tx2-petr tx2-valid-time]])]
    (with-open [tx-log-context (db/new-tx-log-context tx-log)]
      (let [log (db/tx-log tx-log tx-log-context nil)]
        ;; the log should come back unrealized (lazy)
        (t/is (not (realized? log)))
        (t/is (= [{:crux.tx/tx-id tx1-id
                   :crux.tx/tx-time tx1-tx-time
                   :crux.api/tx-ops [[:crux.tx/put (c/new-id :ivan) (c/new-id tx1-ivan) tx1-valid-time]]}
                  {:crux.tx/tx-id tx2-id
                   :crux.tx/tx-time tx2-tx-time
                   :crux.api/tx-ops [[:crux.tx/put (c/new-id :ivan) (c/new-id tx2-ivan) tx2-valid-time]
                                     [:crux.tx/put (c/new-id :petr) (c/new-id tx2-petr) tx2-valid-time]]}]
                 log))))))
|
[
{
"context": "tlements+\n [{:resid \"res1\" :catappid 11 :userid \"user1\" :start (time/date-time 2001 10 11) :mail \"user1@",
"end": 944,
"score": 0.8185912370681763,
"start": 939,
"tag": "USERNAME",
"value": "user1"
},
{
"context": "\"user1\" :start (time/date-time 2001 10 11) :mail \"[email protected]\" :end (time/date-time 2003 10 11)}\n {:resid \"re",
"end": 999,
"score": 0.9997691512107849,
"start": 988,
"tag": "EMAIL",
"value": "[email protected]"
},
{
"context": "3 10 11)}\n {:resid \"res2\" :catappid 12 :userid \"user2\" :start (time/date-time 2002 10 11) :mail \"user2@",
"end": 1080,
"score": 0.9487768411636353,
"start": 1075,
"tag": "USERNAME",
"value": "user2"
},
{
"context": "\"user2\" :start (time/date-time 2002 10 11) :mail \"[email protected]\"}])\n\n(def +expected-payload+\n [{:resource \"res1\"",
"end": 1135,
"score": 0.999823272228241,
"start": 1124,
"tag": "EMAIL",
"value": "[email protected]"
},
{
"context": "load+\n [{:resource \"res1\" :application 11 :user \"user1\" :mail \"[email protected]\" :end \"2003-10-11T00:00:00.00",
"end": 1214,
"score": 0.9980932474136353,
"start": 1209,
"tag": "USERNAME",
"value": "user1"
},
{
"context": "ource \"res1\" :application 11 :user \"user1\" :mail \"[email protected]\" :end \"2003-10-11T00:00:00.000Z\"}\n {:resource \"",
"end": 1234,
"score": 0.9997875094413757,
"start": 1223,
"tag": "EMAIL",
"value": "[email protected]"
},
{
"context": "00Z\"}\n {:resource \"res2\" :application 12 :user \"user2\" :mail \"[email protected]\" :end nil}])\n\n(defn run-with-",
"end": 1318,
"score": 0.9980891346931458,
"start": 1313,
"tag": "USERNAME",
"value": "user2"
},
{
"context": "ource \"res2\" :application 12 :user \"user2\" :mail \"[email protected]\" :end nil}])\n\n(defn run-with-server\n [endpoint-s",
"end": 1338,
"score": 0.9997552037239075,
"start": 1327,
"tag": "EMAIL",
"value": "[email protected]"
},
{
"context": "test test-entitlement-granting\n (let [applicant \"bob\"\n member \"elsa\"\n admin \"owner\"\n ",
"end": 4123,
"score": 0.5495917797088623,
"start": 4120,
"tag": "USERNAME",
"value": "bob"
},
{
"context": "test-helpers/create-user! {:eppn applicant :mail \"[email protected]\" :commonName \"Bob\"})\n (test-helpers/create-use",
"end": 5330,
"score": 0.993057906627655,
"start": 5325,
"tag": "EMAIL",
"value": "[email protected]"
},
{
"context": "user! {:eppn applicant :mail \"[email protected]\" :commonName \"Bob\"})\n (test-helpers/create-user! {:eppn member :",
"end": 5348,
"score": 0.9996485114097595,
"start": 5345,
"tag": "NAME",
"value": "Bob"
},
{
"context": " (test-helpers/create-user! {:eppn member :mail \"[email protected]\" :commonName \"Elsa\"})\n (test-helpers/create-us",
"end": 5411,
"score": 0.9993184208869934,
"start": 5404,
"tag": "EMAIL",
"value": "[email protected]"
},
{
"context": "-user! {:eppn member :mail \"[email protected]\" :commonName \"Elsa\"})\n (test-helpers/create-user! {:eppn admin :m",
"end": 5430,
"score": 0.999738335609436,
"start": 5426,
"tag": "NAME",
"value": "Elsa"
},
{
"context": " (test-helpers/create-user! {:eppn admin :mail \"[email protected]\" :commonName \"Owner\"})\n\n (entitlements/process",
"end": 5493,
"score": 0.999455988407135,
"start": 5485,
"tag": "EMAIL",
"value": "[email protected]"
},
{
"context": "-user! {:eppn admin :mail \"[email protected]\" :commonName \"Owner\"})\n\n (entitlements/process-outbox!) ;; empty o",
"end": 5513,
"score": 0.9074419140815735,
"start": 5508,
"tag": "NAME",
"value": "Owner"
},
{
"context": "app-id :mail \"[email protected]\" :resource \"resource1\" :user \"bob\" :end nil}]}\n {:path \"/a",
"end": 8166,
"score": 0.9985740184783936,
"start": 8163,
"tag": "USERNAME",
"value": "bob"
},
{
"context": "app-id :mail \"[email protected]\" :resource \"resource2\" :user \"bob\" :end nil}]}}\n (set add-pa",
"end": 8294,
"score": 0.9983412027359009,
"start": 8291,
"tag": "USERNAME",
"value": "bob"
},
{
"context": "{:resource \"resource1\" :application app-id :user \"elsa\" :mail \"[email protected]\" :end nil}]}\n ",
"end": 9537,
"score": 0.999647319316864,
"start": 9533,
"tag": "USERNAME",
"value": "elsa"
},
{
"context": "esource1\" :application app-id :user \"elsa\" :mail \"[email protected]\" :end nil}]}\n {:path \"/add",
"end": 9553,
"score": 0.9895399212837219,
"start": 9546,
"tag": "EMAIL",
"value": "[email protected]"
},
{
"context": "{:resource \"resource2\" :application app-id :user \"elsa\" :mail \"[email protected]\" :end nil}]}}\n ",
"end": 9666,
"score": 0.9996318221092224,
"start": 9662,
"tag": "USERNAME",
"value": "elsa"
},
{
"context": "esource2\" :application app-id :user \"elsa\" :mail \"[email protected]\" :end nil}]}}\n (set add-path",
"end": 9682,
"score": 0.9880086183547974,
"start": 9675,
"tag": "EMAIL",
"value": "[email protected]"
},
{
"context": "{:resource \"resource1\" :application app-id :user \"elsa\" :mail \"[email protected]\" :end +test-time-string+}]}\n ",
"end": 10724,
"score": 0.9996606707572937,
"start": 10720,
"tag": "USERNAME",
"value": "elsa"
},
{
"context": "esource1\" :application app-id :user \"elsa\" :mail \"[email protected]\" :end +test-time-string+}]}\n ",
"end": 10740,
"score": 0.9944273829460144,
"start": 10733,
"tag": "EMAIL",
"value": "[email protected]"
},
{
"context": "{:resource \"resource2\" :application app-id :user \"elsa\" :mail \"[email protected]\" :end +test-time-string+}]}}\n ",
"end": 10869,
"score": 0.9996461868286133,
"start": 10865,
"tag": "USERNAME",
"value": "elsa"
},
{
"context": "esource2\" :application app-id :user \"elsa\" :mail \"[email protected]\" :end +test-time-string+}]}}\n ",
"end": 10885,
"score": 0.9717279672622681,
"start": 10878,
"tag": "EMAIL",
"value": "[email protected]"
},
{
"context": "{:resource \"resource3\" :application app-id :user \"bob\" :mail \"[email protected]\" :end nil}]}}\n ",
"end": 12095,
"score": 0.9995539784431458,
"start": 12092,
"tag": "USERNAME",
"value": "bob"
},
{
"context": "resource3\" :application app-id :user \"bob\" :mail \"[email protected]\" :end nil}]}}\n (set add-path",
"end": 12109,
"score": 0.9105121493339539,
"start": 12104,
"tag": "EMAIL",
"value": "[email protected]"
},
{
"context": "{:resource \"resource2\" :application app-id :user \"bob\" :mail \"[email protected]\" :end +test-time-string+}]}}\n ",
"end": 12265,
"score": 0.999553382396698,
"start": 12262,
"tag": "USERNAME",
"value": "bob"
},
{
"context": "resource2\" :application app-id :user \"bob\" :mail \"[email protected]\" :end +test-time-string+}]}}\n ",
"end": 12279,
"score": 0.786512017250061,
"start": 12274,
"tag": "EMAIL",
"value": "[email protected]"
},
{
"context": "{:resource \"resource1\" :application app-id :user \"bob\" :mail \"[email protected]\" :end +test-time-string+}]}\n ",
"end": 13164,
"score": 0.9996224641799927,
"start": 13161,
"tag": "USERNAME",
"value": "bob"
},
{
"context": "esource1\" :application app-id :user \"bob\" :mail \"[email protected]\" :end +test-time-string+}]}\n ",
"end": 13174,
"score": 0.5686050653457642,
"start": 13174,
"tag": "EMAIL",
"value": ""
},
{
"context": "ource1\" :application app-id :user \"bob\" :mail \"[email protected]\" :end +test-time-string+}]}\n ",
"end": 13176,
"score": 0.6308653354644775,
"start": 13176,
"tag": "EMAIL",
"value": ""
},
{
"context": "{:resource \"resource3\" :application app-id :user \"bob\" :mail \"[email protected]\" :end +test-time-string+}]}}\n ",
"end": 13306,
"score": 0.9995744228363037,
"start": 13303,
"tag": "USERNAME",
"value": "bob"
},
{
"context": "resource3\" :application app-id :user \"bob\" :mail \"[email protected]\" :end +test-time-string+}]}}\n ",
"end": 13320,
"score": 0.836490273475647,
"start": 13315,
"tag": "EMAIL",
"value": "[email protected]"
},
{
"context": "{:resource \"resource1\" :application app-id :user \"bob\" :mail \"[email protected]\" :end \"2100-01-01T00:00:00.000Z\"}]}",
"end": 14686,
"score": 0.9989434480667114,
"start": 14683,
"tag": "USERNAME",
"value": "bob"
},
{
"context": "{:resource \"resource1\" :application app-id :user \"bob\" :mail \"[email protected]\" :end +test-time-string+}]}]\n ",
"end": 16252,
"score": 0.9971179366111755,
"start": 16249,
"tag": "USERNAME",
"value": "bob"
},
{
"context": "resource1\" :application app-id :user \"bob\" :mail \"[email protected]\" :end +test-time-string+}]}]\n ",
"end": 16266,
"score": 0.8229495882987976,
"start": 16261,
"tag": "EMAIL",
"value": "[email protected]"
}
] |
test/clj/rems/db/test_entitlements.clj
|
ossilva/rems
| 0 |
(ns ^:integration rems.db.test-entitlements
(:require [clj-time.core :as time]
[clojure.test :refer :all]
[rems.db.applications :as applications]
[rems.db.core :as db]
[rems.db.entitlements :as entitlements]
[rems.db.test-data-helpers :as test-helpers]
[rems.db.testing :refer [caches-fixture test-db-fixture rollback-db-fixture]]
[rems.json :as json]
[rems.testing-util :refer [fixed-time-fixture suppress-logging]]
[stub-http.core :as stub]))
;; All tests run at a fixed "wall clock" time so that entitlement end
;; timestamps are deterministic and comparable against +test-time-string+.
(def +test-time+ (time/date-time 2050 01 01)) ;; needs to be in the future so that catalogue items are active
(def +test-time-string+ "2050-01-01T00:00:00.000Z") ;; +test-time+ as it appears serialized in JSON payloads
(use-fixtures
  :once
  (fixed-time-fixture +test-time+)
  (suppress-logging "rems.db.entitlements")
  test-db-fixture
  rollback-db-fixture
  caches-fixture)
;; Two entitlements as they come out of the db layer: the first has
;; ended (:end set), the second is still open (no :end key).
(def +entitlements+
  [{:resid "res1" :catappid 11 :userid "user1" :start (time/date-time 2001 10 11) :mail "[email protected]" :end (time/date-time 2003 10 11)}
   {:resid "res2" :catappid 12 :userid "user2" :start (time/date-time 2002 10 11) :mail "[email protected]"}])
;; The JSON payload expected to be POSTed for +entitlements+:
;; :start is dropped, :end is serialized (nil for an open entitlement).
(def +expected-payload+
  [{:resource "res1" :application 11 :user "user1" :mail "[email protected]" :end "2003-10-11T00:00:00.000Z"}
   {:resource "res2" :application 12 :user "user2" :mail "[email protected]" :end nil}])
(defn run-with-server
  "Starts a stub HTTP server that answers /add, /remove and /ga4gh with
  `endpoint-spec`, redefines the :entitlements-target config to point at
  it, and invokes `callback` with the running server."
  [endpoint-spec callback]
  (let [paths ["/add" "/remove" "/ga4gh"]]
    (with-open [server (stub/start! (zipmap paths (repeat endpoint-spec)))]
      (let [target (into {}
                         (for [path paths]
                           [(keyword (subs path 1)) (str (:uri server) path)]))]
        (with-redefs [rems.config/env (assoc rems.config/env :entitlements-target target)]
          (callback server))))))
(deftest test-post-entitlements!
  ;; Exercises the private post-entitlements! helper against a stub HTTP
  ;; server: happy paths for :add and :ga4gh, then the failure modes
  ;; (HTTP error status, timeout, unresolvable URL, no configuration).
  (testing "ok :add action"
    (run-with-server
     {:status 200}
     (fn [server]
       ;; nil return signals success; the POSTed JSON must match the payload
       (is (nil? (#'entitlements/post-entitlements! {:action :add :entitlements +entitlements+})))
       (is (= [+expected-payload+] (for [r (stub/recorded-requests server)]
                                     (json/parse-string (get-in r [:body "postData"]))))))))
  (testing "ok :ga4gh action"
    (run-with-server
     {:status 200}
     (fn [server]
       (is (nil? (#'entitlements/post-entitlements! {:action :ga4gh :entitlements +entitlements+})))
       (let [data (-> (stub/recorded-requests server)
                      first
                      (get-in [:body "postData"])
                      json/parse-string)]
         (is (= [:ga4gh_passport_v1] (keys data)))
         ;; two string entries expected, one per entitlement
         (is (= [true true] (map string? (:ga4gh_passport_v1 data))))))))
  (testing "not-found"
    (run-with-server
     {:status 404}
     (fn [_]
       (is (= "failed: 404" (#'entitlements/post-entitlements! {:action :add :entitlements +entitlements+}))))))
  (testing "timeout"
    (run-with-server
     {:status 200 :delay 5000} ;; timeout of 2500 in code
     (fn [_]
       (is (= "failed: exception" (#'entitlements/post-entitlements! {:action :add :entitlements +entitlements+}))))))
  (testing "invalid url"
    (with-redefs [rems.config/env (assoc rems.config/env
                                         :entitlements-target {:add "http://invalid/entitlements"})]
      (is (= "failed: exception" (#'entitlements/post-entitlements! {:action :add :entitlements +entitlements+})))))
  (testing "no server configured"
    ;; without :entitlements-target configured, posting is expected to
    ;; return nil (treated as success)
    (is (nil? (#'entitlements/post-entitlements! {:action :add :entitlements +entitlements+})))))
(defn- get-requests
  "Returns the stub server's recorded requests as {:path ... :body ...}
  maps, with the JSON request body parsed."
  [server]
  (mapv (fn [request]
          {:path (:path request)
           :body (-> request (get-in [:body "postData"]) json/parse-string)})
        (stub/recorded-requests server)))
(defn- requests-for-paths
  "Returns the distinct recorded requests whose :path equals `path`."
  [server ^String path]
  (->> (get-requests server)
       (set)
       (filter (fn [request] (= path (:path request))))))
(defn- is-valid-ga4gh?
  "True when the first entry of the request body's :ga4gh_passport_v1
  is a string."
  [entry]
  (-> entry :body :ga4gh_passport_v1 first string?))
(deftest test-entitlement-granting
  ;; End-to-end walk through an application's life cycle, checking after
  ;; each command that (a) the entitlements stored in the db and (b) the
  ;; POSTs made to the /add, /remove and /ga4gh callbacks both match
  ;; expectations. The steps are strictly order-dependent: each nested
  ;; `testing` builds on the state left behind by the previous one.
  (let [applicant "bob"
        member "elsa"
        admin "owner"
        wfid (test-helpers/create-workflow! {:handlers [admin]})
        form-id (test-helpers/create-form! {})
        lic-id1 (test-helpers/create-license! {})
        lic-id2 (test-helpers/create-license! {})
        item1 (test-helpers/create-catalogue-item!
               {:resource-id (test-helpers/create-resource! {:resource-ext-id "resource1"
                                                             :license-ids [lic-id1]})
                :form-id form-id
                :workflow-id wfid})
        item2 (test-helpers/create-catalogue-item!
               {:resource-id (test-helpers/create-resource! {:resource-ext-id "resource2"
                                                             :license-ids [lic-id2]})
                :form-id form-id
                :workflow-id wfid})
        item3 (test-helpers/create-catalogue-item!
               {:resource-id (test-helpers/create-resource! {:resource-ext-id "resource3"
                                                             :license-ids [lic-id1]})
                :form-id form-id
                :workflow-id wfid})]
    (test-helpers/create-user! {:eppn applicant :mail "[email protected]" :commonName "Bob"})
    (test-helpers/create-user! {:eppn member :mail "[email protected]" :commonName "Elsa"})
    (test-helpers/create-user! {:eppn admin :mail "[email protected]" :commonName "Owner"})
    (entitlements/process-outbox!) ;; empty outbox from pending posts
    (let [app-id (test-helpers/create-application! {:actor applicant :catalogue-item-ids [item1 item2]})]
      (testing "submitted application should not yet cause entitlements"
        (run-with-server
         {:status 200}
         (fn [server]
           (test-helpers/command! {:type :application.command/accept-licenses
                                   :application-id app-id
                                   :accepted-licenses [lic-id1 lic-id2]
                                   :actor applicant})
           (test-helpers/command! {:type :application.command/submit
                                   :application-id app-id
                                   :actor applicant})
           (test-helpers/command! {:type :application.command/add-member
                                   :application-id app-id
                                   :actor admin
                                   :member {:userid member}})
           (entitlements/process-outbox!)
           ;; no approval yet: nothing in db, nothing POSTed
           (is (empty? (db/get-entitlements {:application app-id})))
           (is (empty? (stub/recorded-requests server)))))
      (testing "approved application, licenses accepted by one user generates entitlements for that user"
        (run-with-server
         {:status 200}
         (fn [server]
           (test-helpers/command! {:type :application.command/approve
                                   :application-id app-id
                                   :actor admin
                                   :comment ""})
           (test-helpers/command! {:type :application.command/accept-licenses
                                   :application-id app-id
                                   :actor member
                                   :accepted-licenses [lic-id1]}) ; only accept some licenses
           (is (= {applicant #{lic-id1 lic-id2}
                   member #{lic-id1}}
                  (:application/accepted-licenses (applications/get-application app-id))))
           (entitlements/process-outbox!)
           ;; only the applicant has accepted all licenses, so only the
           ;; applicant's entitlements appear
           (testing "entitlements exist in db"
             (is (= #{[applicant "resource1"] [applicant "resource2"]}
                    (set (map (juxt :userid :resid) (db/get-entitlements {:application app-id}))))))
           (testing "entitlements were POSTed to callbacks"
             (let [add-paths (requests-for-paths server "/add")
                   ga4gh-paths (requests-for-paths server "/ga4gh")]
               (is (= #{{:path "/add" :body [{:application app-id :mail "[email protected]" :resource "resource1" :user "bob" :end nil}]}
                        {:path "/add" :body [{:application app-id :mail "[email protected]" :resource "resource2" :user "bob" :end nil}]}}
                      (set add-paths)))
               (is (= 2 (count ga4gh-paths)))
               (is (every? is-valid-ga4gh? ga4gh-paths))))))))
      (testing "approved application, more accepted licenses generates more entitlements"
        (run-with-server
         {:status 200}
         (fn [server]
           (test-helpers/command! {:type :application.command/accept-licenses
                                   :application-id app-id
                                   :actor member
                                   :accepted-licenses [lic-id1 lic-id2]}) ; now accept all licenses
           (entitlements/process-outbox!)
           (testing "all entitlements exist in db"
             (is (= #{[applicant "resource1"] [applicant "resource2"]
                      [member "resource1"] [member "resource2"]}
                    (set (map (juxt :userid :resid) (db/get-entitlements {:application app-id}))))))
           ;; only the member's entitlements are new, so only those get POSTed
           (testing "new entitlements were POSTed to callbacks"
             (let [add-paths (requests-for-paths server "/add")
                   ga4gh-paths (requests-for-paths server "/ga4gh")]
               (is (= #{{:path "/add" :body [{:resource "resource1" :application app-id :user "elsa" :mail "[email protected]" :end nil}]}
                        {:path "/add" :body [{:resource "resource2" :application app-id :user "elsa" :mail "[email protected]" :end nil}]}}
                      (set add-paths)))
               (is (= 2 (count ga4gh-paths)))
               (is (every? is-valid-ga4gh? ga4gh-paths)))))))
      (testing "removing a member ends entitlements"
        (run-with-server
         {:status 200}
         (fn [server]
           (test-helpers/command! {:type :application.command/remove-member
                                   :application-id app-id
                                   :actor admin
                                   :member {:userid member}
                                   :comment "Left team"})
           (entitlements/process-outbox!)
           (testing "entitlements removed from db"
             ;; :active-at filters out entitlements whose :end has passed
             (is (= #{[applicant "resource1"] [applicant "resource2"]}
                    (set (map (juxt :userid :resid) (db/get-entitlements {:application app-id :active-at (time/now)}))))))
           (testing "removed entitlements were POSTed to callback"
             (is (= #{{:path "/remove" :body [{:resource "resource1" :application app-id :user "elsa" :mail "[email protected]" :end +test-time-string+}]}
                      {:path "/remove" :body [{:resource "resource2" :application app-id :user "elsa" :mail "[email protected]" :end +test-time-string+}]}}
                    (set (get-requests server))))))))
      (testing "changing resources changes entitlements"
        (run-with-server
         {:status 200}
         (fn [server]
           (test-helpers/command! {:type :application.command/change-resources
                                   :application-id app-id
                                   :actor admin
                                   :catalogue-item-ids [item1 item3]
                                   :comment "Removed second resource, added third resource"})
           (entitlements/process-outbox!)
           (testing "entitlements changed in db"
             (is (= #{[applicant "resource1"] [applicant "resource3"]}
                    (set (map (juxt :userid :resid) (db/get-entitlements {:application app-id :active-at (time/now)}))))))
           ;; resource3 is added, resource2 is ended, resource1 is untouched
           (testing "entitlement changes POSTed to callbacks"
             (let [add-paths (requests-for-paths server "/add")
                   remove-paths (requests-for-paths server "/remove")
                   ga4gh-paths (requests-for-paths server "/ga4gh")]
               (is (= #{{:path "/add" :body [{:resource "resource3" :application app-id :user "bob" :mail "[email protected]" :end nil}]}}
                      (set add-paths)))
               (is (= #{{:path "/remove" :body [{:resource "resource2" :application app-id :user "bob" :mail "[email protected]" :end +test-time-string+}]}}
                      (set remove-paths)))
               (is (= 1 (count ga4gh-paths)))
               (is (every? is-valid-ga4gh? ga4gh-paths)))))))
      (testing "closed application should end entitlements"
        (run-with-server
         {:status 200}
         (fn [server]
           (test-helpers/command! {:type :application.command/close
                                   :application-id app-id
                                   :actor admin
                                   :comment "Finished"})
           (entitlements/process-outbox!)
           (testing "entitlements ended in db"
             (is (= [] (db/get-entitlements {:application app-id :active-at (time/now)}))))
           (testing "ended entitlements POSTed to callback"
             (is (= #{{:path "/remove" :body [{:resource "resource1" :application app-id :user "bob" :mail "[email protected]" :end +test-time-string+}]}
                      {:path "/remove" :body [{:resource "resource3" :application app-id :user "bob" :mail "[email protected]" :end +test-time-string+}]}}
                    (set (get-requests server)))))))))
    (testing "approve with end time"
      ;; a fresh application approved with an explicit :entitlement-end;
      ;; the end date must show up both in the db and in the /add payload
      (let [end (time/date-time 2100 01 01)
            app-id (test-helpers/create-application! {:actor applicant :catalogue-item-ids [item1]})]
        (test-helpers/command! {:type :application.command/accept-licenses
                                :application-id app-id
                                :accepted-licenses [lic-id1 lic-id2]
                                :actor applicant})
        (test-helpers/command! {:type :application.command/submit
                                :application-id app-id
                                :actor applicant})
        (test-helpers/command! {:type :application.command/approve
                                :application-id app-id
                                :actor admin
                                :entitlement-end end
                                :comment ""})
        (run-with-server
         {:status 200}
         (fn [server]
           (entitlements/process-outbox!)
           (is (= [{:resid "resource1" :userid applicant :end (time/date-time 2100 01 01)}]
                  (mapv #(select-keys % [:resid :userid :end]) (db/get-entitlements {:application app-id}))))
           (is (= [{:path "/add" :body [{:resource "resource1" :application app-id :user "bob" :mail "[email protected]" :end "2100-01-01T00:00:00.000Z"}]}]
                  (requests-for-paths server "/add")))))))
    ;; a fresh approved application, used to test revocation
    (let [app-id (test-helpers/create-application! {:actor applicant :catalogue-item-ids [item1]})]
      (test-helpers/command! {:type :application.command/accept-licenses
                              :application-id app-id
                              :accepted-licenses [lic-id1 lic-id2]
                              :actor applicant})
      (test-helpers/command! {:type :application.command/submit
                              :application-id app-id
                              :actor applicant})
      (test-helpers/command! {:type :application.command/approve
                              :application-id app-id
                              :actor admin
                              :comment ""})
      (entitlements/process-outbox!)
      (testing "revoked application should end entitlements"
        (run-with-server
         {:status 200}
         (fn [server]
           (test-helpers/command! {:type :application.command/revoke
                                   :application-id app-id
                                   :actor admin
                                   :comment "Banned"})
           (entitlements/process-outbox!)
           (testing "entitlements ended in db"
             (is (= [] (db/get-entitlements {:application app-id :active-at (time/now)}))))
           (testing "ended entitlements POSTed to callback"
             (is (= [{:path "/remove" :body [{:resource "resource1" :application app-id :user "bob" :mail "[email protected]" :end +test-time-string+}]}]
                    (get-requests server))))))))))
|
92394
|
(ns ^:integration rems.db.test-entitlements
(:require [clj-time.core :as time]
[clojure.test :refer :all]
[rems.db.applications :as applications]
[rems.db.core :as db]
[rems.db.entitlements :as entitlements]
[rems.db.test-data-helpers :as test-helpers]
[rems.db.testing :refer [caches-fixture test-db-fixture rollback-db-fixture]]
[rems.json :as json]
[rems.testing-util :refer [fixed-time-fixture suppress-logging]]
[stub-http.core :as stub]))
(def +test-time+ (time/date-time 2050 01 01)) ;; needs to be in the future so that catalogue items are active
(def +test-time-string+ "2050-01-01T00:00:00.000Z")
(use-fixtures
:once
(fixed-time-fixture +test-time+)
(suppress-logging "rems.db.entitlements")
test-db-fixture
rollback-db-fixture
caches-fixture)
(def +entitlements+
[{:resid "res1" :catappid 11 :userid "user1" :start (time/date-time 2001 10 11) :mail "<EMAIL>" :end (time/date-time 2003 10 11)}
{:resid "res2" :catappid 12 :userid "user2" :start (time/date-time 2002 10 11) :mail "<EMAIL>"}])
(def +expected-payload+
[{:resource "res1" :application 11 :user "user1" :mail "<EMAIL>" :end "2003-10-11T00:00:00.000Z"}
{:resource "res2" :application 12 :user "user2" :mail "<EMAIL>" :end nil}])
(defn run-with-server
[endpoint-spec callback]
(with-open [server (stub/start! {"/add" endpoint-spec
"/remove" endpoint-spec
"/ga4gh" endpoint-spec})]
(with-redefs [rems.config/env (assoc rems.config/env
:entitlements-target {:add (str (:uri server) "/add")
:remove (str (:uri server) "/remove")
:ga4gh (str (:uri server) "/ga4gh")})]
(callback server))))
(deftest test-post-entitlements!
(testing "ok :add action"
(run-with-server
{:status 200}
(fn [server]
(is (nil? (#'entitlements/post-entitlements! {:action :add :entitlements +entitlements+})))
(is (= [+expected-payload+] (for [r (stub/recorded-requests server)]
(json/parse-string (get-in r [:body "postData"]))))))))
(testing "ok :ga4gh action"
(run-with-server
{:status 200}
(fn [server]
(is (nil? (#'entitlements/post-entitlements! {:action :ga4gh :entitlements +entitlements+})))
(let [data (-> (stub/recorded-requests server)
first
(get-in [:body "postData"])
json/parse-string)]
(is (= [:ga4gh_passport_v1] (keys data)))
(is (= [true true] (map string? (:ga4gh_passport_v1 data))))))))
(testing "not-found"
(run-with-server
{:status 404}
(fn [_]
(is (= "failed: 404" (#'entitlements/post-entitlements! {:action :add :entitlements +entitlements+}))))))
(testing "timeout"
(run-with-server
{:status 200 :delay 5000} ;; timeout of 2500 in code
(fn [_]
(is (= "failed: exception" (#'entitlements/post-entitlements! {:action :add :entitlements +entitlements+}))))))
(testing "invalid url"
(with-redefs [rems.config/env (assoc rems.config/env
:entitlements-target {:add "http://invalid/entitlements"})]
(is (= "failed: exception" (#'entitlements/post-entitlements! {:action :add :entitlements +entitlements+})))))
(testing "no server configured"
(is (nil? (#'entitlements/post-entitlements! {:action :add :entitlements +entitlements+})))))
(defn- get-requests [server]
(doall
(for [req (stub/recorded-requests server)]
{:path (:path req)
:body (json/parse-string (get-in req [:body "postData"]))})))
(defn- requests-for-paths [server ^String path]
(filter #(= (% :path) path) (set (get-requests server))))
(defn- is-valid-ga4gh? [entry]
(string? (first (:ga4gh_passport_v1 (:body entry)))))
(deftest test-entitlement-granting
(let [applicant "bob"
member "elsa"
admin "owner"
wfid (test-helpers/create-workflow! {:handlers [admin]})
form-id (test-helpers/create-form! {})
lic-id1 (test-helpers/create-license! {})
lic-id2 (test-helpers/create-license! {})
item1 (test-helpers/create-catalogue-item!
{:resource-id (test-helpers/create-resource! {:resource-ext-id "resource1"
:license-ids [lic-id1]})
:form-id form-id
:workflow-id wfid})
item2 (test-helpers/create-catalogue-item!
{:resource-id (test-helpers/create-resource! {:resource-ext-id "resource2"
:license-ids [lic-id2]})
:form-id form-id
:workflow-id wfid})
item3 (test-helpers/create-catalogue-item!
{:resource-id (test-helpers/create-resource! {:resource-ext-id "resource3"
:license-ids [lic-id1]})
:form-id form-id
:workflow-id wfid})]
(test-helpers/create-user! {:eppn applicant :mail "<EMAIL>" :commonName "<NAME>"})
(test-helpers/create-user! {:eppn member :mail "<EMAIL>" :commonName "<NAME>"})
(test-helpers/create-user! {:eppn admin :mail "<EMAIL>" :commonName "<NAME>"})
(entitlements/process-outbox!) ;; empty outbox from pending posts
(let [app-id (test-helpers/create-application! {:actor applicant :catalogue-item-ids [item1 item2]})]
(testing "submitted application should not yet cause entitlements"
(run-with-server
{:status 200}
(fn [server]
(test-helpers/command! {:type :application.command/accept-licenses
:application-id app-id
:accepted-licenses [lic-id1 lic-id2]
:actor applicant})
(test-helpers/command! {:type :application.command/submit
:application-id app-id
:actor applicant})
(test-helpers/command! {:type :application.command/add-member
:application-id app-id
:actor admin
:member {:userid member}})
(entitlements/process-outbox!)
(is (empty? (db/get-entitlements {:application app-id})))
(is (empty? (stub/recorded-requests server)))))
(testing "approved application, licenses accepted by one user generates entitlements for that user"
(run-with-server
{:status 200}
(fn [server]
(test-helpers/command! {:type :application.command/approve
:application-id app-id
:actor admin
:comment ""})
(test-helpers/command! {:type :application.command/accept-licenses
:application-id app-id
:actor member
:accepted-licenses [lic-id1]}) ; only accept some licenses
(is (= {applicant #{lic-id1 lic-id2}
member #{lic-id1}}
(:application/accepted-licenses (applications/get-application app-id))))
(entitlements/process-outbox!)
(testing "entitlements exist in db"
(is (= #{[applicant "resource1"] [applicant "resource2"]}
(set (map (juxt :userid :resid) (db/get-entitlements {:application app-id}))))))
(testing "entitlements were POSTed to callbacks"
(let [add-paths (requests-for-paths server "/add")
ga4gh-paths (requests-for-paths server "/ga4gh")]
(is (= #{{:path "/add" :body [{:application app-id :mail "[email protected]" :resource "resource1" :user "bob" :end nil}]}
{:path "/add" :body [{:application app-id :mail "[email protected]" :resource "resource2" :user "bob" :end nil}]}}
(set add-paths)))
(is (= 2 (count ga4gh-paths)))
(is (every? is-valid-ga4gh? ga4gh-paths))))))))
(testing "approved application, more accepted licenses generates more entitlements"
(run-with-server
{:status 200}
(fn [server]
(test-helpers/command! {:type :application.command/accept-licenses
:application-id app-id
:actor member
:accepted-licenses [lic-id1 lic-id2]}) ; now accept all licenses
(entitlements/process-outbox!)
(testing "all entitlements exist in db"
(is (= #{[applicant "resource1"] [applicant "resource2"]
[member "resource1"] [member "resource2"]}
(set (map (juxt :userid :resid) (db/get-entitlements {:application app-id}))))))
(testing "new entitlements were POSTed to callbacks"
(let [add-paths (requests-for-paths server "/add")
ga4gh-paths (requests-for-paths server "/ga4gh")]
(is (= #{{:path "/add" :body [{:resource "resource1" :application app-id :user "elsa" :mail "<EMAIL>" :end nil}]}
{:path "/add" :body [{:resource "resource2" :application app-id :user "elsa" :mail "<EMAIL>" :end nil}]}}
(set add-paths)))
(is (= 2 (count ga4gh-paths)))
(is (every? is-valid-ga4gh? ga4gh-paths)))))))
(testing "removing a member ends entitlements"
(run-with-server
{:status 200}
(fn [server]
(test-helpers/command! {:type :application.command/remove-member
:application-id app-id
:actor admin
:member {:userid member}
:comment "Left team"})
(entitlements/process-outbox!)
(testing "entitlements removed from db"
(is (= #{[applicant "resource1"] [applicant "resource2"]}
(set (map (juxt :userid :resid) (db/get-entitlements {:application app-id :active-at (time/now)}))))))
(testing "removed entitlements were POSTed to callback"
(is (= #{{:path "/remove" :body [{:resource "resource1" :application app-id :user "elsa" :mail "<EMAIL>" :end +test-time-string+}]}
{:path "/remove" :body [{:resource "resource2" :application app-id :user "elsa" :mail "<EMAIL>" :end +test-time-string+}]}}
(set (get-requests server))))))))
(testing "changing resources changes entitlements"
(run-with-server
{:status 200}
(fn [server]
(test-helpers/command! {:type :application.command/change-resources
:application-id app-id
:actor admin
:catalogue-item-ids [item1 item3]
:comment "Removed second resource, added third resource"})
(entitlements/process-outbox!)
(testing "entitlements changed in db"
(is (= #{[applicant "resource1"] [applicant "resource3"]}
(set (map (juxt :userid :resid) (db/get-entitlements {:application app-id :active-at (time/now)}))))))
(testing "entitlement changes POSTed to callbacks"
(let [add-paths (requests-for-paths server "/add")
remove-paths (requests-for-paths server "/remove")
ga4gh-paths (requests-for-paths server "/ga4gh")]
(is (= #{{:path "/add" :body [{:resource "resource3" :application app-id :user "bob" :mail "<EMAIL>" :end nil}]}}
(set add-paths)))
(is (= #{{:path "/remove" :body [{:resource "resource2" :application app-id :user "bob" :mail "<EMAIL>" :end +test-time-string+}]}}
(set remove-paths)))
(is (= 1 (count ga4gh-paths)))
(is (every? is-valid-ga4gh? ga4gh-paths)))))))
(testing "closed application should end entitlements"
(run-with-server
{:status 200}
(fn [server]
(test-helpers/command! {:type :application.command/close
:application-id app-id
:actor admin
:comment "Finished"})
(entitlements/process-outbox!)
(testing "entitlements ended in db"
(is (= [] (db/get-entitlements {:application app-id :active-at (time/now)}))))
(testing "ended entitlements POSTed to callback"
(is (= #{{:path "/remove" :body [{:resource "resource1" :application app-id :user "bob" :mail "b<EMAIL>@o<EMAIL>.b" :end +test-time-string+}]}
{:path "/remove" :body [{:resource "resource3" :application app-id :user "bob" :mail "<EMAIL>" :end +test-time-string+}]}}
(set (get-requests server)))))))))
(testing "approve with end time"
(let [end (time/date-time 2100 01 01)
app-id (test-helpers/create-application! {:actor applicant :catalogue-item-ids [item1]})]
(test-helpers/command! {:type :application.command/accept-licenses
:application-id app-id
:accepted-licenses [lic-id1 lic-id2]
:actor applicant})
(test-helpers/command! {:type :application.command/submit
:application-id app-id
:actor applicant})
(test-helpers/command! {:type :application.command/approve
:application-id app-id
:actor admin
:entitlement-end end
:comment ""})
(run-with-server
{:status 200}
(fn [server]
(entitlements/process-outbox!)
(is (= [{:resid "resource1" :userid applicant :end (time/date-time 2100 01 01)}]
(mapv #(select-keys % [:resid :userid :end]) (db/get-entitlements {:application app-id}))))
(is (= [{:path "/add" :body [{:resource "resource1" :application app-id :user "bob" :mail "[email protected]" :end "2100-01-01T00:00:00.000Z"}]}]
(requests-for-paths server "/add")))))))
(let [app-id (test-helpers/create-application! {:actor applicant :catalogue-item-ids [item1]})]
(test-helpers/command! {:type :application.command/accept-licenses
:application-id app-id
:accepted-licenses [lic-id1 lic-id2]
:actor applicant})
(test-helpers/command! {:type :application.command/submit
:application-id app-id
:actor applicant})
(test-helpers/command! {:type :application.command/approve
:application-id app-id
:actor admin
:comment ""})
(entitlements/process-outbox!)
(testing "revoked application should end entitlements"
(run-with-server
{:status 200}
(fn [server]
(test-helpers/command! {:type :application.command/revoke
:application-id app-id
:actor admin
:comment "Banned"})
(entitlements/process-outbox!)
(testing "entitlements ended in db"
(is (= [] (db/get-entitlements {:application app-id :active-at (time/now)}))))
(testing "ended entitlements POSTed to callback"
(is (= [{:path "/remove" :body [{:resource "resource1" :application app-id :user "bob" :mail "<EMAIL>" :end +test-time-string+}]}]
(get-requests server))))))))))
| true |
(ns ^:integration rems.db.test-entitlements
(:require [clj-time.core :as time]
[clojure.test :refer :all]
[rems.db.applications :as applications]
[rems.db.core :as db]
[rems.db.entitlements :as entitlements]
[rems.db.test-data-helpers :as test-helpers]
[rems.db.testing :refer [caches-fixture test-db-fixture rollback-db-fixture]]
[rems.json :as json]
[rems.testing-util :refer [fixed-time-fixture suppress-logging]]
[stub-http.core :as stub]))
(def +test-time+ (time/date-time 2050 01 01)) ;; needs to be in the future so that catalogue items are active
(def +test-time-string+ "2050-01-01T00:00:00.000Z") ;; ISO-8601 rendering of +test-time+, as it appears in JSON callback payloads
;; All tests run at the fixed future time +test-time+ (so the catalogue items
;; created below are active), inside a rolled-back DB transaction, with the
;; entitlements logger silenced and caches reset between runs.
(use-fixtures
  :once
  (fixed-time-fixture +test-time+)
  (suppress-logging "rems.db.entitlements")
  test-db-fixture
  rollback-db-fixture
  caches-fixture)
;; Two fake entitlement rows as read from the db: the first already has an
;; :end date, the second is open-ended (no :end key).
(def +entitlements+
  [{:resid "res1" :catappid 11 :userid "user1" :start (time/date-time 2001 10 11) :mail "PI:EMAIL:<EMAIL>END_PI" :end (time/date-time 2003 10 11)}
   {:resid "res2" :catappid 12 :userid "user2" :start (time/date-time 2002 10 11) :mail "PI:EMAIL:<EMAIL>END_PI"}])
;; The JSON body post-entitlements! is expected to POST for +entitlements+:
;; :start is dropped and a missing :end is serialized as nil.
(def +expected-payload+
  [{:resource "res1" :application 11 :user "user1" :mail "PI:EMAIL:<EMAIL>END_PI" :end "2003-10-11T00:00:00.000Z"}
   {:resource "res2" :application 12 :user "user2" :mail "PI:EMAIL:<EMAIL>END_PI" :end nil}])
(defn run-with-server
  "Starts a stub HTTP server whose /add, /remove and /ga4gh endpoints all
  respond according to `endpoint-spec`, points the :entitlements-target
  config at those endpoints, and calls `callback` with the running server.
  The server is closed and the config restored when `callback` returns."
  [endpoint-spec callback]
  (with-open [stub-server (stub/start! {"/add" endpoint-spec
                                        "/remove" endpoint-spec
                                        "/ga4gh" endpoint-spec})]
    (let [base-uri (:uri stub-server)
          targets {:add (str base-uri "/add")
                   :remove (str base-uri "/remove")
                   :ga4gh (str base-uri "/ga4gh")}]
      (with-redefs [rems.config/env (assoc rems.config/env :entitlements-target targets)]
        (callback stub-server)))))
;; Unit tests for the private poster fn: the JSON payload for :add, the
;; ga4gh passport payload shape, and error reporting — HTTP failures
;; (404 / timeout / bad URL) return a "failed: ..." string, while a
;; missing :entitlements-target config is a silent no-op (returns nil).
(deftest test-post-entitlements!
  (testing "ok :add action"
    (run-with-server
     {:status 200}
     (fn [server]
       ;; #'var-quote is used to reach the private post-entitlements! fn
       (is (nil? (#'entitlements/post-entitlements! {:action :add :entitlements +entitlements+})))
       (is (= [+expected-payload+] (for [r (stub/recorded-requests server)]
                                     (json/parse-string (get-in r [:body "postData"]))))))))
  (testing "ok :ga4gh action"
    (run-with-server
     {:status 200}
     (fn [server]
       (is (nil? (#'entitlements/post-entitlements! {:action :ga4gh :entitlements +entitlements+})))
       (let [data (-> (stub/recorded-requests server)
                      first
                      (get-in [:body "postData"])
                      json/parse-string)]
         (is (= [:ga4gh_passport_v1] (keys data)))
         ;; one visa (JWT string) per entitlement
         (is (= [true true] (map string? (:ga4gh_passport_v1 data))))))))
  (testing "not-found"
    (run-with-server
     {:status 404}
     (fn [_]
       (is (= "failed: 404" (#'entitlements/post-entitlements! {:action :add :entitlements +entitlements+}))))))
  (testing "timeout"
    (run-with-server
     {:status 200 :delay 5000} ;; timeout of 2500 in code
     (fn [_]
       (is (= "failed: exception" (#'entitlements/post-entitlements! {:action :add :entitlements +entitlements+}))))))
  (testing "invalid url"
    (with-redefs [rems.config/env (assoc rems.config/env
                                         :entitlements-target {:add "http://invalid/entitlements"})]
      (is (= "failed: exception" (#'entitlements/post-entitlements! {:action :add :entitlements +entitlements+})))))
  (testing "no server configured"
    (is (nil? (#'entitlements/post-entitlements! {:action :add :entitlements +entitlements+})))))
(defn- get-requests
  "Returns every request recorded by the stub server as a {:path ... :body ...}
  map, with the JSON request body already parsed."
  [server]
  (mapv (fn [request]
          {:path (:path request)
           :body (-> request
                     (get-in [:body "postData"])
                     json/parse-string)})
        (stub/recorded-requests server)))
(defn- requests-for-paths
  "Returns the distinct recorded requests whose :path equals `path`.
  Duplicate requests are collapsed by passing through a set first."
  [server ^String path]
  (->> (get-requests server)
       set
       (filter (fn [request] (= path (:path request))))))
(defn- is-valid-ga4gh?
  "True when the request body looks like a ga4gh visa payload, i.e. the first
  element under :ga4gh_passport_v1 is a (JWT) string."
  [entry]
  (-> entry
      :body
      :ga4gh_passport_v1
      first
      string?))
;; End-to-end lifecycle test for entitlement granting. A submitted
;; application yields no entitlements; approval plus accepted licenses
;; creates one entitlement per (user, resource); member removal, resource
;; changes, close and revoke end them. After each step both the
;; entitlements table and the POSTed /add, /remove and /ga4gh callbacks
;; are checked.
(deftest test-entitlement-granting
  (let [applicant "bob"
        member "elsa"
        admin "owner"
        wfid (test-helpers/create-workflow! {:handlers [admin]})
        form-id (test-helpers/create-form! {})
        lic-id1 (test-helpers/create-license! {})
        lic-id2 (test-helpers/create-license! {})
        item1 (test-helpers/create-catalogue-item!
               {:resource-id (test-helpers/create-resource! {:resource-ext-id "resource1"
                                                             :license-ids [lic-id1]})
                :form-id form-id
                :workflow-id wfid})
        item2 (test-helpers/create-catalogue-item!
               {:resource-id (test-helpers/create-resource! {:resource-ext-id "resource2"
                                                             :license-ids [lic-id2]})
                :form-id form-id
                :workflow-id wfid})
        item3 (test-helpers/create-catalogue-item!
               {:resource-id (test-helpers/create-resource! {:resource-ext-id "resource3"
                                                             :license-ids [lic-id1]})
                :form-id form-id
                :workflow-id wfid})]
    (test-helpers/create-user! {:eppn applicant :mail "PI:EMAIL:<EMAIL>END_PI" :commonName "PI:NAME:<NAME>END_PI"})
    (test-helpers/create-user! {:eppn member :mail "PI:EMAIL:<EMAIL>END_PI" :commonName "PI:NAME:<NAME>END_PI"})
    (test-helpers/create-user! {:eppn admin :mail "PI:EMAIL:<EMAIL>END_PI" :commonName "PI:NAME:<NAME>END_PI"})
    (entitlements/process-outbox!) ;; empty outbox from pending posts
    (let [app-id (test-helpers/create-application! {:actor applicant :catalogue-item-ids [item1 item2]})]
      (testing "submitted application should not yet cause entitlements"
        (run-with-server
         {:status 200}
         (fn [server]
           (test-helpers/command! {:type :application.command/accept-licenses
                                   :application-id app-id
                                   :accepted-licenses [lic-id1 lic-id2]
                                   :actor applicant})
           (test-helpers/command! {:type :application.command/submit
                                   :application-id app-id
                                   :actor applicant})
           (test-helpers/command! {:type :application.command/add-member
                                   :application-id app-id
                                   :actor admin
                                   :member {:userid member}})
           (entitlements/process-outbox!)
           (is (empty? (db/get-entitlements {:application app-id})))
           (is (empty? (stub/recorded-requests server)))))
      (testing "approved application, licenses accepted by one user generates entitlements for that user"
        (run-with-server
         {:status 200}
         (fn [server]
           (test-helpers/command! {:type :application.command/approve
                                   :application-id app-id
                                   :actor admin
                                   :comment ""})
           (test-helpers/command! {:type :application.command/accept-licenses
                                   :application-id app-id
                                   :actor member
                                   :accepted-licenses [lic-id1]}) ; only accept some licenses
           (is (= {applicant #{lic-id1 lic-id2}
                   member #{lic-id1}}
                  (:application/accepted-licenses (applications/get-application app-id))))
           (entitlements/process-outbox!)
           (testing "entitlements exist in db"
             (is (= #{[applicant "resource1"] [applicant "resource2"]}
                    (set (map (juxt :userid :resid) (db/get-entitlements {:application app-id}))))))
           (testing "entitlements were POSTed to callbacks"
             (let [add-paths (requests-for-paths server "/add")
                   ga4gh-paths (requests-for-paths server "/ga4gh")]
               (is (= #{{:path "/add" :body [{:application app-id :mail "[email protected]" :resource "resource1" :user "bob" :end nil}]}
                        {:path "/add" :body [{:application app-id :mail "[email protected]" :resource "resource2" :user "bob" :end nil}]}}
                      (set add-paths)))
               (is (= 2 (count ga4gh-paths)))
               (is (every? is-valid-ga4gh? ga4gh-paths))))))))
      (testing "approved application, more accepted licenses generates more entitlements"
        (run-with-server
         {:status 200}
         (fn [server]
           (test-helpers/command! {:type :application.command/accept-licenses
                                   :application-id app-id
                                   :actor member
                                   :accepted-licenses [lic-id1 lic-id2]}) ; now accept all licenses
           (entitlements/process-outbox!)
           (testing "all entitlements exist in db"
             (is (= #{[applicant "resource1"] [applicant "resource2"]
                      [member "resource1"] [member "resource2"]}
                    (set (map (juxt :userid :resid) (db/get-entitlements {:application app-id}))))))
           (testing "new entitlements were POSTed to callbacks"
             (let [add-paths (requests-for-paths server "/add")
                   ga4gh-paths (requests-for-paths server "/ga4gh")]
               (is (= #{{:path "/add" :body [{:resource "resource1" :application app-id :user "elsa" :mail "PI:EMAIL:<EMAIL>END_PI" :end nil}]}
                        {:path "/add" :body [{:resource "resource2" :application app-id :user "elsa" :mail "PI:EMAIL:<EMAIL>END_PI" :end nil}]}}
                      (set add-paths)))
               (is (= 2 (count ga4gh-paths)))
               (is (every? is-valid-ga4gh? ga4gh-paths)))))))
      (testing "removing a member ends entitlements"
        (run-with-server
         {:status 200}
         (fn [server]
           (test-helpers/command! {:type :application.command/remove-member
                                   :application-id app-id
                                   :actor admin
                                   :member {:userid member}
                                   :comment "Left team"})
           (entitlements/process-outbox!)
           (testing "entitlements removed from db"
             (is (= #{[applicant "resource1"] [applicant "resource2"]}
                    (set (map (juxt :userid :resid) (db/get-entitlements {:application app-id :active-at (time/now)}))))))
           (testing "removed entitlements were POSTed to callback"
             (is (= #{{:path "/remove" :body [{:resource "resource1" :application app-id :user "elsa" :mail "PI:EMAIL:<EMAIL>END_PI" :end +test-time-string+}]}
                      {:path "/remove" :body [{:resource "resource2" :application app-id :user "elsa" :mail "PI:EMAIL:<EMAIL>END_PI" :end +test-time-string+}]}}
                    (set (get-requests server))))))))
      (testing "changing resources changes entitlements"
        (run-with-server
         {:status 200}
         (fn [server]
           (test-helpers/command! {:type :application.command/change-resources
                                   :application-id app-id
                                   :actor admin
                                   :catalogue-item-ids [item1 item3]
                                   :comment "Removed second resource, added third resource"})
           (entitlements/process-outbox!)
           (testing "entitlements changed in db"
             (is (= #{[applicant "resource1"] [applicant "resource3"]}
                    (set (map (juxt :userid :resid) (db/get-entitlements {:application app-id :active-at (time/now)}))))))
           (testing "entitlement changes POSTed to callbacks"
             (let [add-paths (requests-for-paths server "/add")
                   remove-paths (requests-for-paths server "/remove")
                   ga4gh-paths (requests-for-paths server "/ga4gh")]
               (is (= #{{:path "/add" :body [{:resource "resource3" :application app-id :user "bob" :mail "PI:EMAIL:<EMAIL>END_PI" :end nil}]}}
                      (set add-paths)))
               (is (= #{{:path "/remove" :body [{:resource "resource2" :application app-id :user "bob" :mail "PI:EMAIL:<EMAIL>END_PI" :end +test-time-string+}]}}
                      (set remove-paths)))
               (is (= 1 (count ga4gh-paths)))
               (is (every? is-valid-ga4gh? ga4gh-paths)))))))
      (testing "closed application should end entitlements"
        (run-with-server
         {:status 200}
         (fn [server]
           (test-helpers/command! {:type :application.command/close
                                   :application-id app-id
                                   :actor admin
                                   :comment "Finished"})
           (entitlements/process-outbox!)
           (testing "entitlements ended in db"
             (is (= [] (db/get-entitlements {:application app-id :active-at (time/now)}))))
           (testing "ended entitlements POSTed to callback"
             (is (= #{{:path "/remove" :body [{:resource "resource1" :application app-id :user "bob" :mail "bPI:EMAIL:<EMAIL>END_PI@oPI:EMAIL:<EMAIL>END_PI.b" :end +test-time-string+}]}
                      {:path "/remove" :body [{:resource "resource3" :application app-id :user "bob" :mail "PI:EMAIL:<EMAIL>END_PI" :end +test-time-string+}]}}
                    (set (get-requests server)))))))))
    (testing "approve with end time"
      ;; Approving with :entitlement-end should persist the end date and
      ;; include it in the /add callback payload.
      (let [end (time/date-time 2100 01 01)
            app-id (test-helpers/create-application! {:actor applicant :catalogue-item-ids [item1]})]
        (test-helpers/command! {:type :application.command/accept-licenses
                                :application-id app-id
                                :accepted-licenses [lic-id1 lic-id2]
                                :actor applicant})
        (test-helpers/command! {:type :application.command/submit
                                :application-id app-id
                                :actor applicant})
        (test-helpers/command! {:type :application.command/approve
                                :application-id app-id
                                :actor admin
                                :entitlement-end end
                                :comment ""})
        (run-with-server
         {:status 200}
         (fn [server]
           (entitlements/process-outbox!)
           (is (= [{:resid "resource1" :userid applicant :end (time/date-time 2100 01 01)}]
                  (mapv #(select-keys % [:resid :userid :end]) (db/get-entitlements {:application app-id}))))
           (is (= [{:path "/add" :body [{:resource "resource1" :application app-id :user "bob" :mail "[email protected]" :end "2100-01-01T00:00:00.000Z"}]}]
                  (requests-for-paths server "/add")))))))
    ;; A second approved single-resource application to exercise the revoke path.
    (let [app-id (test-helpers/create-application! {:actor applicant :catalogue-item-ids [item1]})]
      (test-helpers/command! {:type :application.command/accept-licenses
                              :application-id app-id
                              :accepted-licenses [lic-id1 lic-id2]
                              :actor applicant})
      (test-helpers/command! {:type :application.command/submit
                              :application-id app-id
                              :actor applicant})
      (test-helpers/command! {:type :application.command/approve
                              :application-id app-id
                              :actor admin
                              :comment ""})
      (entitlements/process-outbox!)
      (testing "revoked application should end entitlements"
        (run-with-server
         {:status 200}
         (fn [server]
           (test-helpers/command! {:type :application.command/revoke
                                   :application-id app-id
                                   :actor admin
                                   :comment "Banned"})
           (entitlements/process-outbox!)
           (testing "entitlements ended in db"
             (is (= [] (db/get-entitlements {:application app-id :active-at (time/now)}))))
           (testing "ended entitlements POSTed to callback"
             (is (= [{:path "/remove" :body [{:resource "resource1" :application app-id :user "bob" :mail "PI:EMAIL:<EMAIL>END_PI" :end +test-time-string+}]}]
                  (get-requests server))))))))))
|
[
{
"context": "; Copyright (c) 2010-2021 Haifeng Li. All rights reserved.\n;\n; Smile is free softwar",
"end": 38,
"score": 0.999737024307251,
"start": 28,
"tag": "NAME",
"value": "Haifeng Li"
},
{
"context": "le.regression\n \"Regression Analysis\"\n {:author \"Haifeng Li\"}\n (:import [smile.regression OLS RidgeRegressio",
"end": 765,
"score": 0.9997809529304504,
"start": 755,
"tag": "NAME",
"value": "Haifeng Li"
},
{
"context": " Soon after the introduction of gradient boosting Friedman proposed a\n minor modification to the algorithm,",
"end": 25523,
"score": 0.9902691841125488,
"start": 25515,
"tag": "NAME",
"value": "Friedman"
},
{
"context": "raining set drawn at random\n without replacement. Friedman observed a substantial improvement in\n gradient ",
"end": 25795,
"score": 0.9939799904823303,
"start": 25787,
"tag": "NAME",
"value": "Friedman"
}
] |
clojure/src/smile/regression.clj
|
takanori-ugai/smile
| 0 |
; Copyright (c) 2010-2021 Haifeng Li. All rights reserved.
;
; Smile is free software: you can redistribute it and/or modify
; it under the terms of the GNU General Public License as published by
; the Free Software Foundation, either version 3 of the License, or
; (at your option) any later version.
;
; Smile is distributed in the hope that it will be useful,
; but WITHOUT ANY WARRANTY; without even the implied warranty of
; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
; GNU General Public License for more details.
;
; You should have received a copy of the GNU General Public License
; along with Smile. If not, see <https://www.gnu.org/licenses/>.
(ns smile.regression
"Regression Analysis"
{:author "Haifeng Li"}
(:import [smile.regression OLS RidgeRegression LASSO MLP RBFNetwork SVR
RegressionTree RandomForest GradientTreeBoost
GaussianProcessRegression]
[smile.base.cart Loss]))
(defn lm
  "Fitting linear models (ordinary least squares).
  In linear regression, the model specification is that the dependent
  variable is a linear combination of the parameters (but need not be
  linear in the independent variables). The residual is the difference
  between the value of the dependent variable predicted by the model,
  and the true value of the dependent variable. Ordinary least squares
  obtains parameter estimates that minimize the sum of squared residuals,
  SSE (also denoted RSS).
  The OLS estimator is consistent when the independent variables are
  exogenous and there is no multicollinearity, and optimal in the class
  of linear unbiased estimators when the errors are homoscedastic and
  serially uncorrelated. Under these conditions, the method of OLS provides
  minimum-variance mean-unbiased estimation when the errors have finite
  variances.
  There are several different frameworks in which the linear regression
  model can be cast in order to make the OLS technique applicable. Each
  of these settings produces the same formulas and same results, the only
  difference is the interpretation and the assumptions which have to be
  imposed in order for the method to give meaningful results. The choice
  of the applicable framework depends mostly on the nature of data at hand,
  and on the inference task which has to be performed.
  Least squares corresponds to the maximum likelihood criterion if the
  experimental errors have a normal distribution and can also be derived
  as a method of moments estimator.
  Once a regression model has been constructed, it may be important to
  confirm the goodness of fit of the model and the statistical significance
  of the estimated parameters. Commonly used checks of goodness of fit
  include the R-squared, analysis of the pattern of residuals and hypothesis
  testing. Statistical significance can be checked by an F-test of the overall
  fit, followed by t-tests of individual parameters.
  Interpretations of these diagnostic tests rest heavily on the model
  assumptions. Although examination of the residuals can be used to
  invalidate a model, the results of a t-test or F-test are sometimes more
  difficult to interpret if the model's assumptions are violated.
  For example, if the error term does not have a normal distribution,
  in small samples the estimated parameters will not follow normal
  distributions and complicate inference. With relatively large samples,
  however, a central limit theorem can be invoked such that hypothesis
  testing may proceed using asymptotic approximations.
  `formula` is a symbolic description of the model to be fitted.
  `data` is the data frame of the explanatory and response variables.
  `method` is the fitting method ('qr' or 'svd').
  `stderr` is the flag to compute the standard errors of the estimates.
  `recursive` is the flag if the return model supports recursive least squares.
  The 2-arity form defaults to method 'qr' with stderr and recursive enabled."
  ([formula data] (lm formula data "qr" true true))
  ([formula data method, stderr recursive] (OLS/fit formula data method stderr recursive)))
(defn ridge
  "Ridge Regression.
  When the predictor variables are highly correlated amongst
  themselves, the coefficients of the resulting least squares fit may be very
  imprecise. By allowing a small amount of bias in the estimates, more
  reasonable coefficients may often be obtained. Ridge regression is one
  method to address these issues. Often, small amounts of bias lead to
  dramatic reductions in the variance of the estimated model coefficients.
  Ridge regression is such a technique which shrinks the regression
  coefficients by imposing a penalty on their size. Ridge regression was
  originally developed to overcome the singularity of the X'X matrix.
  This matrix is perturbed so as to make its determinant appreciably
  different from 0.
  Ridge regression is a kind of Tikhonov regularization, which is the most
  commonly used method of regularization of ill-posed problems. Another
  interpretation of ridge regression is available through Bayesian estimation.
  In this setting the belief that weight should be small is coded into a prior
  distribution.
  `formula` is a symbolic description of the model to be fitted.
  `data` is the data frame of the explanatory and response variables.
  `lambda` is the shrinkage/regularization parameter; larger values
  apply stronger shrinkage to the coefficients."
  [formula data lambda] (RidgeRegression/fit formula data lambda))
(defn lasso
  "Least absolute shrinkage and selection operator.
  The Lasso is a shrinkage and selection method for linear regression.
  It minimizes the usual sum of squared errors, with a bound on the sum
  of the absolute values of the coefficients (i.e. L<sub>1</sub>-regularized).
  It has connections to soft-thresholding of wavelet coefficients, forward
  stage-wise regression, and boosting methods.
  The Lasso typically yields a sparse solution, of which the parameter
  vector β has relatively few nonzero coefficients. In contrast, the
  solution of L<sub>2</sub>-regularized least squares (i.e. ridge regression)
  typically has all coefficients nonzero. Because it effectively
  reduces the number of variables, the Lasso is useful in some contexts.
  For over-determined systems (more instances than variables, commonly in
  machine learning), we normalize variables with mean 0 and standard deviation
  1. For under-determined systems (less instances than variables, e.g.
  compressed sensing), we assume white noise (i.e. no intercept in the linear
  model) and do not perform normalization. Note that the solution
  is not unique in this case.
  There is no analytic formula or expression for the optimal solution to the
  L<sub>1</sub>-regularized least squares problems. Therefore, its solution
  must be computed numerically. The objective function in the
  L<sub>1</sub>-regularized least squares is convex but not differentiable,
  so solving it is more of a computational challenge than solving the
  L<sub>2</sub>-regularized least squares. The Lasso may be solved using
  quadratic programming or more general convex optimization methods, as well
  as by specific algorithms such as the least angle regression algorithm.
  `formula` is a symbolic description of the model to be fitted.
  `data` is the data frame of the explanatory and response variables.
  `lambda` is the shrinkage/regularization parameter.
  `tol` is the tolerance for stopping iterations (relative target duality gap). Defaults to 0.001.
  `max-iter` is the maximum number of iterations. Defaults to 5000."
  ([formula data lambda] (lasso formula data lambda 0.001 5000))
  ([formula data lambda tol max-iter] (LASSO/fit formula data lambda tol max-iter)))
(defn mlp
  "Multilayer perceptron neural network.
  An MLP consists of several layers of nodes, interconnected through weighted
  acyclic arcs from each preceding layer to the following, without lateral or
  feedback connections. Each node calculates a transformed weighted linear
  combination of its inputs (output activations from the preceding layer), with
  one of the weights acting as a trainable bias connected to a constant input.
  The transformation, called activation function, is a bounded non-decreasing
  (non-linear) function, such as the sigmoid functions (ranges from 0 to 1).
  Another popular activation function is hyperbolic tangent which is actually
  equivalent to the sigmoid function in shape but ranges from -1 to 1.
  More specialized activation functions include radial basis functions which
  are used in RBF networks.
  The representational capabilities of a MLP are determined by the range of
  mappings it may implement through weight variation. Single layer perceptrons
  are capable of solving only linearly separable problems. With the sigmoid
  function as activation function, the single-layer network is identical
  to the logistic regression model.
  The universal approximation theorem for neural networks states that every
  continuous function that maps intervals of real numbers to some output
  interval of real numbers can be approximated arbitrarily closely by a
  multi-layer perceptron with just one hidden layer. This result holds only
  for restricted classes of activation functions, which are extremely complex
  and NOT smooth for subtle mathematical reasons. On the other hand, smoothness
  is important for gradient descent learning. Besides, the proof is not
  constructive regarding the number of neurons required or the settings of
  the weights. Therefore, complex systems will have more layers of neurons
  with some having increased layers of input neurons and output neurons
  in practice.
  The most popular algorithm to train MLPs is back-propagation, which is a
  gradient descent method. Based on chain rule, the algorithm propagates the
  error back through the network and adjusts the weights of each connection in
  order to reduce the value of the error function by some small amount.
  For this reason, back-propagation can only be applied on networks with
  differentiable activation functions.
  During error back propagation, we usually times the gradient with a small
  number η, called learning rate, which is carefully selected to ensure
  that the network converges to a local minimum of the error function
  fast enough, without producing oscillations. One way to avoid oscillation
  at large η, is to make the change in weight dependent on the past weight
  change by adding a momentum term.
  Although the back-propagation algorithm may performs gradient
  descent on the total error of all instances in a batch way,
  the learning rule is often applied to each instance separately in an online
  way or stochastic way. There exists empirical indication that the stochastic
  way results in faster convergence.
  In practice, the problem of over-fitting has emerged. This arises in
  convoluted or over-specified systems when the capacity of the network
  significantly exceeds the needed free parameters. There are two general
  approaches for avoiding this problem: The first is to use cross-validation
  and similar techniques to check for the presence of over-fitting and
  optimally select hyper-parameters such as to minimize the generalization
  error. The second is to use some form of regularization, which emerges
  naturally in a Bayesian framework, where the regularization can be
  performed by selecting a larger prior probability over simpler models;
  but also in statistical learning theory, where the goal is to minimize over
  the 'empirical risk' and the 'structural risk'.
  For neural networks, the input patterns usually should be
  scaled/standardized. Commonly, each input variable is scaled into
  interval `[0, 1]` or to have mean 0 and standard deviation 1.
  For penalty functions and output units, the following natural pairings are
  recommended:
  - linear output units and a least squares penalty function.
  - a two-class cross-entropy penalty function and a logistic
  activation function.
  - a multi-class cross-entropy penalty function and a softmax
  activation function.
  By assigning a softmax activation function on the output layer of
  the neural network for categorical target variables, the outputs
  can be interpreted as posterior probabilities, which are very useful.
  `x` is the training samples.
  `y` is the response variable.
  `builders` are the builders of layers from bottom to top.
  `epochs` is the the number of epochs of stochastic learning.
  `eta` is the the learning rate.
  `alpha` is the momentum factor.
  `lambda` is the weight decay for regularization."
  ([x y builders] (mlp x y builders 10 0.1 0.0 0.0))
  ([x y builders epochs eta alpha lambda]
   ;; NOTE(fix): the previous version wrapped the setter calls in an extra
   ;; pair of parens, which invoked the result of .setLearningRate as a
   ;; function (NullPointerException at runtime). `doto` applies all the
   ;; setters and returns the network itself.
   ;; Also use `alength` for the input dimension: Java array length is not
   ;; accessible as a `.length` method through interop.
   (let [net (doto (MLP. (alength (aget x 0)) builders)
               (.setLearningRate eta)
               (.setMomentum alpha)
               (.setWeightDecay lambda))]
     ;; online/stochastic training: one pass over the data set per epoch
     (dotimes [_ epochs] (.update net x y))
     net)))
(defn rbfnet
  "Radial basis function (RBF) network.
  An RBF network is an artificial neural network whose output is a linear
  combination of radial basis functions of the inputs:
  ```
  y(x) = Σ w<sub>i</sub> φ(||x-c<sub>i</sub>||)
  ```
  where each basis function φ is associated with a center c<sub>i</sub>
  and weighted by a coefficient w<sub>i</sub>; the distance is usually
  Euclidean. Because the model is linear in the weights, they can be
  estimated by linear least squares. The centers may be drawn at random
  from the training data, obtained by clustering (e.g. k-means), or
  learned jointly with the weights in a supervised fashion.
  Popular choices of φ include the Gaussian function and thin plate
  splines. Thin plate splines have the advantage that their conditioning
  is invariant under scaling; Gaussian, multi-quadric and inverse
  multi-quadric functions are infinitely smooth and carry a scale/shape
  parameter r<sub><small>0</small></sub> > 0 on which the approximation
  quality may strongly depend (larger values improve conditioning by
  increasing the separation distance of the scaled points).
  A normalized RBF (NRBF) network additionally requires the basis
  functions to sum to unity, which arises naturally from a Bayesian
  statistical perspective; neither variant is consistently superior to
  the other.
  RBF networks with Gaussian basis functions have a structure similar to
  SVMs with a Gaussian kernel, where the support vectors play the role of
  the basis function centers. With comparable numbers of centers, SVMs
  have been reported to generalize better on small training sets, while
  RBF networks do better on large ones.
  `x` is the training samples.
  `y` is the response variable.
  `neurons` are the radial basis functions.
  If `normalized` is true, train a normalized RBF network (default false)."
  ([x y neurons]
   (RBFNetwork/fit x y neurons false))
  ([x y neurons normalized]
   (RBFNetwork/fit x y neurons normalized)))
(defn svr
  "Support vector regression.
  As with SVMs for classification, the fitted model depends only on a
  subset of the training data (the support vectors), because the cost
  function ignores any training sample whose prediction error falls
  within the threshold `eps`.
  `x` is the training data.
  `y` is the response variable.
  `kernel` is the kernel function.
  `eps` is the loss function error threshold.
  `C` is the soft margin penalty parameter.
  `tol` is the tolerance of the convergence test (default 1E-3)."
  ([x y kernel eps C]
   (SVR/fit x y kernel eps C 1E-3))
  ([x y kernel eps C tol]
   (SVR/fit x y kernel eps C tol)))
(defn cart
  "Regression tree (CART).
  A classification/regression tree is learned by splitting the training
  set into subsets based on attribute-value tests, repeating the process
  recursively on each derived subset (recursive partitioning). The
  recursion stops when a node becomes homogeneous in the target variable
  or when further splits no longer add value to the predictions.
  Tree construction works top-down, choosing at each step the variable
  that 'best' splits the current set into homogeneous subsets. The CART
  algorithm measures 'best' with Gini impurity — how often a randomly
  chosen element would be mislabeled if labeled randomly according to the
  label distribution in the subset — which reaches its minimum (zero)
  when all cases in a node fall into a single target category. ID3, C4.5
  and C5.0 instead use information gain, based on entropy; because plain
  information gain is biased toward categorical attributes with many
  levels, the information gain ratio is often preferred.
  Advantages of tree methods:
  - Simple to understand and interpret: the tree often yields a compact
  explanation of why observations are predicted a particular way.
  - Able to handle both numerical and categorical data, unlike many
  techniques specialized to a single variable type.
  - Nonparametric and nonlinear: the result is a set of if-then rules
  with no implicit assumption of linearity, a specific link function, or
  monotonicity — well suited to data mining with little a priori
  knowledge of which variables are related and how.
  The main drawbacks are high variance (a small change in the data can
  produce a very different series of splits), a tendency toward
  over-complex trees that over-fit unless pruned, and a non-smooth
  prediction surface. Ensemble techniques such as bagging, boosting and
  random forests combine multiple trees to address these issues.
  `formula` is a symbolic description of the model to be fitted.
  `data` is the data frame of the explanatory and response variables.
  `max-depth` is the maximum depth of the tree (default 20).
  `max-nodes` is the maximum number of leaf nodes in the tree (default 0).
  `node-size` is the minimum size of leaf nodes (default 5)."
  ([formula data]
   (RegressionTree/fit formula data 20 0 5))
  ([formula data max-depth max-nodes node-size]
   (RegressionTree/fit formula data max-depth max-nodes node-size)))
(defn random-forest
  "Random forest.
  A random forest is an ensemble of decision trees that outputs the
  combination of the individual trees' predictions, joining the bagging
  idea with random feature selection. Each tree is grown as follows:
  1. Sample N cases at random with replacement from the N training cases;
  this bootstrap sample is the tree's training set.
  2. With M input variables, a number m << M is fixed; at each node, m
  variables are selected at random and the best split among them is used.
  3. Each tree is grown to the largest extent possible, without pruning.
  Strengths:
  - Highly accurate on many data sets and efficient on large ones.
  - Handles thousands of input variables without variable deletion.
  - Estimates variable importance and provides an internal unbiased
  (out-of-bag) estimate of the generalization error as the forest grows.
  - Estimates missing data effectively, keeping accuracy even when a
  large proportion of the data are missing.
  Weaknesses:
  - Prone to over-fitting on some (especially noisy) data sets.
  - With categorical variables of differing numbers of levels, the
  variable importance scores are biased toward attributes with more
  levels and thus unreliable for such data.
  `formula` is a symbolic description of the model to be fitted.
  `data` is the data frame of the explanatory and response variables.
  `ntrees` is the number of trees (default 500).
  `mtry` is the number of random selected features to be used to
  determine the decision at a node of the tree; `dim/3` seems to give
  generally good performance, where `dim` is the number of variables
  (default 0).
  `max-depth` is the maximum depth of the tree (default 20).
  `max-nodes` is the maximum number of leaf nodes in the tree (default 500).
  `node-size` is the minimum size of leaf nodes (default 5).
  `subsample` is the sampling rate for training each tree: 1.0 means
  sampling with replacement, < 1.0 means sampling without replacement
  (default 1.0)."
  ([formula data]
   (RandomForest/fit formula data 500 0 20 500 5 1.0))
  ([formula data ntrees mtry max-depth max-nodes node-size subsample]
   (RandomForest/fit formula data ntrees mtry max-depth max-nodes node-size subsample)))
(defn gbm
  "Gradient boosted regression trees.
  At each step, gradient boosting fits a regression tree to the
  pseudo-residuals of the current ensemble. The number of leaves J of
  each tree bounds the allowed level of interaction between variables:
  J = 2 (decision stumps) permits none, J = 3 allows pairwise
  interactions, and so on. Hastie et al. note that 4 ≤ J ≤ 8 typically
  works well, results being fairly insensitive within that range, while
  J = 2 is often insufficient and J > 10 rarely needed.
  Several regularization techniques guard against over-fitting:
  - The number of boosting iterations T (trees): larger T reduces
  training error but may over-fit; an optimal T is often chosen by
  monitoring prediction error on a separate validation set.
  - Shrinkage: the update is multiplied by a learning rate η. Small
  rates (η < 0.1) empirically yield dramatically better generalization
  than no shrinking (η = 1), at the price of more iterations during both
  training and prediction.
  - Stochastic sub-sampling (Friedman, after Breiman's bagging): each
  base learner is fit on a random fraction f of the training set drawn
  without replacement. f = 1 recovers the deterministic algorithm;
  smaller f (typically 0.5) injects randomness that acts as
  regularization and also speeds up tree fitting. As in bagging,
  sub-sampling yields out-of-bag estimates of the performance
  improvement, avoiding a separate validation set, though these often
  underestimate the actual improvement and the optimal iteration count.
  - A minimum number of observations per terminal node, ignoring splits
  that would create smaller leaves, which reduces the variance of leaf
  predictions.
  `formula` is a symbolic description of the model to be fitted.
  `data` is the data frame of the explanatory and response variables.
  `loss` is the loss function for regression (default `(Loss/lad)`).
  `ntrees` is the number of iterations (trees) (default 500).
  `max-depth` is the maximum depth of the tree (default 20).
  `max-nodes` is the maximum number of leaf nodes in the tree (default 6).
  `node-size` is the minimum size of leaf nodes (default 5).
  `shrinkage` is the shrinkage parameter in (0, 1] controlling the
  learning rate of the procedure (default 0.05).
  `subsample` is the sampling fraction for stochastic tree boosting
  (default 0.7)."
  ([formula data]
   (GradientTreeBoost/fit formula data (Loss/lad) 500 20 6 5 0.05 0.7))
  ([formula data loss ntrees max-depth max-nodes node-size shrinkage subsample]
   (GradientTreeBoost/fit formula data loss ntrees max-depth max-nodes node-size shrinkage subsample)))
(defn gpr
  "Gaussian process regression.
  A Gaussian process is a stochastic process whose realizations consist
  of random values, one per point of a time (or space) range, such that
  every finite collection of those values has a multivariate normal
  distribution. A Gaussian process can serve as a prior probability
  distribution over functions in Bayesian inference: given any N points
  in the domain, take a multivariate Gaussian whose covariance matrix is
  the Gram matrix of the points under some kernel, and sample from it.
  Inference of continuous values with this prior is known as Gaussian
  process regression; the fit is performed in the reproducing kernel
  Hilbert space via the 'kernel trick' with a squared-error loss, and
  also arises as the kriging estimate of a Gaussian random field in
  spatial statistics.
  Exact prediction typically scales as O(n<sup>3</sup>), which is
  prohibitive for large problems (e.g. n > 10,000). Common remedies
  include reduced-rank (Nystrom) approximations of the Gram matrix and
  greedy active-set methods that select m << n regressors, either
  randomly or greedily with respect to some criterion; more recent work
  relaxes the constraint that inducing variables be a subset of the
  training/test cases, turning the discrete selection into continuous
  optimization. This function fits the regular (exact) model.
  `x` is the training dataset.
  `y` is the response variable.
  `kernel` is the Mercer kernel.
  `noise` is the noise variance, which also works as a regularization
  parameter.
  `normalize` is the option to normalize the response variable.
  `tol` is the stopping tolerance for HPO.
  `max-iter` is the maximum number of iterations for HPO; no HPO is
  performed if it is <= 0."
  ([x y kernel noise]
   (GaussianProcessRegression/fit x y kernel noise))
  ([x y kernel noise normalize tol max-iter]
   (GaussianProcessRegression/fit x y kernel noise normalize tol max-iter)))
(defn gpr-approx
  "Approximate Gaussian process regression with a subset of regressors.
  Rather than conditioning on all training points, the model uses the
  pre-selected inducing inputs `t` as an active set of regressors, which
  reduces the cost of exact Gaussian process inference. In the simple
  case the inducing inputs can be chosen at random from the training set
  or as the centers of a k-means clustering.
  `x` is the training dataset.
  `y` is the response variable.
  `t` is the inducing input.
  `kernel` is the Mercer kernel.
  `noise` is the noise variance, which also works as a regularization
  parameter.
  `normalize` is the option to normalize the response variable."
  ([x y t kernel noise]
   (GaussianProcessRegression/fit x y t kernel noise))
  ([x y t kernel noise normalize]
   (GaussianProcessRegression/fit x y t kernel noise normalize)))
(defn gpr-nystrom
  "Approximate Gaussian process regression with the Nystrom
  approximation of the kernel (Gram) matrix, a reduced-rank
  approximation built from the pre-selected inducing inputs `t`. In the
  simple case the inducing inputs can be chosen at random from the
  training set or as the centers of a k-means clustering.
  `x` is the training dataset.
  `y` is the response variable.
  `t` is the inducing input.
  `kernel` is the Mercer kernel.
  `noise` is the noise variance, which also works as a regularization
  parameter.
  `normalize` is the option to normalize the response variable."
  ([x y t kernel noise]
   (GaussianProcessRegression/nystrom x y t kernel noise))
  ([x y t kernel noise normalize]
   (GaussianProcessRegression/nystrom x y t kernel noise normalize)))
|
121898
|
; Copyright (c) 2010-2021 <NAME>. All rights reserved.
;
; Smile is free software: you can redistribute it and/or modify
; it under the terms of the GNU General Public License as published by
; the Free Software Foundation, either version 3 of the License, or
; (at your option) any later version.
;
; Smile is distributed in the hope that it will be useful,
; but WITHOUT ANY WARRANTY; without even the implied warranty of
; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
; GNU General Public License for more details.
;
; You should have received a copy of the GNU General Public License
; along with Smile. If not, see <https://www.gnu.org/licenses/>.
(ns smile.regression
"Regression Analysis"
{:author "<NAME>"}
(:import [smile.regression OLS RidgeRegression LASSO MLP RBFNetwork SVR
RegressionTree RandomForest GradientTreeBoost
GaussianProcessRegression]
[smile.base.cart Loss]))
(defn lm
  "Fitting linear models (ordinary least squares).
  In linear regression, the model specification is that the dependent
  variable is a linear combination of the parameters (but need not be
  linear in the independent variables). The residual is the difference
  between the value of the dependent variable predicted by the model,
  and the true value of the dependent variable. Ordinary least squares
  obtains parameter estimates that minimize the sum of squared residuals,
  SSE (also denoted RSS).
  The OLS estimator is consistent when the independent variables are
  exogenous and there is no multicollinearity, and optimal in the class
  of linear unbiased estimators when the errors are homoscedastic and
  serially uncorrelated. Under these conditions, the method of OLS provides
  minimum-variance mean-unbiased estimation when the errors have finite
  variances.
  There are several different frameworks in which the linear regression
  model can be cast in order to make the OLS technique applicable. Each
  of these settings produces the same formulas and same results, the only
  difference is the interpretation and the assumptions which have to be
  imposed in order for the method to give meaningful results. The choice
  of the applicable framework depends mostly on the nature of data at hand,
  and on the inference task which has to be performed.
  Least squares corresponds to the maximum likelihood criterion if the
  experimental errors have a normal distribution and can also be derived
  as a method of moments estimator.
  Once a regression model has been constructed, it may be important to
  confirm the goodness of fit of the model and the statistical significance
  of the estimated parameters. Commonly used checks of goodness of fit
  include the R-squared, analysis of the pattern of residuals and hypothesis
  testing. Statistical significance can be checked by an F-test of the overall
  fit, followed by t-tests of individual parameters.
  Interpretations of these diagnostic tests rest heavily on the model
  assumptions. Although examination of the residuals can be used to
  invalidate a model, the results of a t-test or F-test are sometimes more
  difficult to interpret if the model's assumptions are violated.
  For example, if the error term does not have a normal distribution,
  in small samples the estimated parameters will not follow normal
  distributions and complicate inference. With relatively large samples,
  however, a central limit theorem can be invoked such that hypothesis
  testing may proceed using asymptotic approximations.
  `formula` is a symbolic description of the model to be fitted.
  `data` is the data frame of the explanatory and response variables.
  `method` is the fitting method ('qr' or 'svd').
  `stderr` is the flag to compute the standard errors of the estimates.
  `recursive` is the flag if the return model supports recursive least squares."
  ([formula data] (lm formula data "qr" true true))
  ([formula data method stderr recursive] (OLS/fit formula data method stderr recursive)))
(defn ridge
  "Ridge regression.
  When the predictor variables are highly correlated amongst themselves,
  the coefficients of a least squares fit may be very imprecise. Ridge
  regression shrinks the regression coefficients by imposing a penalty
  on their size: allowing a small amount of bias in the estimates often
  brings a dramatic reduction in the variance of the estimated model
  coefficients. The method was originally developed to overcome the
  singularity of the X'X matrix, perturbing it so that its determinant
  is appreciably different from 0.
  Ridge regression is a kind of Tikhonov regularization, the most
  commonly used regularization of ill-posed problems. It also admits a
  Bayesian interpretation, where the belief that the weights should be
  small is encoded in a prior distribution.
  `formula` is a symbolic description of the model to be fitted.
  `data` is the data frame of the explanatory and response variables.
  `lambda` is the shrinkage/regularization parameter."
  ([formula data lambda]
   (RidgeRegression/fit formula data lambda)))
(defn lasso
  "Least absolute shrinkage and selection operator (Lasso).
  The Lasso is a shrinkage and selection method for linear regression:
  it minimizes the usual sum of squared errors subject to a bound on the
  sum of the absolute values of the coefficients (L<sub>1</sub>
  regularization). It is connected to soft-thresholding of wavelet
  coefficients, forward stage-wise regression, and boosting.
  The Lasso typically yields a sparse parameter vector β with relatively
  few nonzero coefficients, whereas L<sub>2</sub>-regularized least
  squares (ridge regression) typically keeps all coefficients nonzero;
  this effective reduction of the number of variables makes the Lasso
  useful in some contexts. For over-determined systems (more instances
  than variables, as common in machine learning) the variables are
  normalized to mean 0 and standard deviation 1. For under-determined
  systems (fewer instances than variables, e.g. compressed sensing)
  white noise is assumed (no intercept), no normalization is performed,
  and the solution need not be unique.
  No analytic expression exists for the optimal solution: the objective
  is convex but not differentiable, so it must be computed numerically —
  a harder task than L<sub>2</sub>-regularized least squares — via
  quadratic programming, general convex optimization, or specific
  algorithms such as least angle regression.
  `formula` is a symbolic description of the model to be fitted.
  `data` is the data frame of the explanatory and response variables.
  `lambda` is the shrinkage/regularization parameter.
  `tol` is the tolerance for stopping iterations (relative target
  duality gap, default 0.001).
  `max-iter` is the maximum number of iterations (default 5000)."
  ([formula data lambda]
   (LASSO/fit formula data lambda 0.001 5000))
  ([formula data lambda tol max-iter]
   (LASSO/fit formula data lambda tol max-iter)))
(defn mlp
  "Multilayer perceptron neural network.
  An MLP consists of several layers of nodes, interconnected through weighted
  acyclic arcs from each preceding layer to the following, without lateral or
  feedback connections. Each node calculates a transformed weighted linear
  combination of its inputs (output activations from the preceding layer), with
  one of the weights acting as a trainable bias connected to a constant input.
  The transformation, called activation function, is a bounded non-decreasing
  (non-linear) function, such as the sigmoid functions (ranges from 0 to 1).
  Another popular activation function is hyperbolic tangent which is actually
  equivalent to the sigmoid function in shape but ranges from -1 to 1.
  More specialized activation functions include radial basis functions which
  are used in RBF networks.
  The representational capabilities of a MLP are determined by the range of
  mappings it may implement through weight variation. Single layer perceptrons
  are capable of solving only linearly separable problems. With the sigmoid
  function as activation function, the single-layer network is identical
  to the logistic regression model.
  The universal approximation theorem for neural networks states that every
  continuous function that maps intervals of real numbers to some output
  interval of real numbers can be approximated arbitrarily closely by a
  multi-layer perceptron with just one hidden layer. This result holds only
  for restricted classes of activation functions, which are extremely complex
  and NOT smooth for subtle mathematical reasons. On the other hand, smoothness
  is important for gradient descent learning. Besides, the proof is not
  constructive regarding the number of neurons required or the settings of
  the weights. Therefore, complex systems will have more layers of neurons
  with some having increased layers of input neurons and output neurons
  in practice.
  The most popular algorithm to train MLPs is back-propagation, which is a
  gradient descent method. Based on chain rule, the algorithm propagates the
  error back through the network and adjusts the weights of each connection in
  order to reduce the value of the error function by some small amount.
  For this reason, back-propagation can only be applied on networks with
  differentiable activation functions.
  During error back propagation, we usually times the gradient with a small
  number η, called learning rate, which is carefully selected to ensure
  that the network converges to a local minimum of the error function
  fast enough, without producing oscillations. One way to avoid oscillation
  at large η, is to make the change in weight dependent on the past weight
  change by adding a momentum term.
  Although the back-propagation algorithm may performs gradient
  descent on the total error of all instances in a batch way,
  the learning rule is often applied to each instance separately in an online
  way or stochastic way. There exists empirical indication that the stochastic
  way results in faster convergence.
  In practice, the problem of over-fitting has emerged. This arises in
  convoluted or over-specified systems when the capacity of the network
  significantly exceeds the needed free parameters. There are two general
  approaches for avoiding this problem: The first is to use cross-validation
  and similar techniques to check for the presence of over-fitting and
  optimally select hyper-parameters such as to minimize the generalization
  error. The second is to use some form of regularization, which emerges
  naturally in a Bayesian framework, where the regularization can be
  performed by selecting a larger prior probability over simpler models;
  but also in statistical learning theory, where the goal is to minimize over
  the 'empirical risk' and the 'structural risk'.
  For neural networks, the input patterns usually should be
  scaled/standardized. Commonly, each input variable is scaled into
  interval `[0, 1]` or to have mean 0 and standard deviation 1.
  For penalty functions and output units, the following natural pairings are
  recommended:
  - linear output units and a least squares penalty function.
  - a two-class cross-entropy penalty function and a logistic
    activation function.
  - a multi-class cross-entropy penalty function and a softmax
    activation function.
  By assigning a softmax activation function on the output layer of
  the neural network for categorical target variables, the outputs
  can be interpreted as posterior probabilities, which are very useful.
  `x` is the training samples.
  `y` is the response variable.
  `builders` are the builders of layers from bottom to top.
  `epochs` is the the number of epochs of stochastic learning.
  `eta` is the the learning rate.
  `alpha` is the momentum factor.
  `lambda` is the weight decay for regularization."
  ([x y builders] (mlp x y builders 10 0.1 0.0 0.0))
  ([x y builders epochs eta alpha lambda]
   ;; The input dimension is the length of the first training sample.
   ;; NOTE: Java array length must be read with `alength` (it is a field,
   ;; not a method, so `(.length ...)` fails in Clojure). Also, the setter
   ;; calls must not be wrapped in an extra list — that would invoke the
   ;; nil return value of setLearningRate as a function and throw an NPE.
   (let [net (MLP. (alength ^doubles (aget x 0)) builders)]
     (.setLearningRate net eta)
     (.setMomentum net alpha)
     (.setWeightDecay net lambda)
     ;; One pass over the full training set per epoch (online learning).
     (dotimes [_ epochs] (.update net x y))
     net)))
(defn rbfnet
  "Radial basis function network.
  An RBF network is an artificial neural network whose activation functions
  are radial basis functions; the model is a linear combination
  y(x) = Σ w<sub>i</sub> φ(||x-c<sub>i</sub>||) of basis functions φ, each
  centered at a point c<sub>i</sub> and weighted by a coefficient
  w<sub>i</sub>. Because the model is linear in the weights, they can be
  estimated by linear least squares. The centers may be picked at random
  from the training data, learned by a clustering method such as k-means,
  or trained jointly with the weights in a supervised fashion. Popular
  choices for φ include the Gaussian function and thin plate splines;
  Gaussian, multi-quadric and inverse multi-quadric functions involve a
  scale parameter r<sub><small>0</small></sub> > 0 whose value can strongly
  affect approximation quality. RBF networks are used in function
  approximation, time series prediction, and control.
  A normalized RBF (NRBF) network constrains the basis functions to sum
  to unity; neither variant is consistently superior to the other.
  SVMs with a Gaussian kernel have a similar structure, with support
  vectors playing the role of the basis function centers; SVMs tend to
  generalize better on small training sets, RBF networks on large ones.
  `x` is the training samples.
  `y` is the response variable.
  `neurons` are the radial basis functions.
  If `normalized` is true, train a normalized RBF network (default false)."
  ([x y neurons] (RBFNetwork/fit x y neurons false))
  ([x y neurons normalized] (RBFNetwork/fit x y neurons normalized)))
(defn svr
  "Support vector regression.
  As with SVM classification, the fitted model depends only on a subset of
  the training data, because the loss function ignores any sample whose
  prediction error falls within the threshold.
  `x` is the training data.
  `y` is the response variable.
  `kernel` is the kernel function.
  `eps` is the loss function error threshold.
  `C` is the soft margin penalty parameter.
  `tol` is the tolerance of convergence test (default 1E-3)."
  ([x y kernel eps C] (SVR/fit x y kernel eps C 1E-3))
  ([x y kernel eps C tol] (SVR/fit x y kernel eps C tol)))
(defn cart
  "Regression tree (CART).
  A classification/regression tree is learned by recursively partitioning
  the training set on attribute-value tests; the recursion completes when
  a node is homogeneous in the target variable or when further splitting
  no longer improves the predictions. Tree construction works top-down,
  greedily choosing at each step the variable whose split yields the most
  homogeneous subsets. CART measures this with Gini impurity — how often a
  randomly chosen element would be mislabeled if labeled according to the
  subset's label distribution — while ID3/C4.5/C5.0 use information gain
  (or the gain ratio, which removes the bias toward variables with many
  levels).
  Advantages of tree methods:
  - Simple to understand and interpret: the result is a short series of
    if-then conditions that often explains why observations are predicted
    the way they are.
  - Able to handle both numerical and categorical data.
  - Nonparametric and nonlinear: no implicit assumption of linearity,
    a specific link function, or monotonicity, which makes trees well
    suited to data mining tasks with little a priori knowledge.
  Drawbacks: trees have high variance (small data changes can produce
  very different splits), can over-fit without pruning, and the prediction
  surface lacks smoothness. Ensembles such as bagging, boosting, and
  random forests address these weaknesses by combining many trees.
  `formula` is a symbolic description of the model to be fitted.
  `data` is the data frame of the explanatory and response variables.
  `max-depth` is the maximum depth of the tree (default 20).
  `max-nodes` is the maximum number of leaf nodes in the tree (default 0).
  `node-size` is the minimum size of leaf nodes (default 5)."
  ([formula data] (RegressionTree/fit formula data 20 0 5))
  ([formula data max-depth max-nodes node-size]
   (RegressionTree/fit formula data max-depth max-nodes node-size)))
(defn random-forest
  "Random forest for regression.
  A random forest is an ensemble of decision trees that outputs the
  aggregate prediction of the individual trees, combining the bagging
  idea with random feature selection. Each tree is grown as follows:
  1. Draw a bootstrap sample of N cases (with replacement) from the
     training set to grow the tree on.
  2. At each node, select m << M variables at random out of the M inputs
     and split on the best of those m; m is held constant while the
     forest grows.
  3. Grow each tree to the largest extent possible, without pruning.
  Strengths: high accuracy on many data sets, efficiency on large data,
  tolerance of thousands of input variables, variable importance
  estimates, an internal unbiased estimate of generalization error, and
  effective handling of missing data. Weaknesses: a tendency to over-fit
  on some (especially noisy) data sets, and biased variable importance
  scores when categorical variables differ in their number of levels.
  `formula` is a symbolic description of the model to be fitted.
  `data` is the data frame of the explanatory and response variables.
  `ntrees` is the number of trees (default 500).
  `mtry` is the number of randomly selected features considered at each
         node; `dim/3` generally performs well, where `dim` is the
         number of variables.
  `max-depth` is the maximum depth of the tree (default 20).
  `max-nodes` is the maximum number of leaf nodes in the tree (default 500).
  `node-size` is the minimum size of leaf nodes (default 5).
  `subsample` is the sampling rate for training each tree; 1.0 samples
         with replacement, < 1.0 samples without replacement."
  ([formula data] (RandomForest/fit formula data 500 0 20 500 5 1.0))
  ([formula data ntrees mtry max-depth max-nodes node-size subsample]
   (RandomForest/fit formula data ntrees mtry max-depth max-nodes node-size subsample)))
(defn gbm
  "Gradient boosted regression trees.
  At each step, gradient boosting fits a regression tree to the
  pseudo-residuals. With J leaves, the tree partitions the input space
  into J disjoint regions, each predicting a constant; J controls the
  maximum level of variable interaction in the model (J = 2 allows none).
  Hastie et al. report that 4 ≤ J ≤ 8 typically works well, with results
  fairly insensitive to the choice in that range.
  Several regularization techniques guard against over-fitting:
  - The number of boosting iterations T (trees). Larger T reduces
    training error but may over-fit; an optimal T is usually chosen by
    monitoring error on a validation set.
  - Shrinkage: the update is scaled by a learning rate η. Small rates
    (η < 0.1) empirically improve generalization markedly over no
    shrinkage (η = 1), at the cost of more iterations.
  - Stochastic gradient boosting (Friedman, after Breiman's bagging):
    each base learner is fit on a random subsample of fraction f of the
    training set, drawn without replacement. f = 1 recovers the
    deterministic algorithm; typically f = 0.5. Sub-sampling also yields
    out-of-bag performance estimates, which tend to underestimate the
    actual improvement and the optimal iteration count.
  - A minimum number of observations per terminal node, which reduces
    the variance of leaf predictions by ignoring splits that would
    create smaller leaves.
  `formula` is a symbolic description of the model to be fitted.
  `data` is the data frame of the explanatory and response variables.
  `loss` is the loss function for regression (default least absolute
         deviation).
  `ntrees` is the number of iterations, i.e. trees (default 500).
  `max-depth` is the maximum depth of the tree (default 20).
  `max-nodes` is the maximum number of leaf nodes in the tree (default 6).
  `node-size` is the minimum size of leaf nodes (default 5).
  `shrinkage` is the learning rate in (0, 1] (default 0.05).
  `subsample` is the sampling fraction for stochastic tree boosting
         (default 0.7)."
  ([formula data]
   (GradientTreeBoost/fit formula data (Loss/lad) 500 20 6 5 0.05 0.7))
  ([formula data loss ntrees max-depth max-nodes node-size shrinkage subsample]
   (GradientTreeBoost/fit formula data loss ntrees max-depth max-nodes node-size shrinkage subsample)))
(defn gpr
  "Gaussian process regression.
  A Gaussian process is a stochastic process whose realizations assign
  normally distributed random values to every point in a range of times
  (or of space), such that every finite collection of those values has a
  multivariate normal distribution. Used as a Bayesian prior over
  functions — a multivariate Gaussian whose covariance is the Gram matrix
  of the points under some kernel — inference of continuous values is
  known as Gaussian process regression. The fit is performed in the
  reproducing kernel Hilbert space via the kernel trick with a
  squared-error loss, and coincides with the kriging estimate of a
  Gaussian random field in spatial statistics.
  Exact GP prediction scales as O(n^3), which is prohibitive for large
  problems (n > 10,000). Common remedies include reduced-rank (Nystrom)
  approximation of the Gram matrix and greedy active-set methods that
  select m < n regressors; see `gpr-approx` and `gpr-nystrom`.
  This function fits a regular (exact) Gaussian process model.
  `x` is the training dataset.
  `y` is the response variable.
  `kernel` is the Mercer kernel.
  `noise` is the noise variance, which also acts as a regularization
          parameter.
  `normalize` is the option to normalize the response variable.
  `tol` is the stopping tolerance for hyper-parameter optimization.
  `max-iter` is the maximum number of HPO iterations; no HPO if <= 0."
  ([x y kernel noise]
   (GaussianProcessRegression/fit x y kernel noise))
  ([x y kernel noise normalize tol max-iter]
   (GaussianProcessRegression/fit x y kernel noise normalize tol max-iter)))
(defn gpr-approx
  "Approximate Gaussian process regression with a subset of regressors.
  `x` is the training dataset.
  `y` is the response variable.
  `t` is the inducing input: pre-selected samples acting as the active
      set of regressors, e.g. chosen at random from the training set or
      as k-means cluster centers.
  `kernel` is the Mercer kernel.
  `noise` is the noise variance, which also acts as a regularization
          parameter.
  `normalize` is the option to normalize the response variable."
  ([x y t kernel noise]
   (GaussianProcessRegression/fit x y t kernel noise))
  ([x y t kernel noise normalize]
   (GaussianProcessRegression/fit x y t kernel noise normalize)))
(defn gpr-nystrom
  "Approximate Gaussian process regression with Nystrom approximation of
  the kernel matrix.
  `x` is the training dataset.
  `y` is the response variable.
  `t` is the inducing input: pre-selected samples acting as the active
      set of regressors, e.g. chosen at random from the training set or
      as k-means cluster centers.
  `kernel` is the Mercer kernel.
  `noise` is the noise variance, which also acts as a regularization
          parameter.
  `normalize` is the option to normalize the response variable."
  ([x y t kernel noise]
   (GaussianProcessRegression/nystrom x y t kernel noise))
  ([x y t kernel noise normalize]
   (GaussianProcessRegression/nystrom x y t kernel noise normalize)))
| true |
; Copyright (c) 2010-2021 PI:NAME:<NAME>END_PI. All rights reserved.
;
; Smile is free software: you can redistribute it and/or modify
; it under the terms of the GNU General Public License as published by
; the Free Software Foundation, either version 3 of the License, or
; (at your option) any later version.
;
; Smile is distributed in the hope that it will be useful,
; but WITHOUT ANY WARRANTY; without even the implied warranty of
; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
; GNU General Public License for more details.
;
; You should have received a copy of the GNU General Public License
; along with Smile. If not, see <https://www.gnu.org/licenses/>.
(ns smile.regression
"Regression Analysis"
{:author "PI:NAME:<NAME>END_PI"}
(:import [smile.regression OLS RidgeRegression LASSO MLP RBFNetwork SVR
RegressionTree RandomForest GradientTreeBoost
GaussianProcessRegression]
[smile.base.cart Loss]))
(defn lm
  "Fitting linear models (ordinary least squares).
  In linear regression, the model specification is that the dependent
  variable is a linear combination of the parameters (but need not be
  linear in the independent variables). The residual is the difference
  between the value of the dependent variable predicted by the model,
  and the true value of the dependent variable. Ordinary least squares
  obtains parameter estimates that minimize the sum of squared residuals,
  SSE (also denoted RSS).
  The OLS estimator is consistent when the independent variables are
  exogenous and there is no multicollinearity, and optimal in the class
  of linear unbiased estimators when the errors are homoscedastic and
  serially uncorrelated. Under these conditions, the method of OLS provides
  minimum-variance mean-unbiased estimation when the errors have finite
  variances.
  There are several different frameworks in which the linear regression
  model can be cast in order to make the OLS technique applicable. Each
  of these settings produces the same formulas and same results, the only
  difference is the interpretation and the assumptions which have to be
  imposed in order for the method to give meaningful results. The choice
  of the applicable framework depends mostly on the nature of data at hand,
  and on the inference task which has to be performed.
  Least squares corresponds to the maximum likelihood criterion if the
  experimental errors have a normal distribution and can also be derived
  as a method of moments estimator.
  Once a regression model has been constructed, it may be important to
  confirm the goodness of fit of the model and the statistical significance
  of the estimated parameters. Commonly used checks of goodness of fit
  include the R-squared, analysis of the pattern of residuals and hypothesis
  testing. Statistical significance can be checked by an F-test of the overall
  fit, followed by t-tests of individual parameters.
  Interpretations of these diagnostic tests rest heavily on the model
  assumptions. Although examination of the residuals can be used to
  invalidate a model, the results of a t-test or F-test are sometimes more
  difficult to interpret if the model's assumptions are violated.
  For example, if the error term does not have a normal distribution,
  in small samples the estimated parameters will not follow normal
  distributions and complicate inference. With relatively large samples,
  however, a central limit theorem can be invoked such that hypothesis
  testing may proceed using asymptotic approximations.
  `formula` is a symbolic description of the model to be fitted.
  `data` is the data frame of the explanatory and response variables.
  `method` is the fitting method ('qr' or 'svd').
  `stderr` is the flag to compute the standard errors of the estimates.
  `recursive` is the flag if the return model supports recursive least squares."
  ([formula data] (lm formula data "qr" true true))
  ([formula data method stderr recursive] (OLS/fit formula data method stderr recursive)))
(defn ridge
  "Ridge regression.
  When the predictor variables are highly correlated amongst themselves,
  the coefficients of the resulting least squares fit may be very
  imprecise. By allowing a small amount of bias in the estimates, more
  reasonable coefficients may often be obtained: small amounts of bias
  frequently lead to dramatic reductions in the variance of the estimated
  model coefficients. Ridge regression shrinks the regression
  coefficients by imposing a penalty on their size; it was originally
  developed to overcome the singularity of the X'X matrix, which is
  perturbed so as to make its determinant appreciably different from 0.
  Ridge regression is a kind of Tikhonov regularization, the most
  commonly used method of regularization for ill-posed problems. It also
  has a Bayesian interpretation, in which the belief that the weights
  should be small is coded into a prior distribution.
  `formula` is a symbolic description of the model to be fitted.
  `data` is the data frame of the explanatory and response variables.
  `lambda` is the shrinkage/regularization parameter."
  [formula data lambda]
  (RidgeRegression/fit formula data lambda))
(defn lasso
  "Least absolute shrinkage and selection operator.
  The Lasso is a shrinkage and selection method for linear regression.
  It minimizes the usual sum of squared errors with a bound on the sum of
  the absolute values of the coefficients (L<sub>1</sub>-regularization),
  and has connections to soft-thresholding of wavelet coefficients,
  forward stage-wise regression, and boosting. The Lasso typically yields
  a sparse solution — few nonzero coefficients in β — whereas ridge
  regression (L<sub>2</sub>) typically leaves all coefficients nonzero,
  so the Lasso is useful where effective variable reduction matters.
  For over-determined systems (more instances than variables, common in
  machine learning), variables are normalized to mean 0 and standard
  deviation 1. For under-determined systems (e.g. compressed sensing),
  white noise is assumed (no intercept) and no normalization is
  performed; note the solution is not unique in that case.
  There is no analytic expression for the optimal solution of the
  L<sub>1</sub>-regularized least squares problem, so it must be computed
  numerically. The objective is convex but not differentiable, making it
  harder than the L<sub>2</sub> case; solvers include quadratic
  programming, general convex optimization, and the least angle
  regression algorithm.
  `formula` is a symbolic description of the model to be fitted.
  `data` is the data frame of the explanatory and response variables.
  `lambda` is the shrinkage/regularization parameter.
  `tol` is the tolerance for stopping iterations, i.e. the relative
        target duality gap (default 0.001).
  `max-iter` is the maximum number of iterations (default 5000)."
  ([formula data lambda] (LASSO/fit formula data lambda 0.001 5000))
  ([formula data lambda tol max-iter] (LASSO/fit formula data lambda tol max-iter)))
(defn mlp
  "Multilayer perceptron neural network.
  An MLP consists of several layers of nodes, interconnected through weighted
  acyclic arcs from each preceding layer to the following, without lateral or
  feedback connections. Each node calculates a transformed weighted linear
  combination of its inputs (output activations from the preceding layer), with
  one of the weights acting as a trainable bias connected to a constant input.
  The transformation, called activation function, is a bounded non-decreasing
  (non-linear) function, such as the sigmoid functions (ranges from 0 to 1).
  Another popular activation function is hyperbolic tangent which is actually
  equivalent to the sigmoid function in shape but ranges from -1 to 1.
  More specialized activation functions include radial basis functions which
  are used in RBF networks.
  The representational capabilities of a MLP are determined by the range of
  mappings it may implement through weight variation. Single layer perceptrons
  are capable of solving only linearly separable problems. With the sigmoid
  function as activation function, the single-layer network is identical
  to the logistic regression model.
  The universal approximation theorem for neural networks states that every
  continuous function that maps intervals of real numbers to some output
  interval of real numbers can be approximated arbitrarily closely by a
  multi-layer perceptron with just one hidden layer. This result holds only
  for restricted classes of activation functions, which are extremely complex
  and NOT smooth for subtle mathematical reasons. On the other hand, smoothness
  is important for gradient descent learning. Besides, the proof is not
  constructive regarding the number of neurons required or the settings of
  the weights. Therefore, complex systems will have more layers of neurons
  with some having increased layers of input neurons and output neurons
  in practice.
  The most popular algorithm to train MLPs is back-propagation, which is a
  gradient descent method. Based on chain rule, the algorithm propagates the
  error back through the network and adjusts the weights of each connection in
  order to reduce the value of the error function by some small amount.
  For this reason, back-propagation can only be applied on networks with
  differentiable activation functions.
  During error back propagation, we usually times the gradient with a small
  number η, called learning rate, which is carefully selected to ensure
  that the network converges to a local minimum of the error function
  fast enough, without producing oscillations. One way to avoid oscillation
  at large η, is to make the change in weight dependent on the past weight
  change by adding a momentum term.
  Although the back-propagation algorithm may performs gradient
  descent on the total error of all instances in a batch way,
  the learning rule is often applied to each instance separately in an online
  way or stochastic way. There exists empirical indication that the stochastic
  way results in faster convergence.
  In practice, the problem of over-fitting has emerged. This arises in
  convoluted or over-specified systems when the capacity of the network
  significantly exceeds the needed free parameters. There are two general
  approaches for avoiding this problem: The first is to use cross-validation
  and similar techniques to check for the presence of over-fitting and
  optimally select hyper-parameters such as to minimize the generalization
  error. The second is to use some form of regularization, which emerges
  naturally in a Bayesian framework, where the regularization can be
  performed by selecting a larger prior probability over simpler models;
  but also in statistical learning theory, where the goal is to minimize over
  the 'empirical risk' and the 'structural risk'.
  For neural networks, the input patterns usually should be
  scaled/standardized. Commonly, each input variable is scaled into
  interval `[0, 1]` or to have mean 0 and standard deviation 1.
  For penalty functions and output units, the following natural pairings are
  recommended:
  - linear output units and a least squares penalty function.
  - a two-class cross-entropy penalty function and a logistic
  activation function.
  - a multi-class cross-entropy penalty function and a softmax
  activation function.
  By assigning a softmax activation function on the output layer of
  the neural network for categorical target variables, the outputs
  can be interpreted as posterior probabilities, which are very useful.
  `x` is the training samples.
  `y` is the response variable.
  `builders` are the builders of layers from bottom to top.
  `epochs` is the the number of epochs of stochastic learning.
  `eta` is the the learning rate.
  `alpha` is the momentum factor.
  `lambda` is the weight decay for regularization."
  ([x y builders] (mlp x y builders 10 0.1 0.0 0.0))
  ([x y builders epochs eta alpha lambda]
   ;; `alength` must be used to read a Java array's length; the interop call
   ;; `(.length (aget x 0))` fails at runtime because arrays expose no
   ;; reflective `length` member.
   (let [net (MLP. (alength (aget x 0)) builders)]
     ;; NOTE: the setter calls must be sequential statements. The previous
     ;; version wrapped them in an extra pair of parens, which invoked the
     ;; nil return value of .setLearningRate as a function and threw a
     ;; NullPointerException on every call.
     (.setLearningRate net eta)
     (.setMomentum net alpha)
     (.setWeightDecay net lambda)
     ;; One pass over the full training set per epoch (stochastic updates
     ;; are handled inside MLP.update).
     (dotimes [_ epochs] (.update net x y))
     net)))
(defn rbfnet
  "Trains a radial basis function (RBF) network for regression.

  An RBF network models the response as a weighted sum of radial basis
  functions, y(x) = Σ w<sub>i</sub> φ(||x-c<sub>i</sub>||), where each basis
  function φ is centered at some point c<sub>i</sub> (chosen at random from
  the training data, by clustering such as k-means, or learned jointly with
  the weights), and distance is usually Euclidean. Because the model is
  linear in the weights w<sub>i</sub>, they can be estimated by linear least
  squares. Popular basis functions include the Gaussian and thin plate
  splines; infinitely smooth choices (Gaussian, multi-quadric, inverse
  multi-quadric) carry a scale/shape parameter r<sub><small>0</small></sub>
  > 0 — decreasing it flattens the basis function, and approximation quality
  can depend strongly on it, though increasing it improves conditioning.
  Thin plate splines have the advantage that their conditioning is invariant
  under scalings.

  A normalized RBF (NRBF) network additionally constrains the basis
  functions to sum to unity, which arises more naturally from a Bayesian
  statistical perspective; there is no evidence that either variant is
  consistently superior to the other.

  RBF networks with Gaussian basis functions are structurally similar to
  SVMs with a Gaussian kernel, where support vectors play the role of basis
  function centers (and the QP procedure fixes the hidden layer size). With
  a similar number of support vectors/centers, SVMs have been reported to
  generalize better on small training sets and RBF networks on large ones.

  `x` is the training samples.
  `y` is the response variable.
  `neurons` are the radial basis functions.
  If `normalized` is true, train a normalized RBF network (default false)."
  ([x y neurons]
   (RBFNetwork/fit x y neurons false))
  ([x y neurons normalized]
   (RBFNetwork/fit x y neurons normalized)))
(defn svr
  "Trains an epsilon support vector regression (SVR) model.

  As with SVMs for classification, the fitted model depends only on a
  subset of the training data (the support vectors): the loss function
  ignores any sample whose prediction error falls within the threshold
  `eps`.

  `x` is the training data.
  `y` is the response variable.
  `kernel` is the kernel function.
  `eps` is the loss function error threshold.
  `C` is the soft margin penalty parameter.
  `tol` is the tolerance of convergence test (default 1E-3)."
  ([x y kernel eps C]
   (SVR/fit x y kernel eps C 1E-3))
  ([x y kernel eps C tol]
   (SVR/fit x y kernel eps C tol)))
(defn cart
  "Fits a regression tree by recursive partitioning (CART).

  The training set is split into subsets based on attribute value tests and
  the process repeats on each derived subset, stopping when a node is
  homogeneous in the target or further splitting adds no value to the
  predictions. Construction is greedy and top-down: at each step the
  variable chosen is the one that 'best' separates the items into
  homogeneous subsets. CART measures split quality with Gini impurity —
  how often a randomly chosen element would be mislabeled if labeled
  according to the subset's label distribution, reaching zero when a node
  is pure. ID3/C4.5/C5.0 instead use information gain (entropy based),
  which is biased toward categorical variables with many levels; the
  information gain ratio corrects that drawback.

  Advantages of tree methods:
  - Simple to understand and interpret: the result is a short series of
  if-then conditions that also explains *why* an observation is predicted
  a particular way.
  - Handle both numerical and categorical data, unlike many techniques
  specialized to one variable type.
  - Nonparametric and nonlinear: no implicit assumption that the
  predictor-response relationship is linear, follows a specific link
  function, or is even monotonic — well suited to data mining with little
  a priori knowledge of which variables relate and how.

  Drawbacks: high variance (a small change in the data can produce a very
  different series of splits), over-complex trees that overfit without
  pruning mechanisms, and a non-smooth prediction surface. Techniques such
  as bagging, boosting, and random forests combine multiple trees to
  mitigate these issues.

  `formula` is a symbolic description of the model to be fitted.
  `data` is the data frame of the explanatory and response variables.
  `max-depth` is the maximum depth of the tree.
  `max-nodes` is the maximum number of leaf nodes in the tree.
  `node-size` is the minimum size of leaf nodes."
  ([formula data]
   (RegressionTree/fit formula data 20 0 5))
  ([formula data max-depth max-nodes node-size]
   (RegressionTree/fit formula data max-depth max-nodes node-size)))
(defn random-forest
  "Fits a random forest for regression.

  A random forest is an ensemble of decision trees that outputs the
  aggregate of the individual trees. It combines the bagging idea with
  random feature selection. Each tree is grown as follows:
  1. Sample N cases with replacement from the N training cases; this
  bootstrap sample is the tree's training set.
  2. At each node, select m << M of the M input variables at random and
  split on the best of those m; m is held constant while the forest grows.
  3. Grow the tree to the largest extent possible, with no pruning.

  Strengths: high accuracy on many data sets, efficiency on large data,
  the ability to handle thousands of input variables without deletion,
  estimates of variable importance, an internal unbiased (out-of-bag)
  estimate of generalization error as the forest is built, and effective
  handling of missing data even when a large proportion is missing.
  Weaknesses: proneness to over-fitting on some (especially noisy) data
  sets, and variable-importance scores that are biased in favor of
  categorical variables with more levels and thus unreliable for such data.

  `formula` is a symbolic description of the model to be fitted.
  `data` is the data frame of the explanatory and response variables.
  `ntrees` is the number of trees.
  `mtry` is the number of randomly selected features considered at each
  node; `dim/3` generally performs well, where `dim` is the number of
  variables.
  `max-depth` is the maximum depth of the tree.
  `max-nodes` is the maximum number of leaf nodes in the tree.
  `node-size` is the minimum size of leaf nodes.
  `subsample` is the sampling rate for training each tree: 1.0 samples
  with replacement, < 1.0 samples without replacement."
  ([formula data]
   (RandomForest/fit formula data 500 0 20 500 5 1.0))
  ([formula data ntrees mtry max-depth max-nodes node-size subsample]
   (RandomForest/fit formula data ntrees mtry max-depth max-nodes node-size subsample)))
(defn gbm
  "Fits gradient boosted regression trees.

  At each step, generic gradient boosting fits a regression tree with J
  leaves to the pseudo-residuals of the current model; the tree partitions
  the input space into J disjoint regions, each predicting a constant.
  J bounds the level of interaction between variables (J = 2, decision
  stumps, allows none; J = 3 allows pairwise interactions, and so on).
  Hastie et al. note that typically 4 ≤ J ≤ 8 works well, results are
  fairly insensitive within that range, J = 2 is often insufficient, and
  J > 10 is unlikely to be required.

  Several regularization mechanisms constrain the fit to avoid degrading
  generalization:
  - The number of boosting iterations T (`ntrees`): more trees reduce
  training error but too many over-fit; an optimal T is often chosen by
  monitoring error on a separate validation set.
  - Shrinkage: each update is scaled by a learning rate η in (0, 1].
  Empirically, small rates (η < 0.1) dramatically improve generalization
  over η = 1, at the price of more iterations during both training and
  prediction.
  - Stochastic subsampling (proposed by Friedman, motivated by Breiman's
  bagging): each base learner is fit on a fraction f of the training set
  drawn at random without replacement. f = 1 recovers the deterministic
  algorithm; smaller f adds regularizing randomness and speeds up fitting
  (typically f = 0.5). As in bagging, subsampling also permits out-of-bag
  estimates of performance improvement, which avoid a separate validation
  set but often underestimate both the actual improvement and the optimal
  iteration count.
  - A minimum leaf size (`node-size`): splits producing nodes with fewer
  training instances are ignored, reducing variance in leaf predictions.

  `formula` is a symbolic description of the model to be fitted.
  `data` is the data frame of the explanatory and response variables.
  `loss` is the loss function for regression.
  `ntrees` is the number of iterations (trees).
  `max-depth` is the maximum depth of the tree.
  `max-nodes` is the maximum number of leaf nodes in the tree.
  `node-size` is the minimum size of leaf nodes.
  `shrinkage` is the shrinkage parameter in (0, 1] controlling the
  learning rate of the procedure.
  `subsample` is the sampling fraction for stochastic tree boosting."
  ([formula data]
   (gbm formula data (Loss/lad) 500 20 6 5 0.05 0.7))
  ([formula data loss ntrees max-depth max-nodes node-size shrinkage subsample]
   (GradientTreeBoost/fit formula data loss ntrees max-depth max-nodes node-size shrinkage subsample)))
(defn gpr
  "Fits a regular Gaussian process regression model.

  A Gaussian process is a stochastic process whose realizations assign
  normally distributed random values to every point in a range of times or
  space, with every finite collection of those values jointly multivariate
  normal. Used as a Bayesian prior over functions — take a multivariate
  Gaussian whose covariance is the Gram matrix of N points under a chosen
  kernel — inference of continuous values is known as Gaussian process
  regression. The fit is performed in the reproducing kernel Hilbert space
  via the 'kernel trick' with a squared-error loss; it also arises as the
  kriging estimate of a Gaussian random field in spatial statistics.

  Exact GP prediction typically scales as O(n<sup>3</sup>): for large
  problems (e.g. n > 10,000) storing the Gram matrix and solving the
  associated linear systems are prohibitive. Common remedies include
  reduced-rank (Nystrom) approximation of the Gram matrix and greedy
  selection of an active set of m < n training points — searching for the
  optimal subset is combinatorially infeasible, so points are picked
  randomly or greedily w.r.t. some criterion; more recent work relaxes the
  constraint that inducing variables be a subset of the training/test
  cases, turning the discrete selection into continuous optimization.
  See `gpr-approx` and `gpr-nystrom` for approximate fits.

  `x` is the training dataset.
  `y` is the response variable.
  `kernel` is the Mercer kernel.
  `noise` is the noise variance, which also works as a regularization
  parameter.
  `normalize` is the option to normalize the response variable.
  `tol` is the stopping tolerance for HPO.
  `max-iter` is the maximum number of HPO iterations; no HPO if maxIter <= 0."
  ([x y kernel noise]
   (GaussianProcessRegression/fit x y kernel noise))
  ([x y kernel noise normalize tol max-iter]
   (GaussianProcessRegression/fit x y kernel noise normalize tol max-iter)))
(defn gpr-approx
  "Fits an approximate Gaussian process with a subset of regressors.

  `x` is the training dataset.
  `y` is the response variable.
  `t` is the inducing input: pre-selected samples acting as the active set
  of regressors, e.g. chosen randomly from the training set or as k-means
  cluster centers.
  `kernel` is the Mercer kernel.
  `noise` is the noise variance, which also works as a regularization
  parameter.
  `normalize` is the option to normalize the response variable."
  ([x y t kernel noise]
   (GaussianProcessRegression/fit x y t kernel noise))
  ([x y t kernel noise normalize]
   (GaussianProcessRegression/fit x y t kernel noise normalize)))
(defn gpr-nystrom
  "Fits an approximate Gaussian process using the Nystrom low-rank
  approximation of the kernel (Gram) matrix.

  `x` is the training dataset.
  `y` is the response variable.
  `t` is the inducing input: pre-selected samples acting as the active set
  of regressors, e.g. chosen randomly from the training set or as k-means
  cluster centers.
  `kernel` is the Mercer kernel.
  `noise` is the noise variance, which also works as a regularization
  parameter.
  `normalize` is the option to normalize the response variable."
  ([x y t kernel noise]
   (GaussianProcessRegression/nystrom x y t kernel noise))
  ([x y t kernel noise normalize]
   (GaussianProcessRegression/nystrom x y t kernel noise normalize)))
|
[
{
"context": " [:a { :href \"//twitter.com/nowprovision\" } \n \"Matt Freeman\"]\n ",
"end": 3528,
"score": 0.8167638778686523,
"start": 3516,
"tag": "USERNAME",
"value": "nowprovision"
},
{
"context": "com/nowprovision\" } \n \"Matt Freeman\"]\n ]]] ]) \n :compon",
"end": 3574,
"score": 0.9998887777328491,
"start": 3562,
"tag": "NAME",
"value": "Matt Freeman"
}
] |
src-cljs/webhookproxyweb/components/layout.cljs
|
sitepodmatt/webhookproxyweb
| 3 |
(ns webhookproxyweb.components.layout
(:require [clojure.string :refer [join]]
[freeman.ospa.core :refer [dispatch
resolve-route]]
[reagent.core :refer [create-class]]))
(defn hide-drawer []
; this hacky - find a better way to handle material integration
(->
(.querySelector js/document ".mdl-layout__drawer")
(.-classList)
(.remove "is-visible"))
(->
(.querySelector js/document ".mdl-layout__obfuscator")
(.-classList)
(.remove "is-visible")))
(defn layout [content]
(create-class
{:reagent-render (fn [content]
[:div.mdl-layout.mdl-js-layout.mdl-layout--fixed-header
[:header.mdl-layout__header
[:div.mdl-layout__header-row
[:a.mdl-layout-title.ajax-link
{:alt "Listings"
:on-click #(do (dispatch [:redirect :list-webhooks]) false) }
[:img {:src "/img/webhooks.svg"
:style {:width "50px"
:height "50px" }} ]
"WebHookProxy"]
[:div.mdl-layout-spacer]
[:nav.mdl-navigation.mdl-layout--large-screen-only.right-actions
[:a.mdl-navigation__link.ajax-link {:style {:font-weight "bold" } :on-click #(do (dispatch [:redirect :add-webhook]) false) }
[:i.material-icons "add"]
[:span "Add webhook"]]
[:a.mdl-navigation__link.ajax-link {:style {:font-weight "bold" } :on-click #(do (dispatch [:logout]) false) }
[:i.material-icons "exit_to_app"]
[:span "Logout"]]
]]
]
[:div.mdl-layout__drawer
[:span.mdl-layout-title "WebHookProxy"]
[:nav.mdl-navigation
[:a.mdl-navigation__link.ajax-link
{:href "#" :on-click #(do
(dispatch [:redirect :list-webhooks])
(hide-drawer)
false) }
"Edit Webhooks"]
[:a.mdl-navigation__link.ajax-link
{:on-click #(do
(dispatch [:redirect :add-webhook])
(hide-drawer)
false) }
"Add Webhook"]
[:a.mdl-navigation__link.ajax-link
{:on-click #(do (dispatch [:logout])
(hide-drawer)
false) }
"Logout"]]]
[:main.mdl-layout__content
[:div.page-content
content]]
[:footer.mdl-mini-footer
[:div.mdl-mini-footer__left-section
[:p
[:i.material-icons "star"]
"Maintained by "
[:a { :href "//twitter.com/nowprovision" }
"Matt Freeman"]
]]] ])
:component-did-mount (fn [this]
(let [chandler (goog.object.get js/window "componentHandler")]
((goog.object.get chandler "upgradeElement") (.getDOMNode this)))
)
}))
|
34732
|
(ns webhookproxyweb.components.layout
(:require [clojure.string :refer [join]]
[freeman.ospa.core :refer [dispatch
resolve-route]]
[reagent.core :refer [create-class]]))
(defn hide-drawer []
; this hacky - find a better way to handle material integration
(->
(.querySelector js/document ".mdl-layout__drawer")
(.-classList)
(.remove "is-visible"))
(->
(.querySelector js/document ".mdl-layout__obfuscator")
(.-classList)
(.remove "is-visible")))
(defn layout [content]
(create-class
{:reagent-render (fn [content]
[:div.mdl-layout.mdl-js-layout.mdl-layout--fixed-header
[:header.mdl-layout__header
[:div.mdl-layout__header-row
[:a.mdl-layout-title.ajax-link
{:alt "Listings"
:on-click #(do (dispatch [:redirect :list-webhooks]) false) }
[:img {:src "/img/webhooks.svg"
:style {:width "50px"
:height "50px" }} ]
"WebHookProxy"]
[:div.mdl-layout-spacer]
[:nav.mdl-navigation.mdl-layout--large-screen-only.right-actions
[:a.mdl-navigation__link.ajax-link {:style {:font-weight "bold" } :on-click #(do (dispatch [:redirect :add-webhook]) false) }
[:i.material-icons "add"]
[:span "Add webhook"]]
[:a.mdl-navigation__link.ajax-link {:style {:font-weight "bold" } :on-click #(do (dispatch [:logout]) false) }
[:i.material-icons "exit_to_app"]
[:span "Logout"]]
]]
]
[:div.mdl-layout__drawer
[:span.mdl-layout-title "WebHookProxy"]
[:nav.mdl-navigation
[:a.mdl-navigation__link.ajax-link
{:href "#" :on-click #(do
(dispatch [:redirect :list-webhooks])
(hide-drawer)
false) }
"Edit Webhooks"]
[:a.mdl-navigation__link.ajax-link
{:on-click #(do
(dispatch [:redirect :add-webhook])
(hide-drawer)
false) }
"Add Webhook"]
[:a.mdl-navigation__link.ajax-link
{:on-click #(do (dispatch [:logout])
(hide-drawer)
false) }
"Logout"]]]
[:main.mdl-layout__content
[:div.page-content
content]]
[:footer.mdl-mini-footer
[:div.mdl-mini-footer__left-section
[:p
[:i.material-icons "star"]
"Maintained by "
[:a { :href "//twitter.com/nowprovision" }
"<NAME>"]
]]] ])
:component-did-mount (fn [this]
(let [chandler (goog.object.get js/window "componentHandler")]
((goog.object.get chandler "upgradeElement") (.getDOMNode this)))
)
}))
| true |
(ns webhookproxyweb.components.layout
(:require [clojure.string :refer [join]]
[freeman.ospa.core :refer [dispatch
resolve-route]]
[reagent.core :refer [create-class]]))
(defn hide-drawer []
; this hacky - find a better way to handle material integration
(->
(.querySelector js/document ".mdl-layout__drawer")
(.-classList)
(.remove "is-visible"))
(->
(.querySelector js/document ".mdl-layout__obfuscator")
(.-classList)
(.remove "is-visible")))
(defn layout [content]
(create-class
{:reagent-render (fn [content]
[:div.mdl-layout.mdl-js-layout.mdl-layout--fixed-header
[:header.mdl-layout__header
[:div.mdl-layout__header-row
[:a.mdl-layout-title.ajax-link
{:alt "Listings"
:on-click #(do (dispatch [:redirect :list-webhooks]) false) }
[:img {:src "/img/webhooks.svg"
:style {:width "50px"
:height "50px" }} ]
"WebHookProxy"]
[:div.mdl-layout-spacer]
[:nav.mdl-navigation.mdl-layout--large-screen-only.right-actions
[:a.mdl-navigation__link.ajax-link {:style {:font-weight "bold" } :on-click #(do (dispatch [:redirect :add-webhook]) false) }
[:i.material-icons "add"]
[:span "Add webhook"]]
[:a.mdl-navigation__link.ajax-link {:style {:font-weight "bold" } :on-click #(do (dispatch [:logout]) false) }
[:i.material-icons "exit_to_app"]
[:span "Logout"]]
]]
]
[:div.mdl-layout__drawer
[:span.mdl-layout-title "WebHookProxy"]
[:nav.mdl-navigation
[:a.mdl-navigation__link.ajax-link
{:href "#" :on-click #(do
(dispatch [:redirect :list-webhooks])
(hide-drawer)
false) }
"Edit Webhooks"]
[:a.mdl-navigation__link.ajax-link
{:on-click #(do
(dispatch [:redirect :add-webhook])
(hide-drawer)
false) }
"Add Webhook"]
[:a.mdl-navigation__link.ajax-link
{:on-click #(do (dispatch [:logout])
(hide-drawer)
false) }
"Logout"]]]
[:main.mdl-layout__content
[:div.page-content
content]]
[:footer.mdl-mini-footer
[:div.mdl-mini-footer__left-section
[:p
[:i.material-icons "star"]
"Maintained by "
[:a { :href "//twitter.com/nowprovision" }
"PI:NAME:<NAME>END_PI"]
]]] ])
:component-did-mount (fn [this]
(let [chandler (goog.object.get js/window "componentHandler")]
((goog.object.get chandler "upgradeElement") (.getDOMNode this)))
)
}))
|
[
{
"context": "name]\n (println \"Hello \" name))\n\n(sayHello \"Hello Heavens!\")\n",
"end": 280,
"score": 0.9824873805046082,
"start": 273,
"tag": "NAME",
"value": "Heavens"
}
] |
src/com/mshiray/recipebcp/core.clj
|
mshiray/recipebcp
| 0 |
(ns clojuretryout.core
(:gen-class))
(defn -main
"I don't do a whole lot ... yet."
[& args]
(println "Hello, World!" args))
;simple function to accept string param and print hello
;msg on console
(defn sayHello [name]
(println "Hello " name))
(sayHello "Hello Heavens!")
|
104582
|
(ns clojuretryout.core
(:gen-class))
(defn -main
"I don't do a whole lot ... yet."
[& args]
(println "Hello, World!" args))
;simple function to accept string param and print hello
;msg on console
(defn sayHello [name]
(println "Hello " name))
(sayHello "Hello <NAME>!")
| true |
(ns clojuretryout.core
(:gen-class))
(defn -main
"I don't do a whole lot ... yet."
[& args]
(println "Hello, World!" args))
;simple function to accept string param and print hello
;msg on console
(defn sayHello [name]
(println "Hello " name))
(sayHello "Hello PI:NAME:<NAME>END_PI!")
|
[
{
"context": "deftest run-tests]])\n\n(def favorite-fruit {:name \"Kiwi\", :color \"Green\", :kcal_per_100g 61 :distinguish_",
"end": 102,
"score": 0.9991243481636047,
"start": 98,
"tag": "NAME",
"value": "Kiwi"
},
{
"context": "k \"Hairy\"})\n\n(deftest exercise-202-test\n (is (= \"Kiwi\" (get favorite-fruit :name)))\n (is (= \"Green\" (g",
"end": 209,
"score": 0.9986056685447693,
"start": 205,
"tag": "NAME",
"value": "Kiwi"
},
{
"context": "ape favorite-fruit \"egg-like\")))\n (is (= {:name \"Kiwi\", :color \"Green\", :kcal_per_100g 61, :distinguish",
"end": 619,
"score": 0.99901282787323,
"start": 615,
"tag": "NAME",
"value": "Kiwi"
},
{
"context": "orite-fruit :shape \"egg-like\")))\n (is (= {:name \"Kiwi\", :color \"Green\", :kcal_per_100g 61, :distinguish",
"end": 774,
"score": 0.9990847110748291,
"start": 770,
"tag": "NAME",
"value": "Kiwi"
},
{
"context": "\"Hairy\"}\n favorite-fruit))\n (is (= {:name \"Kiwi\", :color \"Brown\", :kcal_per_100g 61, :distinguish",
"end": 884,
"score": 0.9988769292831421,
"start": 880,
"tag": "NAME",
"value": "Kiwi"
},
{
"context": "favorite-fruit :color \"Brown\")))\n (is (= {:name \"Kiwi\", :color \"Green\", :kcal_per_100g 61, :distinguish",
"end": 1017,
"score": 0.9990227222442627,
"start": 1013,
"tag": "NAME",
"value": "Kiwi"
},
{
"context": "0 :iran 311000 :chile 225000})))\n (is (= {:name \"Kiwi\", :color \"Green\", :kcal_per_100g 60, :distinguish",
"end": 1354,
"score": 0.998757004737854,
"start": 1350,
"tag": "NAME",
"value": "Kiwi"
},
{
"context": "l_per_100g favorite-fruit) 1))))\n (is (= {:name \"Kiwi\", :color \"Green\", :kcal_per_100g 60, :distinguish",
"end": 1525,
"score": 0.998714029788971,
"start": 1521,
"tag": "NAME",
"value": "Kiwi"
},
{
"context": "rite-fruit :kcal_per_100g dec)))\n (is (= {:name \"Kiwi\", :color \"Green\", :kcal_per_100g 51, :distinguish",
"end": 1663,
"score": 0.9988542795181274,
"start": 1659,
"tag": "NAME",
"value": "Kiwi"
},
{
"context": "ite-fruit :kcal_per_100g - 10)))\n (is (= {:name \"Kiwi\", :color \"Green\", :kcal_per_100g 61}\n (diss",
"end": 1802,
"score": 0.9986547231674194,
"start": 1798,
"tag": "NAME",
"value": "Kiwi"
},
{
"context": "orite-fruit :distinguish_mark)))\n (is (= {:name \"Kiwi\", :distinguish_mark \"Hairy\"}\n (dissoc favor",
"end": 1912,
"score": 0.9981999397277832,
"start": 1908,
"tag": "NAME",
"value": "Kiwi"
}
] |
Chapter02/tests/Exercise2.02/repl.clj
|
transducer/The-Clojure-Workshop
| 55 |
(require '[clojure.test :as test :refer [is are deftest run-tests]])
(def favorite-fruit {:name "Kiwi", :color "Green", :kcal_per_100g 61 :distinguish_mark "Hairy"})
(deftest exercise-202-test
(is (= "Kiwi" (get favorite-fruit :name)))
(is (= "Green" (get favorite-fruit :color)))
(is (= nil (get favorite-fruit :taste)))
(is (= "Very good 8/10" (get favorite-fruit :taste "Very good 8/10")))
(is (= 61 (get favorite-fruit :kcal_per_100g 0)))
(is (= "Green" (favorite-fruit :color)))
(is (= "Green" (:color favorite-fruit)))
(is (= "egg-like" (:shape favorite-fruit "egg-like")))
(is (= {:name "Kiwi", :color "Green", :kcal_per_100g 61, :distinguish_mark "Hairy", :shape "egg-like"}
(assoc favorite-fruit :shape "egg-like")))
(is (= {:name "Kiwi", :color "Green", :kcal_per_100g 61, :distinguish_mark "Hairy"}
favorite-fruit))
(is (= {:name "Kiwi", :color "Brown", :kcal_per_100g 61, :distinguish_mark "Hairy"}
(assoc favorite-fruit :color "Brown")))
(is (= {:name "Kiwi", :color "Green", :kcal_per_100g 61, :distinguish_mark "Hairy", :yearly_production_in_tonnes {:china 2025000, :italy 541000, :new_zealand 412000, :iran 311000, :chile 225000}}
(assoc favorite-fruit :yearly_production_in_tonnes {:china 2025000 :italy 541000 :new_zealand 412000 :iran 311000 :chile 225000})))
(is (= {:name "Kiwi", :color "Green", :kcal_per_100g 60, :distinguish_mark "Hairy"}
(assoc favorite-fruit :kcal_per_100g (- (:kcal_per_100g favorite-fruit) 1))))
(is (= {:name "Kiwi", :color "Green", :kcal_per_100g 60, :distinguish_mark "Hairy"}
(update favorite-fruit :kcal_per_100g dec)))
(is (= {:name "Kiwi", :color "Green", :kcal_per_100g 51, :distinguish_mark "Hairy"}
(update favorite-fruit :kcal_per_100g - 10)))
(is (= {:name "Kiwi", :color "Green", :kcal_per_100g 61}
(dissoc favorite-fruit :distinguish_mark)))
(is (= {:name "Kiwi", :distinguish_mark "Hairy"}
(dissoc favorite-fruit :kcal_per_100g :color))))
(run-tests)
|
57391
|
(require '[clojure.test :as test :refer [is are deftest run-tests]])
(def favorite-fruit {:name "<NAME>", :color "Green", :kcal_per_100g 61 :distinguish_mark "Hairy"})
(deftest exercise-202-test
(is (= "<NAME>" (get favorite-fruit :name)))
(is (= "Green" (get favorite-fruit :color)))
(is (= nil (get favorite-fruit :taste)))
(is (= "Very good 8/10" (get favorite-fruit :taste "Very good 8/10")))
(is (= 61 (get favorite-fruit :kcal_per_100g 0)))
(is (= "Green" (favorite-fruit :color)))
(is (= "Green" (:color favorite-fruit)))
(is (= "egg-like" (:shape favorite-fruit "egg-like")))
(is (= {:name "<NAME>", :color "Green", :kcal_per_100g 61, :distinguish_mark "Hairy", :shape "egg-like"}
(assoc favorite-fruit :shape "egg-like")))
(is (= {:name "<NAME>", :color "Green", :kcal_per_100g 61, :distinguish_mark "Hairy"}
favorite-fruit))
(is (= {:name "<NAME>", :color "Brown", :kcal_per_100g 61, :distinguish_mark "Hairy"}
(assoc favorite-fruit :color "Brown")))
(is (= {:name "<NAME>", :color "Green", :kcal_per_100g 61, :distinguish_mark "Hairy", :yearly_production_in_tonnes {:china 2025000, :italy 541000, :new_zealand 412000, :iran 311000, :chile 225000}}
(assoc favorite-fruit :yearly_production_in_tonnes {:china 2025000 :italy 541000 :new_zealand 412000 :iran 311000 :chile 225000})))
(is (= {:name "<NAME>", :color "Green", :kcal_per_100g 60, :distinguish_mark "Hairy"}
(assoc favorite-fruit :kcal_per_100g (- (:kcal_per_100g favorite-fruit) 1))))
(is (= {:name "<NAME>", :color "Green", :kcal_per_100g 60, :distinguish_mark "Hairy"}
(update favorite-fruit :kcal_per_100g dec)))
(is (= {:name "<NAME>", :color "Green", :kcal_per_100g 51, :distinguish_mark "Hairy"}
(update favorite-fruit :kcal_per_100g - 10)))
(is (= {:name "<NAME>", :color "Green", :kcal_per_100g 61}
(dissoc favorite-fruit :distinguish_mark)))
(is (= {:name "<NAME>", :distinguish_mark "Hairy"}
(dissoc favorite-fruit :kcal_per_100g :color))))
(run-tests)
| true |
(require '[clojure.test :as test :refer [is are deftest run-tests]])
(def favorite-fruit {:name "PI:NAME:<NAME>END_PI", :color "Green", :kcal_per_100g 61 :distinguish_mark "Hairy"})
(deftest exercise-202-test
(is (= "PI:NAME:<NAME>END_PI" (get favorite-fruit :name)))
(is (= "Green" (get favorite-fruit :color)))
(is (= nil (get favorite-fruit :taste)))
(is (= "Very good 8/10" (get favorite-fruit :taste "Very good 8/10")))
(is (= 61 (get favorite-fruit :kcal_per_100g 0)))
(is (= "Green" (favorite-fruit :color)))
(is (= "Green" (:color favorite-fruit)))
(is (= "egg-like" (:shape favorite-fruit "egg-like")))
(is (= {:name "PI:NAME:<NAME>END_PI", :color "Green", :kcal_per_100g 61, :distinguish_mark "Hairy", :shape "egg-like"}
(assoc favorite-fruit :shape "egg-like")))
(is (= {:name "PI:NAME:<NAME>END_PI", :color "Green", :kcal_per_100g 61, :distinguish_mark "Hairy"}
favorite-fruit))
(is (= {:name "PI:NAME:<NAME>END_PI", :color "Brown", :kcal_per_100g 61, :distinguish_mark "Hairy"}
(assoc favorite-fruit :color "Brown")))
(is (= {:name "PI:NAME:<NAME>END_PI", :color "Green", :kcal_per_100g 61, :distinguish_mark "Hairy", :yearly_production_in_tonnes {:china 2025000, :italy 541000, :new_zealand 412000, :iran 311000, :chile 225000}}
(assoc favorite-fruit :yearly_production_in_tonnes {:china 2025000 :italy 541000 :new_zealand 412000 :iran 311000 :chile 225000})))
(is (= {:name "PI:NAME:<NAME>END_PI", :color "Green", :kcal_per_100g 60, :distinguish_mark "Hairy"}
(assoc favorite-fruit :kcal_per_100g (- (:kcal_per_100g favorite-fruit) 1))))
(is (= {:name "PI:NAME:<NAME>END_PI", :color "Green", :kcal_per_100g 60, :distinguish_mark "Hairy"}
(update favorite-fruit :kcal_per_100g dec)))
(is (= {:name "PI:NAME:<NAME>END_PI", :color "Green", :kcal_per_100g 51, :distinguish_mark "Hairy"}
(update favorite-fruit :kcal_per_100g - 10)))
(is (= {:name "PI:NAME:<NAME>END_PI", :color "Green", :kcal_per_100g 61}
(dissoc favorite-fruit :distinguish_mark)))
(is (= {:name "PI:NAME:<NAME>END_PI", :distinguish_mark "Hairy"}
(dissoc favorite-fruit :kcal_per_100g :color))))
(run-tests)
|
[
{
"context": "crux-node)\n login-code (db/->login-code \"[email protected]\")\n tokens (db/->tokens)]\n (assert-t",
"end": 570,
"score": 0.9999021291732788,
"start": 554,
"tag": "EMAIL",
"value": "[email protected]"
},
{
"context": "crux-node)\n login-code (db/->login-code \"[email protected]\")\n _ (tu/await-put! c login-code)\n ",
"end": 771,
"score": 0.9999068379402161,
"start": 755,
"tag": "EMAIL",
"value": "[email protected]"
}
] |
test/instant_website/auth_test.clj
|
instantwebsite/core-api
| 0 |
(ns instant-website.auth-test
(:require
[clojure.test :refer :all]
[clojure.pprint :refer [pprint]]
[crux.api :as crux]
[instant-website.auth :as auth]
[instant-website.test-utils :as tu]
[instant-website.db :as db]))
(defn assert-tokens [t]
(is (= 23 (count (:tokens/plugin t))))
(is (= 23 (count (:tokens/api t))))
(is (= \f (first (:tokens/plugin t))))
(is (= \a (first (:tokens/api t)))))
(deftest create-tokens-test
(testing "Create new tokens"
(let [c (tu/crux-node)
login-code (db/->login-code "[email protected]")
tokens (db/->tokens)]
(assert-tokens tokens)))
(testing "Create new tokens via trade-login-code!"
(let [c (tu/crux-node)
login-code (db/->login-code "[email protected]")
_ (tu/await-put! c login-code)
req {:crux c
:route-params {:email (:login-code/email login-code)
:code (:login-code/code login-code)}}
profile (:body (auth/handle-trade-login-code! req))]
(assert-tokens profile)
(testing "Authenticate with API Token"
(let [api-req {:crux c
:identity {:user-id (:crux.db/id profile)}}
auth-res (auth/api-token-authfn
{:crux c}
(:tokens/api profile))
me-res (:body (auth/me-handler api-req))]
(is (= (:user-id auth-res)
(:crux.db/id profile)))
(is (= (-> me-res :user :crux.db/id)
(:crux.db/id profile)))))
(testing "Authenticate with Plugin Token"
(let [api-req {:crux c
:identity {:user-id (:crux.db/id profile)}}
auth-res (auth/plugin-token-authfn
{:crux c}
(:tokens/plugin profile))
me-res (:body (auth/plugin-me-handler api-req))]
(is (= (:user-id auth-res)
(:crux.db/id profile)))
(is (= (:crux.db/id me-res)
(:crux.db/id profile)))))))
(testing "Authorize resources"
(testing "check ownership"
(let [user-id "testing"
resource {:name "hello"
:user-id "testing"}
wrapped-func (fn [] resource)]
(is (= resource (auth/check-ownership user-id resource :user-id wrapped-func)))
(is (= {:status 403} (auth/check-ownership "falseid" resource :user-id wrapped-func)))))
(testing "check website ownership"
(let [c (tu/crux-node)
user (tu/create-test-user c)
wrong-user (tu/create-test-user c)
website (tu/create-test-website c user)
req {:crux c
:route-params {:website-id (:crux.db/id website)}}]
(testing "with right user"
(let [req (assoc-in req [:identity :user-id] (:crux.db/id user))
res (auth/check-website-ownership req (fn [w] website))]
(is (= website res))))
(testing "with wrong user"
(let [req (assoc-in req [:identity :user-id] (:crux.db/id wrong-user))
res (auth/check-website-ownership req (fn [w] website))]
(is (= {:status 403} res))))))))
|
52097
|
(ns instant-website.auth-test
(:require
[clojure.test :refer :all]
[clojure.pprint :refer [pprint]]
[crux.api :as crux]
[instant-website.auth :as auth]
[instant-website.test-utils :as tu]
[instant-website.db :as db]))
(defn assert-tokens [t]
(is (= 23 (count (:tokens/plugin t))))
(is (= 23 (count (:tokens/api t))))
(is (= \f (first (:tokens/plugin t))))
(is (= \a (first (:tokens/api t)))))
(deftest create-tokens-test
(testing "Create new tokens"
(let [c (tu/crux-node)
login-code (db/->login-code "<EMAIL>")
tokens (db/->tokens)]
(assert-tokens tokens)))
(testing "Create new tokens via trade-login-code!"
(let [c (tu/crux-node)
login-code (db/->login-code "<EMAIL>")
_ (tu/await-put! c login-code)
req {:crux c
:route-params {:email (:login-code/email login-code)
:code (:login-code/code login-code)}}
profile (:body (auth/handle-trade-login-code! req))]
(assert-tokens profile)
(testing "Authenticate with API Token"
(let [api-req {:crux c
:identity {:user-id (:crux.db/id profile)}}
auth-res (auth/api-token-authfn
{:crux c}
(:tokens/api profile))
me-res (:body (auth/me-handler api-req))]
(is (= (:user-id auth-res)
(:crux.db/id profile)))
(is (= (-> me-res :user :crux.db/id)
(:crux.db/id profile)))))
(testing "Authenticate with Plugin Token"
(let [api-req {:crux c
:identity {:user-id (:crux.db/id profile)}}
auth-res (auth/plugin-token-authfn
{:crux c}
(:tokens/plugin profile))
me-res (:body (auth/plugin-me-handler api-req))]
(is (= (:user-id auth-res)
(:crux.db/id profile)))
(is (= (:crux.db/id me-res)
(:crux.db/id profile)))))))
(testing "Authorize resources"
(testing "check ownership"
(let [user-id "testing"
resource {:name "hello"
:user-id "testing"}
wrapped-func (fn [] resource)]
(is (= resource (auth/check-ownership user-id resource :user-id wrapped-func)))
(is (= {:status 403} (auth/check-ownership "falseid" resource :user-id wrapped-func)))))
(testing "check website ownership"
(let [c (tu/crux-node)
user (tu/create-test-user c)
wrong-user (tu/create-test-user c)
website (tu/create-test-website c user)
req {:crux c
:route-params {:website-id (:crux.db/id website)}}]
(testing "with right user"
(let [req (assoc-in req [:identity :user-id] (:crux.db/id user))
res (auth/check-website-ownership req (fn [w] website))]
(is (= website res))))
(testing "with wrong user"
(let [req (assoc-in req [:identity :user-id] (:crux.db/id wrong-user))
res (auth/check-website-ownership req (fn [w] website))]
(is (= {:status 403} res))))))))
| true |
(ns instant-website.auth-test
(:require
[clojure.test :refer :all]
[clojure.pprint :refer [pprint]]
[crux.api :as crux]
[instant-website.auth :as auth]
[instant-website.test-utils :as tu]
[instant-website.db :as db]))
(defn assert-tokens [t]
(is (= 23 (count (:tokens/plugin t))))
(is (= 23 (count (:tokens/api t))))
(is (= \f (first (:tokens/plugin t))))
(is (= \a (first (:tokens/api t)))))
(deftest create-tokens-test
(testing "Create new tokens"
(let [c (tu/crux-node)
login-code (db/->login-code "PI:EMAIL:<EMAIL>END_PI")
tokens (db/->tokens)]
(assert-tokens tokens)))
(testing "Create new tokens via trade-login-code!"
(let [c (tu/crux-node)
login-code (db/->login-code "PI:EMAIL:<EMAIL>END_PI")
_ (tu/await-put! c login-code)
req {:crux c
:route-params {:email (:login-code/email login-code)
:code (:login-code/code login-code)}}
profile (:body (auth/handle-trade-login-code! req))]
(assert-tokens profile)
(testing "Authenticate with API Token"
(let [api-req {:crux c
:identity {:user-id (:crux.db/id profile)}}
auth-res (auth/api-token-authfn
{:crux c}
(:tokens/api profile))
me-res (:body (auth/me-handler api-req))]
(is (= (:user-id auth-res)
(:crux.db/id profile)))
(is (= (-> me-res :user :crux.db/id)
(:crux.db/id profile)))))
(testing "Authenticate with Plugin Token"
(let [api-req {:crux c
:identity {:user-id (:crux.db/id profile)}}
auth-res (auth/plugin-token-authfn
{:crux c}
(:tokens/plugin profile))
me-res (:body (auth/plugin-me-handler api-req))]
(is (= (:user-id auth-res)
(:crux.db/id profile)))
(is (= (:crux.db/id me-res)
(:crux.db/id profile)))))))
(testing "Authorize resources"
(testing "check ownership"
(let [user-id "testing"
resource {:name "hello"
:user-id "testing"}
wrapped-func (fn [] resource)]
(is (= resource (auth/check-ownership user-id resource :user-id wrapped-func)))
(is (= {:status 403} (auth/check-ownership "falseid" resource :user-id wrapped-func)))))
(testing "check website ownership"
(let [c (tu/crux-node)
user (tu/create-test-user c)
wrong-user (tu/create-test-user c)
website (tu/create-test-website c user)
req {:crux c
:route-params {:website-id (:crux.db/id website)}}]
(testing "with right user"
(let [req (assoc-in req [:identity :user-id] (:crux.db/id user))
res (auth/check-website-ownership req (fn [w] website))]
(is (= website res))))
(testing "with wrong user"
(let [req (assoc-in req [:identity :user-id] (:crux.db/id wrong-user))
res (auth/check-website-ownership req (fn [w] website))]
(is (= {:status 403} res))))))))
|
[
{
"context": "e.harrigan.components.thymeleaf.impl\n {:author [\"David Harrigan\"]}\n (:import\n [org.thymeleaf TemplateEngine]\n ",
"end": 73,
"score": 0.9998770952224731,
"start": 59,
"tag": "NAME",
"value": "David Harrigan"
}
] |
src/online/harrigan/components/thymeleaf/impl.clj
|
dharrigan/unresolved-var
| 0 |
(ns online.harrigan.components.thymeleaf.impl
{:author ["David Harrigan"]}
(:import
[org.thymeleaf TemplateEngine]
[org.thymeleaf.context Context IContext]
[org.thymeleaf.templateresolver ClassLoaderTemplateResolver]))
(set! *warn-on-reflection* true)
(def ^:private template-resolver-defaults
{:prefix "public/"
:suffix ".html"
:cacheable false
:cache-ttl-ms 0})
(defn ^:private create-template-resolver
[config]
(let [{:keys [prefix suffix cacheable cache-ttl-ms]} (merge template-resolver-defaults config)]
(doto
(ClassLoaderTemplateResolver.)
(.setCacheable cacheable)
(.setCacheTTLMs cache-ttl-ms)
(.setPrefix prefix)
(.setSuffix suffix))))
(defn ^:private keywords-as-strings
[m]
(zipmap (map name (keys m)) (vals m)))
(defn render
[viewname data {:keys [template-engine] :as app-config}]
(let [context (Context.)]
(when data
(.setVariables context (keywords-as-strings data)))
(.process ^TemplateEngine template-engine ^String viewname ^IContext context)))
;; CLIP Lifecycle Functions
#_{:clj-kondo/ignore [:clojure-lsp/unused-public-var]}
(defn start ^ClassLoaderTemplateResolver
[config]
(doto
(TemplateEngine.)
(.setTemplateResolver (create-template-resolver config))))
|
41941
|
(ns online.harrigan.components.thymeleaf.impl
{:author ["<NAME>"]}
(:import
[org.thymeleaf TemplateEngine]
[org.thymeleaf.context Context IContext]
[org.thymeleaf.templateresolver ClassLoaderTemplateResolver]))
(set! *warn-on-reflection* true)
(def ^:private template-resolver-defaults
{:prefix "public/"
:suffix ".html"
:cacheable false
:cache-ttl-ms 0})
(defn ^:private create-template-resolver
[config]
(let [{:keys [prefix suffix cacheable cache-ttl-ms]} (merge template-resolver-defaults config)]
(doto
(ClassLoaderTemplateResolver.)
(.setCacheable cacheable)
(.setCacheTTLMs cache-ttl-ms)
(.setPrefix prefix)
(.setSuffix suffix))))
(defn ^:private keywords-as-strings
[m]
(zipmap (map name (keys m)) (vals m)))
(defn render
[viewname data {:keys [template-engine] :as app-config}]
(let [context (Context.)]
(when data
(.setVariables context (keywords-as-strings data)))
(.process ^TemplateEngine template-engine ^String viewname ^IContext context)))
;; CLIP Lifecycle Functions
#_{:clj-kondo/ignore [:clojure-lsp/unused-public-var]}
(defn start ^ClassLoaderTemplateResolver
[config]
(doto
(TemplateEngine.)
(.setTemplateResolver (create-template-resolver config))))
| true |
(ns online.harrigan.components.thymeleaf.impl
{:author ["PI:NAME:<NAME>END_PI"]}
(:import
[org.thymeleaf TemplateEngine]
[org.thymeleaf.context Context IContext]
[org.thymeleaf.templateresolver ClassLoaderTemplateResolver]))
(set! *warn-on-reflection* true)
(def ^:private template-resolver-defaults
{:prefix "public/"
:suffix ".html"
:cacheable false
:cache-ttl-ms 0})
(defn ^:private create-template-resolver
[config]
(let [{:keys [prefix suffix cacheable cache-ttl-ms]} (merge template-resolver-defaults config)]
(doto
(ClassLoaderTemplateResolver.)
(.setCacheable cacheable)
(.setCacheTTLMs cache-ttl-ms)
(.setPrefix prefix)
(.setSuffix suffix))))
(defn ^:private keywords-as-strings
[m]
(zipmap (map name (keys m)) (vals m)))
(defn render
[viewname data {:keys [template-engine] :as app-config}]
(let [context (Context.)]
(when data
(.setVariables context (keywords-as-strings data)))
(.process ^TemplateEngine template-engine ^String viewname ^IContext context)))
;; CLIP Lifecycle Functions
#_{:clj-kondo/ignore [:clojure-lsp/unused-public-var]}
(defn start ^ClassLoaderTemplateResolver
[config]
(doto
(TemplateEngine.)
(.setTemplateResolver (create-template-resolver config))))
|
[
{
"context": "\n (is (repo/commit! repo (-> (get-person repo \"Alice\" \"Appleseed\")\n (reg",
"end": 1013,
"score": 0.9992087483406067,
"start": 1008,
"tag": "NAME",
"value": "Alice"
},
{
"context": " (repo/commit! repo (-> (get-person repo \"Alice\" \"Appleseed\")\n (registered))))\n",
"end": 1025,
"score": 0.9986915588378906,
"start": 1016,
"tag": "NAME",
"value": "Appleseed"
},
{
"context": "type ::registered\n :given-name \"Alice\"\n :family-name \"Appleseed\"}]\n ",
"end": 1152,
"score": 0.9995394349098206,
"start": 1147,
"tag": "NAME",
"value": "Alice"
},
{
"context": "given-name \"Alice\"\n :family-name \"Appleseed\"}]\n (retrieve-events store (::aggreg",
"end": 1193,
"score": 0.9914303421974182,
"start": 1184,
"tag": "NAME",
"value": "Appleseed"
},
{
"context": " (retrieve-events store (::aggregate/id (person \"Alice\" \"Appleseed\")))))\n (is (sub? [{::msg/type ::",
"end": 1264,
"score": 0.9989039897918701,
"start": 1259,
"tag": "NAME",
"value": "Alice"
},
{
"context": "eve-events store (::aggregate/id (person \"Alice\" \"Appleseed\")))))\n (is (sub? [{::msg/type ::registered\n ",
"end": 1276,
"score": 0.9986052513122559,
"start": 1267,
"tag": "NAME",
"value": "Appleseed"
},
{
"context": "type ::registered\n :given-name \"Alice\"\n :family-name \"Appleseed\"}]\n ",
"end": 1360,
"score": 0.9996495842933655,
"start": 1355,
"tag": "NAME",
"value": "Alice"
},
{
"context": "given-name \"Alice\"\n :family-name \"Appleseed\"}]\n (retrieve-events store all-event",
"end": 1401,
"score": 0.9985312223434448,
"start": 1392,
"tag": "NAME",
"value": "Appleseed"
}
] |
data/test/clojure/aaef26d5731c20bda6a1b620411eee2693843020wrap_stream_properties_test.clj
|
harshp8l/deep-learning-lang-detection
| 84 |
(ns rill.wheel.wrap-stream-properties-test
(:require [clojure.test :refer [are deftest is testing use-fixtures]]
[rill.event-store :refer [append-events retrieve-events]]
[rill.event-store.memory :refer [memory-store]]
[rill.event-stream :refer [all-events-stream-id]]
[rill.message :as msg]
[rill.wheel :as aggregate :refer [defaggregate defevent ok?]]
[rill.wheel.bare-repository :refer [bare-repository]]
[rill.wheel.repository :as repo]
[rill.wheel.testing :refer [sub? with-instrument-all]]
[rill.wheel.wrap-stream-properties :refer [wrap-stream-properties]]))
(use-fixtures :once with-instrument-all)
(defaggregate person
[given-name family-name])
(defevent registered ::person
[p]
p)
(deftest test-stream-properties
(let [store (-> (memory-store)
(wrap-stream-properties))
repo (bare-repository store)]
(is (repo/commit! repo (-> (get-person repo "Alice" "Appleseed")
(registered))))
(is (sub? [{::msg/type ::registered
:given-name "Alice"
:family-name "Appleseed"}]
(retrieve-events store (::aggregate/id (person "Alice" "Appleseed")))))
(is (sub? [{::msg/type ::registered
:given-name "Alice"
:family-name "Appleseed"}]
(retrieve-events store all-events-stream-id)))))
|
77269
|
(ns rill.wheel.wrap-stream-properties-test
(:require [clojure.test :refer [are deftest is testing use-fixtures]]
[rill.event-store :refer [append-events retrieve-events]]
[rill.event-store.memory :refer [memory-store]]
[rill.event-stream :refer [all-events-stream-id]]
[rill.message :as msg]
[rill.wheel :as aggregate :refer [defaggregate defevent ok?]]
[rill.wheel.bare-repository :refer [bare-repository]]
[rill.wheel.repository :as repo]
[rill.wheel.testing :refer [sub? with-instrument-all]]
[rill.wheel.wrap-stream-properties :refer [wrap-stream-properties]]))
(use-fixtures :once with-instrument-all)
(defaggregate person
[given-name family-name])
(defevent registered ::person
[p]
p)
(deftest test-stream-properties
(let [store (-> (memory-store)
(wrap-stream-properties))
repo (bare-repository store)]
(is (repo/commit! repo (-> (get-person repo "<NAME>" "<NAME>")
(registered))))
(is (sub? [{::msg/type ::registered
:given-name "<NAME>"
:family-name "<NAME>"}]
(retrieve-events store (::aggregate/id (person "<NAME>" "<NAME>")))))
(is (sub? [{::msg/type ::registered
:given-name "<NAME>"
:family-name "<NAME>"}]
(retrieve-events store all-events-stream-id)))))
| true |
(ns rill.wheel.wrap-stream-properties-test
(:require [clojure.test :refer [are deftest is testing use-fixtures]]
[rill.event-store :refer [append-events retrieve-events]]
[rill.event-store.memory :refer [memory-store]]
[rill.event-stream :refer [all-events-stream-id]]
[rill.message :as msg]
[rill.wheel :as aggregate :refer [defaggregate defevent ok?]]
[rill.wheel.bare-repository :refer [bare-repository]]
[rill.wheel.repository :as repo]
[rill.wheel.testing :refer [sub? with-instrument-all]]
[rill.wheel.wrap-stream-properties :refer [wrap-stream-properties]]))
(use-fixtures :once with-instrument-all)
(defaggregate person
[given-name family-name])
(defevent registered ::person
[p]
p)
(deftest test-stream-properties
(let [store (-> (memory-store)
(wrap-stream-properties))
repo (bare-repository store)]
(is (repo/commit! repo (-> (get-person repo "PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI")
(registered))))
(is (sub? [{::msg/type ::registered
:given-name "PI:NAME:<NAME>END_PI"
:family-name "PI:NAME:<NAME>END_PI"}]
(retrieve-events store (::aggregate/id (person "PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI")))))
(is (sub? [{::msg/type ::registered
:given-name "PI:NAME:<NAME>END_PI"
:family-name "PI:NAME:<NAME>END_PI"}]
(retrieve-events store all-events-stream-id)))))
|
[
{
"context": ";\n; Copyright (c) Dilvan A. Moreira 2016. All rights reserved.\n;\n; This file is part",
"end": 35,
"score": 0.9998714327812195,
"start": 18,
"tag": "NAME",
"value": "Dilvan A. Moreira"
}
] |
grails-app/assets/javascripts/dataset-viewer/src/datasetViewer/db.cljs
|
johnciclus/Decisioner
| 1 |
;
; Copyright (c) Dilvan A. Moreira 2016. All rights reserved.
;
; This file is part of ePAD2.
;
; ePAD2 is free software: you can redistribute it and/or modify
; it under the terms of the GNU General Public License as published by
; the Free Software Foundation, either version 3 of the License, or
; (at your option) any later version.
;
; ePAD2 is distributed in the hope that it will be useful,
; but WITHOUT ANY WARRANTY; without even the implied warranty of
; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
; GNU General Public License for more details.
;
; You should have received a copy of the GNU General Public License
; along with ePAD2. If not, see <http://www.gnu.org/licenses/>.
;
(ns datasetViewer.db)
(def initial-state
{:current "editor0"
:tool :gradient
:views {"editor0" {:active-plane :all
:axial {:x 0.5 :y 0.5 :zoom 1 :imgCoord 0.5}
:sagittal {:x 0.5 :y 0.5 :zoom 1 :imgCoord 0.5}
:frontal {:x 0.5 :y 0.5 :zoom 1 :imgCoord 0.5}}}})
|
4424
|
;
; Copyright (c) <NAME> 2016. All rights reserved.
;
; This file is part of ePAD2.
;
; ePAD2 is free software: you can redistribute it and/or modify
; it under the terms of the GNU General Public License as published by
; the Free Software Foundation, either version 3 of the License, or
; (at your option) any later version.
;
; ePAD2 is distributed in the hope that it will be useful,
; but WITHOUT ANY WARRANTY; without even the implied warranty of
; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
; GNU General Public License for more details.
;
; You should have received a copy of the GNU General Public License
; along with ePAD2. If not, see <http://www.gnu.org/licenses/>.
;
(ns datasetViewer.db)
(def initial-state
{:current "editor0"
:tool :gradient
:views {"editor0" {:active-plane :all
:axial {:x 0.5 :y 0.5 :zoom 1 :imgCoord 0.5}
:sagittal {:x 0.5 :y 0.5 :zoom 1 :imgCoord 0.5}
:frontal {:x 0.5 :y 0.5 :zoom 1 :imgCoord 0.5}}}})
| true |
;
; Copyright (c) PI:NAME:<NAME>END_PI 2016. All rights reserved.
;
; This file is part of ePAD2.
;
; ePAD2 is free software: you can redistribute it and/or modify
; it under the terms of the GNU General Public License as published by
; the Free Software Foundation, either version 3 of the License, or
; (at your option) any later version.
;
; ePAD2 is distributed in the hope that it will be useful,
; but WITHOUT ANY WARRANTY; without even the implied warranty of
; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
; GNU General Public License for more details.
;
; You should have received a copy of the GNU General Public License
; along with ePAD2. If not, see <http://www.gnu.org/licenses/>.
;
(ns datasetViewer.db)
(def initial-state
{:current "editor0"
:tool :gradient
:views {"editor0" {:active-plane :all
:axial {:x 0.5 :y 0.5 :zoom 1 :imgCoord 0.5}
:sagittal {:x 0.5 :y 0.5 :zoom 1 :imgCoord 0.5}
:frontal {:x 0.5 :y 0.5 :zoom 1 :imgCoord 0.5}}}})
|
[
{
"context": ";; copyright (c) 2019-2021 Sean Corfield, all rights reserved\n\n(ns usermanager.model.user-",
"end": 40,
"score": 0.9998242855072021,
"start": 27,
"tag": "NAME",
"value": "Sean Corfield"
},
{
"context": "ed the database with this data.\"\n [{:first_name \"Sean\" :last_name \"Corfield\"\n :email \"sean@worldsing",
"end": 831,
"score": 0.9997433423995972,
"start": 827,
"tag": "NAME",
"value": "Sean"
},
{
"context": "th this data.\"\n [{:first_name \"Sean\" :last_name \"Corfield\"\n :email \"[email protected]\" :department_i",
"end": 853,
"score": 0.818152666091919,
"start": 845,
"tag": "NAME",
"value": "Corfield"
},
{
"context": "rst_name \"Sean\" :last_name \"Corfield\"\n :email \"[email protected]\" :department_id 4}\n {:first_name \"Michaël\" :las",
"end": 888,
"score": 0.9999303817749023,
"start": 867,
"tag": "EMAIL",
"value": "[email protected]"
},
{
"context": "ldsingles.com\" :department_id 4}\n {:first_name \"Michaël\" :last_name \"Salihi\"\n :email \"contact@prestanc",
"end": 932,
"score": 0.999822735786438,
"start": 925,
"tag": "NAME",
"value": "Michaël"
},
{
"context": "tment_id 4}\n {:first_name \"Michaël\" :last_name \"Salihi\"\n :email \"[email protected]\" :depar",
"end": 952,
"score": 0.99969083070755,
"start": 946,
"tag": "NAME",
"value": "Salihi"
},
{
"context": "st_name \"Michaël\" :last_name \"Salihi\"\n :email \"[email protected]\" :department_id 4}])\n\n;; database initialization\n",
"end": 994,
"score": 0.9999263882637024,
"start": 966,
"tag": "EMAIL",
"value": "[email protected]"
}
] |
src/usermanager/model/user_manager.clj
|
PrestanceDesign/usermanager-example
| 8 |
;; copyright (c) 2019-2021 Sean Corfield, all rights reserved
(ns usermanager.model.user-manager
"The model for the application. This is where the persistence happens,
although in a larger application, this would probably contain just the
business logic and the persistence would be in a separate namespace."
(:require [com.stuartsierra.component :as component]
[next.jdbc :as jdbc]
[next.jdbc.sql :as sql]
[next.jdbc.result-set :as rs]))
;; our database connection and initial data
(def ^:private my-db
"SQLite database connection spec."
{:dbtype "sqlite" :dbname "usermanager_db"})
(def ^:private departments
"List of departments."
["Accounting" "Sales" "Support" "Development"])
(def ^:private initial-user-data
"Seed the database with this data."
[{:first_name "Sean" :last_name "Corfield"
:email "[email protected]" :department_id 4}
{:first_name "Michaël" :last_name "Salihi"
:email "[email protected]" :department_id 4}])
;; database initialization
(defn- populate
"Called at application startup. Attempts to create the
database table and populate it. Takes no action if the
database table already exists."
[db db-type]
(let [auto-key (if (= "sqlite" db-type)
"primary key autoincrement"
(str "generated always as identity"
" (start with 1 increment by 1)"
" primary key"))]
(try
(jdbc/execute-one! (db)
[(str "
create table department (
id integer " auto-key ",
name varchar(32)
)")])
(jdbc/execute-one! (db)
[(str "
create table addressbook (
id integer " auto-key ",
first_name varchar(32),
last_name varchar(32),
email varchar(64),
department_id integer not null
)")])
(println "Created database and addressbook table!")
;; if table creation was successful, it didn't exist before
;; so populate it...
(try
(doseq [d departments]
(sql/insert! (db) :department {:name d}))
(doseq [row initial-user-data]
(sql/insert! (db) :addressbook row))
(println "Populated database with initial data!")
(catch Exception e
(println "Exception:" (ex-message e))
(println "Unable to populate the initial data -- proceed with caution!")))
(catch Exception e
(println "Exception:" (ex-message e))
(println "Looks like the database is already setup?")))))
;; database component
(defrecord Database [db-spec ; configuration
datasource] ; state
component/Lifecycle
(start [this]
(if datasource
this ; already initialized
(let [database (assoc this :datasource (jdbc/get-datasource db-spec))]
;; set up database if necessary
(populate database (:dbtype db-spec))
database)))
(stop [this]
(assoc this :datasource nil))
;; allow the Database component to be "called" with no arguments
;; to produce the underlying datasource object
clojure.lang.IFn
(invoke [_] (jdbc/with-options datasource {:builder-fn rs/as-unqualified-maps})))
(defn setup-database [] (map->Database {:db-spec my-db}))
;; data model access functions
(defn get-department-by-id
"Given a department ID, return the department record."
[db id]
(sql/get-by-id (db) :department id))
(defn get-departments
"Return all available department records (in order)."
[db]
(sql/query (db) ["select * from department order by name"]))
(defn get-user-by-id
"Given a user ID, return the user record."
[db id]
(sql/get-by-id (db) :addressbook id))
(defn get-users
"Return all available users, sorted by name.
Since this is a join, the keys in the hash maps returned will
be namespace-qualified by the table from which they are drawn:
addressbook/id, addressbook/first_name, etc, department/name"
[db]
(sql/query (db)
["
select a.*, d.name
from addressbook a
join department d on a.department_id = d.id
order by a.last_name, a.first_name
"]))
(defn save-user
"Save a user record. If ID is present and not zero, then
this is an update operation, otherwise it's an insert."
[db user]
(let [id (:addressbook/id user)]
(if (and id (not (zero? id)))
;; update
(sql/update! (db) :addressbook
(dissoc user :addressbook/id)
{:id id})
;; insert
(sql/insert! (db) :addressbook
(dissoc user :addressbook/id)))))
(defn delete-user-by-id
"Given a user ID, delete that user."
[db id]
(sql/delete! (db) :addressbook {:id id}))
|
108264
|
;; copyright (c) 2019-2021 <NAME>, all rights reserved
(ns usermanager.model.user-manager
"The model for the application. This is where the persistence happens,
although in a larger application, this would probably contain just the
business logic and the persistence would be in a separate namespace."
(:require [com.stuartsierra.component :as component]
[next.jdbc :as jdbc]
[next.jdbc.sql :as sql]
[next.jdbc.result-set :as rs]))
;; our database connection and initial data
(def ^:private my-db
"SQLite database connection spec."
{:dbtype "sqlite" :dbname "usermanager_db"})
(def ^:private departments
"List of departments."
["Accounting" "Sales" "Support" "Development"])
(def ^:private initial-user-data
"Seed the database with this data."
[{:first_name "<NAME>" :last_name "<NAME>"
:email "<EMAIL>" :department_id 4}
{:first_name "<NAME>" :last_name "<NAME>"
:email "<EMAIL>" :department_id 4}])
;; database initialization
(defn- populate
"Called at application startup. Attempts to create the
database table and populate it. Takes no action if the
database table already exists."
[db db-type]
(let [auto-key (if (= "sqlite" db-type)
"primary key autoincrement"
(str "generated always as identity"
" (start with 1 increment by 1)"
" primary key"))]
(try
(jdbc/execute-one! (db)
[(str "
create table department (
id integer " auto-key ",
name varchar(32)
)")])
(jdbc/execute-one! (db)
[(str "
create table addressbook (
id integer " auto-key ",
first_name varchar(32),
last_name varchar(32),
email varchar(64),
department_id integer not null
)")])
(println "Created database and addressbook table!")
;; if table creation was successful, it didn't exist before
;; so populate it...
(try
(doseq [d departments]
(sql/insert! (db) :department {:name d}))
(doseq [row initial-user-data]
(sql/insert! (db) :addressbook row))
(println "Populated database with initial data!")
(catch Exception e
(println "Exception:" (ex-message e))
(println "Unable to populate the initial data -- proceed with caution!")))
(catch Exception e
(println "Exception:" (ex-message e))
(println "Looks like the database is already setup?")))))
;; database component
(defrecord Database [db-spec ; configuration
datasource] ; state
component/Lifecycle
(start [this]
(if datasource
this ; already initialized
(let [database (assoc this :datasource (jdbc/get-datasource db-spec))]
;; set up database if necessary
(populate database (:dbtype db-spec))
database)))
(stop [this]
(assoc this :datasource nil))
;; allow the Database component to be "called" with no arguments
;; to produce the underlying datasource object
clojure.lang.IFn
(invoke [_] (jdbc/with-options datasource {:builder-fn rs/as-unqualified-maps})))
(defn setup-database [] (map->Database {:db-spec my-db}))
;; data model access functions
(defn get-department-by-id
"Given a department ID, return the department record."
[db id]
(sql/get-by-id (db) :department id))
(defn get-departments
"Return all available department records (in order)."
[db]
(sql/query (db) ["select * from department order by name"]))
(defn get-user-by-id
"Given a user ID, return the user record."
[db id]
(sql/get-by-id (db) :addressbook id))
(defn get-users
"Return all available users, sorted by name.
Since this is a join, the keys in the hash maps returned will
be namespace-qualified by the table from which they are drawn:
addressbook/id, addressbook/first_name, etc, department/name"
[db]
(sql/query (db)
["
select a.*, d.name
from addressbook a
join department d on a.department_id = d.id
order by a.last_name, a.first_name
"]))
(defn save-user
"Save a user record. If ID is present and not zero, then
this is an update operation, otherwise it's an insert."
[db user]
(let [id (:addressbook/id user)]
(if (and id (not (zero? id)))
;; update
(sql/update! (db) :addressbook
(dissoc user :addressbook/id)
{:id id})
;; insert
(sql/insert! (db) :addressbook
(dissoc user :addressbook/id)))))
(defn delete-user-by-id
"Given a user ID, delete that user."
[db id]
(sql/delete! (db) :addressbook {:id id}))
| true |
;; copyright (c) 2019-2021 PI:NAME:<NAME>END_PI, all rights reserved
(ns usermanager.model.user-manager
"The model for the application. This is where the persistence happens,
although in a larger application, this would probably contain just the
business logic and the persistence would be in a separate namespace."
(:require [com.stuartsierra.component :as component]
[next.jdbc :as jdbc]
[next.jdbc.sql :as sql]
[next.jdbc.result-set :as rs]))
;; our database connection and initial data
(def ^:private my-db
"SQLite database connection spec."
{:dbtype "sqlite" :dbname "usermanager_db"})
(def ^:private departments
"List of departments."
["Accounting" "Sales" "Support" "Development"])
(def ^:private initial-user-data
"Seed the database with this data."
[{:first_name "PI:NAME:<NAME>END_PI" :last_name "PI:NAME:<NAME>END_PI"
:email "PI:EMAIL:<EMAIL>END_PI" :department_id 4}
{:first_name "PI:NAME:<NAME>END_PI" :last_name "PI:NAME:<NAME>END_PI"
:email "PI:EMAIL:<EMAIL>END_PI" :department_id 4}])
;; database initialization
(defn- populate
"Called at application startup. Attempts to create the
database table and populate it. Takes no action if the
database table already exists."
[db db-type]
(let [auto-key (if (= "sqlite" db-type)
"primary key autoincrement"
(str "generated always as identity"
" (start with 1 increment by 1)"
" primary key"))]
(try
(jdbc/execute-one! (db)
[(str "
create table department (
id integer " auto-key ",
name varchar(32)
)")])
(jdbc/execute-one! (db)
[(str "
create table addressbook (
id integer " auto-key ",
first_name varchar(32),
last_name varchar(32),
email varchar(64),
department_id integer not null
)")])
(println "Created database and addressbook table!")
;; if table creation was successful, it didn't exist before
;; so populate it...
(try
(doseq [d departments]
(sql/insert! (db) :department {:name d}))
(doseq [row initial-user-data]
(sql/insert! (db) :addressbook row))
(println "Populated database with initial data!")
(catch Exception e
(println "Exception:" (ex-message e))
(println "Unable to populate the initial data -- proceed with caution!")))
(catch Exception e
(println "Exception:" (ex-message e))
(println "Looks like the database is already setup?")))))
;; database component
(defrecord Database [db-spec ; configuration
datasource] ; state
component/Lifecycle
(start [this]
(if datasource
this ; already initialized
(let [database (assoc this :datasource (jdbc/get-datasource db-spec))]
;; set up database if necessary
(populate database (:dbtype db-spec))
database)))
(stop [this]
(assoc this :datasource nil))
;; allow the Database component to be "called" with no arguments
;; to produce the underlying datasource object
clojure.lang.IFn
(invoke [_] (jdbc/with-options datasource {:builder-fn rs/as-unqualified-maps})))
(defn setup-database [] (map->Database {:db-spec my-db}))
;; data model access functions
(defn get-department-by-id
"Given a department ID, return the department record."
[db id]
(sql/get-by-id (db) :department id))
(defn get-departments
"Return all available department records (in order)."
[db]
(sql/query (db) ["select * from department order by name"]))
(defn get-user-by-id
"Given a user ID, return the user record."
[db id]
(sql/get-by-id (db) :addressbook id))
(defn get-users
"Return all available users, sorted by name.
Since this is a join, the keys in the hash maps returned will
be namespace-qualified by the table from which they are drawn:
addressbook/id, addressbook/first_name, etc, department/name"
[db]
(sql/query (db)
["
select a.*, d.name
from addressbook a
join department d on a.department_id = d.id
order by a.last_name, a.first_name
"]))
(defn save-user
"Save a user record. If ID is present and not zero, then
this is an update operation, otherwise it's an insert."
[db user]
(let [id (:addressbook/id user)]
(if (and id (not (zero? id)))
;; update
(sql/update! (db) :addressbook
(dissoc user :addressbook/id)
{:id id})
;; insert
(sql/insert! (db) :addressbook
(dissoc user :addressbook/id)))))
(defn delete-user-by-id
"Given a user ID, delete that user."
[db id]
(sql/delete! (db) :addressbook {:id id}))
|
[
{
"context": "; @author: Muhammadjon Hakimov\n(load-file \"object.clj\")\n(load-file \"combinators.",
"end": 30,
"score": 0.9998856782913208,
"start": 11,
"tag": "NAME",
"value": "Muhammadjon Hakimov"
}
] |
Clojure/expression.clj
|
MrHakimov/expression-parsers
| 0 |
; @author: Muhammadjon Hakimov
(load-file "object.clj")
(load-file "combinators.clj")
(def *all-chars (mapv char (range 0 128)))
(def *space (+char (apply str (filter #(Character/isWhitespace (char %)) *all-chars))))
(def *letter (+char (apply str (filter #(Character/isLetter (char %)) *all-chars))))
(def *digit (+char (apply str (filter #(Character/isDigit (char %)) *all-chars))))
(def *ws (+ignore (+star *space)))
(def *constant (+map (comp Constant read-string)
(+str (+seq (+opt (+char "-+")) (+str (+plus *digit)) (+char ".") (+str (+plus *digit))
(+opt (+seq (+char "e") (+opt (+char "-+")) (+str (+plus *digit))))))))
(def *operations (+char "+-*/&|^"))
(def *identifier (+str (+plus (+or *letter *operations))))
(def *function-or-variable (+map (comp #(OPERATIONS % (Variable (str %))) symbol) *identifier))
(declare *value)
(defn *seq [begin p end] (+seqn 1 (+char begin) (+plus (+seqn 0 *ws p)) *ws (+char end)))
(def *list (+map (fn [list] (apply (last list) (butlast list))) (*seq "(" (delay *value) ")")))
(def *value (+or *constant *function-or-variable *list))
(def parseObjectSuffix (+parser (+seqn 0 *ws *value *ws)))
|
56220
|
; @author: <NAME>
(load-file "object.clj")
(load-file "combinators.clj")
(def *all-chars (mapv char (range 0 128)))
(def *space (+char (apply str (filter #(Character/isWhitespace (char %)) *all-chars))))
(def *letter (+char (apply str (filter #(Character/isLetter (char %)) *all-chars))))
(def *digit (+char (apply str (filter #(Character/isDigit (char %)) *all-chars))))
(def *ws (+ignore (+star *space)))
(def *constant (+map (comp Constant read-string)
(+str (+seq (+opt (+char "-+")) (+str (+plus *digit)) (+char ".") (+str (+plus *digit))
(+opt (+seq (+char "e") (+opt (+char "-+")) (+str (+plus *digit))))))))
(def *operations (+char "+-*/&|^"))
(def *identifier (+str (+plus (+or *letter *operations))))
(def *function-or-variable (+map (comp #(OPERATIONS % (Variable (str %))) symbol) *identifier))
(declare *value)
(defn *seq [begin p end] (+seqn 1 (+char begin) (+plus (+seqn 0 *ws p)) *ws (+char end)))
(def *list (+map (fn [list] (apply (last list) (butlast list))) (*seq "(" (delay *value) ")")))
(def *value (+or *constant *function-or-variable *list))
(def parseObjectSuffix (+parser (+seqn 0 *ws *value *ws)))
| true |
; @author: PI:NAME:<NAME>END_PI
(load-file "object.clj")
(load-file "combinators.clj")
(def *all-chars (mapv char (range 0 128)))
(def *space (+char (apply str (filter #(Character/isWhitespace (char %)) *all-chars))))
(def *letter (+char (apply str (filter #(Character/isLetter (char %)) *all-chars))))
(def *digit (+char (apply str (filter #(Character/isDigit (char %)) *all-chars))))
(def *ws (+ignore (+star *space)))
(def *constant (+map (comp Constant read-string)
(+str (+seq (+opt (+char "-+")) (+str (+plus *digit)) (+char ".") (+str (+plus *digit))
(+opt (+seq (+char "e") (+opt (+char "-+")) (+str (+plus *digit))))))))
(def *operations (+char "+-*/&|^"))
(def *identifier (+str (+plus (+or *letter *operations))))
(def *function-or-variable (+map (comp #(OPERATIONS % (Variable (str %))) symbol) *identifier))
(declare *value)
(defn *seq [begin p end] (+seqn 1 (+char begin) (+plus (+seqn 0 *ws p)) *ws (+char end)))
(def *list (+map (fn [list] (apply (last list) (butlast list))) (*seq "(" (delay *value) ")")))
(def *value (+or *constant *function-or-variable *list))
(def parseObjectSuffix (+parser (+seqn 0 *ws *value *ws)))
|
[
{
"context": "ofile-id 123456, :institution-id 1000, :username \"[email protected]\", :default-user false}}\n resp (auth/arts",
"end": 317,
"score": 0.9999191164970398,
"start": 303,
"tag": "EMAIL",
"value": "[email protected]"
},
{
"context": ":profile-id nil, :institution-id 1000, :username \"[email protected]\", :default-user false}}\n resp (auth/arts",
"end": 564,
"score": 0.9999160766601562,
"start": 550,
"tag": "EMAIL",
"value": "[email protected]"
}
] |
test/artstor_metadata_service_os/auth_test.clj
|
ithaka/artstor-metadata-service-os
| 0 |
(ns artstor-metadata-service-os.auth-test
(:require [artstor-metadata-service-os.auth :as auth]
[clojure.test :refer :all]))
(deftest testing-auth
(testing "Test auth using valid artstor-user-info"
(let [req {:artstor-user-info {:profile-id 123456, :institution-id 1000, :username "[email protected]", :default-user false}}
resp (auth/artstor-user? req)]
(is (= true resp))))
(testing "Test auth using invalid artstor-user-info"
(let [req {:artstor-user-info {:profile-id nil, :institution-id 1000, :username "[email protected]", :default-user false}}
resp (auth/artstor-user? req)]
(is (= false resp))))
(testing "Test auth without artstor-user-info"
(let [req {:headers {:fromkress true}}
resp (auth/artstor-user? req)]
(is (= false resp)))))
|
41741
|
(ns artstor-metadata-service-os.auth-test
(:require [artstor-metadata-service-os.auth :as auth]
[clojure.test :refer :all]))
(deftest testing-auth
(testing "Test auth using valid artstor-user-info"
(let [req {:artstor-user-info {:profile-id 123456, :institution-id 1000, :username "<EMAIL>", :default-user false}}
resp (auth/artstor-user? req)]
(is (= true resp))))
(testing "Test auth using invalid artstor-user-info"
(let [req {:artstor-user-info {:profile-id nil, :institution-id 1000, :username "<EMAIL>", :default-user false}}
resp (auth/artstor-user? req)]
(is (= false resp))))
(testing "Test auth without artstor-user-info"
(let [req {:headers {:fromkress true}}
resp (auth/artstor-user? req)]
(is (= false resp)))))
| true |
(ns artstor-metadata-service-os.auth-test
(:require [artstor-metadata-service-os.auth :as auth]
[clojure.test :refer :all]))
(deftest testing-auth
(testing "Test auth using valid artstor-user-info"
(let [req {:artstor-user-info {:profile-id 123456, :institution-id 1000, :username "PI:EMAIL:<EMAIL>END_PI", :default-user false}}
resp (auth/artstor-user? req)]
(is (= true resp))))
(testing "Test auth using invalid artstor-user-info"
(let [req {:artstor-user-info {:profile-id nil, :institution-id 1000, :username "PI:EMAIL:<EMAIL>END_PI", :default-user false}}
resp (auth/artstor-user? req)]
(is (= false resp))))
(testing "Test auth without artstor-user-info"
(let [req {:headers {:fromkress true}}
resp (auth/artstor-user? req)]
(is (= false resp)))))
|
[
{
"context": "! *unchecked-math* :warn-on-boxed)\n(ns ^{:author \"John Alan McDonald\" :date \"2016-07-12\"\n :doc \"Something like a ",
"end": 105,
"score": 0.9998763799667358,
"start": 87,
"tag": "NAME",
"value": "John Alan McDonald"
}
] |
src/main/clojure/zana/collections/cube.clj
|
wahpenayo/zana
| 2 |
(set! *warn-on-reflection* true)
(set! *unchecked-math* :warn-on-boxed)
(ns ^{:author "John Alan McDonald" :date "2016-07-12"
:doc "Something like a data cube" }
zana.collections.cube
(:refer-clojure :exclude [range])
(:require [clojure.pprint :as pp]
[zana.commons.core :as cc]
[zana.collections.generic :as generic]
[zana.collections.maps :as maps]
[zana.collections.sets :as sets]))
;;------------------------------------------------------------------------------
(deftype Cube [;; functions corresponding to axes of the data cube.
;; TODO: ImmutableSet?
^java.util.Collection attributes
;; TODO: ImmutableMap?
^java.util.Map ranges
;; key is {attribute0 value0 attribute1 value1 ...}
;; with entry for every attribute
;; value is a collection or the records that match the key
;; TODO: ImmutableMap?
^java.util.Map data]
java.util.Map ;; TODO: ImmutableMap?
(containsKey [this k] (.containsKey data k))
(entrySet [this] (.entrySet data))
(get [this k]
(assert (= (.keySet ^java.util.Map k) (.keySet ^java.util.Map ranges))
(print-str "k:\n" (cc/pprint-str k)
"ranges:\n" (cc/pprint-str ranges)))
(.get data k))
(isEmpty [this] (.isEmpty data))
(keySet [this] (.keySet data))
(values [this] (.values data))
(size [this] (.size data))
Object ;; identity semantics
(hashCode [this] (System/identityHashCode this))
(equals [this that] (identical? this that))
(toString [this] (cc/pprint-str {:attributes attributes
:ranges ranges
:data (into {} data)})))
;;------------------------------------------------------------------------------
(defmethod print-method Cube [^Cube this ^java.io.Writer w]
(.write w (.toString this)))
;;------------------------------------------------------------------------------
(defn cube?
"Is this a data cube?"
[x] (instance? Cube x))
(defn- data ^java.util.Map [^Cube cube] (.data cube))
(defn attributes
"Return a collection of the attribute functions used to define the axes of
this cube."
^java.util.Collection [^Cube cube] (.attributes cube))
(defn- ranges ^java.util.Map [^Cube cube] (.ranges cube))
(defn has-attribute?
"Is <code>attribute</code> one of the functions used to define the axes of the
<code>cube</code>?"
[^Cube cube attribute]
(.containsKey (ranges cube) attribute))
(defn range
"Return a set of the distinct values of <code>attribute</code> over the
original data used to construct the cube. Throw an exception if
<code>attribute</code> isn't one of the cube attributes."
[^Cube cube attribute]
(assert (has-attribute? cube attribute)
(print-str "Not an attribute of the Cube:\n" cube))
(.get (ranges cube) attribute))
;;------------------------------------------------------------------------------
(defn- key-function [attributes]
(fn [datum] (into {} (map (fn [z] [z (z datum)]) attributes))))
;;------------------------------------------------------------------------------
(defn cube
"Construct a data cube, indexing the elements of <code>data</code> on the
values of the <code>attributes</code>.<br>
Each attribute function should have a small number of distinct values over
<code>data</code>.<br>
Basically a multi-key multi-map with special keys:
<code>{attribute0 value0 attribute1 value1 ...}</code>.
Data cubes are maps, <code>get</code> returns a list of the data elements
that match a full key (a map of attribute-value pairs for all the attributes).
<br>In addition, data cubes offer a [[slice]] operation,
which takes a partial key (a map of attribute-value pairs for a subset of the
attributes) and returns a sub-cube."
^zana.collections.cube.Cube [attributes data]
(assert (every? ifn? attributes))
(let [ranges (into {} (map (fn [f] [f (sets/distinct f data)]) attributes))
indexed (maps/group-by (key-function attributes) data)]
(Cube. attributes ranges indexed)))
;;------------------------------------------------------------------------------
;; Note: attributes are no longer functions of the elements of the cube
(defmethod generic/map
[clojure.lang.IFn Cube]
[^clojure.lang.IFn f ^Cube cube]
(let [d (generic/map f (data cube))]
(Cube. (attributes cube) (ranges cube) d)))
;;------------------------------------------------------------------------------
;; TODO: reset the ranges? remove single value attributes?
(defmethod generic/filter
[clojure.lang.IFn Cube]
[^clojure.lang.IFn f ^Cube cube]
(let [d (generic/filter f (data cube))]
(Cube. (attributes cube) (ranges cube) d)))
;;------------------------------------------------------------------------------
(defn- match? [partial-key whole-key]
(let [it (generic/iterator partial-key)]
(loop []
(if-not (generic/has-next? it)
true
(let [[k v] (generic/next-item it)]
(if-not (= v (generic/get whole-key k))
false
(recur)))))))
(defn slice
"Return a data cube with the same attributes and attributes ranges (the same
axes) but with only the data corresponding to the <code>partial-key</code>.
The resulting cube will have the same cells as the original, but the one that
don't match the <code>partial-key</code> will be
empty."
^zana.collections.cube.Cube [partial-key ^Cube cube]
(generic/filter (fn [k v] (match? partial-key k)) cube))
;;------------------------------------------------------------------------------
;; TODO: is this better than taking the union of all the data and re-cubing?
;; TODO: as a generic partial reduction
#_(defn project ^zana.collections.cube.Cube [^Cube cube onto]
)
;;------------------------------------------------------------------------------
|
30507
|
(set! *warn-on-reflection* true)
(set! *unchecked-math* :warn-on-boxed)
(ns ^{:author "<NAME>" :date "2016-07-12"
:doc "Something like a data cube" }
zana.collections.cube
(:refer-clojure :exclude [range])
(:require [clojure.pprint :as pp]
[zana.commons.core :as cc]
[zana.collections.generic :as generic]
[zana.collections.maps :as maps]
[zana.collections.sets :as sets]))
;;------------------------------------------------------------------------------
(deftype Cube [;; functions corresponding to axes of the data cube.
;; TODO: ImmutableSet?
^java.util.Collection attributes
;; TODO: ImmutableMap?
^java.util.Map ranges
;; key is {attribute0 value0 attribute1 value1 ...}
;; with entry for every attribute
;; value is a collection or the records that match the key
;; TODO: ImmutableMap?
^java.util.Map data]
java.util.Map ;; TODO: ImmutableMap?
(containsKey [this k] (.containsKey data k))
(entrySet [this] (.entrySet data))
(get [this k]
(assert (= (.keySet ^java.util.Map k) (.keySet ^java.util.Map ranges))
(print-str "k:\n" (cc/pprint-str k)
"ranges:\n" (cc/pprint-str ranges)))
(.get data k))
(isEmpty [this] (.isEmpty data))
(keySet [this] (.keySet data))
(values [this] (.values data))
(size [this] (.size data))
Object ;; identity semantics
(hashCode [this] (System/identityHashCode this))
(equals [this that] (identical? this that))
(toString [this] (cc/pprint-str {:attributes attributes
:ranges ranges
:data (into {} data)})))
;;------------------------------------------------------------------------------
(defmethod print-method Cube [^Cube this ^java.io.Writer w]
(.write w (.toString this)))
;;------------------------------------------------------------------------------
(defn cube?
"Is this a data cube?"
[x] (instance? Cube x))
(defn- data ^java.util.Map [^Cube cube] (.data cube))
(defn attributes
"Return a collection of the attribute functions used to define the axes of
this cube."
^java.util.Collection [^Cube cube] (.attributes cube))
(defn- ranges ^java.util.Map [^Cube cube] (.ranges cube))
(defn has-attribute?
"Is <code>attribute</code> one of the functions used to define the axes of the
<code>cube</code>?"
[^Cube cube attribute]
(.containsKey (ranges cube) attribute))
(defn range
"Return a set of the distinct values of <code>attribute</code> over the
original data used to construct the cube. Throw an exception if
<code>attribute</code> isn't one of the cube attributes."
[^Cube cube attribute]
(assert (has-attribute? cube attribute)
(print-str "Not an attribute of the Cube:\n" cube))
(.get (ranges cube) attribute))
;;------------------------------------------------------------------------------
(defn- key-function [attributes]
(fn [datum] (into {} (map (fn [z] [z (z datum)]) attributes))))
;;------------------------------------------------------------------------------
(defn cube
"Construct a data cube, indexing the elements of <code>data</code> on the
values of the <code>attributes</code>.<br>
Each attribute function should have a small number of distinct values over
<code>data</code>.<br>
Basically a multi-key multi-map with special keys:
<code>{attribute0 value0 attribute1 value1 ...}</code>.
Data cubes are maps, <code>get</code> returns a list of the data elements
that match a full key (a map of attribute-value pairs for all the attributes).
<br>In addition, data cubes offer a [[slice]] operation,
which takes a partial key (a map of attribute-value pairs for a subset of the
attributes) and returns a sub-cube."
^zana.collections.cube.Cube [attributes data]
(assert (every? ifn? attributes))
(let [ranges (into {} (map (fn [f] [f (sets/distinct f data)]) attributes))
indexed (maps/group-by (key-function attributes) data)]
(Cube. attributes ranges indexed)))
;;------------------------------------------------------------------------------
;; Note: attributes are no longer functions of the elements of the cube
(defmethod generic/map
[clojure.lang.IFn Cube]
[^clojure.lang.IFn f ^Cube cube]
(let [d (generic/map f (data cube))]
(Cube. (attributes cube) (ranges cube) d)))
;;------------------------------------------------------------------------------
;; TODO: reset the ranges? remove single value attributes?
(defmethod generic/filter
[clojure.lang.IFn Cube]
[^clojure.lang.IFn f ^Cube cube]
(let [d (generic/filter f (data cube))]
(Cube. (attributes cube) (ranges cube) d)))
;;------------------------------------------------------------------------------
(defn- match? [partial-key whole-key]
(let [it (generic/iterator partial-key)]
(loop []
(if-not (generic/has-next? it)
true
(let [[k v] (generic/next-item it)]
(if-not (= v (generic/get whole-key k))
false
(recur)))))))
(defn slice
"Return a data cube with the same attributes and attributes ranges (the same
axes) but with only the data corresponding to the <code>partial-key</code>.
The resulting cube will have the same cells as the original, but the one that
don't match the <code>partial-key</code> will be
empty."
^zana.collections.cube.Cube [partial-key ^Cube cube]
(generic/filter (fn [k v] (match? partial-key k)) cube))
;;------------------------------------------------------------------------------
;; TODO: is this better than taking the union of all the data and re-cubing?
;; TODO: as a generic partial reduction
#_(defn project ^zana.collections.cube.Cube [^Cube cube onto]
)
;;------------------------------------------------------------------------------
| true |
(set! *warn-on-reflection* true)
(set! *unchecked-math* :warn-on-boxed)
(ns ^{:author "PI:NAME:<NAME>END_PI" :date "2016-07-12"
:doc "Something like a data cube" }
zana.collections.cube
(:refer-clojure :exclude [range])
(:require [clojure.pprint :as pp]
[zana.commons.core :as cc]
[zana.collections.generic :as generic]
[zana.collections.maps :as maps]
[zana.collections.sets :as sets]))
;;------------------------------------------------------------------------------
(deftype Cube [;; functions corresponding to axes of the data cube.
;; TODO: ImmutableSet?
^java.util.Collection attributes
;; TODO: ImmutableMap?
^java.util.Map ranges
;; key is {attribute0 value0 attribute1 value1 ...}
;; with entry for every attribute
;; value is a collection or the records that match the key
;; TODO: ImmutableMap?
^java.util.Map data]
java.util.Map ;; TODO: ImmutableMap?
(containsKey [this k] (.containsKey data k))
(entrySet [this] (.entrySet data))
(get [this k]
(assert (= (.keySet ^java.util.Map k) (.keySet ^java.util.Map ranges))
(print-str "k:\n" (cc/pprint-str k)
"ranges:\n" (cc/pprint-str ranges)))
(.get data k))
(isEmpty [this] (.isEmpty data))
(keySet [this] (.keySet data))
(values [this] (.values data))
(size [this] (.size data))
Object ;; identity semantics
(hashCode [this] (System/identityHashCode this))
(equals [this that] (identical? this that))
(toString [this] (cc/pprint-str {:attributes attributes
:ranges ranges
:data (into {} data)})))
;;------------------------------------------------------------------------------
(defmethod print-method Cube [^Cube this ^java.io.Writer w]
(.write w (.toString this)))
;;------------------------------------------------------------------------------
(defn cube?
"Is this a data cube?"
[x] (instance? Cube x))
(defn- data ^java.util.Map [^Cube cube] (.data cube))
(defn attributes
"Return a collection of the attribute functions used to define the axes of
this cube."
^java.util.Collection [^Cube cube] (.attributes cube))
(defn- ranges ^java.util.Map [^Cube cube] (.ranges cube))
(defn has-attribute?
"Is <code>attribute</code> one of the functions used to define the axes of the
<code>cube</code>?"
[^Cube cube attribute]
(.containsKey (ranges cube) attribute))
(defn range
"Return a set of the distinct values of <code>attribute</code> over the
original data used to construct the cube. Throw an exception if
<code>attribute</code> isn't one of the cube attributes."
[^Cube cube attribute]
(assert (has-attribute? cube attribute)
(print-str "Not an attribute of the Cube:\n" cube))
(.get (ranges cube) attribute))
;;------------------------------------------------------------------------------
(defn- key-function [attributes]
(fn [datum] (into {} (map (fn [z] [z (z datum)]) attributes))))
;;------------------------------------------------------------------------------
(defn cube
"Construct a data cube, indexing the elements of <code>data</code> on the
values of the <code>attributes</code>.<br>
Each attribute function should have a small number of distinct values over
<code>data</code>.<br>
Basically a multi-key multi-map with special keys:
<code>{attribute0 value0 attribute1 value1 ...}</code>.
Data cubes are maps, <code>get</code> returns a list of the data elements
that match a full key (a map of attribute-value pairs for all the attributes).
<br>In addition, data cubes offer a [[slice]] operation,
which takes a partial key (a map of attribute-value pairs for a subset of the
attributes) and returns a sub-cube."
^zana.collections.cube.Cube [attributes data]
(assert (every? ifn? attributes))
(let [ranges (into {} (map (fn [f] [f (sets/distinct f data)]) attributes))
indexed (maps/group-by (key-function attributes) data)]
(Cube. attributes ranges indexed)))
;;------------------------------------------------------------------------------
;; Note: attributes are no longer functions of the elements of the cube
(defmethod generic/map
[clojure.lang.IFn Cube]
[^clojure.lang.IFn f ^Cube cube]
(let [d (generic/map f (data cube))]
(Cube. (attributes cube) (ranges cube) d)))
;;------------------------------------------------------------------------------
;; TODO: reset the ranges? remove single value attributes?
(defmethod generic/filter
[clojure.lang.IFn Cube]
[^clojure.lang.IFn f ^Cube cube]
(let [d (generic/filter f (data cube))]
(Cube. (attributes cube) (ranges cube) d)))
;;------------------------------------------------------------------------------
(defn- match? [partial-key whole-key]
(let [it (generic/iterator partial-key)]
(loop []
(if-not (generic/has-next? it)
true
(let [[k v] (generic/next-item it)]
(if-not (= v (generic/get whole-key k))
false
(recur)))))))
(defn slice
"Return a data cube with the same attributes and attributes ranges (the same
axes) but with only the data corresponding to the <code>partial-key</code>.
The resulting cube will have the same cells as the original, but the one that
don't match the <code>partial-key</code> will be
empty."
^zana.collections.cube.Cube [partial-key ^Cube cube]
(generic/filter (fn [k v] (match? partial-key k)) cube))
;;------------------------------------------------------------------------------
;; TODO: is this better than taking the union of all the data and re-cubing?
;; TODO: as a generic partial reduction
#_(defn project ^zana.collections.cube.Cube [^Cube cube onto]
)
;;------------------------------------------------------------------------------
|
[
{
"context": "(defn main! []\n (.log js/console \"Plang App v0.1\\nPavel Metelitsyn 2020\\[email protected]\")\n (rd/render [app-roo",
"end": 199,
"score": 0.9998365044593811,
"start": 182,
"tag": "NAME",
"value": "nPavel Metelitsyn"
},
{
"context": "log js/console \"Plang App v0.1\\nPavel Metelitsyn 2020\\[email protected]\")\n (rd/render [app-root] ",
"end": 202,
"score": 0.7411695122718811,
"start": 201,
"tag": "EMAIL",
"value": "0"
},
{
"context": "js/console \"Plang App v0.1\\nPavel Metelitsyn 2020\\[email protected]\")\n (rd/render [app-root] (.getElementById js/doc",
"end": 225,
"score": 0.9998733997344971,
"start": 205,
"tag": "EMAIL",
"value": "[email protected]"
}
] |
plang/src/plang/index.cljs
|
mtlsn/PLangTool
| 1 |
(ns plang.index
(:require [reagent.dom :as rd]
[plang.components.root :refer [app-root]]))
(enable-console-print!)
(defn main! []
(.log js/console "Plang App v0.1\nPavel Metelitsyn 2020\[email protected]")
(rd/render [app-root] (.getElementById js/document "app")))
|
66496
|
(ns plang.index
(:require [reagent.dom :as rd]
[plang.components.root :refer [app-root]]))
(enable-console-print!)
(defn main! []
(.log js/console "Plang App v0.1\<NAME> 2<EMAIL>20\<EMAIL>")
(rd/render [app-root] (.getElementById js/document "app")))
| true |
(ns plang.index
(:require [reagent.dom :as rd]
[plang.components.root :refer [app-root]]))
(enable-console-print!)
(defn main! []
(.log js/console "Plang App v0.1\PI:NAME:<NAME>END_PI 2PI:EMAIL:<EMAIL>END_PI20\PI:EMAIL:<EMAIL>END_PI")
(rd/render [app-root] (.getElementById js/document "app")))
|
[
{
"context": ";; Copyright 2014 Pellucid Analytics\n;;\n;; Licensed under the Apache License, Version ",
"end": 36,
"score": 0.9994433522224426,
"start": 18,
"tag": "NAME",
"value": "Pellucid Analytics"
}
] |
src/datomic_graphite_reporter/core.clj
|
dwhjames/datomic-graphite-reporter
| 1 |
;; Copyright 2014 Pellucid Analytics
;;
;; Licensed under the Apache License, Version 2.0 (the "License");
;; you may not use this file except in compliance with the License.
;; You may obtain a copy of the License at
;;
;; http://www.apache.org/licenses/LICENSE-2.0
;;
;; Unless required by applicable law or agreed to in writing, software
;; distributed under the License is distributed on an "AS IS" BASIS,
;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
;; See the License for the specific language governing permissions and
;; limitations under the License.
(ns datomic-graphite-reporter.core
(:require [clojure.string :as str])
(:import [java.net DatagramSocket InetSocketAddress]
java.nio.ByteBuffer
java.nio.channels.DatagramChannel
java.nio.charset.Charset
[org.slf4j Logger LoggerFactory]))
(def ^Logger logger
(LoggerFactory/getLogger "datomic.graphite.reporter"))
(def ^String graphite-host
(or
(System/getProperty "graphite.host")
(System/getenv "GRAPHITE_HOST")))
(def ^String graphite-port
(or
(System/getProperty "graphite.port")
(System/getenv "GRAPHITE_PORT")))
(def ^String graphite-prefix
(or
(System/getProperty "graphite.prefix")
(System/getenv "GRAPHITE_PREFIX")))
(def ^Charset ISO-Latin-1-Charset
(Charset/forName "ISO-8859-1"))
(def ^InetSocketAddress graphite-address
(try
(InetSocketAddress. graphite-host
(Integer/parseInt graphite-port))
(catch Exception e
(when (.isErrorEnabled logger)
(.error logger
"Failed to create socket address to Graphite!"
e))
(throw e))))
(defn- ^ByteBuffer build-graphite-metric-data
[name value timestamp]
(-> (str name
\space
value
\space
timestamp
\newline)
(.getBytes ISO-Latin-1-Charset)
ByteBuffer/wrap))
(def ^DatagramChannel graphite-channel nil)
;; Inspired by com.codahale.metrics.graphite.GraphiteUDP
(defn- ^DatagramChannel get-graphite-channel
[]
(if (and graphite-channel
(not (-> graphite-channel
(.socket)
(.isClosed))))
graphite-channel
(do
(when graphite-channel
(.close graphite-channel))
(let [chan
(try
(-> (DatagramChannel/open)
(.connect graphite-address))
(catch Exception e
(when (.isErrorEnabled logger)
(.error logger
"Failed to make a UDP connection to Graphite!"
e)
(throw e))))]
(when (.isInfoEnabled logger)
(.info logger (str "Opened connection to Graphite at "
graphite-address)))
(alter-var-root #'graphite-channel
(constantly chan))
chan))))
(defn- ^String unix-timestamp
[]
(str (quot (System/currentTimeMillis) 1000)))
(defn- send-metric-data
[^DatagramChannel chan metric-name metric-value timestamp]
(try
(.write chan
(build-graphite-metric-data metric-name
metric-value
timestamp))
(when (.isTraceEnabled logger)
(.trace logger "Wrote metric data to Graphite."))
(catch Exception e
(when (.isErrorEnabled logger)
(.error logger
"Failed to write data to the Graphite channel!"
e)
(throw e)))))
(defn- ^String mk-metric-name
[& rest]
(str/join \.
(if graphite-prefix
(cons graphite-prefix rest)
rest)))
(defn report-metrics
[metrics]
(let [timestamp (unix-timestamp)
chan (get-graphite-channel)]
(doseq [[metric-name metric-value] metrics]
(if (map? metric-value)
(doseq [[sub-metric-name sub-metric-value] metric-value]
(send-metric-data chan
(mk-metric-name (name metric-name) (name sub-metric-name))
sub-metric-value
timestamp))
(send-metric-data chan
(mk-metric-name (name metric-name))
metric-value
timestamp)))))
|
50323
|
;; Copyright 2014 <NAME>
;;
;; Licensed under the Apache License, Version 2.0 (the "License");
;; you may not use this file except in compliance with the License.
;; You may obtain a copy of the License at
;;
;; http://www.apache.org/licenses/LICENSE-2.0
;;
;; Unless required by applicable law or agreed to in writing, software
;; distributed under the License is distributed on an "AS IS" BASIS,
;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
;; See the License for the specific language governing permissions and
;; limitations under the License.
(ns datomic-graphite-reporter.core
(:require [clojure.string :as str])
(:import [java.net DatagramSocket InetSocketAddress]
java.nio.ByteBuffer
java.nio.channels.DatagramChannel
java.nio.charset.Charset
[org.slf4j Logger LoggerFactory]))
(def ^Logger logger
(LoggerFactory/getLogger "datomic.graphite.reporter"))
(def ^String graphite-host
(or
(System/getProperty "graphite.host")
(System/getenv "GRAPHITE_HOST")))
(def ^String graphite-port
(or
(System/getProperty "graphite.port")
(System/getenv "GRAPHITE_PORT")))
(def ^String graphite-prefix
(or
(System/getProperty "graphite.prefix")
(System/getenv "GRAPHITE_PREFIX")))
(def ^Charset ISO-Latin-1-Charset
(Charset/forName "ISO-8859-1"))
(def ^InetSocketAddress graphite-address
(try
(InetSocketAddress. graphite-host
(Integer/parseInt graphite-port))
(catch Exception e
(when (.isErrorEnabled logger)
(.error logger
"Failed to create socket address to Graphite!"
e))
(throw e))))
(defn- ^ByteBuffer build-graphite-metric-data
[name value timestamp]
(-> (str name
\space
value
\space
timestamp
\newline)
(.getBytes ISO-Latin-1-Charset)
ByteBuffer/wrap))
(def ^DatagramChannel graphite-channel nil)
;; Inspired by com.codahale.metrics.graphite.GraphiteUDP
(defn- ^DatagramChannel get-graphite-channel
[]
(if (and graphite-channel
(not (-> graphite-channel
(.socket)
(.isClosed))))
graphite-channel
(do
(when graphite-channel
(.close graphite-channel))
(let [chan
(try
(-> (DatagramChannel/open)
(.connect graphite-address))
(catch Exception e
(when (.isErrorEnabled logger)
(.error logger
"Failed to make a UDP connection to Graphite!"
e)
(throw e))))]
(when (.isInfoEnabled logger)
(.info logger (str "Opened connection to Graphite at "
graphite-address)))
(alter-var-root #'graphite-channel
(constantly chan))
chan))))
(defn- ^String unix-timestamp
[]
(str (quot (System/currentTimeMillis) 1000)))
(defn- send-metric-data
[^DatagramChannel chan metric-name metric-value timestamp]
(try
(.write chan
(build-graphite-metric-data metric-name
metric-value
timestamp))
(when (.isTraceEnabled logger)
(.trace logger "Wrote metric data to Graphite."))
(catch Exception e
(when (.isErrorEnabled logger)
(.error logger
"Failed to write data to the Graphite channel!"
e)
(throw e)))))
(defn- ^String mk-metric-name
[& rest]
(str/join \.
(if graphite-prefix
(cons graphite-prefix rest)
rest)))
(defn report-metrics
[metrics]
(let [timestamp (unix-timestamp)
chan (get-graphite-channel)]
(doseq [[metric-name metric-value] metrics]
(if (map? metric-value)
(doseq [[sub-metric-name sub-metric-value] metric-value]
(send-metric-data chan
(mk-metric-name (name metric-name) (name sub-metric-name))
sub-metric-value
timestamp))
(send-metric-data chan
(mk-metric-name (name metric-name))
metric-value
timestamp)))))
| true |
;; Copyright 2014 PI:NAME:<NAME>END_PI
;;
;; Licensed under the Apache License, Version 2.0 (the "License");
;; you may not use this file except in compliance with the License.
;; You may obtain a copy of the License at
;;
;; http://www.apache.org/licenses/LICENSE-2.0
;;
;; Unless required by applicable law or agreed to in writing, software
;; distributed under the License is distributed on an "AS IS" BASIS,
;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
;; See the License for the specific language governing permissions and
;; limitations under the License.
(ns datomic-graphite-reporter.core
(:require [clojure.string :as str])
(:import [java.net DatagramSocket InetSocketAddress]
java.nio.ByteBuffer
java.nio.channels.DatagramChannel
java.nio.charset.Charset
[org.slf4j Logger LoggerFactory]))
(def ^Logger logger
(LoggerFactory/getLogger "datomic.graphite.reporter"))
(def ^String graphite-host
(or
(System/getProperty "graphite.host")
(System/getenv "GRAPHITE_HOST")))
(def ^String graphite-port
(or
(System/getProperty "graphite.port")
(System/getenv "GRAPHITE_PORT")))
(def ^String graphite-prefix
(or
(System/getProperty "graphite.prefix")
(System/getenv "GRAPHITE_PREFIX")))
(def ^Charset ISO-Latin-1-Charset
(Charset/forName "ISO-8859-1"))
(def ^InetSocketAddress graphite-address
(try
(InetSocketAddress. graphite-host
(Integer/parseInt graphite-port))
(catch Exception e
(when (.isErrorEnabled logger)
(.error logger
"Failed to create socket address to Graphite!"
e))
(throw e))))
(defn- ^ByteBuffer build-graphite-metric-data
[name value timestamp]
(-> (str name
\space
value
\space
timestamp
\newline)
(.getBytes ISO-Latin-1-Charset)
ByteBuffer/wrap))
(def ^DatagramChannel graphite-channel nil)
;; Inspired by com.codahale.metrics.graphite.GraphiteUDP
(defn- ^DatagramChannel get-graphite-channel
[]
(if (and graphite-channel
(not (-> graphite-channel
(.socket)
(.isClosed))))
graphite-channel
(do
(when graphite-channel
(.close graphite-channel))
(let [chan
(try
(-> (DatagramChannel/open)
(.connect graphite-address))
(catch Exception e
(when (.isErrorEnabled logger)
(.error logger
"Failed to make a UDP connection to Graphite!"
e)
(throw e))))]
(when (.isInfoEnabled logger)
(.info logger (str "Opened connection to Graphite at "
graphite-address)))
(alter-var-root #'graphite-channel
(constantly chan))
chan))))
(defn- ^String unix-timestamp
[]
(str (quot (System/currentTimeMillis) 1000)))
(defn- send-metric-data
[^DatagramChannel chan metric-name metric-value timestamp]
(try
(.write chan
(build-graphite-metric-data metric-name
metric-value
timestamp))
(when (.isTraceEnabled logger)
(.trace logger "Wrote metric data to Graphite."))
(catch Exception e
(when (.isErrorEnabled logger)
(.error logger
"Failed to write data to the Graphite channel!"
e)
(throw e)))))
(defn- ^String mk-metric-name
[& rest]
(str/join \.
(if graphite-prefix
(cons graphite-prefix rest)
rest)))
(defn report-metrics
[metrics]
(let [timestamp (unix-timestamp)
chan (get-graphite-channel)]
(doseq [[metric-name metric-value] metrics]
(if (map? metric-value)
(doseq [[sub-metric-name sub-metric-value] metric-value]
(send-metric-data chan
(mk-metric-name (name metric-name) (name sub-metric-name))
sub-metric-value
timestamp))
(send-metric-data chan
(mk-metric-name (name metric-name))
metric-value
timestamp)))))
|
[
{
"context": " (let [request {:params (assoc user1 :password \"wrongpassword\")}\n response (handlers/login request)]\n ",
"end": 2307,
"score": 0.9993911981582642,
"start": 2294,
"tag": "PASSWORD",
"value": "wrongpassword"
},
{
"context": "\n (let [request {:params (assoc user1 :email \"[email protected]\")}\n response (handlers/login request)]\n ",
"end": 2526,
"score": 0.9999234080314636,
"start": 2508,
"tag": "EMAIL",
"value": "[email protected]"
},
{
"context": "il (:email user1)\n password (:password user1)\n message (models/get-token email passw",
"end": 2783,
"score": 0.9891852140426636,
"start": 2778,
"tag": "PASSWORD",
"value": "user1"
}
] |
test/clj/villagebook/user/handlers_test.clj
|
sathia27/villagebook
| 0 |
(ns villagebook.user.handlers-test
(:require [villagebook.fixtures :refer [setup-once wrap-transaction]]
[villagebook.factory :refer [user1 user2]]
[villagebook.user.db :as db]
[villagebook.user.models :as models]
[villagebook.user.handlers :as handlers]
[clojure.test :refer :all]))
(use-fixtures :once setup-once)
(use-fixtures :each wrap-transaction)
(deftest signup-tests
(testing "Signing up"
(let [request {:params user1}
response (handlers/signup request)]
(is (= 201 (:status response)))
(is (= (:email user1) (get-in response [:body :email])))
(is (get-in response [:body :token]))
(is (get-in response [:cookies "token" :value]))
(is (not (empty? (db/retrieve-by-email (get-in response [:body :email])))))))
(testing "Signing up as user 2 (with optional details)"
(let [request {:params user2}
response (handlers/signup request)]
(is (= 201 (:status response)))
(is (= (:email user2) (get-in response [:body :email])))
(is (get-in response [:body :token]))
(is (get-in response [:cookies "token" :value]))
(is (not (empty? (db/retrieve-by-email (get-in response [:body :email]))))))))
(deftest invalid-signup-tests
(testing "Signing up with invalid request"
(let [request {}
response (handlers/signup request)]
(is (= 400 (:status response))))))
(deftest login-tests
(testing "Logging in as user 1"
(let [user (db/create! user1)
request {:params user1}
response (handlers/login request)]
(is (= 200 (:status response)))
(is (not (nil? (get-in response [:body :token]))))
(is (get-in response [:cookies "token" :value])))))
(deftest invalid-login-tests
(let [user (db/create! user1)]
(testing "Logging in with invalid request"
(let [request {}
response (handlers/login request)]
(is (= 400 (:status response)))))
(testing "Logging in with invalid request - missing password"
(let [request {:params (dissoc user1 :password)}
response (handlers/login request)]
(is (= 400 (:status response)))))
(testing "Logging in as user 1 with incorrect password"
(let [request {:params (assoc user1 :password "wrongpassword")}
response (handlers/login request)]
(is (= 401 (:status response)))))
(testing "Logging in with invalid email (does not exist)"
(let [request {:params (assoc user1 :email "[email protected]")}
response (handlers/login request)]
(is (= 401 (:status response)))))))
(deftest retrieve-tests
(testing "Retrieving a user."
(let [user (db/create! user1)
email (:email user1)
password (:password user1)
message (models/get-token email password)
request {:identity {:id (:id user)}}
response (handlers/retrieve request)]
(is (= 200 (:status response))))))
|
57257
|
(ns villagebook.user.handlers-test
(:require [villagebook.fixtures :refer [setup-once wrap-transaction]]
[villagebook.factory :refer [user1 user2]]
[villagebook.user.db :as db]
[villagebook.user.models :as models]
[villagebook.user.handlers :as handlers]
[clojure.test :refer :all]))
(use-fixtures :once setup-once)
(use-fixtures :each wrap-transaction)
(deftest signup-tests
(testing "Signing up"
(let [request {:params user1}
response (handlers/signup request)]
(is (= 201 (:status response)))
(is (= (:email user1) (get-in response [:body :email])))
(is (get-in response [:body :token]))
(is (get-in response [:cookies "token" :value]))
(is (not (empty? (db/retrieve-by-email (get-in response [:body :email])))))))
(testing "Signing up as user 2 (with optional details)"
(let [request {:params user2}
response (handlers/signup request)]
(is (= 201 (:status response)))
(is (= (:email user2) (get-in response [:body :email])))
(is (get-in response [:body :token]))
(is (get-in response [:cookies "token" :value]))
(is (not (empty? (db/retrieve-by-email (get-in response [:body :email]))))))))
(deftest invalid-signup-tests
(testing "Signing up with invalid request"
(let [request {}
response (handlers/signup request)]
(is (= 400 (:status response))))))
(deftest login-tests
(testing "Logging in as user 1"
(let [user (db/create! user1)
request {:params user1}
response (handlers/login request)]
(is (= 200 (:status response)))
(is (not (nil? (get-in response [:body :token]))))
(is (get-in response [:cookies "token" :value])))))
(deftest invalid-login-tests
(let [user (db/create! user1)]
(testing "Logging in with invalid request"
(let [request {}
response (handlers/login request)]
(is (= 400 (:status response)))))
(testing "Logging in with invalid request - missing password"
(let [request {:params (dissoc user1 :password)}
response (handlers/login request)]
(is (= 400 (:status response)))))
(testing "Logging in as user 1 with incorrect password"
(let [request {:params (assoc user1 :password "<PASSWORD>")}
response (handlers/login request)]
(is (= 401 (:status response)))))
(testing "Logging in with invalid email (does not exist)"
(let [request {:params (assoc user1 :email "<EMAIL>")}
response (handlers/login request)]
(is (= 401 (:status response)))))))
(deftest retrieve-tests
(testing "Retrieving a user."
(let [user (db/create! user1)
email (:email user1)
password (:password <PASSWORD>)
message (models/get-token email password)
request {:identity {:id (:id user)}}
response (handlers/retrieve request)]
(is (= 200 (:status response))))))
| true |
(ns villagebook.user.handlers-test
(:require [villagebook.fixtures :refer [setup-once wrap-transaction]]
[villagebook.factory :refer [user1 user2]]
[villagebook.user.db :as db]
[villagebook.user.models :as models]
[villagebook.user.handlers :as handlers]
[clojure.test :refer :all]))
(use-fixtures :once setup-once)
(use-fixtures :each wrap-transaction)
(deftest signup-tests
(testing "Signing up"
(let [request {:params user1}
response (handlers/signup request)]
(is (= 201 (:status response)))
(is (= (:email user1) (get-in response [:body :email])))
(is (get-in response [:body :token]))
(is (get-in response [:cookies "token" :value]))
(is (not (empty? (db/retrieve-by-email (get-in response [:body :email])))))))
(testing "Signing up as user 2 (with optional details)"
(let [request {:params user2}
response (handlers/signup request)]
(is (= 201 (:status response)))
(is (= (:email user2) (get-in response [:body :email])))
(is (get-in response [:body :token]))
(is (get-in response [:cookies "token" :value]))
(is (not (empty? (db/retrieve-by-email (get-in response [:body :email]))))))))
(deftest invalid-signup-tests
(testing "Signing up with invalid request"
(let [request {}
response (handlers/signup request)]
(is (= 400 (:status response))))))
(deftest login-tests
(testing "Logging in as user 1"
(let [user (db/create! user1)
request {:params user1}
response (handlers/login request)]
(is (= 200 (:status response)))
(is (not (nil? (get-in response [:body :token]))))
(is (get-in response [:cookies "token" :value])))))
(deftest invalid-login-tests
(let [user (db/create! user1)]
(testing "Logging in with invalid request"
(let [request {}
response (handlers/login request)]
(is (= 400 (:status response)))))
(testing "Logging in with invalid request - missing password"
(let [request {:params (dissoc user1 :password)}
response (handlers/login request)]
(is (= 400 (:status response)))))
(testing "Logging in as user 1 with incorrect password"
(let [request {:params (assoc user1 :password "PI:PASSWORD:<PASSWORD>END_PI")}
response (handlers/login request)]
(is (= 401 (:status response)))))
(testing "Logging in with invalid email (does not exist)"
(let [request {:params (assoc user1 :email "PI:EMAIL:<EMAIL>END_PI")}
response (handlers/login request)]
(is (= 401 (:status response)))))))
(deftest retrieve-tests
(testing "Retrieving a user."
(let [user (db/create! user1)
email (:email user1)
password (:password PI:PASSWORD:<PASSWORD>END_PI)
message (models/get-token email password)
request {:identity {:id (:id user)}}
response (handlers/retrieve request)]
(is (= 200 (:status response))))))
|
[
{
"context": "d\"))\n :termsOfService \"https://github.com/CrossRef/rest-api-doc\"\n :contact {:name \"Crossref\"\n ",
"end": 788,
"score": 0.7597275376319885,
"start": 785,
"tag": "USERNAME",
"value": "Ref"
},
{
"context": "b.com/CrossRef/rest-api-doc\"\n :contact {:name \"Crossref\"\n :email \"[email protected]\"\n ",
"end": 832,
"score": 0.9885364770889282,
"start": 824,
"tag": "NAME",
"value": "Crossref"
},
{
"context": " :contact {:name \"Crossref\"\n :email \"[email protected]\"\n :url \"https://crossref.org\"}}})\n\n(",
"end": 876,
"score": 0.9999186992645264,
"start": 856,
"tag": "EMAIL",
"value": "[email protected]"
},
{
"context": "nders in the [Funder Registry](https://github.com/Crossref/open-funder-registry).\"\n :parameters (me",
"end": 3028,
"score": 0.8294747471809387,
"start": 3020,
"tag": "USERNAME",
"value": "Crossref"
}
] |
src/cayenne/api/v1/doc.clj
|
CrossRef/cayenne
| 11 |
(ns cayenne.api.v1.doc
(:require [cayenne.api.v1.schema :as sc]
[cayenne.api.v1.filter :refer [std-filters compound-fields]]
[cayenne.api.v1.fields :refer [work-fields]]
[cayenne.api.v1.facet :refer [std-facets]]
[cayenne.api.v1.query :refer [select-fields sort-fields]]
[compojure.core :refer [defroutes GET]]
[clojure.data.json :as json]
[clojure.java.io :refer [resource]]
[ring.swagger.swagger-ui :refer [swagger-ui]]
[ring.swagger.swagger2 :as rs]
[schema.core :as s]))
(def info
{:info
{:version "0.1"
:title "Crossref Unified Resource API"
:description (slurp (resource "swagger/description.md"))
:termsOfService "https://github.com/CrossRef/rest-api-doc"
:contact {:name "Crossref"
:email "[email protected]"
:url "https://crossref.org"}}})
(def tags
{:tags
[{:name "Funder"
:description "Endpoints that expose funder related data"}
{:name "Journal"
:description "Endpoints that expose journal related data"}
{:name "Work"
:description "Endpoints that expose works related data"}
{:name "Prefix"
:description "Endpoints that expose prefix related data"}
{:name "Member"
:description "Endpoints that expose member related data"}
{:name "Type"
:description "Endpoints that expose type related data"}]})
(defn- fields [compound-fields field]
(let [c-fields (get compound-fields (keyword field))
field-prefix (str "\n + " field ".")]
[field
(when c-fields
(str field "." (clojure.string/join field-prefix c-fields)))]))
(defn- fields-description
([title filters]
(fields-description title filters {}))
([title filters compound-fields]
(->> (map (comp (partial fields compound-fields) key) filters)
(map #(str "\n+ " (first %) (when (second %) (str "\n + " (second %)))))
clojure.string/join
(str title))))
(defn- filters-description []
(fields-description
(slurp (resource "swagger/filters-description.md"))
std-filters
compound-fields))
(defn- facets-description []
(fields-description
(slurp (resource "swagger/facets-description.md"))
(reduce merge (map (comp #(assoc {} % []) :external-field val) std-facets))))
(defn- selects-description []
(fields-description
(slurp (resource "swagger/selects-description.md"))
select-fields))
(defn- sorts-description []
(fields-description
(slurp (resource "swagger/sorts-description.md"))
sort-fields))
(defn- query-description []
(fields-description
(slurp (resource "swagger/query-description.md"))
work-fields))
(defn- works-description [title]
(str
title
(filters-description)
(query-description)
(facets-description)
(selects-description)
(sorts-description)))
(def funders
{"/funders"
{:get {:description "Returns a list of all funders in the [Funder Registry](https://github.com/Crossref/open-funder-registry)."
:parameters (merge-with merge sc/FundersFilter sc/QueryParams)
:responses {200 {:schema sc/FundersMessage
:description "A list of funders."}}
:tags ["Funder"]}}
"/funders/:id"
{:get {:description "Returns metadata for specified funder **and** its suborganizations, as an example use id 501100006004"
:parameters {:path {:id sc/FunderId}}
:responses {200 {:schema sc/FunderMessage
:description "The funder identified by {id}."}
404 {:description "The funder identified by {id} does not exist."}}
:tags ["Funder"]}}
"/funders/:id/works"
{:get {:description (works-description "Returns list of works associated with the specified {id}.")
:parameters (merge-with merge sc/WorksQuery sc/QueryParams)
:responses {200 {:schema sc/WorksMessage
:description "A list of works"}}
:tags ["Funder"]}}})
(def journals
{"/journals"
{:get {:description "Return a list of journals in the Crossref database."
:parameters sc/QueryParams
:responses {200 {:schema sc/JournalsMessage
:description "A list of journals"}}
:tags ["Journal"]}}
"/journals/:issn"
{:get {:description "Returns information about a journal with the given ISSN, as an example use ISSN 03064530"
:parameters {:path {:id sc/JournalIssn}}
:responses {200 {:schema sc/JournalMessage
:description "The journal identified by {issn}."}
404 {:description "The journal identified by {issn} does not exist."}}
:tags ["Journal"]}}
"/journals/:issn/works"
{:get {:description (works-description "Returns a list of works in the journal identified by {issn}.")
:parameters (merge-with merge sc/WorksQuery sc/QueryParams)
:responses {200 {:schema sc/WorksMessage
:description "A list of works"}}
:tags ["Journal"]}}})
(def works
{"/works"
{:get {:description (works-description "Returns a list of all works (journal articles, conference proceedings, books, components, etc), 20 per page.")
:parameters (merge-with merge sc/WorksQuery sc/QueryParams)
:responses {200 {:schema sc/WorksMessage
:description "A list of works"}}
:tags ["Work"]}}
"/works/:doi"
{:get {:description "Returns metadata for the specified Crossref DOI, as an example use DOI 10.5555/12345678"
:parameters {:path {:doi sc/WorkDoi}}
:responses {200 {:schema sc/WorkMessage
:description "The work identified by {doi}."}
404 {:description "The work identified by {doi} does not exist."}}
:tags ["Work"]}}
"/works/:doi/agency"
{:get {:description "Gets the agency associated with a specific work by it's DOI, as an example use DOI 10.5555/12345678"
:parameters {:path {:doi sc/WorkDoi}}
:responses {200 {:schema sc/AgencyMessage
:description "The agency associated with work identified by {doi}."}
404 {:description "The work identified by {doi} does not exist."}}
:tags ["Work"]}}
"/works/:doi/quality"
{:get {:description "Gets the list of quality standards for work by it's DOI, as an example use DOI 10.5555/12345678"
:parameters {:path {:doi sc/WorkDoi}}
:responses {200 {:schema sc/QualityMessage
:description "The quality standards associated with work identified by {doi}."}
404 {:description "The work identified by {doi} does not exist."}}
:tags ["Work"]}}})
(def prefixes
{"/prefixes/:prefix"
{:get {:description "Returns metadata for the DOI owner prefix, as an example use prefix 10.1016"
:parameters {:path {:prefix s/Str}}
:responses {200 {:schema sc/PrefixMessage
:description "The prefix data identified by {prefix}."}
404 {:description "The prefix data identified by {prefix} does not exist."}}
:tags ["Prefix"]}}
"/prefixes/:prefix/works"
{:get {:description (works-description "Returns list of works associated with specified {prefix}.")
:parameters (merge-with merge sc/WorksQuery sc/QueryParams)
:responses {200 {:schema sc/WorksMessage
:description "A list of works"}}
:tags ["Prefix"]}}})
(def members
{"/members"
{:get {:description "Returns a list of all Crossref members (mostly publishers)."
:parameters sc/QueryParams
:responses {200 {:schema sc/MembersMessage
:description "A collection of members"}}
:tags ["Member"]}}
"/members/:id"
{:get {:description "Returns metadata for a Crossref member, as an example use id 324"
:parameters {:path {:id s/Int}}
:responses {200 {:schema sc/MemberMessage
:description "The prefix data identified by {id}."}
404 {:description "The prefix data identified by {id} does not exist."}}
:tags ["Member"]}}
"/members/:id/works"
{:get {:description (works-description "Returns list of works associated with a Crossref member (deposited by a Crossref member) with {id}.")
:parameters (merge-with merge sc/WorksQuery sc/QueryParams)
:responses {200 {:schema sc/WorksMessage
:description "A list of works"}}
:tags ["Member"]}}})
(def types
{"/types"
{:get {:description "Returns a list of valid work types."
:parameters sc/QueryParams
:responses {200 {:schema sc/TypesMessage
:description "A collection of types"}}
:tags ["Type"]}}
"/types/:id"
{:get {:description "Returns information about a metadata work type, as an example use `monograph`"
:parameters {:path {:id s/Int}}
:responses {200 {:schema sc/TypeMessage
:description "The type identified by {id}."}
404 {:description "The type identified by {id} does not exist."}}
:tags ["Type"]}}
"/types/:id/works"
{:get {:description (works-description "returns list of works of type {id}.")
:parameters (merge-with merge sc/WorksQuery sc/QueryParams)
:responses {200 {:schema sc/WorksMessage
:description "A list of works"}}
:tags ["Type"]}}})
(def paths
{:paths
(merge
funders
journals
works
prefixes
members
types)})
(defroutes api-doc-routes
(swagger-ui
{:path "/swagger-ui"
:swagger-docs "/swagger-docs"})
(GET "/swagger-docs" []
(json/write-str
(s/with-fn-validation
(rs/swagger-json
(merge
info
tags
paths))))))
|
6541
|
(ns cayenne.api.v1.doc
(:require [cayenne.api.v1.schema :as sc]
[cayenne.api.v1.filter :refer [std-filters compound-fields]]
[cayenne.api.v1.fields :refer [work-fields]]
[cayenne.api.v1.facet :refer [std-facets]]
[cayenne.api.v1.query :refer [select-fields sort-fields]]
[compojure.core :refer [defroutes GET]]
[clojure.data.json :as json]
[clojure.java.io :refer [resource]]
[ring.swagger.swagger-ui :refer [swagger-ui]]
[ring.swagger.swagger2 :as rs]
[schema.core :as s]))
(def info
{:info
{:version "0.1"
:title "Crossref Unified Resource API"
:description (slurp (resource "swagger/description.md"))
:termsOfService "https://github.com/CrossRef/rest-api-doc"
:contact {:name "<NAME>"
:email "<EMAIL>"
:url "https://crossref.org"}}})
(def tags
{:tags
[{:name "Funder"
:description "Endpoints that expose funder related data"}
{:name "Journal"
:description "Endpoints that expose journal related data"}
{:name "Work"
:description "Endpoints that expose works related data"}
{:name "Prefix"
:description "Endpoints that expose prefix related data"}
{:name "Member"
:description "Endpoints that expose member related data"}
{:name "Type"
:description "Endpoints that expose type related data"}]})
(defn- fields [compound-fields field]
(let [c-fields (get compound-fields (keyword field))
field-prefix (str "\n + " field ".")]
[field
(when c-fields
(str field "." (clojure.string/join field-prefix c-fields)))]))
(defn- fields-description
([title filters]
(fields-description title filters {}))
([title filters compound-fields]
(->> (map (comp (partial fields compound-fields) key) filters)
(map #(str "\n+ " (first %) (when (second %) (str "\n + " (second %)))))
clojure.string/join
(str title))))
(defn- filters-description []
(fields-description
(slurp (resource "swagger/filters-description.md"))
std-filters
compound-fields))
(defn- facets-description []
(fields-description
(slurp (resource "swagger/facets-description.md"))
(reduce merge (map (comp #(assoc {} % []) :external-field val) std-facets))))
(defn- selects-description []
(fields-description
(slurp (resource "swagger/selects-description.md"))
select-fields))
(defn- sorts-description []
(fields-description
(slurp (resource "swagger/sorts-description.md"))
sort-fields))
(defn- query-description []
(fields-description
(slurp (resource "swagger/query-description.md"))
work-fields))
(defn- works-description [title]
(str
title
(filters-description)
(query-description)
(facets-description)
(selects-description)
(sorts-description)))
(def funders
{"/funders"
{:get {:description "Returns a list of all funders in the [Funder Registry](https://github.com/Crossref/open-funder-registry)."
:parameters (merge-with merge sc/FundersFilter sc/QueryParams)
:responses {200 {:schema sc/FundersMessage
:description "A list of funders."}}
:tags ["Funder"]}}
"/funders/:id"
{:get {:description "Returns metadata for specified funder **and** its suborganizations, as an example use id 501100006004"
:parameters {:path {:id sc/FunderId}}
:responses {200 {:schema sc/FunderMessage
:description "The funder identified by {id}."}
404 {:description "The funder identified by {id} does not exist."}}
:tags ["Funder"]}}
"/funders/:id/works"
{:get {:description (works-description "Returns list of works associated with the specified {id}.")
:parameters (merge-with merge sc/WorksQuery sc/QueryParams)
:responses {200 {:schema sc/WorksMessage
:description "A list of works"}}
:tags ["Funder"]}}})
(def journals
{"/journals"
{:get {:description "Return a list of journals in the Crossref database."
:parameters sc/QueryParams
:responses {200 {:schema sc/JournalsMessage
:description "A list of journals"}}
:tags ["Journal"]}}
"/journals/:issn"
{:get {:description "Returns information about a journal with the given ISSN, as an example use ISSN 03064530"
:parameters {:path {:id sc/JournalIssn}}
:responses {200 {:schema sc/JournalMessage
:description "The journal identified by {issn}."}
404 {:description "The journal identified by {issn} does not exist."}}
:tags ["Journal"]}}
"/journals/:issn/works"
{:get {:description (works-description "Returns a list of works in the journal identified by {issn}.")
:parameters (merge-with merge sc/WorksQuery sc/QueryParams)
:responses {200 {:schema sc/WorksMessage
:description "A list of works"}}
:tags ["Journal"]}}})
(def works
{"/works"
{:get {:description (works-description "Returns a list of all works (journal articles, conference proceedings, books, components, etc), 20 per page.")
:parameters (merge-with merge sc/WorksQuery sc/QueryParams)
:responses {200 {:schema sc/WorksMessage
:description "A list of works"}}
:tags ["Work"]}}
"/works/:doi"
{:get {:description "Returns metadata for the specified Crossref DOI, as an example use DOI 10.5555/12345678"
:parameters {:path {:doi sc/WorkDoi}}
:responses {200 {:schema sc/WorkMessage
:description "The work identified by {doi}."}
404 {:description "The work identified by {doi} does not exist."}}
:tags ["Work"]}}
"/works/:doi/agency"
{:get {:description "Gets the agency associated with a specific work by it's DOI, as an example use DOI 10.5555/12345678"
:parameters {:path {:doi sc/WorkDoi}}
:responses {200 {:schema sc/AgencyMessage
:description "The agency associated with work identified by {doi}."}
404 {:description "The work identified by {doi} does not exist."}}
:tags ["Work"]}}
"/works/:doi/quality"
{:get {:description "Gets the list of quality standards for work by it's DOI, as an example use DOI 10.5555/12345678"
:parameters {:path {:doi sc/WorkDoi}}
:responses {200 {:schema sc/QualityMessage
:description "The quality standards associated with work identified by {doi}."}
404 {:description "The work identified by {doi} does not exist."}}
:tags ["Work"]}}})
(def prefixes
{"/prefixes/:prefix"
{:get {:description "Returns metadata for the DOI owner prefix, as an example use prefix 10.1016"
:parameters {:path {:prefix s/Str}}
:responses {200 {:schema sc/PrefixMessage
:description "The prefix data identified by {prefix}."}
404 {:description "The prefix data identified by {prefix} does not exist."}}
:tags ["Prefix"]}}
"/prefixes/:prefix/works"
{:get {:description (works-description "Returns list of works associated with specified {prefix}.")
:parameters (merge-with merge sc/WorksQuery sc/QueryParams)
:responses {200 {:schema sc/WorksMessage
:description "A list of works"}}
:tags ["Prefix"]}}})
(def members
{"/members"
{:get {:description "Returns a list of all Crossref members (mostly publishers)."
:parameters sc/QueryParams
:responses {200 {:schema sc/MembersMessage
:description "A collection of members"}}
:tags ["Member"]}}
"/members/:id"
{:get {:description "Returns metadata for a Crossref member, as an example use id 324"
:parameters {:path {:id s/Int}}
:responses {200 {:schema sc/MemberMessage
:description "The prefix data identified by {id}."}
404 {:description "The prefix data identified by {id} does not exist."}}
:tags ["Member"]}}
"/members/:id/works"
{:get {:description (works-description "Returns list of works associated with a Crossref member (deposited by a Crossref member) with {id}.")
:parameters (merge-with merge sc/WorksQuery sc/QueryParams)
:responses {200 {:schema sc/WorksMessage
:description "A list of works"}}
:tags ["Member"]}}})
(def types
{"/types"
{:get {:description "Returns a list of valid work types."
:parameters sc/QueryParams
:responses {200 {:schema sc/TypesMessage
:description "A collection of types"}}
:tags ["Type"]}}
"/types/:id"
{:get {:description "Returns information about a metadata work type, as an example use `monograph`"
:parameters {:path {:id s/Int}}
:responses {200 {:schema sc/TypeMessage
:description "The type identified by {id}."}
404 {:description "The type identified by {id} does not exist."}}
:tags ["Type"]}}
"/types/:id/works"
{:get {:description (works-description "returns list of works of type {id}.")
:parameters (merge-with merge sc/WorksQuery sc/QueryParams)
:responses {200 {:schema sc/WorksMessage
:description "A list of works"}}
:tags ["Type"]}}})
(def paths
{:paths
(merge
funders
journals
works
prefixes
members
types)})
(defroutes api-doc-routes
(swagger-ui
{:path "/swagger-ui"
:swagger-docs "/swagger-docs"})
(GET "/swagger-docs" []
(json/write-str
(s/with-fn-validation
(rs/swagger-json
(merge
info
tags
paths))))))
| true |
(ns cayenne.api.v1.doc
(:require [cayenne.api.v1.schema :as sc]
[cayenne.api.v1.filter :refer [std-filters compound-fields]]
[cayenne.api.v1.fields :refer [work-fields]]
[cayenne.api.v1.facet :refer [std-facets]]
[cayenne.api.v1.query :refer [select-fields sort-fields]]
[compojure.core :refer [defroutes GET]]
[clojure.data.json :as json]
[clojure.java.io :refer [resource]]
[ring.swagger.swagger-ui :refer [swagger-ui]]
[ring.swagger.swagger2 :as rs]
[schema.core :as s]))
(def info
{:info
{:version "0.1"
:title "Crossref Unified Resource API"
:description (slurp (resource "swagger/description.md"))
:termsOfService "https://github.com/CrossRef/rest-api-doc"
:contact {:name "PI:NAME:<NAME>END_PI"
:email "PI:EMAIL:<EMAIL>END_PI"
:url "https://crossref.org"}}})
(def tags
{:tags
[{:name "Funder"
:description "Endpoints that expose funder related data"}
{:name "Journal"
:description "Endpoints that expose journal related data"}
{:name "Work"
:description "Endpoints that expose works related data"}
{:name "Prefix"
:description "Endpoints that expose prefix related data"}
{:name "Member"
:description "Endpoints that expose member related data"}
{:name "Type"
:description "Endpoints that expose type related data"}]})
(defn- fields [compound-fields field]
(let [c-fields (get compound-fields (keyword field))
field-prefix (str "\n + " field ".")]
[field
(when c-fields
(str field "." (clojure.string/join field-prefix c-fields)))]))
(defn- fields-description
([title filters]
(fields-description title filters {}))
([title filters compound-fields]
(->> (map (comp (partial fields compound-fields) key) filters)
(map #(str "\n+ " (first %) (when (second %) (str "\n + " (second %)))))
clojure.string/join
(str title))))
(defn- filters-description []
(fields-description
(slurp (resource "swagger/filters-description.md"))
std-filters
compound-fields))
(defn- facets-description []
(fields-description
(slurp (resource "swagger/facets-description.md"))
(reduce merge (map (comp #(assoc {} % []) :external-field val) std-facets))))
(defn- selects-description []
(fields-description
(slurp (resource "swagger/selects-description.md"))
select-fields))
(defn- sorts-description []
(fields-description
(slurp (resource "swagger/sorts-description.md"))
sort-fields))
(defn- query-description []
(fields-description
(slurp (resource "swagger/query-description.md"))
work-fields))
(defn- works-description [title]
(str
title
(filters-description)
(query-description)
(facets-description)
(selects-description)
(sorts-description)))
(def funders
{"/funders"
{:get {:description "Returns a list of all funders in the [Funder Registry](https://github.com/Crossref/open-funder-registry)."
:parameters (merge-with merge sc/FundersFilter sc/QueryParams)
:responses {200 {:schema sc/FundersMessage
:description "A list of funders."}}
:tags ["Funder"]}}
"/funders/:id"
{:get {:description "Returns metadata for specified funder **and** its suborganizations, as an example use id 501100006004"
:parameters {:path {:id sc/FunderId}}
:responses {200 {:schema sc/FunderMessage
:description "The funder identified by {id}."}
404 {:description "The funder identified by {id} does not exist."}}
:tags ["Funder"]}}
"/funders/:id/works"
{:get {:description (works-description "Returns list of works associated with the specified {id}.")
:parameters (merge-with merge sc/WorksQuery sc/QueryParams)
:responses {200 {:schema sc/WorksMessage
:description "A list of works"}}
:tags ["Funder"]}}})
(def journals
{"/journals"
{:get {:description "Return a list of journals in the Crossref database."
:parameters sc/QueryParams
:responses {200 {:schema sc/JournalsMessage
:description "A list of journals"}}
:tags ["Journal"]}}
"/journals/:issn"
{:get {:description "Returns information about a journal with the given ISSN, as an example use ISSN 03064530"
:parameters {:path {:id sc/JournalIssn}}
:responses {200 {:schema sc/JournalMessage
:description "The journal identified by {issn}."}
404 {:description "The journal identified by {issn} does not exist."}}
:tags ["Journal"]}}
"/journals/:issn/works"
{:get {:description (works-description "Returns a list of works in the journal identified by {issn}.")
:parameters (merge-with merge sc/WorksQuery sc/QueryParams)
:responses {200 {:schema sc/WorksMessage
:description "A list of works"}}
:tags ["Journal"]}}})
(def works
{"/works"
{:get {:description (works-description "Returns a list of all works (journal articles, conference proceedings, books, components, etc), 20 per page.")
:parameters (merge-with merge sc/WorksQuery sc/QueryParams)
:responses {200 {:schema sc/WorksMessage
:description "A list of works"}}
:tags ["Work"]}}
"/works/:doi"
{:get {:description "Returns metadata for the specified Crossref DOI, as an example use DOI 10.5555/12345678"
:parameters {:path {:doi sc/WorkDoi}}
:responses {200 {:schema sc/WorkMessage
:description "The work identified by {doi}."}
404 {:description "The work identified by {doi} does not exist."}}
:tags ["Work"]}}
"/works/:doi/agency"
{:get {:description "Gets the agency associated with a specific work by it's DOI, as an example use DOI 10.5555/12345678"
:parameters {:path {:doi sc/WorkDoi}}
:responses {200 {:schema sc/AgencyMessage
:description "The agency associated with work identified by {doi}."}
404 {:description "The work identified by {doi} does not exist."}}
:tags ["Work"]}}
"/works/:doi/quality"
{:get {:description "Gets the list of quality standards for work by it's DOI, as an example use DOI 10.5555/12345678"
:parameters {:path {:doi sc/WorkDoi}}
:responses {200 {:schema sc/QualityMessage
:description "The quality standards associated with work identified by {doi}."}
404 {:description "The work identified by {doi} does not exist."}}
:tags ["Work"]}}})
(def prefixes
{"/prefixes/:prefix"
{:get {:description "Returns metadata for the DOI owner prefix, as an example use prefix 10.1016"
:parameters {:path {:prefix s/Str}}
:responses {200 {:schema sc/PrefixMessage
:description "The prefix data identified by {prefix}."}
404 {:description "The prefix data identified by {prefix} does not exist."}}
:tags ["Prefix"]}}
"/prefixes/:prefix/works"
{:get {:description (works-description "Returns list of works associated with specified {prefix}.")
:parameters (merge-with merge sc/WorksQuery sc/QueryParams)
:responses {200 {:schema sc/WorksMessage
:description "A list of works"}}
:tags ["Prefix"]}}})
(def members
{"/members"
{:get {:description "Returns a list of all Crossref members (mostly publishers)."
:parameters sc/QueryParams
:responses {200 {:schema sc/MembersMessage
:description "A collection of members"}}
:tags ["Member"]}}
"/members/:id"
{:get {:description "Returns metadata for a Crossref member, as an example use id 324"
:parameters {:path {:id s/Int}}
:responses {200 {:schema sc/MemberMessage
:description "The prefix data identified by {id}."}
404 {:description "The prefix data identified by {id} does not exist."}}
:tags ["Member"]}}
"/members/:id/works"
{:get {:description (works-description "Returns list of works associated with a Crossref member (deposited by a Crossref member) with {id}.")
:parameters (merge-with merge sc/WorksQuery sc/QueryParams)
:responses {200 {:schema sc/WorksMessage
:description "A list of works"}}
:tags ["Member"]}}})
(def types
{"/types"
{:get {:description "Returns a list of valid work types."
:parameters sc/QueryParams
:responses {200 {:schema sc/TypesMessage
:description "A collection of types"}}
:tags ["Type"]}}
"/types/:id"
{:get {:description "Returns information about a metadata work type, as an example use `monograph`"
:parameters {:path {:id s/Int}}
:responses {200 {:schema sc/TypeMessage
:description "The type identified by {id}."}
404 {:description "The type identified by {id} does not exist."}}
:tags ["Type"]}}
"/types/:id/works"
{:get {:description (works-description "returns list of works of type {id}.")
:parameters (merge-with merge sc/WorksQuery sc/QueryParams)
:responses {200 {:schema sc/WorksMessage
:description "A list of works"}}
:tags ["Type"]}}})
(def paths
{:paths
(merge
funders
journals
works
prefixes
members
types)})
(defroutes api-doc-routes
(swagger-ui
{:path "/swagger-ui"
:swagger-docs "/swagger-docs"})
(GET "/swagger-docs" []
(json/write-str
(s/with-fn-validation
(rs/swagger-json
(merge
info
tags
paths))))))
|
[
{
"context": " [:what\n [id ::core/name \"Ivan\"]\n [id ::core/last-name last-name]\n ",
"end": 256,
"score": 0.9994754195213318,
"start": 252,
"tag": "NAME",
"value": "Ivan"
}
] |
bench-src/people/odoyle.cljc
|
drewverlee/odoyle-rules
| 0 |
(ns people.odoyle
(:require [odoyle.rules :as o]
[people.core :as core]))
(defn init []
(as-> (o/->session) $
(reduce o/add-rule $
(o/ruleset
{::get-ivan
[:what
[id ::core/name "Ivan"]
[id ::core/last-name last-name]
[id ::core/age age]
[id ::core/sex :male]]}))
(reduce (fn [session person]
(o/insert session (:db/id person) person))
$ core/people20k)))
(def initial-session (init))
(defn query [session]
(o/query-all session ::get-ivan))
(defn run []
(query initial-session))
|
13921
|
(ns people.odoyle
(:require [odoyle.rules :as o]
[people.core :as core]))
(defn init []
(as-> (o/->session) $
(reduce o/add-rule $
(o/ruleset
{::get-ivan
[:what
[id ::core/name "<NAME>"]
[id ::core/last-name last-name]
[id ::core/age age]
[id ::core/sex :male]]}))
(reduce (fn [session person]
(o/insert session (:db/id person) person))
$ core/people20k)))
(def initial-session (init))
(defn query [session]
(o/query-all session ::get-ivan))
(defn run []
(query initial-session))
| true |
(ns people.odoyle
(:require [odoyle.rules :as o]
[people.core :as core]))
(defn init []
(as-> (o/->session) $
(reduce o/add-rule $
(o/ruleset
{::get-ivan
[:what
[id ::core/name "PI:NAME:<NAME>END_PI"]
[id ::core/last-name last-name]
[id ::core/age age]
[id ::core/sex :male]]}))
(reduce (fn [session person]
(o/insert session (:db/id person) person))
$ core/people20k)))
(def initial-session (init))
(defn query [session]
(o/query-all session ::get-ivan))
(defn run []
(query initial-session))
|
[
{
"context": ", :phase nil, :attributes {:id \"mrna0001\", :name \"foobar\"}}\n {:chr \"ctg123\", :source nil, :type \"exon\", ",
"end": 2331,
"score": 0.9930232763290405,
"start": 2325,
"tag": "USERNAME",
"value": "foobar"
},
{
"context": " :phase nil, :attributes {:id \"gene00001\", :name \"EDEN\"}}\n {:chr \"ctg123\", :source nil, :type \"TF_bind",
"end": 8946,
"score": 0.971902072429657,
"start": 8942,
"tag": "NAME",
"value": "EDEN"
},
{
"context": "ase nil,\n :attributes {:id \"gene00001\", :name \"EDEN\"}}\n {:chr \"ctg123\", :source nil, :type \"TF_bind",
"end": 19550,
"score": 0.6855843663215637,
"start": 19546,
"tag": "NAME",
"value": "EDEN"
}
] |
test/cljam/io/gff_test.clj
|
niyarin-another/cljam
| 78 |
(ns cljam.io.gff-test
(:require [clojure.test :refer [deftest is are testing]]
[clojure.string :as cstr]
[clojure.java.io :as cio]
[cljam.test-common :refer
[with-before-after
prepare-cache!
clean-cache!
not-throw?
http-server
temp-dir
test-gff3-file]]
[cljam.io.gff :as gff])
(:import [java.io ByteArrayInputStream ByteArrayOutputStream]
[cljam.io.gff GFFReader GFFWriter]))
(def ^:private ^String
simple-gff
(->> ["##gff-version 3"
"ctg123 . exon 1300 1500 . + . ID=exon00001"
"ctg123 . exon 1050 1500 . + . ID=exon00002"
"ctg123 . exon 3000 3902 . + . ID=exon00003"
"ctg123 . exon 5000 5500 . + . ID=exon00004"
"ctg123 . exon 7000 9000 . + . ID=exon00005"]
(cstr/join \newline)))
(def ^:private
simple-edn
[{:chr "ctg123", :source nil, :type "exon", :start 1300, :end 1500, :score nil, :strand :forward, :phase nil, :attributes {:id "exon00001"}}
{:chr "ctg123", :source nil, :type "exon", :start 1050, :end 1500, :score nil, :strand :forward, :phase nil, :attributes {:id "exon00002"}}
{:chr "ctg123", :source nil, :type "exon", :start 3000, :end 3902, :score nil, :strand :forward, :phase nil, :attributes {:id "exon00003"}}
{:chr "ctg123", :source nil, :type "exon", :start 5000, :end 5500, :score nil, :strand :forward, :phase nil, :attributes {:id "exon00004"}}
{:chr "ctg123", :source nil, :type "exon", :start 7000, :end 9000, :score nil, :strand :forward, :phase nil, :attributes {:id "exon00005"}}])
(def ^:private ^String
nested-gff-1
(->> ["##gff-version 3"
"ctg123 . mRNA 1300 9000 . + . ID=mrna0001;Name=foobar"
"ctg123 . exon 1300 1500 . + . ID=exon00001;Parent=mrna0001"
"ctg123 . exon 1050 1500 . + . ID=exon00002;Parent=mrna0001"
"ctg123 . exon 3000 3902 . + . ID=exon00003;Parent=mrna0001"
"ctg123 . exon 5000 5500 . + . ID=exon00004;Parent=mrna0001"
"ctg123 . exon 7000 9000 . + . ID=exon00005;Parent=mrna0001"]
(cstr/join \newline)))
(def ^:private
nested-edn-1
[{:chr "ctg123", :source nil, :type "mRNA", :start 1300, :end 9000, :score nil, :strand :forward, :phase nil, :attributes {:id "mrna0001", :name "foobar"}}
{:chr "ctg123", :source nil, :type "exon", :start 1300, :end 1500, :score nil, :strand :forward, :phase nil, :attributes {:id "exon00001", :parent ["mrna0001"]}}
{:chr "ctg123", :source nil, :type "exon", :start 1050, :end 1500, :score nil, :strand :forward, :phase nil, :attributes {:id "exon00002", :parent ["mrna0001"]}}
{:chr "ctg123", :source nil, :type "exon", :start 3000, :end 3902, :score nil, :strand :forward, :phase nil, :attributes {:id "exon00003", :parent ["mrna0001"]}}
{:chr "ctg123", :source nil, :type "exon", :start 5000, :end 5500, :score nil, :strand :forward, :phase nil, :attributes {:id "exon00004", :parent ["mrna0001"]}}
{:chr "ctg123", :source nil, :type "exon", :start 7000, :end 9000, :score nil, :strand :forward, :phase nil, :attributes {:id "exon00005", :parent ["mrna0001"]}}])
(def ^:private ^String
nested-gff-2
(->> ["##gff-version 3"
"ctg123 . operon 1300 15000 . + . ID=operon001;Name=Operon"
"ctg123 . mRNA 1300 9000 . + . ID=mrna0001;Parent=operon001;Name=foobar"
"ctg123 . exon 1300 1500 . + . Parent=mrna0001"
"ctg123 . exon 1050 1500 . + . Parent=mrna0001"
"ctg123 . exon 3000 3902 . + . Parent=mrna0001"
"ctg123 . exon 5000 5500 . + . Parent=mrna0001"
"ctg123 . exon 7000 9000 . + . Parent=mrna0001"
"ctg123 . mRNA 10000 15000 . + . ID=mrna0002;Parent=operon001;Name=baz"
"ctg123 . exon 10000 12000 . + . Parent=mrna0002"
"ctg123 . exon 14000 15000 . + . Parent=mrna0002"]
(cstr/join \newline)))
(def ^:private
nested-edn-2
[{:chr "ctg123", :source nil, :type "operon", :start 1300, :end 15000, :score nil, :strand :forward, :phase nil, :attributes {:id "operon001", :name "Operon"}}
{:chr "ctg123", :source nil, :type "mRNA", :start 1300, :end 9000, :score nil, :strand :forward, :phase nil, :attributes {:id "mrna0001", :parent ["operon001"], :name "foobar"}}
{:chr "ctg123", :source nil, :type "exon", :start 1300, :end 1500, :score nil, :strand :forward, :phase nil, :attributes {:parent ["mrna0001"]}}
{:chr "ctg123", :source nil, :type "exon", :start 1050, :end 1500, :score nil, :strand :forward, :phase nil, :attributes {:parent ["mrna0001"]}}
{:chr "ctg123", :source nil, :type "exon", :start 3000, :end 3902, :score nil, :strand :forward, :phase nil, :attributes {:parent ["mrna0001"]}}
{:chr "ctg123", :source nil, :type "exon", :start 5000, :end 5500, :score nil, :strand :forward, :phase nil, :attributes {:parent ["mrna0001"]}}
{:chr "ctg123", :source nil, :type "exon", :start 7000, :end 9000, :score nil, :strand :forward, :phase nil, :attributes {:parent ["mrna0001"]}}
{:chr "ctg123", :source nil, :type "mRNA", :start 10000, :end 15000, :score nil, :strand :forward, :phase nil, :attributes {:id "mrna0002", :parent ["operon001"], :name "baz"}}
{:chr "ctg123", :source nil, :type "exon", :start 10000, :end 12000, :score nil, :strand :forward, :phase nil, :attributes {:parent ["mrna0002"]}}
{:chr "ctg123", :source nil, :type "exon", :start 14000, :end 15000, :score nil, :strand :forward, :phase nil, :attributes {:parent ["mrna0002"]}}])
(def ^:private ^String
discontinuous-gff
(->> ["##gff-version 3"
"ctg123 example match 26122 26126 . + . ID=match001"
"ctg123 example match 26497 26869 . + . ID=match001"
"ctg123 example match 27201 27325 . + . ID=match001"
"ctg123 example match 27372 27433 . + . ID=match001"
"ctg123 example match 27565 27565 . + . ID=match001"]
(cstr/join \newline)))
(def ^:private
discontinuous-edn
[{:chr "ctg123", :source "example", :type "match", :start 26122, :end 26126, :score nil, :strand :forward, :phase nil, :attributes {:id "match001"}}
{:chr "ctg123", :source "example", :type "match", :start 26497, :end 26869, :score nil, :strand :forward, :phase nil, :attributes {:id "match001"}}
{:chr "ctg123", :source "example", :type "match", :start 27201, :end 27325, :score nil, :strand :forward, :phase nil, :attributes {:id "match001"}}
{:chr "ctg123", :source "example", :type "match", :start 27372, :end 27433, :score nil, :strand :forward, :phase nil, :attributes {:id "match001"}}
{:chr "ctg123", :source "example", :type "match", :start 27565, :end 27565, :score nil, :strand :forward, :phase nil, :attributes {:id "match001"}}])
(def ^:private ^String
example-gene-gff
(->> ["##gff-version 3.2.1"
"##sequence-region ctg123 1 1497228"
"ctg123 . gene 1000 9000 . + . ID=gene00001;Name=EDEN"
"ctg123 . TF_binding_site 1000 1012 . + . ID=tfbs00001;Parent=gene00001"
"ctg123 . mRNA 1050 9000 . + . ID=mRNA00001;Parent=gene00001;Name=EDEN.1"
"ctg123 . mRNA 1050 9000 . + . ID=mRNA00002;Parent=gene00001;Name=EDEN.2"
"ctg123 . mRNA 1300 9000 . + . ID=mRNA00003;Parent=gene00001;Name=EDEN.3"
"ctg123 . exon 1300 1500 . + . ID=exon00001;Parent=mRNA00003"
"ctg123 . exon 1050 1500 . + . ID=exon00002;Parent=mRNA00001,mRNA00002"
"ctg123 . exon 3000 3902 . + . ID=exon00003;Parent=mRNA00001,mRNA00003"
"ctg123 . exon 5000 5500 . + . ID=exon00004;Parent=mRNA00001,mRNA00002,mRNA00003"
"ctg123 . exon 7000 9000 . + . ID=exon00005;Parent=mRNA00001,mRNA00002,mRNA00003"
"ctg123 . CDS 1201 1500 . + 0 ID=cds00001;Parent=mRNA00001;Name=edenprotein.1"
"ctg123 . CDS 3000 3902 . + 0 ID=cds00001;Parent=mRNA00001;Name=edenprotein.1"
"ctg123 . CDS 5000 5500 . + 0 ID=cds00001;Parent=mRNA00001;Name=edenprotein.1"
"ctg123 . CDS 7000 7600 . + 0 ID=cds00001;Parent=mRNA00001;Name=edenprotein.1"
"ctg123 . CDS 1201 1500 . + 0 ID=cds00002;Parent=mRNA00002;Name=edenprotein.2"
"ctg123 . CDS 5000 5500 . + 0 ID=cds00002;Parent=mRNA00002;Name=edenprotein.2"
"ctg123 . CDS 7000 7600 . + 0 ID=cds00002;Parent=mRNA00002;Name=edenprotein.2"
"ctg123 . CDS 3301 3902 . + 0 ID=cds00003;Parent=mRNA00003;Name=edenprotein.3"
"ctg123 . CDS 5000 5500 . + 1 ID=cds00003;Parent=mRNA00003;Name=edenprotein.3"
"ctg123 . CDS 7000 7600 . + 1 ID=cds00003;Parent=mRNA00003;Name=edenprotein.3"
"ctg123 . CDS 3391 3902 . + 0 ID=cds00004;Parent=mRNA00003;Name=edenprotein.4"
"ctg123 . CDS 5000 5500 . + 1 ID=cds00004;Parent=mRNA00003;Name=edenprotein.4"
"ctg123 . CDS 7000 7600 . + 1 ID=cds00004;Parent=mRNA00003;Name=edenprotein.4"]
(cstr/join \newline)))
(def ^:private
example-gene-edn
[{:chr "ctg123", :source nil, :type "gene", :start 1000, :end 9000, :score nil, :strand :forward, :phase nil, :attributes {:id "gene00001", :name "EDEN"}}
{:chr "ctg123", :source nil, :type "TF_binding_site", :start 1000, :end 1012, :score nil, :strand :forward, :phase nil, :attributes {:id "tfbs00001", :parent ["gene00001"]}}
{:chr "ctg123", :source nil, :type "mRNA", :start 1050, :end 9000, :score nil, :strand :forward, :phase nil, :attributes {:id "mRNA00001", :parent ["gene00001"], :name "EDEN.1"}}
{:chr "ctg123", :source nil, :type "mRNA", :start 1050, :end 9000, :score nil, :strand :forward, :phase nil, :attributes {:id "mRNA00002", :parent ["gene00001"], :name "EDEN.2"}}
{:chr "ctg123", :source nil, :type "mRNA", :start 1300, :end 9000, :score nil, :strand :forward, :phase nil, :attributes {:id "mRNA00003", :parent ["gene00001"], :name "EDEN.3"}}
{:chr "ctg123", :source nil, :type "exon", :start 1300, :end 1500, :score nil, :strand :forward, :phase nil, :attributes {:id "exon00001", :parent ["mRNA00003"]}}
{:chr "ctg123", :source nil, :type "exon", :start 1050, :end 1500, :score nil, :strand :forward, :phase nil, :attributes {:id "exon00002", :parent ["mRNA00001" "mRNA00002"]}}
{:chr "ctg123", :source nil, :type "exon", :start 3000, :end 3902, :score nil, :strand :forward, :phase nil, :attributes {:id "exon00003", :parent ["mRNA00001" "mRNA00003"]}}
{:chr "ctg123", :source nil, :type "exon", :start 5000, :end 5500, :score nil, :strand :forward, :phase nil, :attributes {:id "exon00004", :parent ["mRNA00001" "mRNA00002" "mRNA00003"]}}
{:chr "ctg123", :source nil, :type "exon", :start 7000, :end 9000, :score nil, :strand :forward, :phase nil, :attributes {:id "exon00005", :parent ["mRNA00001" "mRNA00002" "mRNA00003"]}}
{:chr "ctg123", :source nil, :type "CDS", :start 1201, :end 1500, :score nil, :strand :forward, :phase 0, :attributes {:id "cds00001", :parent ["mRNA00001"], :name "edenprotein.1"}}
{:chr "ctg123", :source nil, :type "CDS", :start 3000, :end 3902, :score nil, :strand :forward, :phase 0, :attributes {:id "cds00001", :parent ["mRNA00001"], :name "edenprotein.1"}}
{:chr "ctg123", :source nil, :type "CDS", :start 5000, :end 5500, :score nil, :strand :forward, :phase 0, :attributes {:id "cds00001", :parent ["mRNA00001"], :name "edenprotein.1"}}
{:chr "ctg123", :source nil, :type "CDS", :start 7000, :end 7600, :score nil, :strand :forward, :phase 0, :attributes {:id "cds00001", :parent ["mRNA00001"], :name "edenprotein.1"}}
{:chr "ctg123", :source nil, :type "CDS", :start 1201, :end 1500, :score nil, :strand :forward, :phase 0, :attributes {:id "cds00002", :parent ["mRNA00002"], :name "edenprotein.2"}}
{:chr "ctg123", :source nil, :type "CDS", :start 5000, :end 5500, :score nil, :strand :forward, :phase 0, :attributes {:id "cds00002", :parent ["mRNA00002"], :name "edenprotein.2"}}
{:chr "ctg123", :source nil, :type "CDS", :start 7000, :end 7600, :score nil, :strand :forward, :phase 0, :attributes {:id "cds00002", :parent ["mRNA00002"], :name "edenprotein.2"}}
{:chr "ctg123", :source nil, :type "CDS", :start 3301, :end 3902, :score nil, :strand :forward, :phase 0, :attributes {:id "cds00003", :parent ["mRNA00003"], :name "edenprotein.3"}}
{:chr "ctg123", :source nil, :type "CDS", :start 5000, :end 5500, :score nil, :strand :forward, :phase 1, :attributes {:id "cds00003", :parent ["mRNA00003"], :name "edenprotein.3"}}
{:chr "ctg123", :source nil, :type "CDS", :start 7000, :end 7600, :score nil, :strand :forward, :phase 1, :attributes {:id "cds00003", :parent ["mRNA00003"], :name "edenprotein.3"}}
{:chr "ctg123", :source nil, :type "CDS", :start 3391, :end 3902, :score nil, :strand :forward, :phase 0, :attributes {:id "cds00004", :parent ["mRNA00003"], :name "edenprotein.4"}}
{:chr "ctg123", :source nil, :type "CDS", :start 5000, :end 5500, :score nil, :strand :forward, :phase 1, :attributes {:id "cds00004", :parent ["mRNA00003"], :name "edenprotein.4"}}
{:chr "ctg123", :source nil, :type "CDS", :start 7000, :end 7600, :score nil, :strand :forward, :phase 1, :attributes {:id "cds00004", :parent ["mRNA00003"], :name "edenprotein.4"}}])
(def ^:private ^String
circular-gff
(->> ["##gff-version 3.2.1"
"# organism Enterobacteria phage f1"
"# Note Bacteriophage f1, complete genome."
"J02448 GenBank region 1 6407 . + . ID=J02448;Name=J02448;Is_circular=true"
"J02448 GenBank CDS 6006 7238 . + 0 ID=geneII;Name=II;Note=protein II"]
(cstr/join \newline)))
(def ^:private
circular-edn
[{:chr "J02448", :source "GenBank", :type "region", :start 1, :end 6407, :score nil, :strand :forward, :phase nil, :attributes {:id "J02448", :name "J02448", :circular? true}}
{:chr "J02448", :source "GenBank", :type "CDS", :start 6006, :end 7238, :score nil, :strand :forward, :phase 0, :attributes {:id "geneII", :name "II", :note ["protein II"]}}])
(def ^:private ^String
gap-gff
(->> ["##gff-version 3.2.1"
"chr3 . Match 1 23 . . . ID=Match1;Target=EST23 1 21;Gap=M8 D3 M6 I1 M6"
"ctg123 . nucleotide_to_protein 100 129 . + . ID=match008;Target=p101 1 10;Gap=M3 I1 M2 D1 M4"]
(cstr/join \newline)))
(def ^:private
gap-edn
[{:chr "chr3", :source nil, :type "Match", :start 1, :end 23, :score nil, :strand nil, :phase nil,
:attributes {:id "Match1", :target {:chr "EST23", :start 1, :end 21}, :gap [[\M 8] [\D 3] [\M 6] [\I 1] [\M 6]]}}
{:chr "ctg123", :source nil, :type "nucleotide_to_protein", :start 100, :end 129, :score nil, :strand :forward, :phase nil,
:attributes {:id "match008", :target {:chr "p101", :start 1, :end 10}, :gap [[\M 3] [\I 1] [\M 2] [\D 1] [\M 4]]}}])
(def ^:private ^String
alignment-gff
(->> ["##gff-version 3.2.1"
"ctg123 . cDNA_match 1050 9000 6.2e-45 + . ID=match00001;Target=cdna0123 12 2964;Gap=M451 D3499 M501 D1499 M2001"]
(cstr/join \newline)))
(def ^:private
alignment-edn
[{:chr "ctg123", :source nil, :type "cDNA_match", :start 1050, :end 9000, :score 6.2e-45, :strand :forward, :phase nil,
:attributes {:id "match00001", :target {:chr "cdna0123", :start 12, :end 2964}, :gap [[\M 451] [\D 3499] [\M 501] [\D 1499] [\M 2001]]}}])
(def ^:private ^String
alignment-multiple-gff
(->> ["##gff-version 3.2.1"
"ctg123 . cDNA_match 1050 1500 5.8e-42 + . ID=match00001;Target=cdna0123 12 462"
"ctg123 . cDNA_match 5000 5500 8.1e-43 + . ID=match00001;Target=cdna0123 463 963"
"ctg123 . cDNA_match 7000 9000 1.4e-40 + . ID=match00001;Target=cdna0123 964 2964"]
(cstr/join \newline)))
(def ^:private
alignment-multiple-edn
[{:chr "ctg123", :source nil, :type "cDNA_match", :start 1050, :end 1500, :score 5.8e-42, :strand :forward, :phase nil, :attributes {:id "match00001", :target {:chr "cdna0123", :start 12, :end 462}}}
{:chr "ctg123", :source nil, :type "cDNA_match", :start 5000, :end 5500, :score 8.1e-43, :strand :forward, :phase nil, :attributes {:id "match00001", :target {:chr "cdna0123", :start 463, :end 963}}}
{:chr "ctg123", :source nil, :type "cDNA_match", :start 7000, :end 9000, :score 1.4e-40, :strand :forward, :phase nil, :attributes {:id "match00001", :target {:chr "cdna0123", :start 964, :end 2964}}}])
(def ^:private ^String
  alignment-reverse-gff
  ;; GFF3 fixture: forward- and reverse-strand EST_match features with Gap attributes.
  (cstr/join
   "\n"
   ["##gff-version 3.2.1"
    "ctg123\t.\tEST_match\t1200\t3200\t2.2e-30\t+\t.\tID=match00002;Target=mjm1123.5 5 506;Gap=M301 D1499 M201"
    "ctg123\t.\tEST_match\t7000\t9000\t7.4e-32\t-\t.\tID=match00003;Target=mjm1123.3 1 502;Gap=M101 D1499 M401"]))
(def ^:private
  alignment-reverse-edn
  ;; Expected parse of `alignment-reverse-gff`: note :strand :reverse on the second feature.
  [{:chr "ctg123", :source nil, :type "EST_match", :start 1200, :end 3200, :score 2.2e-30, :strand :forward, :phase nil,
    :attributes {:id "match00002", :target {:chr "mjm1123.5", :start 5, :end 506}, :gap [[\M 301] [\D 1499] [\M 201]]}}
   {:chr "ctg123", :source nil, :type "EST_match", :start 7000, :end 9000, :score 7.4e-32, :strand :reverse, :phase nil,
    :attributes {:id "match00003", :target {:chr "mjm1123.3", :start 1, :end 502}, :gap [[\M 101] [\D 1499] [\M 401]]}}])
(def ^:private ^String
  alignment-group-gff
  ;; GFF3 fixture: two match_part children grouped under a cDNA_match parent via Parent=.
  (cstr/join
   "\n"
   ["##gff-version 3.2.1"
    "ctg123\t.\tcDNA_match\t1200\t9000\t.\t.\t.\tID=cDNA00001"
    "ctg123\t.\tmatch_part\t1200\t3200\t2.2e-30\t+\t.\tID=match00002;Parent=cDNA00001;Target=mjm1123.5 5 506;Gap=M301 D1499 M201"
    "ctg123\t.\tmatch_part\t7000\t9000\t7.4e-32\t-\t.\tID=match00003;Parent=cDNA00001;Target=mjm1123.3 1 502;Gap=M101 D1499 M401"]))
(def ^:private
  alignment-group-edn
  ;; Expected parse of `alignment-group-gff`: children carry :parent ["cDNA00001"].
  [{:chr "ctg123", :source nil, :type "cDNA_match", :start 1200, :end 9000, :score nil, :strand nil, :phase nil,
    :attributes {:id "cDNA00001"}}
   {:chr "ctg123", :source nil, :type "match_part", :start 1200, :end 3200, :score 2.2e-30, :strand :forward, :phase nil,
    :attributes {:id "match00002", :parent ["cDNA00001"], :target {:chr "mjm1123.5", :start 5, :end 506}, :gap [[\M 301] [\D 1499] [\M 201]]}}
   {:chr "ctg123", :source nil, :type "match_part", :start 7000, :end 9000, :score 7.4e-32, :strand :reverse, :phase nil,
    :attributes {:id "match00003", :parent ["cDNA00001"], :target {:chr "mjm1123.3", :start 1, :end 502}, :gap [[\M 101] [\D 1499] [\M 401]]}}])
(def ^:private ^String
  encoding-gff
  ;; GFF3 fixture exercising percent-encoding/decoding rules: raw vs %-encoded
  ;; separators in seqid/source/type, encoded spaces/commas in attribute values,
  ;; and the full printable-ASCII gamut in column 1 and in ID/Target values.
  (cstr/join
   "\n"
   ["##gff-version 3.2"
    "ch r;1\tsour =ce\tty &p,e\t1\t10\t9.0\t?\t.\t."
    "chr%253B1\tsour%253Dce\tty%2526p%252Ce\t1\t10\t.\t+\t.\tTarget=Foo%20Bar 1 10 +;Dbxref=EMBL:AA816246,NCBI_gi:10727410;Foo=Bar%2C,Baz "
    " !\"#$%25&'%09()*+,-./%0A0123456789:;<=>?@[\\]^_`{|}~\t.\ttype\t1\t10\t.\t.\t.\tID= !\"#$%25%26'%09()*+%2C-./%0A0123456789:%3B<%3D>?@[\\]^_`{|}~;Target=%20!\"#$%25%26'%09()*+%2C-./%0A0123456789:%3B<%3D>?@[\\]^_`{|}~ 1 10 -"]))
(def ^:private
  encoding-edn
  ;; Expected parse of `encoding-gff`: %XX sequences decoded once (so "%25" -> "%",
  ;; "%253B" -> "%3B"), Dbxref split into maps, repeated attribute values into vectors.
  [{:chr "ch r;1", :source "sour =ce", :type "ty &p,e", :start 1, :end 10, :score 9.0, :strand :unknown, :phase nil, :attributes {}}
   {:chr "chr%3B1", :source "sour%3Dce", :type "ty%26p%2Ce", :start 1, :end 10, :score nil, :strand :forward, :phase nil,
    :attributes {:target {:chr "Foo Bar", :start 1, :end 10, :strand :forward}, :db-xref [{:db-tag "EMBL", :id "AA816246"}, {:db-tag "NCBI_gi", :id "10727410"}], "Foo" ["Bar," "Baz "]}}
   {:chr " !\"#$%&'\t()*+,-./\n0123456789:;<=>?@[\\]^_`{|}~", :source nil, :type "type", :start 1, :end 10, :score nil, :strand nil, :phase nil,
    :attributes {:id " !\"#$%&'\t()*+,-./\n0123456789:;<=>?@[\\]^_`{|}~",
                 :target {:chr " !\"#$%&'\t()*+,-./\n0123456789:;<=>?@[\\]^_`{|}~", :start 1, :end 10, :strand :reverse}}}])
(def ^:private
  example-edn
  ;; Expected features of the on-disk fixture `test-gff3-file`
  ;; (gene -> mRNA -> UTR/CDS hierarchy plus cDNA_match alignments).
  [{:chr "ctg123", :source nil, :type "gene", :start 1000, :end 9000, :score nil, :strand :forward, :phase nil,
    :attributes {:id "gene00001", :name "EDEN"}}
   {:chr "ctg123", :source nil, :type "TF_binding_site", :start 1000, :end 1012, :score nil, :strand :forward, :phase nil,
    :attributes {:id "tfbs00001", :parent ["gene00001"]}}
   {:chr "ctg123", :source nil, :type "mRNA", :start 1050, :end 9000, :score nil, :strand :forward, :phase nil,
    :attributes {:id "mRNA00001", :parent ["gene00001"], :name "EDEN.1"}}
   {:chr "ctg123", :source nil, :type "five_prime_UTR", :start 1050, :end 1200, :score nil, :strand :forward, :phase nil,
    :attributes {:parent ["mRNA00001"]}}
   {:chr "ctg123", :source nil, :type "CDS", :start 1201, :end 1500, :score nil, :strand :forward, :phase 0,
    :attributes {:id "cds00001", :parent ["mRNA00001"]}}
   {:chr "ctg123", :source nil, :type "CDS", :start 3000, :end 3902, :score nil, :strand :forward, :phase 0,
    :attributes {:id "cds00001", :parent ["mRNA00001"]}}
   {:chr "ctg123", :source nil, :type "CDS", :start 5000, :end 5500, :score nil, :strand :forward, :phase 0,
    :attributes {:id "cds00001", :parent ["mRNA00001"]}}
   {:chr "ctg123", :source nil, :type "CDS", :start 7000, :end 7600, :score nil, :strand :forward, :phase 0,
    :attributes {:id "cds00001", :parent ["mRNA00001"]}}
   {:chr "ctg123", :source nil, :type "three_prime_UTR", :start 7601, :end 9000, :score nil, :strand :forward, :phase nil,
    :attributes {:parent ["mRNA00001"]}}
   {:chr "ctg123", :source nil, :type "cDNA_match", :start 1050, :end 1500, :score 5.8e-42, :strand :forward, :phase nil,
    :attributes {:id "match00001", :target {:chr "cdna0123", :start 12, :end 462}}}
   {:chr "ctg123", :source nil, :type "cDNA_match", :start 5000, :end 5500, :score 8.1e-43, :strand :forward, :phase nil,
    :attributes {:id "match00001", :target {:chr "cdna0123", :start 463, :end 963}}}
   {:chr "ctg123", :source nil, :type "cDNA_match", :start 7000, :end 9000, :score 1.4e-40, :strand :forward, :phase nil,
    :attributes {:id "match00001", :target {:chr "cdna0123", :start 964, :end 2964}}}])
(deftest reader
  ;; Version-directive handling: "##gff-version 3" -> nil revisions,
  ;; "##gff-version 3.2.1" -> major/minor revisions populated.
  (with-open [bais (ByteArrayInputStream. (.getBytes simple-gff))
              r (gff/reader bais)]
    (is (instance? GFFReader r))
    (is (= {:version 3, :major-revision nil, :minor-revision nil}
           (gff/version r))))
  (with-open [bais (ByteArrayInputStream. (.getBytes alignment-gff))
              r (gff/reader bais)]
    (is (instance? GFFReader r))
    (is (= {:version 3, :major-revision 2, :minor-revision 1}
           (gff/version r))))
  ;; A malformed version directive must throw, with the offending text in ex-data.
  (with-open [bais (ByteArrayInputStream. (.getBytes "##"))]
    (is (thrown? Exception (gff/reader bais))))
  (with-open [bais (ByteArrayInputStream. (.getBytes "##"))]
    (is (= {:url nil, :version-directive "##"}
           (try (gff/reader bais) (catch Exception e (ex-data e))))))
  ;; Unsupported versions (only GFF3 is supported) must throw with the parsed version in ex-data.
  (with-open [bais (ByteArrayInputStream. (.getBytes "##gff-version 2"))]
    (is (thrown? Exception (gff/reader bais))))
  (with-open [bais (ByteArrayInputStream. (.getBytes "##gff-version 2"))]
    (is (= {:url nil, :version 2, :major-revision nil, :minor-revision nil}
           (try (gff/reader bais) (catch Exception e (ex-data e))))))
  ;; "%41" is a disallowed percent-encoding in GFF3; decoding must fail lazily
  ;; at read-features time, reporting the input field and the invalid sequence.
  (with-open [bais (ByteArrayInputStream. (.getBytes "##gff-version 3\nctg%41123\t.\t.\t1\t10\t.\t.\t.\t."))
              r (gff/reader bais)]
    (is (thrown-with-msg?
         Exception
         #"Found an invalid character encoding while decoding GFF3 file"
         (gff/read-features r))))
  (with-open [bais (ByteArrayInputStream. (.getBytes "##gff-version 3\nctg%41123\t.\t.\t1\t10\t.\t.\t.\t."))
              r (gff/reader bais)]
    (is (= {:input "ctg%41123", :invalid-string "%41"}
           (try (gff/read-features r) (catch Exception e (ex-data e)))))))
(deftest read-features
  ;; Each GFF3 fixture string must parse into its expected EDN counterpart.
  (are [?str ?edn]
       (= ?edn
          (with-open [bais (ByteArrayInputStream. (.getBytes ^String ?str))
                      r (gff/reader bais)]
            ;; doall realizes the lazy feature seq before the reader closes
            (doall (gff/read-features r))))
    simple-gff simple-edn
    nested-gff-1 nested-edn-1
    nested-gff-2 nested-edn-2
    discontinuous-gff discontinuous-edn
    example-gene-gff example-gene-edn
    circular-gff circular-edn
    gap-gff gap-edn
    alignment-gff alignment-edn
    alignment-multiple-gff alignment-multiple-edn
    alignment-reverse-gff alignment-reverse-edn
    alignment-group-gff alignment-group-edn
    encoding-gff encoding-edn))
(deftest read-features-from-file
  ;; Reading the on-disk fixture must yield the same features as `example-edn`.
  (with-open [r (gff/reader test-gff3-file)]
    (is (= example-edn
           (gff/read-features r)))))
(deftest writer
  ;; Writer construction with default and explicit version maps.
  (with-open [baos (ByteArrayOutputStream.)
              w (gff/writer baos)]
    (is (instance? GFFWriter w)))
  (with-open [baos (ByteArrayOutputStream.)
              w (gff/writer baos {:version 3})]
    (is (instance? GFFWriter w)))
  (with-open [baos (ByteArrayOutputStream.)
              w (gff/writer baos {:version 3, :major-revision 2, :minor-revision 1})]
    (is (instance? GFFWriter w)))
  ;; Unsupported versions must be rejected, with the version in ex-data.
  (with-open [baos (ByteArrayOutputStream.)]
    (is (thrown? Exception (gff/writer baos {:version 2}))))
  (with-open [baos (ByteArrayOutputStream.)]
    (is (= {:url nil, :version 2}
           (try (gff/writer baos {:version 2}) (catch Exception e (ex-data e))))))
  ;; :encoding :gzip must produce gzip output (checked via the 0x1f 0x8b magic bytes).
  (with-open [baos (ByteArrayOutputStream.)]
    (with-open [w (gff/writer baos {:version 3, :encoding :gzip})]
      (gff/write-features w simple-edn))
    (let [ba (.toByteArray baos)]
      ;; GZIP file header
      (is (= (unchecked-byte 0x1f) (aget ba 0)))
      (is (= (unchecked-byte 0x8b) (aget ba 1))))))
(deftest write-features
  ;; Round-trip: writing each EDN fixture must reproduce its GFF text,
  ;; modulo directive/comment lines which the writer does not emit.
  (are [?edn ?str]
       ;; ignore directives and comment lines
       (= (cstr/replace ?str #"(?<=\n)#.*?\n" "")
          (with-open [bais (ByteArrayInputStream. (.getBytes ^String ?str))
                      baos (ByteArrayOutputStream.)]
            ;; reuse the source file's version directive for the writer
            (let [v (with-open [r (gff/reader bais)]
                      (gff/version r))]
              (with-open [w (gff/writer baos v)]
                (gff/write-features w ?edn)))
            (str baos)))
    simple-edn simple-gff
    nested-edn-1 nested-gff-1
    nested-edn-2 nested-gff-2
    discontinuous-edn discontinuous-gff
    example-gene-edn example-gene-gff
    circular-edn circular-gff
    gap-edn gap-gff
    alignment-edn alignment-gff
    alignment-multiple-edn alignment-multiple-gff
    alignment-reverse-edn alignment-reverse-gff
    alignment-group-edn alignment-group-gff
    encoding-edn encoding-gff))
(deftest write-features-to-file
  ;; Writing to plain, gzip, and bzip2 paths (selected by extension) must
  ;; succeed and create the file.
  (with-before-after {:before (prepare-cache!)
                      :after (clean-cache!)}
    (let [f (cio/file temp-dir "gff-write.gff3")]
      (is (not-throw? (with-open [w (gff/writer f)]
                        (gff/write-features w simple-edn))))
      (is (.isFile f)))
    (let [f (cio/file temp-dir "gff-write.gff3.gz")]
      (is (not-throw? (with-open [w (gff/writer f)]
                        (gff/write-features w simple-edn))))
      (is (.isFile f)))
    (let [f (cio/file temp-dir "gff-write.gff3.bz2")]
      (is (not-throw? (with-open [w (gff/writer f)]
                        (gff/write-features w simple-edn))))
      (is (.isFile f)))))
(deftest source-type-test
  ;; reader/writer must accept multiple source types: path string, File, file URL,
  ;; and (for the reader) an HTTP URL served by the local test server.
  (testing "reader"
    (with-open [server (http-server)]
      (are [?x] (= example-edn
                   (with-open [r (gff/reader ?x)]
                     (doall (gff/read-features r))))
        test-gff3-file
        (cio/file test-gff3-file)
        (cio/as-url (cio/file test-gff3-file))
        (cio/as-url (str (:uri server) "/gff3/example.gff3")))))
  (testing "writer"
    (let [tmp-gff3-file (cio/file temp-dir "gff3-source-type-writer.gff3")]
      (are [?x] (with-before-after {:before (prepare-cache!)
                                    :after (clean-cache!)}
                  (with-open [w (gff/writer ?x)]
                    (not-throw? (gff/write-features w example-edn))))
        (.getCanonicalPath tmp-gff3-file)
        tmp-gff3-file
        (cio/as-url tmp-gff3-file)))))
;; NOTE(review): stray concatenation-artifact lines ("|", "34333", "|") commented
;; out — as bare top-level forms they would fail namespace loading.
;; |
;; 34333
;; |
;; Tests for cljam.io.gff: GFF3 reading, writing, and version/encoding handling.
(ns cljam.io.gff-test
  (:require [clojure.test :refer [deftest is are testing]]
            [clojure.string :as cstr]
            [clojure.java.io :as cio]
            [cljam.test-common :refer
             [with-before-after
              prepare-cache!
              clean-cache!
              not-throw?
              http-server
              temp-dir
              test-gff3-file]]
            [cljam.io.gff :as gff])
  (:import [java.io ByteArrayInputStream ByteArrayOutputStream]
           [cljam.io.gff GFFReader GFFWriter]))
(def ^:private ^String
  simple-gff
  ;; GFF3 fixture: five flat exon features, no parent/child links.
  ;; Tab column separators written as explicit escapes.
  (cstr/join
   "\n"
   ["##gff-version 3"
    "ctg123\t.\texon\t1300\t1500\t.\t+\t.\tID=exon00001"
    "ctg123\t.\texon\t1050\t1500\t.\t+\t.\tID=exon00002"
    "ctg123\t.\texon\t3000\t3902\t.\t+\t.\tID=exon00003"
    "ctg123\t.\texon\t5000\t5500\t.\t+\t.\tID=exon00004"
    "ctg123\t.\texon\t7000\t9000\t.\t+\t.\tID=exon00005"]))
(def ^:private
  simple-edn
  ;; Expected parse of `simple-gff`: one map per feature line.
  [{:chr "ctg123", :source nil, :type "exon", :start 1300, :end 1500, :score nil, :strand :forward, :phase nil, :attributes {:id "exon00001"}}
   {:chr "ctg123", :source nil, :type "exon", :start 1050, :end 1500, :score nil, :strand :forward, :phase nil, :attributes {:id "exon00002"}}
   {:chr "ctg123", :source nil, :type "exon", :start 3000, :end 3902, :score nil, :strand :forward, :phase nil, :attributes {:id "exon00003"}}
   {:chr "ctg123", :source nil, :type "exon", :start 5000, :end 5500, :score nil, :strand :forward, :phase nil, :attributes {:id "exon00004"}}
   {:chr "ctg123", :source nil, :type "exon", :start 7000, :end 9000, :score nil, :strand :forward, :phase nil, :attributes {:id "exon00005"}}])
(def ^:private ^String
  nested-gff-1
  ;; GFF3 fixture: one mRNA parent with five exon children (Parent=mrna0001).
  (cstr/join
   "\n"
   ["##gff-version 3"
    "ctg123\t.\tmRNA\t1300\t9000\t.\t+\t.\tID=mrna0001;Name=foobar"
    "ctg123\t.\texon\t1300\t1500\t.\t+\t.\tID=exon00001;Parent=mrna0001"
    "ctg123\t.\texon\t1050\t1500\t.\t+\t.\tID=exon00002;Parent=mrna0001"
    "ctg123\t.\texon\t3000\t3902\t.\t+\t.\tID=exon00003;Parent=mrna0001"
    "ctg123\t.\texon\t5000\t5500\t.\t+\t.\tID=exon00004;Parent=mrna0001"
    "ctg123\t.\texon\t7000\t9000\t.\t+\t.\tID=exon00005;Parent=mrna0001"]))
(def ^:private
  nested-edn-1
  ;; Expected parse of `nested-gff-1`: :parent attribute decoded as a vector of IDs.
  [{:chr "ctg123", :source nil, :type "mRNA", :start 1300, :end 9000, :score nil, :strand :forward, :phase nil, :attributes {:id "mrna0001", :name "foobar"}}
   {:chr "ctg123", :source nil, :type "exon", :start 1300, :end 1500, :score nil, :strand :forward, :phase nil, :attributes {:id "exon00001", :parent ["mrna0001"]}}
   {:chr "ctg123", :source nil, :type "exon", :start 1050, :end 1500, :score nil, :strand :forward, :phase nil, :attributes {:id "exon00002", :parent ["mrna0001"]}}
   {:chr "ctg123", :source nil, :type "exon", :start 3000, :end 3902, :score nil, :strand :forward, :phase nil, :attributes {:id "exon00003", :parent ["mrna0001"]}}
   {:chr "ctg123", :source nil, :type "exon", :start 5000, :end 5500, :score nil, :strand :forward, :phase nil, :attributes {:id "exon00004", :parent ["mrna0001"]}}
   {:chr "ctg123", :source nil, :type "exon", :start 7000, :end 9000, :score nil, :strand :forward, :phase nil, :attributes {:id "exon00005", :parent ["mrna0001"]}}])
(def ^:private ^String
  nested-gff-2
  ;; GFF3 fixture: two-level hierarchy (operon -> mRNA -> exon); exons have no IDs.
  (cstr/join
   "\n"
   ["##gff-version 3"
    "ctg123\t.\toperon\t1300\t15000\t.\t+\t.\tID=operon001;Name=Operon"
    "ctg123\t.\tmRNA\t1300\t9000\t.\t+\t.\tID=mrna0001;Parent=operon001;Name=foobar"
    "ctg123\t.\texon\t1300\t1500\t.\t+\t.\tParent=mrna0001"
    "ctg123\t.\texon\t1050\t1500\t.\t+\t.\tParent=mrna0001"
    "ctg123\t.\texon\t3000\t3902\t.\t+\t.\tParent=mrna0001"
    "ctg123\t.\texon\t5000\t5500\t.\t+\t.\tParent=mrna0001"
    "ctg123\t.\texon\t7000\t9000\t.\t+\t.\tParent=mrna0001"
    "ctg123\t.\tmRNA\t10000\t15000\t.\t+\t.\tID=mrna0002;Parent=operon001;Name=baz"
    "ctg123\t.\texon\t10000\t12000\t.\t+\t.\tParent=mrna0002"
    "ctg123\t.\texon\t14000\t15000\t.\t+\t.\tParent=mrna0002"]))
(def ^:private
  nested-edn-2
  ;; Expected parse of `nested-gff-2`: ID-less exons carry only :parent in :attributes.
  [{:chr "ctg123", :source nil, :type "operon", :start 1300, :end 15000, :score nil, :strand :forward, :phase nil, :attributes {:id "operon001", :name "Operon"}}
   {:chr "ctg123", :source nil, :type "mRNA", :start 1300, :end 9000, :score nil, :strand :forward, :phase nil, :attributes {:id "mrna0001", :parent ["operon001"], :name "foobar"}}
   {:chr "ctg123", :source nil, :type "exon", :start 1300, :end 1500, :score nil, :strand :forward, :phase nil, :attributes {:parent ["mrna0001"]}}
   {:chr "ctg123", :source nil, :type "exon", :start 1050, :end 1500, :score nil, :strand :forward, :phase nil, :attributes {:parent ["mrna0001"]}}
   {:chr "ctg123", :source nil, :type "exon", :start 3000, :end 3902, :score nil, :strand :forward, :phase nil, :attributes {:parent ["mrna0001"]}}
   {:chr "ctg123", :source nil, :type "exon", :start 5000, :end 5500, :score nil, :strand :forward, :phase nil, :attributes {:parent ["mrna0001"]}}
   {:chr "ctg123", :source nil, :type "exon", :start 7000, :end 9000, :score nil, :strand :forward, :phase nil, :attributes {:parent ["mrna0001"]}}
   {:chr "ctg123", :source nil, :type "mRNA", :start 10000, :end 15000, :score nil, :strand :forward, :phase nil, :attributes {:id "mrna0002", :parent ["operon001"], :name "baz"}}
   {:chr "ctg123", :source nil, :type "exon", :start 10000, :end 12000, :score nil, :strand :forward, :phase nil, :attributes {:parent ["mrna0002"]}}
   {:chr "ctg123", :source nil, :type "exon", :start 14000, :end 15000, :score nil, :strand :forward, :phase nil, :attributes {:parent ["mrna0002"]}}])
(def ^:private ^String
  discontinuous-gff
  ;; GFF3 fixture: a discontinuous feature — five match lines sharing ID=match001.
  (cstr/join
   "\n"
   ["##gff-version 3"
    "ctg123\texample\tmatch\t26122\t26126\t.\t+\t.\tID=match001"
    "ctg123\texample\tmatch\t26497\t26869\t.\t+\t.\tID=match001"
    "ctg123\texample\tmatch\t27201\t27325\t.\t+\t.\tID=match001"
    "ctg123\texample\tmatch\t27372\t27433\t.\t+\t.\tID=match001"
    "ctg123\texample\tmatch\t27565\t27565\t.\t+\t.\tID=match001"]))
(def ^:private
  discontinuous-edn
  ;; Expected parse of `discontinuous-gff`: five maps, all with :id "match001" and :source "example".
  [{:chr "ctg123", :source "example", :type "match", :start 26122, :end 26126, :score nil, :strand :forward, :phase nil, :attributes {:id "match001"}}
   {:chr "ctg123", :source "example", :type "match", :start 26497, :end 26869, :score nil, :strand :forward, :phase nil, :attributes {:id "match001"}}
   {:chr "ctg123", :source "example", :type "match", :start 27201, :end 27325, :score nil, :strand :forward, :phase nil, :attributes {:id "match001"}}
   {:chr "ctg123", :source "example", :type "match", :start 27372, :end 27433, :score nil, :strand :forward, :phase nil, :attributes {:id "match001"}}
   {:chr "ctg123", :source "example", :type "match", :start 27565, :end 27565, :score nil, :strand :forward, :phase nil, :attributes {:id "match001"}}])
(def ^:private ^String
  example-gene-gff
  ;; GFF3 fixture: the canonical EDEN gene example — gene -> mRNA x3 -> exons/CDSs,
  ;; with multi-valued Parent attributes and CDS phase columns.
  (cstr/join
   "\n"
   ["##gff-version 3.2.1"
    "##sequence-region ctg123 1 1497228"
    "ctg123\t.\tgene\t1000\t9000\t.\t+\t.\tID=gene00001;Name=EDEN"
    "ctg123\t.\tTF_binding_site\t1000\t1012\t.\t+\t.\tID=tfbs00001;Parent=gene00001"
    "ctg123\t.\tmRNA\t1050\t9000\t.\t+\t.\tID=mRNA00001;Parent=gene00001;Name=EDEN.1"
    "ctg123\t.\tmRNA\t1050\t9000\t.\t+\t.\tID=mRNA00002;Parent=gene00001;Name=EDEN.2"
    "ctg123\t.\tmRNA\t1300\t9000\t.\t+\t.\tID=mRNA00003;Parent=gene00001;Name=EDEN.3"
    "ctg123\t.\texon\t1300\t1500\t.\t+\t.\tID=exon00001;Parent=mRNA00003"
    "ctg123\t.\texon\t1050\t1500\t.\t+\t.\tID=exon00002;Parent=mRNA00001,mRNA00002"
    "ctg123\t.\texon\t3000\t3902\t.\t+\t.\tID=exon00003;Parent=mRNA00001,mRNA00003"
    "ctg123\t.\texon\t5000\t5500\t.\t+\t.\tID=exon00004;Parent=mRNA00001,mRNA00002,mRNA00003"
    "ctg123\t.\texon\t7000\t9000\t.\t+\t.\tID=exon00005;Parent=mRNA00001,mRNA00002,mRNA00003"
    "ctg123\t.\tCDS\t1201\t1500\t.\t+\t0\tID=cds00001;Parent=mRNA00001;Name=edenprotein.1"
    "ctg123\t.\tCDS\t3000\t3902\t.\t+\t0\tID=cds00001;Parent=mRNA00001;Name=edenprotein.1"
    "ctg123\t.\tCDS\t5000\t5500\t.\t+\t0\tID=cds00001;Parent=mRNA00001;Name=edenprotein.1"
    "ctg123\t.\tCDS\t7000\t7600\t.\t+\t0\tID=cds00001;Parent=mRNA00001;Name=edenprotein.1"
    "ctg123\t.\tCDS\t1201\t1500\t.\t+\t0\tID=cds00002;Parent=mRNA00002;Name=edenprotein.2"
    "ctg123\t.\tCDS\t5000\t5500\t.\t+\t0\tID=cds00002;Parent=mRNA00002;Name=edenprotein.2"
    "ctg123\t.\tCDS\t7000\t7600\t.\t+\t0\tID=cds00002;Parent=mRNA00002;Name=edenprotein.2"
    "ctg123\t.\tCDS\t3301\t3902\t.\t+\t0\tID=cds00003;Parent=mRNA00003;Name=edenprotein.3"
    "ctg123\t.\tCDS\t5000\t5500\t.\t+\t1\tID=cds00003;Parent=mRNA00003;Name=edenprotein.3"
    "ctg123\t.\tCDS\t7000\t7600\t.\t+\t1\tID=cds00003;Parent=mRNA00003;Name=edenprotein.3"
    "ctg123\t.\tCDS\t3391\t3902\t.\t+\t0\tID=cds00004;Parent=mRNA00003;Name=edenprotein.4"
    "ctg123\t.\tCDS\t5000\t5500\t.\t+\t1\tID=cds00004;Parent=mRNA00003;Name=edenprotein.4"
    "ctg123\t.\tCDS\t7000\t7600\t.\t+\t1\tID=cds00004;Parent=mRNA00003;Name=edenprotein.4"]))
(def ^:private
  example-gene-edn
  ;; Expected parse of `example-gene-gff`.
  ;; Fix: the gene's :name was an anonymization placeholder "<NAME>"; the fixture
  ;; GFF line reads "ID=gene00001;Name=EDEN", so the decoded value must be "EDEN".
  [{:chr "ctg123", :source nil, :type "gene", :start 1000, :end 9000, :score nil, :strand :forward, :phase nil, :attributes {:id "gene00001", :name "EDEN"}}
   {:chr "ctg123", :source nil, :type "TF_binding_site", :start 1000, :end 1012, :score nil, :strand :forward, :phase nil, :attributes {:id "tfbs00001", :parent ["gene00001"]}}
   {:chr "ctg123", :source nil, :type "mRNA", :start 1050, :end 9000, :score nil, :strand :forward, :phase nil, :attributes {:id "mRNA00001", :parent ["gene00001"], :name "EDEN.1"}}
   {:chr "ctg123", :source nil, :type "mRNA", :start 1050, :end 9000, :score nil, :strand :forward, :phase nil, :attributes {:id "mRNA00002", :parent ["gene00001"], :name "EDEN.2"}}
   {:chr "ctg123", :source nil, :type "mRNA", :start 1300, :end 9000, :score nil, :strand :forward, :phase nil, :attributes {:id "mRNA00003", :parent ["gene00001"], :name "EDEN.3"}}
   {:chr "ctg123", :source nil, :type "exon", :start 1300, :end 1500, :score nil, :strand :forward, :phase nil, :attributes {:id "exon00001", :parent ["mRNA00003"]}}
   {:chr "ctg123", :source nil, :type "exon", :start 1050, :end 1500, :score nil, :strand :forward, :phase nil, :attributes {:id "exon00002", :parent ["mRNA00001" "mRNA00002"]}}
   {:chr "ctg123", :source nil, :type "exon", :start 3000, :end 3902, :score nil, :strand :forward, :phase nil, :attributes {:id "exon00003", :parent ["mRNA00001" "mRNA00003"]}}
   {:chr "ctg123", :source nil, :type "exon", :start 5000, :end 5500, :score nil, :strand :forward, :phase nil, :attributes {:id "exon00004", :parent ["mRNA00001" "mRNA00002" "mRNA00003"]}}
   {:chr "ctg123", :source nil, :type "exon", :start 7000, :end 9000, :score nil, :strand :forward, :phase nil, :attributes {:id "exon00005", :parent ["mRNA00001" "mRNA00002" "mRNA00003"]}}
   {:chr "ctg123", :source nil, :type "CDS", :start 1201, :end 1500, :score nil, :strand :forward, :phase 0, :attributes {:id "cds00001", :parent ["mRNA00001"], :name "edenprotein.1"}}
   {:chr "ctg123", :source nil, :type "CDS", :start 3000, :end 3902, :score nil, :strand :forward, :phase 0, :attributes {:id "cds00001", :parent ["mRNA00001"], :name "edenprotein.1"}}
   {:chr "ctg123", :source nil, :type "CDS", :start 5000, :end 5500, :score nil, :strand :forward, :phase 0, :attributes {:id "cds00001", :parent ["mRNA00001"], :name "edenprotein.1"}}
   {:chr "ctg123", :source nil, :type "CDS", :start 7000, :end 7600, :score nil, :strand :forward, :phase 0, :attributes {:id "cds00001", :parent ["mRNA00001"], :name "edenprotein.1"}}
   {:chr "ctg123", :source nil, :type "CDS", :start 1201, :end 1500, :score nil, :strand :forward, :phase 0, :attributes {:id "cds00002", :parent ["mRNA00002"], :name "edenprotein.2"}}
   {:chr "ctg123", :source nil, :type "CDS", :start 5000, :end 5500, :score nil, :strand :forward, :phase 0, :attributes {:id "cds00002", :parent ["mRNA00002"], :name "edenprotein.2"}}
   {:chr "ctg123", :source nil, :type "CDS", :start 7000, :end 7600, :score nil, :strand :forward, :phase 0, :attributes {:id "cds00002", :parent ["mRNA00002"], :name "edenprotein.2"}}
   {:chr "ctg123", :source nil, :type "CDS", :start 3301, :end 3902, :score nil, :strand :forward, :phase 0, :attributes {:id "cds00003", :parent ["mRNA00003"], :name "edenprotein.3"}}
   {:chr "ctg123", :source nil, :type "CDS", :start 5000, :end 5500, :score nil, :strand :forward, :phase 1, :attributes {:id "cds00003", :parent ["mRNA00003"], :name "edenprotein.3"}}
   {:chr "ctg123", :source nil, :type "CDS", :start 7000, :end 7600, :score nil, :strand :forward, :phase 1, :attributes {:id "cds00003", :parent ["mRNA00003"], :name "edenprotein.3"}}
   {:chr "ctg123", :source nil, :type "CDS", :start 3391, :end 3902, :score nil, :strand :forward, :phase 0, :attributes {:id "cds00004", :parent ["mRNA00003"], :name "edenprotein.4"}}
   {:chr "ctg123", :source nil, :type "CDS", :start 5000, :end 5500, :score nil, :strand :forward, :phase 1, :attributes {:id "cds00004", :parent ["mRNA00003"], :name "edenprotein.4"}}
   {:chr "ctg123", :source nil, :type "CDS", :start 7000, :end 7600, :score nil, :strand :forward, :phase 1, :attributes {:id "cds00004", :parent ["mRNA00003"], :name "edenprotein.4"}}])
(def ^:private ^String
  circular-gff
  ;; GFF3 fixture: circular genome region (Is_circular=true) whose CDS wraps past
  ;; the sequence end. Comment lines ("# ...") are ignored by the parser.
  (cstr/join
   "\n"
   ["##gff-version 3.2.1"
    "# organism Enterobacteria phage f1"
    "# Note Bacteriophage f1, complete genome."
    "J02448\tGenBank\tregion\t1\t6407\t.\t+\t.\tID=J02448;Name=J02448;Is_circular=true"
    "J02448\tGenBank\tCDS\t6006\t7238\t.\t+\t0\tID=geneII;Name=II;Note=protein II"]))
(def ^:private
  circular-edn
  ;; Expected parse of `circular-gff`: Is_circular=true decoded as :circular? true,
  ;; Note decoded into a vector under :note.
  [{:chr "J02448", :source "GenBank", :type "region", :start 1, :end 6407, :score nil, :strand :forward, :phase nil, :attributes {:id "J02448", :name "J02448", :circular? true}}
   {:chr "J02448", :source "GenBank", :type "CDS", :start 6006, :end 7238, :score nil, :strand :forward, :phase 0, :attributes {:id "geneII", :name "II", :note ["protein II"]}}])
(def ^:private ^String
  gap-gff
  ;; GFF3 fixture: Gap attribute in CIGAR-like form (M/D/I ops) on nucleotide
  ;; and nucleotide-to-protein matches.
  (cstr/join
   "\n"
   ["##gff-version 3.2.1"
    "chr3\t.\tMatch\t1\t23\t.\t.\t.\tID=Match1;Target=EST23 1 21;Gap=M8 D3 M6 I1 M6"
    "ctg123\t.\tnucleotide_to_protein\t100\t129\t.\t+\t.\tID=match008;Target=p101 1 10;Gap=M3 I1 M2 D1 M4"]))
(def ^:private
  gap-edn
  ;; Expected parse of `gap-gff`: :gap decoded as [op length] pairs, op as a character.
  [{:chr "chr3", :source nil, :type "Match", :start 1, :end 23, :score nil, :strand nil, :phase nil,
    :attributes {:id "Match1", :target {:chr "EST23", :start 1, :end 21}, :gap [[\M 8] [\D 3] [\M 6] [\I 1] [\M 6]]}}
   {:chr "ctg123", :source nil, :type "nucleotide_to_protein", :start 100, :end 129, :score nil, :strand :forward, :phase nil,
    :attributes {:id "match008", :target {:chr "p101", :start 1, :end 10}, :gap [[\M 3] [\I 1] [\M 2] [\D 1] [\M 4]]}}])
(def ^:private ^String
  alignment-gff
  ;; GFF3 fixture: one cDNA_match feature carrying Target and Gap attributes.
  ;; Columns are tab-delimited per the GFF3 spec; escapes make the tabs explicit.
  (cstr/join
   "\n"
   ["##gff-version 3.2.1"
    "ctg123\t.\tcDNA_match\t1050\t9000\t6.2e-45\t+\t.\tID=match00001;Target=cdna0123 12 2964;Gap=M451 D3499 M501 D1499 M2001"]))
(def ^:private
  alignment-edn
  ;; Expected parse of `alignment-gff`: :target decoded into a map, :gap into [op length] pairs.
  [{:chr "ctg123", :source nil, :type "cDNA_match", :start 1050, :end 9000, :score 6.2e-45, :strand :forward, :phase nil,
    :attributes {:id "match00001", :target {:chr "cdna0123", :start 12, :end 2964}, :gap [[\M 451] [\D 3499] [\M 501] [\D 1499] [\M 2001]]}}])
(def ^:private ^String
  alignment-multiple-gff
  ;; GFF3 fixture: a discontinuous alignment split over three cDNA_match lines
  ;; sharing one ID. Tab column separators written as explicit escapes.
  (cstr/join
   "\n"
   ["##gff-version 3.2.1"
    "ctg123\t.\tcDNA_match\t1050\t1500\t5.8e-42\t+\t.\tID=match00001;Target=cdna0123 12 462"
    "ctg123\t.\tcDNA_match\t5000\t5500\t8.1e-43\t+\t.\tID=match00001;Target=cdna0123 463 963"
    "ctg123\t.\tcDNA_match\t7000\t9000\t1.4e-40\t+\t.\tID=match00001;Target=cdna0123 964 2964"]))
(def ^:private
  alignment-multiple-edn
  ;; Expected parse of `alignment-multiple-gff`: three features with the same :id "match00001".
  [{:chr "ctg123", :source nil, :type "cDNA_match", :start 1050, :end 1500, :score 5.8e-42, :strand :forward, :phase nil, :attributes {:id "match00001", :target {:chr "cdna0123", :start 12, :end 462}}}
   {:chr "ctg123", :source nil, :type "cDNA_match", :start 5000, :end 5500, :score 8.1e-43, :strand :forward, :phase nil, :attributes {:id "match00001", :target {:chr "cdna0123", :start 463, :end 963}}}
   {:chr "ctg123", :source nil, :type "cDNA_match", :start 7000, :end 9000, :score 1.4e-40, :strand :forward, :phase nil, :attributes {:id "match00001", :target {:chr "cdna0123", :start 964, :end 2964}}}])
(def ^:private ^String
  alignment-reverse-gff
  ;; GFF3 fixture: forward- and reverse-strand EST_match features with Gap attributes.
  (cstr/join
   "\n"
   ["##gff-version 3.2.1"
    "ctg123\t.\tEST_match\t1200\t3200\t2.2e-30\t+\t.\tID=match00002;Target=mjm1123.5 5 506;Gap=M301 D1499 M201"
    "ctg123\t.\tEST_match\t7000\t9000\t7.4e-32\t-\t.\tID=match00003;Target=mjm1123.3 1 502;Gap=M101 D1499 M401"]))
(def ^:private
  alignment-reverse-edn
  ;; Expected parse of `alignment-reverse-gff`: note :strand :reverse on the second feature.
  [{:chr "ctg123", :source nil, :type "EST_match", :start 1200, :end 3200, :score 2.2e-30, :strand :forward, :phase nil,
    :attributes {:id "match00002", :target {:chr "mjm1123.5", :start 5, :end 506}, :gap [[\M 301] [\D 1499] [\M 201]]}}
   {:chr "ctg123", :source nil, :type "EST_match", :start 7000, :end 9000, :score 7.4e-32, :strand :reverse, :phase nil,
    :attributes {:id "match00003", :target {:chr "mjm1123.3", :start 1, :end 502}, :gap [[\M 101] [\D 1499] [\M 401]]}}])
(def ^:private ^String
  alignment-group-gff
  ;; GFF3 fixture: two match_part children grouped under a cDNA_match parent via Parent=.
  (cstr/join
   "\n"
   ["##gff-version 3.2.1"
    "ctg123\t.\tcDNA_match\t1200\t9000\t.\t.\t.\tID=cDNA00001"
    "ctg123\t.\tmatch_part\t1200\t3200\t2.2e-30\t+\t.\tID=match00002;Parent=cDNA00001;Target=mjm1123.5 5 506;Gap=M301 D1499 M201"
    "ctg123\t.\tmatch_part\t7000\t9000\t7.4e-32\t-\t.\tID=match00003;Parent=cDNA00001;Target=mjm1123.3 1 502;Gap=M101 D1499 M401"]))
(def ^:private
  alignment-group-edn
  ;; Expected parse of `alignment-group-gff`: children carry :parent ["cDNA00001"].
  [{:chr "ctg123", :source nil, :type "cDNA_match", :start 1200, :end 9000, :score nil, :strand nil, :phase nil,
    :attributes {:id "cDNA00001"}}
   {:chr "ctg123", :source nil, :type "match_part", :start 1200, :end 3200, :score 2.2e-30, :strand :forward, :phase nil,
    :attributes {:id "match00002", :parent ["cDNA00001"], :target {:chr "mjm1123.5", :start 5, :end 506}, :gap [[\M 301] [\D 1499] [\M 201]]}}
   {:chr "ctg123", :source nil, :type "match_part", :start 7000, :end 9000, :score 7.4e-32, :strand :reverse, :phase nil,
    :attributes {:id "match00003", :parent ["cDNA00001"], :target {:chr "mjm1123.3", :start 1, :end 502}, :gap [[\M 101] [\D 1499] [\M 401]]}}])
(def ^:private ^String
  encoding-gff
  ;; GFF3 fixture exercising percent-encoding/decoding rules: raw vs %-encoded
  ;; separators in seqid/source/type, encoded spaces/commas in attribute values,
  ;; and the full printable-ASCII gamut in column 1 and in ID/Target values.
  (cstr/join
   "\n"
   ["##gff-version 3.2"
    "ch r;1\tsour =ce\tty &p,e\t1\t10\t9.0\t?\t.\t."
    "chr%253B1\tsour%253Dce\tty%2526p%252Ce\t1\t10\t.\t+\t.\tTarget=Foo%20Bar 1 10 +;Dbxref=EMBL:AA816246,NCBI_gi:10727410;Foo=Bar%2C,Baz "
    " !\"#$%25&'%09()*+,-./%0A0123456789:;<=>?@[\\]^_`{|}~\t.\ttype\t1\t10\t.\t.\t.\tID= !\"#$%25%26'%09()*+%2C-./%0A0123456789:%3B<%3D>?@[\\]^_`{|}~;Target=%20!\"#$%25%26'%09()*+%2C-./%0A0123456789:%3B<%3D>?@[\\]^_`{|}~ 1 10 -"]))
(def ^:private
  encoding-edn
  ;; Expected parse of `encoding-gff`: %XX sequences decoded once (so "%25" -> "%",
  ;; "%253B" -> "%3B"), Dbxref split into maps, repeated attribute values into vectors.
  [{:chr "ch r;1", :source "sour =ce", :type "ty &p,e", :start 1, :end 10, :score 9.0, :strand :unknown, :phase nil, :attributes {}}
   {:chr "chr%3B1", :source "sour%3Dce", :type "ty%26p%2Ce", :start 1, :end 10, :score nil, :strand :forward, :phase nil,
    :attributes {:target {:chr "Foo Bar", :start 1, :end 10, :strand :forward}, :db-xref [{:db-tag "EMBL", :id "AA816246"}, {:db-tag "NCBI_gi", :id "10727410"}], "Foo" ["Bar," "Baz "]}}
   {:chr " !\"#$%&'\t()*+,-./\n0123456789:;<=>?@[\\]^_`{|}~", :source nil, :type "type", :start 1, :end 10, :score nil, :strand nil, :phase nil,
    :attributes {:id " !\"#$%&'\t()*+,-./\n0123456789:;<=>?@[\\]^_`{|}~",
                 :target {:chr " !\"#$%&'\t()*+,-./\n0123456789:;<=>?@[\\]^_`{|}~", :start 1, :end 10, :strand :reverse}}}])
(def ^:private
  example-edn
  ;; Expected features of the on-disk fixture `test-gff3-file`.
  ;; Fix: the gene's :name was an anonymization placeholder "<NAME>"; the gene
  ;; feature's Name attribute is "EDEN" (cf. its mRNA child named "EDEN.1").
  [{:chr "ctg123", :source nil, :type "gene", :start 1000, :end 9000, :score nil, :strand :forward, :phase nil,
    :attributes {:id "gene00001", :name "EDEN"}}
   {:chr "ctg123", :source nil, :type "TF_binding_site", :start 1000, :end 1012, :score nil, :strand :forward, :phase nil,
    :attributes {:id "tfbs00001", :parent ["gene00001"]}}
   {:chr "ctg123", :source nil, :type "mRNA", :start 1050, :end 9000, :score nil, :strand :forward, :phase nil,
    :attributes {:id "mRNA00001", :parent ["gene00001"], :name "EDEN.1"}}
   {:chr "ctg123", :source nil, :type "five_prime_UTR", :start 1050, :end 1200, :score nil, :strand :forward, :phase nil,
    :attributes {:parent ["mRNA00001"]}}
   {:chr "ctg123", :source nil, :type "CDS", :start 1201, :end 1500, :score nil, :strand :forward, :phase 0,
    :attributes {:id "cds00001", :parent ["mRNA00001"]}}
   {:chr "ctg123", :source nil, :type "CDS", :start 3000, :end 3902, :score nil, :strand :forward, :phase 0,
    :attributes {:id "cds00001", :parent ["mRNA00001"]}}
   {:chr "ctg123", :source nil, :type "CDS", :start 5000, :end 5500, :score nil, :strand :forward, :phase 0,
    :attributes {:id "cds00001", :parent ["mRNA00001"]}}
   {:chr "ctg123", :source nil, :type "CDS", :start 7000, :end 7600, :score nil, :strand :forward, :phase 0,
    :attributes {:id "cds00001", :parent ["mRNA00001"]}}
   {:chr "ctg123", :source nil, :type "three_prime_UTR", :start 7601, :end 9000, :score nil, :strand :forward, :phase nil,
    :attributes {:parent ["mRNA00001"]}}
   {:chr "ctg123", :source nil, :type "cDNA_match", :start 1050, :end 1500, :score 5.8e-42, :strand :forward, :phase nil,
    :attributes {:id "match00001", :target {:chr "cdna0123", :start 12, :end 462}}}
   {:chr "ctg123", :source nil, :type "cDNA_match", :start 5000, :end 5500, :score 8.1e-43, :strand :forward, :phase nil,
    :attributes {:id "match00001", :target {:chr "cdna0123", :start 463, :end 963}}}
   {:chr "ctg123", :source nil, :type "cDNA_match", :start 7000, :end 9000, :score 1.4e-40, :strand :forward, :phase nil,
    :attributes {:id "match00001", :target {:chr "cdna0123", :start 964, :end 2964}}}])
(deftest reader
  ;; Version-directive handling: "##gff-version 3" -> nil revisions,
  ;; "##gff-version 3.2.1" -> major/minor revisions populated.
  (with-open [bais (ByteArrayInputStream. (.getBytes simple-gff))
              r (gff/reader bais)]
    (is (instance? GFFReader r))
    (is (= {:version 3, :major-revision nil, :minor-revision nil}
           (gff/version r))))
  (with-open [bais (ByteArrayInputStream. (.getBytes alignment-gff))
              r (gff/reader bais)]
    (is (instance? GFFReader r))
    (is (= {:version 3, :major-revision 2, :minor-revision 1}
           (gff/version r))))
  ;; A malformed version directive must throw, with the offending text in ex-data.
  (with-open [bais (ByteArrayInputStream. (.getBytes "##"))]
    (is (thrown? Exception (gff/reader bais))))
  (with-open [bais (ByteArrayInputStream. (.getBytes "##"))]
    (is (= {:url nil, :version-directive "##"}
           (try (gff/reader bais) (catch Exception e (ex-data e))))))
  ;; Unsupported versions (only GFF3 is supported) must throw with the parsed version in ex-data.
  (with-open [bais (ByteArrayInputStream. (.getBytes "##gff-version 2"))]
    (is (thrown? Exception (gff/reader bais))))
  (with-open [bais (ByteArrayInputStream. (.getBytes "##gff-version 2"))]
    (is (= {:url nil, :version 2, :major-revision nil, :minor-revision nil}
           (try (gff/reader bais) (catch Exception e (ex-data e))))))
  ;; "%41" is a disallowed percent-encoding in GFF3; decoding must fail lazily
  ;; at read-features time, reporting the input field and the invalid sequence.
  (with-open [bais (ByteArrayInputStream. (.getBytes "##gff-version 3\nctg%41123\t.\t.\t1\t10\t.\t.\t.\t."))
              r (gff/reader bais)]
    (is (thrown-with-msg?
         Exception
         #"Found an invalid character encoding while decoding GFF3 file"
         (gff/read-features r))))
  (with-open [bais (ByteArrayInputStream. (.getBytes "##gff-version 3\nctg%41123\t.\t.\t1\t10\t.\t.\t.\t."))
              r (gff/reader bais)]
    (is (= {:input "ctg%41123", :invalid-string "%41"}
           (try (gff/read-features r) (catch Exception e (ex-data e)))))))
(deftest read-features
  ;; Each GFF3 fixture string must parse into its expected EDN counterpart.
  (are [?str ?edn]
       (= ?edn
          (with-open [bais (ByteArrayInputStream. (.getBytes ^String ?str))
                      r (gff/reader bais)]
            ;; doall realizes the lazy feature seq before the reader closes
            (doall (gff/read-features r))))
    simple-gff simple-edn
    nested-gff-1 nested-edn-1
    nested-gff-2 nested-edn-2
    discontinuous-gff discontinuous-edn
    example-gene-gff example-gene-edn
    circular-gff circular-edn
    gap-gff gap-edn
    alignment-gff alignment-edn
    alignment-multiple-gff alignment-multiple-edn
    alignment-reverse-gff alignment-reverse-edn
    alignment-group-gff alignment-group-edn
    encoding-gff encoding-edn))
(deftest read-features-from-file
(with-open [r (gff/reader test-gff3-file)]
(is (= example-edn
(gff/read-features r)))))
(deftest writer
(with-open [baos (ByteArrayOutputStream.)
w (gff/writer baos)]
(is (instance? GFFWriter w)))
(with-open [baos (ByteArrayOutputStream.)
w (gff/writer baos {:version 3})]
(is (instance? GFFWriter w)))
(with-open [baos (ByteArrayOutputStream.)
w (gff/writer baos {:version 3, :major-revision 2, :minor-revision 1})]
(is (instance? GFFWriter w)))
(with-open [baos (ByteArrayOutputStream.)]
(is (thrown? Exception (gff/writer baos {:version 2}))))
(with-open [baos (ByteArrayOutputStream.)]
(is (= {:url nil, :version 2}
(try (gff/writer baos {:version 2}) (catch Exception e (ex-data e))))))
(with-open [baos (ByteArrayOutputStream.)]
(with-open [w (gff/writer baos {:version 3, :encoding :gzip})]
(gff/write-features w simple-edn))
(let [ba (.toByteArray baos)]
;; GZIP file header
(is (= (unchecked-byte 0x1f) (aget ba 0)))
(is (= (unchecked-byte 0x8b) (aget ba 1))))))
(deftest write-features
(are [?edn ?str]
;; ignore directives and comment lines
(= (cstr/replace ?str #"(?<=\n)#.*?\n" "")
(with-open [bais (ByteArrayInputStream. (.getBytes ^String ?str))
baos (ByteArrayOutputStream.)]
(let [v (with-open [r (gff/reader bais)]
(gff/version r))]
(with-open [w (gff/writer baos v)]
(gff/write-features w ?edn)))
(str baos)))
simple-edn simple-gff
nested-edn-1 nested-gff-1
nested-edn-2 nested-gff-2
discontinuous-edn discontinuous-gff
example-gene-edn example-gene-gff
circular-edn circular-gff
gap-edn gap-gff
alignment-edn alignment-gff
alignment-multiple-edn alignment-multiple-gff
alignment-reverse-edn alignment-reverse-gff
alignment-group-edn alignment-group-gff
encoding-edn encoding-gff))
(deftest write-features-to-file
(with-before-after {:before (prepare-cache!)
:after (clean-cache!)}
(let [f (cio/file temp-dir "gff-write.gff3")]
(is (not-throw? (with-open [w (gff/writer f)]
(gff/write-features w simple-edn))))
(is (.isFile f)))
(let [f (cio/file temp-dir "gff-write.gff3.gz")]
(is (not-throw? (with-open [w (gff/writer f)]
(gff/write-features w simple-edn))))
(is (.isFile f)))
(let [f (cio/file temp-dir "gff-write.gff3.bz2")]
(is (not-throw? (with-open [w (gff/writer f)]
(gff/write-features w simple-edn))))
(is (.isFile f)))))
(deftest source-type-test
(testing "reader"
(with-open [server (http-server)]
(are [?x] (= example-edn
(with-open [r (gff/reader ?x)]
(doall (gff/read-features r))))
test-gff3-file
(cio/file test-gff3-file)
(cio/as-url (cio/file test-gff3-file))
(cio/as-url (str (:uri server) "/gff3/example.gff3")))))
(testing "writer"
(let [tmp-gff3-file (cio/file temp-dir "gff3-source-type-writer.gff3")]
(are [?x] (with-before-after {:before (prepare-cache!)
:after (clean-cache!)}
(with-open [w (gff/writer ?x)]
(not-throw? (gff/write-features w example-edn))))
(.getCanonicalPath tmp-gff3-file)
tmp-gff3-file
(cio/as-url tmp-gff3-file)))))
| true |
(ns cljam.io.gff-test
(:require [clojure.test :refer [deftest is are testing]]
[clojure.string :as cstr]
[clojure.java.io :as cio]
[cljam.test-common :refer
[with-before-after
prepare-cache!
clean-cache!
not-throw?
http-server
temp-dir
test-gff3-file]]
[cljam.io.gff :as gff])
(:import [java.io ByteArrayInputStream ByteArrayOutputStream]
[cljam.io.gff GFFReader GFFWriter]))
(def ^:private ^String
simple-gff
(->> ["##gff-version 3"
"ctg123 . exon 1300 1500 . + . ID=exon00001"
"ctg123 . exon 1050 1500 . + . ID=exon00002"
"ctg123 . exon 3000 3902 . + . ID=exon00003"
"ctg123 . exon 5000 5500 . + . ID=exon00004"
"ctg123 . exon 7000 9000 . + . ID=exon00005"]
(cstr/join \newline)))
(def ^:private
simple-edn
[{:chr "ctg123", :source nil, :type "exon", :start 1300, :end 1500, :score nil, :strand :forward, :phase nil, :attributes {:id "exon00001"}}
{:chr "ctg123", :source nil, :type "exon", :start 1050, :end 1500, :score nil, :strand :forward, :phase nil, :attributes {:id "exon00002"}}
{:chr "ctg123", :source nil, :type "exon", :start 3000, :end 3902, :score nil, :strand :forward, :phase nil, :attributes {:id "exon00003"}}
{:chr "ctg123", :source nil, :type "exon", :start 5000, :end 5500, :score nil, :strand :forward, :phase nil, :attributes {:id "exon00004"}}
{:chr "ctg123", :source nil, :type "exon", :start 7000, :end 9000, :score nil, :strand :forward, :phase nil, :attributes {:id "exon00005"}}])
(def ^:private ^String
nested-gff-1
(->> ["##gff-version 3"
"ctg123 . mRNA 1300 9000 . + . ID=mrna0001;Name=foobar"
"ctg123 . exon 1300 1500 . + . ID=exon00001;Parent=mrna0001"
"ctg123 . exon 1050 1500 . + . ID=exon00002;Parent=mrna0001"
"ctg123 . exon 3000 3902 . + . ID=exon00003;Parent=mrna0001"
"ctg123 . exon 5000 5500 . + . ID=exon00004;Parent=mrna0001"
"ctg123 . exon 7000 9000 . + . ID=exon00005;Parent=mrna0001"]
(cstr/join \newline)))
(def ^:private
nested-edn-1
[{:chr "ctg123", :source nil, :type "mRNA", :start 1300, :end 9000, :score nil, :strand :forward, :phase nil, :attributes {:id "mrna0001", :name "foobar"}}
{:chr "ctg123", :source nil, :type "exon", :start 1300, :end 1500, :score nil, :strand :forward, :phase nil, :attributes {:id "exon00001", :parent ["mrna0001"]}}
{:chr "ctg123", :source nil, :type "exon", :start 1050, :end 1500, :score nil, :strand :forward, :phase nil, :attributes {:id "exon00002", :parent ["mrna0001"]}}
{:chr "ctg123", :source nil, :type "exon", :start 3000, :end 3902, :score nil, :strand :forward, :phase nil, :attributes {:id "exon00003", :parent ["mrna0001"]}}
{:chr "ctg123", :source nil, :type "exon", :start 5000, :end 5500, :score nil, :strand :forward, :phase nil, :attributes {:id "exon00004", :parent ["mrna0001"]}}
{:chr "ctg123", :source nil, :type "exon", :start 7000, :end 9000, :score nil, :strand :forward, :phase nil, :attributes {:id "exon00005", :parent ["mrna0001"]}}])
(def ^:private ^String
nested-gff-2
(->> ["##gff-version 3"
"ctg123 . operon 1300 15000 . + . ID=operon001;Name=Operon"
"ctg123 . mRNA 1300 9000 . + . ID=mrna0001;Parent=operon001;Name=foobar"
"ctg123 . exon 1300 1500 . + . Parent=mrna0001"
"ctg123 . exon 1050 1500 . + . Parent=mrna0001"
"ctg123 . exon 3000 3902 . + . Parent=mrna0001"
"ctg123 . exon 5000 5500 . + . Parent=mrna0001"
"ctg123 . exon 7000 9000 . + . Parent=mrna0001"
"ctg123 . mRNA 10000 15000 . + . ID=mrna0002;Parent=operon001;Name=baz"
"ctg123 . exon 10000 12000 . + . Parent=mrna0002"
"ctg123 . exon 14000 15000 . + . Parent=mrna0002"]
(cstr/join \newline)))
(def ^:private
nested-edn-2
[{:chr "ctg123", :source nil, :type "operon", :start 1300, :end 15000, :score nil, :strand :forward, :phase nil, :attributes {:id "operon001", :name "Operon"}}
{:chr "ctg123", :source nil, :type "mRNA", :start 1300, :end 9000, :score nil, :strand :forward, :phase nil, :attributes {:id "mrna0001", :parent ["operon001"], :name "foobar"}}
{:chr "ctg123", :source nil, :type "exon", :start 1300, :end 1500, :score nil, :strand :forward, :phase nil, :attributes {:parent ["mrna0001"]}}
{:chr "ctg123", :source nil, :type "exon", :start 1050, :end 1500, :score nil, :strand :forward, :phase nil, :attributes {:parent ["mrna0001"]}}
{:chr "ctg123", :source nil, :type "exon", :start 3000, :end 3902, :score nil, :strand :forward, :phase nil, :attributes {:parent ["mrna0001"]}}
{:chr "ctg123", :source nil, :type "exon", :start 5000, :end 5500, :score nil, :strand :forward, :phase nil, :attributes {:parent ["mrna0001"]}}
{:chr "ctg123", :source nil, :type "exon", :start 7000, :end 9000, :score nil, :strand :forward, :phase nil, :attributes {:parent ["mrna0001"]}}
{:chr "ctg123", :source nil, :type "mRNA", :start 10000, :end 15000, :score nil, :strand :forward, :phase nil, :attributes {:id "mrna0002", :parent ["operon001"], :name "baz"}}
{:chr "ctg123", :source nil, :type "exon", :start 10000, :end 12000, :score nil, :strand :forward, :phase nil, :attributes {:parent ["mrna0002"]}}
{:chr "ctg123", :source nil, :type "exon", :start 14000, :end 15000, :score nil, :strand :forward, :phase nil, :attributes {:parent ["mrna0002"]}}])
(def ^:private ^String
discontinuous-gff
(->> ["##gff-version 3"
"ctg123 example match 26122 26126 . + . ID=match001"
"ctg123 example match 26497 26869 . + . ID=match001"
"ctg123 example match 27201 27325 . + . ID=match001"
"ctg123 example match 27372 27433 . + . ID=match001"
"ctg123 example match 27565 27565 . + . ID=match001"]
(cstr/join \newline)))
(def ^:private
discontinuous-edn
[{:chr "ctg123", :source "example", :type "match", :start 26122, :end 26126, :score nil, :strand :forward, :phase nil, :attributes {:id "match001"}}
{:chr "ctg123", :source "example", :type "match", :start 26497, :end 26869, :score nil, :strand :forward, :phase nil, :attributes {:id "match001"}}
{:chr "ctg123", :source "example", :type "match", :start 27201, :end 27325, :score nil, :strand :forward, :phase nil, :attributes {:id "match001"}}
{:chr "ctg123", :source "example", :type "match", :start 27372, :end 27433, :score nil, :strand :forward, :phase nil, :attributes {:id "match001"}}
{:chr "ctg123", :source "example", :type "match", :start 27565, :end 27565, :score nil, :strand :forward, :phase nil, :attributes {:id "match001"}}])
(def ^:private ^String
example-gene-gff
(->> ["##gff-version 3.2.1"
"##sequence-region ctg123 1 1497228"
"ctg123 . gene 1000 9000 . + . ID=gene00001;Name=EDEN"
"ctg123 . TF_binding_site 1000 1012 . + . ID=tfbs00001;Parent=gene00001"
"ctg123 . mRNA 1050 9000 . + . ID=mRNA00001;Parent=gene00001;Name=EDEN.1"
"ctg123 . mRNA 1050 9000 . + . ID=mRNA00002;Parent=gene00001;Name=EDEN.2"
"ctg123 . mRNA 1300 9000 . + . ID=mRNA00003;Parent=gene00001;Name=EDEN.3"
"ctg123 . exon 1300 1500 . + . ID=exon00001;Parent=mRNA00003"
"ctg123 . exon 1050 1500 . + . ID=exon00002;Parent=mRNA00001,mRNA00002"
"ctg123 . exon 3000 3902 . + . ID=exon00003;Parent=mRNA00001,mRNA00003"
"ctg123 . exon 5000 5500 . + . ID=exon00004;Parent=mRNA00001,mRNA00002,mRNA00003"
"ctg123 . exon 7000 9000 . + . ID=exon00005;Parent=mRNA00001,mRNA00002,mRNA00003"
"ctg123 . CDS 1201 1500 . + 0 ID=cds00001;Parent=mRNA00001;Name=edenprotein.1"
"ctg123 . CDS 3000 3902 . + 0 ID=cds00001;Parent=mRNA00001;Name=edenprotein.1"
"ctg123 . CDS 5000 5500 . + 0 ID=cds00001;Parent=mRNA00001;Name=edenprotein.1"
"ctg123 . CDS 7000 7600 . + 0 ID=cds00001;Parent=mRNA00001;Name=edenprotein.1"
"ctg123 . CDS 1201 1500 . + 0 ID=cds00002;Parent=mRNA00002;Name=edenprotein.2"
"ctg123 . CDS 5000 5500 . + 0 ID=cds00002;Parent=mRNA00002;Name=edenprotein.2"
"ctg123 . CDS 7000 7600 . + 0 ID=cds00002;Parent=mRNA00002;Name=edenprotein.2"
"ctg123 . CDS 3301 3902 . + 0 ID=cds00003;Parent=mRNA00003;Name=edenprotein.3"
"ctg123 . CDS 5000 5500 . + 1 ID=cds00003;Parent=mRNA00003;Name=edenprotein.3"
"ctg123 . CDS 7000 7600 . + 1 ID=cds00003;Parent=mRNA00003;Name=edenprotein.3"
"ctg123 . CDS 3391 3902 . + 0 ID=cds00004;Parent=mRNA00003;Name=edenprotein.4"
"ctg123 . CDS 5000 5500 . + 1 ID=cds00004;Parent=mRNA00003;Name=edenprotein.4"
"ctg123 . CDS 7000 7600 . + 1 ID=cds00004;Parent=mRNA00003;Name=edenprotein.4"]
(cstr/join \newline)))
(def ^:private
example-gene-edn
[{:chr "ctg123", :source nil, :type "gene", :start 1000, :end 9000, :score nil, :strand :forward, :phase nil, :attributes {:id "gene00001", :name "PI:NAME:<NAME>END_PI"}}
{:chr "ctg123", :source nil, :type "TF_binding_site", :start 1000, :end 1012, :score nil, :strand :forward, :phase nil, :attributes {:id "tfbs00001", :parent ["gene00001"]}}
{:chr "ctg123", :source nil, :type "mRNA", :start 1050, :end 9000, :score nil, :strand :forward, :phase nil, :attributes {:id "mRNA00001", :parent ["gene00001"], :name "EDEN.1"}}
{:chr "ctg123", :source nil, :type "mRNA", :start 1050, :end 9000, :score nil, :strand :forward, :phase nil, :attributes {:id "mRNA00002", :parent ["gene00001"], :name "EDEN.2"}}
{:chr "ctg123", :source nil, :type "mRNA", :start 1300, :end 9000, :score nil, :strand :forward, :phase nil, :attributes {:id "mRNA00003", :parent ["gene00001"], :name "EDEN.3"}}
{:chr "ctg123", :source nil, :type "exon", :start 1300, :end 1500, :score nil, :strand :forward, :phase nil, :attributes {:id "exon00001", :parent ["mRNA00003"]}}
{:chr "ctg123", :source nil, :type "exon", :start 1050, :end 1500, :score nil, :strand :forward, :phase nil, :attributes {:id "exon00002", :parent ["mRNA00001" "mRNA00002"]}}
{:chr "ctg123", :source nil, :type "exon", :start 3000, :end 3902, :score nil, :strand :forward, :phase nil, :attributes {:id "exon00003", :parent ["mRNA00001" "mRNA00003"]}}
{:chr "ctg123", :source nil, :type "exon", :start 5000, :end 5500, :score nil, :strand :forward, :phase nil, :attributes {:id "exon00004", :parent ["mRNA00001" "mRNA00002" "mRNA00003"]}}
{:chr "ctg123", :source nil, :type "exon", :start 7000, :end 9000, :score nil, :strand :forward, :phase nil, :attributes {:id "exon00005", :parent ["mRNA00001" "mRNA00002" "mRNA00003"]}}
{:chr "ctg123", :source nil, :type "CDS", :start 1201, :end 1500, :score nil, :strand :forward, :phase 0, :attributes {:id "cds00001", :parent ["mRNA00001"], :name "edenprotein.1"}}
{:chr "ctg123", :source nil, :type "CDS", :start 3000, :end 3902, :score nil, :strand :forward, :phase 0, :attributes {:id "cds00001", :parent ["mRNA00001"], :name "edenprotein.1"}}
{:chr "ctg123", :source nil, :type "CDS", :start 5000, :end 5500, :score nil, :strand :forward, :phase 0, :attributes {:id "cds00001", :parent ["mRNA00001"], :name "edenprotein.1"}}
{:chr "ctg123", :source nil, :type "CDS", :start 7000, :end 7600, :score nil, :strand :forward, :phase 0, :attributes {:id "cds00001", :parent ["mRNA00001"], :name "edenprotein.1"}}
{:chr "ctg123", :source nil, :type "CDS", :start 1201, :end 1500, :score nil, :strand :forward, :phase 0, :attributes {:id "cds00002", :parent ["mRNA00002"], :name "edenprotein.2"}}
{:chr "ctg123", :source nil, :type "CDS", :start 5000, :end 5500, :score nil, :strand :forward, :phase 0, :attributes {:id "cds00002", :parent ["mRNA00002"], :name "edenprotein.2"}}
{:chr "ctg123", :source nil, :type "CDS", :start 7000, :end 7600, :score nil, :strand :forward, :phase 0, :attributes {:id "cds00002", :parent ["mRNA00002"], :name "edenprotein.2"}}
{:chr "ctg123", :source nil, :type "CDS", :start 3301, :end 3902, :score nil, :strand :forward, :phase 0, :attributes {:id "cds00003", :parent ["mRNA00003"], :name "edenprotein.3"}}
{:chr "ctg123", :source nil, :type "CDS", :start 5000, :end 5500, :score nil, :strand :forward, :phase 1, :attributes {:id "cds00003", :parent ["mRNA00003"], :name "edenprotein.3"}}
{:chr "ctg123", :source nil, :type "CDS", :start 7000, :end 7600, :score nil, :strand :forward, :phase 1, :attributes {:id "cds00003", :parent ["mRNA00003"], :name "edenprotein.3"}}
{:chr "ctg123", :source nil, :type "CDS", :start 3391, :end 3902, :score nil, :strand :forward, :phase 0, :attributes {:id "cds00004", :parent ["mRNA00003"], :name "edenprotein.4"}}
{:chr "ctg123", :source nil, :type "CDS", :start 5000, :end 5500, :score nil, :strand :forward, :phase 1, :attributes {:id "cds00004", :parent ["mRNA00003"], :name "edenprotein.4"}}
{:chr "ctg123", :source nil, :type "CDS", :start 7000, :end 7600, :score nil, :strand :forward, :phase 1, :attributes {:id "cds00004", :parent ["mRNA00003"], :name "edenprotein.4"}}])
(def ^:private ^String
circular-gff
(->> ["##gff-version 3.2.1"
"# organism Enterobacteria phage f1"
"# Note Bacteriophage f1, complete genome."
"J02448 GenBank region 1 6407 . + . ID=J02448;Name=J02448;Is_circular=true"
"J02448 GenBank CDS 6006 7238 . + 0 ID=geneII;Name=II;Note=protein II"]
(cstr/join \newline)))
(def ^:private
circular-edn
[{:chr "J02448", :source "GenBank", :type "region", :start 1, :end 6407, :score nil, :strand :forward, :phase nil, :attributes {:id "J02448", :name "J02448", :circular? true}}
{:chr "J02448", :source "GenBank", :type "CDS", :start 6006, :end 7238, :score nil, :strand :forward, :phase 0, :attributes {:id "geneII", :name "II", :note ["protein II"]}}])
(def ^:private ^String
gap-gff
(->> ["##gff-version 3.2.1"
"chr3 . Match 1 23 . . . ID=Match1;Target=EST23 1 21;Gap=M8 D3 M6 I1 M6"
"ctg123 . nucleotide_to_protein 100 129 . + . ID=match008;Target=p101 1 10;Gap=M3 I1 M2 D1 M4"]
(cstr/join \newline)))
(def ^:private
gap-edn
[{:chr "chr3", :source nil, :type "Match", :start 1, :end 23, :score nil, :strand nil, :phase nil,
:attributes {:id "Match1", :target {:chr "EST23", :start 1, :end 21}, :gap [[\M 8] [\D 3] [\M 6] [\I 1] [\M 6]]}}
{:chr "ctg123", :source nil, :type "nucleotide_to_protein", :start 100, :end 129, :score nil, :strand :forward, :phase nil,
:attributes {:id "match008", :target {:chr "p101", :start 1, :end 10}, :gap [[\M 3] [\I 1] [\M 2] [\D 1] [\M 4]]}}])
(def ^:private ^String
alignment-gff
(->> ["##gff-version 3.2.1"
"ctg123 . cDNA_match 1050 9000 6.2e-45 + . ID=match00001;Target=cdna0123 12 2964;Gap=M451 D3499 M501 D1499 M2001"]
(cstr/join \newline)))
(def ^:private
alignment-edn
[{:chr "ctg123", :source nil, :type "cDNA_match", :start 1050, :end 9000, :score 6.2e-45, :strand :forward, :phase nil,
:attributes {:id "match00001", :target {:chr "cdna0123", :start 12, :end 2964}, :gap [[\M 451] [\D 3499] [\M 501] [\D 1499] [\M 2001]]}}])
(def ^:private ^String
alignment-multiple-gff
(->> ["##gff-version 3.2.1"
"ctg123 . cDNA_match 1050 1500 5.8e-42 + . ID=match00001;Target=cdna0123 12 462"
"ctg123 . cDNA_match 5000 5500 8.1e-43 + . ID=match00001;Target=cdna0123 463 963"
"ctg123 . cDNA_match 7000 9000 1.4e-40 + . ID=match00001;Target=cdna0123 964 2964"]
(cstr/join \newline)))
(def ^:private
alignment-multiple-edn
[{:chr "ctg123", :source nil, :type "cDNA_match", :start 1050, :end 1500, :score 5.8e-42, :strand :forward, :phase nil, :attributes {:id "match00001", :target {:chr "cdna0123", :start 12, :end 462}}}
{:chr "ctg123", :source nil, :type "cDNA_match", :start 5000, :end 5500, :score 8.1e-43, :strand :forward, :phase nil, :attributes {:id "match00001", :target {:chr "cdna0123", :start 463, :end 963}}}
{:chr "ctg123", :source nil, :type "cDNA_match", :start 7000, :end 9000, :score 1.4e-40, :strand :forward, :phase nil, :attributes {:id "match00001", :target {:chr "cdna0123", :start 964, :end 2964}}}])
(def ^:private ^String
alignment-reverse-gff
(->> ["##gff-version 3.2.1"
"ctg123 . EST_match 1200 3200 2.2e-30 + . ID=match00002;Target=mjm1123.5 5 506;Gap=M301 D1499 M201"
"ctg123 . EST_match 7000 9000 7.4e-32 - . ID=match00003;Target=mjm1123.3 1 502;Gap=M101 D1499 M401"]
(cstr/join \newline)))
(def ^:private
alignment-reverse-edn
[{:chr "ctg123", :source nil, :type "EST_match", :start 1200, :end 3200, :score 2.2e-30, :strand :forward, :phase nil,
:attributes {:id "match00002", :target {:chr "mjm1123.5", :start 5, :end 506}, :gap [[\M 301] [\D 1499] [\M 201]]}}
{:chr "ctg123", :source nil, :type "EST_match", :start 7000, :end 9000, :score 7.4e-32, :strand :reverse, :phase nil,
:attributes {:id "match00003", :target {:chr "mjm1123.3", :start 1, :end 502}, :gap [[\M 101] [\D 1499] [\M 401]]}}])
(def ^:private ^String
alignment-group-gff
(->> ["##gff-version 3.2.1"
"ctg123 . cDNA_match 1200 9000 . . . ID=cDNA00001"
"ctg123 . match_part 1200 3200 2.2e-30 + . ID=match00002;Parent=cDNA00001;Target=mjm1123.5 5 506;Gap=M301 D1499 M201"
"ctg123 . match_part 7000 9000 7.4e-32 - . ID=match00003;Parent=cDNA00001;Target=mjm1123.3 1 502;Gap=M101 D1499 M401"]
(cstr/join \newline)))
(def ^:private
alignment-group-edn
[{:chr "ctg123", :source nil, :type "cDNA_match", :start 1200, :end 9000, :score nil, :strand nil, :phase nil,
:attributes {:id "cDNA00001"}}
{:chr "ctg123", :source nil, :type "match_part", :start 1200, :end 3200, :score 2.2e-30, :strand :forward, :phase nil,
:attributes {:id "match00002", :parent ["cDNA00001"], :target {:chr "mjm1123.5", :start 5, :end 506}, :gap [[\M 301] [\D 1499] [\M 201]]}}
{:chr "ctg123", :source nil, :type "match_part", :start 7000, :end 9000, :score 7.4e-32, :strand :reverse, :phase nil,
:attributes {:id "match00003", :parent ["cDNA00001"], :target {:chr "mjm1123.3", :start 1, :end 502}, :gap [[\M 101] [\D 1499] [\M 401]]}}])
(def ^:private ^String
encoding-gff
(->> ["##gff-version 3.2"
"ch r;1 sour =ce ty &p,e 1 10 9.0 ? . ."
"chr%253B1 sour%253Dce ty%2526p%252Ce 1 10 . + . Target=Foo%20Bar 1 10 +;Dbxref=EMBL:AA816246,NCBI_gi:10727410;Foo=Bar%2C,Baz "
" !\"#$%25&'%09()*+,-./%0A0123456789:;<=>?@[\\]^_`{|}~ . type 1 10 . . . ID= !\"#$%25%26'%09()*+%2C-./%0A0123456789:%3B<%3D>?@[\\]^_`{|}~;Target=%20!\"#$%25%26'%09()*+%2C-./%0A0123456789:%3B<%3D>?@[\\]^_`{|}~ 1 10 -"]
(cstr/join \newline)))
(def ^:private
encoding-edn
[{:chr "ch r;1", :source "sour =ce", :type "ty &p,e", :start 1, :end 10, :score 9.0, :strand :unknown, :phase nil, :attributes {}}
{:chr "chr%3B1", :source "sour%3Dce", :type "ty%26p%2Ce", :start 1, :end 10, :score nil, :strand :forward, :phase nil,
:attributes {:target {:chr "Foo Bar", :start 1, :end 10, :strand :forward}, :db-xref [{:db-tag "EMBL", :id "AA816246"}, {:db-tag "NCBI_gi", :id "10727410"}], "Foo" ["Bar," "Baz "]}}
{:chr " !\"#$%&'\t()*+,-./\n0123456789:;<=>?@[\\]^_`{|}~", :source nil, :type "type", :start 1, :end 10, :score nil, :strand nil, :phase nil,
:attributes {:id " !\"#$%&'\t()*+,-./\n0123456789:;<=>?@[\\]^_`{|}~",
:target {:chr " !\"#$%&'\t()*+,-./\n0123456789:;<=>?@[\\]^_`{|}~", :start 1, :end 10, :strand :reverse}}}])
(def ^:private
example-edn
[{:chr "ctg123", :source nil, :type "gene", :start 1000, :end 9000, :score nil, :strand :forward, :phase nil,
:attributes {:id "gene00001", :name "PI:NAME:<NAME>END_PI"}}
{:chr "ctg123", :source nil, :type "TF_binding_site", :start 1000, :end 1012, :score nil, :strand :forward, :phase nil,
:attributes {:id "tfbs00001", :parent ["gene00001"]}}
{:chr "ctg123", :source nil, :type "mRNA", :start 1050, :end 9000, :score nil, :strand :forward, :phase nil,
:attributes {:id "mRNA00001", :parent ["gene00001"], :name "EDEN.1"}}
{:chr "ctg123", :source nil, :type "five_prime_UTR", :start 1050, :end 1200, :score nil, :strand :forward, :phase nil,
:attributes {:parent ["mRNA00001"]}}
{:chr "ctg123", :source nil, :type "CDS", :start 1201, :end 1500, :score nil, :strand :forward, :phase 0,
:attributes {:id "cds00001", :parent ["mRNA00001"]}}
{:chr "ctg123", :source nil, :type "CDS", :start 3000, :end 3902, :score nil, :strand :forward, :phase 0,
:attributes {:id "cds00001", :parent ["mRNA00001"]}}
{:chr "ctg123", :source nil, :type "CDS", :start 5000, :end 5500, :score nil, :strand :forward, :phase 0,
:attributes {:id "cds00001", :parent ["mRNA00001"]}}
{:chr "ctg123", :source nil, :type "CDS", :start 7000, :end 7600, :score nil, :strand :forward, :phase 0,
:attributes {:id "cds00001", :parent ["mRNA00001"]}}
{:chr "ctg123", :source nil, :type "three_prime_UTR", :start 7601, :end 9000, :score nil, :strand :forward, :phase nil,
:attributes {:parent ["mRNA00001"]}}
{:chr "ctg123", :source nil, :type "cDNA_match", :start 1050, :end 1500, :score 5.8e-42, :strand :forward, :phase nil,
:attributes {:id "match00001", :target {:chr "cdna0123", :start 12, :end 462}}}
{:chr "ctg123", :source nil, :type "cDNA_match", :start 5000, :end 5500, :score 8.1e-43, :strand :forward, :phase nil,
:attributes {:id "match00001", :target {:chr "cdna0123", :start 463, :end 963}}}
{:chr "ctg123", :source nil, :type "cDNA_match", :start 7000, :end 9000, :score 1.4e-40, :strand :forward, :phase nil,
:attributes {:id "match00001", :target {:chr "cdna0123", :start 964, :end 2964}}}])
(deftest reader
(with-open [bais (ByteArrayInputStream. (.getBytes simple-gff))
r (gff/reader bais)]
(is (instance? GFFReader r))
(is (= {:version 3, :major-revision nil, :minor-revision nil}
(gff/version r))))
(with-open [bais (ByteArrayInputStream. (.getBytes alignment-gff))
r (gff/reader bais)]
(is (instance? GFFReader r))
(is (= {:version 3, :major-revision 2, :minor-revision 1}
(gff/version r))))
(with-open [bais (ByteArrayInputStream. (.getBytes "##"))]
(is (thrown? Exception (gff/reader bais))))
(with-open [bais (ByteArrayInputStream. (.getBytes "##"))]
(is (= {:url nil, :version-directive "##"}
(try (gff/reader bais) (catch Exception e (ex-data e))))))
(with-open [bais (ByteArrayInputStream. (.getBytes "##gff-version 2"))]
(is (thrown? Exception (gff/reader bais))))
(with-open [bais (ByteArrayInputStream. (.getBytes "##gff-version 2"))]
(is (= {:url nil, :version 2, :major-revision nil, :minor-revision nil}
(try (gff/reader bais) (catch Exception e (ex-data e))))))
(with-open [bais (ByteArrayInputStream. (.getBytes "##gff-version 3\nctg%41123\t.\t.\t1\t10\t.\t.\t.\t."))
r (gff/reader bais)]
(is (thrown-with-msg?
Exception
#"Found an invalid character encoding while decoding GFF3 file"
(gff/read-features r))))
(with-open [bais (ByteArrayInputStream. (.getBytes "##gff-version 3\nctg%41123\t.\t.\t1\t10\t.\t.\t.\t."))
r (gff/reader bais)]
(is (= {:input "ctg%41123", :invalid-string "%41"}
(try (gff/read-features r) (catch Exception e (ex-data e)))))))
(deftest read-features
(are [?str ?edn]
(= ?edn
(with-open [bais (ByteArrayInputStream. (.getBytes ^String ?str))
r (gff/reader bais)]
(doall (gff/read-features r))))
simple-gff simple-edn
nested-gff-1 nested-edn-1
nested-gff-2 nested-edn-2
discontinuous-gff discontinuous-edn
example-gene-gff example-gene-edn
circular-gff circular-edn
gap-gff gap-edn
alignment-gff alignment-edn
alignment-multiple-gff alignment-multiple-edn
alignment-reverse-gff alignment-reverse-edn
alignment-group-gff alignment-group-edn
encoding-gff encoding-edn))
(deftest read-features-from-file
(with-open [r (gff/reader test-gff3-file)]
(is (= example-edn
(gff/read-features r)))))
(deftest writer
(with-open [baos (ByteArrayOutputStream.)
w (gff/writer baos)]
(is (instance? GFFWriter w)))
(with-open [baos (ByteArrayOutputStream.)
w (gff/writer baos {:version 3})]
(is (instance? GFFWriter w)))
(with-open [baos (ByteArrayOutputStream.)
w (gff/writer baos {:version 3, :major-revision 2, :minor-revision 1})]
(is (instance? GFFWriter w)))
(with-open [baos (ByteArrayOutputStream.)]
(is (thrown? Exception (gff/writer baos {:version 2}))))
(with-open [baos (ByteArrayOutputStream.)]
(is (= {:url nil, :version 2}
(try (gff/writer baos {:version 2}) (catch Exception e (ex-data e))))))
(with-open [baos (ByteArrayOutputStream.)]
(with-open [w (gff/writer baos {:version 3, :encoding :gzip})]
(gff/write-features w simple-edn))
(let [ba (.toByteArray baos)]
;; GZIP file header
(is (= (unchecked-byte 0x1f) (aget ba 0)))
(is (= (unchecked-byte 0x8b) (aget ba 1))))))
(deftest write-features
(are [?edn ?str]
;; ignore directives and comment lines
(= (cstr/replace ?str #"(?<=\n)#.*?\n" "")
(with-open [bais (ByteArrayInputStream. (.getBytes ^String ?str))
baos (ByteArrayOutputStream.)]
(let [v (with-open [r (gff/reader bais)]
(gff/version r))]
(with-open [w (gff/writer baos v)]
(gff/write-features w ?edn)))
(str baos)))
simple-edn simple-gff
nested-edn-1 nested-gff-1
nested-edn-2 nested-gff-2
discontinuous-edn discontinuous-gff
example-gene-edn example-gene-gff
circular-edn circular-gff
gap-edn gap-gff
alignment-edn alignment-gff
alignment-multiple-edn alignment-multiple-gff
alignment-reverse-edn alignment-reverse-gff
alignment-group-edn alignment-group-gff
encoding-edn encoding-gff))
(deftest write-features-to-file
(with-before-after {:before (prepare-cache!)
:after (clean-cache!)}
(let [f (cio/file temp-dir "gff-write.gff3")]
(is (not-throw? (with-open [w (gff/writer f)]
(gff/write-features w simple-edn))))
(is (.isFile f)))
(let [f (cio/file temp-dir "gff-write.gff3.gz")]
(is (not-throw? (with-open [w (gff/writer f)]
(gff/write-features w simple-edn))))
(is (.isFile f)))
(let [f (cio/file temp-dir "gff-write.gff3.bz2")]
(is (not-throw? (with-open [w (gff/writer f)]
(gff/write-features w simple-edn))))
(is (.isFile f)))))
(deftest source-type-test
(testing "reader"
(with-open [server (http-server)]
(are [?x] (= example-edn
(with-open [r (gff/reader ?x)]
(doall (gff/read-features r))))
test-gff3-file
(cio/file test-gff3-file)
(cio/as-url (cio/file test-gff3-file))
(cio/as-url (str (:uri server) "/gff3/example.gff3")))))
(testing "writer"
(let [tmp-gff3-file (cio/file temp-dir "gff3-source-type-writer.gff3")]
(are [?x] (with-before-after {:before (prepare-cache!)
:after (clean-cache!)}
(with-open [w (gff/writer ?x)]
(not-throw? (gff/write-features w example-edn))))
(.getCanonicalPath tmp-gff3-file)
tmp-gff3-file
(cio/as-url tmp-gff3-file)))))
|
[
{
"context": "on\n [:h2 \"What is Clojure?\"]\n [:p \"Designed by Rich Hickey:\"]\n [:img {:src \"img/Rich_Hickey.jpg\" :height \"",
"end": 831,
"score": 0.9996744394302368,
"start": 820,
"tag": "NAME",
"value": "Rich Hickey"
},
{
"context": "on\n [:h2 \"What is Clojure?\"]\n [:p \"Designed by Rich Hickey\"\n [:blockquote \"Clojure's name, according to H",
"end": 985,
"score": 0.9997816681861877,
"start": 974,
"tag": "NAME",
"value": "Rich Hickey"
},
{
"context": " \"10 data structures. - Alan J. Perlis\"]]\n [:li \"Learning new languages and paradigms",
"end": 2379,
"score": 0.9998760223388672,
"start": 2365,
"tag": "NAME",
"value": "Alan J. Perlis"
}
] |
src/reveal/slides.cljs
|
GingaNinja/clojure-pres
| 0 |
(ns reveal.slides)
(def slide-1
[:section
[:h1 "Clojure"]
[:h3 "Let's Get Some Clojure"]
[:p "(and some puns)"]
])
(def clojure-closure
[:section
[:h2 "It's Clojure, not a closure"]
[:pre
[:code {:class "hljs"
:data-trim true
:data-noescape true
}
"function makeFn() {
let a = 42;
return function() {
console.log(a)
}
}"]]
[:pre
[:code {:class "hljs"
:data-trim nil
:data-noescape true}
"(defn multiplesOf
[multiple target]
(let [multiplecount (- (Math/ceil (/ target multiple)) 1)]
(take multiplecount (iterate
(partial + multiple)
multiple))))"
]]])
(def what-clojure-rich
[:section
[:h2 "What is Clojure?"]
[:p "Designed by Rich Hickey:"]
[:img {:src "img/Rich_Hickey.jpg" :height "400"}]])
(def what-clojure-name
[:section
[:h2 "What is Clojure?"]
[:p "Designed by Rich Hickey"
[:blockquote "Clojure's name, according to Hickey, is a pun on the programming concept 'closure' incorporating the letters C, L, and J for C#, Lisp, and Java respectively—three languages which had a major influence on Clojure's design."]]])
(def lisp
[:section
[:h2 "What is a LISP anyway?"]])
(def lisp-list-processor
[:section
[:h2 "What is a LISP anyway?"]
[:p "LISt Processor"]
[:pre
[:code {:class "hljs"
:data-trim nil}
"(println \"hello there\")"]]
[:blockquote "(What the world needs (I think) is not (a Lisp (with fewer parentheses)) but (an English (with more.)))"]])
(def lisp-homiconicity
[:section
[:h2 "What is a LISP anyway?"]
[:p "Homoiconicity"]
[:img {:src "img/Homer_Simpson_2006.png"}]])
(def run-on
[:section
[:h2 "What does Clojure run on?"]
[:ul
[:li "JVM - Clojure"]
[:li "Javascript - ClojureScript"]
[:li ".Net CLR - Clojure CLR"]
[:li "Bash? - Joker, Babashka"]]])
(def the-repl
[:section
[:h2 "The REPL"]
[:p "Read-Eval-Print Loop"]])
(def why-useful
[:section
[:h2 "Is it useful?"]
[:ul
[:li "Functional" [:blockquote "It is better to have 100 functions operate "
"on one data structure than to have 10 functions operate on "
"10 data structures. - Alan J. Perlis"]]
[:li "Learning new languages and paradigms"]
[:li "Immutable - I know what my code is doing"]
[:li "Uses the underlying platform"]
[:li "The language itself is maleable"]]])
(def useful-tools
[:section
[:h2 "Useful tools for clojure development"]
[:ul
[:li "Leiningen / Boot"]
[:li "Figwheel"]
[:li "Emacs - uses a LISP"]
[:li "VSCode + Calva"]
[:li "deftest" [:pre
[:code {:class "hljs"
:data-trim true
:data-noescape true}
"(deftest addition-tests
(is (= 5 (+ 3 2))))"]]]]])
(def useful-books-brave
[:section
[:h2 "Useful books"]
[:p "Clojure for the Brave and True"]
[:img {:src "img/brave-true.jpg"}]])
(def useful-books-joy
[:section
[:h2 "Useful books"]
[:p "The Joy of Clojure"]
[:img {:src "img/joyclojure.png" :height "400"}]])
(def useful-books-getting
[:section
[:h2 "Useful books"]
[:p "Getting Clojure"]
[:img {:src "img/roclojure.jpg" :height "400"}]])
(def questions
[:section
[:h1 [:pre [:code {:class "hljs" :data-trim true :data-noescape true} "(any? questions)"]]]])
(defn all
"Add here all slides you want to see in your presentation."
[]
[slide-1
clojure-closure
what-clojure-rich
what-clojure-name
lisp
lisp-list-processor
lisp-homiconicity
run-on
the-repl
why-useful
useful-tools
useful-books-brave
useful-books-joy
useful-books-getting
questions])
|
9059
|
(ns reveal.slides)
(def slide-1
[:section
[:h1 "Clojure"]
[:h3 "Let's Get Some Clojure"]
[:p "(and some puns)"]
])
(def clojure-closure
[:section
[:h2 "It's Clojure, not a closure"]
[:pre
[:code {:class "hljs"
:data-trim true
:data-noescape true
}
"function makeFn() {
let a = 42;
return function() {
console.log(a)
}
}"]]
[:pre
[:code {:class "hljs"
:data-trim nil
:data-noescape true}
"(defn multiplesOf
[multiple target]
(let [multiplecount (- (Math/ceil (/ target multiple)) 1)]
(take multiplecount (iterate
(partial + multiple)
multiple))))"
]]])
(def what-clojure-rich
[:section
[:h2 "What is Clojure?"]
[:p "Designed by <NAME>:"]
[:img {:src "img/Rich_Hickey.jpg" :height "400"}]])
(def what-clojure-name
[:section
[:h2 "What is Clojure?"]
[:p "Designed by <NAME>"
[:blockquote "Clojure's name, according to Hickey, is a pun on the programming concept 'closure' incorporating the letters C, L, and J for C#, Lisp, and Java respectively—three languages which had a major influence on Clojure's design."]]])
(def lisp
[:section
[:h2 "What is a LISP anyway?"]])
(def lisp-list-processor
[:section
[:h2 "What is a LISP anyway?"]
[:p "LISt Processor"]
[:pre
[:code {:class "hljs"
:data-trim nil}
"(println \"hello there\")"]]
[:blockquote "(What the world needs (I think) is not (a Lisp (with fewer parentheses)) but (an English (with more.)))"]])
(def lisp-homiconicity
[:section
[:h2 "What is a LISP anyway?"]
[:p "Homoiconicity"]
[:img {:src "img/Homer_Simpson_2006.png"}]])
(def run-on
[:section
[:h2 "What does Clojure run on?"]
[:ul
[:li "JVM - Clojure"]
[:li "Javascript - ClojureScript"]
[:li ".Net CLR - Clojure CLR"]
[:li "Bash? - Joker, Babashka"]]])
(def the-repl
[:section
[:h2 "The REPL"]
[:p "Read-Eval-Print Loop"]])
(def why-useful
[:section
[:h2 "Is it useful?"]
[:ul
[:li "Functional" [:blockquote "It is better to have 100 functions operate "
"on one data structure than to have 10 functions operate on "
"10 data structures. - <NAME>"]]
[:li "Learning new languages and paradigms"]
[:li "Immutable - I know what my code is doing"]
[:li "Uses the underlying platform"]
[:li "The language itself is maleable"]]])
(def useful-tools
[:section
[:h2 "Useful tools for clojure development"]
[:ul
[:li "Leiningen / Boot"]
[:li "Figwheel"]
[:li "Emacs - uses a LISP"]
[:li "VSCode + Calva"]
[:li "deftest" [:pre
[:code {:class "hljs"
:data-trim true
:data-noescape true}
"(deftest addition-tests
(is (= 5 (+ 3 2))))"]]]]])
(def useful-books-brave
[:section
[:h2 "Useful books"]
[:p "Clojure for the Brave and True"]
[:img {:src "img/brave-true.jpg"}]])
(def useful-books-joy
[:section
[:h2 "Useful books"]
[:p "The Joy of Clojure"]
[:img {:src "img/joyclojure.png" :height "400"}]])
(def useful-books-getting
[:section
[:h2 "Useful books"]
[:p "Getting Clojure"]
[:img {:src "img/roclojure.jpg" :height "400"}]])
(def questions
[:section
[:h1 [:pre [:code {:class "hljs" :data-trim true :data-noescape true} "(any? questions)"]]]])
(defn all
"Add here all slides you want to see in your presentation."
[]
[slide-1
clojure-closure
what-clojure-rich
what-clojure-name
lisp
lisp-list-processor
lisp-homiconicity
run-on
the-repl
why-useful
useful-tools
useful-books-brave
useful-books-joy
useful-books-getting
questions])
| true |
(ns reveal.slides)
(def slide-1
[:section
[:h1 "Clojure"]
[:h3 "Let's Get Some Clojure"]
[:p "(and some puns)"]
])
(def clojure-closure
[:section
[:h2 "It's Clojure, not a closure"]
[:pre
[:code {:class "hljs"
:data-trim true
:data-noescape true
}
"function makeFn() {
let a = 42;
return function() {
console.log(a)
}
}"]]
[:pre
[:code {:class "hljs"
:data-trim nil
:data-noescape true}
"(defn multiplesOf
[multiple target]
(let [multiplecount (- (Math/ceil (/ target multiple)) 1)]
(take multiplecount (iterate
(partial + multiple)
multiple))))"
]]])
(def what-clojure-rich
[:section
[:h2 "What is Clojure?"]
[:p "Designed by PI:NAME:<NAME>END_PI:"]
[:img {:src "img/Rich_Hickey.jpg" :height "400"}]])
(def what-clojure-name
[:section
[:h2 "What is Clojure?"]
[:p "Designed by PI:NAME:<NAME>END_PI"
[:blockquote "Clojure's name, according to Hickey, is a pun on the programming concept 'closure' incorporating the letters C, L, and J for C#, Lisp, and Java respectively—three languages which had a major influence on Clojure's design."]]])
(def lisp
[:section
[:h2 "What is a LISP anyway?"]])
(def lisp-list-processor
[:section
[:h2 "What is a LISP anyway?"]
[:p "LISt Processor"]
[:pre
[:code {:class "hljs"
:data-trim nil}
"(println \"hello there\")"]]
[:blockquote "(What the world needs (I think) is not (a Lisp (with fewer parentheses)) but (an English (with more.)))"]])
(def lisp-homiconicity
[:section
[:h2 "What is a LISP anyway?"]
[:p "Homoiconicity"]
[:img {:src "img/Homer_Simpson_2006.png"}]])
(def run-on
[:section
[:h2 "What does Clojure run on?"]
[:ul
[:li "JVM - Clojure"]
[:li "Javascript - ClojureScript"]
[:li ".Net CLR - Clojure CLR"]
[:li "Bash? - Joker, Babashka"]]])
(def the-repl
[:section
[:h2 "The REPL"]
[:p "Read-Eval-Print Loop"]])
(def why-useful
[:section
[:h2 "Is it useful?"]
[:ul
[:li "Functional" [:blockquote "It is better to have 100 functions operate "
"on one data structure than to have 10 functions operate on "
"10 data structures. - PI:NAME:<NAME>END_PI"]]
[:li "Learning new languages and paradigms"]
[:li "Immutable - I know what my code is doing"]
[:li "Uses the underlying platform"]
[:li "The language itself is maleable"]]])
(def useful-tools
[:section
[:h2 "Useful tools for clojure development"]
[:ul
[:li "Leiningen / Boot"]
[:li "Figwheel"]
[:li "Emacs - uses a LISP"]
[:li "VSCode + Calva"]
[:li "deftest" [:pre
[:code {:class "hljs"
:data-trim true
:data-noescape true}
"(deftest addition-tests
(is (= 5 (+ 3 2))))"]]]]])
(def useful-books-brave
[:section
[:h2 "Useful books"]
[:p "Clojure for the Brave and True"]
[:img {:src "img/brave-true.jpg"}]])
(def useful-books-joy
[:section
[:h2 "Useful books"]
[:p "The Joy of Clojure"]
[:img {:src "img/joyclojure.png" :height "400"}]])
(def useful-books-getting
[:section
[:h2 "Useful books"]
[:p "Getting Clojure"]
[:img {:src "img/roclojure.jpg" :height "400"}]])
(def questions
[:section
[:h1 [:pre [:code {:class "hljs" :data-trim true :data-noescape true} "(any? questions)"]]]])
(defn all
"Add here all slides you want to see in your presentation."
[]
[slide-1
clojure-closure
what-clojure-rich
what-clojure-name
lisp
lisp-list-processor
lisp-homiconicity
run-on
the-repl
why-useful
useful-tools
useful-books-brave
useful-books-joy
useful-books-getting
questions])
|
[
{
"context": "! *unchecked-math* :warn-on-boxed)\n(ns ^{:author \"John Alan McDonald, Kristina Lisa Klinkner\" :date \"2016-11-09\"\n ",
"end": 105,
"score": 0.9998766779899597,
"start": 87,
"tag": "NAME",
"value": "John Alan McDonald"
},
{
"context": ":warn-on-boxed)\n(ns ^{:author \"John Alan McDonald, Kristina Lisa Klinkner\" :date \"2016-11-09\"\n :doc \"Importance, etc.,",
"end": 129,
"score": 0.9998711347579956,
"start": 107,
"tag": "NAME",
"value": "Kristina Lisa Klinkner"
}
] |
src/main/clojure/taiga/permutation.clj
|
wahpenayo/taiga
| 4 |
(set! *warn-on-reflection* true)
(set! *unchecked-math* :warn-on-boxed)
(ns ^{:author "John Alan McDonald, Kristina Lisa Klinkner" :date "2016-11-09"
:doc "Importance, etc., via permutation." }
taiga.permutation
(:require [clojure.string :as s]
[zana.api :as z]))
;;------------------------------------------------------------------------------
;; TODO: move permutation statistics to Zana?
;; probably would need a different api ---
;;------------------------------------------------------------------------------
;; Call (prng) to get a generator with the same seed each time,
;; for reproducibility.
(defn- mersenne-twister []
(z/mersenne-twister-generator "D5B93C275D55B6871F5F72FD812B5572"))
;;------------------------------------------------------------------------------
(defn- permute ^java.util.Map [^clojure.lang.Keyword k
^java.util.Map attributes
^Iterable data
^java.util.Random prng]
(if k
(assoc attributes
k
(z/lookup-function (.get attributes k) data (z/shuffle data prng)))
attributes))
;;------------------------------------------------------------------------------
(defn- metrics-record ^java.util.Map [^java.util.Map metrics
^clojure.lang.IFn$OOD model
^java.util.Map attributes
^Iterable data]
(into (sorted-map)
(z/map (fn [_ ^clojure.lang.IFn$OOOD metric]
(metric model attributes data))
metrics)))
;;------------------------------------------------------------------------------
(defn permutation-statistics
"Compute the values of each metric on the raw data, and with each attribute's
values randomly permuted.
<dl>
<dt><code>^java.util.Map metrics</code></dt>
<dd> A map from keyword (metric name) to [metric function](1metrics.html).
</dd>
<dt><code>^clojure.lang.IFn$OOD model</code></dt>
<dd> A function that takes an attribute map and a single datum, and returns
a double-valued prediction.
</dd>
<dt><code>^java.util.Map attributes</code></dt>
<dd> A map from keyword (attribute name) to a function that takes a
single datum and returns a double attribute value. Must include a value
for <code>:ground-truth</code>.
<dt><code>^Iterable data</code></dt>
</dd>
<dd> an Iterable over a set of training or test examples for which the
ground truth is known.
</dd>
<dt><code>^java.util.Random prng</code></dt>
<dd> Pseudo-random number generator used for generating permutations.
Defaults to a
[mersenne twister](http://maths.uncommons.org/api/org/uncommons/maths/random/MersenneTwisterRNG.html)
from [Uncommons Maths](http://maths.uncommons.org/).
</dd>
</dl>"
(^java.util.Map [^java.util.Map metrics
^clojure.lang.IFn$OOD model
^java.util.Map attributes
^Iterable data
^java.util.Random prng]
(assert (:ground-truth attributes) (print-str (sort (keys attributes))))
(let [predictors (dissoc attributes :ground-truth :prediction)
ground-truth (:ground-truth attributes)]
(assert (instance? clojure.lang.IFn$OD ground-truth))
(into (sorted-map)
(z/pmap (fn [[k ^clojure.lang.IFn$OD x]]
(let [px (permute k attributes data prng)]
[k (metrics-record metrics model px data)]))
(sort-by #(z/name (key %)) (assoc predictors nil nil))))))
(^java.util.Map [^java.util.Map metrics
^clojure.lang.IFn$OOD model
^java.util.Map attributes
^Iterable data]
(permutation-statistics metrics model attributes data (mersenne-twister))))
;;------------------------------------------------------------------------------
(defn statistics-tsv
"**TODO:** move this to a generic tsv writer in Zana
<dl>
<dt><code>^java.util.Map stats</code></dt>
<dd> a nested map. outer keys are predictor names, inner keys are metric
names. nil outer key has the results with no permutation.
</dd>
<dt><code>^java.io.File file</code>/dt>
<dd> where to write the tab separated record, with the header gotten by
stripping \":\" from the Keyword keys.
</dd>
</dl>"
[^java.util.Map stats
^java.io.File file]
(let [[predictor record] (first stats)
header (keys record)]
(with-open [w (z/print-writer file)]
(.println w (s/join "\t" (cons "predictor" (mapv z/name header))))
(doseq [[predictor record] (sort-by key stats)]
(.println w
(s/join "\t" (cons (z/name predictor)
(mapv #(get record %) header))))))))
;;------------------------------------------------------------------------------
|
97645
|
(set! *warn-on-reflection* true)
(set! *unchecked-math* :warn-on-boxed)
(ns ^{:author "<NAME>, <NAME>" :date "2016-11-09"
:doc "Importance, etc., via permutation." }
taiga.permutation
(:require [clojure.string :as s]
[zana.api :as z]))
;;------------------------------------------------------------------------------
;; TODO: move permutation statistics to Zana?
;; probably would need a different api ---
;;------------------------------------------------------------------------------
;; Call (prng) to get a generator with the same seed each time,
;; for reproducibility.
(defn- mersenne-twister []
(z/mersenne-twister-generator "D5B93C275D55B6871F5F72FD812B5572"))
;;------------------------------------------------------------------------------
(defn- permute ^java.util.Map [^clojure.lang.Keyword k
^java.util.Map attributes
^Iterable data
^java.util.Random prng]
(if k
(assoc attributes
k
(z/lookup-function (.get attributes k) data (z/shuffle data prng)))
attributes))
;;------------------------------------------------------------------------------
(defn- metrics-record ^java.util.Map [^java.util.Map metrics
^clojure.lang.IFn$OOD model
^java.util.Map attributes
^Iterable data]
(into (sorted-map)
(z/map (fn [_ ^clojure.lang.IFn$OOOD metric]
(metric model attributes data))
metrics)))
;;------------------------------------------------------------------------------
(defn permutation-statistics
"Compute the values of each metric on the raw data, and with each attribute's
values randomly permuted.
<dl>
<dt><code>^java.util.Map metrics</code></dt>
<dd> A map from keyword (metric name) to [metric function](1metrics.html).
</dd>
<dt><code>^clojure.lang.IFn$OOD model</code></dt>
<dd> A function that takes an attribute map and a single datum, and returns
a double-valued prediction.
</dd>
<dt><code>^java.util.Map attributes</code></dt>
<dd> A map from keyword (attribute name) to a function that takes a
single datum and returns a double attribute value. Must include a value
for <code>:ground-truth</code>.
<dt><code>^Iterable data</code></dt>
</dd>
<dd> an Iterable over a set of training or test examples for which the
ground truth is known.
</dd>
<dt><code>^java.util.Random prng</code></dt>
<dd> Pseudo-random number generator used for generating permutations.
Defaults to a
[mersenne twister](http://maths.uncommons.org/api/org/uncommons/maths/random/MersenneTwisterRNG.html)
from [Uncommons Maths](http://maths.uncommons.org/).
</dd>
</dl>"
(^java.util.Map [^java.util.Map metrics
^clojure.lang.IFn$OOD model
^java.util.Map attributes
^Iterable data
^java.util.Random prng]
(assert (:ground-truth attributes) (print-str (sort (keys attributes))))
(let [predictors (dissoc attributes :ground-truth :prediction)
ground-truth (:ground-truth attributes)]
(assert (instance? clojure.lang.IFn$OD ground-truth))
(into (sorted-map)
(z/pmap (fn [[k ^clojure.lang.IFn$OD x]]
(let [px (permute k attributes data prng)]
[k (metrics-record metrics model px data)]))
(sort-by #(z/name (key %)) (assoc predictors nil nil))))))
(^java.util.Map [^java.util.Map metrics
^clojure.lang.IFn$OOD model
^java.util.Map attributes
^Iterable data]
(permutation-statistics metrics model attributes data (mersenne-twister))))
;;------------------------------------------------------------------------------
(defn statistics-tsv
"**TODO:** move this to a generic tsv writer in Zana
<dl>
<dt><code>^java.util.Map stats</code></dt>
<dd> a nested map. outer keys are predictor names, inner keys are metric
names. nil outer key has the results with no permutation.
</dd>
<dt><code>^java.io.File file</code>/dt>
<dd> where to write the tab separated record, with the header gotten by
stripping \":\" from the Keyword keys.
</dd>
</dl>"
[^java.util.Map stats
^java.io.File file]
(let [[predictor record] (first stats)
header (keys record)]
(with-open [w (z/print-writer file)]
(.println w (s/join "\t" (cons "predictor" (mapv z/name header))))
(doseq [[predictor record] (sort-by key stats)]
(.println w
(s/join "\t" (cons (z/name predictor)
(mapv #(get record %) header))))))))
;;------------------------------------------------------------------------------
| true |
(set! *warn-on-reflection* true)
(set! *unchecked-math* :warn-on-boxed)
(ns ^{:author "PI:NAME:<NAME>END_PI, PI:NAME:<NAME>END_PI" :date "2016-11-09"
:doc "Importance, etc., via permutation." }
taiga.permutation
(:require [clojure.string :as s]
[zana.api :as z]))
;;------------------------------------------------------------------------------
;; TODO: move permutation statistics to Zana?
;; probably would need a different api ---
;;------------------------------------------------------------------------------
;; Call (prng) to get a generator with the same seed each time,
;; for reproducibility.
(defn- mersenne-twister []
(z/mersenne-twister-generator "D5B93C275D55B6871F5F72FD812B5572"))
;;------------------------------------------------------------------------------
(defn- permute ^java.util.Map [^clojure.lang.Keyword k
^java.util.Map attributes
^Iterable data
^java.util.Random prng]
(if k
(assoc attributes
k
(z/lookup-function (.get attributes k) data (z/shuffle data prng)))
attributes))
;;------------------------------------------------------------------------------
(defn- metrics-record ^java.util.Map [^java.util.Map metrics
^clojure.lang.IFn$OOD model
^java.util.Map attributes
^Iterable data]
(into (sorted-map)
(z/map (fn [_ ^clojure.lang.IFn$OOOD metric]
(metric model attributes data))
metrics)))
;;------------------------------------------------------------------------------
(defn permutation-statistics
"Compute the values of each metric on the raw data, and with each attribute's
values randomly permuted.
<dl>
<dt><code>^java.util.Map metrics</code></dt>
<dd> A map from keyword (metric name) to [metric function](1metrics.html).
</dd>
<dt><code>^clojure.lang.IFn$OOD model</code></dt>
<dd> A function that takes an attribute map and a single datum, and returns
a double-valued prediction.
</dd>
<dt><code>^java.util.Map attributes</code></dt>
<dd> A map from keyword (attribute name) to a function that takes a
single datum and returns a double attribute value. Must include a value
for <code>:ground-truth</code>.
<dt><code>^Iterable data</code></dt>
</dd>
<dd> an Iterable over a set of training or test examples for which the
ground truth is known.
</dd>
<dt><code>^java.util.Random prng</code></dt>
<dd> Pseudo-random number generator used for generating permutations.
Defaults to a
[mersenne twister](http://maths.uncommons.org/api/org/uncommons/maths/random/MersenneTwisterRNG.html)
from [Uncommons Maths](http://maths.uncommons.org/).
</dd>
</dl>"
(^java.util.Map [^java.util.Map metrics
^clojure.lang.IFn$OOD model
^java.util.Map attributes
^Iterable data
^java.util.Random prng]
(assert (:ground-truth attributes) (print-str (sort (keys attributes))))
(let [predictors (dissoc attributes :ground-truth :prediction)
ground-truth (:ground-truth attributes)]
(assert (instance? clojure.lang.IFn$OD ground-truth))
(into (sorted-map)
(z/pmap (fn [[k ^clojure.lang.IFn$OD x]]
(let [px (permute k attributes data prng)]
[k (metrics-record metrics model px data)]))
(sort-by #(z/name (key %)) (assoc predictors nil nil))))))
(^java.util.Map [^java.util.Map metrics
^clojure.lang.IFn$OOD model
^java.util.Map attributes
^Iterable data]
(permutation-statistics metrics model attributes data (mersenne-twister))))
;;------------------------------------------------------------------------------
(defn statistics-tsv
"**TODO:** move this to a generic tsv writer in Zana
<dl>
<dt><code>^java.util.Map stats</code></dt>
<dd> a nested map. outer keys are predictor names, inner keys are metric
names. nil outer key has the results with no permutation.
</dd>
<dt><code>^java.io.File file</code>/dt>
<dd> where to write the tab separated record, with the header gotten by
stripping \":\" from the Keyword keys.
</dd>
</dl>"
[^java.util.Map stats
^java.io.File file]
(let [[predictor record] (first stats)
header (keys record)]
(with-open [w (z/print-writer file)]
(.println w (s/join "\t" (cons "predictor" (mapv z/name header))))
(doseq [[predictor record] (sort-by key stats)]
(.println w
(s/join "\t" (cons (z/name predictor)
(mapv #(get record %) header))))))))
;;------------------------------------------------------------------------------
|
[
{
"context": ";; Copyright 2014-2020 King\n;; Copyright 2009-2014 Ragnar Svensson, Christian Murray\n;; Licensed under the Defold Li",
"end": 111,
"score": 0.9998151659965515,
"start": 96,
"tag": "NAME",
"value": "Ragnar Svensson"
},
{
"context": "-2020 King\n;; Copyright 2009-2014 Ragnar Svensson, Christian Murray\n;; Licensed under the Defold License version 1.0 ",
"end": 129,
"score": 0.9998232126235962,
"start": 113,
"tag": "NAME",
"value": "Christian Murray"
}
] |
editor/src/clj/editor/code/lang/cish.clj
|
cmarincia/defold
| 0 |
;; Copyright 2020-2022 The Defold Foundation
;; Copyright 2014-2020 King
;; Copyright 2009-2014 Ragnar Svensson, Christian Murray
;; Licensed under the Defold License version 1.0 (the "License"); you may not use
;; this file except in compliance with the License.
;;
;; You may obtain a copy of the License, together with FAQs at
;; https://www.defold.com/license
;;
;; Unless required by applicable law or agreed to in writing, software distributed
;; under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
;; CONDITIONS OF ANY KIND, either express or implied. See the License for the
;; specific language governing permissions and limitations under the License.
(ns editor.code.lang.cish)
;; Pattern definitions translated from vscode:
;;
;; https://github.com/Microsoft/vscode/blob/master/extensions/cpp/syntaxes/c.json
;; https://github.com/Microsoft/vscode/blob/master/extensions/cpp/syntaxes/c%2B%2B.json
;;
;; Problems:
;; - No support for repositories. Can partly be replaced by def's and concat'ing patterns. But circular definitions are a hassle.
;; - Regexp grammar slightly different
;; - \n often used to match end of line. Our lines don't have an ending \n, so replace with $.
;;
;; We should really expand our grammar support and auto translate the grammar definitions.
;;
(def ^:private c-line-continuation-character-patterns
[{:match #"(\\)$"
:captures {1 {:name "constant.character.escape.line-continuation.c"}}}])
(def ^:private c-comments-patterns
[{:captures {1 {:name "meta.toc-list.banner.block.c"}}
:match #"^/\* =(\s*.*?)\s*= \*/$$?"
:name "comment.block.c"}
{:begin #"/\*"
:begin-captures {0 {:name "punctuation.definition.comment.begin.c"}}
:end #"\*/"
:end-captures {0 {:name "punctuation.definition.comment.end.c"}}
:name "comment.block.c"}
{:match #"\*/.*$"
:name "invalid.illegal.stray-comment-end.c"}
{:captures {1 {:name "meta.toc-list.banner.line.c"}}
:match #"^// =(\s*.*?)\s*=\s*$$?"
:name "comment.line.banner.cpp"}
{:begin #"(^[ \t]+)?(?=//)"
:begin-captures {1 {:name "punctuation.whitespace.comment.leading.cpp"}}
:end #"(?!\G)"
:patterns [{:begin #"//"
:begin-captures {0 {:name "punctuation.definition.comment.cpp"}}
:end #"(?=$)"
:name "comment.line.double-slash.cpp"
:patterns c-line-continuation-character-patterns}]}])
(def ^:private c-numbers-patterns
[{:match #"\b((0(x|X)[0-9a-fA-F]([0-9a-fA-F']*[0-9a-fA-F])?)|(0(b|B)[01]([01']*[01])?)|(([0-9]([0-9']*[0-9])?\.?[0-9]*([0-9']*[0-9])?)|(\.[0-9]([0-9']*[0-9])?))((e|E)(\+|-)?[0-9]([0-9']*[0-9])?)?)(L|l|UL|ul|u|U|F|f|ll|LL|ull|ULL)?\b"
:name "constant.numeric.c"}])
(def ^:private c-string-escaped-char-patterns
[{:match #"(?x)\\ (
\\ |
[abefnprtv'\"?] |
[0-3]\d{0,2} |
[4-7]\d? |
x[a-fA-F0-9]{0,2} |
u[a-fA-F0-9]{0,4} |
U[a-fA-F0-9]{0,8} )"
:name "constant.character.escape.c"}
{:match #"\\."
:name "invalid.illegal.unknown-escape.c"}])
(def ^:private c-string-placeholder-patterns
[{:match #"(?x) %
(\d+\$)? # field (argument #)
[\#0\- +']* # flags
[,;:_]? # separator character (AltiVec)
((-?\d+)|\*(-?\d+\$)?)? # minimum field width
(\.((-?\d+)|\*(-?\d+\$)?)?)? # precision
(hh|h|ll|l|j|t|z|q|L|vh|vl|v|hv|hl)? # length modifier
[diouxXDOUeEfFgGaACcSspn%] # conversion type"
:name "constant.other.placeholder.c"}
{:match #"(%)(?!\"\s*(PRI|SCN))"
:captures {1 {:name "invalid.illegal.placeholder.c"}}}])
(def ^:private c-strings-patterns
[{:begin #"\""
:begin-captures {0 {:name "punctuation.definition.string.begin.c"}}
:end #"\""
:end-captures {0 {:name "punctuation.definition.string.end.c"}}
:name "string.quoted.double.c"
:patterns (concat c-string-escaped-char-patterns
c-string-placeholder-patterns
c-line-continuation-character-patterns)}
{:begin #"'"
:begin-captures {0 {:name "punctuation.definition.string.begin.c"}}
:end #"'"
:end-captures {0 {:name "punctuation.definition.string.end.c"}}
:name "string.quoted.single.c"
:patterns (concat c-string-escaped-char-patterns
c-line-continuation-character-patterns)}])
(def ^:private c-operators-patterns
[{:match #"(?<![\w$])(sizeof)(?![\w$])"
:name "keyword.operator.sizeof.c"}
{:match #"--"
:name "keyword.operator.decrement.c"}
{:match #"\+\+"
:name "keyword.operator.increment.c"}
{:match #"%=|\+=|-=|\*=|(?<!\()/="
:name "keyword.operator.assignment.compound.c"}
{:match #"&=|\^=|<<=|>>=|\|="
:name "keyword.operator.assignment.compound.bitwise.c"}
{:match #"<<|>>"
:name "keyword.operator.bitwise.shift.c"}
{:match #"!=|<=|>=|==|<|>"
:name "keyword.operator.comparison.c"}
{:match #"&&|!|\|\|"
:name "keyword.operator.logical.c"}
{:match #"&|\||\^|~"
:name "keyword.operator.c"}
{:match #"="
:name "keyword.operator.assignment.c"}
{:match #"%|\*|/|-|\+"
:name "keyword.operator.c"}])
;; skipped ternary operator bogus
(def ^:private c-libc-patterns
[{:captures {1 {:name "punctuation.whitespace.support.function.leading.c"}
2 {:name "support.function.C99.c"}}
:match #"(?x) (\s*) \b
(_Exit|(?:nearbyint|nextafter|nexttoward|netoward|nan)[fl]?|a(?:cos|sin)h?[fl]?|abort|abs|asctime|assert
|atan(?:[h2]?[fl]?)?|atexit|ato[ifl]|atoll|bsearch|btowc|cabs[fl]?|cacos|cacos[fl]|cacosh[fl]?
|calloc|carg[fl]?|casinh?[fl]?|catanh?[fl]?|cbrt[fl]?|ccosh?[fl]?|ceil[fl]?|cexp[fl]?|cimag[fl]?
|clearerr|clock|clog[fl]?|conj[fl]?|copysign[fl]?|cosh?[fl]?|cpow[fl]?|cproj[fl]?|creal[fl]?
|csinh?[fl]?|csqrt[fl]?|ctanh?[fl]?|ctime|difftime|div|erfc?[fl]?|exit|fabs[fl]?
|exp(?:2[fl]?|[fl]|m1[fl]?)?|fclose|fdim[fl]?|fe[gs]et(?:env|exceptflag|round)|feclearexcept
|feholdexcept|feof|feraiseexcept|ferror|fetestexcept|feupdateenv|fflush|fgetpos|fgetw?[sc]
|floor[fl]?|fmax?[fl]?|fmin[fl]?|fmod[fl]?|fopen|fpclassify|fprintf|fputw?[sc]|fread|free|freopen
|frexp[fl]?|fscanf|fseek|fsetpos|ftell|fwide|fwprintf|fwrite|fwscanf|genv|get[sc]|getchar|gmtime
|gwc|gwchar|hypot[fl]?|ilogb[fl]?|imaxabs|imaxdiv|isalnum|isalpha|isblank|iscntrl|isdigit|isfinite
|isgraph|isgreater|isgreaterequal|isinf|isless(?:equal|greater)?|isw?lower|isnan|isnormal|isw?print
|isw?punct|isw?space|isunordered|isw?upper|iswalnum|iswalpha|iswblank|iswcntrl|iswctype|iswdigit|iswgraph
|isw?xdigit|labs|ldexp[fl]?|ldiv|lgamma[fl]?|llabs|lldiv|llrint[fl]?|llround[fl]?|localeconv|localtime
|log[2b]?[fl]?|log1[p0][fl]?|longjmp|lrint[fl]?|lround[fl]?|malloc|mbr?len|mbr?towc|mbsinit|mbsrtowcs
|mbstowcs|memchr|memcmp|memcpy|memmove|memset|mktime|modf[fl]?|perror|pow[fl]?|printf|puts|putw?c(?:har)?
|qsort|raise|rand|remainder[fl]?|realloc|remove|remquo[fl]?|rename|rewind|rint[fl]?|round[fl]?|scalbl?n[fl]?
|scanf|setbuf|setjmp|setlocale|setvbuf|signal|signbit|sinh?[fl]?|snprintf|sprintf|sqrt[fl]?|srand|sscanf
|strcat|strchr|strcmp|strcoll|strcpy|strcspn|strerror|strftime|strlen|strncat|strncmp|strncpy|strpbrk
|strrchr|strspn|strstr|strto[kdf]|strtoimax|strtol[dl]?|strtoull?|strtoumax|strxfrm|swprintf|swscanf
|system|tan|tan[fl]|tanh[fl]?|tgamma[fl]?|time|tmpfile|tmpnam|tolower|toupper|trunc[fl]?|ungetw?c|va_arg
|va_copy|va_end|va_start|vfw?printf|vfw?scanf|vprintf|vscanf|vsnprintf|vsprintf|vsscanf|vswprintf|vswscanf
|vwprintf|vwscanf|wcrtomb|wcscat|wcschr|wcscmp|wcscoll|wcscpy|wcscspn|wcsftime|wcslen|wcsncat|wcsncmp|wcsncpy
|wcspbrk|wcsrchr|wcsrtombs|wcsspn|wcsstr|wcsto[dkf]|wcstoimax|wcstol[dl]?|wcstombs|wcstoull?|wcstoumax|wcsxfrm
|wctom?b|wmem(?:set|chr|cpy|cmp|move)|wprintf|wscanf)\b"}])
(def ^:private c-storage-types-patterns
[{:match #"\b(asm|__asm__|auto|bool|_Bool|char|_Complex|double|enum|float|_Imaginary|int|long|short|signed|struct|typedef|union|unsigned|void)\b"
:name "storage.type.c"}])
(def ^:private c-error-warning-directive-patterns
[{:begin #"^\s*(#)\s*(error|warning)\b"
:captures {1 {:name "punctuation.definition.directive.c"}
2 {:name "keyword.control.directive.diagnostic.c"}}
:end #"$"
:name "meta.preprocessor.diagnostic.c"
:patterns (concat c-comments-patterns
c-strings-patterns
c-line-continuation-character-patterns)}])
(def ^:private c-include-patterns
[{:begin #"^\s*(#)\s*(include(?:_next)?|import)\b\s*"
:begin-captures {1 {:name "punctuation.definition.directive.c"}
2 {:name "keyword.control.directive.c"}}
:end #"(?=(?://|/\*))|(?<!\\)(?=$)"
:name "meta.preprocessor.include.c"
:patterns (concat c-line-continuation-character-patterns
[{
:begin #"\""
:begin-captures {0 {:name "punctuation.definition.string.begin.c"}}
:end #"\""
:end-captures {0 {:name "punctuation.definition.string.end.c"}}
:name "string.quoted.double.include.c"}
{:begin #"<"
:begin-captures {0 {:name "punctuation.definition.string.begin.c"}}
:end #">"
:end-captures {0 {:name "punctuation.definition.string.end.c"}}
:name "string.quoted.other.lt-gt.include.c"}])}])
(def ^:private c-pragma-patterns
[{:begin #"^\s*(#)\s*(pragma)\b"
:begin-captures {1 {:name "punctuation.definition.directive.c"}
2 {:name "keyword.control.directive.pragma.c"}}
:end #"(?=(?://|/\*))|(?<!\\)(?=$)"
:name "meta.preprocessor.pragma.c"
:patterns (concat c-strings-patterns
[{:match #"[a-zA-Z_$][\w\-$]*"
:name "entity.other.attribute-name.pragma.preprocessor.c"}]
c-numbers-patterns
c-line-continuation-character-patterns)}])
(def ^:private c-line-patterns
[{:begin #"^\s*(#)\s*(line)\b"
:begin-captures {1 {:name "punctuation.definition.directive.c"}
2 {:name "keyword.control.directive.line.c"}}
:end #"(?=(?://|/\*))|(?<!\\)(?=$)"
:name "meta.preprocessor.c"
:patterns (concat c-strings-patterns
c-numbers-patterns
c-line-continuation-character-patterns)}])
(def ^:private c-naive-define-patterns
[{:begin #"(?x)
^\s* (\#\s*define) \s+ # define
([a-zA-Z_$][\w$]*) # macro name
(?:
(\()
(
\s* [a-zA-Z_$][\w$]* \s* # first argument
(?:(?:,) \s* [a-zA-Z_$][\w$]* \s*)* # additional arguments
(?:\.\.\.)? # varargs ellipsis?
)
(\))
)?"
:begin-captures {1 {:name "keyword.control.directive.define.c"}
2 {:name "entity.name.function.preprocessor.c"}
3 {:name "punctuation.definition.parameters.begin.c"}
4 {:name "variable.parameters.preprocessor.c"}
5 {:name "punctuation.definition.parameters.end.c"}}
:end #"(?=(?://|/\*))|(?<!\\)(?=$)"
:name "meta.preprocessor.macro.c"
:patterns []}]) ;; here we should work on the content also...
(def ^:private c-undef-patterns
[{:begin #"^\s*(#)\s*(undef)\b"
:begin-captures {1 {:name "punctuation.definition.directive.c"}
2 {:name "keyword.control.directive.undef.c"}}
:end #"(?=(?://|/\*))|(?<!\\)(?=$)"
:name "meta.preprocessor.c"
:patterns (concat [{:match #"[a-zA-Z_$][\w$]*"
:name "entity.name.function.preprocessor.c"}]
c-line-continuation-character-patterns)}])
;; POSIX <sys/types.h> typedef names.
(def ^:private c-sys-types-patterns
  [{:match #"\b(u_char|u_short|u_int|u_long|ushort|uint|u_quad_t|quad_t|qaddr_t|caddr_t|daddr_t|div_t|dev_t|fixpt_t|blkcnt_t|blksize_t|gid_t|in_addr_t|in_port_t|ino_t|key_t|mode_t|nlink_t|id_t|pid_t|off_t|segsz_t|swblk_t|uid_t|id_t|clock_t|size_t|ssize_t|time_t|useconds_t|suseconds_t)\b"
    :name "support.type.sys-types.c"}])
;; <pthread.h> handle and attribute typedef names.
(def ^:private c-pthread-types-patterns
  [{:match #"\b(pthread_attr_t|pthread_cond_t|pthread_condattr_t|pthread_mutex_t|pthread_mutexattr_t|pthread_once_t|pthread_rwlock_t|pthread_rwlockattr_t|pthread_t|pthread_key_t)\b"
    :name "support.type.pthread.c"}])
;; <stdint.h> fixed-width / least / fast / pointer-sized integer typedef names.
;; (?x) = extended mode: literal whitespace in the pattern is ignored.
(def ^:private c-stdint-types-patterns
  [{:match #"(?x) \b
             (int8_t|int16_t|int32_t|int64_t|uint8_t|uint16_t|uint32_t|uint64_t|int_least8_t
             |int_least16_t|int_least32_t|int_least64_t|uint_least8_t|uint_least16_t|uint_least32_t
             |uint_least64_t|int_fast8_t|int_fast16_t|int_fast32_t|int_fast64_t|uint_fast8_t
             |uint_fast16_t|uint_fast32_t|uint_fast64_t|intptr_t|uintptr_t|intmax_t|intmax_t
             |uintmax_t|uintmax_t)
             \b"
    :name "support.type.stdint.c"}])
;; Square brackets, scoped separately as begin/end punctuation.
(def ^:private c-square-bracket-patterns
  [{:match #"(\[)|(\])"
    :captures {1 {:name "punctuation.definition.begin.bracket.square.c"}
               2 {:name "punctuation.definition.end.bracket.square.c"}}}])
;; Flow-control keywords.
(def ^:private c-control-keywords-patterns
  [{:match #"\b(break|case|continue|default|do|else|for|goto|if|_Pragma|return|switch|while)\b"
    :name "keyword.control.c"}])
;; Storage-class and type-qualifier keywords.
(def ^:private c-storage-modifiers-patterns
  [{:match #"\b(const|extern|register|restrict|static|volatile|inline)\b"
    :name "storage.modifier.c"}])
;; Language-level constants (NULL plus boolean spellings).
(def ^:private c-language-constants-patterns
  [{:match #"\b(NULL|true|false|TRUE|FALSE)\b"
    :name "constant.language.c"}])
;; Statement terminator.
(def ^:private c-terminator-patterns
  [{:match #";"
    :name "punctuation.terminator.statement.c"}])
;; Argument/declarator separator.
(def ^:private c-separator-patterns
  [{:match #","
    :name "punctuation.separator.delimiter.c"}])
;; Grammar map consumed by the editor's syntax-highlighting engine.
;; Merges the shared C pattern groups defined above with C++-only additions,
;; hence "cish": one grammar serving C-ish sources of both languages.
;; Pattern order matters: earlier entries win, so comments and strings are
;; recognized before operators and keywords.
(def grammar
  {:name "CISH"
   :scope-name "source.cish"
   ;; Indent heuristics: indent after a line with an unmatched { or (,
   ;; outdent on a line that closes one (interleaved /* */ is tolerated).
   :indent {:begin #"^.*\{[^}\"\']*$|^.*\([^\)\"\']*$|^\s*\{\}$"
            :end #"^\s*(\s*/[*].*[*]/\s*)*\}|^\s*(\s*/[*].*[*]/\s*)*\)"}
   :line-comment "//"
   :patterns (concat c-comments-patterns
                     c-storage-types-patterns
                     c-control-keywords-patterns
                     c-storage-modifiers-patterns
                     c-language-constants-patterns
                     c-operators-patterns
                     c-numbers-patterns
                     c-strings-patterns
                     c-error-warning-directive-patterns
                     c-include-patterns
                     c-pragma-patterns
                     c-line-patterns
                     c-naive-define-patterns
                     c-undef-patterns
                     c-sys-types-patterns
                     c-pthread-types-patterns
                     c-stdint-types-patterns
                     c-libc-patterns
                     c-square-bracket-patterns
                     c-terminator-patterns
                     c-separator-patterns
                     ;; C++-only additions below.
                     [{:match #"\b(friend|explicit|virtual|override|final|noexcept)\b"
                       :name "storage.modifier.cish"}
                      {:match #"\b(private:|protected:|public:)"
                       :name "storage.modifier.cish"}
                      {:match #"\b(catch|operator|try|throw|using)\b"
                       :name "keyword.control.cish"}
                      {:match #"\bdelete\b(\s*\[\])?|\bnew\b(?!])"
                       :name "keyword.control.cish"}
                      ;; Redundant with c-operators-patterns' comparison rule,
                      ;; but kept for parity with the source grammar.
                      {:match #"<="
                       :name "keyword.operator.cish"}
                      {:match #"\bthis\b"
                       :name "variable.language.this.cish"}
                      ;; Fixed typo: the C++ keyword is "nullptr", not "nulptr".
                      {:match #"\bnullptr\b"
                       :name "constant.language.cish"}
                      {:match #"\btemplate\b\s*"
                       :name "storage.type.template.cish"}
                      {:match #"\b(const_cast|dynamic_cast|reinterpret_cast|static_cast)\b\s*"
                       :name "keyword.operator.cast.cish"}
                      {:match #"::"
                       :name "punctuation.separator.namespace.access.cpp"}
                      {:match #"\b(and|and_eq|bitand|bitor|compl|not|not_eq|or|or_eq|typeid|xor|xor_eq|alignof|alignas)\b"
                       :name "keyword.operator.cpp"}
                      {:match #"\b(class|decltype|wchar_t|char16_t|char32_t)\b"
                       :name "storage.type.cpp"}
                      {:match #"\b(constexpr|export|mutable|typename|thread_local)\b"
                       :name "storage.modifier.cpp"}
                      ;; C++ double-quoted strings, including encoding/raw prefixes.
                      ;; NOTE: the TextMate source grammar used \h, which is a hex
                      ;; digit in Oniguruma but horizontal whitespace in Java
                      ;; regexes, so the hex classes are spelled out explicitly.
                      {:begin #"(u|u8|U|L)?R?\""
                       :begin-captures {0 {:name "punctuation.definition.string.begin.cpp"}}
                       :end #"\""
                       :end-captures {0 {:name "punctuation.definition.string.end.cpp"}}
                       :name "string.quoted.double.cpp"
                       :patterns (concat [{:match #"\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}"
                                           :name "constant.character.escape.cpp"}
                                          {:match #"\\['\"?\\abfnrtv]"
                                           :name "constant.character.escape.cpp"}
                                          ;; Octal escapes are 1-3 digits ({13} was a typo for {1,3}).
                                          {:match #"\\[0-7]{1,3}"
                                           :name "constant.character.escape.cpp"}
                                          {:match #"\\x[0-9a-fA-F]+"
                                           :name "constant.character.escape.cpp"}]
                                         c-string-placeholder-patterns)}])})
|
56154
|
;; Copyright 2020-2022 The Defold Foundation
;; Copyright 2014-2020 King
;; Copyright 2009-2014 <NAME>, <NAME>
;; Licensed under the Defold License version 1.0 (the "License"); you may not use
;; this file except in compliance with the License.
;;
;; You may obtain a copy of the License, together with FAQs at
;; https://www.defold.com/license
;;
;; Unless required by applicable law or agreed to in writing, software distributed
;; under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
;; CONDITIONS OF ANY KIND, either express or implied. See the License for the
;; specific language governing permissions and limitations under the License.
(ns editor.code.lang.cish)
;; Pattern definitions translated from vscode:
;;
;; https://github.com/Microsoft/vscode/blob/master/extensions/cpp/syntaxes/c.json
;; https://github.com/Microsoft/vscode/blob/master/extensions/cpp/syntaxes/c%2B%2B.json
;;
;; Problems:
;; - No support for repositories. Can partly be replaced by def's and concat'ing patterns. But circular definitions are a hassle.
;; - Regexp grammar slightly different
;; - \n often used to match end of line. Our lines don't have an ending \n, so replace with $.
;;
;; We should really expand our grammar support and auto translate the grammar definitions.
;;
;; A trailing backslash, highlighted as a line-continuation escape.
(def ^:private c-line-continuation-character-patterns
  [{:match #"(\\)$"
    :captures {1 {:name "constant.character.escape.line-continuation.c"}}}])
;; Block comments, banner comments (/* = ... = */ and // = ... =),
;; // line comments, and stray */ terminators (flagged as illegal).
(def ^:private c-comments-patterns
  [{:captures {1 {:name "meta.toc-list.banner.block.c"}}
    :match #"^/\* =(\s*.*?)\s*= \*/$$?"
    :name "comment.block.c"}
   {:begin #"/\*"
    :begin-captures {0 {:name "punctuation.definition.comment.begin.c"}}
    :end #"\*/"
    :end-captures {0 {:name "punctuation.definition.comment.end.c"}}
    :name "comment.block.c"}
   {:match #"\*/.*$"
    :name "invalid.illegal.stray-comment-end.c"}
   {:captures {1 {:name "meta.toc-list.banner.line.c"}}
    :match #"^// =(\s*.*?)\s*=\s*$$?"
    :name "comment.line.banner.cpp"}
   {:begin #"(^[ \t]+)?(?=//)"
    :begin-captures {1 {:name "punctuation.whitespace.comment.leading.cpp"}}
    :end #"(?!\G)"
    :patterns [{:begin #"//"
                :begin-captures {0 {:name "punctuation.definition.comment.cpp"}}
                :end #"(?=$)"
                :name "comment.line.double-slash.cpp"
                :patterns c-line-continuation-character-patterns}]}])
;; Numeric literals: hex, binary, and decimal (with optional fraction and
;; exponent), digit separators (') and size/signedness suffixes.
(def ^:private c-numbers-patterns
  [{:match #"\b((0(x|X)[0-9a-fA-F]([0-9a-fA-F']*[0-9a-fA-F])?)|(0(b|B)[01]([01']*[01])?)|(([0-9]([0-9']*[0-9])?\.?[0-9]*([0-9']*[0-9])?)|(\.[0-9]([0-9']*[0-9])?))((e|E)(\+|-)?[0-9]([0-9']*[0-9])?)?)(L|l|UL|ul|u|U|F|f|ll|LL|ull|ULL)?\b"
    :name "constant.numeric.c"}])
;; Escape sequences valid inside C string/char literals; any other character
;; following a backslash is flagged as an unknown escape.
(def ^:private c-string-escaped-char-patterns
  [{:match #"(?x)\\ (
             \\ |
             [abefnprtv'\"?] |
             [0-3]\d{0,2} |
             [4-7]\d? |
             x[a-fA-F0-9]{0,2} |
             u[a-fA-F0-9]{0,4} |
             U[a-fA-F0-9]{0,8} )"
    :name "constant.character.escape.c"}
   {:match #"\\."
    :name "invalid.illegal.unknown-escape.c"}])
;; printf/scanf-style % conversion placeholders inside string literals; a bare
;; % that is not a placeholder (and not a PRI/SCN macro join) is illegal.
(def ^:private c-string-placeholder-patterns
  [{:match #"(?x) %
             (\d+\$)?                             # field (argument #)
             [\#0\- +']*                          # flags
             [,;:_]?                              # separator character (AltiVec)
             ((-?\d+)|\*(-?\d+\$)?)?              # minimum field width
             (\.((-?\d+)|\*(-?\d+\$)?)?)?         # precision
             (hh|h|ll|l|j|t|z|q|L|vh|vl|v|hv|hl)? # length modifier
             [diouxXDOUeEfFgGaACcSspn%]           # conversion type"
    :name "constant.other.placeholder.c"}
   {:match #"(%)(?!\"\s*(PRI|SCN))"
    :captures {1 {:name "invalid.illegal.placeholder.c"}}}])
;; Double- and single-quoted literals. Both recognize escapes and line
;; continuations; double-quoted strings also recognize % placeholders.
(def ^:private c-strings-patterns
  [{:begin #"\""
    :begin-captures {0 {:name "punctuation.definition.string.begin.c"}}
    :end #"\""
    :end-captures {0 {:name "punctuation.definition.string.end.c"}}
    :name "string.quoted.double.c"
    :patterns (concat c-string-escaped-char-patterns
                      c-string-placeholder-patterns
                      c-line-continuation-character-patterns)}
   {:begin #"'"
    :begin-captures {0 {:name "punctuation.definition.string.begin.c"}}
    :end #"'"
    :end-captures {0 {:name "punctuation.definition.string.end.c"}}
    :name "string.quoted.single.c"
    :patterns (concat c-string-escaped-char-patterns
                      c-line-continuation-character-patterns)}])
;; C operators. Order matters: longer compound forms (e.g. <<=) are listed
;; before their prefixes so they win during matching.
(def ^:private c-operators-patterns
  [{:match #"(?<![\w$])(sizeof)(?![\w$])"
    :name "keyword.operator.sizeof.c"}
   {:match #"--"
    :name "keyword.operator.decrement.c"}
   {:match #"\+\+"
    :name "keyword.operator.increment.c"}
   {:match #"%=|\+=|-=|\*=|(?<!\()/="
    :name "keyword.operator.assignment.compound.c"}
   {:match #"&=|\^=|<<=|>>=|\|="
    :name "keyword.operator.assignment.compound.bitwise.c"}
   {:match #"<<|>>"
    :name "keyword.operator.bitwise.shift.c"}
   {:match #"!=|<=|>=|==|<|>"
    :name "keyword.operator.comparison.c"}
   {:match #"&&|!|\|\|"
    :name "keyword.operator.logical.c"}
   {:match #"&|\||\^|~"
    :name "keyword.operator.c"}
   {:match #"="
    :name "keyword.operator.assignment.c"}
   {:match #"%|\*|/|-|\+"
    :name "keyword.operator.c"}])
;; NOTE: the ternary-operator patterns from the vscode grammar were skipped.
;; C99 standard-library function names. Leading whitespace is captured
;; separately (group 1) so it can be scoped as punctuation.
(def ^:private c-libc-patterns
  [{:captures {1 {:name "punctuation.whitespace.support.function.leading.c"}
               2 {:name "support.function.C99.c"}}
    :match #"(?x) (\s*) \b
            (_Exit|(?:nearbyint|nextafter|nexttoward|netoward|nan)[fl]?|a(?:cos|sin)h?[fl]?|abort|abs|asctime|assert
            |atan(?:[h2]?[fl]?)?|atexit|ato[ifl]|atoll|bsearch|btowc|cabs[fl]?|cacos|cacos[fl]|cacosh[fl]?
            |calloc|carg[fl]?|casinh?[fl]?|catanh?[fl]?|cbrt[fl]?|ccosh?[fl]?|ceil[fl]?|cexp[fl]?|cimag[fl]?
            |clearerr|clock|clog[fl]?|conj[fl]?|copysign[fl]?|cosh?[fl]?|cpow[fl]?|cproj[fl]?|creal[fl]?
            |csinh?[fl]?|csqrt[fl]?|ctanh?[fl]?|ctime|difftime|div|erfc?[fl]?|exit|fabs[fl]?
            |exp(?:2[fl]?|[fl]|m1[fl]?)?|fclose|fdim[fl]?|fe[gs]et(?:env|exceptflag|round)|feclearexcept
            |feholdexcept|feof|feraiseexcept|ferror|fetestexcept|feupdateenv|fflush|fgetpos|fgetw?[sc]
            |floor[fl]?|fmax?[fl]?|fmin[fl]?|fmod[fl]?|fopen|fpclassify|fprintf|fputw?[sc]|fread|free|freopen
            |frexp[fl]?|fscanf|fseek|fsetpos|ftell|fwide|fwprintf|fwrite|fwscanf|genv|get[sc]|getchar|gmtime
            |gwc|gwchar|hypot[fl]?|ilogb[fl]?|imaxabs|imaxdiv|isalnum|isalpha|isblank|iscntrl|isdigit|isfinite
            |isgraph|isgreater|isgreaterequal|isinf|isless(?:equal|greater)?|isw?lower|isnan|isnormal|isw?print
            |isw?punct|isw?space|isunordered|isw?upper|iswalnum|iswalpha|iswblank|iswcntrl|iswctype|iswdigit|iswgraph
            |isw?xdigit|labs|ldexp[fl]?|ldiv|lgamma[fl]?|llabs|lldiv|llrint[fl]?|llround[fl]?|localeconv|localtime
            |log[2b]?[fl]?|log1[p0][fl]?|longjmp|lrint[fl]?|lround[fl]?|malloc|mbr?len|mbr?towc|mbsinit|mbsrtowcs
            |mbstowcs|memchr|memcmp|memcpy|memmove|memset|mktime|modf[fl]?|perror|pow[fl]?|printf|puts|putw?c(?:har)?
            |qsort|raise|rand|remainder[fl]?|realloc|remove|remquo[fl]?|rename|rewind|rint[fl]?|round[fl]?|scalbl?n[fl]?
            |scanf|setbuf|setjmp|setlocale|setvbuf|signal|signbit|sinh?[fl]?|snprintf|sprintf|sqrt[fl]?|srand|sscanf
            |strcat|strchr|strcmp|strcoll|strcpy|strcspn|strerror|strftime|strlen|strncat|strncmp|strncpy|strpbrk
            |strrchr|strspn|strstr|strto[kdf]|strtoimax|strtol[dl]?|strtoull?|strtoumax|strxfrm|swprintf|swscanf
            |system|tan|tan[fl]|tanh[fl]?|tgamma[fl]?|time|tmpfile|tmpnam|tolower|toupper|trunc[fl]?|ungetw?c|va_arg
            |va_copy|va_end|va_start|vfw?printf|vfw?scanf|vprintf|vscanf|vsnprintf|vsprintf|vsscanf|vswprintf|vswscanf
            |vwprintf|vwscanf|wcrtomb|wcscat|wcschr|wcscmp|wcscoll|wcscpy|wcscspn|wcsftime|wcslen|wcsncat|wcsncmp|wcsncpy
            |wcspbrk|wcsrchr|wcsrtombs|wcsspn|wcsstr|wcsto[dkf]|wcstoimax|wcstol[dl]?|wcstombs|wcstoull?|wcstoumax|wcsxfrm
            |wctom?b|wmem(?:set|chr|cpy|cmp|move)|wprintf|wscanf)\b"}])
;; Built-in type keywords and type-introducing keywords.
(def ^:private c-storage-types-patterns
  [{:match #"\b(asm|__asm__|auto|bool|_Bool|char|_Complex|double|enum|float|_Imaginary|int|long|short|signed|struct|typedef|union|unsigned|void)\b"
    :name "storage.type.c"}])
;; #error / #warning diagnostic directives; the trailing message may contain
;; comments and strings.
(def ^:private c-error-warning-directive-patterns
  [{:begin #"^\s*(#)\s*(error|warning)\b"
    :captures {1 {:name "punctuation.definition.directive.c"}
               2 {:name "keyword.control.directive.diagnostic.c"}}
    :end #"$"
    :name "meta.preprocessor.diagnostic.c"
    :patterns (concat c-comments-patterns
                      c-strings-patterns
                      c-line-continuation-character-patterns)}])
;; #include / #include_next / #import directives with "..." or <...> targets.
;; The directive ends at a comment or at an unescaped end of line.
(def ^:private c-include-patterns
  [{:begin #"^\s*(#)\s*(include(?:_next)?|import)\b\s*"
    :begin-captures {1 {:name "punctuation.definition.directive.c"}
                     2 {:name "keyword.control.directive.c"}}
    :end #"(?=(?://|/\*))|(?<!\\)(?=$)"
    :name "meta.preprocessor.include.c"
    :patterns (concat c-line-continuation-character-patterns
                      [{
                       :begin #"\""
                       :begin-captures {0 {:name "punctuation.definition.string.begin.c"}}
                       :end #"\""
                       :end-captures {0 {:name "punctuation.definition.string.end.c"}}
                       :name "string.quoted.double.include.c"}
                       {:begin #"<"
                        :begin-captures {0 {:name "punctuation.definition.string.begin.c"}}
                        :end #">"
                        :end-captures {0 {:name "punctuation.definition.string.end.c"}}
                        :name "string.quoted.other.lt-gt.include.c"}])}])
;; #pragma directives; pragma attribute names may contain dashes.
(def ^:private c-pragma-patterns
  [{:begin #"^\s*(#)\s*(pragma)\b"
    :begin-captures {1 {:name "punctuation.definition.directive.c"}
                     2 {:name "keyword.control.directive.pragma.c"}}
    :end #"(?=(?://|/\*))|(?<!\\)(?=$)"
    :name "meta.preprocessor.pragma.c"
    :patterns (concat c-strings-patterns
                      [{:match #"[a-zA-Z_$][\w\-$]*"
                        :name "entity.other.attribute-name.pragma.preprocessor.c"}]
                      c-numbers-patterns
                      c-line-continuation-character-patterns)}])
;; #line directives (line number plus optional file-name string).
(def ^:private c-line-patterns
  [{:begin #"^\s*(#)\s*(line)\b"
    :begin-captures {1 {:name "punctuation.definition.directive.c"}
                     2 {:name "keyword.control.directive.line.c"}}
    :end #"(?=(?://|/\*))|(?<!\\)(?=$)"
    :name "meta.preprocessor.c"
    :patterns (concat c-strings-patterns
                      c-numbers-patterns
                      c-line-continuation-character-patterns)}])
;; #define directives. Captures the macro name and, for function-like macros,
;; the parenthesized parameter list. The macro body is not yet highlighted
;; (hence the empty :patterns).
(def ^:private c-naive-define-patterns
  [{:begin #"(?x)
             ^\s* (\#\s*define) \s+       # define
             ([a-zA-Z_$][\w$]*)           # macro name
             (?:
               (\()
                 (
                   \s* [a-zA-Z_$][\w$]* \s*             # first argument
                   (?:(?:,) \s* [a-zA-Z_$][\w$]* \s*)*  # additional arguments
                   (?:\.\.\.)?                          # varargs ellipsis?
                 )
               (\))
             )?"
    :begin-captures {1 {:name "keyword.control.directive.define.c"}
                     2 {:name "entity.name.function.preprocessor.c"}
                     3 {:name "punctuation.definition.parameters.begin.c"}
                     4 {:name "variable.parameters.preprocessor.c"}
                     5 {:name "punctuation.definition.parameters.end.c"}}
    :end #"(?=(?://|/\*))|(?<!\\)(?=$)"
    :name "meta.preprocessor.macro.c"
    :patterns []}]) ;; here we should work on the content also...
;; #undef directives; the undefined identifier is scoped like a macro name.
(def ^:private c-undef-patterns
  [{:begin #"^\s*(#)\s*(undef)\b"
    :begin-captures {1 {:name "punctuation.definition.directive.c"}
                     2 {:name "keyword.control.directive.undef.c"}}
    :end #"(?=(?://|/\*))|(?<!\\)(?=$)"
    :name "meta.preprocessor.c"
    :patterns (concat [{:match #"[a-zA-Z_$][\w$]*"
                        :name "entity.name.function.preprocessor.c"}]
                      c-line-continuation-character-patterns)}])
;; POSIX <sys/types.h> typedef names.
(def ^:private c-sys-types-patterns
  [{:match #"\b(u_char|u_short|u_int|u_long|ushort|uint|u_quad_t|quad_t|qaddr_t|caddr_t|daddr_t|div_t|dev_t|fixpt_t|blkcnt_t|blksize_t|gid_t|in_addr_t|in_port_t|ino_t|key_t|mode_t|nlink_t|id_t|pid_t|off_t|segsz_t|swblk_t|uid_t|id_t|clock_t|size_t|ssize_t|time_t|useconds_t|suseconds_t)\b"
    :name "support.type.sys-types.c"}])
;; <pthread.h> handle and attribute typedef names.
(def ^:private c-pthread-types-patterns
  [{:match #"\b(pthread_attr_t|pthread_cond_t|pthread_condattr_t|pthread_mutex_t|pthread_mutexattr_t|pthread_once_t|pthread_rwlock_t|pthread_rwlockattr_t|pthread_t|pthread_key_t)\b"
    :name "support.type.pthread.c"}])
;; <stdint.h> fixed-width / least / fast / pointer-sized integer typedef names.
;; (?x) = extended mode: literal whitespace in the pattern is ignored.
(def ^:private c-stdint-types-patterns
  [{:match #"(?x) \b
             (int8_t|int16_t|int32_t|int64_t|uint8_t|uint16_t|uint32_t|uint64_t|int_least8_t
             |int_least16_t|int_least32_t|int_least64_t|uint_least8_t|uint_least16_t|uint_least32_t
             |uint_least64_t|int_fast8_t|int_fast16_t|int_fast32_t|int_fast64_t|uint_fast8_t
             |uint_fast16_t|uint_fast32_t|uint_fast64_t|intptr_t|uintptr_t|intmax_t|intmax_t
             |uintmax_t|uintmax_t)
             \b"
    :name "support.type.stdint.c"}])
;; Square brackets, scoped separately as begin/end punctuation.
(def ^:private c-square-bracket-patterns
  [{:match #"(\[)|(\])"
    :captures {1 {:name "punctuation.definition.begin.bracket.square.c"}
               2 {:name "punctuation.definition.end.bracket.square.c"}}}])
;; Flow-control keywords.
(def ^:private c-control-keywords-patterns
  [{:match #"\b(break|case|continue|default|do|else|for|goto|if|_Pragma|return|switch|while)\b"
    :name "keyword.control.c"}])
;; Storage-class and type-qualifier keywords.
(def ^:private c-storage-modifiers-patterns
  [{:match #"\b(const|extern|register|restrict|static|volatile|inline)\b"
    :name "storage.modifier.c"}])
;; Language-level constants (NULL plus boolean spellings).
(def ^:private c-language-constants-patterns
  [{:match #"\b(NULL|true|false|TRUE|FALSE)\b"
    :name "constant.language.c"}])
;; Statement terminator.
(def ^:private c-terminator-patterns
  [{:match #";"
    :name "punctuation.terminator.statement.c"}])
;; Argument/declarator separator.
(def ^:private c-separator-patterns
  [{:match #","
    :name "punctuation.separator.delimiter.c"}])
;; Grammar map consumed by the editor's syntax-highlighting engine.
;; Merges the shared C pattern groups defined above with C++-only additions,
;; hence "cish": one grammar serving C-ish sources of both languages.
;; Pattern order matters: earlier entries win, so comments and strings are
;; recognized before operators and keywords.
(def grammar
  {:name "CISH"
   :scope-name "source.cish"
   ;; Indent heuristics: indent after a line with an unmatched { or (,
   ;; outdent on a line that closes one (interleaved /* */ is tolerated).
   :indent {:begin #"^.*\{[^}\"\']*$|^.*\([^\)\"\']*$|^\s*\{\}$"
            :end #"^\s*(\s*/[*].*[*]/\s*)*\}|^\s*(\s*/[*].*[*]/\s*)*\)"}
   :line-comment "//"
   :patterns (concat c-comments-patterns
                     c-storage-types-patterns
                     c-control-keywords-patterns
                     c-storage-modifiers-patterns
                     c-language-constants-patterns
                     c-operators-patterns
                     c-numbers-patterns
                     c-strings-patterns
                     c-error-warning-directive-patterns
                     c-include-patterns
                     c-pragma-patterns
                     c-line-patterns
                     c-naive-define-patterns
                     c-undef-patterns
                     c-sys-types-patterns
                     c-pthread-types-patterns
                     c-stdint-types-patterns
                     c-libc-patterns
                     c-square-bracket-patterns
                     c-terminator-patterns
                     c-separator-patterns
                     ;; C++-only additions below.
                     [{:match #"\b(friend|explicit|virtual|override|final|noexcept)\b"
                       :name "storage.modifier.cish"}
                      {:match #"\b(private:|protected:|public:)"
                       :name "storage.modifier.cish"}
                      {:match #"\b(catch|operator|try|throw|using)\b"
                       :name "keyword.control.cish"}
                      {:match #"\bdelete\b(\s*\[\])?|\bnew\b(?!])"
                       :name "keyword.control.cish"}
                      ;; Redundant with c-operators-patterns' comparison rule,
                      ;; but kept for parity with the source grammar.
                      {:match #"<="
                       :name "keyword.operator.cish"}
                      {:match #"\bthis\b"
                       :name "variable.language.this.cish"}
                      ;; Fixed typo: the C++ keyword is "nullptr", not "nulptr".
                      {:match #"\bnullptr\b"
                       :name "constant.language.cish"}
                      {:match #"\btemplate\b\s*"
                       :name "storage.type.template.cish"}
                      {:match #"\b(const_cast|dynamic_cast|reinterpret_cast|static_cast)\b\s*"
                       :name "keyword.operator.cast.cish"}
                      {:match #"::"
                       :name "punctuation.separator.namespace.access.cpp"}
                      {:match #"\b(and|and_eq|bitand|bitor|compl|not|not_eq|or|or_eq|typeid|xor|xor_eq|alignof|alignas)\b"
                       :name "keyword.operator.cpp"}
                      {:match #"\b(class|decltype|wchar_t|char16_t|char32_t)\b"
                       :name "storage.type.cpp"}
                      {:match #"\b(constexpr|export|mutable|typename|thread_local)\b"
                       :name "storage.modifier.cpp"}
                      ;; C++ double-quoted strings, including encoding/raw prefixes.
                      ;; NOTE: the TextMate source grammar used \h, which is a hex
                      ;; digit in Oniguruma but horizontal whitespace in Java
                      ;; regexes, so the hex classes are spelled out explicitly.
                      {:begin #"(u|u8|U|L)?R?\""
                       :begin-captures {0 {:name "punctuation.definition.string.begin.cpp"}}
                       :end #"\""
                       :end-captures {0 {:name "punctuation.definition.string.end.cpp"}}
                       :name "string.quoted.double.cpp"
                       :patterns (concat [{:match #"\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}"
                                           :name "constant.character.escape.cpp"}
                                          {:match #"\\['\"?\\abfnrtv]"
                                           :name "constant.character.escape.cpp"}
                                          ;; Octal escapes are 1-3 digits ({13} was a typo for {1,3}).
                                          {:match #"\\[0-7]{1,3}"
                                           :name "constant.character.escape.cpp"}
                                          {:match #"\\x[0-9a-fA-F]+"
                                           :name "constant.character.escape.cpp"}]
                                         c-string-placeholder-patterns)}])})
| true |
;; Copyright 2020-2022 The Defold Foundation
;; Copyright 2014-2020 King
;; Copyright 2009-2014 PI:NAME:<NAME>END_PI, PI:NAME:<NAME>END_PI
;; Licensed under the Defold License version 1.0 (the "License"); you may not use
;; this file except in compliance with the License.
;;
;; You may obtain a copy of the License, together with FAQs at
;; https://www.defold.com/license
;;
;; Unless required by applicable law or agreed to in writing, software distributed
;; under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
;; CONDITIONS OF ANY KIND, either express or implied. See the License for the
;; specific language governing permissions and limitations under the License.
(ns editor.code.lang.cish)
;; Pattern definitions translated from vscode:
;;
;; https://github.com/Microsoft/vscode/blob/master/extensions/cpp/syntaxes/c.json
;; https://github.com/Microsoft/vscode/blob/master/extensions/cpp/syntaxes/c%2B%2B.json
;;
;; Problems:
;; - No support for repositories. Can partly be replaced by def's and concat'ing patterns. But circular definitions are a hassle.
;; - Regexp grammar slightly different
;; - \n often used to match end of line. Our lines don't have an ending \n, so replace with $.
;;
;; We should really expand our grammar support and auto translate the grammar definitions.
;;
(def ^:private c-line-continuation-character-patterns
[{:match #"(\\)$"
:captures {1 {:name "constant.character.escape.line-continuation.c"}}}])
(def ^:private c-comments-patterns
[{:captures {1 {:name "meta.toc-list.banner.block.c"}}
:match #"^/\* =(\s*.*?)\s*= \*/$$?"
:name "comment.block.c"}
{:begin #"/\*"
:begin-captures {0 {:name "punctuation.definition.comment.begin.c"}}
:end #"\*/"
:end-captures {0 {:name "punctuation.definition.comment.end.c"}}
:name "comment.block.c"}
{:match #"\*/.*$"
:name "invalid.illegal.stray-comment-end.c"}
{:captures {1 {:name "meta.toc-list.banner.line.c"}}
:match #"^// =(\s*.*?)\s*=\s*$$?"
:name "comment.line.banner.cpp"}
{:begin #"(^[ \t]+)?(?=//)"
:begin-captures {1 {:name "punctuation.whitespace.comment.leading.cpp"}}
:end #"(?!\G)"
:patterns [{:begin #"//"
:begin-captures {0 {:name "punctuation.definition.comment.cpp"}}
:end #"(?=$)"
:name "comment.line.double-slash.cpp"
:patterns c-line-continuation-character-patterns}]}])
(def ^:private c-numbers-patterns
[{:match #"\b((0(x|X)[0-9a-fA-F]([0-9a-fA-F']*[0-9a-fA-F])?)|(0(b|B)[01]([01']*[01])?)|(([0-9]([0-9']*[0-9])?\.?[0-9]*([0-9']*[0-9])?)|(\.[0-9]([0-9']*[0-9])?))((e|E)(\+|-)?[0-9]([0-9']*[0-9])?)?)(L|l|UL|ul|u|U|F|f|ll|LL|ull|ULL)?\b"
:name "constant.numeric.c"}])
(def ^:private c-string-escaped-char-patterns
[{:match #"(?x)\\ (
\\ |
[abefnprtv'\"?] |
[0-3]\d{0,2} |
[4-7]\d? |
x[a-fA-F0-9]{0,2} |
u[a-fA-F0-9]{0,4} |
U[a-fA-F0-9]{0,8} )"
:name "constant.character.escape.c"}
{:match #"\\."
:name "invalid.illegal.unknown-escape.c"}])
(def ^:private c-string-placeholder-patterns
[{:match #"(?x) %
(\d+\$)? # field (argument #)
[\#0\- +']* # flags
[,;:_]? # separator character (AltiVec)
((-?\d+)|\*(-?\d+\$)?)? # minimum field width
(\.((-?\d+)|\*(-?\d+\$)?)?)? # precision
(hh|h|ll|l|j|t|z|q|L|vh|vl|v|hv|hl)? # length modifier
[diouxXDOUeEfFgGaACcSspn%] # conversion type"
:name "constant.other.placeholder.c"}
{:match #"(%)(?!\"\s*(PRI|SCN))"
:captures {1 {:name "invalid.illegal.placeholder.c"}}}])
(def ^:private c-strings-patterns
[{:begin #"\""
:begin-captures {0 {:name "punctuation.definition.string.begin.c"}}
:end #"\""
:end-captures {0 {:name "punctuation.definition.string.end.c"}}
:name "string.quoted.double.c"
:patterns (concat c-string-escaped-char-patterns
c-string-placeholder-patterns
c-line-continuation-character-patterns)}
{:begin #"'"
:begin-captures {0 {:name "punctuation.definition.string.begin.c"}}
:end #"'"
:end-captures {0 {:name "punctuation.definition.string.end.c"}}
:name "string.quoted.single.c"
:patterns (concat c-string-escaped-char-patterns
c-line-continuation-character-patterns)}])
(def ^:private c-operators-patterns
[{:match #"(?<![\w$])(sizeof)(?![\w$])"
:name "keyword.operator.sizeof.c"}
{:match #"--"
:name "keyword.operator.decrement.c"}
{:match #"\+\+"
:name "keyword.operator.increment.c"}
{:match #"%=|\+=|-=|\*=|(?<!\()/="
:name "keyword.operator.assignment.compound.c"}
{:match #"&=|\^=|<<=|>>=|\|="
:name "keyword.operator.assignment.compound.bitwise.c"}
{:match #"<<|>>"
:name "keyword.operator.bitwise.shift.c"}
{:match #"!=|<=|>=|==|<|>"
:name "keyword.operator.comparison.c"}
{:match #"&&|!|\|\|"
:name "keyword.operator.logical.c"}
{:match #"&|\||\^|~"
:name "keyword.operator.c"}
{:match #"="
:name "keyword.operator.assignment.c"}
{:match #"%|\*|/|-|\+"
:name "keyword.operator.c"}])
;; skipped ternary operator bogus
(def ^:private c-libc-patterns
[{:captures {1 {:name "punctuation.whitespace.support.function.leading.c"}
2 {:name "support.function.C99.c"}}
:match #"(?x) (\s*) \b
(_Exit|(?:nearbyint|nextafter|nexttoward|netoward|nan)[fl]?|a(?:cos|sin)h?[fl]?|abort|abs|asctime|assert
|atan(?:[h2]?[fl]?)?|atexit|ato[ifl]|atoll|bsearch|btowc|cabs[fl]?|cacos|cacos[fl]|cacosh[fl]?
|calloc|carg[fl]?|casinh?[fl]?|catanh?[fl]?|cbrt[fl]?|ccosh?[fl]?|ceil[fl]?|cexp[fl]?|cimag[fl]?
|clearerr|clock|clog[fl]?|conj[fl]?|copysign[fl]?|cosh?[fl]?|cpow[fl]?|cproj[fl]?|creal[fl]?
|csinh?[fl]?|csqrt[fl]?|ctanh?[fl]?|ctime|difftime|div|erfc?[fl]?|exit|fabs[fl]?
|exp(?:2[fl]?|[fl]|m1[fl]?)?|fclose|fdim[fl]?|fe[gs]et(?:env|exceptflag|round)|feclearexcept
|feholdexcept|feof|feraiseexcept|ferror|fetestexcept|feupdateenv|fflush|fgetpos|fgetw?[sc]
|floor[fl]?|fmax?[fl]?|fmin[fl]?|fmod[fl]?|fopen|fpclassify|fprintf|fputw?[sc]|fread|free|freopen
|frexp[fl]?|fscanf|fseek|fsetpos|ftell|fwide|fwprintf|fwrite|fwscanf|genv|get[sc]|getchar|gmtime
|gwc|gwchar|hypot[fl]?|ilogb[fl]?|imaxabs|imaxdiv|isalnum|isalpha|isblank|iscntrl|isdigit|isfinite
|isgraph|isgreater|isgreaterequal|isinf|isless(?:equal|greater)?|isw?lower|isnan|isnormal|isw?print
|isw?punct|isw?space|isunordered|isw?upper|iswalnum|iswalpha|iswblank|iswcntrl|iswctype|iswdigit|iswgraph
|isw?xdigit|labs|ldexp[fl]?|ldiv|lgamma[fl]?|llabs|lldiv|llrint[fl]?|llround[fl]?|localeconv|localtime
|log[2b]?[fl]?|log1[p0][fl]?|longjmp|lrint[fl]?|lround[fl]?|malloc|mbr?len|mbr?towc|mbsinit|mbsrtowcs
|mbstowcs|memchr|memcmp|memcpy|memmove|memset|mktime|modf[fl]?|perror|pow[fl]?|printf|puts|putw?c(?:har)?
|qsort|raise|rand|remainder[fl]?|realloc|remove|remquo[fl]?|rename|rewind|rint[fl]?|round[fl]?|scalbl?n[fl]?
|scanf|setbuf|setjmp|setlocale|setvbuf|signal|signbit|sinh?[fl]?|snprintf|sprintf|sqrt[fl]?|srand|sscanf
|strcat|strchr|strcmp|strcoll|strcpy|strcspn|strerror|strftime|strlen|strncat|strncmp|strncpy|strpbrk
|strrchr|strspn|strstr|strto[kdf]|strtoimax|strtol[dl]?|strtoull?|strtoumax|strxfrm|swprintf|swscanf
|system|tan|tan[fl]|tanh[fl]?|tgamma[fl]?|time|tmpfile|tmpnam|tolower|toupper|trunc[fl]?|ungetw?c|va_arg
|va_copy|va_end|va_start|vfw?printf|vfw?scanf|vprintf|vscanf|vsnprintf|vsprintf|vsscanf|vswprintf|vswscanf
|vwprintf|vwscanf|wcrtomb|wcscat|wcschr|wcscmp|wcscoll|wcscpy|wcscspn|wcsftime|wcslen|wcsncat|wcsncmp|wcsncpy
|wcspbrk|wcsrchr|wcsrtombs|wcsspn|wcsstr|wcsto[dkf]|wcstoimax|wcstol[dl]?|wcstombs|wcstoull?|wcstoumax|wcsxfrm
|wctom?b|wmem(?:set|chr|cpy|cmp|move)|wprintf|wscanf)\b"}])
(def ^:private c-storage-types-patterns
[{:match #"\b(asm|__asm__|auto|bool|_Bool|char|_Complex|double|enum|float|_Imaginary|int|long|short|signed|struct|typedef|union|unsigned|void)\b"
:name "storage.type.c"}])
(def ^:private c-error-warning-directive-patterns
[{:begin #"^\s*(#)\s*(error|warning)\b"
:captures {1 {:name "punctuation.definition.directive.c"}
2 {:name "keyword.control.directive.diagnostic.c"}}
:end #"$"
:name "meta.preprocessor.diagnostic.c"
:patterns (concat c-comments-patterns
c-strings-patterns
c-line-continuation-character-patterns)}])
(def ^:private c-include-patterns
[{:begin #"^\s*(#)\s*(include(?:_next)?|import)\b\s*"
:begin-captures {1 {:name "punctuation.definition.directive.c"}
2 {:name "keyword.control.directive.c"}}
:end #"(?=(?://|/\*))|(?<!\\)(?=$)"
:name "meta.preprocessor.include.c"
:patterns (concat c-line-continuation-character-patterns
[{
:begin #"\""
:begin-captures {0 {:name "punctuation.definition.string.begin.c"}}
:end #"\""
:end-captures {0 {:name "punctuation.definition.string.end.c"}}
:name "string.quoted.double.include.c"}
{:begin #"<"
:begin-captures {0 {:name "punctuation.definition.string.begin.c"}}
:end #">"
:end-captures {0 {:name "punctuation.definition.string.end.c"}}
:name "string.quoted.other.lt-gt.include.c"}])}])
(def ^:private c-pragma-patterns
[{:begin #"^\s*(#)\s*(pragma)\b"
:begin-captures {1 {:name "punctuation.definition.directive.c"}
2 {:name "keyword.control.directive.pragma.c"}}
:end #"(?=(?://|/\*))|(?<!\\)(?=$)"
:name "meta.preprocessor.pragma.c"
:patterns (concat c-strings-patterns
[{:match #"[a-zA-Z_$][\w\-$]*"
:name "entity.other.attribute-name.pragma.preprocessor.c"}]
c-numbers-patterns
c-line-continuation-character-patterns)}])
(def ^:private c-line-patterns
[{:begin #"^\s*(#)\s*(line)\b"
:begin-captures {1 {:name "punctuation.definition.directive.c"}
2 {:name "keyword.control.directive.line.c"}}
:end #"(?=(?://|/\*))|(?<!\\)(?=$)"
:name "meta.preprocessor.c"
:patterns (concat c-strings-patterns
c-numbers-patterns
c-line-continuation-character-patterns)}])
(def ^:private c-naive-define-patterns
[{:begin #"(?x)
^\s* (\#\s*define) \s+ # define
([a-zA-Z_$][\w$]*) # macro name
(?:
(\()
(
\s* [a-zA-Z_$][\w$]* \s* # first argument
(?:(?:,) \s* [a-zA-Z_$][\w$]* \s*)* # additional arguments
(?:\.\.\.)? # varargs ellipsis?
)
(\))
)?"
:begin-captures {1 {:name "keyword.control.directive.define.c"}
2 {:name "entity.name.function.preprocessor.c"}
3 {:name "punctuation.definition.parameters.begin.c"}
4 {:name "variable.parameters.preprocessor.c"}
5 {:name "punctuation.definition.parameters.end.c"}}
:end #"(?=(?://|/\*))|(?<!\\)(?=$)"
:name "meta.preprocessor.macro.c"
:patterns []}]) ;; here we should work on the content also...
(def ^:private c-undef-patterns
[{:begin #"^\s*(#)\s*(undef)\b"
:begin-captures {1 {:name "punctuation.definition.directive.c"}
2 {:name "keyword.control.directive.undef.c"}}
:end #"(?=(?://|/\*))|(?<!\\)(?=$)"
:name "meta.preprocessor.c"
:patterns (concat [{:match #"[a-zA-Z_$][\w$]*"
:name "entity.name.function.preprocessor.c"}]
c-line-continuation-character-patterns)}])
(def ^:private c-sys-types-patterns
[{:match #"\b(u_char|u_short|u_int|u_long|ushort|uint|u_quad_t|quad_t|qaddr_t|caddr_t|daddr_t|div_t|dev_t|fixpt_t|blkcnt_t|blksize_t|gid_t|in_addr_t|in_port_t|ino_t|key_t|mode_t|nlink_t|id_t|pid_t|off_t|segsz_t|swblk_t|uid_t|id_t|clock_t|size_t|ssize_t|time_t|useconds_t|suseconds_t)\b"
:name "support.type.sys-types.c"}])
(def ^:private c-pthread-types-patterns
[{:match #"\b(pthread_attr_t|pthread_cond_t|pthread_condattr_t|pthread_mutex_t|pthread_mutexattr_t|pthread_once_t|pthread_rwlock_t|pthread_rwlockattr_t|pthread_t|pthread_key_t)\b"
:name "support.type.pthread.c"}])
(def ^:private c-stdint-types-patterns
[{:match #"(?x) \b
(int8_t|int16_t|int32_t|int64_t|uint8_t|uint16_t|uint32_t|uint64_t|int_least8_t
|int_least16_t|int_least32_t|int_least64_t|uint_least8_t|uint_least16_t|uint_least32_t
|uint_least64_t|int_fast8_t|int_fast16_t|int_fast32_t|int_fast64_t|uint_fast8_t
|uint_fast16_t|uint_fast32_t|uint_fast64_t|intptr_t|uintptr_t|intmax_t|intmax_t
|uintmax_t|uintmax_t)
\b"
:name "support.type.stdint.c"}])
(def ^:private c-square-bracket-patterns
[{:match #"(\[)|(\])"
:captures {1 {:name "punctuation.definition.begin.bracket.square.c"}
2 {:name "punctuation.definition.end.bracket.square.c"}}}])
(def ^:private c-control-keywords-patterns
[{:match #"\b(break|case|continue|default|do|else|for|goto|if|_Pragma|return|switch|while)\b"
:name "keyword.control.c"}])
(def ^:private c-storage-modifiers-patterns
[{:match #"\b(const|extern|register|restrict|static|volatile|inline)\b"
:name "storage.modifier.c"}])
(def ^:private c-language-constants-patterns
[{:match #"\b(NULL|true|false|TRUE|FALSE)\b"
:name "constant.language.c"}])
(def ^:private c-terminator-patterns
[{:match #";"
:name "punctuation.terminator.statement.c"}])
(def ^:private c-separator-patterns
[{:match #","
:name "punctuation.separator.delimiter.c"}])
;; Grammar map consumed by the editor's syntax-highlighting engine.
;; Merges the shared C pattern groups defined above with C++-only additions,
;; hence "cish": one grammar serving C-ish sources of both languages.
;; Pattern order matters: earlier entries win, so comments and strings are
;; recognized before operators and keywords.
(def grammar
  {:name "CISH"
   :scope-name "source.cish"
   ;; Indent heuristics: indent after a line with an unmatched { or (,
   ;; outdent on a line that closes one (interleaved /* */ is tolerated).
   :indent {:begin #"^.*\{[^}\"\']*$|^.*\([^\)\"\']*$|^\s*\{\}$"
            :end #"^\s*(\s*/[*].*[*]/\s*)*\}|^\s*(\s*/[*].*[*]/\s*)*\)"}
   :line-comment "//"
   :patterns (concat c-comments-patterns
                     c-storage-types-patterns
                     c-control-keywords-patterns
                     c-storage-modifiers-patterns
                     c-language-constants-patterns
                     c-operators-patterns
                     c-numbers-patterns
                     c-strings-patterns
                     c-error-warning-directive-patterns
                     c-include-patterns
                     c-pragma-patterns
                     c-line-patterns
                     c-naive-define-patterns
                     c-undef-patterns
                     c-sys-types-patterns
                     c-pthread-types-patterns
                     c-stdint-types-patterns
                     c-libc-patterns
                     c-square-bracket-patterns
                     c-terminator-patterns
                     c-separator-patterns
                     ;; C++-only additions below.
                     [{:match #"\b(friend|explicit|virtual|override|final|noexcept)\b"
                       :name "storage.modifier.cish"}
                      {:match #"\b(private:|protected:|public:)"
                       :name "storage.modifier.cish"}
                      {:match #"\b(catch|operator|try|throw|using)\b"
                       :name "keyword.control.cish"}
                      {:match #"\bdelete\b(\s*\[\])?|\bnew\b(?!])"
                       :name "keyword.control.cish"}
                      ;; Redundant with c-operators-patterns' comparison rule,
                      ;; but kept for parity with the source grammar.
                      {:match #"<="
                       :name "keyword.operator.cish"}
                      {:match #"\bthis\b"
                       :name "variable.language.this.cish"}
                      ;; Fixed typo: the C++ keyword is "nullptr", not "nulptr".
                      {:match #"\bnullptr\b"
                       :name "constant.language.cish"}
                      {:match #"\btemplate\b\s*"
                       :name "storage.type.template.cish"}
                      {:match #"\b(const_cast|dynamic_cast|reinterpret_cast|static_cast)\b\s*"
                       :name "keyword.operator.cast.cish"}
                      {:match #"::"
                       :name "punctuation.separator.namespace.access.cpp"}
                      {:match #"\b(and|and_eq|bitand|bitor|compl|not|not_eq|or|or_eq|typeid|xor|xor_eq|alignof|alignas)\b"
                       :name "keyword.operator.cpp"}
                      {:match #"\b(class|decltype|wchar_t|char16_t|char32_t)\b"
                       :name "storage.type.cpp"}
                      {:match #"\b(constexpr|export|mutable|typename|thread_local)\b"
                       :name "storage.modifier.cpp"}
                      ;; C++ double-quoted strings, including encoding/raw prefixes.
                      ;; NOTE: the TextMate source grammar used \h, which is a hex
                      ;; digit in Oniguruma but horizontal whitespace in Java
                      ;; regexes, so the hex classes are spelled out explicitly.
                      {:begin #"(u|u8|U|L)?R?\""
                       :begin-captures {0 {:name "punctuation.definition.string.begin.cpp"}}
                       :end #"\""
                       :end-captures {0 {:name "punctuation.definition.string.end.cpp"}}
                       :name "string.quoted.double.cpp"
                       :patterns (concat [{:match #"\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}"
                                           :name "constant.character.escape.cpp"}
                                          {:match #"\\['\"?\\abfnrtv]"
                                           :name "constant.character.escape.cpp"}
                                          ;; Octal escapes are 1-3 digits ({13} was a typo for {1,3}).
                                          {:match #"\\[0-7]{1,3}"
                                           :name "constant.character.escape.cpp"}
                                          {:match #"\\x[0-9a-fA-F]+"
                                           :name "constant.character.escape.cpp"}]
                                         c-string-placeholder-patterns)}])})
|
[
{
"context": " :time 1432069827152495\n :user \"[email protected]\"\n :procedure procedure\n ",
"end": 670,
"score": 0.9999109506607056,
"start": 659,
"tag": "EMAIL",
"value": "[email protected]"
}
] |
test/decide_host/handlers_test.clj
|
melizalab/decide-host
| 0 |
(ns decide-host.handlers-test
(:require [midje.sweet :refer :all]
[decide-host.handlers :refer :all]
[monger.core :as mg]
[decide-host.database :as db]))
;; Scratch MongoDB database used by these tests; dropped at the start of a run.
(def test-db "decide-test")
;; Connection URI for a mongod on localhost.
(def test-uri (str "mongodb://localhost/" test-db))
;; Integration tests for update-subject!: connect to the scratch database,
;; drop it, then verify that state-changed and trial-data events update the
;; subject's procedure, last-fed, last-trial and experiment fields.
;; NOTE(review): requires a reachable mongod on localhost — confirm this is
;; the intended test environment.
(let [context {:database (mg/connect-via-uri test-uri)}
      {{db :db conn :conn} :database} context ]
  ;; Start from an empty database so assertions below see only our writes.
  (mg/drop-db conn test-db)
  (let [sock-id "test-ctrl"
        addr "test"
        subject "tester"
        procedure "testing"
        ;; Canonical "experiment started" event used by the first fact.
        data {:topic :state-changed
              :addr addr
              :name "experiment"
              :time 1432069827152495
              :user "[email protected]"
              :procedure procedure
              :subject subject}]
    (facts "about update-subject"
        (fact "handles experiment state-changed updates"
            (db/add-controller! db sock-id addr)
            (db/set-alive! db sock-id 10)
            ;; No procedure recorded until the event is handled.
            (db/get-procedure db subject) => nil
            (update-subject! context data)
            (db/get-procedure db subject) => procedure)
        (fact "updates last-fed for hopper events"
            (update-subject! context {:topic :state-changed :name "hopper" :up 1
                                      :addr addr :time 1234})
            (db/find-subject db subject) => (contains {:last-fed 1234}))
        (fact "updates last-trial for trial events"
            (update-subject! context {:topic :trial-data :subject subject :trial 10 :time 4567})
            (db/find-subject db subject) => (contains {:last-trial 4567}))
        (fact "update-subject updates current experiment"
            (update-subject! context {:topic :trial-data :subject subject :experiment "blah"})
            (db/find-subject db subject) => (contains {:experiment "blah"})))))
|
12625
|
(ns decide-host.handlers-test
(:require [midje.sweet :refer :all]
[decide-host.handlers :refer :all]
[monger.core :as mg]
[decide-host.database :as db]))
(def test-db "decide-test")
(def test-uri (str "mongodb://localhost/" test-db))
(let [context {:database (mg/connect-via-uri test-uri)}
{{db :db conn :conn} :database} context ]
(mg/drop-db conn test-db)
(let [sock-id "test-ctrl"
addr "test"
subject "tester"
procedure "testing"
data {:topic :state-changed
:addr addr
:name "experiment"
:time 1432069827152495
:user "<EMAIL>"
:procedure procedure
:subject subject}]
(facts "about update-subject"
(fact "handles experiment state-changed updates"
(db/add-controller! db sock-id addr)
(db/set-alive! db sock-id 10)
(db/get-procedure db subject) => nil
(update-subject! context data)
(db/get-procedure db subject) => procedure)
(fact "updates last-fed for hopper events"
(update-subject! context {:topic :state-changed :name "hopper" :up 1
:addr addr :time 1234})
(db/find-subject db subject) => (contains {:last-fed 1234}))
(fact "updates last-trial for trial events"
(update-subject! context {:topic :trial-data :subject subject :trial 10 :time 4567})
(db/find-subject db subject) => (contains {:last-trial 4567}))
(fact "update-subject updates current experiment"
(update-subject! context {:topic :trial-data :subject subject :experiment "blah"})
(db/find-subject db subject) => (contains {:experiment "blah"})))))
| true |
(ns decide-host.handlers-test
(:require [midje.sweet :refer :all]
[decide-host.handlers :refer :all]
[monger.core :as mg]
[decide-host.database :as db]))
(def test-db "decide-test")
(def test-uri (str "mongodb://localhost/" test-db))
(let [context {:database (mg/connect-via-uri test-uri)}
{{db :db conn :conn} :database} context ]
(mg/drop-db conn test-db)
(let [sock-id "test-ctrl"
addr "test"
subject "tester"
procedure "testing"
data {:topic :state-changed
:addr addr
:name "experiment"
:time 1432069827152495
:user "PI:EMAIL:<EMAIL>END_PI"
:procedure procedure
:subject subject}]
(facts "about update-subject"
(fact "handles experiment state-changed updates"
(db/add-controller! db sock-id addr)
(db/set-alive! db sock-id 10)
(db/get-procedure db subject) => nil
(update-subject! context data)
(db/get-procedure db subject) => procedure)
(fact "updates last-fed for hopper events"
(update-subject! context {:topic :state-changed :name "hopper" :up 1
:addr addr :time 1234})
(db/find-subject db subject) => (contains {:last-fed 1234}))
(fact "updates last-trial for trial events"
(update-subject! context {:topic :trial-data :subject subject :trial 10 :time 4567})
(db/find-subject db subject) => (contains {:last-trial 4567}))
(fact "update-subject updates current experiment"
(update-subject! context {:topic :trial-data :subject subject :experiment "blah"})
(db/find-subject db subject) => (contains {:experiment "blah"})))))
|
[
{
"context": "nts #{\"people\"})\n(def manet-artist-constraints #{\"MANET, Edouard\"})\n\n(def manet-sample-paintings\n #{{:date \"1862\"",
"end": 310,
"score": 0.941990077495575,
"start": 296,
"tag": "NAME",
"value": "MANET, Edouard"
},
{
"context": "jpg\",\n :title \"Spanish Ballet\",\n :author \"MANET, Edouard\",\n :created_on #inst \"2018-12-21T17:35:10.000",
"end": 526,
"score": 0.9994184970855713,
"start": 512,
"tag": "NAME",
"value": "MANET, Edouard"
},
{
"context": "g\",\n :title \"The Old Musician\",\n :author \"MANET, Edouard\",\n :created_on #inst \"2018-12-21T17:25:31.000",
"end": 1801,
"score": 0.9998704791069031,
"start": 1787,
"tag": "NAME",
"value": "MANET, Edouard"
},
{
"context": "in the Tuileries Gardens (detail)\",\n :author \"MANET, Edouard\",\n :created_on #inst \"2018-12-21T17:35:20.000",
"end": 3094,
"score": 0.9997318387031555,
"start": 3080,
"tag": "NAME",
"value": "MANET, Edouard"
},
{
"context": "et01.jpg\",\n :title \"Bullfight\",\n :author \"MANET, Edouard\",\n :created_on #inst \"2018-12-21T17:35:10.000",
"end": 4374,
"score": 0.999690592288971,
"start": 4360,
"tag": "NAME",
"value": "MANET, Edouard"
},
{
"context": "ame \"livestock\", :value 0.9761847}\n {:name \"equestrian\", :value 0.9525757}\n {:name \"people\", :valu",
"end": 4855,
"score": 0.9383177757263184,
"start": 4845,
"tag": "NAME",
"value": "equestrian"
},
{
"context": "\"Young Lady (Woman with a Parrot)\",\n :author \"MANET, Edouard\",\n :created_on #inst \"2018-12-21T17:25:42.000",
"end": 5682,
"score": 0.9996817708015442,
"start": 5668,
"tag": "NAME",
"value": "MANET, Edouard"
},
{
"context": "\",\n :title \"The Guitar Player\",\n :author \"MANET, Edouard\",\n :created_on #inst \"2018-12-21T17:23:29.000",
"end": 6961,
"score": 0.9998922944068909,
"start": 6947,
"tag": "NAME",
"value": "MANET, Edouard"
},
{
"context": "title \"The Luncheon in the Studio\",\n :author \"MANET, Edouard\",\n :created_on #inst \"2018-12-21T17:23:07.000",
"end": 8254,
"score": 0.9998745918273926,
"start": 8240,
"tag": "NAME",
"value": "MANET, Edouard"
},
{
"context": "15.jpg\",\n :title \"The Reading\",\n :author \"MANET, Edouard\",\n :created_on #inst \"2018-12-21T17:23:29.000",
"end": 9514,
"score": 0.9997848868370056,
"start": 9500,
"tag": "NAME",
"value": "MANET, Edouard"
},
{
"context": " on His Boat-Studio in Argenteuil\",\n :author \"MANET, Edouard\",\n :created_on #inst \"2018-12-21T17:26:40.000",
"end": 10822,
"score": 0.9996454119682312,
"start": 10808,
"tag": "NAME",
"value": "MANET, Edouard"
},
{
"context": " :title \"Corner of a Café-Concert\",\n :author \"MANET, Edouard\",\n :created_on #inst \"2018-12-21T17:23:29.000",
"end": 12123,
"score": 0.9995681643486023,
"start": 12109,
"tag": "NAME",
"value": "MANET, Edouard"
},
{
"context": " :title \"Two Women Drinking Bocks\",\n :author \"MANET, Edouard\",\n :created_on #inst \"2018-12-21T17:23:37.000",
"end": 13431,
"score": 0.9995846748352051,
"start": 13417,
"tag": "NAME",
"value": "MANET, Edouard"
},
{
"context": "in a Round Hat (Alphonse Maureau)\",\n :author \"MANET, Edouard\",\n :created_on #inst \"2018-12-21T17:25:31.000",
"end": 14710,
"score": 0.9998430609703064,
"start": 14696,
"tag": "NAME",
"value": "MANET, Edouard"
},
{
"context": " :title \"In the Winter Garden\",\n :author \"MANET, Edouard\",\n :created_on #inst \"2018-12-21T17:23:17.000",
"end": 15984,
"score": 0.9996480941772461,
"start": 15970,
"tag": "NAME",
"value": "MANET, Edouard"
},
{
"context": "m/manet/5/5late07.jpg\",\n :title \"The Escape of Henri Rochefort\",\n :author \"MANET, Edouard\",\n :created_on",
"end": 17232,
"score": 0.984952449798584,
"start": 17217,
"tag": "NAME",
"value": "Henri Rochefort"
},
{
"context": "le \"The Escape of Henri Rochefort\",\n :author \"MANET, Edouard\",\n :created_on #inst \"2018-12-21T17:35:42.000",
"end": 17263,
"score": 0.99984210729599,
"start": 17249,
"tag": "NAME",
"value": "MANET, Edouard"
},
{
"context": "t/m/manet/5/5late08.jpg\",\n :title \"Portrait of Henri Rochefort\",\n :author \"MANET, Edouard\",\n :created_on",
"end": 18537,
"score": 0.9545596241950989,
"start": 18522,
"tag": "NAME",
"value": "Henri Rochefort"
},
{
"context": "itle \"Portrait of Henri Rochefort\",\n :author \"MANET, Edouard\",\n :created_on #inst \"2018-12-21T17:25:52.000",
"end": 18568,
"score": 0.999413788318634,
"start": 18554,
"tag": "NAME",
"value": "MANET, Edouard"
},
{
"context": "ting\",\n :jpg\n \"https://res.cloudinary.com/dgpqnl8ul/image/upload/v1546472448/xxaoqvs6ad2av4t2aduh.jpg",
"end": 19631,
"score": 0.6778626441955566,
"start": 19622,
"tag": "USERNAME",
"value": "dgpqnl8ul"
},
{
"context": "ar at the Folies-Bergère (detail)\",\n :author \"MANET, Edouard\",\n :created_on #inst \"2018-12-21T17:23:07.000",
"end": 19891,
"score": 0.9998071789741516,
"start": 19877,
"tag": "NAME",
"value": "MANET, Edouard"
},
{
"context": "g\",\n :title \"Study of a Model\",\n :author \"MANET, Edouard\",\n :created_on #inst \"2018-12-21T17:26:01.000",
"end": 21176,
"score": 0.9998868107795715,
"start": 21162,
"tag": "NAME",
"value": "MANET, Edouard"
}
] |
src/cljs/landschaften/sample/manet.cljs
|
cljs-material-ui/landschaften
| 2 |
(ns landschaften.sample.manet)

;; Display name for the bundled example group of Manet paintings.
(def manet-people-group-name "Manet's people")

;; Search constraints that select this sample: no type restriction,
;; French school, 19th-century timeframes, the "people" concept, and
;; Manet as the artist.
(def manet-type-constraints #{})
(def manet-school-constraints #{"French"})
(def manet-timeframe-constraints #{"1801-1850", "1851-1900"})
(def manet-concept-constraints #{"people"})
(def manet-artist-constraints #{"MANET, Edouard"})
(def manet-sample-paintings
#{{:date "1862",
:school "French",
:type "other",
:wga_jpg "https://www.wga.hu/art/m/manet/1/2spanis1.jpg",
:title "Spanish Ballet",
:author "MANET, Edouard",
:created_on #inst "2018-12-21T17:35:10.000-00:00",
:concepts
#{{:name "adult", :value 0.9926609}
{:name "dancing", :value 0.95042074}
{:name "costume", :value 0.8785554}
{:name "two", :value 0.8278563}
{:name "music", :value 0.9256652}
{:name "people", :value 0.99651086}
{:name "painting", :value 0.8622995}
{:name "performance", :value 0.91299504}
{:name "wear", :value 0.9742775}
{:name "man", :value 0.9371989}
{:name "veil", :value 0.924356}
{:name "group", :value 0.988762}
{:name "many", :value 0.8306803}
{:name "woman", :value 0.9813038}
{:name "outfit", :value 0.83809006}
{:name "dancer", :value 0.9224825}
{:name "theater", :value 0.83426315}
{:name "recreation", :value 0.8632289}
{:name "art", :value 0.86101604}
{:name "child", :value 0.8897608}},
:id 24162,
:timeframe "1851-1900",
:form "painting",
:jpg
"https://res.cloudinary.com/dgpqnl8ul/image/upload/v1546472422/vqutvlotsmipx7xkmxun.jpg"}
{:date "1862",
:school "French",
:type "portrait",
:wga_jpg "https://www.wga.hu/art/m/manet/1/2spanis8.jpg",
:title "The Old Musician",
:author "MANET, Edouard",
:created_on #inst "2018-12-21T17:25:31.000-00:00",
:concepts
#{{:name "priest", :value 0.8639396}
{:name "adult", :value 0.99157315}
{:name "religion", :value 0.9748057}
{:name "print", :value 0.8651573}
{:name "lid", :value 0.9523072}
{:name "coat", :value 0.8577764}
{:name "outerwear", :value 0.9128848}
{:name "son", :value 0.9158189}
{:name "people", :value 0.99749196}
{:name "painting", :value 0.89659476}
{:name "wear", :value 0.9846792}
{:name "portrait", :value 0.8763036}
{:name "man", :value 0.9784485}
{:name "veil", :value 0.96348965}
{:name "group", :value 0.99090946}
{:name "facial hair", :value 0.88501525}
{:name "woman", :value 0.98325956}
{:name "art", :value 0.9184065}
{:name "three", :value 0.8912737}
{:name "child", :value 0.9239347}},
:id 24169,
:timeframe "1851-1900",
:form "painting",
:jpg
"https://res.cloudinary.com/dgpqnl8ul/image/upload/v1546472423/m3vwue4ql8zmyw10jw3k.jpg"}
{:date "1862",
:school "French",
:type "other",
:wga_jpg "https://www.wga.hu/art/m/manet/1/3early2.jpg",
:title "Music in the Tuileries Gardens (detail)",
:author "MANET, Edouard",
:created_on #inst "2018-12-21T17:35:20.000-00:00",
:concepts
#{{:name "priest", :value 0.8843517}
{:name "banquet", :value 0.8813138}
{:name "adult", :value 0.94551253}
{:name "religion", :value 0.9926128}
{:name "crowd", :value 0.9564}
{:name "celebration", :value 0.94409996}
{:name "people", :value 0.9958187}
{:name "wear", :value 0.87971234}
{:name "man", :value 0.8914131}
{:name "group", :value 0.98950154}
{:name "prayer", :value 0.8437264}
{:name "many", :value 0.99210143}
{:name "woman", :value 0.9641658}
{:name "crucifixion", :value 0.83641267}
{:name "ceremony", :value 0.98226374}
{:name "Easter", :value 0.9773995}
{:name "festival", :value 0.858477}
{:name "cross", :value 0.830389}
{:name "leader", :value 0.89480937}
{:name "funeral", :value 0.86351913}},
:id 24171,
:timeframe "1851-1900",
:form "painting",
:jpg
"https://res.cloudinary.com/dgpqnl8ul/image/upload/v1546472425/l6e4punrvp3etn74zun8.jpg"}
{:date "1865-66",
:school "French",
:type "other",
:wga_jpg "https://www.wga.hu/art/m/manet/2/2manet01.jpg",
:title "Bullfight",
:author "MANET, Edouard",
:created_on #inst "2018-12-21T17:35:10.000-00:00",
:concepts
#{{:name "mammal", :value 0.9926224}
{:name "racehorse", :value 0.9552147}
{:name "hurry", :value 0.9601132}
{:name "adult", :value 0.95615387}
{:name "race", :value 0.98657596}
{:name "jockey", :value 0.9815377}
{:name "crowd", :value 0.9723752}
{:name "cavalry", :value 0.9981413}
{:name "livestock", :value 0.9761847}
{:name "equestrian", :value 0.9525757}
{:name "people", :value 0.9984633}
{:name "action energy", :value 0.97289234}
{:name "man", :value 0.9494112}
{:name "horse", :value 0.9891174}
{:name "competition", :value 0.9896964}
{:name "cattle", :value 0.96983814}
{:name "group", :value 0.9905813}
{:name "many", :value 0.99030733}
{:name "seated", :value 0.99115324}
{:name "motion", :value 0.98737}},
:id 24186,
:timeframe "1851-1900",
:form "painting",
:jpg
"https://res.cloudinary.com/dgpqnl8ul/image/upload/v1546472429/lbs4bp3rbbrjft5zf6vq.jpg"}
{:date "1866",
:school "French",
:type "portrait",
:wga_jpg "https://www.wga.hu/art/m/manet/2/2manet04.jpg",
:title "Young Lady (Woman with a Parrot)",
:author "MANET, Edouard",
:created_on #inst "2018-12-21T17:25:42.000-00:00",
:concepts
#{{:name "one", :value 0.97028434} {:name "model", :value 0.93446124}
{:name "adult", :value 0.98388004}
{:name "religion", :value 0.83296895}
{:name "jewelry", :value 0.8180838}
{:name "print", :value 0.75688535}
{:name "lid", :value 0.8737594}
{:name "outerwear", :value 0.7577708}
{:name "people", :value 0.98241544}
{:name "fashion", :value 0.9042506}
{:name "painting", :value 0.89103764}
{:name "performance", :value 0.77718914}
{:name "wear", :value 0.97824997}
{:name "portrait", :value 0.9477818}
{:name "man", :value 0.8805766}
{:name "veil", :value 0.91031575}
{:name "fashionable", :value 0.8561712}
{:name "woman", :value 0.9581425}
{:name "dress", :value 0.89266264}
{:name "art", :value 0.9495683}},
:id 24189,
:timeframe "1851-1900",
:form "painting",
:jpg
"https://res.cloudinary.com/dgpqnl8ul/image/upload/v1546472430/bas4fxrp6hh37j9iszak.jpg"}
{:date "1867",
:school "French",
:type "genre",
:wga_jpg "https://www.wga.hu/art/m/manet/2/2manet11.jpg",
:title "The Guitar Player",
:author "MANET, Edouard",
:created_on #inst "2018-12-21T17:23:29.000-00:00",
:concepts
#{{:name "one", :value 0.99543536} {:name "adult", :value 0.99741757}
{:name "furniture", :value 0.93576956}
{:name "girl", :value 0.9304198}
{:name "seat", :value 0.9126075}
{:name "singer", :value 0.9263523}
{:name "two", :value 0.9186835}
{:name "music", :value 0.96700156}
{:name "people", :value 0.99657536}
{:name "painting", :value 0.97315586}
{:name "reclining", :value 0.9257157}
{:name "wear", :value 0.9945359}
{:name "portrait", :value 0.9816007}
{:name "actress", :value 0.933759}
{:name "facial expression", :value 0.95856774}
{:name "veil", :value 0.96556073}
{:name "musician", :value 0.93621033}
{:name "woman", :value 0.985641}
{:name "recreation", :value 0.9139211}
{:name "art", :value 0.95078254}},
:id 24196,
:timeframe "1851-1900",
:form "painting",
:jpg
"https://res.cloudinary.com/dgpqnl8ul/image/upload/v1546472432/ixevnvrrhfsvl2btqule.jpg"}
{:date "1868",
:school "French",
:type "genre",
:wga_jpg "https://www.wga.hu/art/m/manet/2/2manet13.jpg",
:title "The Luncheon in the Studio",
:author "MANET, Edouard",
:created_on #inst "2018-12-21T17:23:07.000-00:00",
:concepts
#{{:name "one", :value 0.928209} {:name "adult", :value 0.9921365}
{:name "furniture", :value 0.9286638}
{:name "lid", :value 0.97724456}
{:name "two", :value 0.9651395}
{:name "royalty", :value 0.8792964}
{:name "commerce", :value 0.8866627}
{:name "several", :value 0.8716241}
{:name "people", :value 0.9982027}
{:name "painting", :value 0.89995265}
{:name "wear", :value 0.9909334}
{:name "four", :value 0.88462174}
{:name "portrait", :value 0.93528205}
{:name "man", :value 0.979193}
{:name "veil", :value 0.9725639}
{:name "group", :value 0.98707205}
{:name "woman", :value 0.9860933}
{:name "outfit", :value 0.9133043}
{:name "recreation", :value 0.89808434}
{:name "three", :value 0.93893814}},
:id 24198,
:timeframe "1851-1900",
:form "painting",
:jpg
"https://res.cloudinary.com/dgpqnl8ul/image/upload/v1546472433/rwywykjuns6crcgtrx1z.jpg"}
{:date "1868",
:school "French",
:type "genre",
:wga_jpg "https://www.wga.hu/art/m/manet/2/2manet15.jpg",
:title "The Reading",
:author "MANET, Edouard",
:created_on #inst "2018-12-21T17:23:29.000-00:00",
:concepts
#{{:name "one", :value 0.9187814} {:name "adult", :value 0.9846325}
{:name "furniture", :value 0.8632232}
{:name "girl", :value 0.8502531}
{:name "seat", :value 0.71896744}
{:name "princess", :value 0.76299554}
{:name "two", :value 0.9359244}
{:name "people", :value 0.9865805}
{:name "painting", :value 0.7880821}
{:name "wear", :value 0.9162672}
{:name "beautiful", :value 0.84891796}
{:name "portrait", :value 0.86976075}
{:name "man", :value 0.89266723}
{:name "healthcare", :value 0.7076663}
{:name "room", :value 0.8991654}
{:name "indoors", :value 0.80724007}
{:name "woman", :value 0.98091006}
{:name "dress", :value 0.85546994}
{:name "art", :value 0.8825964}
{:name "sit", :value 0.7793608}},
:id 24200,
:timeframe "1851-1900",
:form "painting",
:jpg
"https://res.cloudinary.com/dgpqnl8ul/image/upload/v1546472435/oj8aefh3d7pfmffpusht.jpg"}
{:date "1874",
:school "French",
:type "landscape",
:wga_jpg "https://www.wga.hu/art/m/manet/3/3manet13.jpg",
:title "Claude Monet Painting on His Boat-Studio in Argenteuil",
:author "MANET, Edouard",
:created_on #inst "2018-12-21T17:26:40.000-00:00",
:concepts
#{{:name "watercraft", :value 0.99094677}
{:name "one", :value 0.9455198}
{:name "rowboat", :value 0.92349803}
{:name "adult", :value 0.987556}
{:name "print", :value 0.92447}
{:name "two", :value 0.9623909}
{:name "sea", :value 0.908358}
{:name "people", :value 0.9969213}
{:name "painting", :value 0.98337245}
{:name "man", :value 0.94620585}
{:name "group", :value 0.9605703}
{:name "transportation system", :value 0.96333385}
{:name "water", :value 0.9794651}
{:name "vehicle", :value 0.9800147}
{:name "woman", :value 0.97150505}
{:name "illustration", :value 0.94317913}
{:name "travel", :value 0.9278274}
{:name "recreation", :value 0.94930357}
{:name "ship", :value 0.94353926}
{:name "art", :value 0.9695486}},
:id 24218,
:timeframe "1851-1900",
:form "painting",
:jpg
"https://res.cloudinary.com/dgpqnl8ul/image/upload/v1546472436/swsvuokxf891evo93p1b.jpg"}
{:date "1878-80",
:school "French",
:type "genre",
:wga_jpg "https://www.wga.hu/art/m/manet/4/4manet07.jpg",
:title "Corner of a Café-Concert",
:author "MANET, Edouard",
:created_on #inst "2018-12-21T17:23:29.000-00:00",
:concepts
#{{:name "adult", :value 0.98452723}
{:name "religion", :value 0.9813553}
{:name "administration", :value 0.80553263}
{:name "two", :value 0.841617}
{:name "container", :value 0.81185424}
{:name "celebration", :value 0.8444092}
{:name "commerce", :value 0.8864105}
{:name "several", :value 0.86247987}
{:name "people", :value 0.99761593}
{:name "wear", :value 0.95024574}
{:name "elderly", :value 0.8193976}
{:name "man", :value 0.97192264}
{:name "veil", :value 0.8256639}
{:name "group", :value 0.9939004}
{:name "many", :value 0.9530274}
{:name "woman", :value 0.9748919}
{:name "interaction", :value 0.84736484}
{:name "ceremony", :value 0.8802029}
{:name "recreation", :value 0.80783606}
{:name "leader", :value 0.8523675}},
:id 24233,
:timeframe "1851-1900",
:form "painting",
:jpg
"https://res.cloudinary.com/dgpqnl8ul/image/upload/v1546472438/vxaqini8v50k8eazpzuq.jpg"}
{:date "1878",
:school "French",
:type "genre",
:wga_jpg "https://www.wga.hu/art/m/manet/4/4manet10.jpg",
:title "Two Women Drinking Bocks",
:author "MANET, Edouard",
:created_on #inst "2018-12-21T17:23:37.000-00:00",
:concepts
#{{:name "one", :value 0.9047679} {:name "adult", :value 0.9140061}
{:name "food", :value 0.8745171}
{:name "lid", :value 0.8042917}
{:name "drink", :value 0.83911383}
{:name "two", :value 0.8198239}
{:name "container", :value 0.725613}
{:name "cold", :value 0.9290925}
{:name "people", :value 0.9570219}
{:name "wear", :value 0.70177203}
{:name "portrait", :value 0.72586167}
{:name "man", :value 0.896839}
{:name "healthcare", :value 0.7551681}
{:name "indoors", :value 0.83269846}
{:name "glass", :value 0.88382506}
{:name "veil", :value 0.7795876}
{:name "group", :value 0.87480223}
{:name "woman", :value 0.9120748}
{:name "beer", :value 0.7319597}
{:name "no person", :value 0.79889}},
:id 24236,
:timeframe "1851-1900",
:form "painting",
:jpg
"https://res.cloudinary.com/dgpqnl8ul/image/upload/v1546472440/o3ijavwwa4ojkpqaw4sz.jpg"}
{:date "1878",
:school "French",
:type "portrait",
:wga_jpg "https://www.wga.hu/art/m/manet/4/4manet12.jpg",
:title "Man in a Round Hat (Alphonse Maureau)",
:author "MANET, Edouard",
:created_on #inst "2018-12-21T17:25:31.000-00:00",
:concepts
#{{:name "one", :value 0.98983526} {:name "adult", :value 0.98518276}
{:name "side view", :value 0.7534818}
{:name "religion", :value 0.755723}
{:name "old", :value 0.8170091}
{:name "lid", :value 0.9244362}
{:name "people", :value 0.9983702}
{:name "painting", :value 0.98108876}
{:name "wear", :value 0.89224696}
{:name "portrait", :value 0.98770726}
{:name "elderly", :value 0.7708284}
{:name "man", :value 0.98501396}
{:name "beard", :value 0.72548115}
{:name "veil", :value 0.85667205}
{:name "vintage", :value 0.7308766}
{:name "facial hair", :value 0.94412553}
{:name "retro", :value 0.816434}
{:name "art", :value 0.9833573}
{:name "leader", :value 0.7347914}
{:name "mustache", :value 0.9813224}},
:id 24238,
:timeframe "1851-1900",
:form "painting",
:jpg
"https://res.cloudinary.com/dgpqnl8ul/image/upload/v1546472441/jrom433z3ebwnqerjnjh.jpg"}
{:date "1879",
:school "French",
:type "genre",
:wga_jpg "https://www.wga.hu/art/m/manet/4/4manet16.jpg",
:title "In the Winter Garden",
:author "MANET, Edouard",
:created_on #inst "2018-12-21T17:23:17.000-00:00",
:concepts
#{{:name "one", :value 0.9759318} {:name "adult", :value 0.99300295}
{:name "furniture", :value 0.9467297}
{:name "seat", :value 0.9515189}
{:name "lid", :value 0.91574156}
{:name "two", :value 0.97907144}
{:name "outerwear", :value 0.87922907}
{:name "people", :value 0.9964845}
{:name "painting", :value 0.8783745}
{:name "wear", :value 0.9583739}
{:name "portrait", :value 0.9744396}
{:name "man", :value 0.98772514}
{:name "veil", :value 0.8911963}
{:name "military", :value 0.9242512}
{:name "woman", :value 0.97469366}
{:name "soldier", :value 0.88871837}
{:name "art", :value 0.92404234}
{:name "sit", :value 0.93323195}
{:name "three", :value 0.89724416}
{:name "child", :value 0.8757273}},
:id 24242,
:timeframe "1851-1900",
:form "painting",
:jpg
"https://res.cloudinary.com/dgpqnl8ul/image/upload/v1546472443/gqwvv3nmczfqaqf3c7l1.jpg"}
{:date "1881",
:school "French",
:type "historical",
:wga_jpg "https://www.wga.hu/art/m/manet/5/5late07.jpg",
:title "The Escape of Henri Rochefort",
:author "MANET, Edouard",
:created_on #inst "2018-12-21T17:35:42.000-00:00",
:concepts
#{{:name "watercraft", :value 0.9823499}
{:name "one", :value 0.94041026}
{:name "rowboat", :value 0.9560243}
{:name "adult", :value 0.9147363}
{:name "oar", :value 0.8999064}
{:name "two", :value 0.9277632}
{:name "sea", :value 0.93822336}
{:name "canoe", :value 0.9338627}
{:name "people", :value 0.98351806}
{:name "swimming", :value 0.8851098}
{:name "ocean", :value 0.92358166}
{:name "fisherman", :value 0.9260603}
{:name "leisure", :value 0.8901384}
{:name "transportation system", :value 0.897137}
{:name "water", :value 0.99450403}
{:name "vehicle", :value 0.89671445}
{:name "travel", :value 0.9164829}
{:name "recreation", :value 0.9788822}
{:name "fish", :value 0.949726}
{:name "boatman", :value 0.9217631}},
:id 24252,
:timeframe "1851-1900",
:form "painting",
:jpg
"https://res.cloudinary.com/dgpqnl8ul/image/upload/v1546472446/x8crdusi0cqdu3rywwv8.jpg"}
{:date "1881",
:school "French",
:type "portrait",
:wga_jpg "https://www.wga.hu/art/m/manet/5/5late08.jpg",
:title "Portrait of Henri Rochefort",
:author "MANET, Edouard",
:created_on #inst "2018-12-21T17:25:52.000-00:00",
:concepts
#{{:name "one", :value 0.99891716} {:name "adult", :value 0.99553406}
{:name "side view", :value 0.9648169}
{:name "administration", :value 0.9108915}
{:name "music", :value 0.9220626}
{:name "people", :value 0.99919045}
{:name "wear", :value 0.95365226}
{:name "portrait", :value 0.9991429}
{:name "tie", :value 0.92405903}
{:name "facial expression", :value 0.85146475}
{:name "man", :value 0.9939749}
{:name "writer", :value 0.9283779}
{:name "politician", :value 0.8993956}
{:name "profile", :value 0.9753293}
{:name "facial hair", :value 0.93652}
{:name "outfit", :value 0.89390266}
{:name "menswear", :value 0.93957925}
{:name "neckwear", :value 0.95516014}
{:name "leader", :value 0.9851012}
{:name "mustache", :value 0.9306363}},
:id 24253,
:timeframe "1851-1900",
:form "painting",
:jpg
"https://res.cloudinary.com/dgpqnl8ul/image/upload/v1546472448/xxaoqvs6ad2av4t2aduh.jpg"}
{:date "1881-82",
:school "French",
:type "genre",
:wga_jpg "https://www.wga.hu/art/m/manet/5/5late10.jpg",
:title "A Bar at the Folies-Bergère (detail)",
:author "MANET, Edouard",
:created_on #inst "2018-12-21T17:23:07.000-00:00",
:concepts
#{{:name "one", :value 0.8000617}
{:name "religion", :value 0.8590821}
{:name "winter", :value 0.8660009}
{:name "traditional", :value 0.85578156}
{:name "outdoors", :value 0.8795291}
{:name "cold", :value 0.86454546}
{:name "people", :value 0.95081115}
{:name "painting", :value 0.8788986}
{:name "indoors", :value 0.8228017}
{:name "group", :value 0.82631165}
{:name "water", :value 0.9398377}
{:name "woman", :value 0.84158987}
{:name "crystal", :value 0.8572757}
{:name "wet", :value 0.8016135}
{:name "travel", :value 0.92263913}
{:name "decoration", :value 0.8386463}
{:name "icee", :value 0.8358636}
{:name "fish", :value 0.7907878}
{:name "art", :value 0.92345643}
{:name "no person", :value 0.9639495}},
:id 24255,
:timeframe "1851-1900",
:form "painting",
:jpg
"https://res.cloudinary.com/dgpqnl8ul/image/upload/v1546472449/k3jixe3wrjb4qcuuvo3h.jpg"}
{:date "1881",
:school "French",
:type "portrait",
:wga_jpg "https://www.wga.hu/art/m/manet/5/5late11.jpg",
:title "Study of a Model",
:author "MANET, Edouard",
:created_on #inst "2018-12-21T17:26:01.000-00:00",
:concepts
#{{:name "one", :value 0.99024737} {:name "model", :value 0.8956412}
{:name "adult", :value 0.9915126}
{:name "girl", :value 0.90252256}
{:name "face", :value 0.9113476}
{:name "lid", :value 0.97739756}
{:name "people", :value 0.9946698}
{:name "fashion", :value 0.9372419}
{:name "painting", :value 0.8990917}
{:name "wear", :value 0.95215756}
{:name "smoke", :value 0.89441824}
{:name "portrait", :value 0.9936029}
{:name "facial expression", :value 0.88725185}
{:name "man", :value 0.9698202}
{:name "veil", :value 0.957594}
{:name "fashionable", :value 0.8565195}
{:name "woman", :value 0.9524783}
{:name "retro", :value 0.8354877}
{:name "jacket", :value 0.8673773}
{:name "art", :value 0.95265865}},
:id 24256,
:timeframe "1851-1900",
:form "painting",
:jpg
"https://res.cloudinary.com/dgpqnl8ul/image/upload/v1546472451/b8fiba0shz0wldpk0bbb.jpg"}})
;; Bundles the sample paintings together with the constraints that
;; produced them — the map shape the app expects for a saved group.
(def manet-example-group
  {:group-name manet-people-group-name
   :paintings manet-sample-paintings
   :type-constraints manet-type-constraints
   :school-constraints manet-school-constraints
   :timeframe-constraints manet-timeframe-constraints
   :concept-constraints manet-concept-constraints
   :artist-constraints manet-artist-constraints})
|
34504
|
(ns landschaften.sample.manet)
(def manet-people-group-name "Manet's people")
(def manet-type-constraints #{})
(def manet-school-constraints #{"French"})
(def manet-timeframe-constraints #{"1801-1850", "1851-1900"})
(def manet-concept-constraints #{"people"})
(def manet-artist-constraints #{"<NAME>"})
(def manet-sample-paintings
#{{:date "1862",
:school "French",
:type "other",
:wga_jpg "https://www.wga.hu/art/m/manet/1/2spanis1.jpg",
:title "Spanish Ballet",
:author "<NAME>",
:created_on #inst "2018-12-21T17:35:10.000-00:00",
:concepts
#{{:name "adult", :value 0.9926609}
{:name "dancing", :value 0.95042074}
{:name "costume", :value 0.8785554}
{:name "two", :value 0.8278563}
{:name "music", :value 0.9256652}
{:name "people", :value 0.99651086}
{:name "painting", :value 0.8622995}
{:name "performance", :value 0.91299504}
{:name "wear", :value 0.9742775}
{:name "man", :value 0.9371989}
{:name "veil", :value 0.924356}
{:name "group", :value 0.988762}
{:name "many", :value 0.8306803}
{:name "woman", :value 0.9813038}
{:name "outfit", :value 0.83809006}
{:name "dancer", :value 0.9224825}
{:name "theater", :value 0.83426315}
{:name "recreation", :value 0.8632289}
{:name "art", :value 0.86101604}
{:name "child", :value 0.8897608}},
:id 24162,
:timeframe "1851-1900",
:form "painting",
:jpg
"https://res.cloudinary.com/dgpqnl8ul/image/upload/v1546472422/vqutvlotsmipx7xkmxun.jpg"}
{:date "1862",
:school "French",
:type "portrait",
:wga_jpg "https://www.wga.hu/art/m/manet/1/2spanis8.jpg",
:title "The Old Musician",
:author "<NAME>",
:created_on #inst "2018-12-21T17:25:31.000-00:00",
:concepts
#{{:name "priest", :value 0.8639396}
{:name "adult", :value 0.99157315}
{:name "religion", :value 0.9748057}
{:name "print", :value 0.8651573}
{:name "lid", :value 0.9523072}
{:name "coat", :value 0.8577764}
{:name "outerwear", :value 0.9128848}
{:name "son", :value 0.9158189}
{:name "people", :value 0.99749196}
{:name "painting", :value 0.89659476}
{:name "wear", :value 0.9846792}
{:name "portrait", :value 0.8763036}
{:name "man", :value 0.9784485}
{:name "veil", :value 0.96348965}
{:name "group", :value 0.99090946}
{:name "facial hair", :value 0.88501525}
{:name "woman", :value 0.98325956}
{:name "art", :value 0.9184065}
{:name "three", :value 0.8912737}
{:name "child", :value 0.9239347}},
:id 24169,
:timeframe "1851-1900",
:form "painting",
:jpg
"https://res.cloudinary.com/dgpqnl8ul/image/upload/v1546472423/m3vwue4ql8zmyw10jw3k.jpg"}
{:date "1862",
:school "French",
:type "other",
:wga_jpg "https://www.wga.hu/art/m/manet/1/3early2.jpg",
:title "Music in the Tuileries Gardens (detail)",
:author "<NAME>",
:created_on #inst "2018-12-21T17:35:20.000-00:00",
:concepts
#{{:name "priest", :value 0.8843517}
{:name "banquet", :value 0.8813138}
{:name "adult", :value 0.94551253}
{:name "religion", :value 0.9926128}
{:name "crowd", :value 0.9564}
{:name "celebration", :value 0.94409996}
{:name "people", :value 0.9958187}
{:name "wear", :value 0.87971234}
{:name "man", :value 0.8914131}
{:name "group", :value 0.98950154}
{:name "prayer", :value 0.8437264}
{:name "many", :value 0.99210143}
{:name "woman", :value 0.9641658}
{:name "crucifixion", :value 0.83641267}
{:name "ceremony", :value 0.98226374}
{:name "Easter", :value 0.9773995}
{:name "festival", :value 0.858477}
{:name "cross", :value 0.830389}
{:name "leader", :value 0.89480937}
{:name "funeral", :value 0.86351913}},
:id 24171,
:timeframe "1851-1900",
:form "painting",
:jpg
"https://res.cloudinary.com/dgpqnl8ul/image/upload/v1546472425/l6e4punrvp3etn74zun8.jpg"}
{:date "1865-66",
:school "French",
:type "other",
:wga_jpg "https://www.wga.hu/art/m/manet/2/2manet01.jpg",
:title "Bullfight",
:author "<NAME>",
:created_on #inst "2018-12-21T17:35:10.000-00:00",
:concepts
#{{:name "mammal", :value 0.9926224}
{:name "racehorse", :value 0.9552147}
{:name "hurry", :value 0.9601132}
{:name "adult", :value 0.95615387}
{:name "race", :value 0.98657596}
{:name "jockey", :value 0.9815377}
{:name "crowd", :value 0.9723752}
{:name "cavalry", :value 0.9981413}
{:name "livestock", :value 0.9761847}
{:name "<NAME>", :value 0.9525757}
{:name "people", :value 0.9984633}
{:name "action energy", :value 0.97289234}
{:name "man", :value 0.9494112}
{:name "horse", :value 0.9891174}
{:name "competition", :value 0.9896964}
{:name "cattle", :value 0.96983814}
{:name "group", :value 0.9905813}
{:name "many", :value 0.99030733}
{:name "seated", :value 0.99115324}
{:name "motion", :value 0.98737}},
:id 24186,
:timeframe "1851-1900",
:form "painting",
:jpg
"https://res.cloudinary.com/dgpqnl8ul/image/upload/v1546472429/lbs4bp3rbbrjft5zf6vq.jpg"}
{:date "1866",
:school "French",
:type "portrait",
:wga_jpg "https://www.wga.hu/art/m/manet/2/2manet04.jpg",
:title "Young Lady (Woman with a Parrot)",
:author "<NAME>",
:created_on #inst "2018-12-21T17:25:42.000-00:00",
:concepts
#{{:name "one", :value 0.97028434} {:name "model", :value 0.93446124}
{:name "adult", :value 0.98388004}
{:name "religion", :value 0.83296895}
{:name "jewelry", :value 0.8180838}
{:name "print", :value 0.75688535}
{:name "lid", :value 0.8737594}
{:name "outerwear", :value 0.7577708}
{:name "people", :value 0.98241544}
{:name "fashion", :value 0.9042506}
{:name "painting", :value 0.89103764}
{:name "performance", :value 0.77718914}
{:name "wear", :value 0.97824997}
{:name "portrait", :value 0.9477818}
{:name "man", :value 0.8805766}
{:name "veil", :value 0.91031575}
{:name "fashionable", :value 0.8561712}
{:name "woman", :value 0.9581425}
{:name "dress", :value 0.89266264}
{:name "art", :value 0.9495683}},
:id 24189,
:timeframe "1851-1900",
:form "painting",
:jpg
"https://res.cloudinary.com/dgpqnl8ul/image/upload/v1546472430/bas4fxrp6hh37j9iszak.jpg"}
{:date "1867",
:school "French",
:type "genre",
:wga_jpg "https://www.wga.hu/art/m/manet/2/2manet11.jpg",
:title "The Guitar Player",
:author "<NAME>",
:created_on #inst "2018-12-21T17:23:29.000-00:00",
:concepts
#{{:name "one", :value 0.99543536} {:name "adult", :value 0.99741757}
{:name "furniture", :value 0.93576956}
{:name "girl", :value 0.9304198}
{:name "seat", :value 0.9126075}
{:name "singer", :value 0.9263523}
{:name "two", :value 0.9186835}
{:name "music", :value 0.96700156}
{:name "people", :value 0.99657536}
{:name "painting", :value 0.97315586}
{:name "reclining", :value 0.9257157}
{:name "wear", :value 0.9945359}
{:name "portrait", :value 0.9816007}
{:name "actress", :value 0.933759}
{:name "facial expression", :value 0.95856774}
{:name "veil", :value 0.96556073}
{:name "musician", :value 0.93621033}
{:name "woman", :value 0.985641}
{:name "recreation", :value 0.9139211}
{:name "art", :value 0.95078254}},
:id 24196,
:timeframe "1851-1900",
:form "painting",
:jpg
"https://res.cloudinary.com/dgpqnl8ul/image/upload/v1546472432/ixevnvrrhfsvl2btqule.jpg"}
{:date "1868",
:school "French",
:type "genre",
:wga_jpg "https://www.wga.hu/art/m/manet/2/2manet13.jpg",
:title "The Luncheon in the Studio",
:author "<NAME>",
:created_on #inst "2018-12-21T17:23:07.000-00:00",
:concepts
#{{:name "one", :value 0.928209} {:name "adult", :value 0.9921365}
{:name "furniture", :value 0.9286638}
{:name "lid", :value 0.97724456}
{:name "two", :value 0.9651395}
{:name "royalty", :value 0.8792964}
{:name "commerce", :value 0.8866627}
{:name "several", :value 0.8716241}
{:name "people", :value 0.9982027}
{:name "painting", :value 0.89995265}
{:name "wear", :value 0.9909334}
{:name "four", :value 0.88462174}
{:name "portrait", :value 0.93528205}
{:name "man", :value 0.979193}
{:name "veil", :value 0.9725639}
{:name "group", :value 0.98707205}
{:name "woman", :value 0.9860933}
{:name "outfit", :value 0.9133043}
{:name "recreation", :value 0.89808434}
{:name "three", :value 0.93893814}},
:id 24198,
:timeframe "1851-1900",
:form "painting",
:jpg
"https://res.cloudinary.com/dgpqnl8ul/image/upload/v1546472433/rwywykjuns6crcgtrx1z.jpg"}
{:date "1868",
:school "French",
:type "genre",
:wga_jpg "https://www.wga.hu/art/m/manet/2/2manet15.jpg",
:title "The Reading",
:author "<NAME>",
:created_on #inst "2018-12-21T17:23:29.000-00:00",
:concepts
#{{:name "one", :value 0.9187814} {:name "adult", :value 0.9846325}
{:name "furniture", :value 0.8632232}
{:name "girl", :value 0.8502531}
{:name "seat", :value 0.71896744}
{:name "princess", :value 0.76299554}
{:name "two", :value 0.9359244}
{:name "people", :value 0.9865805}
{:name "painting", :value 0.7880821}
{:name "wear", :value 0.9162672}
{:name "beautiful", :value 0.84891796}
{:name "portrait", :value 0.86976075}
{:name "man", :value 0.89266723}
{:name "healthcare", :value 0.7076663}
{:name "room", :value 0.8991654}
{:name "indoors", :value 0.80724007}
{:name "woman", :value 0.98091006}
{:name "dress", :value 0.85546994}
{:name "art", :value 0.8825964}
{:name "sit", :value 0.7793608}},
:id 24200,
:timeframe "1851-1900",
:form "painting",
:jpg
"https://res.cloudinary.com/dgpqnl8ul/image/upload/v1546472435/oj8aefh3d7pfmffpusht.jpg"}
{:date "1874",
:school "French",
:type "landscape",
:wga_jpg "https://www.wga.hu/art/m/manet/3/3manet13.jpg",
:title "Claude Monet Painting on His Boat-Studio in Argenteuil",
:author "<NAME>",
:created_on #inst "2018-12-21T17:26:40.000-00:00",
:concepts
#{{:name "watercraft", :value 0.99094677}
{:name "one", :value 0.9455198}
{:name "rowboat", :value 0.92349803}
{:name "adult", :value 0.987556}
{:name "print", :value 0.92447}
{:name "two", :value 0.9623909}
{:name "sea", :value 0.908358}
{:name "people", :value 0.9969213}
{:name "painting", :value 0.98337245}
{:name "man", :value 0.94620585}
{:name "group", :value 0.9605703}
{:name "transportation system", :value 0.96333385}
{:name "water", :value 0.9794651}
{:name "vehicle", :value 0.9800147}
{:name "woman", :value 0.97150505}
{:name "illustration", :value 0.94317913}
{:name "travel", :value 0.9278274}
{:name "recreation", :value 0.94930357}
{:name "ship", :value 0.94353926}
{:name "art", :value 0.9695486}},
:id 24218,
:timeframe "1851-1900",
:form "painting",
:jpg
"https://res.cloudinary.com/dgpqnl8ul/image/upload/v1546472436/swsvuokxf891evo93p1b.jpg"}
{:date "1878-80",
:school "French",
:type "genre",
:wga_jpg "https://www.wga.hu/art/m/manet/4/4manet07.jpg",
:title "Corner of a Café-Concert",
:author "<NAME>",
:created_on #inst "2018-12-21T17:23:29.000-00:00",
:concepts
#{{:name "adult", :value 0.98452723}
{:name "religion", :value 0.9813553}
{:name "administration", :value 0.80553263}
{:name "two", :value 0.841617}
{:name "container", :value 0.81185424}
{:name "celebration", :value 0.8444092}
{:name "commerce", :value 0.8864105}
{:name "several", :value 0.86247987}
{:name "people", :value 0.99761593}
{:name "wear", :value 0.95024574}
{:name "elderly", :value 0.8193976}
{:name "man", :value 0.97192264}
{:name "veil", :value 0.8256639}
{:name "group", :value 0.9939004}
{:name "many", :value 0.9530274}
{:name "woman", :value 0.9748919}
{:name "interaction", :value 0.84736484}
{:name "ceremony", :value 0.8802029}
{:name "recreation", :value 0.80783606}
{:name "leader", :value 0.8523675}},
:id 24233,
:timeframe "1851-1900",
:form "painting",
:jpg
"https://res.cloudinary.com/dgpqnl8ul/image/upload/v1546472438/vxaqini8v50k8eazpzuq.jpg"}
{:date "1878",
:school "French",
:type "genre",
:wga_jpg "https://www.wga.hu/art/m/manet/4/4manet10.jpg",
:title "Two Women Drinking Bocks",
:author "<NAME>",
:created_on #inst "2018-12-21T17:23:37.000-00:00",
:concepts
#{{:name "one", :value 0.9047679} {:name "adult", :value 0.9140061}
{:name "food", :value 0.8745171}
{:name "lid", :value 0.8042917}
{:name "drink", :value 0.83911383}
{:name "two", :value 0.8198239}
{:name "container", :value 0.725613}
{:name "cold", :value 0.9290925}
{:name "people", :value 0.9570219}
{:name "wear", :value 0.70177203}
{:name "portrait", :value 0.72586167}
{:name "man", :value 0.896839}
{:name "healthcare", :value 0.7551681}
{:name "indoors", :value 0.83269846}
{:name "glass", :value 0.88382506}
{:name "veil", :value 0.7795876}
{:name "group", :value 0.87480223}
{:name "woman", :value 0.9120748}
{:name "beer", :value 0.7319597}
{:name "no person", :value 0.79889}},
:id 24236,
:timeframe "1851-1900",
:form "painting",
:jpg
"https://res.cloudinary.com/dgpqnl8ul/image/upload/v1546472440/o3ijavwwa4ojkpqaw4sz.jpg"}
{:date "1878",
:school "French",
:type "portrait",
:wga_jpg "https://www.wga.hu/art/m/manet/4/4manet12.jpg",
:title "Man in a Round Hat (Alphonse Maureau)",
:author "<NAME>",
:created_on #inst "2018-12-21T17:25:31.000-00:00",
:concepts
#{{:name "one", :value 0.98983526} {:name "adult", :value 0.98518276}
{:name "side view", :value 0.7534818}
{:name "religion", :value 0.755723}
{:name "old", :value 0.8170091}
{:name "lid", :value 0.9244362}
{:name "people", :value 0.9983702}
{:name "painting", :value 0.98108876}
{:name "wear", :value 0.89224696}
{:name "portrait", :value 0.98770726}
{:name "elderly", :value 0.7708284}
{:name "man", :value 0.98501396}
{:name "beard", :value 0.72548115}
{:name "veil", :value 0.85667205}
{:name "vintage", :value 0.7308766}
{:name "facial hair", :value 0.94412553}
{:name "retro", :value 0.816434}
{:name "art", :value 0.9833573}
{:name "leader", :value 0.7347914}
{:name "mustache", :value 0.9813224}},
:id 24238,
:timeframe "1851-1900",
:form "painting",
:jpg
"https://res.cloudinary.com/dgpqnl8ul/image/upload/v1546472441/jrom433z3ebwnqerjnjh.jpg"}
{:date "1879",
:school "French",
:type "genre",
:wga_jpg "https://www.wga.hu/art/m/manet/4/4manet16.jpg",
:title "In the Winter Garden",
:author "<NAME>",
:created_on #inst "2018-12-21T17:23:17.000-00:00",
:concepts
#{{:name "one", :value 0.9759318} {:name "adult", :value 0.99300295}
{:name "furniture", :value 0.9467297}
{:name "seat", :value 0.9515189}
{:name "lid", :value 0.91574156}
{:name "two", :value 0.97907144}
{:name "outerwear", :value 0.87922907}
{:name "people", :value 0.9964845}
{:name "painting", :value 0.8783745}
{:name "wear", :value 0.9583739}
{:name "portrait", :value 0.9744396}
{:name "man", :value 0.98772514}
{:name "veil", :value 0.8911963}
{:name "military", :value 0.9242512}
{:name "woman", :value 0.97469366}
{:name "soldier", :value 0.88871837}
{:name "art", :value 0.92404234}
{:name "sit", :value 0.93323195}
{:name "three", :value 0.89724416}
{:name "child", :value 0.8757273}},
:id 24242,
:timeframe "1851-1900",
:form "painting",
:jpg
"https://res.cloudinary.com/dgpqnl8ul/image/upload/v1546472443/gqwvv3nmczfqaqf3c7l1.jpg"}
{:date "1881",
:school "French",
:type "historical",
:wga_jpg "https://www.wga.hu/art/m/manet/5/5late07.jpg",
:title "The Escape of <NAME>",
:author "<NAME>",
:created_on #inst "2018-12-21T17:35:42.000-00:00",
:concepts
#{{:name "watercraft", :value 0.9823499}
{:name "one", :value 0.94041026}
{:name "rowboat", :value 0.9560243}
{:name "adult", :value 0.9147363}
{:name "oar", :value 0.8999064}
{:name "two", :value 0.9277632}
{:name "sea", :value 0.93822336}
{:name "canoe", :value 0.9338627}
{:name "people", :value 0.98351806}
{:name "swimming", :value 0.8851098}
{:name "ocean", :value 0.92358166}
{:name "fisherman", :value 0.9260603}
{:name "leisure", :value 0.8901384}
{:name "transportation system", :value 0.897137}
{:name "water", :value 0.99450403}
{:name "vehicle", :value 0.89671445}
{:name "travel", :value 0.9164829}
{:name "recreation", :value 0.9788822}
{:name "fish", :value 0.949726}
{:name "boatman", :value 0.9217631}},
:id 24252,
:timeframe "1851-1900",
:form "painting",
:jpg
"https://res.cloudinary.com/dgpqnl8ul/image/upload/v1546472446/x8crdusi0cqdu3rywwv8.jpg"}
{:date "1881",
:school "French",
:type "portrait",
:wga_jpg "https://www.wga.hu/art/m/manet/5/5late08.jpg",
:title "Portrait of <NAME>",
:author "<NAME>",
:created_on #inst "2018-12-21T17:25:52.000-00:00",
:concepts
#{{:name "one", :value 0.99891716} {:name "adult", :value 0.99553406}
{:name "side view", :value 0.9648169}
{:name "administration", :value 0.9108915}
{:name "music", :value 0.9220626}
{:name "people", :value 0.99919045}
{:name "wear", :value 0.95365226}
{:name "portrait", :value 0.9991429}
{:name "tie", :value 0.92405903}
{:name "facial expression", :value 0.85146475}
{:name "man", :value 0.9939749}
{:name "writer", :value 0.9283779}
{:name "politician", :value 0.8993956}
{:name "profile", :value 0.9753293}
{:name "facial hair", :value 0.93652}
{:name "outfit", :value 0.89390266}
{:name "menswear", :value 0.93957925}
{:name "neckwear", :value 0.95516014}
{:name "leader", :value 0.9851012}
{:name "mustache", :value 0.9306363}},
:id 24253,
:timeframe "1851-1900",
:form "painting",
:jpg
"https://res.cloudinary.com/dgpqnl8ul/image/upload/v1546472448/xxaoqvs6ad2av4t2aduh.jpg"}
{:date "1881-82",
:school "French",
:type "genre",
:wga_jpg "https://www.wga.hu/art/m/manet/5/5late10.jpg",
:title "A Bar at the Folies-Bergère (detail)",
:author "<NAME>",
:created_on #inst "2018-12-21T17:23:07.000-00:00",
:concepts
#{{:name "one", :value 0.8000617}
{:name "religion", :value 0.8590821}
{:name "winter", :value 0.8660009}
{:name "traditional", :value 0.85578156}
{:name "outdoors", :value 0.8795291}
{:name "cold", :value 0.86454546}
{:name "people", :value 0.95081115}
{:name "painting", :value 0.8788986}
{:name "indoors", :value 0.8228017}
{:name "group", :value 0.82631165}
{:name "water", :value 0.9398377}
{:name "woman", :value 0.84158987}
{:name "crystal", :value 0.8572757}
{:name "wet", :value 0.8016135}
{:name "travel", :value 0.92263913}
{:name "decoration", :value 0.8386463}
{:name "icee", :value 0.8358636}
{:name "fish", :value 0.7907878}
{:name "art", :value 0.92345643}
{:name "no person", :value 0.9639495}},
:id 24255,
:timeframe "1851-1900",
:form "painting",
:jpg
"https://res.cloudinary.com/dgpqnl8ul/image/upload/v1546472449/k3jixe3wrjb4qcuuvo3h.jpg"}
{:date "1881",
:school "French",
:type "portrait",
:wga_jpg "https://www.wga.hu/art/m/manet/5/5late11.jpg",
:title "Study of a Model",
:author "<NAME>",
:created_on #inst "2018-12-21T17:26:01.000-00:00",
:concepts
#{{:name "one", :value 0.99024737} {:name "model", :value 0.8956412}
{:name "adult", :value 0.9915126}
{:name "girl", :value 0.90252256}
{:name "face", :value 0.9113476}
{:name "lid", :value 0.97739756}
{:name "people", :value 0.9946698}
{:name "fashion", :value 0.9372419}
{:name "painting", :value 0.8990917}
{:name "wear", :value 0.95215756}
{:name "smoke", :value 0.89441824}
{:name "portrait", :value 0.9936029}
{:name "facial expression", :value 0.88725185}
{:name "man", :value 0.9698202}
{:name "veil", :value 0.957594}
{:name "fashionable", :value 0.8565195}
{:name "woman", :value 0.9524783}
{:name "retro", :value 0.8354877}
{:name "jacket", :value 0.8673773}
{:name "art", :value 0.95265865}},
:id 24256,
:timeframe "1851-1900",
:form "painting",
:jpg
"https://res.cloudinary.com/dgpqnl8ul/image/upload/v1546472451/b8fiba0shz0wldpk0bbb.jpg"}})
;; Pre-assembled example "group" for Manet's people: a single map bundling
;; the sample paintings with the constraint sets referenced below (all are
;; sibling vars in this namespace).
;; NOTE(review): the key names (:group-name, :paintings, :*-constraints)
;; suggest this mirrors the app's saved-search/group schema — confirm
;; against the consumers of this var.
(def manet-example-group
  {:group-name manet-people-group-name
   :paintings manet-sample-paintings
   :type-constraints manet-type-constraints
   :school-constraints manet-school-constraints
   :timeframe-constraints manet-timeframe-constraints
   :concept-constraints manet-concept-constraints
   :artist-constraints manet-artist-constraints})
| true |
(ns landschaften.sample.manet)
(def manet-people-group-name "Manet's people")
(def manet-type-constraints #{})
(def manet-school-constraints #{"French"})
(def manet-timeframe-constraints #{"1801-1850", "1851-1900"})
(def manet-concept-constraints #{"people"})
(def manet-artist-constraints #{"PI:NAME:<NAME>END_PI"})
(def manet-sample-paintings
#{{:date "1862",
:school "French",
:type "other",
:wga_jpg "https://www.wga.hu/art/m/manet/1/2spanis1.jpg",
:title "Spanish Ballet",
:author "PI:NAME:<NAME>END_PI",
:created_on #inst "2018-12-21T17:35:10.000-00:00",
:concepts
#{{:name "adult", :value 0.9926609}
{:name "dancing", :value 0.95042074}
{:name "costume", :value 0.8785554}
{:name "two", :value 0.8278563}
{:name "music", :value 0.9256652}
{:name "people", :value 0.99651086}
{:name "painting", :value 0.8622995}
{:name "performance", :value 0.91299504}
{:name "wear", :value 0.9742775}
{:name "man", :value 0.9371989}
{:name "veil", :value 0.924356}
{:name "group", :value 0.988762}
{:name "many", :value 0.8306803}
{:name "woman", :value 0.9813038}
{:name "outfit", :value 0.83809006}
{:name "dancer", :value 0.9224825}
{:name "theater", :value 0.83426315}
{:name "recreation", :value 0.8632289}
{:name "art", :value 0.86101604}
{:name "child", :value 0.8897608}},
:id 24162,
:timeframe "1851-1900",
:form "painting",
:jpg
"https://res.cloudinary.com/dgpqnl8ul/image/upload/v1546472422/vqutvlotsmipx7xkmxun.jpg"}
{:date "1862",
:school "French",
:type "portrait",
:wga_jpg "https://www.wga.hu/art/m/manet/1/2spanis8.jpg",
:title "The Old Musician",
:author "PI:NAME:<NAME>END_PI",
:created_on #inst "2018-12-21T17:25:31.000-00:00",
:concepts
#{{:name "priest", :value 0.8639396}
{:name "adult", :value 0.99157315}
{:name "religion", :value 0.9748057}
{:name "print", :value 0.8651573}
{:name "lid", :value 0.9523072}
{:name "coat", :value 0.8577764}
{:name "outerwear", :value 0.9128848}
{:name "son", :value 0.9158189}
{:name "people", :value 0.99749196}
{:name "painting", :value 0.89659476}
{:name "wear", :value 0.9846792}
{:name "portrait", :value 0.8763036}
{:name "man", :value 0.9784485}
{:name "veil", :value 0.96348965}
{:name "group", :value 0.99090946}
{:name "facial hair", :value 0.88501525}
{:name "woman", :value 0.98325956}
{:name "art", :value 0.9184065}
{:name "three", :value 0.8912737}
{:name "child", :value 0.9239347}},
:id 24169,
:timeframe "1851-1900",
:form "painting",
:jpg
"https://res.cloudinary.com/dgpqnl8ul/image/upload/v1546472423/m3vwue4ql8zmyw10jw3k.jpg"}
{:date "1862",
:school "French",
:type "other",
:wga_jpg "https://www.wga.hu/art/m/manet/1/3early2.jpg",
:title "Music in the Tuileries Gardens (detail)",
:author "PI:NAME:<NAME>END_PI",
:created_on #inst "2018-12-21T17:35:20.000-00:00",
:concepts
#{{:name "priest", :value 0.8843517}
{:name "banquet", :value 0.8813138}
{:name "adult", :value 0.94551253}
{:name "religion", :value 0.9926128}
{:name "crowd", :value 0.9564}
{:name "celebration", :value 0.94409996}
{:name "people", :value 0.9958187}
{:name "wear", :value 0.87971234}
{:name "man", :value 0.8914131}
{:name "group", :value 0.98950154}
{:name "prayer", :value 0.8437264}
{:name "many", :value 0.99210143}
{:name "woman", :value 0.9641658}
{:name "crucifixion", :value 0.83641267}
{:name "ceremony", :value 0.98226374}
{:name "Easter", :value 0.9773995}
{:name "festival", :value 0.858477}
{:name "cross", :value 0.830389}
{:name "leader", :value 0.89480937}
{:name "funeral", :value 0.86351913}},
:id 24171,
:timeframe "1851-1900",
:form "painting",
:jpg
"https://res.cloudinary.com/dgpqnl8ul/image/upload/v1546472425/l6e4punrvp3etn74zun8.jpg"}
{:date "1865-66",
:school "French",
:type "other",
:wga_jpg "https://www.wga.hu/art/m/manet/2/2manet01.jpg",
:title "Bullfight",
:author "PI:NAME:<NAME>END_PI",
:created_on #inst "2018-12-21T17:35:10.000-00:00",
:concepts
#{{:name "mammal", :value 0.9926224}
{:name "racehorse", :value 0.9552147}
{:name "hurry", :value 0.9601132}
{:name "adult", :value 0.95615387}
{:name "race", :value 0.98657596}
{:name "jockey", :value 0.9815377}
{:name "crowd", :value 0.9723752}
{:name "cavalry", :value 0.9981413}
{:name "livestock", :value 0.9761847}
{:name "PI:NAME:<NAME>END_PI", :value 0.9525757}
{:name "people", :value 0.9984633}
{:name "action energy", :value 0.97289234}
{:name "man", :value 0.9494112}
{:name "horse", :value 0.9891174}
{:name "competition", :value 0.9896964}
{:name "cattle", :value 0.96983814}
{:name "group", :value 0.9905813}
{:name "many", :value 0.99030733}
{:name "seated", :value 0.99115324}
{:name "motion", :value 0.98737}},
:id 24186,
:timeframe "1851-1900",
:form "painting",
:jpg
"https://res.cloudinary.com/dgpqnl8ul/image/upload/v1546472429/lbs4bp3rbbrjft5zf6vq.jpg"}
{:date "1866",
:school "French",
:type "portrait",
:wga_jpg "https://www.wga.hu/art/m/manet/2/2manet04.jpg",
:title "Young Lady (Woman with a Parrot)",
:author "PI:NAME:<NAME>END_PI",
:created_on #inst "2018-12-21T17:25:42.000-00:00",
:concepts
#{{:name "one", :value 0.97028434} {:name "model", :value 0.93446124}
{:name "adult", :value 0.98388004}
{:name "religion", :value 0.83296895}
{:name "jewelry", :value 0.8180838}
{:name "print", :value 0.75688535}
{:name "lid", :value 0.8737594}
{:name "outerwear", :value 0.7577708}
{:name "people", :value 0.98241544}
{:name "fashion", :value 0.9042506}
{:name "painting", :value 0.89103764}
{:name "performance", :value 0.77718914}
{:name "wear", :value 0.97824997}
{:name "portrait", :value 0.9477818}
{:name "man", :value 0.8805766}
{:name "veil", :value 0.91031575}
{:name "fashionable", :value 0.8561712}
{:name "woman", :value 0.9581425}
{:name "dress", :value 0.89266264}
{:name "art", :value 0.9495683}},
:id 24189,
:timeframe "1851-1900",
:form "painting",
:jpg
"https://res.cloudinary.com/dgpqnl8ul/image/upload/v1546472430/bas4fxrp6hh37j9iszak.jpg"}
{:date "1867",
:school "French",
:type "genre",
:wga_jpg "https://www.wga.hu/art/m/manet/2/2manet11.jpg",
:title "The Guitar Player",
:author "PI:NAME:<NAME>END_PI",
:created_on #inst "2018-12-21T17:23:29.000-00:00",
:concepts
#{{:name "one", :value 0.99543536} {:name "adult", :value 0.99741757}
{:name "furniture", :value 0.93576956}
{:name "girl", :value 0.9304198}
{:name "seat", :value 0.9126075}
{:name "singer", :value 0.9263523}
{:name "two", :value 0.9186835}
{:name "music", :value 0.96700156}
{:name "people", :value 0.99657536}
{:name "painting", :value 0.97315586}
{:name "reclining", :value 0.9257157}
{:name "wear", :value 0.9945359}
{:name "portrait", :value 0.9816007}
{:name "actress", :value 0.933759}
{:name "facial expression", :value 0.95856774}
{:name "veil", :value 0.96556073}
{:name "musician", :value 0.93621033}
{:name "woman", :value 0.985641}
{:name "recreation", :value 0.9139211}
{:name "art", :value 0.95078254}},
:id 24196,
:timeframe "1851-1900",
:form "painting",
:jpg
"https://res.cloudinary.com/dgpqnl8ul/image/upload/v1546472432/ixevnvrrhfsvl2btqule.jpg"}
{:date "1868",
:school "French",
:type "genre",
:wga_jpg "https://www.wga.hu/art/m/manet/2/2manet13.jpg",
:title "The Luncheon in the Studio",
:author "PI:NAME:<NAME>END_PI",
:created_on #inst "2018-12-21T17:23:07.000-00:00",
:concepts
#{{:name "one", :value 0.928209} {:name "adult", :value 0.9921365}
{:name "furniture", :value 0.9286638}
{:name "lid", :value 0.97724456}
{:name "two", :value 0.9651395}
{:name "royalty", :value 0.8792964}
{:name "commerce", :value 0.8866627}
{:name "several", :value 0.8716241}
{:name "people", :value 0.9982027}
{:name "painting", :value 0.89995265}
{:name "wear", :value 0.9909334}
{:name "four", :value 0.88462174}
{:name "portrait", :value 0.93528205}
{:name "man", :value 0.979193}
{:name "veil", :value 0.9725639}
{:name "group", :value 0.98707205}
{:name "woman", :value 0.9860933}
{:name "outfit", :value 0.9133043}
{:name "recreation", :value 0.89808434}
{:name "three", :value 0.93893814}},
:id 24198,
:timeframe "1851-1900",
:form "painting",
:jpg
"https://res.cloudinary.com/dgpqnl8ul/image/upload/v1546472433/rwywykjuns6crcgtrx1z.jpg"}
{:date "1868",
:school "French",
:type "genre",
:wga_jpg "https://www.wga.hu/art/m/manet/2/2manet15.jpg",
:title "The Reading",
:author "PI:NAME:<NAME>END_PI",
:created_on #inst "2018-12-21T17:23:29.000-00:00",
:concepts
#{{:name "one", :value 0.9187814} {:name "adult", :value 0.9846325}
{:name "furniture", :value 0.8632232}
{:name "girl", :value 0.8502531}
{:name "seat", :value 0.71896744}
{:name "princess", :value 0.76299554}
{:name "two", :value 0.9359244}
{:name "people", :value 0.9865805}
{:name "painting", :value 0.7880821}
{:name "wear", :value 0.9162672}
{:name "beautiful", :value 0.84891796}
{:name "portrait", :value 0.86976075}
{:name "man", :value 0.89266723}
{:name "healthcare", :value 0.7076663}
{:name "room", :value 0.8991654}
{:name "indoors", :value 0.80724007}
{:name "woman", :value 0.98091006}
{:name "dress", :value 0.85546994}
{:name "art", :value 0.8825964}
{:name "sit", :value 0.7793608}},
:id 24200,
:timeframe "1851-1900",
:form "painting",
:jpg
"https://res.cloudinary.com/dgpqnl8ul/image/upload/v1546472435/oj8aefh3d7pfmffpusht.jpg"}
{:date "1874",
:school "French",
:type "landscape",
:wga_jpg "https://www.wga.hu/art/m/manet/3/3manet13.jpg",
:title "Claude Monet Painting on His Boat-Studio in Argenteuil",
:author "PI:NAME:<NAME>END_PI",
:created_on #inst "2018-12-21T17:26:40.000-00:00",
:concepts
#{{:name "watercraft", :value 0.99094677}
{:name "one", :value 0.9455198}
{:name "rowboat", :value 0.92349803}
{:name "adult", :value 0.987556}
{:name "print", :value 0.92447}
{:name "two", :value 0.9623909}
{:name "sea", :value 0.908358}
{:name "people", :value 0.9969213}
{:name "painting", :value 0.98337245}
{:name "man", :value 0.94620585}
{:name "group", :value 0.9605703}
{:name "transportation system", :value 0.96333385}
{:name "water", :value 0.9794651}
{:name "vehicle", :value 0.9800147}
{:name "woman", :value 0.97150505}
{:name "illustration", :value 0.94317913}
{:name "travel", :value 0.9278274}
{:name "recreation", :value 0.94930357}
{:name "ship", :value 0.94353926}
{:name "art", :value 0.9695486}},
:id 24218,
:timeframe "1851-1900",
:form "painting",
:jpg
"https://res.cloudinary.com/dgpqnl8ul/image/upload/v1546472436/swsvuokxf891evo93p1b.jpg"}
{:date "1878-80",
:school "French",
:type "genre",
:wga_jpg "https://www.wga.hu/art/m/manet/4/4manet07.jpg",
:title "Corner of a Café-Concert",
:author "PI:NAME:<NAME>END_PI",
:created_on #inst "2018-12-21T17:23:29.000-00:00",
:concepts
#{{:name "adult", :value 0.98452723}
{:name "religion", :value 0.9813553}
{:name "administration", :value 0.80553263}
{:name "two", :value 0.841617}
{:name "container", :value 0.81185424}
{:name "celebration", :value 0.8444092}
{:name "commerce", :value 0.8864105}
{:name "several", :value 0.86247987}
{:name "people", :value 0.99761593}
{:name "wear", :value 0.95024574}
{:name "elderly", :value 0.8193976}
{:name "man", :value 0.97192264}
{:name "veil", :value 0.8256639}
{:name "group", :value 0.9939004}
{:name "many", :value 0.9530274}
{:name "woman", :value 0.9748919}
{:name "interaction", :value 0.84736484}
{:name "ceremony", :value 0.8802029}
{:name "recreation", :value 0.80783606}
{:name "leader", :value 0.8523675}},
:id 24233,
:timeframe "1851-1900",
:form "painting",
:jpg
"https://res.cloudinary.com/dgpqnl8ul/image/upload/v1546472438/vxaqini8v50k8eazpzuq.jpg"}
{:date "1878",
:school "French",
:type "genre",
:wga_jpg "https://www.wga.hu/art/m/manet/4/4manet10.jpg",
:title "Two Women Drinking Bocks",
:author "PI:NAME:<NAME>END_PI",
:created_on #inst "2018-12-21T17:23:37.000-00:00",
:concepts
#{{:name "one", :value 0.9047679} {:name "adult", :value 0.9140061}
{:name "food", :value 0.8745171}
{:name "lid", :value 0.8042917}
{:name "drink", :value 0.83911383}
{:name "two", :value 0.8198239}
{:name "container", :value 0.725613}
{:name "cold", :value 0.9290925}
{:name "people", :value 0.9570219}
{:name "wear", :value 0.70177203}
{:name "portrait", :value 0.72586167}
{:name "man", :value 0.896839}
{:name "healthcare", :value 0.7551681}
{:name "indoors", :value 0.83269846}
{:name "glass", :value 0.88382506}
{:name "veil", :value 0.7795876}
{:name "group", :value 0.87480223}
{:name "woman", :value 0.9120748}
{:name "beer", :value 0.7319597}
{:name "no person", :value 0.79889}},
:id 24236,
:timeframe "1851-1900",
:form "painting",
:jpg
"https://res.cloudinary.com/dgpqnl8ul/image/upload/v1546472440/o3ijavwwa4ojkpqaw4sz.jpg"}
{:date "1878",
:school "French",
:type "portrait",
:wga_jpg "https://www.wga.hu/art/m/manet/4/4manet12.jpg",
:title "Man in a Round Hat (Alphonse Maureau)",
:author "PI:NAME:<NAME>END_PI",
:created_on #inst "2018-12-21T17:25:31.000-00:00",
:concepts
#{{:name "one", :value 0.98983526} {:name "adult", :value 0.98518276}
{:name "side view", :value 0.7534818}
{:name "religion", :value 0.755723}
{:name "old", :value 0.8170091}
{:name "lid", :value 0.9244362}
{:name "people", :value 0.9983702}
{:name "painting", :value 0.98108876}
{:name "wear", :value 0.89224696}
{:name "portrait", :value 0.98770726}
{:name "elderly", :value 0.7708284}
{:name "man", :value 0.98501396}
{:name "beard", :value 0.72548115}
{:name "veil", :value 0.85667205}
{:name "vintage", :value 0.7308766}
{:name "facial hair", :value 0.94412553}
{:name "retro", :value 0.816434}
{:name "art", :value 0.9833573}
{:name "leader", :value 0.7347914}
{:name "mustache", :value 0.9813224}},
:id 24238,
:timeframe "1851-1900",
:form "painting",
:jpg
"https://res.cloudinary.com/dgpqnl8ul/image/upload/v1546472441/jrom433z3ebwnqerjnjh.jpg"}
{:date "1879",
:school "French",
:type "genre",
:wga_jpg "https://www.wga.hu/art/m/manet/4/4manet16.jpg",
:title "In the Winter Garden",
:author "PI:NAME:<NAME>END_PI",
:created_on #inst "2018-12-21T17:23:17.000-00:00",
:concepts
#{{:name "one", :value 0.9759318} {:name "adult", :value 0.99300295}
{:name "furniture", :value 0.9467297}
{:name "seat", :value 0.9515189}
{:name "lid", :value 0.91574156}
{:name "two", :value 0.97907144}
{:name "outerwear", :value 0.87922907}
{:name "people", :value 0.9964845}
{:name "painting", :value 0.8783745}
{:name "wear", :value 0.9583739}
{:name "portrait", :value 0.9744396}
{:name "man", :value 0.98772514}
{:name "veil", :value 0.8911963}
{:name "military", :value 0.9242512}
{:name "woman", :value 0.97469366}
{:name "soldier", :value 0.88871837}
{:name "art", :value 0.92404234}
{:name "sit", :value 0.93323195}
{:name "three", :value 0.89724416}
{:name "child", :value 0.8757273}},
:id 24242,
:timeframe "1851-1900",
:form "painting",
:jpg
"https://res.cloudinary.com/dgpqnl8ul/image/upload/v1546472443/gqwvv3nmczfqaqf3c7l1.jpg"}
{:date "1881",
:school "French",
:type "historical",
:wga_jpg "https://www.wga.hu/art/m/manet/5/5late07.jpg",
:title "The Escape of PI:NAME:<NAME>END_PI",
:author "PI:NAME:<NAME>END_PI",
:created_on #inst "2018-12-21T17:35:42.000-00:00",
:concepts
#{{:name "watercraft", :value 0.9823499}
{:name "one", :value 0.94041026}
{:name "rowboat", :value 0.9560243}
{:name "adult", :value 0.9147363}
{:name "oar", :value 0.8999064}
{:name "two", :value 0.9277632}
{:name "sea", :value 0.93822336}
{:name "canoe", :value 0.9338627}
{:name "people", :value 0.98351806}
{:name "swimming", :value 0.8851098}
{:name "ocean", :value 0.92358166}
{:name "fisherman", :value 0.9260603}
{:name "leisure", :value 0.8901384}
{:name "transportation system", :value 0.897137}
{:name "water", :value 0.99450403}
{:name "vehicle", :value 0.89671445}
{:name "travel", :value 0.9164829}
{:name "recreation", :value 0.9788822}
{:name "fish", :value 0.949726}
{:name "boatman", :value 0.9217631}},
:id 24252,
:timeframe "1851-1900",
:form "painting",
:jpg
"https://res.cloudinary.com/dgpqnl8ul/image/upload/v1546472446/x8crdusi0cqdu3rywwv8.jpg"}
{:date "1881",
:school "French",
:type "portrait",
:wga_jpg "https://www.wga.hu/art/m/manet/5/5late08.jpg",
:title "Portrait of PI:NAME:<NAME>END_PI",
:author "PI:NAME:<NAME>END_PI",
:created_on #inst "2018-12-21T17:25:52.000-00:00",
:concepts
#{{:name "one", :value 0.99891716} {:name "adult", :value 0.99553406}
{:name "side view", :value 0.9648169}
{:name "administration", :value 0.9108915}
{:name "music", :value 0.9220626}
{:name "people", :value 0.99919045}
{:name "wear", :value 0.95365226}
{:name "portrait", :value 0.9991429}
{:name "tie", :value 0.92405903}
{:name "facial expression", :value 0.85146475}
{:name "man", :value 0.9939749}
{:name "writer", :value 0.9283779}
{:name "politician", :value 0.8993956}
{:name "profile", :value 0.9753293}
{:name "facial hair", :value 0.93652}
{:name "outfit", :value 0.89390266}
{:name "menswear", :value 0.93957925}
{:name "neckwear", :value 0.95516014}
{:name "leader", :value 0.9851012}
{:name "mustache", :value 0.9306363}},
:id 24253,
:timeframe "1851-1900",
:form "painting",
:jpg
"https://res.cloudinary.com/dgpqnl8ul/image/upload/v1546472448/xxaoqvs6ad2av4t2aduh.jpg"}
{:date "1881-82",
:school "French",
:type "genre",
:wga_jpg "https://www.wga.hu/art/m/manet/5/5late10.jpg",
:title "A Bar at the Folies-Bergère (detail)",
:author "PI:NAME:<NAME>END_PI",
:created_on #inst "2018-12-21T17:23:07.000-00:00",
:concepts
#{{:name "one", :value 0.8000617}
{:name "religion", :value 0.8590821}
{:name "winter", :value 0.8660009}
{:name "traditional", :value 0.85578156}
{:name "outdoors", :value 0.8795291}
{:name "cold", :value 0.86454546}
{:name "people", :value 0.95081115}
{:name "painting", :value 0.8788986}
{:name "indoors", :value 0.8228017}
{:name "group", :value 0.82631165}
{:name "water", :value 0.9398377}
{:name "woman", :value 0.84158987}
{:name "crystal", :value 0.8572757}
{:name "wet", :value 0.8016135}
{:name "travel", :value 0.92263913}
{:name "decoration", :value 0.8386463}
{:name "icee", :value 0.8358636}
{:name "fish", :value 0.7907878}
{:name "art", :value 0.92345643}
{:name "no person", :value 0.9639495}},
:id 24255,
:timeframe "1851-1900",
:form "painting",
:jpg
"https://res.cloudinary.com/dgpqnl8ul/image/upload/v1546472449/k3jixe3wrjb4qcuuvo3h.jpg"}
{:date "1881",
:school "French",
:type "portrait",
:wga_jpg "https://www.wga.hu/art/m/manet/5/5late11.jpg",
:title "Study of a Model",
:author "PI:NAME:<NAME>END_PI",
:created_on #inst "2018-12-21T17:26:01.000-00:00",
:concepts
#{{:name "one", :value 0.99024737} {:name "model", :value 0.8956412}
{:name "adult", :value 0.9915126}
{:name "girl", :value 0.90252256}
{:name "face", :value 0.9113476}
{:name "lid", :value 0.97739756}
{:name "people", :value 0.9946698}
{:name "fashion", :value 0.9372419}
{:name "painting", :value 0.8990917}
{:name "wear", :value 0.95215756}
{:name "smoke", :value 0.89441824}
{:name "portrait", :value 0.9936029}
{:name "facial expression", :value 0.88725185}
{:name "man", :value 0.9698202}
{:name "veil", :value 0.957594}
{:name "fashionable", :value 0.8565195}
{:name "woman", :value 0.9524783}
{:name "retro", :value 0.8354877}
{:name "jacket", :value 0.8673773}
{:name "art", :value 0.95265865}},
:id 24256,
:timeframe "1851-1900",
:form "painting",
:jpg
"https://res.cloudinary.com/dgpqnl8ul/image/upload/v1546472451/b8fiba0shz0wldpk0bbb.jpg"}})
(def manet-example-group
{:group-name manet-people-group-name
:paintings manet-sample-paintings
:type-constraints manet-type-constraints
:school-constraints manet-school-constraints
:timeframe-constraints manet-timeframe-constraints
:concept-constraints manet-concept-constraints
:artist-constraints manet-artist-constraints})
|
[
{
"context": "372 :foo bar\")))\n (is (=\n {:prefix {:nick \"someone\", :username \"foo\", :host \"example.com\"}, :command",
"end": 448,
"score": 0.9845524430274963,
"start": 441,
"tag": "USERNAME",
"value": "someone"
},
{
"context": "s (=\n {:prefix {:nick \"someone\", :username \"foo\", :host \"example.com\"}, :command \"PRIVMSG\", :args",
"end": 465,
"score": 0.9866592884063721,
"start": 462,
"tag": "USERNAME",
"value": "foo"
},
{
"context": "gs [\"me\" \"hey you\"]}\n (parse-line \":[email protected] PRIVMSG me :hey you\"))))\n\n\n",
"end": 578,
"score": 0.9952316284179688,
"start": 563,
"tag": "EMAIL",
"value": "[email protected]"
}
] |
test/tachyon/parser_test.clj
|
henrikolsson/tachyon
| 0 |
(ns tachyon.parser-test
(:use [tachyon.parser] :reload)
(:use [clojure.test]))
(deftest can-parse-line
(is (=
{:prefix {:nick nil, :username nil, :host nil}, :command "TEST", :args ["a" "b"]}
(parse-line "TEST a b")))
(is (=
{:prefix {:nick nil, :username nil, :host "test.example.com"}, :command "372", :args ["foo bar"]}
(parse-line ":test.example.com 372 :foo bar")))
(is (=
{:prefix {:nick "someone", :username "foo", :host "example.com"}, :command "PRIVMSG", :args ["me" "hey you"]}
(parse-line ":[email protected] PRIVMSG me :hey you"))))
|
115976
|
(ns tachyon.parser-test
(:use [tachyon.parser] :reload)
(:use [clojure.test]))
(deftest can-parse-line
(is (=
{:prefix {:nick nil, :username nil, :host nil}, :command "TEST", :args ["a" "b"]}
(parse-line "TEST a b")))
(is (=
{:prefix {:nick nil, :username nil, :host "test.example.com"}, :command "372", :args ["foo bar"]}
(parse-line ":test.example.com 372 :foo bar")))
(is (=
{:prefix {:nick "someone", :username "foo", :host "example.com"}, :command "PRIVMSG", :args ["me" "hey you"]}
(parse-line ":someone!<EMAIL> PRIVMSG me :hey you"))))
| true |
(ns tachyon.parser-test
(:use [tachyon.parser] :reload)
(:use [clojure.test]))
(deftest can-parse-line
(is (=
{:prefix {:nick nil, :username nil, :host nil}, :command "TEST", :args ["a" "b"]}
(parse-line "TEST a b")))
(is (=
{:prefix {:nick nil, :username nil, :host "test.example.com"}, :command "372", :args ["foo bar"]}
(parse-line ":test.example.com 372 :foo bar")))
(is (=
{:prefix {:nick "someone", :username "foo", :host "example.com"}, :command "PRIVMSG", :args ["me" "hey you"]}
(parse-line ":someone!PI:EMAIL:<EMAIL>END_PI PRIVMSG me :hey you"))))
|
[
{
"context": "old-value#))))))\n\n(def valid-manufacturer {:name \"Valid Name\" :founded \"1999\" :grade 99})\n\n(defn valid-ma",
"end": 557,
"score": 0.5378831624984741,
"start": 552,
"tag": "NAME",
"value": "Valid"
}
] |
clj_record/test/test_helper.clj
|
victoryvinod/sas
| 3 |
(ns clj-record.test.test-helper
(:require [clj-record.core :as core])
(:use clojure.contrib.test-is))
(defmacro defdbtest [name & body]
`(deftest ~name
(rolling-back ~@body)))
(defmacro rolling-back [& body]
`(core/transaction clj-record.test.model.config/db
(try
~@body
(finally
(clojure.contrib.sql/set-rollback-only)))))
(defmacro restoring-ref [ref & body]
`(let [old-value# (deref ~ref)]
(try
~@body
(finally
(dosync (ref-set ~ref old-value#))))))
(def valid-manufacturer {:name "Valid Name" :founded "1999" :grade 99})
(defn valid-manufacturer-with [attributes] (merge valid-manufacturer attributes))
|
110366
|
(ns clj-record.test.test-helper
(:require [clj-record.core :as core])
(:use clojure.contrib.test-is))
(defmacro defdbtest [name & body]
`(deftest ~name
(rolling-back ~@body)))
(defmacro rolling-back [& body]
`(core/transaction clj-record.test.model.config/db
(try
~@body
(finally
(clojure.contrib.sql/set-rollback-only)))))
(defmacro restoring-ref [ref & body]
`(let [old-value# (deref ~ref)]
(try
~@body
(finally
(dosync (ref-set ~ref old-value#))))))
(def valid-manufacturer {:name "<NAME> Name" :founded "1999" :grade 99})
(defn valid-manufacturer-with [attributes] (merge valid-manufacturer attributes))
| true |
(ns clj-record.test.test-helper
(:require [clj-record.core :as core])
(:use clojure.contrib.test-is))
(defmacro defdbtest [name & body]
`(deftest ~name
(rolling-back ~@body)))
(defmacro rolling-back [& body]
`(core/transaction clj-record.test.model.config/db
(try
~@body
(finally
(clojure.contrib.sql/set-rollback-only)))))
(defmacro restoring-ref [ref & body]
`(let [old-value# (deref ~ref)]
(try
~@body
(finally
(dosync (ref-set ~ref old-value#))))))
(def valid-manufacturer {:name "PI:NAME:<NAME>END_PI Name" :founded "1999" :grade 99})
(defn valid-manufacturer-with [attributes] (merge valid-manufacturer attributes))
|
[
{
"context": " (testing\n \"Tests for 'in' operator for maps.\\n'George' in {'John': 'smart', 'Paul': 'cute', 'George': '",
"end": 9817,
"score": 0.7423629760742188,
"start": 9811,
"tag": "NAME",
"value": "George"
},
{
"context": "\"Tests for 'in' operator for maps.\\n'George' in {'John': 'smart', 'Paul': 'cute', 'George': 'quiet', 'Ri",
"end": 9828,
"score": 0.9923809170722961,
"start": 9824,
"tag": "NAME",
"value": "John"
},
{
"context": "perator for maps.\\n'George' in {'John': 'smart', 'Paul': 'cute', 'George': 'quiet', 'Ringo': 'funny'}\"\n ",
"end": 9845,
"score": 0.9972764253616333,
"start": 9841,
"tag": "NAME",
"value": "Paul"
},
{
"context": ".\\n'George' in {'John': 'smart', 'Paul': 'cute', 'George': 'quiet', 'Ringo': 'funny'}\"\n (let\n [res\n",
"end": 9863,
"score": 0.957046389579773,
"start": 9857,
"tag": "NAME",
"value": "George"
},
{
"context": "hn': 'smart', 'Paul': 'cute', 'George': 'quiet', 'Ringo': 'funny'}\"\n (let\n [res\n (parser/pars",
"end": 9881,
"score": 0.850651741027832,
"start": 9876,
"tag": "NAME",
"value": "Ringo"
},
{
"context": "bindings nil), :translate-result? false}\n \"'George' in {'John': 'smart', 'Paul': 'cute', 'George': '",
"end": 10020,
"score": 0.821100115776062,
"start": 10014,
"tag": "NAME",
"value": "George"
},
{
"context": ", :translate-result? false}\n \"'George' in {'John': 'smart', 'Paul': 'cute', 'George': 'quiet', 'Ri",
"end": 10031,
"score": 0.9879292845726013,
"start": 10027,
"tag": "NAME",
"value": "John"
},
{
"context": "lt? false}\n \"'George' in {'John': 'smart', 'Paul': 'cute', 'George': 'quiet', 'Ringo': 'funny'}\")]",
"end": 10048,
"score": 0.997289776802063,
"start": 10044,
"tag": "NAME",
"value": "Paul"
},
{
"context": " \"'George' in {'John': 'smart', 'Paul': 'cute', 'George': 'quiet', 'Ringo': 'funny'}\")]\n (is (helper",
"end": 10066,
"score": 0.9333587884902954,
"start": 10060,
"tag": "NAME",
"value": "George"
},
{
"context": "hn': 'smart', 'Paul': 'cute', 'George': 'quiet', 'Ringo': 'funny'}\")]\n (is (helper/equal? (helper/tr",
"end": 10084,
"score": 0.8653554916381836,
"start": 10079,
"tag": "NAME",
"value": "Ringo"
}
] |
test/exoscale/cel/generated/fields_test.clj
|
exoscale/cel-parser
| 1 |
(ns
exoscale.cel.generated.fields-test
"Generated test - Tests for field access in maps."
(:require
[clojure.test :refer [deftest testing is]]
[exoscale.cel.test-helper :as helper]
[exoscale.cel.parser :as parser]))
(deftest
fields-map_fields-map_key_int64-test
(testing
"select an element in a map\n{0:1,2:2,5:true}[5]"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"{0:1,2:2,5:true}[5]")]
(is (helper/equal? (helper/translate {:boolValue true}) res)))))
(deftest
fields-map_fields-map_key_uint64-test
(testing
"select an element in a map\n{0u:1u,2u:'happy',5u:3u}[2u]"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"{0u:1u,2u:'happy',5u:3u}[2u]")]
(is (helper/equal? (helper/translate {:stringValue "happy"}) res)))))
(deftest
fields-map_fields-map_key_string-test
(testing
"select an element in a map\n{'name':100u}['name']"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"{'name':100u}['name']")]
(is (helper/equal? (helper/translate {:uint64Value "100"}) res)))))
(deftest
fields-map_fields-map_key_bool-test
(testing
"select an element in a map\n{true:5}[true]"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"{true:5}[true]")]
(is (helper/equal? (helper/translate {:int64Value "5"}) res)))))
(deftest
fields-map_fields-map_key_mix_type-test
(testing
"select an element in a map\n{true:1,2:2,5u:3}[true]"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"{true:1,2:2,5u:3}[true]")]
(is (helper/equal? (helper/translate {:int64Value "1"}) res)))))
(deftest
fields-map_fields-map_field_access-test
(testing
"select an element in a map\nx.name"
(let
[res
(parser/parse-eval
{:bindings
(helper/bindings
{:x
{:value
{:mapValue
{:entries
[{:key {:stringValue "name"},
:value {:int64Value "1024"}}]}}}}),
:translate-result? false}
"x.name")]
(is (helper/equal? (helper/translate {:int64Value "1024"}) res)))))
(deftest
fields-map_fields-map_no_such_key-test
(testing
"select an element in a map\n{0:1,2:2,5:3}[1]"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"{0:1,2:2,5:3}[1]")]
(is (helper/error? res)))))
(deftest
fields-map_fields-map_field_select_no_such_key-test
(testing
"select an element in a map\nx.name"
(let
[res
(parser/parse-eval
{:bindings
(helper/bindings
{:x
{:value
{:mapValue
{:entries
[{:key {:stringValue "holiday"},
:value {:stringValue "field"}}]}}}}),
:translate-result? false}
"x.name")]
(is (helper/error? res)))))
(deftest
fields-map_fields-map_value_null-test
(testing
"select an element in a map\n{true:null}[true]"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"{true:null}[true]")]
(is (helper/equal? (helper/translate {:nullValue nil}) res)))))
(deftest
fields-map_fields-map_value_bool-test
(testing
"select an element in a map\n{27:false}[27]"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"{27:false}[27]")]
(is (helper/equal? (helper/translate {:boolValue false}) res)))))
(deftest
fields-map_fields-map_value_string-test
(testing
"select an element in a map\n{'n':'x'}['n']"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"{'n':'x'}['n']")]
(is (helper/equal? (helper/translate {:stringValue "x"}) res)))))
(deftest
fields-map_fields-map_value_float-test
(testing
"select an element in a map\n{3:15.15}[3]"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"{3:15.15}[3]")]
(is (helper/equal? (helper/translate {:doubleValue 15.15}) res)))))
(deftest
fields-map_fields-map_value_uint64-test
(testing
"select an element in a map\n{0u:1u,2u:2u,5u:3u}[0u]"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"{0u:1u,2u:2u,5u:3u}[0u]")]
(is (helper/equal? (helper/translate {:uint64Value "1"}) res)))))
(deftest
fields-map_fields-map_value_int64-test
(testing
"select an element in a map\n{true:1,false:2}[true]"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"{true:1,false:2}[true]")]
(is (helper/equal? (helper/translate {:int64Value "1"}) res)))))
(deftest
fields-map_fields-map_value_bytes-test
(testing
"select an element in a map\n{0:b\"\"}[0]"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"{0:b\"\"}[0]")]
(is (helper/equal? (helper/translate {:bytesValue ""}) res)))))
(deftest
fields-map_fields-map_value_list-test
(testing
"select an element in a map\n{0u:[1]}[0u]"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"{0u:[1]}[0u]")]
(is
(helper/equal?
(helper/translate {:listValue {:values [{:int64Value "1"}]}})
res)))))
(deftest
fields-map_fields-map_value_map-test
(testing
"select an element in a map\n{\"map\": {\"k\": \"v\"}}[\"map\"]"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"{\"map\": {\"k\": \"v\"}}[\"map\"]")]
(is
(helper/equal?
(helper/translate
{:mapValue
{:entries
[{:key {:stringValue "k"}, :value {:stringValue "v"}}]}})
res)))))
(deftest
fields-map_fields-map_value_mix_type-test
(testing
"select an element in a map\n{\"map\": {\"k\": \"v\"}, \"list\": [1]}[\"map\"]"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"{\"map\": {\"k\": \"v\"}, \"list\": [1]}[\"map\"]")]
(is
(helper/equal?
(helper/translate
{:mapValue
{:entries
[{:key {:stringValue "k"}, :value {:stringValue "v"}}]}})
res)))))
(deftest
fields-map_has-has-test
(testing
"Has macro for map entries.\nhas({'a': 1, 'b': 2}.a)"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"has({'a': 1, 'b': 2}.a)")]
(is (helper/equal? (helper/translate {:boolValue true}) res)))))
(deftest
fields-map_has-has_not-test
(testing
"Has macro for map entries.\nhas({'a': 1, 'b': 2}.c)"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"has({'a': 1, 'b': 2}.c)")]
(is (helper/equal? (helper/translate {:boolValue false}) res)))))
(deftest
fields-map_has-has_empty-test
(testing
"Has macro for map entries.\nhas({}.a)"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"has({}.a)")]
(is (helper/equal? (helper/translate {:boolValue false}) res)))))
(deftest
fields-qualified_identifier_resolution-list_field_select_unsupported-test
(testing
"Tests for qualified identifier resolution.\na.b.pancakes"
(let
[res
(parser/parse-eval
{:bindings
(helper/bindings
{:a.b
{:value {:listValue {:values [{:stringValue "pancakes"}]}}}}),
:translate-result? false}
"a.b.pancakes")]
(is (helper/error? res)))))
(deftest
fields-qualified_identifier_resolution-int64_field_select_unsupported-test
(testing
"Tests for qualified identifier resolution.\na.pancakes"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings {:a {:value {:int64Value "15"}}}),
:translate-result? false}
"a.pancakes")]
(is (helper/error? res)))))
(deftest
fields-qualified_identifier_resolution-map_key_float-test
(testing
"Tests for qualified identifier resolution.\n{3.3:15.15, 1.0: 5}[1.0]"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"{3.3:15.15, 1.0: 5}[1.0]")]
(is (helper/error? res)))))
(deftest
fields-qualified_identifier_resolution-map_key_null-test
(testing
"Tests for qualified identifier resolution.\n{null:false}[null]"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"{null:false}[null]")]
(is (helper/error? res)))))
(deftest
fields-in-empty-test
(testing
"Tests for 'in' operator for maps.\n7 in {}"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"7 in {}")]
(is (helper/equal? (helper/translate {:boolValue false}) res)))))
(deftest
fields-in-singleton-test
(testing
"Tests for 'in' operator for maps.\ntrue in {true: 1}"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"true in {true: 1}")]
(is (helper/equal? (helper/translate {:boolValue true}) res)))))
(deftest
fields-in-present-test
(testing
"Tests for 'in' operator for maps.\n'George' in {'John': 'smart', 'Paul': 'cute', 'George': 'quiet', 'Ringo': 'funny'}"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"'George' in {'John': 'smart', 'Paul': 'cute', 'George': 'quiet', 'Ringo': 'funny'}")]
(is (helper/equal? (helper/translate {:boolValue true}) res)))))
(deftest
fields-in-absent-test
(testing
"Tests for 'in' operator for maps.\n'spider' in {'ant': 6, 'fly': 6, 'centipede': 100}"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"'spider' in {'ant': 6, 'fly': 6, 'centipede': 100}")]
(is (helper/equal? (helper/translate {:boolValue false}) res)))))
|
40912
|
(ns
exoscale.cel.generated.fields-test
"Generated test - Tests for field access in maps."
(:require
[clojure.test :refer [deftest testing is]]
[exoscale.cel.test-helper :as helper]
[exoscale.cel.parser :as parser]))
(deftest
fields-map_fields-map_key_int64-test
(testing
"select an element in a map\n{0:1,2:2,5:true}[5]"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"{0:1,2:2,5:true}[5]")]
(is (helper/equal? (helper/translate {:boolValue true}) res)))))
(deftest
fields-map_fields-map_key_uint64-test
(testing
"select an element in a map\n{0u:1u,2u:'happy',5u:3u}[2u]"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"{0u:1u,2u:'happy',5u:3u}[2u]")]
(is (helper/equal? (helper/translate {:stringValue "happy"}) res)))))
(deftest
fields-map_fields-map_key_string-test
(testing
"select an element in a map\n{'name':100u}['name']"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"{'name':100u}['name']")]
(is (helper/equal? (helper/translate {:uint64Value "100"}) res)))))
(deftest
fields-map_fields-map_key_bool-test
(testing
"select an element in a map\n{true:5}[true]"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"{true:5}[true]")]
(is (helper/equal? (helper/translate {:int64Value "5"}) res)))))
(deftest
fields-map_fields-map_key_mix_type-test
(testing
"select an element in a map\n{true:1,2:2,5u:3}[true]"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"{true:1,2:2,5u:3}[true]")]
(is (helper/equal? (helper/translate {:int64Value "1"}) res)))))
(deftest
fields-map_fields-map_field_access-test
(testing
"select an element in a map\nx.name"
(let
[res
(parser/parse-eval
{:bindings
(helper/bindings
{:x
{:value
{:mapValue
{:entries
[{:key {:stringValue "name"},
:value {:int64Value "1024"}}]}}}}),
:translate-result? false}
"x.name")]
(is (helper/equal? (helper/translate {:int64Value "1024"}) res)))))
(deftest
fields-map_fields-map_no_such_key-test
(testing
"select an element in a map\n{0:1,2:2,5:3}[1]"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"{0:1,2:2,5:3}[1]")]
(is (helper/error? res)))))
(deftest
fields-map_fields-map_field_select_no_such_key-test
(testing
"select an element in a map\nx.name"
(let
[res
(parser/parse-eval
{:bindings
(helper/bindings
{:x
{:value
{:mapValue
{:entries
[{:key {:stringValue "holiday"},
:value {:stringValue "field"}}]}}}}),
:translate-result? false}
"x.name")]
(is (helper/error? res)))))
(deftest
fields-map_fields-map_value_null-test
(testing
"select an element in a map\n{true:null}[true]"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"{true:null}[true]")]
(is (helper/equal? (helper/translate {:nullValue nil}) res)))))
(deftest
fields-map_fields-map_value_bool-test
(testing
"select an element in a map\n{27:false}[27]"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"{27:false}[27]")]
(is (helper/equal? (helper/translate {:boolValue false}) res)))))
(deftest
fields-map_fields-map_value_string-test
(testing
"select an element in a map\n{'n':'x'}['n']"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"{'n':'x'}['n']")]
(is (helper/equal? (helper/translate {:stringValue "x"}) res)))))
(deftest
fields-map_fields-map_value_float-test
(testing
"select an element in a map\n{3:15.15}[3]"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"{3:15.15}[3]")]
(is (helper/equal? (helper/translate {:doubleValue 15.15}) res)))))
(deftest
fields-map_fields-map_value_uint64-test
(testing
"select an element in a map\n{0u:1u,2u:2u,5u:3u}[0u]"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"{0u:1u,2u:2u,5u:3u}[0u]")]
(is (helper/equal? (helper/translate {:uint64Value "1"}) res)))))
(deftest
fields-map_fields-map_value_int64-test
(testing
"select an element in a map\n{true:1,false:2}[true]"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"{true:1,false:2}[true]")]
(is (helper/equal? (helper/translate {:int64Value "1"}) res)))))
(deftest
fields-map_fields-map_value_bytes-test
(testing
"select an element in a map\n{0:b\"\"}[0]"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"{0:b\"\"}[0]")]
(is (helper/equal? (helper/translate {:bytesValue ""}) res)))))
(deftest
fields-map_fields-map_value_list-test
(testing
"select an element in a map\n{0u:[1]}[0u]"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"{0u:[1]}[0u]")]
(is
(helper/equal?
(helper/translate {:listValue {:values [{:int64Value "1"}]}})
res)))))
(deftest
fields-map_fields-map_value_map-test
(testing
"select an element in a map\n{\"map\": {\"k\": \"v\"}}[\"map\"]"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"{\"map\": {\"k\": \"v\"}}[\"map\"]")]
(is
(helper/equal?
(helper/translate
{:mapValue
{:entries
[{:key {:stringValue "k"}, :value {:stringValue "v"}}]}})
res)))))
(deftest
fields-map_fields-map_value_mix_type-test
(testing
"select an element in a map\n{\"map\": {\"k\": \"v\"}, \"list\": [1]}[\"map\"]"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"{\"map\": {\"k\": \"v\"}, \"list\": [1]}[\"map\"]")]
(is
(helper/equal?
(helper/translate
{:mapValue
{:entries
[{:key {:stringValue "k"}, :value {:stringValue "v"}}]}})
res)))))
(deftest
fields-map_has-has-test
(testing
"Has macro for map entries.\nhas({'a': 1, 'b': 2}.a)"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"has({'a': 1, 'b': 2}.a)")]
(is (helper/equal? (helper/translate {:boolValue true}) res)))))
(deftest
fields-map_has-has_not-test
(testing
"Has macro for map entries.\nhas({'a': 1, 'b': 2}.c)"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"has({'a': 1, 'b': 2}.c)")]
(is (helper/equal? (helper/translate {:boolValue false}) res)))))
(deftest
fields-map_has-has_empty-test
(testing
"Has macro for map entries.\nhas({}.a)"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"has({}.a)")]
(is (helper/equal? (helper/translate {:boolValue false}) res)))))
(deftest
fields-qualified_identifier_resolution-list_field_select_unsupported-test
(testing
"Tests for qualified identifier resolution.\na.b.pancakes"
(let
[res
(parser/parse-eval
{:bindings
(helper/bindings
{:a.b
{:value {:listValue {:values [{:stringValue "pancakes"}]}}}}),
:translate-result? false}
"a.b.pancakes")]
(is (helper/error? res)))))
(deftest
fields-qualified_identifier_resolution-int64_field_select_unsupported-test
(testing
"Tests for qualified identifier resolution.\na.pancakes"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings {:a {:value {:int64Value "15"}}}),
:translate-result? false}
"a.pancakes")]
(is (helper/error? res)))))
(deftest
fields-qualified_identifier_resolution-map_key_float-test
(testing
"Tests for qualified identifier resolution.\n{3.3:15.15, 1.0: 5}[1.0]"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"{3.3:15.15, 1.0: 5}[1.0]")]
(is (helper/error? res)))))
(deftest
fields-qualified_identifier_resolution-map_key_null-test
(testing
"Tests for qualified identifier resolution.\n{null:false}[null]"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"{null:false}[null]")]
(is (helper/error? res)))))
(deftest
fields-in-empty-test
(testing
"Tests for 'in' operator for maps.\n7 in {}"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"7 in {}")]
(is (helper/equal? (helper/translate {:boolValue false}) res)))))
(deftest
fields-in-singleton-test
(testing
"Tests for 'in' operator for maps.\ntrue in {true: 1}"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"true in {true: 1}")]
(is (helper/equal? (helper/translate {:boolValue true}) res)))))
(deftest
fields-in-present-test
(testing
"Tests for 'in' operator for maps.\n'<NAME>' in {'<NAME>': 'smart', '<NAME>': 'cute', '<NAME>': 'quiet', '<NAME>': 'funny'}"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"'<NAME>' in {'<NAME>': 'smart', '<NAME>': 'cute', '<NAME>': 'quiet', '<NAME>': 'funny'}")]
(is (helper/equal? (helper/translate {:boolValue true}) res)))))
(deftest
fields-in-absent-test
(testing
"Tests for 'in' operator for maps.\n'spider' in {'ant': 6, 'fly': 6, 'centipede': 100}"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"'spider' in {'ant': 6, 'fly': 6, 'centipede': 100}")]
(is (helper/equal? (helper/translate {:boolValue false}) res)))))
| true |
(ns
exoscale.cel.generated.fields-test
"Generated test - Tests for field access in maps."
(:require
[clojure.test :refer [deftest testing is]]
[exoscale.cel.test-helper :as helper]
[exoscale.cel.parser :as parser]))
(deftest
fields-map_fields-map_key_int64-test
(testing
"select an element in a map\n{0:1,2:2,5:true}[5]"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"{0:1,2:2,5:true}[5]")]
(is (helper/equal? (helper/translate {:boolValue true}) res)))))
(deftest
fields-map_fields-map_key_uint64-test
(testing
"select an element in a map\n{0u:1u,2u:'happy',5u:3u}[2u]"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"{0u:1u,2u:'happy',5u:3u}[2u]")]
(is (helper/equal? (helper/translate {:stringValue "happy"}) res)))))
(deftest
fields-map_fields-map_key_string-test
(testing
"select an element in a map\n{'name':100u}['name']"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"{'name':100u}['name']")]
(is (helper/equal? (helper/translate {:uint64Value "100"}) res)))))
(deftest
fields-map_fields-map_key_bool-test
(testing
"select an element in a map\n{true:5}[true]"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"{true:5}[true]")]
(is (helper/equal? (helper/translate {:int64Value "5"}) res)))))
(deftest
fields-map_fields-map_key_mix_type-test
(testing
"select an element in a map\n{true:1,2:2,5u:3}[true]"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"{true:1,2:2,5u:3}[true]")]
(is (helper/equal? (helper/translate {:int64Value "1"}) res)))))
(deftest
fields-map_fields-map_field_access-test
(testing
"select an element in a map\nx.name"
(let
[res
(parser/parse-eval
{:bindings
(helper/bindings
{:x
{:value
{:mapValue
{:entries
[{:key {:stringValue "name"},
:value {:int64Value "1024"}}]}}}}),
:translate-result? false}
"x.name")]
(is (helper/equal? (helper/translate {:int64Value "1024"}) res)))))
(deftest
fields-map_fields-map_no_such_key-test
(testing
"select an element in a map\n{0:1,2:2,5:3}[1]"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"{0:1,2:2,5:3}[1]")]
(is (helper/error? res)))))
(deftest
fields-map_fields-map_field_select_no_such_key-test
(testing
"select an element in a map\nx.name"
(let
[res
(parser/parse-eval
{:bindings
(helper/bindings
{:x
{:value
{:mapValue
{:entries
[{:key {:stringValue "holiday"},
:value {:stringValue "field"}}]}}}}),
:translate-result? false}
"x.name")]
(is (helper/error? res)))))
(deftest
fields-map_fields-map_value_null-test
(testing
"select an element in a map\n{true:null}[true]"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"{true:null}[true]")]
(is (helper/equal? (helper/translate {:nullValue nil}) res)))))
(deftest
fields-map_fields-map_value_bool-test
(testing
"select an element in a map\n{27:false}[27]"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"{27:false}[27]")]
(is (helper/equal? (helper/translate {:boolValue false}) res)))))
(deftest
fields-map_fields-map_value_string-test
(testing
"select an element in a map\n{'n':'x'}['n']"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"{'n':'x'}['n']")]
(is (helper/equal? (helper/translate {:stringValue "x"}) res)))))
(deftest
fields-map_fields-map_value_float-test
(testing
"select an element in a map\n{3:15.15}[3]"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"{3:15.15}[3]")]
(is (helper/equal? (helper/translate {:doubleValue 15.15}) res)))))
(deftest
fields-map_fields-map_value_uint64-test
(testing
"select an element in a map\n{0u:1u,2u:2u,5u:3u}[0u]"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"{0u:1u,2u:2u,5u:3u}[0u]")]
(is (helper/equal? (helper/translate {:uint64Value "1"}) res)))))
(deftest
fields-map_fields-map_value_int64-test
(testing
"select an element in a map\n{true:1,false:2}[true]"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"{true:1,false:2}[true]")]
(is (helper/equal? (helper/translate {:int64Value "1"}) res)))))
(deftest
fields-map_fields-map_value_bytes-test
(testing
"select an element in a map\n{0:b\"\"}[0]"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"{0:b\"\"}[0]")]
(is (helper/equal? (helper/translate {:bytesValue ""}) res)))))
(deftest
fields-map_fields-map_value_list-test
(testing
"select an element in a map\n{0u:[1]}[0u]"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"{0u:[1]}[0u]")]
(is
(helper/equal?
(helper/translate {:listValue {:values [{:int64Value "1"}]}})
res)))))
(deftest
fields-map_fields-map_value_map-test
(testing
"select an element in a map\n{\"map\": {\"k\": \"v\"}}[\"map\"]"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"{\"map\": {\"k\": \"v\"}}[\"map\"]")]
(is
(helper/equal?
(helper/translate
{:mapValue
{:entries
[{:key {:stringValue "k"}, :value {:stringValue "v"}}]}})
res)))))
(deftest
fields-map_fields-map_value_mix_type-test
(testing
"select an element in a map\n{\"map\": {\"k\": \"v\"}, \"list\": [1]}[\"map\"]"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"{\"map\": {\"k\": \"v\"}, \"list\": [1]}[\"map\"]")]
(is
(helper/equal?
(helper/translate
{:mapValue
{:entries
[{:key {:stringValue "k"}, :value {:stringValue "v"}}]}})
res)))))
(deftest
fields-map_has-has-test
(testing
"Has macro for map entries.\nhas({'a': 1, 'b': 2}.a)"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"has({'a': 1, 'b': 2}.a)")]
(is (helper/equal? (helper/translate {:boolValue true}) res)))))
(deftest
fields-map_has-has_not-test
(testing
"Has macro for map entries.\nhas({'a': 1, 'b': 2}.c)"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"has({'a': 1, 'b': 2}.c)")]
(is (helper/equal? (helper/translate {:boolValue false}) res)))))
(deftest
fields-map_has-has_empty-test
(testing
"Has macro for map entries.\nhas({}.a)"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"has({}.a)")]
(is (helper/equal? (helper/translate {:boolValue false}) res)))))
(deftest
fields-qualified_identifier_resolution-list_field_select_unsupported-test
(testing
"Tests for qualified identifier resolution.\na.b.pancakes"
(let
[res
(parser/parse-eval
{:bindings
(helper/bindings
{:a.b
{:value {:listValue {:values [{:stringValue "pancakes"}]}}}}),
:translate-result? false}
"a.b.pancakes")]
(is (helper/error? res)))))
(deftest
fields-qualified_identifier_resolution-int64_field_select_unsupported-test
(testing
"Tests for qualified identifier resolution.\na.pancakes"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings {:a {:value {:int64Value "15"}}}),
:translate-result? false}
"a.pancakes")]
(is (helper/error? res)))))
(deftest
fields-qualified_identifier_resolution-map_key_float-test
(testing
"Tests for qualified identifier resolution.\n{3.3:15.15, 1.0: 5}[1.0]"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"{3.3:15.15, 1.0: 5}[1.0]")]
(is (helper/error? res)))))
(deftest
fields-qualified_identifier_resolution-map_key_null-test
(testing
"Tests for qualified identifier resolution.\n{null:false}[null]"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"{null:false}[null]")]
(is (helper/error? res)))))
(deftest
fields-in-empty-test
(testing
"Tests for 'in' operator for maps.\n7 in {}"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"7 in {}")]
(is (helper/equal? (helper/translate {:boolValue false}) res)))))
(deftest
fields-in-singleton-test
(testing
"Tests for 'in' operator for maps.\ntrue in {true: 1}"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"true in {true: 1}")]
(is (helper/equal? (helper/translate {:boolValue true}) res)))))
(deftest
fields-in-present-test
(testing
"Tests for 'in' operator for maps.\n'PI:NAME:<NAME>END_PI' in {'PI:NAME:<NAME>END_PI': 'smart', 'PI:NAME:<NAME>END_PI': 'cute', 'PI:NAME:<NAME>END_PI': 'quiet', 'PI:NAME:<NAME>END_PI': 'funny'}"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"'PI:NAME:<NAME>END_PI' in {'PI:NAME:<NAME>END_PI': 'smart', 'PI:NAME:<NAME>END_PI': 'cute', 'PI:NAME:<NAME>END_PI': 'quiet', 'PI:NAME:<NAME>END_PI': 'funny'}")]
(is (helper/equal? (helper/translate {:boolValue true}) res)))))
(deftest
fields-in-absent-test
(testing
"Tests for 'in' operator for maps.\n'spider' in {'ant': 6, 'fly': 6, 'centipede': 100}"
(let
[res
(parser/parse-eval
{:bindings (helper/bindings nil), :translate-result? false}
"'spider' in {'ant': 6, 'fly': 6, 'centipede': 100}")]
(is (helper/equal? (helper/translate {:boolValue false}) res)))))
|
[
{
"context": ";; 1\n(def booking [1425, \"Bob Smith\", \"Allergic to unsalted peanuts only\", [[48.9615,",
"end": 35,
"score": 0.9995023012161255,
"start": 26,
"tag": "NAME",
"value": "Bob Smith"
},
{
"context": "(def booking\n {\n :id 8773\n :customer-name \"Alice Smith\"\n :catering-notes \"Vegetarian on Sundays\"\n ",
"end": 1710,
"score": 0.9986802339553833,
"start": 1699,
"tag": "NAME",
"value": "Alice Smith"
}
] |
chapter03/exercise1/repl.clj
|
TrainingByPackt/Clojure
| 0 |
;; 1
(def booking [1425, "Bob Smith", "Allergic to unsalted peanuts only", [[48.9615, 2.4372], [37.742, -25.6976]], [[37.742, -25.6976], [48.9615, 2.4372]]])
;; 2
(let [[id customer-name sensitive-info flight1 flight2 flight3] booking] (println id customer-name flight1 flight2 flight3))
;; 3
(let [big-booking (conj booking [[37.742, -25.6976], [51.1537, 0.1821]] [[51.1537, 0.1821], [48.9615, 2.4372]])
[id customer-name sensitive-info flight1 flight2 flight3] big-booking] (println id customer-name flight1 flight2 flight3))
;; 4
(let [[_ customer-name _ flight1 flight2 flight3] booking] (println customer-name flight1 flight2 flight3))
;; 5
(let [[_ customer-name _ & flights] booking]
(println (str customer-name " booked " (count flights) " flights.")))
;; 6
(defn print-flight [flight]
(let [[[lat1 lon1] [lat2 lon2]] flight]
(println (str "Flying from: Lat " lat1 " Lon " lon1 " Flying to: Lat " lat2 " Lon " lon2))))
(print-flight [[48.9615, 2.4372], [37.742 -25.6976]])
;; 7
(defn print-flight [flight]
(let [[departure arrival] flight
[lat1 lon1] departure
[lat2 lon2] arrival]
(println (str "Flying from: Lat " lat1 " Lon " lon1 " Flying to: Lat " lat2 " Lon " lon2))))
(print-flight [[48.9615, 2.4372], [37.742 -25.6976]])
;; 8
(defn print-booking [booking]
(let [[_ customer-name _ & flights] booking]
(println (str customer-name " booked " (count flights) " flights."))
(let [[flight1 flight2 flight3] flights]
(when flight1 (print-flight flight1))
(when flight2 (print-flight flight2))
(when flight3 (print-flight flight3)))))
(print-booking booking)
;; 9
(def booking
{
:id 8773
:customer-name "Alice Smith"
:catering-notes "Vegetarian on Sundays"
:flights [
{
:from {:lat 48.9615 :lon 2.4372 :name "Paris Le Bourget Airport"},
:to {:lat 37.742 :lon -25.6976 :name "Ponta Delgada Airport"}},
{
:from {:lat 37.742 :lon -25.6976 :name "Ponta Delgada Airport"},
:to {:lat 48.9615 :lon 2.4372 :name "Paris Le Bourget Airport"}}
]
})
;; 10
(let [{:keys [customer-name flights]} booking] (println (str customer-name " booked " (count flights) " flights.")))
;; 11
(defn print-mapjet-flight [flight]
(let [{{lat1 :lat lon1 :lon} :from,
{lat2 :lat lon2 :lon} :to} flight]
(println (str "Flying from: Lat " lat1 " Lon " lon1 " Flying to: Lat " lat2 " Lon " lon2))))
(defn print-mapjet-flight [flight]
(let [{:keys [from to]} flight
{lat1 :lat lon1 :lon} from
{lat2 :lat lon2 :lon} to]
(println (str "Flying from: Lat " lat1 " Lon " lon1 " Flying to: Lat " lat2 " Lon " lon2))))
;; 12
(defn print-mapjet-booking [booking]
(let [{:keys [customer-name flights]} booking]
(println (str customer-name " booked " (count flights) " flights."))
(let [[flight1 flight2 flight3] flights]
(when flight1 (print-mapjet-flight flight1))
(when flight2 (print-mapjet-flight flight2))
(when flight3 (print-mapjet-flight flight3)))))
|
47149
|
;; 1
(def booking [1425, "<NAME>", "Allergic to unsalted peanuts only", [[48.9615, 2.4372], [37.742, -25.6976]], [[37.742, -25.6976], [48.9615, 2.4372]]])
;; 2
(let [[id customer-name sensitive-info flight1 flight2 flight3] booking] (println id customer-name flight1 flight2 flight3))
;; 3
(let [big-booking (conj booking [[37.742, -25.6976], [51.1537, 0.1821]] [[51.1537, 0.1821], [48.9615, 2.4372]])
[id customer-name sensitive-info flight1 flight2 flight3] big-booking] (println id customer-name flight1 flight2 flight3))
;; 4
(let [[_ customer-name _ flight1 flight2 flight3] booking] (println customer-name flight1 flight2 flight3))
;; 5
(let [[_ customer-name _ & flights] booking]
(println (str customer-name " booked " (count flights) " flights.")))
;; 6
(defn print-flight [flight]
(let [[[lat1 lon1] [lat2 lon2]] flight]
(println (str "Flying from: Lat " lat1 " Lon " lon1 " Flying to: Lat " lat2 " Lon " lon2))))
(print-flight [[48.9615, 2.4372], [37.742 -25.6976]])
;; 7
(defn print-flight [flight]
(let [[departure arrival] flight
[lat1 lon1] departure
[lat2 lon2] arrival]
(println (str "Flying from: Lat " lat1 " Lon " lon1 " Flying to: Lat " lat2 " Lon " lon2))))
(print-flight [[48.9615, 2.4372], [37.742 -25.6976]])
;; 8
(defn print-booking [booking]
(let [[_ customer-name _ & flights] booking]
(println (str customer-name " booked " (count flights) " flights."))
(let [[flight1 flight2 flight3] flights]
(when flight1 (print-flight flight1))
(when flight2 (print-flight flight2))
(when flight3 (print-flight flight3)))))
(print-booking booking)
;; 9
(def booking
{
:id 8773
:customer-name "<NAME>"
:catering-notes "Vegetarian on Sundays"
:flights [
{
:from {:lat 48.9615 :lon 2.4372 :name "Paris Le Bourget Airport"},
:to {:lat 37.742 :lon -25.6976 :name "Ponta Delgada Airport"}},
{
:from {:lat 37.742 :lon -25.6976 :name "Ponta Delgada Airport"},
:to {:lat 48.9615 :lon 2.4372 :name "Paris Le Bourget Airport"}}
]
})
;; 10
(let [{:keys [customer-name flights]} booking] (println (str customer-name " booked " (count flights) " flights.")))
;; 11
(defn print-mapjet-flight [flight]
(let [{{lat1 :lat lon1 :lon} :from,
{lat2 :lat lon2 :lon} :to} flight]
(println (str "Flying from: Lat " lat1 " Lon " lon1 " Flying to: Lat " lat2 " Lon " lon2))))
(defn print-mapjet-flight [flight]
(let [{:keys [from to]} flight
{lat1 :lat lon1 :lon} from
{lat2 :lat lon2 :lon} to]
(println (str "Flying from: Lat " lat1 " Lon " lon1 " Flying to: Lat " lat2 " Lon " lon2))))
;; 12
(defn print-mapjet-booking [booking]
(let [{:keys [customer-name flights]} booking]
(println (str customer-name " booked " (count flights) " flights."))
(let [[flight1 flight2 flight3] flights]
(when flight1 (print-mapjet-flight flight1))
(when flight2 (print-mapjet-flight flight2))
(when flight3 (print-mapjet-flight flight3)))))
| true |
;; 1
(def booking [1425, "PI:NAME:<NAME>END_PI", "Allergic to unsalted peanuts only", [[48.9615, 2.4372], [37.742, -25.6976]], [[37.742, -25.6976], [48.9615, 2.4372]]])
;; 2
(let [[id customer-name sensitive-info flight1 flight2 flight3] booking] (println id customer-name flight1 flight2 flight3))
;; 3
(let [big-booking (conj booking [[37.742, -25.6976], [51.1537, 0.1821]] [[51.1537, 0.1821], [48.9615, 2.4372]])
[id customer-name sensitive-info flight1 flight2 flight3] big-booking] (println id customer-name flight1 flight2 flight3))
;; 4
(let [[_ customer-name _ flight1 flight2 flight3] booking] (println customer-name flight1 flight2 flight3))
;; 5
(let [[_ customer-name _ & flights] booking]
(println (str customer-name " booked " (count flights) " flights.")))
;; 6
(defn print-flight [flight]
(let [[[lat1 lon1] [lat2 lon2]] flight]
(println (str "Flying from: Lat " lat1 " Lon " lon1 " Flying to: Lat " lat2 " Lon " lon2))))
(print-flight [[48.9615, 2.4372], [37.742 -25.6976]])
;; 7
(defn print-flight [flight]
(let [[departure arrival] flight
[lat1 lon1] departure
[lat2 lon2] arrival]
(println (str "Flying from: Lat " lat1 " Lon " lon1 " Flying to: Lat " lat2 " Lon " lon2))))
(print-flight [[48.9615, 2.4372], [37.742 -25.6976]])
;; 8
(defn print-booking [booking]
(let [[_ customer-name _ & flights] booking]
(println (str customer-name " booked " (count flights) " flights."))
(let [[flight1 flight2 flight3] flights]
(when flight1 (print-flight flight1))
(when flight2 (print-flight flight2))
(when flight3 (print-flight flight3)))))
(print-booking booking)
;; 9
(def booking
{
:id 8773
:customer-name "PI:NAME:<NAME>END_PI"
:catering-notes "Vegetarian on Sundays"
:flights [
{
:from {:lat 48.9615 :lon 2.4372 :name "Paris Le Bourget Airport"},
:to {:lat 37.742 :lon -25.6976 :name "Ponta Delgada Airport"}},
{
:from {:lat 37.742 :lon -25.6976 :name "Ponta Delgada Airport"},
:to {:lat 48.9615 :lon 2.4372 :name "Paris Le Bourget Airport"}}
]
})
;; 10
(let [{:keys [customer-name flights]} booking] (println (str customer-name " booked " (count flights) " flights.")))
;; 11
(defn print-mapjet-flight [flight]
(let [{{lat1 :lat lon1 :lon} :from,
{lat2 :lat lon2 :lon} :to} flight]
(println (str "Flying from: Lat " lat1 " Lon " lon1 " Flying to: Lat " lat2 " Lon " lon2))))
(defn print-mapjet-flight [flight]
(let [{:keys [from to]} flight
{lat1 :lat lon1 :lon} from
{lat2 :lat lon2 :lon} to]
(println (str "Flying from: Lat " lat1 " Lon " lon1 " Flying to: Lat " lat2 " Lon " lon2))))
;; 12
(defn print-mapjet-booking [booking]
(let [{:keys [customer-name flights]} booking]
(println (str customer-name " booked " (count flights) " flights."))
(let [[flight1 flight2 flight3] flights]
(when flight1 (print-mapjet-flight flight1))
(when flight2 (print-mapjet-flight flight2))
(when flight3 (print-mapjet-flight flight3)))))
|
[
{
"context": ";; Copyright © 2015-2019 Esko Luontola\n;; This software is released under the Apache Lic",
"end": 38,
"score": 0.9998790621757507,
"start": 25,
"tag": "NAME",
"value": "Esko Luontola"
},
{
"context": "print-namespace-maps* false}\n\n :plugins [[com.jakemccrary/lein-test-refresh \"0.14.0\"]\n [lein-anc",
"end": 2510,
"score": 0.6926772594451904,
"start": 2502,
"tag": "USERNAME",
"value": "emccrary"
}
] |
project.clj
|
JessRoberts/territory_assistant
| 0 |
;; Copyright © 2015-2019 Esko Luontola
;; This software is released under the Apache License 2.0.
;; The license text is at http://www.apache.org/licenses/LICENSE-2.0
(defproject territory-bro "1.0.0-SNAPSHOT"
:description "Territory Bro is a tool for managing territory cards in the congregations of Jehovah's Witnesses."
:url "https://territorybro.com"
:dependencies [[camel-snake-kebab "0.4.0"]
[com.attendify/schema-refined "0.3.0-alpha4"]
[com.auth0/java-jwt "3.8.2"]
[com.auth0/jwks-rsa "0.8.3"]
[com.fasterxml.jackson.core/jackson-core "2.9.9"]
[com.fasterxml.jackson.core/jackson-databind "2.9.9.1"]
[com.fasterxml.jackson.datatype/jackson-datatype-jsr310 "2.9.9"]
[com.layerware/hugsql "0.4.9"]
[compojure "1.6.1"]
[conman "0.8.3"]
[cprop "0.1.14"]
[liberator "0.15.3"]
[luminus-immutant "0.2.5"]
[luminus-nrepl "0.1.6"]
[medley "1.2.0"]
[metosin/jsonista "0.2.4"]
[metosin/ring-http-response "0.9.1"]
[metosin/ring-middleware-format "0.6.0"] ;; TODO: replace with newer library
[metosin/schema-tools "0.12.0"]
[mount "0.1.16"]
[org.clojars.luontola/ns-tracker "0.3.1-patch1"]
[org.clojure/clojure "1.10.1"]
[org.clojure/data.json "0.2.6"]
[org.clojure/test.check "0.10.0"]
[org.clojure/tools.cli "0.4.2"]
[org.clojure/tools.logging "0.5.0"]
[org.clojure/tools.reader "1.3.2"] ;; XXX: overrides old version from metosin/ring-middleware-format
[org.flywaydb/flyway-core "6.0.1"]
[org.postgresql/postgresql "42.2.6"]
[prismatic/schema "1.1.12"]
[prismatic/schema-generators "0.1.3"]
[ring-logger "1.0.1"]
[ring/ring-core "1.7.1"]
[ring/ring-defaults "0.3.2"]]
:min-lein-version "2.0.0"
:source-paths ["src"]
:java-source-paths ["src-java"]
:javac-options ["-source" "8" "-target" "8"]
:test-paths ["test"]
:resource-paths ["resources"]
:target-path "target/%s/"
:main ^:skip-aot territory-bro.main
:global-vars {*warn-on-reflection* true
*print-namespace-maps* false}
:plugins [[com.jakemccrary/lein-test-refresh "0.14.0"]
[lein-ancient "0.6.15"]]
:aliases {"kaocha" ["with-profile" "+kaocha,+test" "run" "-m" "kaocha.runner"]}
:profiles {:uberjar {:omit-source true
:aot :all
:uberjar-name "territory-bro.jar"
:resource-paths ["env/prod/resources"]}
:kaocha {:dependencies [[lambdaisland/kaocha "0.0-529"]]}
:dev [:project/dev :profiles/dev]
:test [:project/test :profiles/test]
:project/dev {:dependencies [[bananaoomarang/ring-debug-logging "1.1.0"]
[pjstadig/humane-test-output "0.9.0"]
[ring/ring-devel "1.7.1" :exclusions [ns-tracker]]
[ring/ring-mock "0.4.0"]]
:source-paths ["env/dev/clj"]
:resource-paths ["env/dev/resources"]
:repl-options {:init-ns user}
:injections [(require 'pjstadig.humane-test-output)
(pjstadig.humane-test-output/activate!)]}
:project/test {:resource-paths ["env/test/resources"]}
:profiles/dev {}
:profiles/test {}})
|
64095
|
;; Copyright © 2015-2019 <NAME>
;; This software is released under the Apache License 2.0.
;; The license text is at http://www.apache.org/licenses/LICENSE-2.0
(defproject territory-bro "1.0.0-SNAPSHOT"
:description "Territory Bro is a tool for managing territory cards in the congregations of Jehovah's Witnesses."
:url "https://territorybro.com"
:dependencies [[camel-snake-kebab "0.4.0"]
[com.attendify/schema-refined "0.3.0-alpha4"]
[com.auth0/java-jwt "3.8.2"]
[com.auth0/jwks-rsa "0.8.3"]
[com.fasterxml.jackson.core/jackson-core "2.9.9"]
[com.fasterxml.jackson.core/jackson-databind "2.9.9.1"]
[com.fasterxml.jackson.datatype/jackson-datatype-jsr310 "2.9.9"]
[com.layerware/hugsql "0.4.9"]
[compojure "1.6.1"]
[conman "0.8.3"]
[cprop "0.1.14"]
[liberator "0.15.3"]
[luminus-immutant "0.2.5"]
[luminus-nrepl "0.1.6"]
[medley "1.2.0"]
[metosin/jsonista "0.2.4"]
[metosin/ring-http-response "0.9.1"]
[metosin/ring-middleware-format "0.6.0"] ;; TODO: replace with newer library
[metosin/schema-tools "0.12.0"]
[mount "0.1.16"]
[org.clojars.luontola/ns-tracker "0.3.1-patch1"]
[org.clojure/clojure "1.10.1"]
[org.clojure/data.json "0.2.6"]
[org.clojure/test.check "0.10.0"]
[org.clojure/tools.cli "0.4.2"]
[org.clojure/tools.logging "0.5.0"]
[org.clojure/tools.reader "1.3.2"] ;; XXX: overrides old version from metosin/ring-middleware-format
[org.flywaydb/flyway-core "6.0.1"]
[org.postgresql/postgresql "42.2.6"]
[prismatic/schema "1.1.12"]
[prismatic/schema-generators "0.1.3"]
[ring-logger "1.0.1"]
[ring/ring-core "1.7.1"]
[ring/ring-defaults "0.3.2"]]
:min-lein-version "2.0.0"
:source-paths ["src"]
:java-source-paths ["src-java"]
:javac-options ["-source" "8" "-target" "8"]
:test-paths ["test"]
:resource-paths ["resources"]
:target-path "target/%s/"
:main ^:skip-aot territory-bro.main
:global-vars {*warn-on-reflection* true
*print-namespace-maps* false}
:plugins [[com.jakemccrary/lein-test-refresh "0.14.0"]
[lein-ancient "0.6.15"]]
:aliases {"kaocha" ["with-profile" "+kaocha,+test" "run" "-m" "kaocha.runner"]}
:profiles {:uberjar {:omit-source true
:aot :all
:uberjar-name "territory-bro.jar"
:resource-paths ["env/prod/resources"]}
:kaocha {:dependencies [[lambdaisland/kaocha "0.0-529"]]}
:dev [:project/dev :profiles/dev]
:test [:project/test :profiles/test]
:project/dev {:dependencies [[bananaoomarang/ring-debug-logging "1.1.0"]
[pjstadig/humane-test-output "0.9.0"]
[ring/ring-devel "1.7.1" :exclusions [ns-tracker]]
[ring/ring-mock "0.4.0"]]
:source-paths ["env/dev/clj"]
:resource-paths ["env/dev/resources"]
:repl-options {:init-ns user}
:injections [(require 'pjstadig.humane-test-output)
(pjstadig.humane-test-output/activate!)]}
:project/test {:resource-paths ["env/test/resources"]}
:profiles/dev {}
:profiles/test {}})
| true |
;; Copyright © 2015-2019 PI:NAME:<NAME>END_PI
;; This software is released under the Apache License 2.0.
;; The license text is at http://www.apache.org/licenses/LICENSE-2.0
(defproject territory-bro "1.0.0-SNAPSHOT"
:description "Territory Bro is a tool for managing territory cards in the congregations of Jehovah's Witnesses."
:url "https://territorybro.com"
:dependencies [[camel-snake-kebab "0.4.0"]
[com.attendify/schema-refined "0.3.0-alpha4"]
[com.auth0/java-jwt "3.8.2"]
[com.auth0/jwks-rsa "0.8.3"]
[com.fasterxml.jackson.core/jackson-core "2.9.9"]
[com.fasterxml.jackson.core/jackson-databind "2.9.9.1"]
[com.fasterxml.jackson.datatype/jackson-datatype-jsr310 "2.9.9"]
[com.layerware/hugsql "0.4.9"]
[compojure "1.6.1"]
[conman "0.8.3"]
[cprop "0.1.14"]
[liberator "0.15.3"]
[luminus-immutant "0.2.5"]
[luminus-nrepl "0.1.6"]
[medley "1.2.0"]
[metosin/jsonista "0.2.4"]
[metosin/ring-http-response "0.9.1"]
[metosin/ring-middleware-format "0.6.0"] ;; TODO: replace with newer library
[metosin/schema-tools "0.12.0"]
[mount "0.1.16"]
[org.clojars.luontola/ns-tracker "0.3.1-patch1"]
[org.clojure/clojure "1.10.1"]
[org.clojure/data.json "0.2.6"]
[org.clojure/test.check "0.10.0"]
[org.clojure/tools.cli "0.4.2"]
[org.clojure/tools.logging "0.5.0"]
[org.clojure/tools.reader "1.3.2"] ;; XXX: overrides old version from metosin/ring-middleware-format
[org.flywaydb/flyway-core "6.0.1"]
[org.postgresql/postgresql "42.2.6"]
[prismatic/schema "1.1.12"]
[prismatic/schema-generators "0.1.3"]
[ring-logger "1.0.1"]
[ring/ring-core "1.7.1"]
[ring/ring-defaults "0.3.2"]]
:min-lein-version "2.0.0"
:source-paths ["src"]
:java-source-paths ["src-java"]
:javac-options ["-source" "8" "-target" "8"]
:test-paths ["test"]
:resource-paths ["resources"]
:target-path "target/%s/"
:main ^:skip-aot territory-bro.main
:global-vars {*warn-on-reflection* true
*print-namespace-maps* false}
:plugins [[com.jakemccrary/lein-test-refresh "0.14.0"]
[lein-ancient "0.6.15"]]
:aliases {"kaocha" ["with-profile" "+kaocha,+test" "run" "-m" "kaocha.runner"]}
:profiles {:uberjar {:omit-source true
:aot :all
:uberjar-name "territory-bro.jar"
:resource-paths ["env/prod/resources"]}
:kaocha {:dependencies [[lambdaisland/kaocha "0.0-529"]]}
:dev [:project/dev :profiles/dev]
:test [:project/test :profiles/test]
:project/dev {:dependencies [[bananaoomarang/ring-debug-logging "1.1.0"]
[pjstadig/humane-test-output "0.9.0"]
[ring/ring-devel "1.7.1" :exclusions [ns-tracker]]
[ring/ring-mock "0.4.0"]]
:source-paths ["env/dev/clj"]
:resource-paths ["env/dev/resources"]
:repl-options {:init-ns user}
:injections [(require 'pjstadig.humane-test-output)
(pjstadig.humane-test-output/activate!)]}
:project/test {:resource-paths ["env/test/resources"]}
:profiles/dev {}
:profiles/test {}})
|
[
{
"context": "doc-store [doc-store]\n (let [alice {:crux.db/id :alice, :name \"Alice\"}\n alice-key (c/new-id alice",
"end": 195,
"score": 0.5818095207214355,
"start": 190,
"tag": "USERNAME",
"value": "alice"
},
{
"context": "-store]\n (let [alice {:crux.db/id :alice, :name \"Alice\"}\n alice-key (c/new-id alice)\n bob ",
"end": 209,
"score": 0.9942795038223267,
"start": 204,
"tag": "NAME",
"value": "Alice"
},
{
"context": "w-id alice)\n bob {:crux.db/id :bob, :name \"Bob\"}\n bob-key (c/new-id bob)\n max-key ",
"end": 288,
"score": 0.9783757328987122,
"start": 285,
"tag": "NAME",
"value": "Bob"
}
] |
crux-test/src/crux/fixtures/document_store.clj
|
jonpither/crux
| 0 |
(ns crux.fixtures.document-store
(:require [clojure.test :as t]
[crux.codec :as c]
[crux.db :as db]))
(defn test-doc-store [doc-store]
(let [alice {:crux.db/id :alice, :name "Alice"}
alice-key (c/new-id alice)
bob {:crux.db/id :bob, :name "Bob"}
bob-key (c/new-id bob)
max-key (c/new-id {:crux.db/id :max, :name "Max"})
people {alice-key alice, bob-key bob}]
(db/submit-docs doc-store people)
(t/is (= {alice-key alice}
(db/-fetch-docs doc-store #{alice-key})))
(t/is (= people
(db/-fetch-docs doc-store (conj (keys people) max-key))))
(let [evicted-alice {:crux.db/id :alice, :crux.db/evicted? true}]
(db/submit-docs doc-store {alice-key evicted-alice})
(t/is (= {alice-key evicted-alice, bob-key bob}
(db/-fetch-docs doc-store (keys people)))))))
|
12
|
(ns crux.fixtures.document-store
(:require [clojure.test :as t]
[crux.codec :as c]
[crux.db :as db]))
(defn test-doc-store [doc-store]
(let [alice {:crux.db/id :alice, :name "<NAME>"}
alice-key (c/new-id alice)
bob {:crux.db/id :bob, :name "<NAME>"}
bob-key (c/new-id bob)
max-key (c/new-id {:crux.db/id :max, :name "Max"})
people {alice-key alice, bob-key bob}]
(db/submit-docs doc-store people)
(t/is (= {alice-key alice}
(db/-fetch-docs doc-store #{alice-key})))
(t/is (= people
(db/-fetch-docs doc-store (conj (keys people) max-key))))
(let [evicted-alice {:crux.db/id :alice, :crux.db/evicted? true}]
(db/submit-docs doc-store {alice-key evicted-alice})
(t/is (= {alice-key evicted-alice, bob-key bob}
(db/-fetch-docs doc-store (keys people)))))))
| true |
(ns crux.fixtures.document-store
(:require [clojure.test :as t]
[crux.codec :as c]
[crux.db :as db]))
(defn test-doc-store [doc-store]
(let [alice {:crux.db/id :alice, :name "PI:NAME:<NAME>END_PI"}
alice-key (c/new-id alice)
bob {:crux.db/id :bob, :name "PI:NAME:<NAME>END_PI"}
bob-key (c/new-id bob)
max-key (c/new-id {:crux.db/id :max, :name "Max"})
people {alice-key alice, bob-key bob}]
(db/submit-docs doc-store people)
(t/is (= {alice-key alice}
(db/-fetch-docs doc-store #{alice-key})))
(t/is (= people
(db/-fetch-docs doc-store (conj (keys people) max-key))))
(let [evicted-alice {:crux.db/id :alice, :crux.db/evicted? true}]
(db/submit-docs doc-store {alice-key evicted-alice})
(t/is (= {alice-key evicted-alice, bob-key bob}
(db/-fetch-docs doc-store (keys people)))))))
|
[
{
"context": "testsuite:1/file1?message=adding%20file1&username=someone&useraddress=someone%40school.edu\")\n ",
"end": 1439,
"score": 0.9996147155761719,
"start": 1432,
"tag": "USERNAME",
"value": "someone"
},
{
"context": "ding file1\"))\n (is (= (.getName user) \"someone\"))\n (is (= (.getAddress user) \"someone",
"end": 1973,
"score": 0.9935208559036255,
"start": 1966,
"tag": "USERNAME",
"value": "someone"
},
{
"context": "someone\"))\n (is (= (.getAddress user) \"[email protected]\"))))\n ;now verify that a post to an existi",
"end": 2034,
"score": 0.9999115467071533,
"start": 2016,
"tag": "EMAIL",
"value": "[email protected]"
}
] |
test/ocfl_http/handler_test.clj
|
bcail/ocfl-http
| 0 |
(ns ocfl-http.handler-test
(:require [clojure.test :refer :all]
[ring.mock.request :as mock]
[clojure.data.json :as json]
[ocfl-http.handler :refer :all]
[ocfl-http.ocfllib :refer [REPO_DIR add-path-to-object get-object get-file]]
[ocfl-http.testutils :refer [create-tmp-dir delete-dir user]]
[ring.middleware.defaults :refer [wrap-defaults site-defaults]]))
(defn add-test-object
[]
(let [contentDir (create-tmp-dir)
filePath (str contentDir "/file")]
(do
(spit (clojure.java.io/file filePath) "content")
(add-path-to-object "testsuite:1" filePath "add file" user)
(delete-dir (str contentDir)))))
(deftest test-static-routes
(testing "main route"
(let [repoDir (create-tmp-dir)]
(do
(dosync (ref-set REPO_DIR repoDir))
(let [response (app (mock/request :get "/"))]
(is (= (:status response) 200))
(is (= (json/read-str (:body response)) {"OCFL REPO" {"root" repoDir}}))))))
(testing "not-found route"
(let [response (app (mock/request :get "/invalid"))]
(is (= (:status response) 404)))))
(deftest test-create
(testing "create object (by adding the first file)"
(let [repoDir (create-tmp-dir)]
(do
(dosync (ref-set REPO_DIR repoDir))
(let [response (app (-> (mock/request :post "/testsuite:1/file1?message=adding%20file1&username=someone&useraddress=someone%40school.edu")
(mock/body "content")))]
(is (= (:status response) 201))
(is (= (:body response) ""))
(is (= (slurp (get-file "testsuite:1" "file1")) "content"))
(let [object (get-object "testsuite:1")
versionInfo (.getVersionInfo object)
user (.getUser versionInfo)
message (.getMessage versionInfo)]
(is (= message "adding file1"))
(is (= (.getName user) "someone"))
(is (= (.getAddress user) "[email protected]"))))
;now verify that a post to an existing file fails
(let [response (app (-> (mock/request :post "/testsuite:1/file1")
(mock/body "content")))]
(is (= (:status response) 409))
(is (= (:body response) "testsuite:1/file1 already exists. Use PUT to overwrite.")))
(delete-dir repoDir)))))
(deftest test-update
(testing "update a file in an object"
(let [repoDir (create-tmp-dir)]
(do
(dosync (ref-set REPO_DIR repoDir))
(add-test-object)
(let [response (app (-> (mock/request :put "/testsuite:1/file?message=updating%20file")
(mock/body "updated contents")))]
(is (= (:status response) 201))
(is (= (:body response) ""))
(is (= (slurp (get-file "testsuite:1" "file")) "updated contents"))
(let [object (get-object "testsuite:1")
versionInfo (.getVersionInfo object)
message (.getMessage versionInfo)]
(is (= message "updating file"))))
(delete-dir repoDir)))))
(deftest test-show-object
(testing "get object info"
(let [repoDir (create-tmp-dir)]
(do
(dosync (ref-set REPO_DIR repoDir))
(add-test-object)
(let [response (app (mock/request :get "/testsuite:1"))
headers (:headers response)]
(is (= (:status response) 200))
(is (= (json/read-str (:body response)) {"files" {"file" {}}})))
(delete-dir repoDir))))
(testing "object not found"
(let [repoDir (create-tmp-dir)]
(do
(dosync (ref-set REPO_DIR repoDir))
(add-test-object)
(let [response (app (mock/request :get "/testsuite:not-found"))]
(is (= (:status response) 404))
(is (= (:body response) "object testsuite:not-found not found")))
(delete-dir repoDir)))))
(deftest test-get-file
(testing "get file from ocfl object"
(let [repoDir (create-tmp-dir)]
(do
(dosync (ref-set REPO_DIR repoDir))
(add-test-object)
(let [response (app (mock/request :get "/testsuite:1/file"))
headers (:headers response)]
(is (= (:status response) 200))
(is (= (headers "Content-Length") "7"))
(is (= (slurp (:body response)) "content")))
(delete-dir repoDir))))
(testing "object not found"
(let [repoDir (create-tmp-dir)]
(do
(dosync (ref-set REPO_DIR repoDir))
(let [response (app (mock/request :get "/testsuite:not-found/file1.txt"))]
(is (= (:status response) 404))
(is (= (:body response) "object testsuite:not-found not found"))
(delete-dir repoDir)))))
(testing "file not found"
(let [repoDir (create-tmp-dir)]
(do
(dosync (ref-set REPO_DIR repoDir))
(add-test-object)
(let [response (app (mock/request :get "/testsuite:1/non-existent-file"))]
(is (= (:status response) 404))
(is (= (:body response) "file non-existent-file not found"))
(delete-dir repoDir))))))
|
5343
|
(ns ocfl-http.handler-test
(:require [clojure.test :refer :all]
[ring.mock.request :as mock]
[clojure.data.json :as json]
[ocfl-http.handler :refer :all]
[ocfl-http.ocfllib :refer [REPO_DIR add-path-to-object get-object get-file]]
[ocfl-http.testutils :refer [create-tmp-dir delete-dir user]]
[ring.middleware.defaults :refer [wrap-defaults site-defaults]]))
(defn add-test-object
[]
(let [contentDir (create-tmp-dir)
filePath (str contentDir "/file")]
(do
(spit (clojure.java.io/file filePath) "content")
(add-path-to-object "testsuite:1" filePath "add file" user)
(delete-dir (str contentDir)))))
(deftest test-static-routes
(testing "main route"
(let [repoDir (create-tmp-dir)]
(do
(dosync (ref-set REPO_DIR repoDir))
(let [response (app (mock/request :get "/"))]
(is (= (:status response) 200))
(is (= (json/read-str (:body response)) {"OCFL REPO" {"root" repoDir}}))))))
(testing "not-found route"
(let [response (app (mock/request :get "/invalid"))]
(is (= (:status response) 404)))))
(deftest test-create
(testing "create object (by adding the first file)"
(let [repoDir (create-tmp-dir)]
(do
(dosync (ref-set REPO_DIR repoDir))
(let [response (app (-> (mock/request :post "/testsuite:1/file1?message=adding%20file1&username=someone&useraddress=someone%40school.edu")
(mock/body "content")))]
(is (= (:status response) 201))
(is (= (:body response) ""))
(is (= (slurp (get-file "testsuite:1" "file1")) "content"))
(let [object (get-object "testsuite:1")
versionInfo (.getVersionInfo object)
user (.getUser versionInfo)
message (.getMessage versionInfo)]
(is (= message "adding file1"))
(is (= (.getName user) "someone"))
(is (= (.getAddress user) "<EMAIL>"))))
;now verify that a post to an existing file fails
(let [response (app (-> (mock/request :post "/testsuite:1/file1")
(mock/body "content")))]
(is (= (:status response) 409))
(is (= (:body response) "testsuite:1/file1 already exists. Use PUT to overwrite.")))
(delete-dir repoDir)))))
;; PUT overwrites an existing file, creating a new version whose
;; commit message comes from the `message` query parameter.
(deftest test-update
  (testing "update a file in an object"
    (let [repoDir (create-tmp-dir)]
      (do
        (dosync (ref-set REPO_DIR repoDir))
        ;; Seed the repo with testsuite:1 containing "file" => "content".
        (add-test-object)
        (let [response (app (-> (mock/request :put "/testsuite:1/file?message=updating%20file")
                                (mock/body "updated contents")))]
          (is (= (:status response) 201))
          (is (= (:body response) ""))
          ;; The stored bytes should now be the PUT body.
          (is (= (slurp (get-file "testsuite:1" "file")) "updated contents"))
          (let [object (get-object "testsuite:1")
                versionInfo (.getVersionInfo object)
                message (.getMessage versionInfo)]
            (is (= message "updating file"))))
        (delete-dir repoDir)))))
;; GET /<object-id> returns the object's file listing as JSON;
;; a missing object yields 404 with an explanatory body.
(deftest test-show-object
  (testing "get object info"
    (let [repoDir (create-tmp-dir)]
      (do
        (dosync (ref-set REPO_DIR repoDir))
        (add-test-object)
        ;; NOTE(review): `headers` is bound but never used in this test.
        (let [response (app (mock/request :get "/testsuite:1"))
              headers (:headers response)]
          (is (= (:status response) 200))
          (is (= (json/read-str (:body response)) {"files" {"file" {}}})))
        (delete-dir repoDir))))
  (testing "object not found"
    (let [repoDir (create-tmp-dir)]
      (do
        (dosync (ref-set REPO_DIR repoDir))
        (add-test-object)
        (let [response (app (mock/request :get "/testsuite:not-found"))]
          (is (= (:status response) 404))
          (is (= (:body response) "object testsuite:not-found not found")))
        (delete-dir repoDir)))))
;; GET /<object-id>/<file> streams the file's content (with
;; Content-Length); missing objects and missing files each 404 with a
;; distinct message.
(deftest test-get-file
  (testing "get file from ocfl object"
    (let [repoDir (create-tmp-dir)]
      (do
        (dosync (ref-set REPO_DIR repoDir))
        (add-test-object)
        (let [response (app (mock/request :get "/testsuite:1/file"))
              headers (:headers response)]
          (is (= (:status response) 200))
          ;; "content" is 7 bytes.
          (is (= (headers "Content-Length") "7"))
          (is (= (slurp (:body response)) "content")))
        (delete-dir repoDir))))
  (testing "object not found"
    (let [repoDir (create-tmp-dir)]
      (do
        (dosync (ref-set REPO_DIR repoDir))
        ;; No object is created here, so any file lookup must 404.
        (let [response (app (mock/request :get "/testsuite:not-found/file1.txt"))]
          (is (= (:status response) 404))
          (is (= (:body response) "object testsuite:not-found not found"))
          (delete-dir repoDir)))))
  (testing "file not found"
    (let [repoDir (create-tmp-dir)]
      (do
        (dosync (ref-set REPO_DIR repoDir))
        (add-test-object)
        (let [response (app (mock/request :get "/testsuite:1/non-existent-file"))]
          (is (= (:status response) 404))
          (is (= (:body response) "file non-existent-file not found"))
          (delete-dir repoDir))))))
| true |
(ns ocfl-http.handler-test
(:require [clojure.test :refer :all]
[ring.mock.request :as mock]
[clojure.data.json :as json]
[ocfl-http.handler :refer :all]
[ocfl-http.ocfllib :refer [REPO_DIR add-path-to-object get-object get-file]]
[ocfl-http.testutils :refer [create-tmp-dir delete-dir user]]
[ring.middleware.defaults :refer [wrap-defaults site-defaults]]))
(defn add-test-object
[]
(let [contentDir (create-tmp-dir)
filePath (str contentDir "/file")]
(do
(spit (clojure.java.io/file filePath) "content")
(add-path-to-object "testsuite:1" filePath "add file" user)
(delete-dir (str contentDir)))))
(deftest test-static-routes
(testing "main route"
(let [repoDir (create-tmp-dir)]
(do
(dosync (ref-set REPO_DIR repoDir))
(let [response (app (mock/request :get "/"))]
(is (= (:status response) 200))
(is (= (json/read-str (:body response)) {"OCFL REPO" {"root" repoDir}}))))))
(testing "not-found route"
(let [response (app (mock/request :get "/invalid"))]
(is (= (:status response) 404)))))
(deftest test-create
(testing "create object (by adding the first file)"
(let [repoDir (create-tmp-dir)]
(do
(dosync (ref-set REPO_DIR repoDir))
(let [response (app (-> (mock/request :post "/testsuite:1/file1?message=adding%20file1&username=someone&useraddress=someone%40school.edu")
(mock/body "content")))]
(is (= (:status response) 201))
(is (= (:body response) ""))
(is (= (slurp (get-file "testsuite:1" "file1")) "content"))
(let [object (get-object "testsuite:1")
versionInfo (.getVersionInfo object)
user (.getUser versionInfo)
message (.getMessage versionInfo)]
(is (= message "adding file1"))
(is (= (.getName user) "someone"))
(is (= (.getAddress user) "PI:EMAIL:<EMAIL>END_PI"))))
;now verify that a post to an existing file fails
(let [response (app (-> (mock/request :post "/testsuite:1/file1")
(mock/body "content")))]
(is (= (:status response) 409))
(is (= (:body response) "testsuite:1/file1 already exists. Use PUT to overwrite.")))
(delete-dir repoDir)))))
(deftest test-update
(testing "update a file in an object"
(let [repoDir (create-tmp-dir)]
(do
(dosync (ref-set REPO_DIR repoDir))
(add-test-object)
(let [response (app (-> (mock/request :put "/testsuite:1/file?message=updating%20file")
(mock/body "updated contents")))]
(is (= (:status response) 201))
(is (= (:body response) ""))
(is (= (slurp (get-file "testsuite:1" "file")) "updated contents"))
(let [object (get-object "testsuite:1")
versionInfo (.getVersionInfo object)
message (.getMessage versionInfo)]
(is (= message "updating file"))))
(delete-dir repoDir)))))
(deftest test-show-object
(testing "get object info"
(let [repoDir (create-tmp-dir)]
(do
(dosync (ref-set REPO_DIR repoDir))
(add-test-object)
(let [response (app (mock/request :get "/testsuite:1"))
headers (:headers response)]
(is (= (:status response) 200))
(is (= (json/read-str (:body response)) {"files" {"file" {}}})))
(delete-dir repoDir))))
(testing "object not found"
(let [repoDir (create-tmp-dir)]
(do
(dosync (ref-set REPO_DIR repoDir))
(add-test-object)
(let [response (app (mock/request :get "/testsuite:not-found"))]
(is (= (:status response) 404))
(is (= (:body response) "object testsuite:not-found not found")))
(delete-dir repoDir)))))
(deftest test-get-file
(testing "get file from ocfl object"
(let [repoDir (create-tmp-dir)]
(do
(dosync (ref-set REPO_DIR repoDir))
(add-test-object)
(let [response (app (mock/request :get "/testsuite:1/file"))
headers (:headers response)]
(is (= (:status response) 200))
(is (= (headers "Content-Length") "7"))
(is (= (slurp (:body response)) "content")))
(delete-dir repoDir))))
(testing "object not found"
(let [repoDir (create-tmp-dir)]
(do
(dosync (ref-set REPO_DIR repoDir))
(let [response (app (mock/request :get "/testsuite:not-found/file1.txt"))]
(is (= (:status response) 404))
(is (= (:body response) "object testsuite:not-found not found"))
(delete-dir repoDir)))))
(testing "file not found"
(let [repoDir (create-tmp-dir)]
(do
(dosync (ref-set REPO_DIR repoDir))
(add-test-object)
(let [response (app (mock/request :get "/testsuite:1/non-existent-file"))]
(is (= (:status response) 404))
(is (= (:body response) "file non-existent-file not found"))
(delete-dir repoDir))))))
|
[
{
"context": "https://adventofcode.com/2020/day/12\n;; author: Vitor SRG ([email protected])\n;; date: 2021-01-08\n;;",
"end": 221,
"score": 0.9998928308486938,
"start": 212,
"tag": "NAME",
"value": "Vitor SRG"
},
{
"context": "ntofcode.com/2020/day/12\n;; author: Vitor SRG ([email protected])\n;; date: 2021-01-08\n;; execution: $ bash ./",
"end": 242,
"score": 0.999930739402771,
"start": 223,
"tag": "EMAIL",
"value": "[email protected]"
}
] |
aoc2020/d12/main.clj
|
vitorsrg/advent-of-code
| 0 |
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; domain: Advent of Code 2020
;; challenge: Day 12: Rain Risk
;; url: https://adventofcode.com/2020/day/12
;; author: Vitor SRG ([email protected])
;; date: 2021-01-08
;; execution: $ bash ./aoc2020/run.sh d12 < [INPUT_FILE]
;; example:
;; $ bash ./aoc2020/run.sh d12 < ./aoc2020/d12/ex01.txt
;; part 1 25
;; part 2 286
;; $ bash ./aoc2020/run.sh d12 < ./aoc2020/d12/input.txt
;; part 1 582
;; part 2 52069
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(ns d12.main)
(defn parse-action
  "Parse one raw navigation instruction such as \"F10\" into a pair of
   [keyword int], e.g. [:F 10]."
  [action-raw]
  (let [matched (re-matches #"^([a-zA-Z]+)(\d+)$" action-raw)
        func    (nth matched 1)
        arg     (nth matched 2)]
    (vector (keyword func) (Integer/parseInt arg))))
(defn p1-ship-next-state
  "Part 1 transition. `state` is [x y heading] where heading is in
   degrees (0 = east, 90 = north, multiples of 90 only). Applies one
   parsed action and returns the next state."
  [state action]
  (let [[x y heading] state
        [op amount]   action]
    (condp = op
      :N [x (+ y amount) heading]
      :S [x (- y amount) heading]
      :E [(+ x amount) y heading]
      :W [(- x amount) y heading]
      ;; turning only changes the heading, normalized into [0, 360)
      :L [x y (mod (+ heading amount) 360)]
      :R [x y (mod (- heading amount) 360)]
      ;; :F moves along the current heading
      :F (condp = heading
           0   [(+ x amount) y heading]
           90  [x (+ y amount) heading]
           180 [(- x amount) y heading]
           270 [x (- y amount) heading]))))
(defn rotate-vector
  "Rotate [vx vy] counter-clockwise by `degrees`, which must be a
   multiple of 90. Negative values rotate clockwise.

   Fix: the original only normalized negative inputs, so any degrees
   >= 360 (including exactly 360) fell through the `case` and threw.
   Normalizing once with `mod` handles every multiple of 90 and also
   removes the self-recursion."
  [vector degrees]
  (assert (= (mod degrees 90) 0))
  (let [[vx vy] vector]
    (case (mod degrees 360)
      0   [vx vy]
      90  [(- vy) vx]
      180 [(- vx) (- vy)]
      270 [vy (- vx)])))
(defn p2-ship-next-state
  "Part 2 transition. `state` is [ship-x ship-y waypoint-x waypoint-y],
   with the waypoint held relative to the ship. N/S/E/W move the
   waypoint, L/R rotate it about the ship, and F moves the ship toward
   the waypoint `v` times.

   Consistency fix: the :L/:R branches previously returned lazy seqs
   (via `concat`) while every other branch returned a vector; `into`
   makes all branches return vectors. Callers only destructure the
   result, so this is backward compatible."
  [state action]
  (let [[sx sy wx wy] state
        [f v]         action]
    (case f
      :N [sx sy wx (+ wy v)]
      :S [sx sy wx (- wy v)]
      :E [sx sy (+ wx v) wy]
      :W [sx sy (- wx v) wy]
      :L (into [sx sy] (rotate-vector [wx wy] v))
      :R (into [sx sy] (rotate-vector [wx wy] (- v)))
      :F [(+ sx (* v wx)) (+ sy (* v wy)) wx wy])))
(defn ship-simulate
  "Fold `actions` through `ship-transition` starting from
   `ship-initial-state`, returning the lazy sequence of every
   intermediate state (the initial state included) via `reductions`."
  [ship-transition ship-initial-state actions]
  (reductions (fn [state action] (ship-transition state action))
              ship-initial-state
              actions))
(defn -main
  "Entry point: reads the puzzle input from stdin (one instruction per
   line), then prints the part 1 and part 2 Manhattan distances of the
   ship's final position."
  [& args]
  (let [actions (->> *in*
                     (slurp)
                     (clojure.string/split-lines)
                     (map parse-action)
                     (map vec)
                     (vec))]
    ;; (clojure.pprint/pprint actions)
    ;; part 1: the ship itself moves/turns, starting at the origin facing east (0 deg)
    (let [ship-states (vec (ship-simulate p1-ship-next-state [0 0 0] actions))
          [x y _] (nth ship-states (count actions))]
      ;; (clojure.pprint/pprint ship-states)
      (println "part 1" (+ (Math/abs x) (Math/abs y))))
    ;; part 2: the waypoint starts at [10 1] relative to the ship
    (let [ship-states (vec
                       (ship-simulate p2-ship-next-state [0 0 10 1] actions))
          [x y _] (nth ship-states (count actions))]
      ;; (clojure.pprint/pprint ship-states)
      (println "part 2" (+ (Math/abs x) (Math/abs y))))))
|
111563
|
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; domain: Advent of Code 2020
;; challenge: Day 12: Rain Risk
;; url: https://adventofcode.com/2020/day/12
;; author: <NAME> (<EMAIL>)
;; date: 2021-01-08
;; execution: $ bash ./aoc2020/run.sh d12 < [INPUT_FILE]
;; example:
;; $ bash ./aoc2020/run.sh d12 < ./aoc2020/d12/ex01.txt
;; part 1 25
;; part 2 286
;; $ bash ./aoc2020/run.sh d12 < ./aoc2020/d12/input.txt
;; part 1 582
;; part 2 52069
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(ns d12.main)
(defn parse-action
[action-raw]
(let [[_ func arg] (re-matches #"^([a-zA-Z]+)(\d+)$" action-raw)]
[(keyword func) (Integer/parseInt arg)]))
(defn p1-ship-next-state
[state action]
(let [[x y d] state
[f v] action]
(case f
:N [x (+ y v) d]
:S [x (- y v) d]
:E [(+ x v) y d]
:W [(- x v) y d]
:L [x y (mod (+ d v) 360)]
:R [x y (mod (- d v) 360)]
:F (case d
0 [(+ x v) y d]
90 [x (+ y v) d]
180 [(- x v) y d]
270 [x (- y v) d]))))
(defn rotate-vector
[vector degrees]
(assert (= (mod degrees 90) 0))
(if (< degrees 0)
(rotate-vector vector (mod degrees 360))
(let [[vx vy] vector
[vrx vry] (case degrees
0 [vx vy]
90 [(- vy) vx]
180 [(- vx) (- vy)]
270 [vy (- vx)])]
[vrx vry])))
(defn p2-ship-next-state
[state action]
(let [[sx sy wx wy] state
[f v] action]
(case f
:N [sx sy wx (+ wy v)]
:S [sx sy wx (- wy v)]
:E [sx sy (+ wx v) wy]
:W [sx sy (- wx v) wy]
:L (concat [sx sy] (rotate-vector [wx wy] v))
:R (concat [sx sy] (rotate-vector [wx wy] (- v)))
:F [(+ sx (* v wx)) (+ sy (* v wy)) wx wy])))
(defn ship-simulate
[ship-transition ship-initial-state actions]
(reductions ship-transition ship-initial-state actions))
(defn -main
[& args]
(let [actions (->> *in*
(slurp)
(clojure.string/split-lines)
(map parse-action)
(map vec)
(vec))]
;; (clojure.pprint/pprint actions)
;; part 1
(let [ship-states (vec (ship-simulate p1-ship-next-state [0 0 0] actions))
[x y _] (nth ship-states (count actions))]
;; (clojure.pprint/pprint ship-states)
(println "part 1" (+ (Math/abs x) (Math/abs y))))
;; part 2
(let [ship-states (vec
(ship-simulate p2-ship-next-state [0 0 10 1] actions))
[x y _] (nth ship-states (count actions))]
;; (clojure.pprint/pprint ship-states)
(println "part 2" (+ (Math/abs x) (Math/abs y))))))
| true |
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; domain: Advent of Code 2020
;; challenge: Day 12: Rain Risk
;; url: https://adventofcode.com/2020/day/12
;; author: PI:NAME:<NAME>END_PI (PI:EMAIL:<EMAIL>END_PI)
;; date: 2021-01-08
;; execution: $ bash ./aoc2020/run.sh d12 < [INPUT_FILE]
;; example:
;; $ bash ./aoc2020/run.sh d12 < ./aoc2020/d12/ex01.txt
;; part 1 25
;; part 2 286
;; $ bash ./aoc2020/run.sh d12 < ./aoc2020/d12/input.txt
;; part 1 582
;; part 2 52069
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(ns d12.main)
(defn parse-action
[action-raw]
(let [[_ func arg] (re-matches #"^([a-zA-Z]+)(\d+)$" action-raw)]
[(keyword func) (Integer/parseInt arg)]))
(defn p1-ship-next-state
[state action]
(let [[x y d] state
[f v] action]
(case f
:N [x (+ y v) d]
:S [x (- y v) d]
:E [(+ x v) y d]
:W [(- x v) y d]
:L [x y (mod (+ d v) 360)]
:R [x y (mod (- d v) 360)]
:F (case d
0 [(+ x v) y d]
90 [x (+ y v) d]
180 [(- x v) y d]
270 [x (- y v) d]))))
(defn rotate-vector
[vector degrees]
(assert (= (mod degrees 90) 0))
(if (< degrees 0)
(rotate-vector vector (mod degrees 360))
(let [[vx vy] vector
[vrx vry] (case degrees
0 [vx vy]
90 [(- vy) vx]
180 [(- vx) (- vy)]
270 [vy (- vx)])]
[vrx vry])))
(defn p2-ship-next-state
[state action]
(let [[sx sy wx wy] state
[f v] action]
(case f
:N [sx sy wx (+ wy v)]
:S [sx sy wx (- wy v)]
:E [sx sy (+ wx v) wy]
:W [sx sy (- wx v) wy]
:L (concat [sx sy] (rotate-vector [wx wy] v))
:R (concat [sx sy] (rotate-vector [wx wy] (- v)))
:F [(+ sx (* v wx)) (+ sy (* v wy)) wx wy])))
(defn ship-simulate
[ship-transition ship-initial-state actions]
(reductions ship-transition ship-initial-state actions))
(defn -main
[& args]
(let [actions (->> *in*
(slurp)
(clojure.string/split-lines)
(map parse-action)
(map vec)
(vec))]
;; (clojure.pprint/pprint actions)
;; part 1
(let [ship-states (vec (ship-simulate p1-ship-next-state [0 0 0] actions))
[x y _] (nth ship-states (count actions))]
;; (clojure.pprint/pprint ship-states)
(println "part 1" (+ (Math/abs x) (Math/abs y))))
;; part 2
(let [ship-states (vec
(ship-simulate p2-ship-next-state [0 0 10 1] actions))
[x y _] (nth ship-states (count actions))]
;; (clojure.pprint/pprint ship-states)
(println "part 2" (+ (Math/abs x) (Math/abs y))))))
|
[
{
"context": "\n\n;; Needed to initialize pusher\n(def pusher-key \"c31ce5204231d5cdd28d\")\n\n(add-watch state :state-change-key\n ",
"end": 454,
"score": 0.999755859375,
"start": 434,
"tag": "KEY",
"value": "c31ce5204231d5cdd28d"
}
] |
data/test/clojure/68def4155acf23053744fe1fda904e4bdfdc9ae7model.cljs
|
harshp8l/deep-learning-lang-detection
| 84 |
(ns ^{:doc "Contains client-side state, validators for input fields
  and functions which react to changes made to the input fields."}
  jammer.model
  (:require [one.dispatch :as dispatch]))
(def ^{:doc "An atom containing a map which is the application's current state."}
  state (atom {}))
;; The username and channel the user is on.
(def username (atom ""))
(def channel (atom ""))
;; Needed to initialize pusher
;; NOTE(review): this key is hard-coded in client-side source. Pusher
;; app keys are public by design, but confirm this one is not meant to
;; be kept secret before shipping.
(def pusher-key "c31ce5204231d5cdd28d")
;; Re-broadcast every state change as a :state-change event carrying the
;; new state, so views can react without watching the atom directly.
(add-watch state :state-change-key
           (fn [k r o n]
             (dispatch/fire :state-change n)))
|
27244
|
(ns ^{:doc "Contains client-side state, validators for input fields
and functions which react to changes made to the input fields."}
jammer.model
(:require [one.dispatch :as dispatch]))
(def ^{:doc "An atom containing a map which is the application's current state."}
state (atom {}))
;; The username and channel the user is on.
(def username (atom ""))
(def channel (atom ""))
;; Needed to initialize pusher
(def pusher-key "<KEY>")
(add-watch state :state-change-key
(fn [k r o n]
(dispatch/fire :state-change n)))
| true |
(ns ^{:doc "Contains client-side state, validators for input fields
and functions which react to changes made to the input fields."}
jammer.model
(:require [one.dispatch :as dispatch]))
(def ^{:doc "An atom containing a map which is the application's current state."}
state (atom {}))
;; The username and channel the user is on.
(def username (atom ""))
(def channel (atom ""))
;; Needed to initialize pusher
(def pusher-key "PI:KEY:<KEY>END_PI")
(add-watch state :state-change-key
(fn [k r o n]
(dispatch/fire :state-change n)))
|
[
{
"context": "rver 2.0, we also supported a\n ;; key called `invalid-in-puppet-4` in the same route config, even though\n ;; th",
"end": 7643,
"score": 0.9994486570358276,
"start": 7624,
"tag": "KEY",
"value": "invalid-in-puppet-4"
}
] |
src/clj/puppetlabs/services/master/master_service.clj
|
gimmyxd/puppetserver
| 0 |
(ns puppetlabs.services.master.master-service
(:require [clojure.tools.logging :as log]
[ring.middleware.params :as ring]
[puppetlabs.trapperkeeper.core :refer [defservice]]
[puppetlabs.services.master.master-core :as core]
[puppetlabs.puppetserver.certificate-authority :as ca]
[puppetlabs.puppetserver.jruby-request :as jruby-request]
[puppetlabs.trapperkeeper.services :as tk-services]
[puppetlabs.comidi :as comidi]
[puppetlabs.dujour.version-check :as version-check]
[puppetlabs.metrics.http :as http-metrics]
[puppetlabs.services.protocols.master :as master]
[puppetlabs.i18n.core :as i18n]
[puppetlabs.trapperkeeper.services.status.status-core :as status-core]
[puppetlabs.services.master.master-core :as master-core]
[clojure.string :as str]))
;; Version number reported by this service's /status endpoint payload.
(def master-service-status-version 1)
;; Default list of allowed histograms/timers
(def default-metrics-allowed-hists
  ["http.active-histo"
   "http.puppet-v3-catalog-/*/-requests"
   "http.puppet-v3-environment-/*/-requests"
   "http.puppet-v3-environment_classes-/*/-requests"
   "http.puppet-v3-environments-requests"
   "http.puppet-v3-file_bucket_file-/*/-requests"
   "http.puppet-v3-file_content-/*/-requests"
   "http.puppet-v3-file_metadata-/*/-requests"
   "http.puppet-v3-file_metadatas-/*/-requests"
   "http.puppet-v3-node-/*/-requests"
   "http.puppet-v3-report-/*/-requests"
   "http.puppet-v3-static_file_content-/*/-requests"])
;; Default list of allowed values/counts
(def default-metrics-allowed-vals
  ["http.active-requests"
   "http.puppet-v3-catalog-/*/-percentage"
   "http.puppet-v3-environment-/*/-percentage"
   "http.puppet-v3-environment_classes-/*/-percentage"
   "http.puppet-v3-environments-percentage"
   "http.puppet-v3-file_bucket_file-/*/-percentage"
   "http.puppet-v3-file_content-/*/-percentage"
   "http.puppet-v3-file_metadata-/*/-percentage"
   "http.puppet-v3-file_metadatas-/*/-percentage"
   "http.puppet-v3-node-/*/-percentage"
   "http.puppet-v3-report-/*/-percentage"
   "http.puppet-v3-static_file_content-/*/-percentage"
   "http.puppet-v3-status-/*/-percentage"
   "http.total-requests"
   ; num-cpus is registered in trapperkeeper-comidi-metrics, see
   ; https://github.com/puppetlabs/trapperkeeper-comidi-metrics/blob/0.1.1/src/puppetlabs/metrics/http.clj#L117-L120
   "num-cpus"])
;; List of allowed jvm gauges/values
(def default-jvm-metrics-allowed
  ["uptime"
   "memory.heap.committed"
   "memory.heap.init"
   "memory.heap.max"
   "memory.heap.used"
   "memory.non-heap.committed"
   "memory.non-heap.init"
   "memory.non-heap.max"
   "memory.non-heap.used"
   "memory.total.committed"
   "memory.total.init"
   "memory.total.max"
   "memory.total.used"])
;; Full-response histogram names derived from the http-client metric ids
;; tracked for the status endpoint (see master-core).
(def http-client-metrics-allowed-hists
  (map #(format "http-client.experimental.with-metric-id.%s.full-response" (str/join "." %))
       master-core/puppet-server-http-client-metrics-for-status))
;; Union of all the allowed-metric lists above; registered with the
;; metrics service as this registry's default allow-list.
(def default-metrics-allowed
  (concat
   default-metrics-allowed-hists
   default-metrics-allowed-vals
   default-jvm-metrics-allowed
   http-client-metrics-allowed-hists))
;; Trapperkeeper service implementing master/MasterService: on init it
;; builds the master's ring handler (with metrics and optional JRuby
;; request-queue limiting), initializes master SSL, registers JVM
;; metrics and the "master" status callback, and mounts the handler via
;; the webrouting service.
(defservice master-service
  master/MasterService
  [[:WebroutingService add-ring-handler get-route]
   [:PuppetServerConfigService get-config]
   [:RequestHandlerService handle-request]
   [:MetricsService get-metrics-registry get-server-id update-registry-settings]
   [:CaService initialize-master-ssl! retrieve-ca-cert! retrieve-ca-crl! get-auth-handler]
   [:JRubyPuppetService]
   [:AuthorizationService wrap-with-authorization-check]
   [:SchedulerService interspaced]
   [:StatusService register-status]
   [:VersionedCodeService get-code-content current-code-id]]
  (init
   [this context]
   ;; Fail fast if the JVM heap looks too small for the configured setup.
   (core/validate-memory-requirements!)
   (let [config (get-config)
         route-config (core/get-master-route-config ::master-service config)
         path (core/get-master-mount ::master-service route-config)
         certname (get-in config [:puppetserver :certname])
         localcacert (get-in config [:puppetserver :localcacert])
         puppet-version (get-in config [:puppetserver :puppet-version])
         max-queued-requests (get-in config [:jruby-puppet :max-queued-requests] 0)
         max-retry-delay (get-in config [:jruby-puppet :max-retry-delay] 1800)
         settings (ca/config->master-settings config)
         metrics-service (tk-services/get-service this :JRubyMetricsService)
         metrics-server-id (get-server-id)
         jruby-service (tk-services/get-service this :JRubyPuppetService)
         use-legacy-auth-conf (get-in config
                                      [:jruby-puppet :use-legacy-auth-conf]
                                      false)
         environment-class-cache-enabled (get-in config
                                                 [:jruby-puppet
                                                  :environment-class-cache-enabled]
                                                 false)
         ;; Only wrap with a request-queue limit when a positive limit is
         ;; configured; otherwise the wrapper is `identity`.
         wrap-with-jruby-queue-limit (if (pos? max-queued-requests)
                                       (fn [handler]
                                         (jruby-request/wrap-with-request-queue-limit
                                          handler
                                          metrics-service
                                          max-queued-requests
                                          max-retry-delay))
                                       identity)
         ring-app (comidi/routes
                   (core/construct-root-routes puppet-version
                                               use-legacy-auth-conf
                                               jruby-service
                                               get-code-content
                                               current-code-id
                                               handle-request
                                               (get-auth-handler)
                                               wrap-with-jruby-queue-limit
                                               environment-class-cache-enabled))
         routes (comidi/context path ring-app)
         route-metadata (comidi/route-metadata routes)
         comidi-handler (comidi/routes->handler routes)
         registry (get-metrics-registry :puppetserver)
         http-metrics (http-metrics/initialize-http-metrics!
                       registry
                       metrics-server-id
                       route-metadata)
         http-client-metric-ids-for-status (atom master-core/puppet-server-http-client-metrics-for-status)
         ;; Per-route request metrics are recorded around the comidi handler.
         ring-handler (-> comidi-handler
                          (http-metrics/wrap-with-request-metrics http-metrics)
                          (comidi/wrap-with-route-metadata routes))
         hostcrl (get-in config [:puppetserver :hostcrl])]
     ;; Pull down the CA cert/CRL and set up this master's SSL files
     ;; before any handler is mounted.
     (retrieve-ca-cert! localcacert)
     (retrieve-ca-crl! hostcrl)
     (initialize-master-ssl! settings certname)
     (core/register-jvm-metrics! registry metrics-server-id)
     (update-registry-settings :puppetserver
                               {:default-metrics-allowed default-metrics-allowed})
     (log/info (i18n/trs "Master Service adding ring handlers"))
     ;; if the webrouting config uses the old-style config where
     ;; there is a single key with a route-id, we need to deal with that
     ;; for backward compat. We have a hard-coded assumption that this route-id
     ;; must be `master-routes`. In Puppet Server 2.0, we also supported a
     ;; key called `invalid-in-puppet-4` in the same route config, even though
     ;; that key is no longer used for Puppet Server 2.1 and later. We
     ;; should be able to remove this hack as soon as we are able to get rid
     ;; of the legacy routes.
     (if (and (map? route-config)
              (contains? route-config :master-routes))
       (add-ring-handler this
                         ring-handler
                         {:route-id :master-routes
                          :normalize-request-uri true})
       (add-ring-handler this
                         ring-handler
                         {:normalize-request-uri true}))
     ;; Expose the v1 status callback under the name "master".
     (register-status
      "master"
      (status-core/get-artifact-version "puppetlabs" "puppetserver")
      master-service-status-version
      (partial core/v1-status http-metrics http-client-metric-ids-for-status registry))
     ;; Stash the metrics handles in the service context for later calls.
     (-> context
         (assoc :http-metrics http-metrics)
         (assoc :http-client-metric-ids-for-status http-client-metric-ids-for-status))))
  (start
   [this context]
   (log/info (i18n/trs "Puppet Server has successfully started and is now ready to handle requests"))
   context)
  ;; Protocol method: append metric ids to the atom captured at init so
  ;; the status callback reports them too.
  (add-metric-ids-to-http-client-metrics-list!
   [this metric-ids-to-add]
   (let [metric-ids-from-context (:http-client-metric-ids-for-status
                                  (tk-services/service-context this))]
     (master-core/add-metric-ids-to-http-client-metrics-list! metric-ids-from-context
                                                              metric-ids-to-add))))
|
25130
|
(ns puppetlabs.services.master.master-service
(:require [clojure.tools.logging :as log]
[ring.middleware.params :as ring]
[puppetlabs.trapperkeeper.core :refer [defservice]]
[puppetlabs.services.master.master-core :as core]
[puppetlabs.puppetserver.certificate-authority :as ca]
[puppetlabs.puppetserver.jruby-request :as jruby-request]
[puppetlabs.trapperkeeper.services :as tk-services]
[puppetlabs.comidi :as comidi]
[puppetlabs.dujour.version-check :as version-check]
[puppetlabs.metrics.http :as http-metrics]
[puppetlabs.services.protocols.master :as master]
[puppetlabs.i18n.core :as i18n]
[puppetlabs.trapperkeeper.services.status.status-core :as status-core]
[puppetlabs.services.master.master-core :as master-core]
[clojure.string :as str]))
(def master-service-status-version 1)
;; Default list of allowed histograms/timers
(def default-metrics-allowed-hists
["http.active-histo"
"http.puppet-v3-catalog-/*/-requests"
"http.puppet-v3-environment-/*/-requests"
"http.puppet-v3-environment_classes-/*/-requests"
"http.puppet-v3-environments-requests"
"http.puppet-v3-file_bucket_file-/*/-requests"
"http.puppet-v3-file_content-/*/-requests"
"http.puppet-v3-file_metadata-/*/-requests"
"http.puppet-v3-file_metadatas-/*/-requests"
"http.puppet-v3-node-/*/-requests"
"http.puppet-v3-report-/*/-requests"
"http.puppet-v3-static_file_content-/*/-requests"])
;; Default list of allowed values/counts
(def default-metrics-allowed-vals
["http.active-requests"
"http.puppet-v3-catalog-/*/-percentage"
"http.puppet-v3-environment-/*/-percentage"
"http.puppet-v3-environment_classes-/*/-percentage"
"http.puppet-v3-environments-percentage"
"http.puppet-v3-file_bucket_file-/*/-percentage"
"http.puppet-v3-file_content-/*/-percentage"
"http.puppet-v3-file_metadata-/*/-percentage"
"http.puppet-v3-file_metadatas-/*/-percentage"
"http.puppet-v3-node-/*/-percentage"
"http.puppet-v3-report-/*/-percentage"
"http.puppet-v3-static_file_content-/*/-percentage"
"http.puppet-v3-status-/*/-percentage"
"http.total-requests"
; num-cpus is registered in trapperkeeper-comidi-metrics, see
; https://github.com/puppetlabs/trapperkeeper-comidi-metrics/blob/0.1.1/src/puppetlabs/metrics/http.clj#L117-L120
"num-cpus"])
;; List of allowed jvm gauges/values
(def default-jvm-metrics-allowed
["uptime"
"memory.heap.committed"
"memory.heap.init"
"memory.heap.max"
"memory.heap.used"
"memory.non-heap.committed"
"memory.non-heap.init"
"memory.non-heap.max"
"memory.non-heap.used"
"memory.total.committed"
"memory.total.init"
"memory.total.max"
"memory.total.used"])
(def http-client-metrics-allowed-hists
(map #(format "http-client.experimental.with-metric-id.%s.full-response" (str/join "." %))
master-core/puppet-server-http-client-metrics-for-status))
(def default-metrics-allowed
(concat
default-metrics-allowed-hists
default-metrics-allowed-vals
default-jvm-metrics-allowed
http-client-metrics-allowed-hists))
(defservice master-service
master/MasterService
[[:WebroutingService add-ring-handler get-route]
[:PuppetServerConfigService get-config]
[:RequestHandlerService handle-request]
[:MetricsService get-metrics-registry get-server-id update-registry-settings]
[:CaService initialize-master-ssl! retrieve-ca-cert! retrieve-ca-crl! get-auth-handler]
[:JRubyPuppetService]
[:AuthorizationService wrap-with-authorization-check]
[:SchedulerService interspaced]
[:StatusService register-status]
[:VersionedCodeService get-code-content current-code-id]]
(init
[this context]
(core/validate-memory-requirements!)
(let [config (get-config)
route-config (core/get-master-route-config ::master-service config)
path (core/get-master-mount ::master-service route-config)
certname (get-in config [:puppetserver :certname])
localcacert (get-in config [:puppetserver :localcacert])
puppet-version (get-in config [:puppetserver :puppet-version])
max-queued-requests (get-in config [:jruby-puppet :max-queued-requests] 0)
max-retry-delay (get-in config [:jruby-puppet :max-retry-delay] 1800)
settings (ca/config->master-settings config)
metrics-service (tk-services/get-service this :JRubyMetricsService)
metrics-server-id (get-server-id)
jruby-service (tk-services/get-service this :JRubyPuppetService)
use-legacy-auth-conf (get-in config
[:jruby-puppet :use-legacy-auth-conf]
false)
environment-class-cache-enabled (get-in config
[:jruby-puppet
:environment-class-cache-enabled]
false)
wrap-with-jruby-queue-limit (if (pos? max-queued-requests)
(fn [handler]
(jruby-request/wrap-with-request-queue-limit
handler
metrics-service
max-queued-requests
max-retry-delay))
identity)
ring-app (comidi/routes
(core/construct-root-routes puppet-version
use-legacy-auth-conf
jruby-service
get-code-content
current-code-id
handle-request
(get-auth-handler)
wrap-with-jruby-queue-limit
environment-class-cache-enabled))
routes (comidi/context path ring-app)
route-metadata (comidi/route-metadata routes)
comidi-handler (comidi/routes->handler routes)
registry (get-metrics-registry :puppetserver)
http-metrics (http-metrics/initialize-http-metrics!
registry
metrics-server-id
route-metadata)
http-client-metric-ids-for-status (atom master-core/puppet-server-http-client-metrics-for-status)
ring-handler (-> comidi-handler
(http-metrics/wrap-with-request-metrics http-metrics)
(comidi/wrap-with-route-metadata routes))
hostcrl (get-in config [:puppetserver :hostcrl])]
(retrieve-ca-cert! localcacert)
(retrieve-ca-crl! hostcrl)
(initialize-master-ssl! settings certname)
(core/register-jvm-metrics! registry metrics-server-id)
(update-registry-settings :puppetserver
{:default-metrics-allowed default-metrics-allowed})
(log/info (i18n/trs "Master Service adding ring handlers"))
;; if the webrouting config uses the old-style config where
;; there is a single key with a route-id, we need to deal with that
;; for backward compat. We have a hard-coded assumption that this route-id
;; must be `master-routes`. In Puppet Server 2.0, we also supported a
;; key called `<KEY>` in the same route config, even though
;; that key is no longer used for Puppet Server 2.1 and later. We
;; should be able to remove this hack as soon as we are able to get rid
;; of the legacy routes.
(if (and (map? route-config)
(contains? route-config :master-routes))
(add-ring-handler this
ring-handler
{:route-id :master-routes
:normalize-request-uri true})
(add-ring-handler this
ring-handler
{:normalize-request-uri true}))
(register-status
"master"
(status-core/get-artifact-version "puppetlabs" "puppetserver")
master-service-status-version
(partial core/v1-status http-metrics http-client-metric-ids-for-status registry))
(-> context
(assoc :http-metrics http-metrics)
(assoc :http-client-metric-ids-for-status http-client-metric-ids-for-status))))
(start
[this context]
(log/info (i18n/trs "Puppet Server has successfully started and is now ready to handle requests"))
context)
(add-metric-ids-to-http-client-metrics-list!
[this metric-ids-to-add]
(let [metric-ids-from-context (:http-client-metric-ids-for-status
(tk-services/service-context this))]
(master-core/add-metric-ids-to-http-client-metrics-list! metric-ids-from-context
metric-ids-to-add))))
| true |
(ns puppetlabs.services.master.master-service
(:require [clojure.tools.logging :as log]
[ring.middleware.params :as ring]
[puppetlabs.trapperkeeper.core :refer [defservice]]
[puppetlabs.services.master.master-core :as core]
[puppetlabs.puppetserver.certificate-authority :as ca]
[puppetlabs.puppetserver.jruby-request :as jruby-request]
[puppetlabs.trapperkeeper.services :as tk-services]
[puppetlabs.comidi :as comidi]
[puppetlabs.dujour.version-check :as version-check]
[puppetlabs.metrics.http :as http-metrics]
[puppetlabs.services.protocols.master :as master]
[puppetlabs.i18n.core :as i18n]
[puppetlabs.trapperkeeper.services.status.status-core :as status-core]
[puppetlabs.services.master.master-core :as master-core]
[clojure.string :as str]))
;; Version of the payload this service reports via the /status endpoint;
;; bump when the structure of that payload changes.
(def master-service-status-version 1)
;; Default list of allowed histograms/timers
(def default-metrics-allowed-hists
  ["http.active-histo"
   "http.puppet-v3-catalog-/*/-requests"
   "http.puppet-v3-environment-/*/-requests"
   "http.puppet-v3-environment_classes-/*/-requests"
   "http.puppet-v3-environments-requests"
   "http.puppet-v3-file_bucket_file-/*/-requests"
   "http.puppet-v3-file_content-/*/-requests"
   "http.puppet-v3-file_metadata-/*/-requests"
   "http.puppet-v3-file_metadatas-/*/-requests"
   "http.puppet-v3-node-/*/-requests"
   "http.puppet-v3-report-/*/-requests"
   "http.puppet-v3-static_file_content-/*/-requests"])
;; Default list of allowed values/counts
(def default-metrics-allowed-vals
  ["http.active-requests"
   "http.puppet-v3-catalog-/*/-percentage"
   "http.puppet-v3-environment-/*/-percentage"
   "http.puppet-v3-environment_classes-/*/-percentage"
   "http.puppet-v3-environments-percentage"
   "http.puppet-v3-file_bucket_file-/*/-percentage"
   "http.puppet-v3-file_content-/*/-percentage"
   "http.puppet-v3-file_metadata-/*/-percentage"
   "http.puppet-v3-file_metadatas-/*/-percentage"
   "http.puppet-v3-node-/*/-percentage"
   "http.puppet-v3-report-/*/-percentage"
   "http.puppet-v3-static_file_content-/*/-percentage"
   "http.puppet-v3-status-/*/-percentage"
   "http.total-requests"
   ; num-cpus is registered in trapperkeeper-comidi-metrics, see
   ; https://github.com/puppetlabs/trapperkeeper-comidi-metrics/blob/0.1.1/src/puppetlabs/metrics/http.clj#L117-L120
   "num-cpus"])
;; List of allowed jvm gauges/values
(def default-jvm-metrics-allowed
  ["uptime"
   "memory.heap.committed"
   "memory.heap.init"
   "memory.heap.max"
   "memory.heap.used"
   "memory.non-heap.committed"
   "memory.non-heap.init"
   "memory.non-heap.max"
   "memory.non-heap.used"
   "memory.total.committed"
   "memory.total.init"
   "memory.total.max"
   "memory.total.used"])
;; Histogram names for Puppet Server's http-client metrics, derived from
;; `master-core/puppet-server-http-client-metrics-for-status`.
(def http-client-metrics-allowed-hists
  (map #(format "http-client.experimental.with-metric-id.%s.full-response" (str/join "." %))
       master-core/puppet-server-http-client-metrics-for-status))
;; Union of all metric names allowed by default; registered with the
;; metrics service during `init` via `update-registry-settings`.
(def default-metrics-allowed
  (concat
   default-metrics-allowed-hists
   default-metrics-allowed-vals
   default-jvm-metrics-allowed
   http-client-metrics-allowed-hists))
(defservice master-service
  master/MasterService
  ;; Trapperkeeper dependencies: only the listed fns from each service are in scope below.
  [[:WebroutingService add-ring-handler get-route]
   [:PuppetServerConfigService get-config]
   [:RequestHandlerService handle-request]
   [:MetricsService get-metrics-registry get-server-id update-registry-settings]
   [:CaService initialize-master-ssl! retrieve-ca-cert! retrieve-ca-crl! get-auth-handler]
   [:JRubyPuppetService]
   [:AuthorizationService wrap-with-authorization-check]
   [:SchedulerService interspaced]
   [:StatusService register-status]
   [:VersionedCodeService get-code-content current-code-id]]
  (init
   [this context]
   ;; Fail fast if the JVM was not given enough memory to run Puppet Server.
   (core/validate-memory-requirements!)
   (let [config (get-config)
         route-config (core/get-master-route-config ::master-service config)
         path (core/get-master-mount ::master-service route-config)
         certname (get-in config [:puppetserver :certname])
         localcacert (get-in config [:puppetserver :localcacert])
         puppet-version (get-in config [:puppetserver :puppet-version])
         ;; 0 (the default) disables the request-queue limit; see wrap-with-jruby-queue-limit.
         max-queued-requests (get-in config [:jruby-puppet :max-queued-requests] 0)
         max-retry-delay (get-in config [:jruby-puppet :max-retry-delay] 1800)
         settings (ca/config->master-settings config)
         ;; NOTE(review): :JRubyMetricsService is looked up dynamically here instead of
         ;; being declared in the dependency vector above — confirm startup ordering.
         metrics-service (tk-services/get-service this :JRubyMetricsService)
         metrics-server-id (get-server-id)
         jruby-service (tk-services/get-service this :JRubyPuppetService)
         use-legacy-auth-conf (get-in config
                                      [:jruby-puppet :use-legacy-auth-conf]
                                      false)
         environment-class-cache-enabled (get-in config
                                                 [:jruby-puppet
                                                  :environment-class-cache-enabled]
                                                 false)
         ;; When a queue limit is configured, wrap handlers so excess requests are
         ;; bounced with a retry delay instead of piling up waiting for a JRuby instance.
         wrap-with-jruby-queue-limit (if (pos? max-queued-requests)
                                       (fn [handler]
                                         (jruby-request/wrap-with-request-queue-limit
                                          handler
                                          metrics-service
                                          max-queued-requests
                                          max-retry-delay))
                                       identity)
         ring-app (comidi/routes
                   (core/construct-root-routes puppet-version
                                               use-legacy-auth-conf
                                               jruby-service
                                               get-code-content
                                               current-code-id
                                               handle-request
                                               (get-auth-handler)
                                               wrap-with-jruby-queue-limit
                                               environment-class-cache-enabled))
         routes (comidi/context path ring-app)
         route-metadata (comidi/route-metadata routes)
         comidi-handler (comidi/routes->handler routes)
         registry (get-metrics-registry :puppetserver)
         http-metrics (http-metrics/initialize-http-metrics!
                       registry
                       metrics-server-id
                       route-metadata)
         http-client-metric-ids-for-status (atom master-core/puppet-server-http-client-metrics-for-status)
         ring-handler (-> comidi-handler
                          (http-metrics/wrap-with-request-metrics http-metrics)
                          (comidi/wrap-with-route-metadata routes))
         hostcrl (get-in config [:puppetserver :hostcrl])]
     ;; Ensure the CA cert/CRL are present locally and this master's SSL files exist.
     (retrieve-ca-cert! localcacert)
     (retrieve-ca-crl! hostcrl)
     (initialize-master-ssl! settings certname)
     (core/register-jvm-metrics! registry metrics-server-id)
     (update-registry-settings :puppetserver
                               {:default-metrics-allowed default-metrics-allowed})
     (log/info (i18n/trs "Master Service adding ring handlers"))
     ;; if the webrouting config uses the old-style config where
     ;; there is a single key with a route-id, we need to deal with that
     ;; for backward compat. We have a hard-coded assumption that this route-id
     ;; must be `master-routes`. In Puppet Server 2.0, we also supported a
     ;; key called `invalid-in-puppet-4` in the same route config, even though
     ;; that key is no longer used for Puppet Server 2.1 and later. We
     ;; should be able to remove this hack as soon as we are able to get rid
     ;; of the legacy routes.
     (if (and (map? route-config)
              (contains? route-config :master-routes))
       (add-ring-handler this
                         ring-handler
                         {:route-id :master-routes
                          :normalize-request-uri true})
       (add-ring-handler this
                         ring-handler
                         {:normalize-request-uri true}))
     ;; Expose this service's health and metrics through the status endpoint.
     (register-status
      "master"
      (status-core/get-artifact-version "puppetlabs" "puppetserver")
      master-service-status-version
      (partial core/v1-status http-metrics http-client-metric-ids-for-status registry))
     ;; Stash metrics handles in the service context so other lifecycle/protocol fns can reach them.
     (-> context
         (assoc :http-metrics http-metrics)
         (assoc :http-client-metric-ids-for-status http-client-metric-ids-for-status))))
  (start
   [this context]
   (log/info (i18n/trs "Puppet Server has successfully started and is now ready to handle requests"))
   context)
  ;; Protocol fn: lets other services register extra http-client metric ids to be
  ;; reported by the status endpoint.
  (add-metric-ids-to-http-client-metrics-list!
   [this metric-ids-to-add]
   (let [metric-ids-from-context (:http-client-metric-ids-for-status
                                  (tk-services/service-context this))]
     (master-core/add-metric-ids-to-http-client-metrics-list! metric-ids-from-context
                                                              metric-ids-to-add))))
|
[
{
"context": " name of a User.\n\n (user-full-name :u) ;; -> 'Cam Saul'\"\n [user-table]\n (hx/concat (hsql/qualify user-",
"end": 6575,
"score": 0.9995920062065125,
"start": 6567,
"tag": "NAME",
"value": "Cam Saul"
}
] |
c#-metabase/enterprise/backend/src/metabase_enterprise/audit/pages/common.clj
|
hanakhry/Crime_Admin
| 0 |
(ns metabase-enterprise.audit.pages.common
"Shared functions used by audit internal queries across different namespaces."
(:require [clojure.core.async :as a]
[clojure.core.memoize :as memoize]
[clojure.java.jdbc :as jdbc]
[clojure.string :as str]
[clojure.walk :as walk]
[honeysql.core :as hsql]
[honeysql.format :as hformat]
[honeysql.helpers :as h]
[java-time :as t]
[medley.core :as m]
[metabase-enterprise.audit.query-processor.middleware.handle-audit-queries :as qp.middleware.audit]
[metabase.db :as mdb]
[metabase.driver.sql-jdbc.execute :as sql-jdbc.execute]
[metabase.driver.sql-jdbc.sync :as sql-jdbc.sync]
[metabase.driver.sql.query-processor :as sql.qp]
[metabase.query-processor.context :as context]
[metabase.query-processor.timezone :as qp.tz]
[metabase.util :as u]
[metabase.util.honeysql-extensions :as hx]
[metabase.util.urls :as urls]
[schema.core :as s]
[toucan.db :as db]))
;; Maximum number of rows an audit query returns when the caller supplies no :limit.
(def ^:private ^:const default-limit 1000)
(defn- add-default-params
  "Apply caller-supplied paging overrides from `*additional-query-params*` to
  `honeysql-query`, falling back to the query's own :limit/:offset and finally
  to `default-limit` / 0."
  [honeysql-query]
  (let [params qp.middleware.audit/*additional-query-params*
        pick   (fn [override fallback]
                 (fn [current] (or override current fallback)))]
    (-> honeysql-query
        (update :limit  (pick (:limit params) default-limit))
        (update :offset (pick (:offset params) 0)))))
(defn- inject-cte-body-into-from
  "Replace any FROM source that names a CTE with that CTE's body from `ctes`,
  aliasing the body back to the original name so references elsewhere still resolve."
  [from ctes]
  (mapv (fn [source]
          (if (vector? source)
            ;; already aliased: swap in the CTE body (if any), keep the alias
            (let [[src alias] source]
              [(get ctes src src) alias])
            ;; bare source: alias the CTE body to the source's own name
            (if-let [body (ctes source)]
              [body source]
              source)))
        from))
(defn- inject-cte-body-into-join
  "Replace CTE references in a honeysql join vector with the corresponding bodies from
  `ctes`. Join vectors alternate [source condition source condition ...], hence the
  (partition 2); each replaced source is aliased back to its original name."
  [joins ctes]
  (->> joins
       (partition 2)
       (mapcat (fn [[source condition]]
                 (if (vector? source)
                   ;; source already carries an explicit alias
                   (let [[source alias] source]
                     [(if (ctes source)
                        [(ctes source) alias]
                        [source alias])
                      condition])
                   ;; bare source: alias the CTE body to the source's own name
                   [(if (ctes source)
                      [(ctes source) source]
                      source)
                    condition])))
       vec))
(defn- CTEs->subselects
  "Recursively rewrite a honeysql query's `:with` CTEs into inline subselects, for
  databases (MySQL 5.x) that don't support CTEs. Later CTEs may reference earlier
  ones, so each definition is expanded against the accumulated `ctes` map before
  being added to it."
  ([query] (CTEs->subselects query {}))
  ([{:keys [with] :as query} ctes]
   (let [ctes (reduce (fn [ctes [alias definition]]
                        (assoc ctes alias (CTEs->subselects definition ctes)))
                      ctes
                      with)]
     (walk/postwalk
      (fn [form]
        ;; splice CTE bodies into every nested map that has FROM/JOIN clauses
        (if (map? form)
          (-> form
              (m/update-existing :from inject-cte-body-into-from ctes)
              ;; TODO -- make this work with all types of joins
              (m/update-existing :left-join inject-cte-body-into-join ctes)
              (m/update-existing :join inject-cte-body-into-join ctes))
          form))
      (dissoc query :with)))))
;; TODO -- consider invalidating this cache if the application DB connection is swapped out
;; Zero-arg fn returning the application DB's default timezone (cached).
(def ^:private ^{:arglists '([])} application-db-default-timezone
  ;; cache the application DB's default timezone for an hour. I don't expect this information to change *ever*,
  ;; really, but it seems like it is possible that it *could* change. Determining this for every audit query seems
  ;; wasteful however.
  ;;
  ;; This is cached by db-type and the JDBC connection spec in case that gets changed/swapped out for one reason or
  ;; another
  (let [timezone (memoize/ttl sql-jdbc.sync/db-default-timezone :ttl/threshold (u/hours->ms 1))]
    (fn []
      (timezone (mdb/db-type) (db/connection)))))
(defn- reduce-results*
  "Compile `honeysql-query` (with default paging applied), run it against the application
  DB, and reduce the result rows with the reducing fn produced by `(rff metadata)` starting
  from `init`. On interrupt, signals `context`'s canceled-chan before rethrowing."
  [honeysql-query context rff init]
  (let [driver (mdb/db-type)
        honeysql-query (cond-> honeysql-query
                         ;; MySQL 5.x does not support CTEs, so convert them to subselects instead
                         (= driver :mysql) CTEs->subselects)
        [sql & params] (db/honeysql->sql (add-default-params honeysql-query))
        canceled-chan (context/canceled-chan context)]
    ;; MySQL driver normalizes timestamps. Setting `*results-timezone-id-override*` is a shortcut
    ;; instead of mocking up a chunk of regular QP pipeline.
    (binding [qp.tz/*results-timezone-id-override* (application-db-default-timezone)]
      (try
        (with-open [conn (jdbc/get-connection (db/connection))
                    stmt (sql-jdbc.execute/prepared-statement driver conn sql params)
                    rs (sql-jdbc.execute/execute-prepared-statement! driver stmt)]
          (let [rsmeta (.getMetaData rs)
                cols (sql-jdbc.execute/column-metadata driver rsmeta)
                metadata {:cols cols}
                rf (rff metadata)]
            (reduce rf init (sql-jdbc.execute/reducible-rows driver rs rsmeta canceled-chan))))
        (catch InterruptedException e
          ;; tell the row-reducer to stop streaming before propagating the interrupt
          (a/>!! canceled-chan :cancel)
          (throw e))))))
(defn reducible-query
  "Return a function with the signature
    (f context) -> IReduceInit
  that, when reduced, runs `honeysql-query` against the application DB, automatically including limits and offsets for
  paging."
  [honeysql-query]
  ;; bound-fn captures the current thread's dynamic bindings so the query still sees
  ;; them if the IReduceInit is reduced later, possibly on a different thread.
  (bound-fn reducible-query-fn [context]
    (reify clojure.lang.IReduceInit
      (reduce [_ rf init]
        ;; (constantly rf): ignore the column metadata and use the caller's reducing fn as-is
        (reduce-results* honeysql-query context (constantly rf) init)))))
(defn query
  "Run an internal audit query, automatically including limits and offsets for paging. This function returns results
  directly as a series of maps (the 'legacy results' format as described in
  `metabase-enterprise.audit.query-processor.middleware.handle-audit-queries.internal-queries`)"
  [honeysql-query]
  (let [context {:canceled-chan (a/promise-chan)}
        ;; rff builds a reducing fn that zips each row vector into a map keyed by column name
        rff (fn [{:keys [cols]}]
              (let [col-names (mapv (comp keyword :name) cols)]
                ((map (partial zipmap col-names)) conj)))]
    (try
      (reduce-results* honeysql-query context rff [])
      (catch InterruptedException e
        ;; NOTE(review): reduce-results* signals :cancel while this sends ::cancel —
        ;; consumers appear to treat any value on canceled-chan as "canceled"; confirm.
        (a/>!! (:canceled-chan context) ::cancel)
        (throw e)))))
;;; +----------------------------------------------------------------------------------------------------------------+
;;; | Helper Fns |
;;; +----------------------------------------------------------------------------------------------------------------+
(defn user-full-name
  "HoneySQL expression for a User's full name: first_name, a space, and last_name,
  each qualified by `user-table`, e.g. (user-full-name :u)."
  [user-table]
  (let [col (partial hsql/qualify user-table)]
    (hx/concat (col :first_name)
               (hx/literal " ")
               (col :last_name))))
(def datetime-unit-str->base-type
  "Map of datetime unit strings (possible params for queries that accept a datetime `unit` param) to the `:base_type` we
  should use for that column in the results."
  {"quarter" :type/Date
   "day" :type/Date
   "hour" :type/DateTime
   "week" :type/Date
   "default" :type/DateTime
   "day-of-week" :type/Integer
   "hour-of-day" :type/Integer
   "month" :type/Date
   "month-of-year" :type/Integer
   "day-of-month" :type/Integer
   "year" :type/Integer
   "day-of-year" :type/Integer
   "week-of-year" :type/Integer
   "quarter-of-year" :type/Integer
   "minute-of-hour" :type/Integer
   "minute" :type/DateTime})
(def DateTimeUnitStr
  "Schema for a valid QP DateTime unit as a string (the format they will come into the audit QP). E.g. something
  like `day` or `day-of-week`."
  (apply s/enum (keys datetime-unit-str->base-type)))
(defn grouped-datetime
  "Bucket datetime expression `expr` by `unit`, delegating to the SQL QP `date`
  implementation for the application database.

    (grouped-datetime :day :timestamp) ;; -> cast(timestamp AS date) [honeysql equivalent]"
  [unit expr]
  (let [app-db-driver (mdb/db-type)]
    (sql.qp/date app-db-driver (keyword unit) expr)))
(defn first-non-null
  "Build a `CASE` expression that evaluates to the first non-`NULL` of `exprs`
  (i.e. a hand-rolled COALESCE)."
  [& exprs]
  ;; CASE wants alternating WHEN/THEN forms: [:not= e nil] e [:not= e2 nil] e2 ...
  (let [when-then-pairs (interleave (map (fn [expr] [:not= expr nil]) exprs)
                                    exprs)]
    (apply hsql/call :case when-then-pairs)))
(defn zero-if-null
  "Build a `CASE` statement that will replace results of `expr` with `0` when it's `NULL`, perfect for things like
  counts."
  [expr]
  ;; CASE WHEN expr IS NOT NULL THEN expr ELSE 0 END
  (hsql/call :case [:not= expr nil] expr :else 0))
(defn lowercase-field
  "Wrap a SQL field in honeysql's %lower call-syntax keyword, e.g. :foo -> :%lower.foo."
  [field]
  (keyword (format "%%lower.%s" (name field))))
(defn add-45-days-clause
  "Add a `WHERE` clause to `query` restricting `date-column` to dates strictly after
  `days` days ago (45 by default, matching the fn's historical behavior). The 2-arity
  preserves the original interface; the 3-arity generalizes the window size."
  ([query date-column]
   (add-45-days-clause query date-column 45))
  ([query date-column days]
   ;; Compare as DATEs so the column's time component doesn't affect the cutoff.
   (h/merge-where query [:>
                         (hx/cast :date date-column)
                         (hx/cast :date (hx/literal (t/format "yyyy-MM-dd"
                                                              (t/minus (t/local-date) (t/days days)))))])))
(defn add-search-clause
  "Add a `WHERE` clause to `query` matching `query-string` (case-insensitively, as a
  substring) against any of `fields-to-search`. A blank query string adds nothing.

    (add-search-clause {} \"birds\" :t.name :db.name)"
  [query query-string & fields-to-search]
  (h/merge-where query
                 (when (seq query-string)
                   (let [pattern (str \% (str/lower-case query-string) \%)]
                     (cons :or
                           (map (fn [field]
                                  [:like (lowercase-field field) pattern])
                                fields-to-search))))))
(defn add-sort-clause
  "Add an `ORDER BY` clause to `query` on `sort-column` in `sort-direction`
  (e.g. \"asc\"/\"desc\"). Most queries will just have explicit default `ORDER BY` clauses."
  [query sort-column sort-direction]
  (let [order-spec [(keyword sort-column) (keyword sort-direction)]]
    (h/merge-order-by query order-spec)))
(defn card-public-url
  "Return HoneySQL for a `CASE` statement to return a Card's public URL if the `public_uuid` `field` is non-NULL."
  [field]
  ;; no :else branch — the CASE yields SQL NULL when `field` is NULL
  (hsql/call :case
    [:not= field nil]
    (hx/concat (urls/public-card-prefix) field)))
(defn native-or-gui
  "Return HoneySQL for a `CASE` statement to format the QueryExecution `:native` column as either `Native` or `GUI`."
  [query-execution-table]
  ;; :native is a boolean column; anything other than TRUE (including NULL) renders as "GUI"
  (hsql/call :case [:= (hsql/qualify query-execution-table :native) true] (hx/literal "Native") :else (hx/literal "GUI")))
(defn card-name-or-ad-hoc
  "HoneySQL for a `CASE` statement to return the name of a Card, or `Ad-hoc` if Card name is `NULL`
  (e.g. when the execution wasn't associated with a saved Card)."
  [card-table]
  (first-non-null (hsql/qualify card-table :name) (hx/literal "Ad-hoc")))
(defn query-execution-is-download
  "HoneySQL for a `WHERE` clause to restrict QueryExecution rows to downloads (i.e. executions returned in CSV/JSON/XLS
  format), keyed off the `context` column."
  [query-execution-table]
  (first-non-null (hsql/qualify query-execution-table :context))
  [:in (hsql/qualify query-execution-table :context) #{"csv-download" "xlsx-download" "json-download"}])
(defn group-concat
  "Portable aggregate string concatenation: MySQL `group_concat` / Postgres `string_agg`,
  joining values of `expr` with `separator`."
  [expr separator]
  (if (= (mdb/db-type) :mysql)
    ;; MySQL takes the separator via dedicated `SEPARATOR` syntax honeysql can't express,
    ;; hence the raw SQL; `separator` goes through hx/literal + to-sql so it is rendered
    ;; quoted, but keep it to trusted constant strings regardless.
    (hsql/call :group_concat (hsql/raw (format "%s SEPARATOR %s"
                                               (hformat/to-sql expr)
                                               (hformat/to-sql (hx/literal separator)))))
    (hsql/call :string_agg expr (hx/literal separator))))
|
75941
|
(ns metabase-enterprise.audit.pages.common
"Shared functions used by audit internal queries across different namespaces."
(:require [clojure.core.async :as a]
[clojure.core.memoize :as memoize]
[clojure.java.jdbc :as jdbc]
[clojure.string :as str]
[clojure.walk :as walk]
[honeysql.core :as hsql]
[honeysql.format :as hformat]
[honeysql.helpers :as h]
[java-time :as t]
[medley.core :as m]
[metabase-enterprise.audit.query-processor.middleware.handle-audit-queries :as qp.middleware.audit]
[metabase.db :as mdb]
[metabase.driver.sql-jdbc.execute :as sql-jdbc.execute]
[metabase.driver.sql-jdbc.sync :as sql-jdbc.sync]
[metabase.driver.sql.query-processor :as sql.qp]
[metabase.query-processor.context :as context]
[metabase.query-processor.timezone :as qp.tz]
[metabase.util :as u]
[metabase.util.honeysql-extensions :as hx]
[metabase.util.urls :as urls]
[schema.core :as s]
[toucan.db :as db]))
(def ^:private ^:const default-limit 1000)
(defn- add-default-params [honeysql-query]
(let [{:keys [limit offset]} qp.middleware.audit/*additional-query-params*]
(-> honeysql-query
(update :limit (fn [query-limit]
(or limit query-limit default-limit)))
(update :offset (fn [query-offset]
(or offset query-offset 0))))))
(defn- inject-cte-body-into-from
[from ctes]
(vec
(for [source from]
(if (vector? source)
(let [[source alias] source]
[(ctes source source) alias])
(if (ctes source)
[(ctes source) source]
source)))))
(defn- inject-cte-body-into-join
[joins ctes]
(->> joins
(partition 2)
(mapcat (fn [[source condition]]
(if (vector? source)
(let [[source alias] source]
[(if (ctes source)
[(ctes source) alias]
[source alias])
condition])
[(if (ctes source)
[(ctes source) source]
source)
condition])))
vec))
(defn- CTEs->subselects
([query] (CTEs->subselects query {}))
([{:keys [with] :as query} ctes]
(let [ctes (reduce (fn [ctes [alias definition]]
(assoc ctes alias (CTEs->subselects definition ctes)))
ctes
with)]
(walk/postwalk
(fn [form]
(if (map? form)
(-> form
(m/update-existing :from inject-cte-body-into-from ctes)
;; TODO -- make this work with all types of joins
(m/update-existing :left-join inject-cte-body-into-join ctes)
(m/update-existing :join inject-cte-body-into-join ctes))
form))
(dissoc query :with)))))
;; TODO - fixme
(def ^:private ^{:arglists '([])} application-db-default-timezone
;; cache the application DB's default timezone for an hour. I don't expect this information to change *ever*,
;; really, but it seems like it is possible that it *could* change. Determining this for every audit query seems
;; wasteful however.
;;
;; This is cached by db-type and the JDBC connection spec in case that gets changed/swapped out for one reason or
;; another
(let [timezone (memoize/ttl sql-jdbc.sync/db-default-timezone :ttl/threshold (u/hours->ms 1))]
(fn []
(timezone (mdb/db-type) (db/connection)))))
(defn- reduce-results* [honeysql-query context rff init]
(let [driver (mdb/db-type)
honeysql-query (cond-> honeysql-query
;; MySQL 5.x does not support CTEs, so convert them to subselects instead
(= driver :mysql) CTEs->subselects)
[sql & params] (db/honeysql->sql (add-default-params honeysql-query))
canceled-chan (context/canceled-chan context)]
;; MySQL driver normalizies timestamps. Setting `*results-timezone-id-override*` is a shortcut
;; instead of mocking up a chunk of regular QP pipeline.
(binding [qp.tz/*results-timezone-id-override* (application-db-default-timezone)]
(try
(with-open [conn (jdbc/get-connection (db/connection))
stmt (sql-jdbc.execute/prepared-statement driver conn sql params)
rs (sql-jdbc.execute/execute-prepared-statement! driver stmt)]
(let [rsmeta (.getMetaData rs)
cols (sql-jdbc.execute/column-metadata driver rsmeta)
metadata {:cols cols}
rf (rff metadata)]
(reduce rf init (sql-jdbc.execute/reducible-rows driver rs rsmeta canceled-chan))))
(catch InterruptedException e
(a/>!! canceled-chan :cancel)
(throw e))))))
(defn reducible-query
"Return a function with the signature
(f context) -> IReduceInit
that, when reduced, runs `honeysql-query` against the application DB, automatically including limits and offsets for
paging."
[honeysql-query]
(bound-fn reducible-query-fn [context]
(reify clojure.lang.IReduceInit
(reduce [_ rf init]
(reduce-results* honeysql-query context (constantly rf) init)))))
(defn query
"Run a internal audit query, automatically including limits and offsets for paging. This function returns results
directly as a series of maps (the 'legacy results' format as described in
`metabase-enterprise.audit.query-processor.middleware.handle-audit-queries.internal-queries`)"
[honeysql-query]
(let [context {:canceled-chan (a/promise-chan)}
rff (fn [{:keys [cols]}]
(let [col-names (mapv (comp keyword :name) cols)]
((map (partial zipmap col-names)) conj)))]
(try
(reduce-results* honeysql-query context rff [])
(catch InterruptedException e
(a/>!! (:canceled-chan context) ::cancel)
(throw e)))))
;;; +----------------------------------------------------------------------------------------------------------------+
;;; | Helper Fns |
;;; +----------------------------------------------------------------------------------------------------------------+
(defn user-full-name
"HoneySQL to grab the full name of a User.
(user-full-name :u) ;; -> '<NAME>'"
[user-table]
(hx/concat (hsql/qualify user-table :first_name)
(hx/literal " ")
(hsql/qualify user-table :last_name)))
(def datetime-unit-str->base-type
"Map of datetime unit strings (possible params for queries that accept a datetime `unit` param) to the `:base_type` we
should use for that column in the results."
{"quarter" :type/Date
"day" :type/Date
"hour" :type/DateTime
"week" :type/Date
"default" :type/DateTime
"day-of-week" :type/Integer
"hour-of-day" :type/Integer
"month" :type/Date
"month-of-year" :type/Integer
"day-of-month" :type/Integer
"year" :type/Integer
"day-of-year" :type/Integer
"week-of-year" :type/Integer
"quarter-of-year" :type/Integer
"minute-of-hour" :type/Integer
"minute" :type/DateTime})
(def DateTimeUnitStr
"Scheme for a valid QP DateTime unit as a string (the format they will come into the audit QP). E.g. something
like `day` or `day-of-week`."
(apply s/enum (keys datetime-unit-str->base-type)))
(defn grouped-datetime
"Group a datetime expression by `unit` using the appropriate SQL QP `date` implementation for our application
database.
(grouped-datetime :day :timestamp) ;; -> `cast(timestamp AS date)` [honeysql equivalent]"
[unit expr]
(sql.qp/date (mdb/db-type) (keyword unit) expr))
(defn first-non-null
"Build a `CASE` statement that returns the first non-`NULL` of `exprs`."
[& exprs]
(apply hsql/call :case (mapcat (fn [expr]
[[:not= expr nil] expr])
exprs)))
(defn zero-if-null
"Build a `CASE` statement that will replace results of `expr` with `0` when it's `NULL`, perfect for things like
counts."
[expr]
(hsql/call :case [:not= expr nil] expr :else 0))
(defn lowercase-field
"Lowercase a SQL field, to enter into honeysql query"
[field]
(keyword (str "%lower." (name field))))
(defn add-45-days-clause
"Add an appropriate `WHERE` clause to limit query to 45 days"
[query date_column]
(h/merge-where query [:>
(hx/cast :date date_column)
(hx/cast :date (hx/literal (t/format "yyyy-MM-dd" (t/minus (t/local-date) (t/days 45)))))]))
(defn add-search-clause
"Add an appropriate `WHERE` clause to `query` to see if any of the `fields-to-search` match `query-string`.
(add-search-clause {} \"birds\" :t.name :db.name)"
[query query-string & fields-to-search]
(h/merge-where query (when (seq query-string)
(let [query-string (str \% (str/lower-case query-string) \%)]
(cons
:or
(for [field fields-to-search]
[:like (lowercase-field field) query-string]))))))
(defn add-sort-clause
"Add an `ORDER BY` clause to `query` on `sort-column` and `sort-direction`.
Most queries will just have explicit default `ORDER BY` clauses"
[query sort-column sort-direction]
(h/merge-order-by query [(keyword sort-column) (keyword sort-direction)]))
(defn card-public-url
"Return HoneySQL for a `CASE` statement to return a Card's public URL if the `public_uuid` `field` is non-NULL."
[field]
(hsql/call :case
[:not= field nil]
(hx/concat (urls/public-card-prefix) field)))
(defn native-or-gui
"Return HoneySQL for a `CASE` statement to format the QueryExecution `:native` column as either `Native` or `GUI`."
[query-execution-table]
(hsql/call :case [:= (hsql/qualify query-execution-table :native) true] (hx/literal "Native") :else (hx/literal "GUI")))
(defn card-name-or-ad-hoc
"HoneySQL for a `CASE` statement to return the name of a Card, or `Ad-hoc` if Card name is `NULL`."
[card-table]
(first-non-null (hsql/qualify card-table :name) (hx/literal "Ad-hoc")))
(defn query-execution-is-download
"HoneySQL for a `WHERE` clause to restrict QueryExecution rows to downloads (i.e. executions returned in CSV/JSON/XLS
format)."
[query-execution-table]
[:in (hsql/qualify query-execution-table :context) #{"csv-download" "xlsx-download" "json-download"}])
(defn group-concat
"Portable MySQL `group_concat`/Postgres `string_agg`"
[expr separator]
(if (= (mdb/db-type) :mysql)
(hsql/call :group_concat (hsql/raw (format "%s SEPARATOR %s"
(hformat/to-sql expr)
(hformat/to-sql (hx/literal separator)))))
(hsql/call :string_agg expr (hx/literal separator))))
| true |
(ns metabase-enterprise.audit.pages.common
"Shared functions used by audit internal queries across different namespaces."
(:require [clojure.core.async :as a]
[clojure.core.memoize :as memoize]
[clojure.java.jdbc :as jdbc]
[clojure.string :as str]
[clojure.walk :as walk]
[honeysql.core :as hsql]
[honeysql.format :as hformat]
[honeysql.helpers :as h]
[java-time :as t]
[medley.core :as m]
[metabase-enterprise.audit.query-processor.middleware.handle-audit-queries :as qp.middleware.audit]
[metabase.db :as mdb]
[metabase.driver.sql-jdbc.execute :as sql-jdbc.execute]
[metabase.driver.sql-jdbc.sync :as sql-jdbc.sync]
[metabase.driver.sql.query-processor :as sql.qp]
[metabase.query-processor.context :as context]
[metabase.query-processor.timezone :as qp.tz]
[metabase.util :as u]
[metabase.util.honeysql-extensions :as hx]
[metabase.util.urls :as urls]
[schema.core :as s]
[toucan.db :as db]))
(def ^:private ^:const default-limit 1000)
(defn- add-default-params [honeysql-query]
(let [{:keys [limit offset]} qp.middleware.audit/*additional-query-params*]
(-> honeysql-query
(update :limit (fn [query-limit]
(or limit query-limit default-limit)))
(update :offset (fn [query-offset]
(or offset query-offset 0))))))
(defn- inject-cte-body-into-from
[from ctes]
(vec
(for [source from]
(if (vector? source)
(let [[source alias] source]
[(ctes source source) alias])
(if (ctes source)
[(ctes source) source]
source)))))
(defn- inject-cte-body-into-join
[joins ctes]
(->> joins
(partition 2)
(mapcat (fn [[source condition]]
(if (vector? source)
(let [[source alias] source]
[(if (ctes source)
[(ctes source) alias]
[source alias])
condition])
[(if (ctes source)
[(ctes source) source]
source)
condition])))
vec))
(defn- CTEs->subselects
([query] (CTEs->subselects query {}))
([{:keys [with] :as query} ctes]
(let [ctes (reduce (fn [ctes [alias definition]]
(assoc ctes alias (CTEs->subselects definition ctes)))
ctes
with)]
(walk/postwalk
(fn [form]
(if (map? form)
(-> form
(m/update-existing :from inject-cte-body-into-from ctes)
;; TODO -- make this work with all types of joins
(m/update-existing :left-join inject-cte-body-into-join ctes)
(m/update-existing :join inject-cte-body-into-join ctes))
form))
(dissoc query :with)))))
;; TODO - fixme
(def ^:private ^{:arglists '([])} application-db-default-timezone
;; cache the application DB's default timezone for an hour. I don't expect this information to change *ever*,
;; really, but it seems like it is possible that it *could* change. Determining this for every audit query seems
;; wasteful however.
;;
;; This is cached by db-type and the JDBC connection spec in case that gets changed/swapped out for one reason or
;; another
(let [timezone (memoize/ttl sql-jdbc.sync/db-default-timezone :ttl/threshold (u/hours->ms 1))]
(fn []
(timezone (mdb/db-type) (db/connection)))))
(defn- reduce-results* [honeysql-query context rff init]
(let [driver (mdb/db-type)
honeysql-query (cond-> honeysql-query
;; MySQL 5.x does not support CTEs, so convert them to subselects instead
(= driver :mysql) CTEs->subselects)
[sql & params] (db/honeysql->sql (add-default-params honeysql-query))
canceled-chan (context/canceled-chan context)]
;; MySQL driver normalizies timestamps. Setting `*results-timezone-id-override*` is a shortcut
;; instead of mocking up a chunk of regular QP pipeline.
(binding [qp.tz/*results-timezone-id-override* (application-db-default-timezone)]
(try
(with-open [conn (jdbc/get-connection (db/connection))
stmt (sql-jdbc.execute/prepared-statement driver conn sql params)
rs (sql-jdbc.execute/execute-prepared-statement! driver stmt)]
(let [rsmeta (.getMetaData rs)
cols (sql-jdbc.execute/column-metadata driver rsmeta)
metadata {:cols cols}
rf (rff metadata)]
(reduce rf init (sql-jdbc.execute/reducible-rows driver rs rsmeta canceled-chan))))
(catch InterruptedException e
(a/>!! canceled-chan :cancel)
(throw e))))))
(defn reducible-query
"Return a function with the signature
(f context) -> IReduceInit
that, when reduced, runs `honeysql-query` against the application DB, automatically including limits and offsets for
paging."
[honeysql-query]
(bound-fn reducible-query-fn [context]
(reify clojure.lang.IReduceInit
(reduce [_ rf init]
(reduce-results* honeysql-query context (constantly rf) init)))))
(defn query
"Run a internal audit query, automatically including limits and offsets for paging. This function returns results
directly as a series of maps (the 'legacy results' format as described in
`metabase-enterprise.audit.query-processor.middleware.handle-audit-queries.internal-queries`)"
[honeysql-query]
(let [context {:canceled-chan (a/promise-chan)}
rff (fn [{:keys [cols]}]
(let [col-names (mapv (comp keyword :name) cols)]
((map (partial zipmap col-names)) conj)))]
(try
(reduce-results* honeysql-query context rff [])
(catch InterruptedException e
(a/>!! (:canceled-chan context) ::cancel)
(throw e)))))
;;; +----------------------------------------------------------------------------------------------------------------+
;;; | Helper Fns |
;;; +----------------------------------------------------------------------------------------------------------------+
(defn user-full-name
"HoneySQL to grab the full name of a User.
(user-full-name :u) ;; -> 'PI:NAME:<NAME>END_PI'"
[user-table]
(hx/concat (hsql/qualify user-table :first_name)
(hx/literal " ")
(hsql/qualify user-table :last_name)))
(def datetime-unit-str->base-type
"Map of datetime unit strings (possible params for queries that accept a datetime `unit` param) to the `:base_type` we
should use for that column in the results."
{"quarter" :type/Date
"day" :type/Date
"hour" :type/DateTime
"week" :type/Date
"default" :type/DateTime
"day-of-week" :type/Integer
"hour-of-day" :type/Integer
"month" :type/Date
"month-of-year" :type/Integer
"day-of-month" :type/Integer
"year" :type/Integer
"day-of-year" :type/Integer
"week-of-year" :type/Integer
"quarter-of-year" :type/Integer
"minute-of-hour" :type/Integer
"minute" :type/DateTime})
(def DateTimeUnitStr
"Scheme for a valid QP DateTime unit as a string (the format they will come into the audit QP). E.g. something
like `day` or `day-of-week`."
(apply s/enum (keys datetime-unit-str->base-type)))
(defn grouped-datetime
"Group a datetime expression by `unit` using the appropriate SQL QP `date` implementation for our application
database.
(grouped-datetime :day :timestamp) ;; -> `cast(timestamp AS date)` [honeysql equivalent]"
[unit expr]
(sql.qp/date (mdb/db-type) (keyword unit) expr))
(defn first-non-null
"Build a `CASE` statement that returns the first non-`NULL` of `exprs`."
[& exprs]
(apply hsql/call :case (mapcat (fn [expr]
[[:not= expr nil] expr])
exprs)))
(defn zero-if-null
"Build a `CASE` statement that will replace results of `expr` with `0` when it's `NULL`, perfect for things like
counts."
[expr]
(hsql/call :case [:not= expr nil] expr :else 0))
(defn lowercase-field
"Lowercase a SQL field, to enter into honeysql query"
[field]
(keyword (str "%lower." (name field))))
(defn add-45-days-clause
"Add an appropriate `WHERE` clause to limit query to 45 days"
[query date_column]
(h/merge-where query [:>
(hx/cast :date date_column)
(hx/cast :date (hx/literal (t/format "yyyy-MM-dd" (t/minus (t/local-date) (t/days 45)))))]))
(defn add-search-clause
"Add an appropriate `WHERE` clause to `query` to see if any of the `fields-to-search` match `query-string`.
(add-search-clause {} \"birds\" :t.name :db.name)"
[query query-string & fields-to-search]
(h/merge-where query (when (seq query-string)
(let [query-string (str \% (str/lower-case query-string) \%)]
(cons
:or
(for [field fields-to-search]
[:like (lowercase-field field) query-string]))))))
(defn add-sort-clause
"Add an `ORDER BY` clause to `query` on `sort-column` and `sort-direction`.
Most queries will just have explicit default `ORDER BY` clauses"
[query sort-column sort-direction]
(h/merge-order-by query [(keyword sort-column) (keyword sort-direction)]))
(defn card-public-url
  "Return HoneySQL for a `CASE` statement to return a Card's public URL if the `public_uuid` `field` is non-NULL."
  [field]
  ;; NULL field -> no :else branch, so the CASE yields NULL.
  (let [public-url (hx/concat (urls/public-card-prefix) field)]
    (hsql/call :case [:not= field nil] public-url)))
(defn native-or-gui
  "Return HoneySQL for a `CASE` statement to format the QueryExecution `:native` column as either `Native` or `GUI`."
  [query-execution-table]
  (let [native-column (hsql/qualify query-execution-table :native)]
    (hsql/call :case
               [:= native-column true] (hx/literal "Native")
               :else                   (hx/literal "GUI"))))
(defn card-name-or-ad-hoc
  "HoneySQL for a `CASE` statement to return the name of a Card, or `Ad-hoc` if Card name is `NULL`."
  [card-table]
  (let [card-name (hsql/qualify card-table :name)]
    (first-non-null card-name (hx/literal "Ad-hoc"))))
(defn query-execution-is-download
  "HoneySQL for a `WHERE` clause to restrict QueryExecution rows to downloads (i.e. executions returned in CSV/JSON/XLS
  format)."
  [query-execution-table]
  (let [download-contexts #{"csv-download" "xlsx-download" "json-download"}]
    [:in (hsql/qualify query-execution-table :context) download-contexts]))
(defn group-concat
  "Portable MySQL `group_concat`/Postgres `string_agg`"
  [expr separator]
  (case (mdb/db-type)
    ;; MySQL's GROUP_CONCAT takes the separator via special SEPARATOR syntax,
    ;; which honeysql can't express directly, so render raw SQL here.
    :mysql (hsql/call :group_concat (hsql/raw (format "%s SEPARATOR %s"
                                                      (hformat/to-sql expr)
                                                      (hformat/to-sql (hx/literal separator)))))
    ;; default (Postgres / H2): string_agg(expr, separator)
    (hsql/call :string_agg expr (hx/literal separator))))
|
[
{
"context": ";\n; Copyright © 2013 Sebastian Hoß <[email protected]>\n; This work is free. You can redi",
"end": 34,
"score": 0.9998714923858643,
"start": 21,
"tag": "NAME",
"value": "Sebastian Hoß"
},
{
"context": ";\n; Copyright © 2013 Sebastian Hoß <[email protected]>\n; This work is free. You can redistribute it and",
"end": 49,
"score": 0.9999299049377441,
"start": 36,
"tag": "EMAIL",
"value": "[email protected]"
}
] |
src/test/clojure/finj/common_test.clj
|
sebhoss/finj
| 30 |
;
; Copyright © 2013 Sebastian Hoß <[email protected]>
; This work is free. You can redistribute it and/or modify it under the
; terms of the Do What The Fuck You Want To Public License, Version 2,
; as published by Sam Hocevar. See http://www.wtfpl.net/ for more details.
;
(ns finj.common-test
(:require [finj.common :refer :all]
[clojure.test :refer :all]))
;; `rate` converts a per-cent value into its fractional rate
;; (10 -> 1/10, 12.5 -> 0.125), preserving the numeric type of the input.
(deftest rate-test
  (testing "with integers"
    (is (= 1/10 (rate :rate-per-cent 10))))
  (testing "with floats"
    (is (= 0.125 (rate :rate-per-cent 12.5)))))

;; `accumulation-factor` is 1 + rate; these cases show the result keeps the
;; input's numeric type (integer, double, ratio).
(deftest accumulation-factor-test
  (testing "with integers"
    (is (= 2 (accumulation-factor :rate 1))))
  (testing "with floats"
    (is (= 2.0 (accumulation-factor :rate 1.0))))
  (testing "with ratios"
    (is (= 5/4 (accumulation-factor :rate 1/4)))))
|
37315
|
;
; Copyright © 2013 <NAME> <<EMAIL>>
; This work is free. You can redistribute it and/or modify it under the
; terms of the Do What The Fuck You Want To Public License, Version 2,
; as published by Sam Hocevar. See http://www.wtfpl.net/ for more details.
;
(ns finj.common-test
(:require [finj.common :refer :all]
[clojure.test :refer :all]))
(deftest rate-test
(testing "with integers"
(is (= 1/10 (rate :rate-per-cent 10))))
(testing "with floats"
(is (= 0.125 (rate :rate-per-cent 12.5)))))
(deftest accumulation-factor-test
(testing "with integers"
(is (= 2 (accumulation-factor :rate 1))))
(testing "with floats"
(is (= 2.0 (accumulation-factor :rate 1.0))))
(testing "with ratios"
(is (= 5/4 (accumulation-factor :rate 1/4)))))
| true |
;
; Copyright © 2013 PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
; This work is free. You can redistribute it and/or modify it under the
; terms of the Do What The Fuck You Want To Public License, Version 2,
; as published by Sam Hocevar. See http://www.wtfpl.net/ for more details.
;
(ns finj.common-test
(:require [finj.common :refer :all]
[clojure.test :refer :all]))
(deftest rate-test
(testing "with integers"
(is (= 1/10 (rate :rate-per-cent 10))))
(testing "with floats"
(is (= 0.125 (rate :rate-per-cent 12.5)))))
(deftest accumulation-factor-test
(testing "with integers"
(is (= 2 (accumulation-factor :rate 1))))
(testing "with floats"
(is (= 2.0 (accumulation-factor :rate 1.0))))
(testing "with ratios"
(is (= 5/4 (accumulation-factor :rate 1/4)))))
|
[
{
"context": "se 'slack-rtm.core)\n\n;; slack to file\n(def token \"xoxb-64790720018-RhD3YOsLWJ4yc13GlX0CDeQe\")\n(defn rcv-msg[evt]\n\t(spit \"evt.log\" (:message e",
"end": 93,
"score": 0.9991530179977417,
"start": 52,
"tag": "KEY",
"value": "xoxb-64790720018-RhD3YOsLWJ4yc13GlX0CDeQe"
}
] |
Chapter 10 Code/riemaning/src/riemaning/slacking.clj
|
PacktPublishing/Clojure-Programming-Cookbook
| 14 |
(use 'slack-rtm.core)

;; slack to file
;; SECURITY NOTE(review): this Slack bot token is hard-coded in source control.
;; Treat it as compromised — revoke it and load the token from an environment
;; variable or external config instead of committing it.
(def token "xoxb-64790720018-RhD3YOsLWJ4yc13GlX0CDeQe")

;; Append each received event's :message to a local log file.
(defn rcv-msg[evt]
	(spit "evt.log" (:message evt) :append true)
)

;; Open the RTM connection; rcv-msg is invoked for every incoming event.
(def rtm-conn (connect token :on-receive rcv-msg))
|
6969
|
(use 'slack-rtm.core)
;; slack to file
(def token "<KEY>")
(defn rcv-msg[evt]
(spit "evt.log" (:message evt) :append true)
)
(def rtm-conn (connect token :on-receive rcv-msg))
| true |
(use 'slack-rtm.core)
;; slack to file
(def token "PI:KEY:<KEY>END_PI")
(defn rcv-msg[evt]
(spit "evt.log" (:message evt) :append true)
)
(def rtm-conn (connect token :on-receive rcv-msg))
|
[
{
"context": "}\n \"registered-association\" {:fi \"Rekisteröity yhdistys\"\n :en \"Registered ass",
"end": 212,
"score": 0.5936511158943176,
"start": 205,
"tag": "NAME",
"value": "hdistys"
}
] |
webapp/src/cljc/lipas/data/owners.cljc
|
lipas-liikuntapaikat/lipas
| 49 |
(ns lipas.data.owners)

;; Lookup table of sports-facility owner types, keyed by a stable string id.
;; Each value maps locale keys (:fi Finnish, :en English, :se Swedish) to the
;; localized display name.
(def all
  {"city"                   {:fi "Kunta"
                             :en "City"
                             :se "Kommun"}
   "registered-association" {:fi "Rekisteröity yhdistys"
                             :en "Registered association"
                             :se "Registrerad förening"}
   "company-ltd"            {:fi "Yritys"
                             :en "Company ltd"
                             :se "Företag"}
   "city-main-owner"        {:fi "Kuntaenemmistöinen yritys"
                             :en "City main owner"
                             :se "Företag med kommun som majoritetsägare"}
   "municipal-consortium"   {:fi "Kuntayhtymä"
                             :en "Municipal consortium"
                             :se "Samkommun"}
   "foundation"             {:fi "Säätiö"
                             :en "Foundation"
                             :se "Stiftelse"}
   "state"                  {:fi "Valtio"
                             :en "State"
                             :se "Stat"}
   "other"                  {:fi "Muu"
                             :en "Other"
                             :se "Annat"}
   "unknown"                {:fi "Ei tietoa"
                             :en "Unknown"
                             :se "Okänt"}})
|
71892
|
(ns lipas.data.owners)
(def all
{"city" {:fi "Kunta"
:en "City"
:se "Kommun"}
"registered-association" {:fi "Rekisteröity y<NAME>"
:en "Registered association"
:se "Registrerad förening"}
"company-ltd" {:fi "Yritys"
:en "Company ltd"
:se "Företag"}
"city-main-owner" {:fi "Kuntaenemmistöinen yritys"
:en "City main owner"
:se "Företag med kommun som majoritetsägare"}
"municipal-consortium" {:fi "Kuntayhtymä"
:en "Municipal consortium"
:se "Samkommun"}
"foundation" {:fi "Säätiö"
:en "Foundation"
:se "Stiftelse"}
"state" {:fi "Valtio"
:en "State"
:se "Stat"}
"other" {:fi "Muu"
:en "Other"
:se "Annat"}
"unknown" {:fi "Ei tietoa"
:en "Unknown"
:se "Okänt"}})
| true |
(ns lipas.data.owners)
(def all
{"city" {:fi "Kunta"
:en "City"
:se "Kommun"}
"registered-association" {:fi "Rekisteröity yPI:NAME:<NAME>END_PI"
:en "Registered association"
:se "Registrerad förening"}
"company-ltd" {:fi "Yritys"
:en "Company ltd"
:se "Företag"}
"city-main-owner" {:fi "Kuntaenemmistöinen yritys"
:en "City main owner"
:se "Företag med kommun som majoritetsägare"}
"municipal-consortium" {:fi "Kuntayhtymä"
:en "Municipal consortium"
:se "Samkommun"}
"foundation" {:fi "Säätiö"
:en "Foundation"
:se "Stiftelse"}
"state" {:fi "Valtio"
:en "State"
:se "Stat"}
"other" {:fi "Muu"
:en "Other"
:se "Annat"}
"unknown" {:fi "Ei tietoa"
:en "Unknown"
:se "Okänt"}})
|
[
{
"context": " (+ lat-sqr (* cos lon-sqr))))))\n\n(distance {:from paris, :to bordeaux})\n\n(defmulti itinerary :transport)\n",
"end": 534,
"score": 0.9157450199127197,
"start": 529,
"tag": "NAME",
"value": "paris"
},
{
"context": " (* cos lon-sqr))))))\n\n(distance {:from paris, :to bordeaux})\n\n(defmulti itinerary :transport)\n\n(defmethod it",
"end": 548,
"score": 0.878242015838623,
"start": 540,
"tag": "NAME",
"value": "bordeaux"
},
{
"context": "nce dist, :duration duration}))\n\n(itinerary {:from paris :to bordeaux :transport :walking})\n\n(defmethod it",
"end": 816,
"score": 0.9348388910293579,
"start": 811,
"tag": "NAME",
"value": "paris"
},
{
"context": ":duration duration}))\n\n(itinerary {:from paris :to bordeaux :transport :walking})\n\n(defmethod itinerary :driv",
"end": 829,
"score": 0.8556275367736816,
"start": 821,
"tag": "NAME",
"value": "bordeaux"
},
{
"context": "nce dist, :duration duration}))\n\n(itinerary {:from paris :to bordeaux :transport :driving :vehicle :tayato",
"end": 1124,
"score": 0.8785771131515503,
"start": 1119,
"tag": "NAME",
"value": "paris"
},
{
"context": ":duration duration}))\n\n(itinerary {:from paris :to bordeaux :transport :driving :vehicle :tayato})",
"end": 1132,
"score": 0.6971680521965027,
"start": 1129,
"tag": "NAME",
"value": "bor"
}
] |
chapter-03/a3.01.distance-cost-calculator.clj
|
aadimator/clojure-workshop
| 0 |
;; Average speeds used for duration estimates — presumably km/h, matching the
;; 110.25 km-per-degree scale in `distance` (TODO confirm units).
(def walking-speed 5)
(def driving-speed 70)

;; Reference city coordinates in decimal degrees.
(def paris {:lat 48.856483 :lon 2.352413})
(def bordeaux {:lat 44.834999 :lon -0.575490})

;; Per-vehicle cost functions: each is a partial product, so calling one with a
;; distance yields rate * factor * distance.
(def vehicle-cost-fns
  {:sporche (partial * 0.12 1.5)
   :tayato (partial * 0.07 1.5)
   :sleta (partial * 0.2 0.1)})
(defn distance
  "Approximate distance between two {:lat :lon} points (decimal degrees),
  scaling the degree-space Euclidean distance by 110.25 km per degree, with the
  longitude delta weighted by cos(latitude).
  NOTE(review): `Math/cos` receives the latitude in *degrees*; a correct
  equirectangular correction needs radians — confirm whether this is intended."
  [{{lat1 :lat, lon1 :lon} :from
    {lat2 :lat, lon2 :lon} :to}]
  (let [cos (Math/cos lat1)
        lat-sqr (Math/pow (- lat2 lat1) 2)
        lon-sqr (Math/pow (- lon2 lon1) 2)]
    (* 110.25 (Math/sqrt (+ lat-sqr (* cos lon-sqr))))))

;; Sample invocation (REPL-style exercise code).
(distance {:from paris, :to bordeaux})
;; Dispatch itinerary computation on the :transport key of the request map.
(defmulti itinerary :transport)

(defmethod itinerary :walking
  [{:keys [from to]}]
  (let [dist (distance {:from from :to to})
        duration (/ dist walking-speed)
        cost 0]  ;; walking costs nothing
    {:cost cost, :distance dist, :duration duration}))

;; Sample invocation (REPL-style exercise code).
(itinerary {:from paris :to bordeaux :transport :walking})
;; Driving itinerary: duration is distance over driving speed; cost comes from
;; the selected vehicle's cost function applied to the distance.
(defmethod itinerary :driving
  [{:keys [from to vehicle]}]
  (let [dist (distance {:from from :to to})
        ;; BUG FIX: duration previously divided by `walking-speed` (copy-paste
        ;; from the :walking method); a car travels at `driving-speed`.
        duration (/ dist driving-speed)
        cost ((vehicle vehicle-cost-fns) dist)]
    {:cost cost, :distance dist, :duration duration}))

;; Sample invocation (REPL-style exercise code).
(itinerary {:from paris :to bordeaux :transport :driving :vehicle :tayato})
|
73667
|
(def walking-speed 5)
(def driving-speed 70)
(def paris {:lat 48.856483 :lon 2.352413})
(def bordeaux {:lat 44.834999 :lon -0.575490})
(def vehicle-cost-fns
{:sporche (partial * 0.12 1.5)
:tayato (partial * 0.07 1.5)
:sleta (partial * 0.2 0.1)})
(defn distance
[{{lat1 :lat, lon1 :lon} :from
{lat2 :lat, lon2 :lon} :to}]
(let [cos (Math/cos lat1)
lat-sqr (Math/pow (- lat2 lat1) 2)
lon-sqr (Math/pow (- lon2 lon1) 2)]
(* 110.25 (Math/sqrt (+ lat-sqr (* cos lon-sqr))))))
(distance {:from <NAME>, :to <NAME>})
(defmulti itinerary :transport)
(defmethod itinerary :walking
[{:keys [from to]}]
(let [dist (distance {:from from :to to})
duration (/ dist walking-speed)
cost 0]
{:cost cost, :distance dist, :duration duration}))
(itinerary {:from <NAME> :to <NAME> :transport :walking})
(defmethod itinerary :driving
[{:keys [from to vehicle]}]
(let [dist (distance {:from from :to to})
duration (/ dist walking-speed)
cost ((vehicle vehicle-cost-fns) dist)]
{:cost cost, :distance dist, :duration duration}))
(itinerary {:from <NAME> :to <NAME>deaux :transport :driving :vehicle :tayato})
| true |
(def walking-speed 5)
(def driving-speed 70)
(def paris {:lat 48.856483 :lon 2.352413})
(def bordeaux {:lat 44.834999 :lon -0.575490})
(def vehicle-cost-fns
{:sporche (partial * 0.12 1.5)
:tayato (partial * 0.07 1.5)
:sleta (partial * 0.2 0.1)})
(defn distance
[{{lat1 :lat, lon1 :lon} :from
{lat2 :lat, lon2 :lon} :to}]
(let [cos (Math/cos lat1)
lat-sqr (Math/pow (- lat2 lat1) 2)
lon-sqr (Math/pow (- lon2 lon1) 2)]
(* 110.25 (Math/sqrt (+ lat-sqr (* cos lon-sqr))))))
(distance {:from PI:NAME:<NAME>END_PI, :to PI:NAME:<NAME>END_PI})
(defmulti itinerary :transport)
(defmethod itinerary :walking
[{:keys [from to]}]
(let [dist (distance {:from from :to to})
duration (/ dist walking-speed)
cost 0]
{:cost cost, :distance dist, :duration duration}))
(itinerary {:from PI:NAME:<NAME>END_PI :to PI:NAME:<NAME>END_PI :transport :walking})
(defmethod itinerary :driving
[{:keys [from to vehicle]}]
(let [dist (distance {:from from :to to})
duration (/ dist walking-speed)
cost ((vehicle vehicle-cost-fns) dist)]
{:cost cost, :distance dist, :duration duration}))
(itinerary {:from PI:NAME:<NAME>END_PI :to PI:NAME:<NAME>END_PIdeaux :transport :driving :vehicle :tayato})
|
[
{
"context": "he [plugin manifest reference](https://github.com/metabase/metabase/wiki/Metabase-Plugin-Manifest-Reference)",
"end": 4568,
"score": 0.7738394141197205,
"start": 4560,
"tag": "USERNAME",
"value": "metabase"
},
{
"context": " default-host-details\n :password default-password-details\n :port default-port-details\n :s",
"end": 4835,
"score": 0.9984185099601746,
"start": 4811,
"tag": "PASSWORD",
"value": "default-password-details"
}
] |
c#-metabase/src/metabase/driver/common.clj
|
hanakhry/Crime_Admin
| 0 |
(ns metabase.driver.common
"Shared definitions and helper functions for use across different drivers."
(:require [clj-time.coerce :as tcoerce]
[clj-time.core :as time]
[clj-time.format :as tformat]
[clojure.tools.logging :as log]
[metabase.driver :as driver]
[metabase.driver.util :as driver.u]
[metabase.models.setting :as setting]
[metabase.query-processor.context.default :as context.default]
[metabase.query-processor.store :as qp.store]
[metabase.util :as u]
[metabase.util.i18n :refer [deferred-tru trs tru]]
[schema.core :as s])
(:import java.text.SimpleDateFormat
org.joda.time.DateTime
org.joda.time.format.DateTimeFormatter))
(def connection-error-messages
"Generic error messages that drivers should return in their implementation of `humanize-connection-error-message`."
{:cannot-connect-check-host-and-port
(str (deferred-tru "Hmm, we couldn''t connect to the database.")
" "
(deferred-tru "Make sure your host and port settings are correct"))
:ssh-tunnel-auth-fail
(str (deferred-tru "We couldn''t connect to the ssh tunnel host.")
" "
(deferred-tru "Check the username, password."))
:ssh-tunnel-connection-fail
(str (deferred-tru "We couldn''t connect to the ssh tunnel host.")
" "
(deferred-tru "Check the hostname and port."))
:database-name-incorrect
(deferred-tru "Looks like the database name is incorrect.")
:invalid-hostname
(str (deferred-tru "It looks like your host is invalid.")
" "
(deferred-tru "Please double-check it and try again."))
:password-incorrect
(deferred-tru "Looks like your password is incorrect.")
:password-required
(deferred-tru "Looks like you forgot to enter your password.")
:username-incorrect
(deferred-tru "Looks like your username is incorrect.")
:username-or-password-incorrect
(deferred-tru "Looks like the username or password is incorrect.")
:certificate-not-trusted
(deferred-tru "Server certificate not trusted - did you specify the correct SSL certificate chain?")
:requires-ssl
(deferred-tru "Server appears to require SSL - please enable SSL above")})
;; TODO - we should rename these from `default-*-details` to `default-*-connection-property`
(def default-host-details
"Map of the db host details field, useful for `connection-properties` implementations"
{:name "host"
:display-name (deferred-tru "Host")
:placeholder "localhost"})
(def default-port-details
"Map of the db port details field, useful for `connection-properties` implementations. Implementations should assoc a
`:placeholder` key."
{:name "port"
:display-name (deferred-tru "Port")
:type :integer})
(def default-user-details
"Map of the db user details field, useful for `connection-properties` implementations"
{:name "user"
:display-name (deferred-tru "Username")
:placeholder (deferred-tru "What username do you use to login to the database?")
:required true})
(def default-password-details
"Map of the db password details field, useful for `connection-properties` implementations"
{:name "password"
:display-name (deferred-tru "Password")
:type :password
:placeholder "••••••••"})
(def default-dbname-details
"Map of the db name details field, useful for `connection-properties` implementations"
{:name "dbname"
:display-name (deferred-tru "Database name")
:placeholder (deferred-tru "birds_of_the_world")
:required true})
(def default-ssl-details
"Map of the db ssl details field, useful for `connection-properties` implementations"
{:name "ssl"
:display-name (deferred-tru "Use a secure connection (SSL)?")
:type :boolean
:default false})
(def default-additional-options-details
"Map of the db `additional-options` details field, useful for `connection-properties` implementations. Should assoc a
`:placeholder` key"
{:name "additional-options"
:display-name (deferred-tru "Additional JDBC connection string options")})
(def default-options
"Default options listed above, keyed by name. These keys can be listed in the plugin manifest to specify connection
properties for drivers shipped as separate modules, e.g.:
connection-properties:
- db-name
- host
See the [plugin manifest reference](https://github.com/metabase/metabase/wiki/Metabase-Plugin-Manifest-Reference)
for more details."
{:additional-options default-additional-options-details
:dbname default-dbname-details
:host default-host-details
:password default-password-details
:port default-port-details
:ssl default-ssl-details
:user default-user-details})
;;; +----------------------------------------------------------------------------------------------------------------+
;;; | Fetching Current Timezone |
;;; +----------------------------------------------------------------------------------------------------------------+
(defprotocol ^:private ^:deprecated ParseDateTimeString
(^:private parse
^DateTime [this date-time-str]
"Parse the `date-time-str` and return a `DateTime` instance."))
(extend-protocol ParseDateTimeString
DateTimeFormatter
(parse [formatter date-time-str]
(tformat/parse formatter date-time-str)))
;; Java's SimpleDateFormat is more flexible on what it accepts for a time zone identifier. As an example, CEST is not
;; recognized by Joda's DateTimeFormatter but is recognized by Java's SimpleDateFormat. This defrecord is used to
;; dispatch parsing for SimpleDateFormat instances. Dispatching off of the SimpleDateFormat directly wouldn't be good
;; as it's not threadsafe. This will always create a new SimpleDateFormat instance and discard it after parsing the
;; date
(defrecord ^:private ^:deprecated ThreadSafeSimpleDateFormat [format-str]
ParseDateTimeString
(parse [_ date-time-str]
(let [sdf (SimpleDateFormat. format-str)
parsed-date (.parse sdf date-time-str)
joda-tz (-> sdf .getTimeZone .getID time/time-zone-for-id)]
(time/to-time-zone (tcoerce/from-date parsed-date) joda-tz))))
(defn ^:deprecated create-db-time-formatters
"Creates date formatters from `DATE-FORMAT-STR` that will preserve the offset/timezone information. Will return a
JodaTime date formatter and a core Java SimpleDateFormat. Results of this are threadsafe and can safely be def'd."
[date-format-str]
[(.withOffsetParsed ^DateTimeFormatter (tformat/formatter date-format-str))
(ThreadSafeSimpleDateFormat. date-format-str)])
(defn- ^:deprecated first-successful-parse
  "Attempt to parse `time-str` with each of `date-formatters`, returning the first successful parse. If there are no
  successful parses throws the exception that the last formatter threw."
  ^DateTime [date-formatters time-str]
  (or (some #(u/ignore-exceptions (parse % time-str)) date-formatters)
      ;; Nothing parsed above: re-run the formatters (last one first) *without*
      ;; swallowing exceptions, so the first re-parse failure propagates.
      ;; NOTE(review): `doseq` itself returns nil, so this branch relies on a
      ;; formatter throwing; if a re-parse unexpectedly succeeded the fn would
      ;; return nil rather than the parsed value.
      (doseq [formatter (reverse date-formatters)]
        (parse formatter time-str))))
(defmulti ^:deprecated current-db-time-native-query
"Return a native query that will fetch the current time (presumably as a string) used by the `current-db-time`
implementation below.
DEPRECATED — `metabase.driver/current-db-time`, the method this function provides an implementation for, is itself
deprecated. Implement `metabase.driver/db-default-timezone` instead directly."
{:arglists '([driver])}
driver/dispatch-on-initialized-driver
:hierarchy #'driver/hierarchy)
(defmulti ^:deprecated current-db-time-date-formatters
"Return JODA time date formatters to parse the current time returned by `current-db-time-native-query`. Used by
`current-db-time` implementation below. You can use `create-db-time-formatters` provided by this namespace to create
formatters for a date format string.
DEPRECATED — `metabase.driver/current-db-time`, the method this function provides an implementation for, is itself
deprecated. Implement `metabase.driver/db-default-timezone` instead directly."
{:arglists '([driver])}
driver/dispatch-on-initialized-driver
:hierarchy #'driver/hierarchy)
(defn ^:deprecated current-db-time
"Implementation of `driver/current-db-time` using the `current-db-time-native-query` and
`current-db-time-date-formatters` multimethods defined above. Execute a native query for the current time, and parse
the results using the date formatters, preserving the timezone. To use this implementation, you must implement the
aforementioned multimethods; no default implementation is provided.
DEPRECATED — `metabase.driver/current-db-time`, the method this function provides an implementation for, is itself
deprecated. Implement `metabase.driver/db-default-timezone` instead directly."
^org.joda.time.DateTime [driver database]
{:pre [(map? database)]}
(driver/with-driver driver
(let [native-query (current-db-time-native-query driver)
date-formatters (current-db-time-date-formatters driver)
settings (when-let [report-tz (driver.u/report-timezone-if-supported driver)]
{:settings {:report-timezone report-tz}})
time-str (try
;; need to initialize the store since we're calling `execute-reducible-query` directly
;; instead of going thru normal QP pipeline
(qp.store/with-store
(qp.store/fetch-and-store-database! (u/the-id database))
(let [query {:database (u/the-id database), :native {:query native-query}}
reduce (fn [metadata reducible-rows]
(transduce
identity
(fn
([] nil)
([row] (first row))
([_ row] (reduced row)))
reducible-rows))]
(driver/execute-reducible-query driver query (context.default/default-context) reduce)))
(catch Exception e
(throw
(Exception.
(format "Error querying database '%s' for current time" (:name database)) e))))]
(try
(when time-str
(first-successful-parse date-formatters time-str))
(catch Exception e
(throw
(Exception.
(str
(tru "Unable to parse date string ''{0}'' for database engine ''{1}''"
time-str (-> database :engine name))) e)))))))
;;; +----------------------------------------------------------------------------------------------------------------+
;;; | Class -> Base Type |
;;; +----------------------------------------------------------------------------------------------------------------+
(defn class->base-type
  "Return the `Field.base_type` that corresponds to a given class returned by the DB.
   This is used to infer the types of results that come back from native queries.

   Matching uses `isa?`, so subclasses match their listed ancestor; order matters —
   more specific classes (e.g. `Integer`) appear before general ones (`Number`).
   Unrecognized classes log a warning and fall back to `:type/*`."
  [klass]
  (condp #(isa? %2 %1) klass
    Boolean                        :type/Boolean
    Double                         :type/Float
    Float                          :type/Float
    Integer                        :type/Integer
    Long                           :type/Integer
    java.math.BigDecimal           :type/Decimal
    java.math.BigInteger           :type/BigInteger
    Number                         :type/Number
    String                         :type/Text
    ;; java.sql types and Joda-Time types should be considered DEPRECATED
    java.sql.Date                  :type/Date
    java.sql.Timestamp             :type/DateTime
    java.util.Date                 :type/Date
    DateTime                       :type/DateTime
    java.util.UUID                 :type/UUID
    clojure.lang.IPersistentMap    :type/Dictionary
    clojure.lang.IPersistentVector :type/Array
    java.time.LocalDate            :type/Date
    java.time.LocalTime            :type/Time
    java.time.LocalDateTime        :type/DateTime
    ;; `OffsetTime` and `OffsetDateTime` should be mapped to one of `type/TimeWithLocalTZ`/`type/TimeWithZoneOffset`
    ;; and `type/DateTimeWithLocalTZ`/`type/DateTimeWithZoneOffset` respectively. We can't really tell how they're
    ;; stored in the DB based on class alone, so drivers should return more specific types where possible. See
    ;; discussion in the `metabase.types` namespace.
    java.time.OffsetTime           :type/TimeWithTZ
    java.time.OffsetDateTime       :type/DateTimeWithTZ
    java.time.ZonedDateTime        :type/DateTimeWithZoneID
    java.time.Instant              :type/Instant
    ;; TODO - this should go in the Postgres driver implementation of this method rather than here
    org.postgresql.util.PGobject   :type/*
    ;; all-NULL columns in DBs like Mongo w/o explicit types
    nil                            :type/*
    (do
      (log/warn (trs "Don''t know how to map class ''{0}'' to a Field base_type, falling back to :type/*." klass))
      :type/*)))
(def ^:private column-info-sample-size
"Number of result rows to sample when when determining base type."
100)
(defn values->base-type
  "Transducer that given a sequence of `values`, returns the most common base type.

  Samples the first `column-info-sample-size` non-nil values, tallies class
  frequencies in a mutable HashMap (reducing arities), then on completion maps
  the most frequent class to a base type via `class->base-type`."
  []
  ((comp (filter some?) (take column-info-sample-size) (map class))
   (fn
     ([]
      ;; seed with nil->0 so `max-key` always has an entry even when no values
      ;; are seen; `class->base-type` maps nil to :type/*
      (doto (java.util.HashMap.)
        (.put nil 0))) ; fallback to keep `max-key` happy if no values
     ([^java.util.HashMap freqs, klass]
      (.put freqs klass (inc (.getOrDefault freqs klass 0)))
      freqs)
     ([freqs]
      ;; completion: pick the class with the highest count
      (->> freqs
           (apply max-key val)
           key
           class->base-type)))))
(def ^:private days-of-week
[:monday :tuesday :wednesday :thursday :friday :saturday :sunday])
(s/defn start-of-week-offset :- s/Int
  "Return the offset for start of week to have the week start on `setting/start-of-week` given `driver`."
  [driver]
  (let [day-index            (fn [day]
                               (.indexOf ^clojure.lang.PersistentVector days-of-week day))
        db-start-of-week     (day-index (driver/db-start-of-week driver))
        target-start-of-week (day-index (setting/get-keyword :start-of-week))
        delta                (int (- target-start-of-week db-start-of-week))]
    ;; zero delta -> 0 (signum is 0); otherwise wrap the difference into the
    ;; complementary offset within a 7-day week, keeping its sign.
    (* (Integer/signum delta)
       (- 7 (Math/abs delta)))))
|
72345
|
(ns metabase.driver.common
"Shared definitions and helper functions for use across different drivers."
(:require [clj-time.coerce :as tcoerce]
[clj-time.core :as time]
[clj-time.format :as tformat]
[clojure.tools.logging :as log]
[metabase.driver :as driver]
[metabase.driver.util :as driver.u]
[metabase.models.setting :as setting]
[metabase.query-processor.context.default :as context.default]
[metabase.query-processor.store :as qp.store]
[metabase.util :as u]
[metabase.util.i18n :refer [deferred-tru trs tru]]
[schema.core :as s])
(:import java.text.SimpleDateFormat
org.joda.time.DateTime
org.joda.time.format.DateTimeFormatter))
(def connection-error-messages
"Generic error messages that drivers should return in their implementation of `humanize-connection-error-message`."
{:cannot-connect-check-host-and-port
(str (deferred-tru "Hmm, we couldn''t connect to the database.")
" "
(deferred-tru "Make sure your host and port settings are correct"))
:ssh-tunnel-auth-fail
(str (deferred-tru "We couldn''t connect to the ssh tunnel host.")
" "
(deferred-tru "Check the username, password."))
:ssh-tunnel-connection-fail
(str (deferred-tru "We couldn''t connect to the ssh tunnel host.")
" "
(deferred-tru "Check the hostname and port."))
:database-name-incorrect
(deferred-tru "Looks like the database name is incorrect.")
:invalid-hostname
(str (deferred-tru "It looks like your host is invalid.")
" "
(deferred-tru "Please double-check it and try again."))
:password-incorrect
(deferred-tru "Looks like your password is incorrect.")
:password-required
(deferred-tru "Looks like you forgot to enter your password.")
:username-incorrect
(deferred-tru "Looks like your username is incorrect.")
:username-or-password-incorrect
(deferred-tru "Looks like the username or password is incorrect.")
:certificate-not-trusted
(deferred-tru "Server certificate not trusted - did you specify the correct SSL certificate chain?")
:requires-ssl
(deferred-tru "Server appears to require SSL - please enable SSL above")})
;; TODO - we should rename these from `default-*-details` to `default-*-connection-property`
(def default-host-details
"Map of the db host details field, useful for `connection-properties` implementations"
{:name "host"
:display-name (deferred-tru "Host")
:placeholder "localhost"})
(def default-port-details
"Map of the db port details field, useful for `connection-properties` implementations. Implementations should assoc a
`:placeholder` key."
{:name "port"
:display-name (deferred-tru "Port")
:type :integer})
(def default-user-details
"Map of the db user details field, useful for `connection-properties` implementations"
{:name "user"
:display-name (deferred-tru "Username")
:placeholder (deferred-tru "What username do you use to login to the database?")
:required true})
(def default-password-details
"Map of the db password details field, useful for `connection-properties` implementations"
{:name "password"
:display-name (deferred-tru "Password")
:type :password
:placeholder "••••••••"})
(def default-dbname-details
"Map of the db name details field, useful for `connection-properties` implementations"
{:name "dbname"
:display-name (deferred-tru "Database name")
:placeholder (deferred-tru "birds_of_the_world")
:required true})
(def default-ssl-details
"Map of the db ssl details field, useful for `connection-properties` implementations"
{:name "ssl"
:display-name (deferred-tru "Use a secure connection (SSL)?")
:type :boolean
:default false})
(def default-additional-options-details
"Map of the db `additional-options` details field, useful for `connection-properties` implementations. Should assoc a
`:placeholder` key"
{:name "additional-options"
:display-name (deferred-tru "Additional JDBC connection string options")})
(def default-options
"Default options listed above, keyed by name. These keys can be listed in the plugin manifest to specify connection
properties for drivers shipped as separate modules, e.g.:
connection-properties:
- db-name
- host
See the [plugin manifest reference](https://github.com/metabase/metabase/wiki/Metabase-Plugin-Manifest-Reference)
for more details."
{:additional-options default-additional-options-details
:dbname default-dbname-details
:host default-host-details
:password <PASSWORD>
:port default-port-details
:ssl default-ssl-details
:user default-user-details})
;;; +----------------------------------------------------------------------------------------------------------------+
;;; | Fetching Current Timezone |
;;; +----------------------------------------------------------------------------------------------------------------+
(defprotocol ^:private ^:deprecated ParseDateTimeString
(^:private parse
^DateTime [this date-time-str]
"Parse the `date-time-str` and return a `DateTime` instance."))
(extend-protocol ParseDateTimeString
DateTimeFormatter
(parse [formatter date-time-str]
(tformat/parse formatter date-time-str)))
;; Java's SimpleDateFormat is more flexible on what it accepts for a time zone identifier. As an example, CEST is not
;; recognized by Joda's DateTimeFormatter but is recognized by Java's SimpleDateFormat. This defrecord is used to
;; dispatch parsing for SimpleDateFormat instances. Dispatching off of the SimpleDateFormat directly wouldn't be good
;; as it's not threadsafe. This will always create a new SimpleDateFormat instance and discard it after parsing the
;; date
(defrecord ^:private ^:deprecated ThreadSafeSimpleDateFormat [format-str]
ParseDateTimeString
(parse [_ date-time-str]
(let [sdf (SimpleDateFormat. format-str)
parsed-date (.parse sdf date-time-str)
joda-tz (-> sdf .getTimeZone .getID time/time-zone-for-id)]
(time/to-time-zone (tcoerce/from-date parsed-date) joda-tz))))
(defn ^:deprecated create-db-time-formatters
"Creates date formatters from `DATE-FORMAT-STR` that will preserve the offset/timezone information. Will return a
JodaTime date formatter and a core Java SimpleDateFormat. Results of this are threadsafe and can safely be def'd."
[date-format-str]
[(.withOffsetParsed ^DateTimeFormatter (tformat/formatter date-format-str))
(ThreadSafeSimpleDateFormat. date-format-str)])
(defn- ^:deprecated first-successful-parse
"Attempt to parse `time-str` with each of `date-formatters`, returning the first successful parse. If there are no
successful parses throws the exception that the last formatter threw."
^DateTime [date-formatters time-str]
(or (some #(u/ignore-exceptions (parse % time-str)) date-formatters)
(doseq [formatter (reverse date-formatters)]
(parse formatter time-str))))
(defmulti ^:deprecated current-db-time-native-query
"Return a native query that will fetch the current time (presumably as a string) used by the `current-db-time`
implementation below.
DEPRECATED — `metabase.driver/current-db-time`, the method this function provides an implementation for, is itself
deprecated. Implement `metabase.driver/db-default-timezone` instead directly."
{:arglists '([driver])}
driver/dispatch-on-initialized-driver
:hierarchy #'driver/hierarchy)
(defmulti ^:deprecated current-db-time-date-formatters
"Return JODA time date formatters to parse the current time returned by `current-db-time-native-query`. Used by
`current-db-time` implementation below. You can use `create-db-time-formatters` provided by this namespace to create
formatters for a date format string.
DEPRECATED — `metabase.driver/current-db-time`, the method this function provides an implementation for, is itself
deprecated. Implement `metabase.driver/db-default-timezone` instead directly."
{:arglists '([driver])}
driver/dispatch-on-initialized-driver
:hierarchy #'driver/hierarchy)
(defn ^:deprecated current-db-time
"Implementation of `driver/current-db-time` using the `current-db-time-native-query` and
`current-db-time-date-formatters` multimethods defined above. Execute a native query for the current time, and parse
the results using the date formatters, preserving the timezone. To use this implementation, you must implement the
aforementioned multimethods; no default implementation is provided.
DEPRECATED — `metabase.driver/current-db-time`, the method this function provides an implementation for, is itself
deprecated. Implement `metabase.driver/db-default-timezone` instead directly."
^org.joda.time.DateTime [driver database]
{:pre [(map? database)]}
(driver/with-driver driver
(let [native-query (current-db-time-native-query driver)
date-formatters (current-db-time-date-formatters driver)
settings (when-let [report-tz (driver.u/report-timezone-if-supported driver)]
{:settings {:report-timezone report-tz}})
time-str (try
;; need to initialize the store since we're calling `execute-reducible-query` directly
;; instead of going thru normal QP pipeline
(qp.store/with-store
(qp.store/fetch-and-store-database! (u/the-id database))
(let [query {:database (u/the-id database), :native {:query native-query}}
reduce (fn [metadata reducible-rows]
(transduce
identity
(fn
([] nil)
([row] (first row))
([_ row] (reduced row)))
reducible-rows))]
(driver/execute-reducible-query driver query (context.default/default-context) reduce)))
(catch Exception e
(throw
(Exception.
(format "Error querying database '%s' for current time" (:name database)) e))))]
(try
(when time-str
(first-successful-parse date-formatters time-str))
(catch Exception e
(throw
(Exception.
(str
(tru "Unable to parse date string ''{0}'' for database engine ''{1}''"
time-str (-> database :engine name))) e)))))))
;;; +----------------------------------------------------------------------------------------------------------------+
;;; | Class -> Base Type |
;;; +----------------------------------------------------------------------------------------------------------------+
(defn class->base-type
"Return the `Field.base_type` that corresponds to a given class returned by the DB.
This is used to infer the types of results that come back from native queries."
[klass]
(condp #(isa? %2 %1) klass
Boolean :type/Boolean
Double :type/Float
Float :type/Float
Integer :type/Integer
Long :type/Integer
java.math.BigDecimal :type/Decimal
java.math.BigInteger :type/BigInteger
Number :type/Number
String :type/Text
;; java.sql types and Joda-Time types should be considered DEPRECATED
java.sql.Date :type/Date
java.sql.Timestamp :type/DateTime
java.util.Date :type/Date
DateTime :type/DateTime
java.util.UUID :type/UUID
clojure.lang.IPersistentMap :type/Dictionary
clojure.lang.IPersistentVector :type/Array
java.time.LocalDate :type/Date
java.time.LocalTime :type/Time
java.time.LocalDateTime :type/DateTime
;; `OffsetTime` and `OffsetDateTime` should be mapped to one of `type/TimeWithLocalTZ`/`type/TimeWithZoneOffset`
;; and `type/DateTimeWithLocalTZ`/`type/DateTimeWithZoneOffset` respectively. We can't really tell how they're
;; stored in the DB based on class alone, so drivers should return more specific types where possible. See
;; discussion in the `metabase.types` namespace.
java.time.OffsetTime :type/TimeWithTZ
java.time.OffsetDateTime :type/DateTimeWithTZ
java.time.ZonedDateTime :type/DateTimeWithZoneID
java.time.Instant :type/Instant
;; TODO - this should go in the Postgres driver implementation of this method rather than here
org.postgresql.util.PGobject :type/*
;; all-NULL columns in DBs like Mongo w/o explicit types
nil :type/*
(do
(log/warn (trs "Don''t know how to map class ''{0}'' to a Field base_type, falling back to :type/*." klass))
:type/*)))
(def ^:private column-info-sample-size
"Number of result rows to sample when when determining base type."
100)
(defn values->base-type
"Transducer that given a sequence of `values`, returns the most common base type."
[]
((comp (filter some?) (take column-info-sample-size) (map class))
(fn
([]
(doto (java.util.HashMap.)
(.put nil 0))) ; fallback to keep `max-key` happy if no values
([^java.util.HashMap freqs, klass]
(.put freqs klass (inc (.getOrDefault freqs klass 0)))
freqs)
([freqs]
(->> freqs
(apply max-key val)
key
class->base-type)))))
(def ^:private days-of-week
[:monday :tuesday :wednesday :thursday :friday :saturday :sunday])
(s/defn start-of-week-offset :- s/Int
"Return the offset for start of week to have the week start on `setting/start-of-week` given `driver`."
[driver]
(let [db-start-of-week (.indexOf ^clojure.lang.PersistentVector days-of-week (driver/db-start-of-week driver))
target-start-of-week (.indexOf ^clojure.lang.PersistentVector days-of-week (setting/get-keyword :start-of-week))
delta (int (- target-start-of-week db-start-of-week))]
(* (Integer/signum delta)
(- 7 (Math/abs delta)))))
| true |
(ns metabase.driver.common
"Shared definitions and helper functions for use across different drivers."
(:require [clj-time.coerce :as tcoerce]
[clj-time.core :as time]
[clj-time.format :as tformat]
[clojure.tools.logging :as log]
[metabase.driver :as driver]
[metabase.driver.util :as driver.u]
[metabase.models.setting :as setting]
[metabase.query-processor.context.default :as context.default]
[metabase.query-processor.store :as qp.store]
[metabase.util :as u]
[metabase.util.i18n :refer [deferred-tru trs tru]]
[schema.core :as s])
(:import java.text.SimpleDateFormat
org.joda.time.DateTime
org.joda.time.format.DateTimeFormatter))
(def connection-error-messages
"Generic error messages that drivers should return in their implementation of `humanize-connection-error-message`."
{:cannot-connect-check-host-and-port
(str (deferred-tru "Hmm, we couldn''t connect to the database.")
" "
(deferred-tru "Make sure your host and port settings are correct"))
:ssh-tunnel-auth-fail
(str (deferred-tru "We couldn''t connect to the ssh tunnel host.")
" "
(deferred-tru "Check the username, password."))
:ssh-tunnel-connection-fail
(str (deferred-tru "We couldn''t connect to the ssh tunnel host.")
" "
(deferred-tru "Check the hostname and port."))
:database-name-incorrect
(deferred-tru "Looks like the database name is incorrect.")
:invalid-hostname
(str (deferred-tru "It looks like your host is invalid.")
" "
(deferred-tru "Please double-check it and try again."))
:password-incorrect
(deferred-tru "Looks like your password is incorrect.")
:password-required
(deferred-tru "Looks like you forgot to enter your password.")
:username-incorrect
(deferred-tru "Looks like your username is incorrect.")
:username-or-password-incorrect
(deferred-tru "Looks like the username or password is incorrect.")
:certificate-not-trusted
(deferred-tru "Server certificate not trusted - did you specify the correct SSL certificate chain?")
:requires-ssl
(deferred-tru "Server appears to require SSL - please enable SSL above")})
;; TODO - we should rename these from `default-*-details` to `default-*-connection-property`
(def default-host-details
"Map of the db host details field, useful for `connection-properties` implementations"
{:name "host"
:display-name (deferred-tru "Host")
:placeholder "localhost"})
(def default-port-details
"Map of the db port details field, useful for `connection-properties` implementations. Implementations should assoc a
`:placeholder` key."
{:name "port"
:display-name (deferred-tru "Port")
:type :integer})
(def default-user-details
"Map of the db user details field, useful for `connection-properties` implementations"
{:name "user"
:display-name (deferred-tru "Username")
:placeholder (deferred-tru "What username do you use to login to the database?")
:required true})
(def default-password-details
"Map of the db password details field, useful for `connection-properties` implementations"
{:name "password"
:display-name (deferred-tru "Password")
:type :password
:placeholder "••••••••"})
(def default-dbname-details
"Map of the db name details field, useful for `connection-properties` implementations"
{:name "dbname"
:display-name (deferred-tru "Database name")
:placeholder (deferred-tru "birds_of_the_world")
:required true})
(def default-ssl-details
"Map of the db ssl details field, useful for `connection-properties` implementations"
{:name "ssl"
:display-name (deferred-tru "Use a secure connection (SSL)?")
:type :boolean
:default false})
(def default-additional-options-details
"Map of the db `additional-options` details field, useful for `connection-properties` implementations. Should assoc a
`:placeholder` key"
{:name "additional-options"
:display-name (deferred-tru "Additional JDBC connection string options")})
(def default-options
"Default options listed above, keyed by name. These keys can be listed in the plugin manifest to specify connection
properties for drivers shipped as separate modules, e.g.:
connection-properties:
- db-name
- host
See the [plugin manifest reference](https://github.com/metabase/metabase/wiki/Metabase-Plugin-Manifest-Reference)
for more details."
{:additional-options default-additional-options-details
:dbname default-dbname-details
:host default-host-details
:password PI:PASSWORD:<PASSWORD>END_PI
:port default-port-details
:ssl default-ssl-details
:user default-user-details})
;;; +----------------------------------------------------------------------------------------------------------------+
;;; | Fetching Current Timezone |
;;; +----------------------------------------------------------------------------------------------------------------+
(defprotocol ^:private ^:deprecated ParseDateTimeString
(^:private parse
^DateTime [this date-time-str]
"Parse the `date-time-str` and return a `DateTime` instance."))
(extend-protocol ParseDateTimeString
DateTimeFormatter
(parse [formatter date-time-str]
(tformat/parse formatter date-time-str)))
;; Java's SimpleDateFormat is more flexible on what it accepts for a time zone identifier. As an example, CEST is not
;; recognized by Joda's DateTimeFormatter but is recognized by Java's SimpleDateFormat. This defrecord is used to
;; dispatch parsing for SimpleDateFormat instances. Dispatching off of the SimpleDateFormat directly wouldn't be good
;; as it's not threadsafe. This will always create a new SimpleDateFormat instance and discard it after parsing the
;; date
(defrecord ^:private ^:deprecated ThreadSafeSimpleDateFormat [format-str]
ParseDateTimeString
(parse [_ date-time-str]
(let [sdf (SimpleDateFormat. format-str)
parsed-date (.parse sdf date-time-str)
joda-tz (-> sdf .getTimeZone .getID time/time-zone-for-id)]
(time/to-time-zone (tcoerce/from-date parsed-date) joda-tz))))
(defn ^:deprecated create-db-time-formatters
"Creates date formatters from `DATE-FORMAT-STR` that will preserve the offset/timezone information. Will return a
JodaTime date formatter and a core Java SimpleDateFormat. Results of this are threadsafe and can safely be def'd."
[date-format-str]
[(.withOffsetParsed ^DateTimeFormatter (tformat/formatter date-format-str))
(ThreadSafeSimpleDateFormat. date-format-str)])
(defn- ^:deprecated first-successful-parse
"Attempt to parse `time-str` with each of `date-formatters`, returning the first successful parse. If there are no
successful parses throws the exception that the last formatter threw."
^DateTime [date-formatters time-str]
(or (some #(u/ignore-exceptions (parse % time-str)) date-formatters)
(doseq [formatter (reverse date-formatters)]
(parse formatter time-str))))
(defmulti ^:deprecated current-db-time-native-query
"Return a native query that will fetch the current time (presumably as a string) used by the `current-db-time`
implementation below.
DEPRECATED — `metabase.driver/current-db-time`, the method this function provides an implementation for, is itself
deprecated. Implement `metabase.driver/db-default-timezone` instead directly."
{:arglists '([driver])}
driver/dispatch-on-initialized-driver
:hierarchy #'driver/hierarchy)
(defmulti ^:deprecated current-db-time-date-formatters
"Return JODA time date formatters to parse the current time returned by `current-db-time-native-query`. Used by
`current-db-time` implementation below. You can use `create-db-time-formatters` provided by this namespace to create
formatters for a date format string.
DEPRECATED — `metabase.driver/current-db-time`, the method this function provides an implementation for, is itself
deprecated. Implement `metabase.driver/db-default-timezone` instead directly."
{:arglists '([driver])}
driver/dispatch-on-initialized-driver
:hierarchy #'driver/hierarchy)
(defn ^:deprecated current-db-time
"Implementation of `driver/current-db-time` using the `current-db-time-native-query` and
`current-db-time-date-formatters` multimethods defined above. Execute a native query for the current time, and parse
the results using the date formatters, preserving the timezone. To use this implementation, you must implement the
aforementioned multimethods; no default implementation is provided.
DEPRECATED — `metabase.driver/current-db-time`, the method this function provides an implementation for, is itself
deprecated. Implement `metabase.driver/db-default-timezone` instead directly."
^org.joda.time.DateTime [driver database]
{:pre [(map? database)]}
(driver/with-driver driver
(let [native-query (current-db-time-native-query driver)
date-formatters (current-db-time-date-formatters driver)
settings (when-let [report-tz (driver.u/report-timezone-if-supported driver)]
{:settings {:report-timezone report-tz}})
time-str (try
;; need to initialize the store since we're calling `execute-reducible-query` directly
;; instead of going thru normal QP pipeline
(qp.store/with-store
(qp.store/fetch-and-store-database! (u/the-id database))
(let [query {:database (u/the-id database), :native {:query native-query}}
reduce (fn [metadata reducible-rows]
(transduce
identity
(fn
([] nil)
([row] (first row))
([_ row] (reduced row)))
reducible-rows))]
(driver/execute-reducible-query driver query (context.default/default-context) reduce)))
(catch Exception e
(throw
(Exception.
(format "Error querying database '%s' for current time" (:name database)) e))))]
(try
(when time-str
(first-successful-parse date-formatters time-str))
(catch Exception e
(throw
(Exception.
(str
(tru "Unable to parse date string ''{0}'' for database engine ''{1}''"
time-str (-> database :engine name))) e)))))))
;;; +----------------------------------------------------------------------------------------------------------------+
;;; | Class -> Base Type |
;;; +----------------------------------------------------------------------------------------------------------------+
(defn class->base-type
"Return the `Field.base_type` that corresponds to a given class returned by the DB.
This is used to infer the types of results that come back from native queries."
[klass]
(condp #(isa? %2 %1) klass
Boolean :type/Boolean
Double :type/Float
Float :type/Float
Integer :type/Integer
Long :type/Integer
java.math.BigDecimal :type/Decimal
java.math.BigInteger :type/BigInteger
Number :type/Number
String :type/Text
;; java.sql types and Joda-Time types should be considered DEPRECATED
java.sql.Date :type/Date
java.sql.Timestamp :type/DateTime
java.util.Date :type/Date
DateTime :type/DateTime
java.util.UUID :type/UUID
clojure.lang.IPersistentMap :type/Dictionary
clojure.lang.IPersistentVector :type/Array
java.time.LocalDate :type/Date
java.time.LocalTime :type/Time
java.time.LocalDateTime :type/DateTime
;; `OffsetTime` and `OffsetDateTime` should be mapped to one of `type/TimeWithLocalTZ`/`type/TimeWithZoneOffset`
;; and `type/DateTimeWithLocalTZ`/`type/DateTimeWithZoneOffset` respectively. We can't really tell how they're
;; stored in the DB based on class alone, so drivers should return more specific types where possible. See
;; discussion in the `metabase.types` namespace.
java.time.OffsetTime :type/TimeWithTZ
java.time.OffsetDateTime :type/DateTimeWithTZ
java.time.ZonedDateTime :type/DateTimeWithZoneID
java.time.Instant :type/Instant
;; TODO - this should go in the Postgres driver implementation of this method rather than here
org.postgresql.util.PGobject :type/*
;; all-NULL columns in DBs like Mongo w/o explicit types
nil :type/*
(do
(log/warn (trs "Don''t know how to map class ''{0}'' to a Field base_type, falling back to :type/*." klass))
:type/*)))
(def ^:private column-info-sample-size
"Number of result rows to sample when when determining base type."
100)
(defn values->base-type
"Transducer that given a sequence of `values`, returns the most common base type."
[]
((comp (filter some?) (take column-info-sample-size) (map class))
(fn
([]
(doto (java.util.HashMap.)
(.put nil 0))) ; fallback to keep `max-key` happy if no values
([^java.util.HashMap freqs, klass]
(.put freqs klass (inc (.getOrDefault freqs klass 0)))
freqs)
([freqs]
(->> freqs
(apply max-key val)
key
class->base-type)))))
(def ^:private days-of-week
[:monday :tuesday :wednesday :thursday :friday :saturday :sunday])
(s/defn start-of-week-offset :- s/Int
"Return the offset for start of week to have the week start on `setting/start-of-week` given `driver`."
[driver]
(let [db-start-of-week (.indexOf ^clojure.lang.PersistentVector days-of-week (driver/db-start-of-week driver))
target-start-of-week (.indexOf ^clojure.lang.PersistentVector days-of-week (setting/get-keyword :start-of-week))
delta (int (- target-start-of-week db-start-of-week))]
(* (Integer/signum delta)
(- 7 (Math/abs delta)))))
|
[
{
"context": "ffects beyond the scope of the CVM.\"\n\n {:author \"Adam Helinski\"}\n\n (:import (convex.core.data AVector)\n ",
"end": 279,
"score": 0.9962301254272461,
"start": 266,
"tag": "NAME",
"value": "Adam Helinski"
}
] |
project/run/src/clj/main/convex/run/sreq.clj
|
rosejn/convex.cljc
| 30 |
(ns convex.run.sreq
"Implementation of requests interpreted by the runner between transactions.
A reqest is merely a CVX vector following some particular convention that the
runner follows for producing effects beyond the scope of the CVM."
{:author "Adam Helinski"}
(:import (convex.core.data AVector)
(convex.core.data.prim CVMLong)
(convex.core.lang Context))
(:require [convex.cell :as $.cell]
[convex.cvm :as $.cvm]
[convex.read :as $.read]
[convex.run.ctx :as $.run.ctx]
[convex.run.err :as $.run.err]
[convex.run.exec :as $.run.exec]
[convex.run.kw :as $.run.kw]
[convex.run.stream :as $.run.stream]
[convex.run.sym :as $.run.sym]
[criterium.core :as criterium]))
(set! *warn-on-reflection*
true)
;;;;;;;;;; Helpers
(defn- -stream
;; Given a request, returns the stream values it contains as a Java long.
[^AVector tuple]
(.longValue ^CVMLong (.get tuple
2)))
;;;;;;;;;; Setup
(defmethod $.run.exec/sreq
nil
;; No request, simply finalizes a regular transactions.
[env result]
($.run.ctx/def-result env
result))
(defmethod $.run.exec/sreq
:unknown
;; Unknown request, consided as failure.
[env tuple]
($.run.exec/fail env
($.run.err/sreq ($.cell/code-std* :ARGUMENT)
($.cell/string "Unsupported special transaction")
tuple)))
;;;;;;;;;; Code
(defmethod $.run.exec/sreq
$.run.kw/code-read+
;; Reads the given string and parses it to a list of forms.
;; TODO. Improve error reporting.
[env ^AVector tuple]
(try
($.run.ctx/def-result env
(-> (.get tuple
2)
str
$.read/string+))
(catch Throwable _err
($.run.exec/fail env
($.run.err/sreq ($.cell/code-std* :ARGUMENT)
($.cell/string "Unable to read source")
tuple)))))
;;;;;;;;;; File
(defmethod $.run.exec/sreq
$.run.kw/file-in
;; Opens a file for reading.
[env ^AVector tuple]
($.run.stream/file-in env
(str (.get tuple
2))))
(defmethod $.run.exec/sreq
$.run.kw/file-out
;; Opens a file for writing.
[env ^AVector tuple]
($.run.stream/file-out env
(str (.get tuple
2))))
;;;;;;;;;; Logging
(defmethod $.run.exec/sreq
$.run.kw/log-clear
;; Clears the CVM log.
[env _tuple]
(let [ctx (env :convex.run/ctx)
ctx-2 ($.cvm/ctx {:convex.cvm/address ($.cvm/address ctx)
:convex.cvm/state ($.cvm/state ctx)})]
(-> env
(assoc :convex.run/ctx
ctx-2)
($.run.ctx/def-result ($.cvm/log ctx-2)))))
(defmethod $.run.exec/sreq
$.run.kw/log-get
;; Interns the current state of the CVM log under `$/*result*`.
[env _tuple]
($.run.ctx/def-result env
($.cvm/log (env :convex.run/ctx))))
;;;;;;;;;; Performance
(defmethod $.run.exec/sreq
$.run.kw/perf-bench
;; Benchmarks a transaction using Criterium.
[env ^AVector tuple]
(let [ctx ($.cvm/fork (env :convex.run/ctx))
cell (.get tuple
2)
stat+ (criterium/benchmark* (fn []
(.query ^Context ctx
cell))
{})]
($.run.ctx/def-result env
($.cell/map {($.cell/keyword "mean") ($.cell/double (first (stat+ :mean)))
($.cell/keyword "stddev") ($.cell/double (Math/sqrt ^double (first (stat+ :variance))))}))))
(defmethod $.run.exec/sreq
$.run.kw/perf-track
;; Tracks juice consumption of the given transaction.
[env ^AVector tuple]
($.run.exec/trx-track env
(.get tuple
2)))
;;;;;;;;;; Process
(defmethod $.run.exec/sreq
$.run.kw/process-exit
;; Exits process with the user given status code.
[_env ^AVector tuple]
(let [status (.longValue ^CVMLong (.get tuple
2))]
(if (= (System/getenv "CONVEX_DEV")
"true")
(throw (ex-info "Throw instead of exit since dev mode"
{::status status}))
(System/exit status))))
(defmethod $.run.exec/sreq
$.run.kw/process-env
;; Interns under `$/*result*` the process environment map or a single requested variable.
[env ^AVector tuple]
($.run.ctx/def-result env
(if-some [env-var (.get tuple
2)]
(some-> (System/getenv (str env-var))
$.cell/string)
($.cell/map (map (fn [[k v]]
[($.cell/string k)
($.cell/string v)])
(System/getenv))))))
;;;;;;;;;; Streams
(defmethod $.run.exec/sreq
$.run.kw/stream-close
;; Closes the given stream.
[env tuple]
($.run.stream/close env
(-stream tuple)))
(defmethod $.run.exec/sreq
$.run.kw/stream-flush
;; Flushes the given stream.
[env ^AVector tuple]
($.run.stream/flush env
(-stream tuple)))
(defmethod $.run.exec/sreq
$.run.kw/stream-in
;; Reads a single cell from the given stream.
[env tuple]
($.run.stream/in env
(-stream tuple)))
(defmethod $.run.exec/sreq
$.run.kw/stream-in+
;; Reads all available cells from the given stream.
[env tuple]
($.run.stream/in+ env
(-stream tuple)))
(defmethod $.run.exec/sreq
$.run.kw/stream-line+
;; Reads line from the given stream and extracts all available cells.
[env tuple]
($.run.stream/line+ env
(-stream tuple)))
(defmethod $.run.exec/sreq
$.run.kw/stream-out
;; Writes a cell to the given stream.
[env ^AVector tuple]
($.run.stream/out env
(-stream tuple)
(.get tuple
3)))
(defmethod $.run.exec/sreq
;; Writes a cell to the given stream, appends a new line, and flushes everything.
$.run.kw/stream-out!
[env ^AVector tuple]
($.run.stream/out! env
(-stream tuple)
(.get tuple
3)))
;;;;;;;;;; Time
(defmethod $.run.exec/sreq
$.run.kw/time-advance
;; Advances the timestamp.
[env ^AVector tuple]
(let [^CVMLong interval (.get tuple
2)]
(-> env
(update :convex.run/ctx
(fn [ctx]
($.cvm/time-advance ctx
(.longValue interval))))
($.run.ctx/def-result interval))))
(defmethod $.run.exec/sreq
$.run.kw/time-pop
;; Pops the last context saved with `$.time/push`.
[env ^AVector tuple]
(let [stack (env :convex.run/state-stack)]
(if-some [ctx-restore (peek stack)]
(-> env
(assoc :convex.run/state-stack (pop stack)
:convex.run/ctx ctx-restore)
($.run.ctx/def-trx+ ($.cell/list [(.get tuple
2)])))
($.run.exec/fail env
($.run.err/sreq ($.cell/code-std* :STATE)
($.cell/string "No state to pop")
tuple)))))
(defmethod $.run.exec/sreq
$.run.kw/time-push
;; Saves a fork of the current context which can later be restored using `$.time/pop`.
[env _tuple]
(update env
:convex.run/state-stack
(fnil conj
'())
(-> (env :convex.run/ctx)
$.cvm/fork
($.cvm/def $.run.ctx/addr-$-trx
{$.run.sym/list* nil}))))
|
109971
|
(ns convex.run.sreq
"Implementation of requests interpreted by the runner between transactions.
A reqest is merely a CVX vector following some particular convention that the
runner follows for producing effects beyond the scope of the CVM."
{:author "<NAME>"}
(:import (convex.core.data AVector)
(convex.core.data.prim CVMLong)
(convex.core.lang Context))
(:require [convex.cell :as $.cell]
[convex.cvm :as $.cvm]
[convex.read :as $.read]
[convex.run.ctx :as $.run.ctx]
[convex.run.err :as $.run.err]
[convex.run.exec :as $.run.exec]
[convex.run.kw :as $.run.kw]
[convex.run.stream :as $.run.stream]
[convex.run.sym :as $.run.sym]
[criterium.core :as criterium]))
(set! *warn-on-reflection*
true)
;;;;;;;;;; Helpers
(defn- -stream
;; Given a request, returns the stream values it contains as a Java long.
[^AVector tuple]
(.longValue ^CVMLong (.get tuple
2)))
;;;;;;;;;; Setup
(defmethod $.run.exec/sreq
nil
;; No request, simply finalizes a regular transactions.
[env result]
($.run.ctx/def-result env
result))
(defmethod $.run.exec/sreq
:unknown
;; Unknown request, consided as failure.
[env tuple]
($.run.exec/fail env
($.run.err/sreq ($.cell/code-std* :ARGUMENT)
($.cell/string "Unsupported special transaction")
tuple)))
;;;;;;;;;; Code
(defmethod $.run.exec/sreq
$.run.kw/code-read+
;; Reads the given string and parses it to a list of forms.
;; TODO. Improve error reporting.
[env ^AVector tuple]
(try
($.run.ctx/def-result env
(-> (.get tuple
2)
str
$.read/string+))
(catch Throwable _err
($.run.exec/fail env
($.run.err/sreq ($.cell/code-std* :ARGUMENT)
($.cell/string "Unable to read source")
tuple)))))
;;;;;;;;;; File
(defmethod $.run.exec/sreq
$.run.kw/file-in
;; Opens a file for reading.
[env ^AVector tuple]
($.run.stream/file-in env
(str (.get tuple
2))))
(defmethod $.run.exec/sreq
$.run.kw/file-out
;; Opens a file for writing.
[env ^AVector tuple]
($.run.stream/file-out env
(str (.get tuple
2))))
;;;;;;;;;; Logging
(defmethod $.run.exec/sreq
$.run.kw/log-clear
;; Clears the CVM log.
[env _tuple]
(let [ctx (env :convex.run/ctx)
ctx-2 ($.cvm/ctx {:convex.cvm/address ($.cvm/address ctx)
:convex.cvm/state ($.cvm/state ctx)})]
(-> env
(assoc :convex.run/ctx
ctx-2)
($.run.ctx/def-result ($.cvm/log ctx-2)))))
(defmethod $.run.exec/sreq
$.run.kw/log-get
;; Interns the current state of the CVM log under `$/*result*`.
[env _tuple]
($.run.ctx/def-result env
($.cvm/log (env :convex.run/ctx))))
;;;;;;;;;; Performance
(defmethod $.run.exec/sreq
$.run.kw/perf-bench
;; Benchmarks a transaction using Criterium.
[env ^AVector tuple]
(let [ctx ($.cvm/fork (env :convex.run/ctx))
cell (.get tuple
2)
stat+ (criterium/benchmark* (fn []
(.query ^Context ctx
cell))
{})]
($.run.ctx/def-result env
($.cell/map {($.cell/keyword "mean") ($.cell/double (first (stat+ :mean)))
($.cell/keyword "stddev") ($.cell/double (Math/sqrt ^double (first (stat+ :variance))))}))))
(defmethod $.run.exec/sreq
$.run.kw/perf-track
;; Tracks juice consumption of the given transaction.
[env ^AVector tuple]
($.run.exec/trx-track env
(.get tuple
2)))
;;;;;;;;;; Process
(defmethod $.run.exec/sreq
$.run.kw/process-exit
;; Exits process with the user given status code.
[_env ^AVector tuple]
(let [status (.longValue ^CVMLong (.get tuple
2))]
(if (= (System/getenv "CONVEX_DEV")
"true")
(throw (ex-info "Throw instead of exit since dev mode"
{::status status}))
(System/exit status))))
(defmethod $.run.exec/sreq
$.run.kw/process-env
;; Interns under `$/*result*` the process environment map or a single requested variable.
[env ^AVector tuple]
($.run.ctx/def-result env
(if-some [env-var (.get tuple
2)]
(some-> (System/getenv (str env-var))
$.cell/string)
($.cell/map (map (fn [[k v]]
[($.cell/string k)
($.cell/string v)])
(System/getenv))))))
;;;;;;;;;; Streams
(defmethod $.run.exec/sreq
$.run.kw/stream-close
;; Closes the given stream.
[env tuple]
($.run.stream/close env
(-stream tuple)))
(defmethod $.run.exec/sreq
$.run.kw/stream-flush
;; Flushes the given stream.
[env ^AVector tuple]
($.run.stream/flush env
(-stream tuple)))
(defmethod $.run.exec/sreq
$.run.kw/stream-in
;; Reads a single cell from the given stream.
[env tuple]
($.run.stream/in env
(-stream tuple)))
(defmethod $.run.exec/sreq
$.run.kw/stream-in+
;; Reads all available cells from the given stream.
[env tuple]
($.run.stream/in+ env
(-stream tuple)))
(defmethod $.run.exec/sreq
$.run.kw/stream-line+
;; Reads line from the given stream and extracts all available cells.
[env tuple]
($.run.stream/line+ env
(-stream tuple)))
(defmethod $.run.exec/sreq
$.run.kw/stream-out
;; Writes a cell to the given stream.
[env ^AVector tuple]
($.run.stream/out env
(-stream tuple)
(.get tuple
3)))
(defmethod $.run.exec/sreq
;; Writes a cell to the given stream, appends a new line, and flushes everything.
$.run.kw/stream-out!
[env ^AVector tuple]
($.run.stream/out! env
(-stream tuple)
(.get tuple
3)))
;;;;;;;;;; Time
(defmethod $.run.exec/sreq
$.run.kw/time-advance
;; Advances the timestamp.
[env ^AVector tuple]
(let [^CVMLong interval (.get tuple
2)]
(-> env
(update :convex.run/ctx
(fn [ctx]
($.cvm/time-advance ctx
(.longValue interval))))
($.run.ctx/def-result interval))))
(defmethod $.run.exec/sreq
$.run.kw/time-pop
;; Pops the last context saved with `$.time/push`.
[env ^AVector tuple]
(let [stack (env :convex.run/state-stack)]
(if-some [ctx-restore (peek stack)]
(-> env
(assoc :convex.run/state-stack (pop stack)
:convex.run/ctx ctx-restore)
($.run.ctx/def-trx+ ($.cell/list [(.get tuple
2)])))
($.run.exec/fail env
($.run.err/sreq ($.cell/code-std* :STATE)
($.cell/string "No state to pop")
tuple)))))
(defmethod $.run.exec/sreq
$.run.kw/time-push
;; Saves a fork of the current context which can later be restored using `$.time/pop`.
[env _tuple]
(update env
:convex.run/state-stack
(fnil conj
'())
(-> (env :convex.run/ctx)
$.cvm/fork
($.cvm/def $.run.ctx/addr-$-trx
{$.run.sym/list* nil}))))
| true |
(ns convex.run.sreq
"Implementation of requests interpreted by the runner between transactions.
A reqest is merely a CVX vector following some particular convention that the
runner follows for producing effects beyond the scope of the CVM."
{:author "PI:NAME:<NAME>END_PI"}
(:import (convex.core.data AVector)
(convex.core.data.prim CVMLong)
(convex.core.lang Context))
(:require [convex.cell :as $.cell]
[convex.cvm :as $.cvm]
[convex.read :as $.read]
[convex.run.ctx :as $.run.ctx]
[convex.run.err :as $.run.err]
[convex.run.exec :as $.run.exec]
[convex.run.kw :as $.run.kw]
[convex.run.stream :as $.run.stream]
[convex.run.sym :as $.run.sym]
[criterium.core :as criterium]))
(set! *warn-on-reflection*
true)
;;;;;;;;;; Helpers
(defn- -stream
;; Given a request, returns the stream values it contains as a Java long.
[^AVector tuple]
(.longValue ^CVMLong (.get tuple
2)))
;;;;;;;;;; Setup
(defmethod $.run.exec/sreq
nil
;; No request, simply finalizes a regular transactions.
[env result]
($.run.ctx/def-result env
result))
(defmethod $.run.exec/sreq
:unknown
;; Unknown request, consided as failure.
[env tuple]
($.run.exec/fail env
($.run.err/sreq ($.cell/code-std* :ARGUMENT)
($.cell/string "Unsupported special transaction")
tuple)))
;;;;;;;;;; Code
(defmethod $.run.exec/sreq
$.run.kw/code-read+
;; Reads the given string and parses it to a list of forms.
;; TODO. Improve error reporting.
[env ^AVector tuple]
(try
($.run.ctx/def-result env
(-> (.get tuple
2)
str
$.read/string+))
(catch Throwable _err
($.run.exec/fail env
($.run.err/sreq ($.cell/code-std* :ARGUMENT)
($.cell/string "Unable to read source")
tuple)))))
;;;;;;;;;; File
(defmethod $.run.exec/sreq
$.run.kw/file-in
;; Opens a file for reading.
[env ^AVector tuple]
($.run.stream/file-in env
(str (.get tuple
2))))
(defmethod $.run.exec/sreq
$.run.kw/file-out
;; Opens a file for writing.
[env ^AVector tuple]
($.run.stream/file-out env
(str (.get tuple
2))))
;;;;;;;;;; Logging
(defmethod $.run.exec/sreq
$.run.kw/log-clear
;; Clears the CVM log.
[env _tuple]
(let [ctx (env :convex.run/ctx)
ctx-2 ($.cvm/ctx {:convex.cvm/address ($.cvm/address ctx)
:convex.cvm/state ($.cvm/state ctx)})]
(-> env
(assoc :convex.run/ctx
ctx-2)
($.run.ctx/def-result ($.cvm/log ctx-2)))))
(defmethod $.run.exec/sreq
$.run.kw/log-get
;; Interns the current state of the CVM log under `$/*result*`.
[env _tuple]
($.run.ctx/def-result env
($.cvm/log (env :convex.run/ctx))))
;;;;;;;;;; Performance
(defmethod $.run.exec/sreq
$.run.kw/perf-bench
;; Benchmarks a transaction using Criterium.
[env ^AVector tuple]
(let [ctx ($.cvm/fork (env :convex.run/ctx))
cell (.get tuple
2)
stat+ (criterium/benchmark* (fn []
(.query ^Context ctx
cell))
{})]
($.run.ctx/def-result env
($.cell/map {($.cell/keyword "mean") ($.cell/double (first (stat+ :mean)))
($.cell/keyword "stddev") ($.cell/double (Math/sqrt ^double (first (stat+ :variance))))}))))
(defmethod $.run.exec/sreq
$.run.kw/perf-track
;; Tracks juice consumption of the given transaction.
[env ^AVector tuple]
($.run.exec/trx-track env
(.get tuple
2)))
;;;;;;;;;; Process
(defmethod $.run.exec/sreq
$.run.kw/process-exit
;; Exits process with the user given status code.
[_env ^AVector tuple]
(let [status (.longValue ^CVMLong (.get tuple
2))]
(if (= (System/getenv "CONVEX_DEV")
"true")
(throw (ex-info "Throw instead of exit since dev mode"
{::status status}))
(System/exit status))))
(defmethod $.run.exec/sreq
$.run.kw/process-env
;; Interns under `$/*result*` the process environment map or a single requested variable.
[env ^AVector tuple]
($.run.ctx/def-result env
(if-some [env-var (.get tuple
2)]
(some-> (System/getenv (str env-var))
$.cell/string)
($.cell/map (map (fn [[k v]]
[($.cell/string k)
($.cell/string v)])
(System/getenv))))))
;;;;;;;;;; Streams
(defmethod $.run.exec/sreq
$.run.kw/stream-close
;; Closes the given stream.
[env tuple]
($.run.stream/close env
(-stream tuple)))
(defmethod $.run.exec/sreq
$.run.kw/stream-flush
;; Flushes the given stream.
[env ^AVector tuple]
($.run.stream/flush env
(-stream tuple)))
(defmethod $.run.exec/sreq
$.run.kw/stream-in
;; Reads a single cell from the given stream.
[env tuple]
($.run.stream/in env
(-stream tuple)))
(defmethod $.run.exec/sreq
$.run.kw/stream-in+
;; Reads all available cells from the given stream.
[env tuple]
($.run.stream/in+ env
(-stream tuple)))
(defmethod $.run.exec/sreq
$.run.kw/stream-line+
;; Reads line from the given stream and extracts all available cells.
[env tuple]
($.run.stream/line+ env
(-stream tuple)))
(defmethod $.run.exec/sreq
$.run.kw/stream-out
;; Writes a cell to the given stream.
[env ^AVector tuple]
($.run.stream/out env
(-stream tuple)
(.get tuple
3)))
(defmethod $.run.exec/sreq
;; Writes a cell to the given stream, appends a new line, and flushes everything.
$.run.kw/stream-out!
[env ^AVector tuple]
($.run.stream/out! env
(-stream tuple)
(.get tuple
3)))
;;;;;;;;;; Time
(defmethod $.run.exec/sreq
$.run.kw/time-advance
;; Advances the timestamp.
[env ^AVector tuple]
(let [^CVMLong interval (.get tuple
2)]
(-> env
(update :convex.run/ctx
(fn [ctx]
($.cvm/time-advance ctx
(.longValue interval))))
($.run.ctx/def-result interval))))
(defmethod $.run.exec/sreq
$.run.kw/time-pop
;; Pops the last context saved with `$.time/push`.
[env ^AVector tuple]
(let [stack (env :convex.run/state-stack)]
(if-some [ctx-restore (peek stack)]
(-> env
(assoc :convex.run/state-stack (pop stack)
:convex.run/ctx ctx-restore)
($.run.ctx/def-trx+ ($.cell/list [(.get tuple
2)])))
($.run.exec/fail env
($.run.err/sreq ($.cell/code-std* :STATE)
($.cell/string "No state to pop")
tuple)))))
(defmethod $.run.exec/sreq
$.run.kw/time-push
;; Saves a fork of the current context which can later be restored using `$.time/pop`.
[env _tuple]
(update env
:convex.run/state-stack
(fnil conj
'())
(-> (env :convex.run/ctx)
$.cvm/fork
($.cvm/def $.run.ctx/addr-$-trx
{$.run.sym/list* nil}))))
|
[
{
"context": "firestore\"]))\n\n(def firebase-config #js {:apiKey \"AIzaSyAYJX2_LdpTbdgcaGYvSbfz9hJplqTPi7Y\"\n :authDomain \"datafire.",
"end": 293,
"score": 0.9997336864471436,
"start": 254,
"tag": "KEY",
"value": "AIzaSyAYJX2_LdpTbdgcaGYvSbfz9hJplqTPi7Y"
},
{
"context": "b/id 13, :person/born \"1956-01-03\", :person/name \"Mel Gibson\"}\n {:db/id 14\n :person/born \"1946-07-22\"\n ",
"end": 1548,
"score": 0.9998679757118225,
"start": 1538,
"tag": "NAME",
"value": "Mel Gibson"
},
{
"context": " :person/born \"1946-07-22\"\n :person/name \"Danny Glover\"}\n {:db/id 15\n :person/born \"1944-07-29\"\n ",
"end": 1628,
"score": 0.9998760223388672,
"start": 1616,
"tag": "NAME",
"value": "Danny Glover"
},
{
"context": " :person/born \"1944-07-29\"\n :person/name \"Gary Busey\"}]\n :movie/director\n [{:db/id 12\n :person",
"end": 1706,
"score": 0.9998928308486938,
"start": 1696,
"tag": "NAME",
"value": "Gary Busey"
},
{
"context": " :person/born \"1930-04-24\"\n :person/name \"Richard Donner\"}]\n :movie/sequel\n {:db/id 58\n :movie/cast",
"end": 1808,
"score": 0.9998390674591064,
"start": 1794,
"tag": "NAME",
"value": "Richard Donner"
},
{
"context": " :person/born \"1956-01-03\"\n :person/name \"Mel Gibson\"}\n {:db/id 14\n :person/born \"1946-07-22\"",
"end": 1937,
"score": 0.9998409748077393,
"start": 1927,
"tag": "NAME",
"value": "Mel Gibson"
},
{
"context": " :person/born \"1946-07-22\"\n :person/name \"Danny Glover\"}\n {:db/id 37\n :person/born \"1943-02-09\"",
"end": 2020,
"score": 0.9998775720596313,
"start": 2008,
"tag": "NAME",
"value": "Danny Glover"
},
{
"context": " :person/born \"1943-02-09\"\n :person/name \"Joe Pesci\"}]\n :movie/director\n [{:db/id 12\n :per",
"end": 2100,
"score": 0.9998703002929688,
"start": 2091,
"tag": "NAME",
"value": "Joe Pesci"
},
{
"context": " :person/born \"1930-04-24\"\n :person/name \"Richard Donner\"}]\n :movie/sequel\n {:db/id 64\n :movie/c",
"end": 2206,
"score": 0.9998371601104736,
"start": 2192,
"tag": "NAME",
"value": "Richard Donner"
},
{
"context": " :person/born \"1956-01-03\"\n :person/name \"Mel Gibson\"}\n {:db/id 14\n :person/born \"1946-07-2",
"end": 2341,
"score": 0.999843955039978,
"start": 2331,
"tag": "NAME",
"value": "Mel Gibson"
},
{
"context": " :person/born \"1946-07-22\"\n :person/name \"Danny Glover\"}\n {:db/id 37\n :person/born \"1943-02-0",
"end": 2427,
"score": 0.9998223781585693,
"start": 2415,
"tag": "NAME",
"value": "Danny Glover"
},
{
"context": " :person/born \"1943-02-09\"\n :person/name \"Joe Pesci\"}]\n :movie/director\n [{:db/id 12\n :",
"end": 2510,
"score": 0.9998620748519897,
"start": 2501,
"tag": "NAME",
"value": "Joe Pesci"
},
{
"context": " :person/born \"1930-04-24\"\n :person/name \"Richard Donner\"}]\n :movie/title \"Lethal Weapon 3\"\n :movi",
"end": 2620,
"score": 0.9998447895050049,
"start": 2606,
"tag": "NAME",
"value": "Richard Donner"
}
] |
src/test/datafire/test_helpers.cljs
|
filipesilva/datascript-firebase
| 53 |
(ns datafire.test-helpers
(:require [cljs.core.async :refer [go]]
[datascript.core :as d]
[datafire.core :as df]
["firebase/app" :as firebase]
["firebase/firestore"]))
(def firebase-config #js {:apiKey "AIzaSyAYJX2_LdpTbdgcaGYvSbfz9hJplqTPi7Y"
:authDomain "datafire.firebasedatafire.com"
:projectId "datafire"})
(def emulator-settings #js {:host "localhost:8080" :ssl false})
(def default-test-app (str df/default-firebase-app "-TEST"))
(defn test-link
([] (test-link {}))
([{:keys [schema path name granularity]
:or {schema {}
path (str "tmp/rand-path-" (rand))
name default-test-app
granularity :tx}}]
(go (let [_ (try (.app firebase name)
(catch js/Error _
(let [new-app (.initializeApp firebase firebase-config name)
_ (.settings (.firestore new-app) emulator-settings)]
new-app)))
conn (d/create-conn schema)
link (df/create-link conn path {:name name :granularity granularity})]
(df/listen! link)
[conn link path name]))))
(defn query-lethal-weapon [conn]
(d/q '[:find ?e .
:where [?e :movie/title "Lethal Weapon"]]
@conn))
(defn pull-lethal-weapon [conn]
(d/pull @conn '[*] (query-lethal-weapon conn)))
(def pulled-lethal-weapon-snapshot
{:db/id 57
:movie/cast
[{:db/id 13, :person/born "1956-01-03", :person/name "Mel Gibson"}
{:db/id 14
:person/born "1946-07-22"
:person/name "Danny Glover"}
{:db/id 15
:person/born "1944-07-29"
:person/name "Gary Busey"}]
:movie/director
[{:db/id 12
:person/born "1930-04-24"
:person/name "Richard Donner"}]
:movie/sequel
{:db/id 58
:movie/cast
[{:db/id 13
:person/born "1956-01-03"
:person/name "Mel Gibson"}
{:db/id 14
:person/born "1946-07-22"
:person/name "Danny Glover"}
{:db/id 37
:person/born "1943-02-09"
:person/name "Joe Pesci"}]
:movie/director
[{:db/id 12
:person/born "1930-04-24"
:person/name "Richard Donner"}]
:movie/sequel
{:db/id 64
:movie/cast
[{:db/id 13
:person/born "1956-01-03"
:person/name "Mel Gibson"}
{:db/id 14
:person/born "1946-07-22"
:person/name "Danny Glover"}
{:db/id 37
:person/born "1943-02-09"
:person/name "Joe Pesci"}]
:movie/director
[{:db/id 12
:person/born "1930-04-24"
:person/name "Richard Donner"}]
:movie/title "Lethal Weapon 3"
:movie/year 1992}
:movie/title "Lethal Weapon 2"
:movie/year 1989}
:movie/title "Lethal Weapon"
:movie/year 1987})
|
17216
|
(ns datafire.test-helpers
(:require [cljs.core.async :refer [go]]
[datascript.core :as d]
[datafire.core :as df]
["firebase/app" :as firebase]
["firebase/firestore"]))
(def firebase-config #js {:apiKey "<KEY>"
:authDomain "datafire.firebasedatafire.com"
:projectId "datafire"})
(def emulator-settings #js {:host "localhost:8080" :ssl false})
(def default-test-app (str df/default-firebase-app "-TEST"))
(defn test-link
([] (test-link {}))
([{:keys [schema path name granularity]
:or {schema {}
path (str "tmp/rand-path-" (rand))
name default-test-app
granularity :tx}}]
(go (let [_ (try (.app firebase name)
(catch js/Error _
(let [new-app (.initializeApp firebase firebase-config name)
_ (.settings (.firestore new-app) emulator-settings)]
new-app)))
conn (d/create-conn schema)
link (df/create-link conn path {:name name :granularity granularity})]
(df/listen! link)
[conn link path name]))))
(defn query-lethal-weapon [conn]
(d/q '[:find ?e .
:where [?e :movie/title "Lethal Weapon"]]
@conn))
(defn pull-lethal-weapon [conn]
(d/pull @conn '[*] (query-lethal-weapon conn)))
(def pulled-lethal-weapon-snapshot
{:db/id 57
:movie/cast
[{:db/id 13, :person/born "1956-01-03", :person/name "<NAME>"}
{:db/id 14
:person/born "1946-07-22"
:person/name "<NAME>"}
{:db/id 15
:person/born "1944-07-29"
:person/name "<NAME>"}]
:movie/director
[{:db/id 12
:person/born "1930-04-24"
:person/name "<NAME>"}]
:movie/sequel
{:db/id 58
:movie/cast
[{:db/id 13
:person/born "1956-01-03"
:person/name "<NAME>"}
{:db/id 14
:person/born "1946-07-22"
:person/name "<NAME>"}
{:db/id 37
:person/born "1943-02-09"
:person/name "<NAME>"}]
:movie/director
[{:db/id 12
:person/born "1930-04-24"
:person/name "<NAME>"}]
:movie/sequel
{:db/id 64
:movie/cast
[{:db/id 13
:person/born "1956-01-03"
:person/name "<NAME>"}
{:db/id 14
:person/born "1946-07-22"
:person/name "<NAME>"}
{:db/id 37
:person/born "1943-02-09"
:person/name "<NAME>"}]
:movie/director
[{:db/id 12
:person/born "1930-04-24"
:person/name "<NAME>"}]
:movie/title "Lethal Weapon 3"
:movie/year 1992}
:movie/title "Lethal Weapon 2"
:movie/year 1989}
:movie/title "Lethal Weapon"
:movie/year 1987})
| true |
(ns datafire.test-helpers
(:require [cljs.core.async :refer [go]]
[datascript.core :as d]
[datafire.core :as df]
["firebase/app" :as firebase]
["firebase/firestore"]))
(def firebase-config #js {:apiKey "PI:KEY:<KEY>END_PI"
:authDomain "datafire.firebasedatafire.com"
:projectId "datafire"})
(def emulator-settings #js {:host "localhost:8080" :ssl false})
(def default-test-app (str df/default-firebase-app "-TEST"))
(defn test-link
([] (test-link {}))
([{:keys [schema path name granularity]
:or {schema {}
path (str "tmp/rand-path-" (rand))
name default-test-app
granularity :tx}}]
(go (let [_ (try (.app firebase name)
(catch js/Error _
(let [new-app (.initializeApp firebase firebase-config name)
_ (.settings (.firestore new-app) emulator-settings)]
new-app)))
conn (d/create-conn schema)
link (df/create-link conn path {:name name :granularity granularity})]
(df/listen! link)
[conn link path name]))))
(defn query-lethal-weapon [conn]
(d/q '[:find ?e .
:where [?e :movie/title "Lethal Weapon"]]
@conn))
(defn pull-lethal-weapon [conn]
(d/pull @conn '[*] (query-lethal-weapon conn)))
(def pulled-lethal-weapon-snapshot
{:db/id 57
:movie/cast
[{:db/id 13, :person/born "1956-01-03", :person/name "PI:NAME:<NAME>END_PI"}
{:db/id 14
:person/born "1946-07-22"
:person/name "PI:NAME:<NAME>END_PI"}
{:db/id 15
:person/born "1944-07-29"
:person/name "PI:NAME:<NAME>END_PI"}]
:movie/director
[{:db/id 12
:person/born "1930-04-24"
:person/name "PI:NAME:<NAME>END_PI"}]
:movie/sequel
{:db/id 58
:movie/cast
[{:db/id 13
:person/born "1956-01-03"
:person/name "PI:NAME:<NAME>END_PI"}
{:db/id 14
:person/born "1946-07-22"
:person/name "PI:NAME:<NAME>END_PI"}
{:db/id 37
:person/born "1943-02-09"
:person/name "PI:NAME:<NAME>END_PI"}]
:movie/director
[{:db/id 12
:person/born "1930-04-24"
:person/name "PI:NAME:<NAME>END_PI"}]
:movie/sequel
{:db/id 64
:movie/cast
[{:db/id 13
:person/born "1956-01-03"
:person/name "PI:NAME:<NAME>END_PI"}
{:db/id 14
:person/born "1946-07-22"
:person/name "PI:NAME:<NAME>END_PI"}
{:db/id 37
:person/born "1943-02-09"
:person/name "PI:NAME:<NAME>END_PI"}]
:movie/director
[{:db/id 12
:person/born "1930-04-24"
:person/name "PI:NAME:<NAME>END_PI"}]
:movie/title "Lethal Weapon 3"
:movie/year 1992}
:movie/title "Lethal Weapon 2"
:movie/year 1989}
:movie/title "Lethal Weapon"
:movie/year 1987})
|
[
{
"context": "bj :user {:login \"login\"\n :password \"secret\"\n :greeting \"hello world\"}\n :c",
"end": 5970,
"score": 0.9994634985923767,
"start": 5964,
"tag": "PASSWORD",
"value": "secret"
},
{
"context": "login 'login',\n :password 'secret',\n :greeting 'hello world'",
"end": 6316,
"score": 0.9994469881057739,
"start": 6310,
"tag": "PASSWORD",
"value": "secret"
},
{
"context": " :user {:login \"login\"\n :password \"secret\"\n :greeting \"hello world\"}\n ",
"end": 6886,
"score": 0.9993718862533569,
"start": 6880,
"tag": "PASSWORD",
"value": "secret"
},
{
"context": "login 'login',\n :password 'secret',\n :greeting 'hello world'",
"end": 7239,
"score": 0.9992096424102783,
"start": 7233,
"tag": "PASSWORD",
"value": "secret"
}
] |
test/cljs/midje_doc/api/gyr.cljs
|
zcaudate-me/gyr
| 1 |
(ns midje-doc.api.gyr
(:require [purnam.test])
(:use-macros [purnam.core :only [! f.n def.n obj arr]]
[gyr.core :only [def.module def.controller
def.value def.constant
def.filter def.factory
def.provider def.service
def.directive def.config]]
[purnam.test :only [describe is it]]
[gyr.test :only [describe.ng describe.controller it-uses]]))
[[:chapter {:title "gyr.core" :tag "gyr-core"}]]
"Libraries to work with angular.js"
[[:section {:title "init" :tag "init-angular"}]]
"`gyr.core` macros are imported through the :use-macro call:"
(comment
(:use-macros [gyr.core :only [def.module def.controller
def.value def.constant
def.filter def.factory
def.provider def.service
def.directive def.config]]))
[[:section {:title "def.module" :tag "def-module"}]]
"`def.module` provides an easy way to define angular modules. The following clojurescript code generates the equivalent javascript code below it:"
[[{:hide true}]]
(def.module my.app [])
(comment
(def.module my.app [ui ui.bootstrap]))
[[{:lang "js"}]]
[[:code "angular.module('my.app', ['ui', 'ui.bootstrap'])"]]
"Typically, the `def.module` is at the very top of the file, one module is defined for one clojure namespace."
[[:section {:title "def.config" :tag "def-config"}]]
"`def.config` is used to setup module providers. "
(comment
(def.config <MODULE NAME> [... <PROVIDERS> ...]
...
<FUNCTION BODY>
... ))
"It is most commonly used to setup the routing for an application."
(comment
(def.config my.app [$locationProvider $routeProvider]
(doto $locationProvider (.hashPrefix "!"))
(doto $routeProvider
(.when "" (obj :redirectTo "/home")))))
"The equivalent javascript code can be seen below."
[[{:lang "js"}]]
[[:code "angular.module('my.app')
.config(['$locationProvider', '$routeProvider',
function($locationProvider, $routeProvider){
$locationProvider.hashPrefix('!');
$routeProvider.when('', {redirectTo: '/home'});
}]);"]]
[[:section {:title "def.controller" :tag "def-controller"}]]
"`def.controller` defines a controller. The typical usage is like this:"
(comment
(def.controller <MODULE NAME>.<CONTROLLER NAME> [... <INJECTIONS> ...]
...
<CONTROLLER BODY>
... ))
"A sample controller"
(def.controller my.app.SimpleCtrl [$scope]
(! $scope.msg "Hello")
(! $scope.setMessage (fn [msg] (! $scope.msg msg))))
"Produces the equivalent javascript code:"
[[{:lang "js"}]]
[[:code "angular.module('my.app')
.controller('SimpleCtrl', ['$scope', function($scope){
$scope.msg = 'Hello'
$scope.setMessage = function (msg){
$scope.msg = msg;
}}])"]]
[[:section {:title "def.directive" :tag "def-directive"}]]
"`def.directive` defines a directive. The typical usage is like this:"
(comment
(def.directive <MODULE NAME>.<DIRECTIVE NAME> [... <INJECTIONS> ...]
;; Initialisation code to return a function:
(fn [$scope element attrs]
.... <FUNCTION> .... ))
)
"A sample directive"
(def.directive my.app.appWelcome []
(fn [$scope element attrs]
(let [html (element.html)]
(element.html (str "Welcome <strong>" html "</strong>")))))
"Produces the equivalent javascript code:"
[[{:lang "js"}]]
[[:code "angular.module('my.app')
.directive('appWelcome', [function() {
return function($scope, element, attrs) {
var html = element.html();
element.html('Welcome: <strong>' + html + '</strong>');
};}]);"]]
[[:section {:title "def.filter" :tag "def-filter"}]]
"`def.filter` defines a filter. The typical usage is like this:"
(comment
(def.filter <MODULE NAME>.<FILTER NAME> [... <INJECTIONS> ...]
;; Initialisation code to return a function:
(fn [input & args]
.... <FUNCTION> .... )))
"The sample filter"
(def.filter my.app.range []
(fn [input total]
(when input
(doseq [i (range (js/parseInt total))]
(input.push i))
input)))
"Produces the equivalent javascript code:"
[[{:lang "js"}]]
[[:code "angular.module('my.app')
.filter('range', [function() {
return function(input, total) {
if(!input) return null;
total = parseInt(total);
for (var i=0; i <total; i++)
input.push(i);
return input;
};
}]);"]]
[[:section {:title "def.constant" :tag "def-constant"}]]
"`def.constant` defines a constant. The typical usage is like this:"
(comment
(def.value <MODULE NAME>.<CONSTANT NAME>
<CONSTANT>))
"The sample constant"
(def.constant my.app.MeaningOfLife 42)
"Produces the equivalent javascript code:"
[[{:lang "js"}]]
[[:code
"angular.module('my.app')
.constant('MeaningOfLife', 42);"]]
[[:section {:title "def.value" :tag "def-value"}]]
"`def.value` defines a value. The typical usage is like this:"
(comment
(def.value <MODULE NAME>.<VALUE NAME>
<VALUE>))
"The sample value"
(def.value my.app.AnotherMeaningOfLife "A Mystery")
"Produces the equivalent javascript code:"
[[{:lang "js"}]]
[[:code
"angular.module('my.app')
.value('AnotherMeaningOfLife', 'A Mystery');"]]
[[:section {:title "def.service" :tag "def-service"}]]
"`def.service` defines a service. The typical usage is like this:"
(comment
(def.service <MODULE NAME>.<SERVICE NAME> [... <INJECTIONS> ...]
<RETURN OBJECT> ))
"The sample service"
(def.service my.app.LoginService []
(obj :user {:login "login"
:password "secret"
:greeting "hello world"}
:changeLogin (fn [login]
(! this.user.login login))))
"Produces the equivalent javascript code:"
[[{:lang "js"}]]
[[:code
"angular.module('my.app')
.service('LoginService', [function(){
return {user: {:login 'login',
:password 'secret',
:greeting 'hello world'},
changeLogin: function (login){
this.user.login = login;}}}]);"]]
[[:section {:title "def.factory" :tag "def-factory"}]]
"`def.factory` defines a factory. The typical usage is like this:"
(comment
(def.factory <MODULE NAME>.<FACTORY NAME> [... <INJECTIONS> ...]
<RETURN OBJECT> ))
"The sample factory"
(comment
(def.factory my.app.LoginService []
(obj :user {:login "login"
:password "secret"
:greeting "hello world"}
:changeLogin (fn [login]
(! this.user.login login)))))
"Produces the equivalent javascript code:"
[[{:lang "js"}]]
[[:code
"angular.module('my.app')
.factory('LoginService', [function(){
return {user: {:login 'login',
:password 'secret',
:greeting 'hello world'},
changeLogin: function (login){
this.user.login = login;}}}]);"]]
[[:section {:title "def.provider" :tag "def-provider"}]]
"`def.provider` defines a provider. The typical usage is like this:"
(comment
(def.provider <MODULE NAME>.<SERVICE NAME> [... <INJECTIONS> ...]
<RETURN OBJECT> ))
"The following is a definition, configuration, and usage of a provider"
(def.provider my.app.HelloWorld []
(obj :name "Default"
:$get (fn []
(let [n self.name]
(obj :sayHello
(fn [] (str "Hello " n "!")))))
:setName (fn [name]
(! self.name name))))
(def.config my.app [HelloWorldProvider]
(HelloWorldProvider.setName "World"))
(def.controller my.app.sfpMainCtrl [$scope HelloWorld]
(! $scope.hello (str (HelloWorld.sayHello) " From Provider")))
|
119033
|
(ns midje-doc.api.gyr
(:require [purnam.test])
(:use-macros [purnam.core :only [! f.n def.n obj arr]]
[gyr.core :only [def.module def.controller
def.value def.constant
def.filter def.factory
def.provider def.service
def.directive def.config]]
[purnam.test :only [describe is it]]
[gyr.test :only [describe.ng describe.controller it-uses]]))
[[:chapter {:title "gyr.core" :tag "gyr-core"}]]
"Libraries to work with angular.js"
[[:section {:title "init" :tag "init-angular"}]]
"`gyr.core` macros are imported through the :use-macro call:"
(comment
(:use-macros [gyr.core :only [def.module def.controller
def.value def.constant
def.filter def.factory
def.provider def.service
def.directive def.config]]))
[[:section {:title "def.module" :tag "def-module"}]]
"`def.module` provides an easy way to define angular modules. The following clojurescript code generates the equivalent javascript code below it:"
[[{:hide true}]]
(def.module my.app [])
(comment
(def.module my.app [ui ui.bootstrap]))
[[{:lang "js"}]]
[[:code "angular.module('my.app', ['ui', 'ui.bootstrap'])"]]
"Typically, the `def.module` is at the very top of the file, one module is defined for one clojure namespace."
[[:section {:title "def.config" :tag "def-config"}]]
"`def.config` is used to setup module providers. "
(comment
(def.config <MODULE NAME> [... <PROVIDERS> ...]
...
<FUNCTION BODY>
... ))
"It is most commonly used to setup the routing for an application."
(comment
(def.config my.app [$locationProvider $routeProvider]
(doto $locationProvider (.hashPrefix "!"))
(doto $routeProvider
(.when "" (obj :redirectTo "/home")))))
"The equivalent javascript code can be seen below."
[[{:lang "js"}]]
[[:code "angular.module('my.app')
.config(['$locationProvider', '$routeProvider',
function($locationProvider, $routeProvider){
$locationProvider.hashPrefix('!');
$routeProvider.when('', {redirectTo: '/home'});
}]);"]]
[[:section {:title "def.controller" :tag "def-controller"}]]
"`def.controller` defines a controller. The typical usage is like this:"
(comment
(def.controller <MODULE NAME>.<CONTROLLER NAME> [... <INJECTIONS> ...]
...
<CONTROLLER BODY>
... ))
"A sample controller"
(def.controller my.app.SimpleCtrl [$scope]
(! $scope.msg "Hello")
(! $scope.setMessage (fn [msg] (! $scope.msg msg))))
"Produces the equivalent javascript code:"
[[{:lang "js"}]]
[[:code "angular.module('my.app')
.controller('SimpleCtrl', ['$scope', function($scope){
$scope.msg = 'Hello'
$scope.setMessage = function (msg){
$scope.msg = msg;
}}])"]]
[[:section {:title "def.directive" :tag "def-directive"}]]
"`def.directive` defines a directive. The typical usage is like this:"
(comment
(def.directive <MODULE NAME>.<DIRECTIVE NAME> [... <INJECTIONS> ...]
;; Initialisation code to return a function:
(fn [$scope element attrs]
.... <FUNCTION> .... ))
)
"A sample directive"
(def.directive my.app.appWelcome []
(fn [$scope element attrs]
(let [html (element.html)]
(element.html (str "Welcome <strong>" html "</strong>")))))
"Produces the equivalent javascript code:"
[[{:lang "js"}]]
[[:code "angular.module('my.app')
.directive('appWelcome', [function() {
return function($scope, element, attrs) {
var html = element.html();
element.html('Welcome: <strong>' + html + '</strong>');
};}]);"]]
[[:section {:title "def.filter" :tag "def-filter"}]]
"`def.filter` defines a filter. The typical usage is like this:"
(comment
(def.filter <MODULE NAME>.<FILTER NAME> [... <INJECTIONS> ...]
;; Initialisation code to return a function:
(fn [input & args]
.... <FUNCTION> .... )))
"The sample filter"
(def.filter my.app.range []
(fn [input total]
(when input
(doseq [i (range (js/parseInt total))]
(input.push i))
input)))
"Produces the equivalent javascript code:"
[[{:lang "js"}]]
[[:code "angular.module('my.app')
.filter('range', [function() {
return function(input, total) {
if(!input) return null;
total = parseInt(total);
for (var i=0; i <total; i++)
input.push(i);
return input;
};
}]);"]]
[[:section {:title "def.constant" :tag "def-constant"}]]
"`def.constant` defines a constant. The typical usage is like this:"
(comment
(def.value <MODULE NAME>.<CONSTANT NAME>
<CONSTANT>))
"The sample constant"
(def.constant my.app.MeaningOfLife 42)
"Produces the equivalent javascript code:"
[[{:lang "js"}]]
[[:code
"angular.module('my.app')
.constant('MeaningOfLife', 42);"]]
[[:section {:title "def.value" :tag "def-value"}]]
"`def.value` defines a value. The typical usage is like this:"
(comment
(def.value <MODULE NAME>.<VALUE NAME>
<VALUE>))
"The sample value"
(def.value my.app.AnotherMeaningOfLife "A Mystery")
"Produces the equivalent javascript code:"
[[{:lang "js"}]]
[[:code
"angular.module('my.app')
.value('AnotherMeaningOfLife', 'A Mystery');"]]
[[:section {:title "def.service" :tag "def-service"}]]
"`def.service` defines a service. The typical usage is like this:"
(comment
(def.service <MODULE NAME>.<SERVICE NAME> [... <INJECTIONS> ...]
<RETURN OBJECT> ))
"The sample service"
(def.service my.app.LoginService []
(obj :user {:login "login"
:password "<PASSWORD>"
:greeting "hello world"}
:changeLogin (fn [login]
(! this.user.login login))))
"Produces the equivalent javascript code:"
[[{:lang "js"}]]
[[:code
"angular.module('my.app')
.service('LoginService', [function(){
return {user: {:login 'login',
:password '<PASSWORD>',
:greeting 'hello world'},
changeLogin: function (login){
this.user.login = login;}}}]);"]]
[[:section {:title "def.factory" :tag "def-factory"}]]
"`def.factory` defines a factory. The typical usage is like this:"
(comment
(def.factory <MODULE NAME>.<FACTORY NAME> [... <INJECTIONS> ...]
<RETURN OBJECT> ))
"The sample factory"
(comment
(def.factory my.app.LoginService []
(obj :user {:login "login"
:password "<PASSWORD>"
:greeting "hello world"}
:changeLogin (fn [login]
(! this.user.login login)))))
"Produces the equivalent javascript code:"
[[{:lang "js"}]]
[[:code
"angular.module('my.app')
.factory('LoginService', [function(){
return {user: {:login 'login',
:password '<PASSWORD>',
:greeting 'hello world'},
changeLogin: function (login){
this.user.login = login;}}}]);"]]
[[:section {:title "def.provider" :tag "def-provider"}]]
"`def.provider` defines a provider. The typical usage is like this:"
(comment
(def.provider <MODULE NAME>.<SERVICE NAME> [... <INJECTIONS> ...]
<RETURN OBJECT> ))
"The following is a definition, configuration, and usage of a provider"
(def.provider my.app.HelloWorld []
(obj :name "Default"
:$get (fn []
(let [n self.name]
(obj :sayHello
(fn [] (str "Hello " n "!")))))
:setName (fn [name]
(! self.name name))))
(def.config my.app [HelloWorldProvider]
(HelloWorldProvider.setName "World"))
(def.controller my.app.sfpMainCtrl [$scope HelloWorld]
(! $scope.hello (str (HelloWorld.sayHello) " From Provider")))
| true |
(ns midje-doc.api.gyr
(:require [purnam.test])
(:use-macros [purnam.core :only [! f.n def.n obj arr]]
[gyr.core :only [def.module def.controller
def.value def.constant
def.filter def.factory
def.provider def.service
def.directive def.config]]
[purnam.test :only [describe is it]]
[gyr.test :only [describe.ng describe.controller it-uses]]))
[[:chapter {:title "gyr.core" :tag "gyr-core"}]]
"Libraries to work with angular.js"
[[:section {:title "init" :tag "init-angular"}]]
"`gyr.core` macros are imported through the :use-macro call:"
(comment
(:use-macros [gyr.core :only [def.module def.controller
def.value def.constant
def.filter def.factory
def.provider def.service
def.directive def.config]]))
[[:section {:title "def.module" :tag "def-module"}]]
"`def.module` provides an easy way to define angular modules. The following clojurescript code generates the equivalent javascript code below it:"
[[{:hide true}]]
(def.module my.app [])
(comment
(def.module my.app [ui ui.bootstrap]))
[[{:lang "js"}]]
[[:code "angular.module('my.app', ['ui', 'ui.bootstrap'])"]]
"Typically, the `def.module` is at the very top of the file, one module is defined for one clojure namespace."
[[:section {:title "def.config" :tag "def-config"}]]
"`def.config` is used to setup module providers. "
(comment
(def.config <MODULE NAME> [... <PROVIDERS> ...]
...
<FUNCTION BODY>
... ))
"It is most commonly used to setup the routing for an application."
(comment
(def.config my.app [$locationProvider $routeProvider]
(doto $locationProvider (.hashPrefix "!"))
(doto $routeProvider
(.when "" (obj :redirectTo "/home")))))
"The equivalent javascript code can be seen below."
[[{:lang "js"}]]
[[:code "angular.module('my.app')
.config(['$locationProvider', '$routeProvider',
function($locationProvider, $routeProvider){
$locationProvider.hashPrefix('!');
$routeProvider.when('', {redirectTo: '/home'});
}]);"]]
[[:section {:title "def.controller" :tag "def-controller"}]]
"`def.controller` defines a controller. The typical usage is like this:"
(comment
(def.controller <MODULE NAME>.<CONTROLLER NAME> [... <INJECTIONS> ...]
...
<CONTROLLER BODY>
... ))
"A sample controller"
(def.controller my.app.SimpleCtrl [$scope]
(! $scope.msg "Hello")
(! $scope.setMessage (fn [msg] (! $scope.msg msg))))
"Produces the equivalent javascript code:"
[[{:lang "js"}]]
[[:code "angular.module('my.app')
.controller('SimpleCtrl', ['$scope', function($scope){
$scope.msg = 'Hello'
$scope.setMessage = function (msg){
$scope.msg = msg;
}}])"]]
[[:section {:title "def.directive" :tag "def-directive"}]]
"`def.directive` defines a directive. The typical usage is like this:"
(comment
(def.directive <MODULE NAME>.<DIRECTIVE NAME> [... <INJECTIONS> ...]
;; Initialisation code to return a function:
(fn [$scope element attrs]
.... <FUNCTION> .... ))
)
"A sample directive"
(def.directive my.app.appWelcome []
(fn [$scope element attrs]
(let [html (element.html)]
(element.html (str "Welcome <strong>" html "</strong>")))))
"Produces the equivalent javascript code:"
[[{:lang "js"}]]
[[:code "angular.module('my.app')
.directive('appWelcome', [function() {
return function($scope, element, attrs) {
var html = element.html();
element.html('Welcome: <strong>' + html + '</strong>');
};}]);"]]
[[:section {:title "def.filter" :tag "def-filter"}]]
"`def.filter` defines a filter. The typical usage is like this:"
(comment
(def.filter <MODULE NAME>.<FILTER NAME> [... <INJECTIONS> ...]
;; Initialisation code to return a function:
(fn [input & args]
.... <FUNCTION> .... )))
"The sample filter"
(def.filter my.app.range []
(fn [input total]
(when input
(doseq [i (range (js/parseInt total))]
(input.push i))
input)))
"Produces the equivalent javascript code:"
[[{:lang "js"}]]
[[:code "angular.module('my.app')
.filter('range', [function() {
return function(input, total) {
if(!input) return null;
total = parseInt(total);
for (var i=0; i <total; i++)
input.push(i);
return input;
};
}]);"]]
[[:section {:title "def.constant" :tag "def-constant"}]]
"`def.constant` defines a constant. The typical usage is like this:"
(comment
(def.value <MODULE NAME>.<CONSTANT NAME>
<CONSTANT>))
"The sample constant"
(def.constant my.app.MeaningOfLife 42)
"Produces the equivalent javascript code:"
[[{:lang "js"}]]
[[:code
"angular.module('my.app')
.constant('MeaningOfLife', 42);"]]
[[:section {:title "def.value" :tag "def-value"}]]
"`def.value` defines a value. The typical usage is like this:"
(comment
(def.value <MODULE NAME>.<VALUE NAME>
<VALUE>))
"The sample value"
(def.value my.app.AnotherMeaningOfLife "A Mystery")
"Produces the equivalent javascript code:"
[[{:lang "js"}]]
[[:code
"angular.module('my.app')
.value('AnotherMeaningOfLife', 'A Mystery');"]]
[[:section {:title "def.service" :tag "def-service"}]]
"`def.service` defines a service. The typical usage is like this:"
(comment
(def.service <MODULE NAME>.<SERVICE NAME> [... <INJECTIONS> ...]
<RETURN OBJECT> ))
"The sample service"
(def.service my.app.LoginService []
(obj :user {:login "login"
:password "PI:PASSWORD:<PASSWORD>END_PI"
:greeting "hello world"}
:changeLogin (fn [login]
(! this.user.login login))))
"Produces the equivalent javascript code:"
[[{:lang "js"}]]
[[:code
"angular.module('my.app')
.service('LoginService', [function(){
return {user: {:login 'login',
:password 'PI:PASSWORD:<PASSWORD>END_PI',
:greeting 'hello world'},
changeLogin: function (login){
this.user.login = login;}}}]);"]]
[[:section {:title "def.factory" :tag "def-factory"}]]
"`def.factory` defines a factory. The typical usage is like this:"
(comment
(def.factory <MODULE NAME>.<FACTORY NAME> [... <INJECTIONS> ...]
<RETURN OBJECT> ))
"The sample factory"
(comment
(def.factory my.app.LoginService []
(obj :user {:login "login"
:password "PI:PASSWORD:<PASSWORD>END_PI"
:greeting "hello world"}
:changeLogin (fn [login]
(! this.user.login login)))))
"Produces the equivalent javascript code:"
[[{:lang "js"}]]
[[:code
"angular.module('my.app')
.factory('LoginService', [function(){
return {user: {:login 'login',
:password 'PI:PASSWORD:<PASSWORD>END_PI',
:greeting 'hello world'},
changeLogin: function (login){
this.user.login = login;}}}]);"]]
[[:section {:title "def.provider" :tag "def-provider"}]]
"`def.provider` defines a provider. The typical usage is like this:"
(comment
(def.provider <MODULE NAME>.<SERVICE NAME> [... <INJECTIONS> ...]
<RETURN OBJECT> ))
"The following is a definition, configuration, and usage of a provider"
(def.provider my.app.HelloWorld []
(obj :name "Default"
:$get (fn []
(let [n self.name]
(obj :sayHello
(fn [] (str "Hello " n "!")))))
:setName (fn [name]
(! self.name name))))
(def.config my.app [HelloWorldProvider]
(HelloWorldProvider.setName "World"))
(def.controller my.app.sfpMainCtrl [$scope HelloWorld]
(! $scope.hello (str (HelloWorld.sayHello) " From Provider")))
|
[
{
"context": " on-index\n \"Return a page which says \\\"Hello from Peloton\\\"\"\n [conn] \n (httpd/set-content-type-html! conn",
"end": 244,
"score": 0.9946414828300476,
"start": 237,
"tag": "NAME",
"value": "Peloton"
},
{
"context": "se-body! conn (html [:html [:body [:h1 \"Hello from Peloton\"]]]))\n (httpd/send-response! conn))\n\n(defn chunk",
"end": 373,
"score": 0.9970696568489075,
"start": 366,
"tag": "NAME",
"value": "Peloton"
}
] |
src/peloton/example/web.clj
|
bickfordb/Peloton
| 1 |
(ns peloton.example.web
(:gen-class)
(:use peloton.util)
(:use [hiccup.core :only [html]])
(:require [peloton.httpd :as httpd])
(:require [peloton.reactor :as reactor]))
(defn on-index
"Return a page which says \"Hello from Peloton\""
[conn]
(httpd/set-content-type-html! conn)
(httpd/set-response-body! conn (html [:html [:body [:h1 "Hello from Peloton"]]]))
(httpd/send-response! conn))
(defn chunk-loop
[conn]
(when (not (httpd/finished? conn))
(httpd/send-chunk!
conn
(html [:script "x++; f(x);"]))
(httpd/flush-output! conn)
(reactor/timeout! 1.0 chunk-loop conn)))
(defn on-chunked
"Return a page which displays a counter which increments once per second through JSONP chunked responses"
[conn]
(httpd/set-content-type-html! conn)
(httpd/start-chunked-response! conn)
(httpd/send-chunk!
conn
(html [:html
[:body
[:h1 {:id "foo"} "1"]
[:script "
x=0;
function f(n) {
document.getElementById(\"foo\").innerHTML = \"\" + n;
}"]]]))
; flush browser buffer
(httpd/send-chunk! conn (format "%1024s" ""))
(httpd/flush-output! conn)
(reactor/timeout! 1.0 chunk-loop conn))
(defn -main [ & args]
(httpd/serve! {:ports [8080]
:listen-backlog 100}
[:GET #"^/chunked$" on-chunked]
[:GET #"^/resources/(.+)$" (httpd/create-file-handler "peloton/" :resource? true)]
[:GET #"^/$" on-index]))
|
121326
|
(ns peloton.example.web
(:gen-class)
(:use peloton.util)
(:use [hiccup.core :only [html]])
(:require [peloton.httpd :as httpd])
(:require [peloton.reactor :as reactor]))
(defn on-index
"Return a page which says \"Hello from <NAME>\""
[conn]
(httpd/set-content-type-html! conn)
(httpd/set-response-body! conn (html [:html [:body [:h1 "Hello from <NAME>"]]]))
(httpd/send-response! conn))
(defn chunk-loop
[conn]
(when (not (httpd/finished? conn))
(httpd/send-chunk!
conn
(html [:script "x++; f(x);"]))
(httpd/flush-output! conn)
(reactor/timeout! 1.0 chunk-loop conn)))
(defn on-chunked
"Return a page which displays a counter which increments once per second through JSONP chunked responses"
[conn]
(httpd/set-content-type-html! conn)
(httpd/start-chunked-response! conn)
(httpd/send-chunk!
conn
(html [:html
[:body
[:h1 {:id "foo"} "1"]
[:script "
x=0;
function f(n) {
document.getElementById(\"foo\").innerHTML = \"\" + n;
}"]]]))
; flush browser buffer
(httpd/send-chunk! conn (format "%1024s" ""))
(httpd/flush-output! conn)
(reactor/timeout! 1.0 chunk-loop conn))
(defn -main [ & args]
(httpd/serve! {:ports [8080]
:listen-backlog 100}
[:GET #"^/chunked$" on-chunked]
[:GET #"^/resources/(.+)$" (httpd/create-file-handler "peloton/" :resource? true)]
[:GET #"^/$" on-index]))
| true |
(ns peloton.example.web
(:gen-class)
(:use peloton.util)
(:use [hiccup.core :only [html]])
(:require [peloton.httpd :as httpd])
(:require [peloton.reactor :as reactor]))
(defn on-index
"Return a page which says \"Hello from PI:NAME:<NAME>END_PI\""
[conn]
(httpd/set-content-type-html! conn)
(httpd/set-response-body! conn (html [:html [:body [:h1 "Hello from PI:NAME:<NAME>END_PI"]]]))
(httpd/send-response! conn))
(defn chunk-loop
[conn]
(when (not (httpd/finished? conn))
(httpd/send-chunk!
conn
(html [:script "x++; f(x);"]))
(httpd/flush-output! conn)
(reactor/timeout! 1.0 chunk-loop conn)))
(defn on-chunked
"Return a page which displays a counter which increments once per second through JSONP chunked responses"
[conn]
(httpd/set-content-type-html! conn)
(httpd/start-chunked-response! conn)
(httpd/send-chunk!
conn
(html [:html
[:body
[:h1 {:id "foo"} "1"]
[:script "
x=0;
function f(n) {
document.getElementById(\"foo\").innerHTML = \"\" + n;
}"]]]))
; flush browser buffer
(httpd/send-chunk! conn (format "%1024s" ""))
(httpd/flush-output! conn)
(reactor/timeout! 1.0 chunk-loop conn))
(defn -main [ & args]
(httpd/serve! {:ports [8080]
:listen-backlog 100}
[:GET #"^/chunked$" on-chunked]
[:GET #"^/resources/(.+)$" (httpd/create-file-handler "peloton/" :resource? true)]
[:GET #"^/$" on-index]))
|
[
{
"context": " You should have something like:\n\n;; profile_for(\"[email protected]\")\n;; ... and it should produce:\n\n;; {\n;; email:",
"end": 599,
"score": 0.9999130368232727,
"start": 588,
"tag": "EMAIL",
"value": "[email protected]"
},
{
"context": "\n;; ... and it should produce:\n\n;; {\n;; email: '[email protected]',\n;; uid: 10,\n;; role: 'user'\n;; }\n;; ... enc",
"end": 662,
"score": 0.9999122619628906,
"start": 651,
"tag": "EMAIL",
"value": "[email protected]"
},
{
"context": " role: 'user'\n;; }\n;; ... encoded as:\n\n;; [email protected]&uid=10&role=user\n;; Your \"profile_for\" function s",
"end": 742,
"score": 0.9999040365219116,
"start": 731,
"tag": "EMAIL",
"value": "[email protected]"
},
{
"context": " but don't let people set their email address to \"[email protected]&role=admin\".\n\n;; Now, two more easy functions. Ge",
"end": 950,
"score": 0.9998993873596191,
"start": 939,
"tag": "EMAIL",
"value": "[email protected]"
}
] |
test/set2/break_profile_test.clj
|
milapsheth/Crypto-Challenges
| 3 |
(ns set2.break-profile-test
(:require [set2.profile-parser :as profile]
[clojure.test :refer :all]
[clojure.string :as str]))
;; ECB cut-and-paste
;; Write a k=v parsing routine, as if for a structured cookie. The routine should take:
;; foo=bar&baz=qux&zap=zazzle
;; ... and produce:
;; {
;; foo: 'bar',
;; baz: 'qux',
;; zap: 'zazzle'
;; }
;; (you know, the object; I don't care if you convert it to JSON).
;; Now write a function that encodes a user profile in that format, given an email address. You should have something like:
;; profile_for("[email protected]")
;; ... and it should produce:
;; {
;; email: '[email protected]',
;; uid: 10,
;; role: 'user'
;; }
;; ... encoded as:
;; [email protected]&uid=10&role=user
;; Your "profile_for" function should not allow encoding metacharacters (& and =). Eat them, quote them, whatever you want to do, but don't let people set their email address to "[email protected]&role=admin".
;; Now, two more easy functions. Generate a random AES key, then:
;; Encrypt the encoded user profile under the key; "provide" that to the "attacker".
;; Decrypt the encoded user profile and parse it.
;; Using only the user input to profile_for() (as an oracle to generate "valid" ciphertexts) and the ciphertexts themselves, make a role=admin profile.
;; Take advantage of PKCS#7 padding to get separate ciphertexts
;; for email and "admin"
(def block-size 16)
(def profile1 (str "break.thisadmin" (str/join (repeat 11 \o13))))
(def profile2 "breaking.this")
(defn make-admin []
(profile/decrypt-profile
(concat (drop-last block-size (profile/encrypt-profile profile2))
(take block-size (drop block-size
(profile/encrypt-profile profile1))))))
(deftest break-profile-test
(testing "Failed to make user admin"
(is (= "admin"
(get (make-admin) "role")))))
|
81781
|
(ns set2.break-profile-test
(:require [set2.profile-parser :as profile]
[clojure.test :refer :all]
[clojure.string :as str]))
;; ECB cut-and-paste
;; Write a k=v parsing routine, as if for a structured cookie. The routine should take:
;; foo=bar&baz=qux&zap=zazzle
;; ... and produce:
;; {
;; foo: 'bar',
;; baz: 'qux',
;; zap: 'zazzle'
;; }
;; (you know, the object; I don't care if you convert it to JSON).
;; Now write a function that encodes a user profile in that format, given an email address. You should have something like:
;; profile_for("<EMAIL>")
;; ... and it should produce:
;; {
;; email: '<EMAIL>',
;; uid: 10,
;; role: 'user'
;; }
;; ... encoded as:
;; email=<EMAIL>&uid=10&role=user
;; Your "profile_for" function should not allow encoding metacharacters (& and =). Eat them, quote them, whatever you want to do, but don't let people set their email address to "<EMAIL>&role=admin".
;; Now, two more easy functions. Generate a random AES key, then:
;; Encrypt the encoded user profile under the key; "provide" that to the "attacker".
;; Decrypt the encoded user profile and parse it.
;; Using only the user input to profile_for() (as an oracle to generate "valid" ciphertexts) and the ciphertexts themselves, make a role=admin profile.
;; Take advantage of PKCS#7 padding to get separate ciphertexts
;; for email and "admin"
(def block-size 16)
(def profile1 (str "break.thisadmin" (str/join (repeat 11 \o13))))
(def profile2 "breaking.this")
(defn make-admin []
(profile/decrypt-profile
(concat (drop-last block-size (profile/encrypt-profile profile2))
(take block-size (drop block-size
(profile/encrypt-profile profile1))))))
(deftest break-profile-test
(testing "Failed to make user admin"
(is (= "admin"
(get (make-admin) "role")))))
| true |
(ns set2.break-profile-test
(:require [set2.profile-parser :as profile]
[clojure.test :refer :all]
[clojure.string :as str]))
;; ECB cut-and-paste
;; Write a k=v parsing routine, as if for a structured cookie. The routine should take:
;; foo=bar&baz=qux&zap=zazzle
;; ... and produce:
;; {
;; foo: 'bar',
;; baz: 'qux',
;; zap: 'zazzle'
;; }
;; (you know, the object; I don't care if you convert it to JSON).
;; Now write a function that encodes a user profile in that format, given an email address. You should have something like:
;; profile_for("PI:EMAIL:<EMAIL>END_PI")
;; ... and it should produce:
;; {
;; email: 'PI:EMAIL:<EMAIL>END_PI',
;; uid: 10,
;; role: 'user'
;; }
;; ... encoded as:
;; email=PI:EMAIL:<EMAIL>END_PI&uid=10&role=user
;; Your "profile_for" function should not allow encoding metacharacters (& and =). Eat them, quote them, whatever you want to do, but don't let people set their email address to "PI:EMAIL:<EMAIL>END_PI&role=admin".
;; Now, two more easy functions. Generate a random AES key, then:
;; Encrypt the encoded user profile under the key; "provide" that to the "attacker".
;; Decrypt the encoded user profile and parse it.
;; Using only the user input to profile_for() (as an oracle to generate "valid" ciphertexts) and the ciphertexts themselves, make a role=admin profile.
;; Take advantage of PKCS#7 padding to get separate ciphertexts
;; for email and "admin"
(def block-size 16)
(def profile1 (str "break.thisadmin" (str/join (repeat 11 \o13))))
(def profile2 "breaking.this")
(defn make-admin []
(profile/decrypt-profile
(concat (drop-last block-size (profile/encrypt-profile profile2))
(take block-size (drop block-size
(profile/encrypt-profile profile1))))))
(deftest break-profile-test
(testing "Failed to make user admin"
(is (= "admin"
(get (make-admin) "role")))))
|
[
{
"context": "\"\n (:require [clojure.set :as set]))\n\n;; Based on Alan Dipert's original implementation\n;; https://gist.git",
"end": 130,
"score": 0.999881386756897,
"start": 119,
"tag": "NAME",
"value": "Alan Dipert"
},
{
"context": "nal implementation\n;; https://gist.github.com/alandipert/1263783\n\n(defn- without\n \"Returns set s with x r",
"end": 198,
"score": 0.9997302889823914,
"start": 188,
"tag": "USERNAME",
"value": "alandipert"
}
] |
src/foppl/toposort.clj
|
rmascarenhas/foppl
| 11 |
(ns foppl.toposort
"Performs topological sort of an acyclic graph."
(:require [clojure.set :as set]))
;; Based on Alan Dipert's original implementation
;; https://gist.github.com/alandipert/1263783
(defn- without
"Returns set s with x removed."
[s x] (set/difference s #{x}))
(defn- take-1
"Returns the pair [element, s'] where s' is set s with element removed."
[s] {:pre [(not (empty? s))]}
(let [item (first s)]
[item (without s item)]))
(defn- no-incoming
"Returns the set of nodes in graph g for which there are no incoming
edges, where g is a map of nodes to sets of nodes."
[g]
(let [nodes (set (keys g))
have-incoming (apply set/union (vals g))]
(set/difference nodes have-incoming)))
(defn- normalize
"Returns g with empty outgoing edges added for nodes with incoming
edges only. Example: {:a #{:b}} => {:a #{:b}, :b #{}}"
[g]
(let [have-incoming (apply set/union (vals g))]
(reduce #(if (get % %2) % (assoc % %2 #{})) g have-incoming)))
(defn- toposort
"Proposes a topological sort for directed graph g using Kahn's
algorithm, where g is a map of nodes to sets of nodes. If g is
cyclic, returns nil."
([g]
(toposort (normalize g) [] (no-incoming g)))
([g l s]
(if (empty? s)
(when (every? empty? (vals g)) l)
(let [[n s'] (take-1 s)
m (g n)
g' (reduce #(update-in % [n] without %2) g m)]
(recur g' (conj l n) (set/union s' (set/intersection (no-incoming g') m)))))))
(defn perform [{{A :A} :G}]
"Performs topological sort of a graphical model, given as a
foppl.graphical.model record. Returns an array of random-variable
names that representa topological sort of the graph."
(let [sources (map (fn [[from to]] from) A)
graph (zipmap sources (repeat #{}))
combine (fn [g [from to]] (assoc g from (conj (get g from) to)))
graph (reduce combine graph A)]
(toposort graph)))
|
75865
|
(ns foppl.toposort
"Performs topological sort of an acyclic graph."
(:require [clojure.set :as set]))
;; Based on <NAME>'s original implementation
;; https://gist.github.com/alandipert/1263783
(defn- without
"Returns set s with x removed."
[s x] (set/difference s #{x}))
(defn- take-1
"Returns the pair [element, s'] where s' is set s with element removed."
[s] {:pre [(not (empty? s))]}
(let [item (first s)]
[item (without s item)]))
(defn- no-incoming
"Returns the set of nodes in graph g for which there are no incoming
edges, where g is a map of nodes to sets of nodes."
[g]
(let [nodes (set (keys g))
have-incoming (apply set/union (vals g))]
(set/difference nodes have-incoming)))
(defn- normalize
"Returns g with empty outgoing edges added for nodes with incoming
edges only. Example: {:a #{:b}} => {:a #{:b}, :b #{}}"
[g]
(let [have-incoming (apply set/union (vals g))]
(reduce #(if (get % %2) % (assoc % %2 #{})) g have-incoming)))
(defn- toposort
"Proposes a topological sort for directed graph g using Kahn's
algorithm, where g is a map of nodes to sets of nodes. If g is
cyclic, returns nil."
([g]
(toposort (normalize g) [] (no-incoming g)))
([g l s]
(if (empty? s)
(when (every? empty? (vals g)) l)
(let [[n s'] (take-1 s)
m (g n)
g' (reduce #(update-in % [n] without %2) g m)]
(recur g' (conj l n) (set/union s' (set/intersection (no-incoming g') m)))))))
(defn perform [{{A :A} :G}]
"Performs topological sort of a graphical model, given as a
foppl.graphical.model record. Returns an array of random-variable
names that representa topological sort of the graph."
(let [sources (map (fn [[from to]] from) A)
graph (zipmap sources (repeat #{}))
combine (fn [g [from to]] (assoc g from (conj (get g from) to)))
graph (reduce combine graph A)]
(toposort graph)))
| true |
(ns foppl.toposort
"Performs topological sort of an acyclic graph."
(:require [clojure.set :as set]))
;; Based on PI:NAME:<NAME>END_PI's original implementation
;; https://gist.github.com/alandipert/1263783
(defn- without
"Returns set s with x removed."
[s x] (set/difference s #{x}))
(defn- take-1
"Returns the pair [element, s'] where s' is set s with element removed."
[s] {:pre [(not (empty? s))]}
(let [item (first s)]
[item (without s item)]))
(defn- no-incoming
"Returns the set of nodes in graph g for which there are no incoming
edges, where g is a map of nodes to sets of nodes."
[g]
(let [nodes (set (keys g))
have-incoming (apply set/union (vals g))]
(set/difference nodes have-incoming)))
(defn- normalize
"Returns g with empty outgoing edges added for nodes with incoming
edges only. Example: {:a #{:b}} => {:a #{:b}, :b #{}}"
[g]
(let [have-incoming (apply set/union (vals g))]
(reduce #(if (get % %2) % (assoc % %2 #{})) g have-incoming)))
(defn- toposort
"Proposes a topological sort for directed graph g using Kahn's
algorithm, where g is a map of nodes to sets of nodes. If g is
cyclic, returns nil."
([g]
(toposort (normalize g) [] (no-incoming g)))
([g l s]
(if (empty? s)
(when (every? empty? (vals g)) l)
(let [[n s'] (take-1 s)
m (g n)
g' (reduce #(update-in % [n] without %2) g m)]
(recur g' (conj l n) (set/union s' (set/intersection (no-incoming g') m)))))))
(defn perform [{{A :A} :G}]
"Performs topological sort of a graphical model, given as a
foppl.graphical.model record. Returns an array of random-variable
names that representa topological sort of the graph."
(let [sources (map (fn [[from to]] from) A)
graph (zipmap sources (repeat #{}))
combine (fn [g [from to]] (assoc g from (conj (get g from) to)))
graph (reduce combine graph A)]
(toposort graph)))
|
[
{
"context": "; Copyright (c) Rich Hickey. All rights reserved.\n; The use and distributio",
"end": 29,
"score": 0.9998586177825928,
"start": 18,
"tag": "NAME",
"value": "Rich Hickey"
},
{
"context": "ng,\n and enumeration. See Huet\"\n :author \"Rich Hickey\"}\n rewrite-clj.custom-zipper.core\n (:refer-cloj",
"end": 676,
"score": 0.9998784065246582,
"start": 665,
"tag": "NAME",
"value": "Rich Hickey"
}
] |
src/rewrite_clj/custom_zipper/core.clj
|
green-coder/rewrite-clj
| 1 |
; Copyright (c) Rich Hickey. All rights reserved.
; The use and distribution terms for this software are covered by the
; Eclipse Public License 1.0 (http://opensource.org/licenses/eclipse-1.0.php)
; which can be found in the file epl-v10.html at the root of this distribution.
; By using this software in any fashion, you are agreeing to be bound by
; the terms of this license.
; You must not remove this notice, or any other, from this software.
;functional hierarchical zipper, with navigation, editing and enumeration
;see Huet
(ns ^{:doc "Functional hierarchical zipper, with navigation, editing,
and enumeration. See Huet"
:author "Rich Hickey"}
rewrite-clj.custom-zipper.core
(:refer-clojure :exclude (replace remove next))
(:require [rewrite-clj.node.protocols :as node]
[clojure.zip :as clj-zip]))
;; ## Switch
;;
;; To not force users into using this custom zipper, the following flag
;; is used to dispatch to `clojure.zip` when set to `false`.
(defn ^:no-doc custom-zipper
[root]
{::custom? true
:node root
:position [1 1]
:parent nil
:left []
:right '()})
(defn ^:no-doc zipper
[root]
(clj-zip/zipper
node/inner?
(comp seq node/children)
node/replace-children
root))
(defn ^:no-doc custom-zipper?
[value]
(::custom? value))
(defmacro ^:private defn-switchable
[sym docstring params & body]
(let [placeholders (repeatedly (count params) gensym)]
`(defn ~sym
~docstring
[~@placeholders]
(if (custom-zipper? ~(first placeholders))
(let [~@(interleave params placeholders)]
~@body)
(~(symbol "clojure.zip" (name sym)) ~@placeholders)))))
;; ## Implementation
(defn-switchable node
"Returns the node at loc"
[{:keys [node]}]
node)
(defn-switchable branch?
"Returns true if the node at loc is a branch"
[{:keys [node]}]
(node/inner? node))
(defn-switchable children
"Returns a seq of the children of node at loc, which must be a branch"
[{:keys [node] :as loc}]
(if (branch? loc)
(seq (node/children node))
(throw (Exception. "called children on a leaf node"))))
(defn-switchable ^:no-doc make-node
"Returns a new branch node, given an existing node and new
children. The loc is only used to supply the constructor."
[loc node children]
(node/replace-children node children))
(defn position
"Returns the ones-based [row col] of the start of the current node"
[loc]
(if (custom-zipper? loc)
(:position loc)
(throw
(IllegalStateException.
(str
"to use the 'position' function, please construct your zipper with "
"':track-position?' set to true.")))))
(defn-switchable lefts
"Returns a seq of the left siblings of this loc"
[loc]
(map first (:left loc)))
(defn-switchable down
"Returns the loc of the leftmost child of the node at this loc, or
nil if no children"
[loc]
(when (branch? loc)
(let [{:keys [node path] [row col] :position} loc
[c & cnext :as cs] (children loc)]
(when cs
{::custom? true
:node c
:position [row (+ col (node/leader-length node))]
:parent loc
:left []
:right cnext}))))
(defn-switchable up
"Returns the loc of the parent of the node at this loc, or nil if at
the top"
[loc]
(let [{:keys [node parent left right changed?]} loc]
(when parent
(if changed?
(assoc parent
:changed? true
:node (make-node loc
(:node parent)
(concat (map first left) (cons node right))))
parent))))
(defn-switchable root
"zips all the way up and returns the root node, reflecting any changes."
[{:keys [end?] :as loc}]
(if end?
(node loc)
(let [p (up loc)]
(if p
(recur p)
(node loc)))))
(defn-switchable right
"Returns the loc of the right sibling of the node at this loc, or nil"
[loc]
(let [{:keys [node parent position left] [r & rnext :as right] :right} loc]
(when (and parent right)
(assoc loc
:node r
:left (conj left [node position])
:right rnext
:position (node/+extent position (node/extent node))))))
(defn-switchable rightmost
"Returns the loc of the rightmost sibling of the node at this loc, or self"
[loc]
(if-let [next (right loc)]
(recur next)
loc))
(defn-switchable left
"Returns the loc of the left sibling of the node at this loc, or nil"
[loc]
(let [{:keys [node parent left right]} loc]
(when (and parent (seq left))
(let [[lnode lpos] (peek left)]
(assoc loc
:node lnode
:position lpos
:left (pop left)
:right (cons node right))))))
(defn-switchable leftmost
"Returns the loc of the leftmost sibling of the node at this loc, or self"
[loc]
(let [{:keys [node parent left right]} loc]
(if (and parent (seq left))
(let [[lnode lpos] (first left)]
(assoc loc
:node lnode
:position lpos
:left []
:right (concat (map first (rest left)) [node] right)))
loc)))
(defn-switchable insert-left
"Inserts the item as the left sibling of the node at this loc,
without moving"
[loc item]
(let [{:keys [parent position left]} loc]
(if-not parent
(throw (new Exception "Insert at top"))
(assoc loc
:changed? true
:left (conj left [item position])
:position (node/+extent position (node/extent item))))))
(defn-switchable insert-right
"Inserts the item as the right sibling of the node at this loc,
without moving"
[loc item]
(let [{:keys [parent right]} loc]
(if-not parent
(throw (new Exception "Insert at top"))
(assoc loc
:changed? true
:right (cons item right)))))
(defn-switchable replace
"Replaces the node at this loc, without moving"
[loc node]
(assoc loc :changed? true :node node))
(defn edit
"Replaces the node at this loc with the value of (f node args)"
[loc f & args]
(if (custom-zipper? loc)
(replace loc (apply f (node loc) args))
(apply clj-zip/edit loc f args)))
(defn-switchable insert-child
"Inserts the item as the leftmost child of the node at this loc,
without moving"
[loc item]
(replace loc (make-node loc (node loc) (cons item (children loc)))))
(defn-switchable append-child
"Inserts the item as the rightmost child of the node at this loc,
without moving"
[loc item]
(replace loc (make-node loc (node loc) (concat (children loc) [item]))))
(defn-switchable next
"Moves to the next loc in the hierarchy, depth-first. When reaching
the end, returns a distinguished loc detectable via end?. If already
at the end, stays there."
[{:keys [end?] :as loc}]
(if end?
loc
(or
(and (branch? loc) (down loc))
(right loc)
(loop [p loc]
(if (up p)
(or (right (up p)) (recur (up p)))
(assoc p :end? true))))))
(defn-switchable prev
"Moves to the previous loc in the hierarchy, depth-first. If already
at the root, returns nil."
[loc]
(if-let [lloc (left loc)]
(loop [loc lloc]
(if-let [child (and (branch? loc) (down loc))]
(recur (rightmost child))
loc))
(up loc)))
(defn-switchable end?
"Returns true if loc represents the end of a depth-first walk"
[loc]
(:end? loc))
(defn-switchable remove
"Removes the node at loc, returning the loc that would have preceded
it in a depth-first walk."
[loc]
(let [{:keys [node parent left right]} loc]
(if-not parent
(throw (new Exception "Remove at top"))
(if (seq left)
(loop [loc (let [[lnode lpos] (peek left)]
(assoc loc
:changed? true
:position lpos
:node lnode
:left (pop left)))]
(if-let [child (and (branch? loc) (down loc))]
(recur (rightmost child))
loc))
(assoc parent
:changed? true
:node (make-node loc (:node parent) right))))))
|
59782
|
; Copyright (c) <NAME>. All rights reserved.
; The use and distribution terms for this software are covered by the
; Eclipse Public License 1.0 (http://opensource.org/licenses/eclipse-1.0.php)
; which can be found in the file epl-v10.html at the root of this distribution.
; By using this software in any fashion, you are agreeing to be bound by
; the terms of this license.
; You must not remove this notice, or any other, from this software.
;functional hierarchical zipper, with navigation, editing and enumeration
;see Huet
(ns ^{:doc "Functional hierarchical zipper, with navigation, editing,
and enumeration. See Huet"
:author "<NAME>"}
rewrite-clj.custom-zipper.core
(:refer-clojure :exclude (replace remove next))
(:require [rewrite-clj.node.protocols :as node]
[clojure.zip :as clj-zip]))
;; ## Switch
;;
;; To not force users into using this custom zipper, the following flag
;; is used to dispatch to `clojure.zip` when set to `false`.
(defn ^:no-doc custom-zipper
[root]
{::custom? true
:node root
:position [1 1]
:parent nil
:left []
:right '()})
(defn ^:no-doc zipper
[root]
(clj-zip/zipper
node/inner?
(comp seq node/children)
node/replace-children
root))
(defn ^:no-doc custom-zipper?
[value]
(::custom? value))
(defmacro ^:private defn-switchable
[sym docstring params & body]
(let [placeholders (repeatedly (count params) gensym)]
`(defn ~sym
~docstring
[~@placeholders]
(if (custom-zipper? ~(first placeholders))
(let [~@(interleave params placeholders)]
~@body)
(~(symbol "clojure.zip" (name sym)) ~@placeholders)))))
;; ## Implementation
(defn-switchable node
"Returns the node at loc"
[{:keys [node]}]
node)
(defn-switchable branch?
"Returns true if the node at loc is a branch"
[{:keys [node]}]
(node/inner? node))
(defn-switchable children
"Returns a seq of the children of node at loc, which must be a branch"
[{:keys [node] :as loc}]
(if (branch? loc)
(seq (node/children node))
(throw (Exception. "called children on a leaf node"))))
(defn-switchable ^:no-doc make-node
"Returns a new branch node, given an existing node and new
children. The loc is only used to supply the constructor."
[loc node children]
(node/replace-children node children))
(defn position
"Returns the ones-based [row col] of the start of the current node"
[loc]
(if (custom-zipper? loc)
(:position loc)
(throw
(IllegalStateException.
(str
"to use the 'position' function, please construct your zipper with "
"':track-position?' set to true.")))))
(defn-switchable lefts
"Returns a seq of the left siblings of this loc"
[loc]
(map first (:left loc)))
(defn-switchable down
"Returns the loc of the leftmost child of the node at this loc, or
nil if no children"
[loc]
(when (branch? loc)
(let [{:keys [node path] [row col] :position} loc
[c & cnext :as cs] (children loc)]
(when cs
{::custom? true
:node c
:position [row (+ col (node/leader-length node))]
:parent loc
:left []
:right cnext}))))
(defn-switchable up
"Returns the loc of the parent of the node at this loc, or nil if at
the top"
[loc]
(let [{:keys [node parent left right changed?]} loc]
(when parent
(if changed?
(assoc parent
:changed? true
:node (make-node loc
(:node parent)
(concat (map first left) (cons node right))))
parent))))
(defn-switchable root
"zips all the way up and returns the root node, reflecting any changes."
[{:keys [end?] :as loc}]
(if end?
(node loc)
(let [p (up loc)]
(if p
(recur p)
(node loc)))))
(defn-switchable right
"Returns the loc of the right sibling of the node at this loc, or nil"
[loc]
(let [{:keys [node parent position left] [r & rnext :as right] :right} loc]
(when (and parent right)
(assoc loc
:node r
:left (conj left [node position])
:right rnext
:position (node/+extent position (node/extent node))))))
(defn-switchable rightmost
"Returns the loc of the rightmost sibling of the node at this loc, or self"
[loc]
(if-let [next (right loc)]
(recur next)
loc))
(defn-switchable left
"Returns the loc of the left sibling of the node at this loc, or nil"
[loc]
(let [{:keys [node parent left right]} loc]
(when (and parent (seq left))
(let [[lnode lpos] (peek left)]
(assoc loc
:node lnode
:position lpos
:left (pop left)
:right (cons node right))))))
(defn-switchable leftmost
"Returns the loc of the leftmost sibling of the node at this loc, or self"
[loc]
(let [{:keys [node parent left right]} loc]
(if (and parent (seq left))
(let [[lnode lpos] (first left)]
(assoc loc
:node lnode
:position lpos
:left []
:right (concat (map first (rest left)) [node] right)))
loc)))
(defn-switchable insert-left
"Inserts the item as the left sibling of the node at this loc,
without moving"
[loc item]
(let [{:keys [parent position left]} loc]
(if-not parent
(throw (new Exception "Insert at top"))
(assoc loc
:changed? true
:left (conj left [item position])
:position (node/+extent position (node/extent item))))))
(defn-switchable insert-right
"Inserts the item as the right sibling of the node at this loc,
without moving"
[loc item]
(let [{:keys [parent right]} loc]
(if-not parent
(throw (new Exception "Insert at top"))
(assoc loc
:changed? true
:right (cons item right)))))
(defn-switchable replace
"Replaces the node at this loc, without moving"
[loc node]
(assoc loc :changed? true :node node))
(defn edit
"Replaces the node at this loc with the value of (f node args)"
[loc f & args]
(if (custom-zipper? loc)
(replace loc (apply f (node loc) args))
(apply clj-zip/edit loc f args)))
(defn-switchable insert-child
"Inserts the item as the leftmost child of the node at this loc,
without moving"
[loc item]
(replace loc (make-node loc (node loc) (cons item (children loc)))))
(defn-switchable append-child
"Inserts the item as the rightmost child of the node at this loc,
without moving"
[loc item]
(replace loc (make-node loc (node loc) (concat (children loc) [item]))))
(defn-switchable next
"Moves to the next loc in the hierarchy, depth-first. When reaching
the end, returns a distinguished loc detectable via end?. If already
at the end, stays there."
[{:keys [end?] :as loc}]
(if end?
loc
(or
(and (branch? loc) (down loc))
(right loc)
(loop [p loc]
(if (up p)
(or (right (up p)) (recur (up p)))
(assoc p :end? true))))))
(defn-switchable prev
"Moves to the previous loc in the hierarchy, depth-first. If already
at the root, returns nil."
[loc]
(if-let [lloc (left loc)]
(loop [loc lloc]
(if-let [child (and (branch? loc) (down loc))]
(recur (rightmost child))
loc))
(up loc)))
(defn-switchable end?
"Returns true if loc represents the end of a depth-first walk"
[loc]
(:end? loc))
(defn-switchable remove
"Removes the node at loc, returning the loc that would have preceded
it in a depth-first walk."
[loc]
(let [{:keys [node parent left right]} loc]
(if-not parent
(throw (new Exception "Remove at top"))
(if (seq left)
(loop [loc (let [[lnode lpos] (peek left)]
(assoc loc
:changed? true
:position lpos
:node lnode
:left (pop left)))]
(if-let [child (and (branch? loc) (down loc))]
(recur (rightmost child))
loc))
(assoc parent
:changed? true
:node (make-node loc (:node parent) right))))))
| true |
; Copyright (c) PI:NAME:<NAME>END_PI. All rights reserved.
; The use and distribution terms for this software are covered by the
; Eclipse Public License 1.0 (http://opensource.org/licenses/eclipse-1.0.php)
; which can be found in the file epl-v10.html at the root of this distribution.
; By using this software in any fashion, you are agreeing to be bound by
; the terms of this license.
; You must not remove this notice, or any other, from this software.
;functional hierarchical zipper, with navigation, editing and enumeration
;see Huet
(ns ^{:doc "Functional hierarchical zipper, with navigation, editing,
and enumeration. See Huet"
:author "PI:NAME:<NAME>END_PI"}
rewrite-clj.custom-zipper.core
(:refer-clojure :exclude (replace remove next))
(:require [rewrite-clj.node.protocols :as node]
[clojure.zip :as clj-zip]))
;; ## Switch
;;
;; To not force users into using this custom zipper, the following flag
;; is used to dispatch to `clojure.zip` when set to `false`.
(defn ^:no-doc custom-zipper
[root]
{::custom? true
:node root
:position [1 1]
:parent nil
:left []
:right '()})
(defn ^:no-doc zipper
[root]
(clj-zip/zipper
node/inner?
(comp seq node/children)
node/replace-children
root))
(defn ^:no-doc custom-zipper?
[value]
(::custom? value))
(defmacro ^:private defn-switchable
[sym docstring params & body]
(let [placeholders (repeatedly (count params) gensym)]
`(defn ~sym
~docstring
[~@placeholders]
(if (custom-zipper? ~(first placeholders))
(let [~@(interleave params placeholders)]
~@body)
(~(symbol "clojure.zip" (name sym)) ~@placeholders)))))
;; ## Implementation
(defn-switchable node
"Returns the node at loc"
[{:keys [node]}]
node)
(defn-switchable branch?
"Returns true if the node at loc is a branch"
[{:keys [node]}]
(node/inner? node))
(defn-switchable children
"Returns a seq of the children of node at loc, which must be a branch"
[{:keys [node] :as loc}]
(if (branch? loc)
(seq (node/children node))
(throw (Exception. "called children on a leaf node"))))
(defn-switchable ^:no-doc make-node
"Returns a new branch node, given an existing node and new
children. The loc is only used to supply the constructor."
[loc node children]
(node/replace-children node children))
(defn position
"Returns the ones-based [row col] of the start of the current node"
[loc]
(if (custom-zipper? loc)
(:position loc)
(throw
(IllegalStateException.
(str
"to use the 'position' function, please construct your zipper with "
"':track-position?' set to true.")))))
(defn-switchable lefts
"Returns a seq of the left siblings of this loc"
[loc]
(map first (:left loc)))
(defn-switchable down
"Returns the loc of the leftmost child of the node at this loc, or
nil if no children"
[loc]
(when (branch? loc)
(let [{:keys [node path] [row col] :position} loc
[c & cnext :as cs] (children loc)]
(when cs
{::custom? true
:node c
:position [row (+ col (node/leader-length node))]
:parent loc
:left []
:right cnext}))))
(defn-switchable up
"Returns the loc of the parent of the node at this loc, or nil if at
the top"
[loc]
(let [{:keys [node parent left right changed?]} loc]
(when parent
(if changed?
(assoc parent
:changed? true
:node (make-node loc
(:node parent)
(concat (map first left) (cons node right))))
parent))))
(defn-switchable root
"zips all the way up and returns the root node, reflecting any changes."
[{:keys [end?] :as loc}]
(if end?
(node loc)
(let [p (up loc)]
(if p
(recur p)
(node loc)))))
(defn-switchable right
"Returns the loc of the right sibling of the node at this loc, or nil"
[loc]
(let [{:keys [node parent position left] [r & rnext :as right] :right} loc]
(when (and parent right)
(assoc loc
:node r
:left (conj left [node position])
:right rnext
:position (node/+extent position (node/extent node))))))
(defn-switchable rightmost
"Returns the loc of the rightmost sibling of the node at this loc, or self"
[loc]
(if-let [next (right loc)]
(recur next)
loc))
(defn-switchable left
"Returns the loc of the left sibling of the node at this loc, or nil"
[loc]
(let [{:keys [node parent left right]} loc]
(when (and parent (seq left))
(let [[lnode lpos] (peek left)]
(assoc loc
:node lnode
:position lpos
:left (pop left)
:right (cons node right))))))
(defn-switchable leftmost
"Returns the loc of the leftmost sibling of the node at this loc, or self"
[loc]
(let [{:keys [node parent left right]} loc]
(if (and parent (seq left))
(let [[lnode lpos] (first left)]
(assoc loc
:node lnode
:position lpos
:left []
:right (concat (map first (rest left)) [node] right)))
loc)))
(defn-switchable insert-left
"Inserts the item as the left sibling of the node at this loc,
without moving"
[loc item]
(let [{:keys [parent position left]} loc]
(if-not parent
(throw (new Exception "Insert at top"))
(assoc loc
:changed? true
:left (conj left [item position])
:position (node/+extent position (node/extent item))))))
(defn-switchable insert-right
"Inserts the item as the right sibling of the node at this loc,
without moving"
[loc item]
(let [{:keys [parent right]} loc]
(if-not parent
(throw (new Exception "Insert at top"))
(assoc loc
:changed? true
:right (cons item right)))))
(defn-switchable replace
"Replaces the node at this loc, without moving"
[loc node]
(assoc loc :changed? true :node node))
(defn edit
"Replaces the node at this loc with the value of (f node args)"
[loc f & args]
(if (custom-zipper? loc)
(replace loc (apply f (node loc) args))
(apply clj-zip/edit loc f args)))
(defn-switchable insert-child
"Inserts the item as the leftmost child of the node at this loc,
without moving"
[loc item]
(replace loc (make-node loc (node loc) (cons item (children loc)))))
(defn-switchable append-child
"Inserts the item as the rightmost child of the node at this loc,
without moving"
[loc item]
(replace loc (make-node loc (node loc) (concat (children loc) [item]))))
(defn-switchable next
"Moves to the next loc in the hierarchy, depth-first. When reaching
the end, returns a distinguished loc detectable via end?. If already
at the end, stays there."
[{:keys [end?] :as loc}]
(if end?
loc
(or
(and (branch? loc) (down loc))
(right loc)
(loop [p loc]
(if (up p)
(or (right (up p)) (recur (up p)))
(assoc p :end? true))))))
(defn-switchable prev
"Moves to the previous loc in the hierarchy, depth-first. If already
at the root, returns nil."
[loc]
(if-let [lloc (left loc)]
(loop [loc lloc]
(if-let [child (and (branch? loc) (down loc))]
(recur (rightmost child))
loc))
(up loc)))
(defn-switchable end?
"Returns true if loc represents the end of a depth-first walk"
[loc]
(:end? loc))
(defn-switchable remove
"Removes the node at loc, returning the loc that would have preceded
it in a depth-first walk."
[loc]
(let [{:keys [node parent left right]} loc]
(if-not parent
(throw (new Exception "Remove at top"))
(if (seq left)
(loop [loc (let [[lnode lpos] (peek left)]
(assoc loc
:changed? true
:position lpos
:node lnode
:left (pop left)))]
(if-let [child (and (branch? loc) (down loc))]
(recur (rightmost child))
loc))
(assoc parent
:changed? true
:node (make-node loc (:node parent) right))))))
|
[
{
"context": " {:port 8080\n :host \"0.0.0.0\"\n :join? true\n ",
"end": 1101,
"score": 0.9965155720710754,
"start": 1094,
"tag": "IP_ADDRESS",
"value": "0.0.0.0"
},
{
"context": "ystore.p12\"\n :key-password \"password\"; Password you gave when creating the keystore\n ",
"end": 1278,
"score": 0.9990087151527405,
"start": 1270,
"tag": "PASSWORD",
"value": "password"
},
{
"context": "\"\n :key-password \"password\"; Password you gave when creating the keystore\n ",
"end": 1289,
"score": 0.9815028309822083,
"start": 1281,
"tag": "PASSWORD",
"value": "Password"
},
{
"context": "e keystore\n })))\n;; https://danielflower.github.io/2017/04/08/Lets-Encrypt-Certs-with-embe",
"end": 1377,
"score": 0.9952361583709717,
"start": 1365,
"tag": "USERNAME",
"value": "danielflower"
}
] |
webserver/src-demo/demo/https.clj
|
pink-gorilla/webly
| 6 |
(ns demo.https
(:require
[modular.webserver.jetty :refer [run-jetty-server]]
[modular.webserver.handler.not-found :refer [not-found-handler]]
[modular.webserver.handler.files :refer [->FilesMaybe]]
[modular.webserver.handler.config :refer [config-handler]]
[modular.webserver.middleware.bidi :refer [wrap-bidi]]
[modular.webserver.middleware.exception :refer [wrap-fallback-exception]]
[modular.webserver.middleware.api :refer [wrap-api-handler]]
[modular.config :as config]))
(config/set! :demo {:mode 3 :message "testing"})
(def routes
["/" {"webly" (->FilesMaybe {:dir "../docs/"})
"webly/" (->FilesMaybe {:dir "../docs/index.html"})
;"" (->FilesMaybe {:dir "../docs/"})
"config" {:get (wrap-api-handler config-handler)}
#{"r" "public"} (->FilesMaybe {:dir "public"})
true not-found-handler}])
(defn run-webserver [& _]
(let [ring-handler (-> (wrap-bidi routes)
(wrap-fallback-exception))]
(run-jetty-server ring-handler nil
{:port 8080
:host "0.0.0.0"
:join? true
:ssl-port 8443
:keystore "./certs/keystore.p12"
:key-password "password"; Password you gave when creating the keystore
})))
;; https://danielflower.github.io/2017/04/08/Lets-Encrypt-Certs-with-embedded-Jetty.html
|
88625
|
(ns demo.https
(:require
[modular.webserver.jetty :refer [run-jetty-server]]
[modular.webserver.handler.not-found :refer [not-found-handler]]
[modular.webserver.handler.files :refer [->FilesMaybe]]
[modular.webserver.handler.config :refer [config-handler]]
[modular.webserver.middleware.bidi :refer [wrap-bidi]]
[modular.webserver.middleware.exception :refer [wrap-fallback-exception]]
[modular.webserver.middleware.api :refer [wrap-api-handler]]
[modular.config :as config]))
(config/set! :demo {:mode 3 :message "testing"})
(def routes
["/" {"webly" (->FilesMaybe {:dir "../docs/"})
"webly/" (->FilesMaybe {:dir "../docs/index.html"})
;"" (->FilesMaybe {:dir "../docs/"})
"config" {:get (wrap-api-handler config-handler)}
#{"r" "public"} (->FilesMaybe {:dir "public"})
true not-found-handler}])
(defn run-webserver [& _]
(let [ring-handler (-> (wrap-bidi routes)
(wrap-fallback-exception))]
(run-jetty-server ring-handler nil
{:port 8080
:host "0.0.0.0"
:join? true
:ssl-port 8443
:keystore "./certs/keystore.p12"
:key-password "<PASSWORD>"; <PASSWORD> you gave when creating the keystore
})))
;; https://danielflower.github.io/2017/04/08/Lets-Encrypt-Certs-with-embedded-Jetty.html
| true |
(ns demo.https
(:require
[modular.webserver.jetty :refer [run-jetty-server]]
[modular.webserver.handler.not-found :refer [not-found-handler]]
[modular.webserver.handler.files :refer [->FilesMaybe]]
[modular.webserver.handler.config :refer [config-handler]]
[modular.webserver.middleware.bidi :refer [wrap-bidi]]
[modular.webserver.middleware.exception :refer [wrap-fallback-exception]]
[modular.webserver.middleware.api :refer [wrap-api-handler]]
[modular.config :as config]))
(config/set! :demo {:mode 3 :message "testing"})
(def routes
["/" {"webly" (->FilesMaybe {:dir "../docs/"})
"webly/" (->FilesMaybe {:dir "../docs/index.html"})
;"" (->FilesMaybe {:dir "../docs/"})
"config" {:get (wrap-api-handler config-handler)}
#{"r" "public"} (->FilesMaybe {:dir "public"})
true not-found-handler}])
(defn run-webserver [& _]
(let [ring-handler (-> (wrap-bidi routes)
(wrap-fallback-exception))]
(run-jetty-server ring-handler nil
{:port 8080
:host "0.0.0.0"
:join? true
:ssl-port 8443
:keystore "./certs/keystore.p12"
:key-password "PI:PASSWORD:<PASSWORD>END_PI"; PI:PASSWORD:<PASSWORD>END_PI you gave when creating the keystore
})))
;; https://danielflower.github.io/2017/04/08/Lets-Encrypt-Certs-with-embedded-Jetty.html
|
[
{
"context": "(let [little \"little\"]\n (println (format \"Mary had a %s lamb.\" little)))\n",
"end": 47,
"score": 0.9997919797897339,
"start": 43,
"tag": "NAME",
"value": "Mary"
}
] |
Task/String-interpolation--included-/Clojure/string-interpolation--included-.clj
|
LaudateCorpus1/RosettaCodeData
| 1 |
(let [little "little"]
(println (format "Mary had a %s lamb." little)))
|
1981
|
(let [little "little"]
(println (format "<NAME> had a %s lamb." little)))
| true |
(let [little "little"]
(println (format "PI:NAME:<NAME>END_PI had a %s lamb." little)))
|
[
{
"context": "(comment \n re-core, Copyright 2012 Ronen Narkis, narkisr.com\n Licensed under the Apache License,",
"end": 48,
"score": 0.9998814463615417,
"start": 36,
"tag": "NAME",
"value": "Ronen Narkis"
},
{
"context": "ment \n re-core, Copyright 2012 Ronen Narkis, narkisr.com\n Licensed under the Apache License,\n Version 2.",
"end": 61,
"score": 0.6349434852600098,
"start": 54,
"tag": "EMAIL",
"value": "isr.com"
}
] |
src/kvm/disks.clj
|
celestial-ops/core
| 1 |
(comment
re-core, Copyright 2012 Ronen Narkis, narkisr.com
Licensed under the Apache License,
Version 2.0 (the "License") you may not use this file except in compliance with the License.
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.)
(ns kvm.disks
(:require
[clojure.zip :as zip]
[kvm.common :refer (tree-edit)]
[clojure.data.xml :as xml :refer (element)]
[clojure.data.zip.xml :as zx])
)
(defn volumes [c pool path]
(map (fn [v] (.storageVolLookupByName pool v)) (.listVolumes pool)))
(defn find-volume [c path]
(let [pools (map #(.storagePoolLookupByName c %) (.listStoragePools c)) ]
(first (filter #(= (.getPath %) path) (mapcat (fn [pool] (volumes c pool path)) pools)))
))
(defn get-disks [root]
(map vector
(zx/xml-> root :devices :disk :target (zx/attr :dev))
(zx/xml-> root :devices :disk :source (zx/attr :file))
(zx/xml-> root :devices :disk :driver (zx/attr= :name "qemu") (zx/attr :type))))
(defn into-volume [c [dev file type]]
{:device dev :file file :type type :volume (find-volume c file)})
(defn clone-volume-xml [{:keys [volume type file] } name]
(element :volume {}
(element :name {} name)
(element :allocation {} "0")
(element :capacity {} (.capacity (.getInfo volume)))
(element :target {}
(element :format {:type type} nil)
(element :compat {} "1.1"))
(element :backingStore {}
(element :path {} file)
(element :format {:type type} nil))))
(defn clone-name [name idx]
(str name "-" (str idx) ".qcow2"))
(defn clear-volumes [c root]
(doseq [{:keys [volume]} (map (partial into-volume c) (get-disks root))]
(.delete volume 0)))
(defn clone-disks [c name root]
(let [volumes (map-indexed vector (map (partial into-volume c) (get-disks root)))]
(doall
(for [[idx {:keys [volume] :as v}] volumes :let [pool (.storagePoolLookupByVolume volume) new-name (clone-name name idx) ]]
(assoc v :volume (.storageVolCreateXML pool (xml/emit-str (clone-volume-xml v new-name)) 0))))))
(defn disk? [loc]
(= :disk (:tag (zip/node loc))))
(defn update-file [volumes node]
(let [target (first (filter (fn [element] (= :target (:tag element))) (:content node)))
{:keys [volume]} (first (filter (fn [{:keys [device]}] (= (get-in target [:attrs :dev]) device)) volumes)) ]
(assoc node :content
(map
(fn [{:keys [tag attrs] :as element}]
(if (= tag :source) (assoc element :attrs (assoc attrs :file (.getPath volume))) element)) (:content node)))))
(defn update-disks [root volumes]
(zip/xml-zip (tree-edit root disk? (partial update-file volumes))))
|
108203
|
(comment
re-core, Copyright 2012 <NAME>, nark<EMAIL>
Licensed under the Apache License,
Version 2.0 (the "License") you may not use this file except in compliance with the License.
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.)
(ns kvm.disks
(:require
[clojure.zip :as zip]
[kvm.common :refer (tree-edit)]
[clojure.data.xml :as xml :refer (element)]
[clojure.data.zip.xml :as zx])
)
(defn volumes [c pool path]
(map (fn [v] (.storageVolLookupByName pool v)) (.listVolumes pool)))
(defn find-volume [c path]
(let [pools (map #(.storagePoolLookupByName c %) (.listStoragePools c)) ]
(first (filter #(= (.getPath %) path) (mapcat (fn [pool] (volumes c pool path)) pools)))
))
(defn get-disks [root]
(map vector
(zx/xml-> root :devices :disk :target (zx/attr :dev))
(zx/xml-> root :devices :disk :source (zx/attr :file))
(zx/xml-> root :devices :disk :driver (zx/attr= :name "qemu") (zx/attr :type))))
(defn into-volume [c [dev file type]]
{:device dev :file file :type type :volume (find-volume c file)})
(defn clone-volume-xml [{:keys [volume type file] } name]
(element :volume {}
(element :name {} name)
(element :allocation {} "0")
(element :capacity {} (.capacity (.getInfo volume)))
(element :target {}
(element :format {:type type} nil)
(element :compat {} "1.1"))
(element :backingStore {}
(element :path {} file)
(element :format {:type type} nil))))
(defn clone-name [name idx]
(str name "-" (str idx) ".qcow2"))
(defn clear-volumes [c root]
(doseq [{:keys [volume]} (map (partial into-volume c) (get-disks root))]
(.delete volume 0)))
(defn clone-disks [c name root]
(let [volumes (map-indexed vector (map (partial into-volume c) (get-disks root)))]
(doall
(for [[idx {:keys [volume] :as v}] volumes :let [pool (.storagePoolLookupByVolume volume) new-name (clone-name name idx) ]]
(assoc v :volume (.storageVolCreateXML pool (xml/emit-str (clone-volume-xml v new-name)) 0))))))
(defn disk? [loc]
(= :disk (:tag (zip/node loc))))
(defn update-file [volumes node]
(let [target (first (filter (fn [element] (= :target (:tag element))) (:content node)))
{:keys [volume]} (first (filter (fn [{:keys [device]}] (= (get-in target [:attrs :dev]) device)) volumes)) ]
(assoc node :content
(map
(fn [{:keys [tag attrs] :as element}]
(if (= tag :source) (assoc element :attrs (assoc attrs :file (.getPath volume))) element)) (:content node)))))
(defn update-disks [root volumes]
(zip/xml-zip (tree-edit root disk? (partial update-file volumes))))
| true |
(comment
re-core, Copyright 2012 PI:NAME:<NAME>END_PI, narkPI:EMAIL:<EMAIL>END_PI
Licensed under the Apache License,
Version 2.0 (the "License") you may not use this file except in compliance with the License.
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.)
(ns kvm.disks
(:require
[clojure.zip :as zip]
[kvm.common :refer (tree-edit)]
[clojure.data.xml :as xml :refer (element)]
[clojure.data.zip.xml :as zx])
)
(defn volumes [c pool path]
(map (fn [v] (.storageVolLookupByName pool v)) (.listVolumes pool)))
(defn find-volume [c path]
(let [pools (map #(.storagePoolLookupByName c %) (.listStoragePools c)) ]
(first (filter #(= (.getPath %) path) (mapcat (fn [pool] (volumes c pool path)) pools)))
))
(defn get-disks [root]
(map vector
(zx/xml-> root :devices :disk :target (zx/attr :dev))
(zx/xml-> root :devices :disk :source (zx/attr :file))
(zx/xml-> root :devices :disk :driver (zx/attr= :name "qemu") (zx/attr :type))))
(defn into-volume [c [dev file type]]
{:device dev :file file :type type :volume (find-volume c file)})
(defn clone-volume-xml [{:keys [volume type file] } name]
(element :volume {}
(element :name {} name)
(element :allocation {} "0")
(element :capacity {} (.capacity (.getInfo volume)))
(element :target {}
(element :format {:type type} nil)
(element :compat {} "1.1"))
(element :backingStore {}
(element :path {} file)
(element :format {:type type} nil))))
(defn clone-name [name idx]
(str name "-" (str idx) ".qcow2"))
(defn clear-volumes [c root]
(doseq [{:keys [volume]} (map (partial into-volume c) (get-disks root))]
(.delete volume 0)))
(defn clone-disks [c name root]
(let [volumes (map-indexed vector (map (partial into-volume c) (get-disks root)))]
(doall
(for [[idx {:keys [volume] :as v}] volumes :let [pool (.storagePoolLookupByVolume volume) new-name (clone-name name idx) ]]
(assoc v :volume (.storageVolCreateXML pool (xml/emit-str (clone-volume-xml v new-name)) 0))))))
(defn disk? [loc]
(= :disk (:tag (zip/node loc))))
(defn update-file [volumes node]
(let [target (first (filter (fn [element] (= :target (:tag element))) (:content node)))
{:keys [volume]} (first (filter (fn [{:keys [device]}] (= (get-in target [:attrs :dev]) device)) volumes)) ]
(assoc node :content
(map
(fn [{:keys [tag attrs] :as element}]
(if (= tag :source) (assoc element :attrs (assoc attrs :file (.getPath volume))) element)) (:content node)))))
(defn update-disks [root volumes]
(zip/xml-zip (tree-edit root disk? (partial update-file volumes))))
|
[
{
"context": "g Convex\n and how to build dApps.\"\n\n {:author \"Adam Helinski\"}\n\n (:require [convex.cell :as $.cell]\n ",
"end": 418,
"score": 0.9991448521614075,
"start": 405,
"tag": "NAME",
"value": "Adam Helinski"
},
{
"context": "s\n ;; are key for building robust systems. Thanks Rich!\n ;;\n\n ;; A long...\n ;;\n ($.cell/long 42)\n\n ",
"end": 888,
"score": 0.9623348116874695,
"start": 884,
"tag": "NAME",
"value": "Rich"
}
] |
project/recipe/src/clj/main/convex/recipe/cell.clj
|
rosejn/convex.cljc
| 30 |
(ns convex.recipe.cell
"Cells represents anything that can be handled on the Convex network: data and other types such as functions.
They have been modeled very closely on Clojure and many of those types will be familiar to any Clojurist.
These examples show how to create cells and handle them. It is the very first step towards understanding Convex
and how to build dApps."
{:author "Adam Helinski"}
(:require [convex.cell :as $.cell]
[convex.clj :as $.clj]
[convex.read :as $.read]
[convex.std :as $.std]
[convex.write :as $.write]))
;;;;;;;;;;
(comment
;;
;; CREATING CELLS
;;
;;
;; Many types have been directly modeled on what we find in Clojure.
;;
;; This is because we know that data is king and that data-centric applications
;; are key for building robust systems. Thanks Rich!
;;
;; A long...
;;
($.cell/long 42)
;; A keyword...
;;
($.cell/keyword "foo")
;; A vector...
;;
($.cell/vector [($.cell/long 42)
($.cell/keyword "foo")])
;;
;; A few types are specific to Convex.
;;
;; Like account addresses...
;;
($.cell/address 42)
;; Or binary large objects...
;;
($.cell/blob (byte-array [1 2 3]))
;; Namespace `convex.cell` has functions for creating those cells.
;;
;; But usually it is easier using the `*` macro which converts Clojure data to Convex.
;;
($.cell/* (+ 2 2))
;; And `~` can be used to insert Convex types.
;;
($.cell/* (transfer ~($.cell/address 42)
500000))
;;
;; ENCODING
;;
;; Cells have been designed explicitly for fast storage and efficient sharing over the network.
;;
;; Let us suppose this vector.
;;
(def my-vector
($.cell/* [:a :b]))
;; Each cell can be encoded to an efficient, dense binary representation.
;;
($.cell/encoding my-vector)
;; A SHA256 hash can be computed over an encoding.
;;
;; Does not sound very exciting but very important for later!
;;
($.cell/hash my-vector)
;;
;; HANDLING CELLS
;;
;; Almost all core Clojure functions related to sequences work on Convex collections.
;;
(first ($.cell/* [:a :b]))
(map identity
($.cell/* [:a :b :c]))
(concat ($.cell/* [:a :b])
($.cell/* [:c :d]))
;; Other classic Clojure functions can be found in the `convex.std` namespace.
;;
($.std/conj ($.cell/* [:a :b])
($.cell/* :c))
($.std/get ($.cell/* {:a :b})
($.cell/* :a))
;; Sometimes it is useful converting a cell to a Clojure type via the `convex.clj` namespace.
;;
(-> ($.cell/address 42)
$.clj/address)
;; And in the rare where all of this is not enough, there is always Java interop.
;;
;; https://www.javadoc.io/doc/world.convex/convex-core/latest/convex/core/data/package-summary.html
;;
;; READER
;;
;; The Convex Lisp reader takes a string of code as input and outputs a cell.
;;
;; Convex Lisp is the language used for querying data from the network or submitting transactions, such as
;; creating smart contracts. It is almost a subset of Clojure with added capabilities.
;;
;; See [[convex.recipe.cvm]] for examples on how to compile and evaluate cells in order to execute code.
;;
;; Reading a small snippet of code.
;;
($.read/string "(+ 2 2)")
;; Most commonly used when fetching smart contracts written in file.
;;
;; For instance, this simple smart contract is used in `convex.recipe.client`.
;;
($.read/file "project/recipe/src/cvx/main/simple_contract.cvx")
;; Cells can be printed to Convex Lisp.
;;
($.write/string ($.cell/* (+ 2 2)))
)
|
52789
|
(ns convex.recipe.cell
"Cells represents anything that can be handled on the Convex network: data and other types such as functions.
They have been modeled very closely on Clojure and many of those types will be familiar to any Clojurist.
These examples show how to create cells and handle them. It is the very first step towards understanding Convex
and how to build dApps."
{:author "<NAME>"}
(:require [convex.cell :as $.cell]
[convex.clj :as $.clj]
[convex.read :as $.read]
[convex.std :as $.std]
[convex.write :as $.write]))
;;;;;;;;;;
(comment
;;
;; CREATING CELLS
;;
;;
;; Many types have been directly modeled on what we find in Clojure.
;;
;; This is because we know that data is king and that data-centric applications
;; are key for building robust systems. Thanks <NAME>!
;;
;; A long...
;;
($.cell/long 42)
;; A keyword...
;;
($.cell/keyword "foo")
;; A vector...
;;
($.cell/vector [($.cell/long 42)
($.cell/keyword "foo")])
;;
;; A few types are specific to Convex.
;;
;; Like account addresses...
;;
($.cell/address 42)
;; Or binary large objects...
;;
($.cell/blob (byte-array [1 2 3]))
;; Namespace `convex.cell` has functions for creating those cells.
;;
;; But usually it is easier using the `*` macro which converts Clojure data to Convex.
;;
($.cell/* (+ 2 2))
;; And `~` can be used to insert Convex types.
;;
($.cell/* (transfer ~($.cell/address 42)
500000))
;;
;; ENCODING
;;
;; Cells have been designed explicitly for fast storage and efficient sharing over the network.
;;
;; Let us suppose this vector.
;;
(def my-vector
($.cell/* [:a :b]))
;; Each cell can be encoded to an efficient, dense binary representation.
;;
($.cell/encoding my-vector)
;; A SHA256 hash can be computed over an encoding.
;;
;; Does not sound very exciting but very important for later!
;;
($.cell/hash my-vector)
;;
;; HANDLING CELLS
;;
;; Almost all core Clojure functions related to sequences work on Convex collections.
;;
(first ($.cell/* [:a :b]))
(map identity
($.cell/* [:a :b :c]))
(concat ($.cell/* [:a :b])
($.cell/* [:c :d]))
;; Other classic Clojure functions can be found in the `convex.std` namespace.
;;
($.std/conj ($.cell/* [:a :b])
($.cell/* :c))
($.std/get ($.cell/* {:a :b})
($.cell/* :a))
;; Sometimes it is useful converting a cell to a Clojure type via the `convex.clj` namespace.
;;
(-> ($.cell/address 42)
$.clj/address)
;; And in the rare where all of this is not enough, there is always Java interop.
;;
;; https://www.javadoc.io/doc/world.convex/convex-core/latest/convex/core/data/package-summary.html
;;
;; READER
;;
;; The Convex Lisp reader takes a string of code as input and outputs a cell.
;;
;; Convex Lisp is the language used for querying data from the network or submitting transactions, such as
;; creating smart contracts. It is almost a subset of Clojure with added capabilities.
;;
;; See [[convex.recipe.cvm]] for examples on how to compile and evaluate cells in order to execute code.
;;
;; Reading a small snippet of code.
;;
($.read/string "(+ 2 2)")
;; Most commonly used when fetching smart contracts written in file.
;;
;; For instance, this simple smart contract is used in `convex.recipe.client`.
;;
($.read/file "project/recipe/src/cvx/main/simple_contract.cvx")
;; Cells can be printed to Convex Lisp.
;;
($.write/string ($.cell/* (+ 2 2)))
)
| true |
(ns convex.recipe.cell
"Cells represents anything that can be handled on the Convex network: data and other types such as functions.
They have been modeled very closely on Clojure and many of those types will be familiar to any Clojurist.
These examples show how to create cells and handle them. It is the very first step towards understanding Convex
and how to build dApps."
{:author "PI:NAME:<NAME>END_PI"}
(:require [convex.cell :as $.cell]
[convex.clj :as $.clj]
[convex.read :as $.read]
[convex.std :as $.std]
[convex.write :as $.write]))
;;;;;;;;;;
(comment
;;
;; CREATING CELLS
;;
;;
;; Many types have been directly modeled on what we find in Clojure.
;;
;; This is because we know that data is king and that data-centric applications
;; are key for building robust systems. Thanks PI:NAME:<NAME>END_PI!
;;
;; A long...
;;
($.cell/long 42)
;; A keyword...
;;
($.cell/keyword "foo")
;; A vector...
;;
($.cell/vector [($.cell/long 42)
($.cell/keyword "foo")])
;;
;; A few types are specific to Convex.
;;
;; Like account addresses...
;;
($.cell/address 42)
;; Or binary large objects...
;;
($.cell/blob (byte-array [1 2 3]))
;; Namespace `convex.cell` has functions for creating those cells.
;;
;; But usually it is easier using the `*` macro which converts Clojure data to Convex.
;;
($.cell/* (+ 2 2))
;; And `~` can be used to insert Convex types.
;;
($.cell/* (transfer ~($.cell/address 42)
500000))
;;
;; ENCODING
;;
;; Cells have been designed explicitly for fast storage and efficient sharing over the network.
;;
;; Let us suppose this vector.
;;
(def my-vector
($.cell/* [:a :b]))
;; Each cell can be encoded to an efficient, dense binary representation.
;;
($.cell/encoding my-vector)
;; A SHA256 hash can be computed over an encoding.
;;
;; Does not sound very exciting but very important for later!
;;
($.cell/hash my-vector)
;;
;; HANDLING CELLS
;;
;; Almost all core Clojure functions related to sequences work on Convex collections.
;;
(first ($.cell/* [:a :b]))
(map identity
($.cell/* [:a :b :c]))
(concat ($.cell/* [:a :b])
($.cell/* [:c :d]))
;; Other classic Clojure functions can be found in the `convex.std` namespace.
;;
($.std/conj ($.cell/* [:a :b])
($.cell/* :c))
($.std/get ($.cell/* {:a :b})
($.cell/* :a))
;; Sometimes it is useful converting a cell to a Clojure type via the `convex.clj` namespace.
;;
(-> ($.cell/address 42)
$.clj/address)
;; And in the rare case where all of this is not enough, there is always Java interop.
;;
;; https://www.javadoc.io/doc/world.convex/convex-core/latest/convex/core/data/package-summary.html
;;
;; READER
;;
;; The Convex Lisp reader takes a string of code as input and outputs a cell.
;;
;; Convex Lisp is the language used for querying data from the network or submitting transactions, such as
;; creating smart contracts. It is almost a subset of Clojure with added capabilities.
;;
;; See [[convex.recipe.cvm]] for examples on how to compile and evaluate cells in order to execute code.
;;
;; Reading a small snippet of code.
;;
($.read/string "(+ 2 2)")
;; Most commonly used when fetching smart contracts stored in files.
;;
;; For instance, this simple smart contract is used in `convex.recipe.client`.
;;
($.read/file "project/recipe/src/cvx/main/simple_contract.cvx")
;; Cells can be printed to Convex Lisp.
;;
($.write/string ($.cell/* (+ 2 2)))
)
|
[
{
"context": "t ^:generative index-updates\n (let [field-keys #{:a :b :c :d}]\n (checking \"tree updates\" 50\n [[familie",
"end": 16868,
"score": 0.9679036140441895,
"start": 16858,
"tag": "KEY",
"value": "a :b :c :d"
}
] |
lib/core/test/merkle_db/index_test.clj
|
greglook/merkle-db
| 47 |
(ns merkle-db.index-test
(:require
[clojure.set :as set]
[clojure.spec.alpha :as s]
[clojure.string :as str]
[clojure.test :refer :all]
[clojure.test.check.generators :as gen]
[com.gfredericks.test.chuck.clojure-test :refer [checking]]
[com.gfredericks.test.chuck.generators :as tcgen]
[merkle-db.generators :as mdgen]
[merkle-db.graph :as graph]
[merkle-db.index :as index]
[merkle-db.key :as key]
[merkle-db.partition :as part]
[merkle-db.patch :as patch]
[merkle-db.record :as record]
[merkle-db.test-utils :as tu]
[merkledag.core :as mdag]
[merkledag.link :as link]
[merkledag.node :as node]))
;; Checks the branching-factor helpers and the private `split-limited`
;; partitioning function directly through their vars.
(deftest index-limits
  (testing "limits"
    (is (= index/default-fan-out (#'index/max-branches {})))
    (is (= 420 (#'index/max-branches {::index/fan-out 420})))
    (is (= 2 (#'index/min-branches {::index/fan-out 4})))
    (is (= 3 (#'index/min-branches {::index/fan-out 5}))))
  (testing "split-limited"
    ;; Deref the private var once and reuse the bound function; previously
    ;; the binding was unused and every call re-derefed the var.
    (let [split-limited @#'index/split-limited]
      (is (nil? (split-limited 3 [])))
      (is (= [[:a]]
             (split-limited 3 [:a])))
      (is (= [[:a :b :c]]
             (split-limited 3 [:a :b :c])))
      (is (= [[:a :b] [:c :d :e]]
             (split-limited 3 [:a :b :c :d :e])))
      (is (= [100 100 101 100 101]
             (->>
               (range 502)
               (split-limited 120)
               (map count)))))))
;; A
;; / \
;; B C
;; /|\ / \
;; 012 3 4
;; Shared tree parameters for the fixture pictured above: fan-out of 4
;; branches per index node, at most 5 records per partition, and one field
;; family :bc grouping the :b and :c fields.
(def params
  {::index/fan-out 4
   ::part/limit 5
   ::record/families {:bc #{:b :c}}})


(defn nth-key
  "Generate a key for index i."
  [i]
  ;; Key content is just the single byte i, so keys sort by index.
  (key/create [i]))
(defn nth-record
  "Generate a record for index i. Every record carries :a; multiples of
  three also carry :b and multiples of five also carry :c."
  [i]
  (merge {:a i}
         (when (zero? (mod i 3))
           {:b (- 100 i)})
         (when (zero? (mod i 5))
           {:c (+ 20 i)})))
(defn records
  "Return a sequence of the records at each of the given indexes."
  [& idxs]
  ;; One [key record] pair per index.
  (for [i idxs]
    [(nth-key i) (nth-record i)]))
(defn tombstones
  "Return a sequence of tombstone markers at each of the given indexes."
  [& idxs]
  ;; One [key tombstone] pair per index.
  (for [i idxs]
    [(nth-key i) ::patch/tombstone]))
(defn nth-child
  "Loads and returns the nth child of the given index node."
  [store node i]
  (graph/get-link! store node (nth (::index/children node) i)))


;; Builds the standard fixture tree pictured above: five partitions
;; part0..part4, grouped under index nodes idxB (part0-part2) and idxC
;; (part3-part4), with idxA as the root.  Anaphoric on purpose: the symbols
;; store, part0..part4, idxB, idxC, and idxA are captured for use in `body`.
(defmacro ^:private with-index-fixture
  [& body]
  `(let [store# (mdag/init-store :types graph/codec-types)
         ~'store store#
         ~'part0 (part/from-records store# params (records 4 5 6))
         ~'part1 (part/from-records store# params (records 7 8 10 11))
         ~'part2 (part/from-records store# params (records 12 13 14 17 18))
         ~'part3 (part/from-records store# params (records 21 23 24 25))
         ~'part4 (part/from-records store# params (records 30 31 32))
         ~'idxB (index/build-tree store# params [~'part0 ~'part1 ~'part2])
         ~'idxC (index/build-tree store# params [~'part3 ~'part4])
         ~'idxA (index/build-tree store# params [~'idxB ~'idxC])]
     ~@body))


;; Asserts the common invariants of an index node: spec validity, node type,
;; height, child count, record count, and first/last key (given as indexes
;; passed through nth-key).
;; NOTE(review): `~'valid?` expands to an unqualified `valid?` resolved at
;; the expansion site - presumably provided by merkle-db.test-utils; confirm.
(defmacro ^:private is-index
  [node height child-count record-count first-key-idx last-key-idx]
  `(let [node# ~node]
     (is (~'valid? ::index/node-data node#)
       "spec is valid")
     (is (= :merkle-db/index (:data/type node#))
       "has index data type")
     (is (= ~height (::index/height node#))
       "has expected height")
     (is (= ~child-count (count (::index/children node#)))
       "has expected number of children")
     (is (= ~record-count (::record/count node#))
       "contains expected number of records")
     (is (= (nth-key ~first-key-idx) (::record/first-key node#))
       "contains expected first key")
     (is (= (nth-key ~last-key-idx) (::record/last-key node#))
       "contains expected last key")))
;; Exercises the read API (read-all, read-batch, read-range) against the
;; fixture tree, including field projections and rejection of bad nodes.
(deftest index-reading
  (with-index-fixture
    (testing "root properties"
      (is (= :merkle-db/index (:data/type idxA)))
      (is (= 2 (::index/height idxA)))
      (is (= 2 (count (::index/children idxA))))
      (is (= 19 (::record/count idxA)))
      (is (= (nth-key 4) (::record/first-key idxA)))
      (is (= (nth-key 32) (::record/last-key idxA))))
    (testing "read-all"
      ;; Unrecognized node types are rejected.
      (is (thrown? Exception
            (index/read-all store {:data/type :foo} nil)))
      (is (= (records 4 5 6 7 8 10 11 12 13 14 17 18 21 23 24 25 30 31 32)
             (index/read-all store idxA nil)))
      ;; Projecting field :c drops records that lack that field entirely.
      (is (= [[(nth-key 5) {:c 25}]
              [(nth-key 10) {:c 30}]
              [(nth-key 25) {:c 45}]
              [(nth-key 30) {:c 50}]]
             (index/read-all store idxA #{:c}))))
    (testing "read-batch"
      (is (thrown? Exception
            (index/read-batch store {:data/type :foo} nil nil)))
      ;; Keys absent from the tree (80) are silently omitted from results.
      (is (= (records 5 8 23)
             (index/read-batch
               store idxA nil
               #{(nth-key 8) (nth-key 5) (nth-key 23) (nth-key 80)})))
      (is (= [[(nth-key 12) {:b 88}]
              [(nth-key 21) {:b 79}]]
             (index/read-batch
               store idxA #{:b}
               #{(nth-key 12) (nth-key 21) (nth-key 22)}))))
    (testing "read-range"
      (is (thrown? Exception
            (index/read-range store {:data/type :foo} nil nil nil)))
      ;; nil bounds mean an unbounded scan.
      (is (= (records 4 5 6 7 8 10 11 12 13 14 17 18 21 23 24 25 30 31 32)
             (index/read-range store idxA nil nil nil)))
      (is (= [[(nth-key 6) {:b 94}]
              [(nth-key 12) {:b 88}]
              [(nth-key 18) {:b 82}]
              [(nth-key 21) {:b 79}]
              [(nth-key 24) {:b 76}]
              [(nth-key 30) {:b 70}]]
             (index/read-range store idxA #{:b} nil nil)))
      (is (= (records 4 5 6 7 8 10)
             (index/read-range store idxA nil nil (nth-key 10))))
      (is (= (records 21 23 24 25 30 31 32)
             (index/read-range
               store idxA nil
               (nth-key 20) nil)))
      (is (= [[(nth-key 5) {:c 25}]
              [(nth-key 10) {:c 30}]
              [(nth-key 25) {:c 45}]
              [(nth-key 30) {:c 50}]]
             (index/read-range store idxA #{:c} (nth-key 5) (nth-key 30)))))))
;; Updating a nil root: no-op change sets return nil, and a net insertion
;; produces a single partition holding only the surviving records.
(deftest empty-root-updates
  (let [store (mdag/init-store :types graph/codec-types)
        root nil]
    (testing "bad input"
      (is (thrown? Exception (index/update-tree store params {:data/type :foo}
                                                (records 1)))))
    (testing "unchanged contents"
      (is (nil? (index/update-tree store params root [])))
      (is (nil? (index/update-tree store params root (tombstones 0)))))
    (testing "insertion"
      ;; Tombstones for keys that were never present are ignored; only
      ;; records 4 and 5 end up in the new partition.
      (let [root' (index/update-tree store params nil
                                     (concat (tombstones 1 2 3)
                                             (records 4 5)
                                             (tombstones 6 7 8)))]
        (is (= :merkle-db/partition (:data/type root')))
        (is (= 2 (::record/count root')))
        (is (= (nth-key 4) (::record/first-key root')))
        (is (= (nth-key 5) (::record/last-key root')))
        (is (= (records 4 5) (index/read-all store root' nil)))))))


;; Updating a single-partition root: deletions may shrink or remove it,
;; updates patch records in place, and inserting past ::part/limit turns
;; the partition into an index node.
(deftest partition-root-updates
  (let [store (mdag/init-store :types graph/codec-types)
        root (part/from-records store params (records 4 5 6))]
    (testing "unchanged contents"
      (is (identical? root (index/update-tree store params root [])))
      (is (identical? root (index/update-tree store params root (tombstones 1 2 7)))))
    (testing "full deletion"
      (is (nil? (index/update-tree store params root (tombstones 4 5 6)))))
    (testing "underflow"
      (let [root' (index/update-tree store params root (tombstones 4 6))]
        (is (= :merkle-db/partition (:data/type root')))
        (is (= 1 (::record/count root')))
        (is (= (nth-key 5) (::record/first-key root')))
        (is (= (nth-key 5) (::record/last-key root')))
        (is (= (records 5) (index/read-all store root' nil)))))
    (testing "update"
      ;; New keys 3 and 7 are inserted; key 5's record is replaced wholesale.
      (let [root' (index/update-tree store params root
                                     [[(nth-key 3) {:x 123}]
                                      [(nth-key 5) {:y 456}]
                                      [(nth-key 7) {:z 789}]])]
        (is (= :merkle-db/partition (:data/type root')))
        (is (= 5 (::record/count root')))
        (is (= (nth-key 3) (::record/first-key root')))
        (is (= (nth-key 7) (::record/last-key root')))
        (is (= [[(nth-key 3) {:x 123}]
                [(nth-key 4) {:a 4}]
                [(nth-key 5) {:y 456}]
                [(nth-key 6) {:a 6, :b 94}]
                [(nth-key 7) {:z 789}]]
               (index/read-all store root' nil)))))
    (testing "overflow"
      ;; Eight records exceed the partition limit of 5, forcing a split.
      (let [root' (index/update-tree store params root
                                     (records 1 2 3 8 9))]
        (is-index root' 1 2 8 1 9)
        (is (= (records 1 2 3 4 5 6 8 9)
               (index/read-all store root' nil)))))))


;; Change sets that match existing state must leave the tree untouched.
(deftest index-tree-noop-update
  (with-index-fixture
    (is (identical? idxA (index/update-tree store params idxA [])))
    ; TODO: identical? would be a stronger guarantee here
    (is (= idxA (index/update-tree store params idxA
                                   (tombstones 0))))
    (is (= idxA (index/update-tree store params idxA
                                   (records 5 10 14 23 30))))))
;; The deftests below each drive one structural rebalancing scenario of
;; index/update-tree against the A/B/C fixture tree.

;; Inserting seven records grows the left subtree to four partitions.
(deftest index-tree-insert-2-parts
  (with-index-fixture
    (let [root (index/update-tree store params idxA
                                  (records 0 1 2 3 9 15 16))]
      (is-index root 2 2 26 0 32)
      (is (= (records 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 21 23
                      24 25 30 31 32)
             (index/read-all store root nil)))
      (is-index (nth-child store root 0) 1 4 19 0 18)
      (is-index (nth-child store root 1) 1 2 7 21 32))))


;; Deleting all of part1 leaves idxB' with only part0 and part2; idxC is
;; shared unchanged.
(deftest index-tree-remove-part-from-B
  (with-index-fixture
    (let [root (index/update-tree store params idxA
                                  (tombstones 7 8 10 11))]
      (is-index root 2 2 15 4 32)
      (is (= (records 4 5 6 12 13 14 17 18 21 23 24 25 30 31 32)
             (index/read-all store root nil)))
      (let [lchild (nth-child store root 0)]
        (is-index lchild 1 2 8 4 18)
        (is (= part0 (nth-child store lchild 0)))
        (is (= part2 (nth-child store lchild 1))))
      (is (= idxC (nth-child store root 1))))))


;; Underflowing the first partition merges its remainder; part2 survives
;; as the last child.
(deftest index-tree-underflow-first-part-in-B
  (with-index-fixture
    (let [root (index/update-tree store params idxA
                                  (tombstones 6))]
      (is-index root 2 2 18 4 32)
      (is (= (records 4 5 7 8 10 11 12 13 14 17 18 21 23 24 25 30 31 32)
             (index/read-all store root nil)))
      (let [lchild (nth-child store root 0)]
        (is-index lchild 1 3 11 4 18)
        (is (= part2 (nth-child store lchild 2))))
      (is (= idxC (nth-child store root 1))))))


;; Underflowing the last partition keeps part0 shared as the first child.
(deftest index-tree-underflow-last-part-in-B
  (with-index-fixture
    (let [root (index/update-tree store params idxA
                                  (tombstones 12 14 18))]
      (is-index root 2 2 16 4 32)
      (is (= (records 4 5 6 7 8 10 11 13 17 21 23 24 25 30 31 32)
             (index/read-all store root nil)))
      (let [lchild (nth-child store root 0)]
        (is-index lchild 1 3 9 4 17)
        (is (= part0 (nth-child store lchild 0))))
      (is (= idxC (nth-child store root 1))))))


;; Shrinking B to a single partition collapses the tree to height 1 and
;; carries part1 forward next to C's partitions.
(deftest index-tree-carry-part-to-C
  (with-index-fixture
    (let [root (index/update-tree store params idxA
                                  (tombstones 4 5 6 12 13 14 17 18))]
      (is-index root 1 3 11 7 32)
      (is (= (records 7 8 10 11 21 23 24 25 30 31 32)
             (index/read-all store root nil)))
      (is (= part1 (nth-child store root 0)))
      (is (= part3 (nth-child store root 1)))
      (is (= part4 (nth-child store root 2))))))


;; As above, but the leftover records (8, 14) do not fill a partition and
;; are carried into C as loose records.
(deftest index-tree-carry-records-to-C
  (with-index-fixture
    (let [root (index/update-tree store params idxA
                                  (tombstones 4 5 6 7 10 11 12 13 17 18))]
      (is-index root 1 3 9 8 32)
      (is (= (records 8 14 21 23 24 25 30 31 32)
             (index/read-all store root nil)))
      (is (= part4 (nth-child store root 2))))))


;; Deleting everything under B promotes idxC to the root, unchanged.
(deftest index-tree-delete-subtree-B
  (with-index-fixture
    (let [root (index/update-tree store params idxA
                                  (tombstones 4 5 6 7 8 10 11 12 13 14 17 18))]
      (is (= idxC root)))))


;; Deleting everything under C promotes idxB to the root, unchanged.
(deftest index-tree-delete-subtree-C
  (with-index-fixture
    (let [root (index/update-tree store params idxA
                                  (tombstones 21 23 24 25 30 31 32))]
      (is (= idxB root)))))


;; Deleting all but part3 collapses the whole tree to that partition.
(deftest index-tree-delete-to-part
  (with-index-fixture
    (let [root (index/update-tree store params idxA
                                  (tombstones 4 5 6
                                              7 8 10 11
                                              12 13 14 17 18
                                              30 31 32))]
      (is (= part3 root)))))


;; Deleting down to two records yields a fresh (rewritten) partition root.
(deftest index-tree-delete-to-underflow
  (with-index-fixture
    (let [root (index/update-tree store params idxA
                                  (tombstones 4 5 6
                                              7 8 10 11
                                              12 13 14 17 18
                                              21 25
                                              30 31 32))]
      (is (= :merkle-db/partition (:data/type root)))
      (is (= 2 (::record/count root)))
      (is (= (nth-key 23) (::record/first-key root)))
      (is (= (nth-key 24) (::record/last-key root)))
      (is (= (records 23 24)
             (index/read-all store root nil))))))


;; Removing part3 carries part4 backward to join B's partitions.
(deftest index-tree-carry-back-part
  (with-index-fixture
    (let [root (index/update-tree store params idxA
                                  (tombstones 21 23 24 25))]
      (is-index root 1 4 15 4 32)
      (is (= part0 (nth-child store root 0)))
      (is (= part1 (nth-child store root 1)))
      (is (= part2 (nth-child store root 2)))
      (is (= part4 (nth-child store root 3)))
      (is (= (records 4 5 6 7 8 10 11 12 13 14 17 18 30 31 32)
             (index/read-all store root nil))))))


;; As above, but part4's remaining records (30, 32) are carried back loose.
(deftest index-tree-carry-back-records
  (with-index-fixture
    (let [root (index/update-tree store params idxA
                                  (tombstones 21 23 24 25 31))]
      (is-index root 1 4 14 4 32)
      (is (= (records 4 5 6 7 8 10 11 12 13 14 17 18 30 32)
             (index/read-all store root nil)))
      (is (= part0 (nth-child store root 0)))
      (is (= part1 (nth-child store root 1))))))


;; Deleting every record leaves no tree at all.
(deftest index-tree-delete-all
  (with-index-fixture
    (let [root (index/update-tree store params idxA
                                  (tombstones 4 5 6 7 8 10 11 12 13 14 17 18
                                              21 23 24 25 30 31 32))]
      (is (nil? root)))))


;; A three-level tree: when one side collapses, the surviving subtrees of
;; the other side are adopted directly by the new root.
(deftest index-tree-adopt-subtree
  (with-index-fixture
    ; ..W..
    ; / \
    ; A X
    ; / \ / \
    ; B C Y Z
    ; /|\ / \ / \ / \
    ; 012 3 4 5 6 7 8
    (let [part5 (part/from-records store params (records 35 36 37))
          part6 (part/from-records store params (records 40 42 44))
          part7 (part/from-records store params (records 45 46 49))
          part8 (part/from-records store params (records 51 52 53))
          idxY (index/build-tree store params [part5 part6])
          idxZ (index/build-tree store params [part7 part8])
          idxX (index/build-tree store params [idxY idxZ])
          idxW (index/build-tree store params [idxA idxX])]
      (testing "carry subtree forward"
        ; ..X'.
        ; / | \
        ; C Y Z
        ; / \ / \ / \
        ; 3 4 5 6 7 8
        (let [root (index/update-tree
                     store params idxW
                     (tombstones 4 5 6
                                 7 8 10 11
                                 12 13 14 17 18))]
          (is-index root 2 3 19 21 53)
          (is (= idxC (nth-child store root 0)))
          (is (= idxY (nth-child store root 1)))
          (is (= idxZ (nth-child store root 2)))))
      (testing "carry subtree backward"
        ; ..A'.
        ; / | \
        ; B C Z
        ; / \ / \ / \
        ; 012 3 4 7 8
        (let [root (index/update-tree
                     store params idxW
                     (tombstones 35 36 37 40 42 44))]
          (is-index root 2 3 25 4 53)
          (is (= idxB (nth-child store root 0)))
          (is (= idxC (nth-child store root 1)))
          (is (= idxZ (nth-child store root 2))))))))
;; ## Generative Tests
(defmacro timer
  "Evaluate `body`, print the elapsed wall-clock time in milliseconds under
  `label`, and return the body's result."
  [label & body]
  `(let [t0# (System/nanoTime)
         value# (do ~@body)]
     (printf "%s: %.2f ms\n" ~label (/ (- (System/nanoTime) t0#) 1e6))
     (flush)
     value#))
;; Disabled (reader-discarded with #_): the body calls validate/run!,
;; validate/check-asserts, and validate/validate-data-tree, but no
;; `validate` alias appears in this file's :require block, so enabling the
;; test as-is would not compile.  NOTE(review): confirm which namespace
;; provides those fns before re-enabling.  Also note `field-keys` is bound
;; but never used, and the two gen/fmap generators below are identical
;; copies (update keys vs. delete keys).
#_
(deftest ^:generative index-updates
  (let [field-keys #{:a :b :c :d}]
    (checking "tree updates" 50
      [[families fan-out part-limit [rkeys ukeys dkeys]]
       (gen/tuple
         (tcgen/sub-map {:ab #{:a :b}, :cd #{:c :d}})
         (gen/large-integer* {:min 4, :max 32})
         (gen/large-integer* {:min 5, :max 500})
         (gen/bind
           (gen/large-integer* {:min 10, :max 5000})
           (fn [n]
             (let [all-keys (map #(key/encode key/long-lexicoder %) (range n))]
               (gen/tuple
                 (tcgen/subsequence all-keys)
                 ;; Keep keys whose random frac exceeds 0.85 (~15%) to update.
                 (gen/fmap
                   (fn [fracs]
                     (->> (map list fracs all-keys)
                          (filter #(< 0.85 (first %)))
                          (map second)))
                   (apply gen/tuple (repeat (count all-keys) (gen/double* {:min 0.0, :max 1.0}))))
                 ;; Same construction again to pick keys to delete.
                 (gen/fmap
                   (fn [fracs]
                     (->> (map list fracs all-keys)
                          (filter #(< 0.85 (first %)))
                          (map second)))
                   (apply gen/tuple (repeat (count all-keys) (gen/double* {:min 0.0, :max 1.0})))))))))]
      (printf "\n===============\n")
      (printf "%d records, %d updates, %d deletions\n"
              (count rkeys) (count ukeys) (count dkeys))
      (flush)
      (let [store (mdag/init-store :types graph/codec-types)
            params {::record/families families
                    ::index/fan-out fan-out
                    ::part/limit part-limit}
            records (map-indexed #(vector %2 {:a %1}) rkeys)
            updates (map-indexed #(vector %2 {:b %1}) ukeys)
            deletions (map vector dkeys (repeat ::patch/tombstone))
            changes (patch/patch-seq deletions updates)
            root (let [parts (timer "partition records"
                               (part/partition-records store params records))]
                   (timer "build tree"
                     (index/build-tree store params parts)))
            root' (timer "update tree"
                    (index/update-tree store params root (vec changes)))
            expected-data (patch/patch-seq changes records)]
        (is (= expected-data (index/read-all store root' nil)))
        (timer "check-asserts"
          (validate/check-asserts
            (validate/run!
              store
              (::node/id (meta root'))
              validate/validate-data-tree
              (assoc params ::record/count (count expected-data)))))
        (printf "---------------\n")
        (flush)))))
|
30378
|
(ns merkle-db.index-test
(:require
[clojure.set :as set]
[clojure.spec.alpha :as s]
[clojure.string :as str]
[clojure.test :refer :all]
[clojure.test.check.generators :as gen]
[com.gfredericks.test.chuck.clojure-test :refer [checking]]
[com.gfredericks.test.chuck.generators :as tcgen]
[merkle-db.generators :as mdgen]
[merkle-db.graph :as graph]
[merkle-db.index :as index]
[merkle-db.key :as key]
[merkle-db.partition :as part]
[merkle-db.patch :as patch]
[merkle-db.record :as record]
[merkle-db.test-utils :as tu]
[merkledag.core :as mdag]
[merkledag.link :as link]
[merkledag.node :as node]))
;; Checks the branching-factor helpers and the private `split-limited`
;; partitioning function directly through their vars.
(deftest index-limits
  (testing "limits"
    (is (= index/default-fan-out (#'index/max-branches {})))
    (is (= 420 (#'index/max-branches {::index/fan-out 420})))
    (is (= 2 (#'index/min-branches {::index/fan-out 4})))
    (is (= 3 (#'index/min-branches {::index/fan-out 5}))))
  (testing "split-limited"
    ;; Deref the private var once and reuse the bound function; previously
    ;; the binding was unused and every call re-derefed the var.
    (let [split-limited @#'index/split-limited]
      (is (nil? (split-limited 3 [])))
      (is (= [[:a]]
             (split-limited 3 [:a])))
      (is (= [[:a :b :c]]
             (split-limited 3 [:a :b :c])))
      (is (= [[:a :b] [:c :d :e]]
             (split-limited 3 [:a :b :c :d :e])))
      (is (= [100 100 101 100 101]
             (->>
               (range 502)
               (split-limited 120)
               (map count)))))))
;; A
;; / \
;; B C
;; /|\ / \
;; 012 3 4
(def params
{::index/fan-out 4
::part/limit 5
::record/families {:bc #{:b :c}}})
(defn nth-key
"Generate a key for index i."
[i]
(key/create [i]))
(defn nth-record
"Generate a record for index i."
[i]
(cond-> {:a i}
(zero? (mod i 3)) (assoc :b (- 100 i))
(zero? (mod i 5)) (assoc :c (+ 20 i))))
(defn records
"Return a sequence of the records at each of the given indexes."
[& idxs]
(map (juxt nth-key nth-record) idxs))
(defn tombstones
"Return a sequence of tombstone markers at each of the given indexes."
[& idxs]
(map (juxt nth-key (constantly ::patch/tombstone)) idxs))
(defn nth-child
"Loads and returns the nth child of the given index node."
[store node i]
(graph/get-link! store node (nth (::index/children node) i)))
(defmacro ^:private with-index-fixture
[& body]
`(let [store# (mdag/init-store :types graph/codec-types)
~'store store#
~'part0 (part/from-records store# params (records 4 5 6))
~'part1 (part/from-records store# params (records 7 8 10 11))
~'part2 (part/from-records store# params (records 12 13 14 17 18))
~'part3 (part/from-records store# params (records 21 23 24 25))
~'part4 (part/from-records store# params (records 30 31 32))
~'idxB (index/build-tree store# params [~'part0 ~'part1 ~'part2])
~'idxC (index/build-tree store# params [~'part3 ~'part4])
~'idxA (index/build-tree store# params [~'idxB ~'idxC])]
~@body))
(defmacro ^:private is-index
[node height child-count record-count first-key-idx last-key-idx]
`(let [node# ~node]
(is (~'valid? ::index/node-data node#)
"spec is valid")
(is (= :merkle-db/index (:data/type node#))
"has index data type")
(is (= ~height (::index/height node#))
"has expected height")
(is (= ~child-count (count (::index/children node#)))
"has expected number of children")
(is (= ~record-count (::record/count node#))
"contains expected number of records")
(is (= (nth-key ~first-key-idx) (::record/first-key node#))
"contains expected first key")
(is (= (nth-key ~last-key-idx) (::record/last-key node#))
"contains expected last key")))
(deftest index-reading
(with-index-fixture
(testing "root properties"
(is (= :merkle-db/index (:data/type idxA)))
(is (= 2 (::index/height idxA)))
(is (= 2 (count (::index/children idxA))))
(is (= 19 (::record/count idxA)))
(is (= (nth-key 4) (::record/first-key idxA)))
(is (= (nth-key 32) (::record/last-key idxA))))
(testing "read-all"
(is (thrown? Exception
(index/read-all store {:data/type :foo} nil)))
(is (= (records 4 5 6 7 8 10 11 12 13 14 17 18 21 23 24 25 30 31 32)
(index/read-all store idxA nil)))
(is (= [[(nth-key 5) {:c 25}]
[(nth-key 10) {:c 30}]
[(nth-key 25) {:c 45}]
[(nth-key 30) {:c 50}]]
(index/read-all store idxA #{:c}))))
(testing "read-batch"
(is (thrown? Exception
(index/read-batch store {:data/type :foo} nil nil)))
(is (= (records 5 8 23)
(index/read-batch
store idxA nil
#{(nth-key 8) (nth-key 5) (nth-key 23) (nth-key 80)})))
(is (= [[(nth-key 12) {:b 88}]
[(nth-key 21) {:b 79}]]
(index/read-batch
store idxA #{:b}
#{(nth-key 12) (nth-key 21) (nth-key 22)}))))
(testing "read-range"
(is (thrown? Exception
(index/read-range store {:data/type :foo} nil nil nil)))
(is (= (records 4 5 6 7 8 10 11 12 13 14 17 18 21 23 24 25 30 31 32)
(index/read-range store idxA nil nil nil)))
(is (= [[(nth-key 6) {:b 94}]
[(nth-key 12) {:b 88}]
[(nth-key 18) {:b 82}]
[(nth-key 21) {:b 79}]
[(nth-key 24) {:b 76}]
[(nth-key 30) {:b 70}]]
(index/read-range store idxA #{:b} nil nil)))
(is (= (records 4 5 6 7 8 10)
(index/read-range store idxA nil nil (nth-key 10))))
(is (= (records 21 23 24 25 30 31 32)
(index/read-range
store idxA nil
(nth-key 20) nil)))
(is (= [[(nth-key 5) {:c 25}]
[(nth-key 10) {:c 30}]
[(nth-key 25) {:c 45}]
[(nth-key 30) {:c 50}]]
(index/read-range store idxA #{:c} (nth-key 5) (nth-key 30)))))))
(deftest empty-root-updates
(let [store (mdag/init-store :types graph/codec-types)
root nil]
(testing "bad input"
(is (thrown? Exception (index/update-tree store params {:data/type :foo}
(records 1)))))
(testing "unchanged contents"
(is (nil? (index/update-tree store params root [])))
(is (nil? (index/update-tree store params root (tombstones 0)))))
(testing "insertion"
(let [root' (index/update-tree store params nil
(concat (tombstones 1 2 3)
(records 4 5)
(tombstones 6 7 8)))]
(is (= :merkle-db/partition (:data/type root')))
(is (= 2 (::record/count root')))
(is (= (nth-key 4) (::record/first-key root')))
(is (= (nth-key 5) (::record/last-key root')))
(is (= (records 4 5) (index/read-all store root' nil)))))))
(deftest partition-root-updates
(let [store (mdag/init-store :types graph/codec-types)
root (part/from-records store params (records 4 5 6))]
(testing "unchanged contents"
(is (identical? root (index/update-tree store params root [])))
(is (identical? root (index/update-tree store params root (tombstones 1 2 7)))))
(testing "full deletion"
(is (nil? (index/update-tree store params root (tombstones 4 5 6)))))
(testing "underflow"
(let [root' (index/update-tree store params root (tombstones 4 6))]
(is (= :merkle-db/partition (:data/type root')))
(is (= 1 (::record/count root')))
(is (= (nth-key 5) (::record/first-key root')))
(is (= (nth-key 5) (::record/last-key root')))
(is (= (records 5) (index/read-all store root' nil)))))
(testing "update"
(let [root' (index/update-tree store params root
[[(nth-key 3) {:x 123}]
[(nth-key 5) {:y 456}]
[(nth-key 7) {:z 789}]])]
(is (= :merkle-db/partition (:data/type root')))
(is (= 5 (::record/count root')))
(is (= (nth-key 3) (::record/first-key root')))
(is (= (nth-key 7) (::record/last-key root')))
(is (= [[(nth-key 3) {:x 123}]
[(nth-key 4) {:a 4}]
[(nth-key 5) {:y 456}]
[(nth-key 6) {:a 6, :b 94}]
[(nth-key 7) {:z 789}]]
(index/read-all store root' nil)))))
(testing "overflow"
(let [root' (index/update-tree store params root
(records 1 2 3 8 9))]
(is-index root' 1 2 8 1 9)
(is (= (records 1 2 3 4 5 6 8 9)
(index/read-all store root' nil)))))))
(deftest index-tree-noop-update
(with-index-fixture
(is (identical? idxA (index/update-tree store params idxA [])))
; TODO: identical? would be a stronger guarantee here
(is (= idxA (index/update-tree store params idxA
(tombstones 0))))
(is (= idxA (index/update-tree store params idxA
(records 5 10 14 23 30))))))
(deftest index-tree-insert-2-parts
(with-index-fixture
(let [root (index/update-tree store params idxA
(records 0 1 2 3 9 15 16))]
(is-index root 2 2 26 0 32)
(is (= (records 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 21 23
24 25 30 31 32)
(index/read-all store root nil)))
(is-index (nth-child store root 0) 1 4 19 0 18)
(is-index (nth-child store root 1) 1 2 7 21 32))))
(deftest index-tree-remove-part-from-B
(with-index-fixture
(let [root (index/update-tree store params idxA
(tombstones 7 8 10 11))]
(is-index root 2 2 15 4 32)
(is (= (records 4 5 6 12 13 14 17 18 21 23 24 25 30 31 32)
(index/read-all store root nil)))
(let [lchild (nth-child store root 0)]
(is-index lchild 1 2 8 4 18)
(is (= part0 (nth-child store lchild 0)))
(is (= part2 (nth-child store lchild 1))))
(is (= idxC (nth-child store root 1))))))
(deftest index-tree-underflow-first-part-in-B
(with-index-fixture
(let [root (index/update-tree store params idxA
(tombstones 6))]
(is-index root 2 2 18 4 32)
(is (= (records 4 5 7 8 10 11 12 13 14 17 18 21 23 24 25 30 31 32)
(index/read-all store root nil)))
(let [lchild (nth-child store root 0)]
(is-index lchild 1 3 11 4 18)
(is (= part2 (nth-child store lchild 2))))
(is (= idxC (nth-child store root 1))))))
(deftest index-tree-underflow-last-part-in-B
(with-index-fixture
(let [root (index/update-tree store params idxA
(tombstones 12 14 18))]
(is-index root 2 2 16 4 32)
(is (= (records 4 5 6 7 8 10 11 13 17 21 23 24 25 30 31 32)
(index/read-all store root nil)))
(let [lchild (nth-child store root 0)]
(is-index lchild 1 3 9 4 17)
(is (= part0 (nth-child store lchild 0))))
(is (= idxC (nth-child store root 1))))))
(deftest index-tree-carry-part-to-C
(with-index-fixture
(let [root (index/update-tree store params idxA
(tombstones 4 5 6 12 13 14 17 18))]
(is-index root 1 3 11 7 32)
(is (= (records 7 8 10 11 21 23 24 25 30 31 32)
(index/read-all store root nil)))
(is (= part1 (nth-child store root 0)))
(is (= part3 (nth-child store root 1)))
(is (= part4 (nth-child store root 2))))))
(deftest index-tree-carry-records-to-C
(with-index-fixture
(let [root (index/update-tree store params idxA
(tombstones 4 5 6 7 10 11 12 13 17 18))]
(is-index root 1 3 9 8 32)
(is (= (records 8 14 21 23 24 25 30 31 32)
(index/read-all store root nil)))
(is (= part4 (nth-child store root 2))))))
(deftest index-tree-delete-subtree-B
(with-index-fixture
(let [root (index/update-tree store params idxA
(tombstones 4 5 6 7 8 10 11 12 13 14 17 18))]
(is (= idxC root)))))
(deftest index-tree-delete-subtree-C
(with-index-fixture
(let [root (index/update-tree store params idxA
(tombstones 21 23 24 25 30 31 32))]
(is (= idxB root)))))
(deftest index-tree-delete-to-part
(with-index-fixture
(let [root (index/update-tree store params idxA
(tombstones 4 5 6
7 8 10 11
12 13 14 17 18
30 31 32))]
(is (= part3 root)))))
(deftest index-tree-delete-to-underflow
(with-index-fixture
(let [root (index/update-tree store params idxA
(tombstones 4 5 6
7 8 10 11
12 13 14 17 18
21 25
30 31 32))]
(is (= :merkle-db/partition (:data/type root)))
(is (= 2 (::record/count root)))
(is (= (nth-key 23) (::record/first-key root)))
(is (= (nth-key 24) (::record/last-key root)))
(is (= (records 23 24)
(index/read-all store root nil))))))
(deftest index-tree-carry-back-part
(with-index-fixture
(let [root (index/update-tree store params idxA
(tombstones 21 23 24 25))]
(is-index root 1 4 15 4 32)
(is (= part0 (nth-child store root 0)))
(is (= part1 (nth-child store root 1)))
(is (= part2 (nth-child store root 2)))
(is (= part4 (nth-child store root 3)))
(is (= (records 4 5 6 7 8 10 11 12 13 14 17 18 30 31 32)
(index/read-all store root nil))))))
(deftest index-tree-carry-back-records
(with-index-fixture
(let [root (index/update-tree store params idxA
(tombstones 21 23 24 25 31))]
(is-index root 1 4 14 4 32)
(is (= (records 4 5 6 7 8 10 11 12 13 14 17 18 30 32)
(index/read-all store root nil)))
(is (= part0 (nth-child store root 0)))
(is (= part1 (nth-child store root 1))))))
(deftest index-tree-delete-all
(with-index-fixture
(let [root (index/update-tree store params idxA
(tombstones 4 5 6 7 8 10 11 12 13 14 17 18
21 23 24 25 30 31 32))]
(is (nil? root)))))
(deftest index-tree-adopt-subtree
(with-index-fixture
; ..W..
; / \
; A X
; / \ / \
; B C Y Z
; /|\ / \ / \ / \
; 012 3 4 5 6 7 8
(let [part5 (part/from-records store params (records 35 36 37))
part6 (part/from-records store params (records 40 42 44))
part7 (part/from-records store params (records 45 46 49))
part8 (part/from-records store params (records 51 52 53))
idxY (index/build-tree store params [part5 part6])
idxZ (index/build-tree store params [part7 part8])
idxX (index/build-tree store params [idxY idxZ])
idxW (index/build-tree store params [idxA idxX])]
(testing "carry subtree forward"
; ..X'.
; / | \
; C Y Z
; / \ / \ / \
; 3 4 5 6 7 8
(let [root (index/update-tree
store params idxW
(tombstones 4 5 6
7 8 10 11
12 13 14 17 18))]
(is-index root 2 3 19 21 53)
(is (= idxC (nth-child store root 0)))
(is (= idxY (nth-child store root 1)))
(is (= idxZ (nth-child store root 2)))))
(testing "carry subtree backward"
; ..A'.
; / | \
; B C Z
; / \ / \ / \
; 012 3 4 7 8
(let [root (index/update-tree
store params idxW
(tombstones 35 36 37 40 42 44))]
(is-index root 2 3 25 4 53)
(is (= idxB (nth-child store root 0)))
(is (= idxC (nth-child store root 1)))
(is (= idxZ (nth-child store root 2))))))))
;; ## Generative Tests
(defmacro timer
[label & body]
`(let [start# (System/nanoTime)
result# (do ~@body)
elapsed# (/ (- (System/nanoTime) start#) 1e6)]
(printf "%s: %.2f ms\n" ~label elapsed#)
(flush)
result#))
;; Disabled via the #_ reader discard: this generative stress test is kept
;; for reference but is not compiled into the suite.
;; NOTE(review): the body refers to a `validate` namespace alias that is not
;; present in this file's ns :require form, and `field-keys` is bound but
;; never used; the `#{:<KEY>}` literal looks like a redaction placeholder --
;; TODO restore the original field-key set and the validate require before
;; re-enabling this test.
#_
(deftest ^:generative index-updates
  (let [field-keys #{:<KEY>}]
    ;; Generates random record/update/deletion key sets plus tree parameters,
    ;; builds a tree, applies the patch, and checks the read-back contents.
    (checking "tree updates" 50
      [[families fan-out part-limit [rkeys ukeys dkeys]]
       (gen/tuple
         (tcgen/sub-map {:ab #{:a :b}, :cd #{:c :d}})
         (gen/large-integer* {:min 4, :max 32})
         (gen/large-integer* {:min 5, :max 500})
         (gen/bind
           (gen/large-integer* {:min 10, :max 5000})
           (fn [n]
             (let [all-keys (map #(key/encode key/long-lexicoder %) (range n))]
               (gen/tuple
                 (tcgen/subsequence all-keys)
                 ;; ~15% of the keys become updates...
                 (gen/fmap
                   (fn [fracs]
                     (->> (map list fracs all-keys)
                          (filter #(< 0.85 (first %)))
                          (map second)))
                   (apply gen/tuple (repeat (count all-keys) (gen/double* {:min 0.0, :max 1.0}))))
                 ;; ...and, independently, ~15% become deletions.
                 (gen/fmap
                   (fn [fracs]
                     (->> (map list fracs all-keys)
                          (filter #(< 0.85 (first %)))
                          (map second)))
                   (apply gen/tuple (repeat (count all-keys) (gen/double* {:min 0.0, :max 1.0})))))))))]
      (printf "\n===============\n")
      (printf "%d records, %d updates, %d deletions\n"
              (count rkeys) (count ukeys) (count dkeys))
      (flush)
      (let [store (mdag/init-store :types graph/codec-types)
            params {::record/families families
                    ::index/fan-out fan-out
                    ::part/limit part-limit}
            records (map-indexed #(vector %2 {:a %1}) rkeys)
            updates (map-indexed #(vector %2 {:b %1}) ukeys)
            deletions (map vector dkeys (repeat ::patch/tombstone))
            changes (patch/patch-seq deletions updates)
            root (let [parts (timer "partition records"
                               (part/partition-records store params records))]
                   (timer "build tree"
                     (index/build-tree store params parts)))
            root' (timer "update tree"
                    (index/update-tree store params root (vec changes)))
            expected-data (patch/patch-seq changes records)]
        ;; The updated tree must read back exactly the patched record set.
        (is (= expected-data (index/read-all store root' nil)))
        (timer "check-asserts"
          (validate/check-asserts
            (validate/run!
              store
              (::node/id (meta root'))
              validate/validate-data-tree
              (assoc params ::record/count (count expected-data)))))
        (printf "---------------\n")
        (flush)))))
| true |
(ns merkle-db.index-test
(:require
[clojure.set :as set]
[clojure.spec.alpha :as s]
[clojure.string :as str]
[clojure.test :refer :all]
[clojure.test.check.generators :as gen]
[com.gfredericks.test.chuck.clojure-test :refer [checking]]
[com.gfredericks.test.chuck.generators :as tcgen]
[merkle-db.generators :as mdgen]
[merkle-db.graph :as graph]
[merkle-db.index :as index]
[merkle-db.key :as key]
[merkle-db.partition :as part]
[merkle-db.patch :as patch]
[merkle-db.record :as record]
[merkle-db.test-utils :as tu]
[merkledag.core :as mdag]
[merkledag.link :as link]
[merkledag.node :as node]))
(deftest index-limits
  ;; Exercises private branching-factor helpers in merkle-db.index via
  ;; their vars.
  (testing "limits"
    (is (= index/default-fan-out (#'index/max-branches {})))
    (is (= 420 (#'index/max-branches {::index/fan-out 420})))
    (is (= 2 (#'index/min-branches {::index/fan-out 4})))
    (is (= 3 (#'index/min-branches {::index/fan-out 5}))))
  (testing "split-limited"
    ;; Deref the private var once and use the binding; the original bound
    ;; `split-limited` but then re-resolved `#'index/split-limited` on every
    ;; assertion, leaving the binding unused.
    (let [split-limited @#'index/split-limited]
      (is (nil? (split-limited 3 [])))
      (is (= [[:a]]
             (split-limited 3 [:a])))
      (is (= [[:a :b :c]]
             (split-limited 3 [:a :b :c])))
      (is (= [[:a :b] [:c :d :e]]
             (split-limited 3 [:a :b :c :d :e])))
      ;; 502 items with a limit of 120 split into five near-equal chunks.
      (is (= [100 100 101 100 101]
             (->>
               (range 502)
               (split-limited 120)
               (map count)))))))
;; A
;; / \
;; B C
;; /|\ / \
;; 012 3 4
(def params
  "Tree-shape parameters shared by the fixtures: fan-out of 4 children per
  index node, at most 5 records per partition, and a :bc field family."
  {::index/fan-out 4
   ::part/limit 5
   ::record/families {:bc #{:b :c}}})
(defn nth-key
  "Generate a key for index i."
  [i]
  (key/create [i]))
(defn nth-record
  "Generate a record for index i: always {:a i}, plus :b (- 100 i) when i
  is divisible by 3 and :c (+ 20 i) when i is divisible by 5."
  [i]
  (merge {:a i}
         (when (zero? (mod i 3)) {:b (- 100 i)})
         (when (zero? (mod i 5)) {:c (+ 20 i)})))
(defn records
  "Return a sequence of the records at each of the given indexes."
  [& idxs]
  ;; Each element is a [key record-map] pair suitable for update-tree.
  (map (juxt nth-key nth-record) idxs))
(defn tombstones
  "Return a sequence of tombstone markers at each of the given indexes."
  [& idxs]
  ;; Each element is a [key ::patch/tombstone] pair marking a deletion.
  (map (juxt nth-key (constantly ::patch/tombstone)) idxs))
(defn nth-child
  "Loads and returns the nth child of the given index node."
  [store node i]
  (graph/get-link! store node (nth (::index/children node) i)))
(defmacro ^:private with-index-fixture
  "Anaphoric fixture: evaluates body with `store`, partitions `part0`-`part4`,
  and index nodes `idxB` (over part0-part2), `idxC` (over part3-part4), and
  root `idxA` (over idxB and idxC) bound by name in the caller's scope.
  The record indexes used are 4-18 (left side) and 21-32 (right side)."
  [& body]
  `(let [store# (mdag/init-store :types graph/codec-types)
         ~'store store#
         ~'part0 (part/from-records store# params (records 4 5 6))
         ~'part1 (part/from-records store# params (records 7 8 10 11))
         ~'part2 (part/from-records store# params (records 12 13 14 17 18))
         ~'part3 (part/from-records store# params (records 21 23 24 25))
         ~'part4 (part/from-records store# params (records 30 31 32))
         ~'idxB (index/build-tree store# params [~'part0 ~'part1 ~'part2])
         ~'idxC (index/build-tree store# params [~'part3 ~'part4])
         ~'idxA (index/build-tree store# params [~'idxB ~'idxC])]
     ~@body))
(defmacro ^:private is-index
  "Assert the structural invariants of an index node: spec validity, data
  type, height, child count, record count, and first/last keys (given as
  record indexes for nth-key).
  NOTE(review): `~'valid?` expands unqualified and must resolve in the
  calling namespace -- presumably referred in from merkle-db.test-utils;
  confirm against the full ns form."
  [node height child-count record-count first-key-idx last-key-idx]
  `(let [node# ~node]
     (is (~'valid? ::index/node-data node#)
         "spec is valid")
     (is (= :merkle-db/index (:data/type node#))
         "has index data type")
     (is (= ~height (::index/height node#))
         "has expected height")
     (is (= ~child-count (count (::index/children node#)))
         "has expected number of children")
     (is (= ~record-count (::record/count node#))
         "contains expected number of records")
     (is (= (nth-key ~first-key-idx) (::record/first-key node#))
         "contains expected first key")
     (is (= (nth-key ~last-key-idx) (::record/last-key node#))
         "contains expected last key")))
(deftest index-reading
  ;; Read-path coverage over the fixture tree: node metadata, full scans,
  ;; batch lookups, and range scans, each with and without a field filter.
  (with-index-fixture
    (testing "root properties"
      (is (= :merkle-db/index (:data/type idxA)))
      (is (= 2 (::index/height idxA)))
      (is (= 2 (count (::index/children idxA))))
      (is (= 19 (::record/count idxA)))
      (is (= (nth-key 4) (::record/first-key idxA)))
      (is (= (nth-key 32) (::record/last-key idxA))))
    (testing "read-all"
      ;; A non-index node is rejected outright.
      (is (thrown? Exception
            (index/read-all store {:data/type :foo} nil)))
      (is (= (records 4 5 6 7 8 10 11 12 13 14 17 18 21 23 24 25 30 31 32)
             (index/read-all store idxA nil)))
      ;; Field filter drops records with no matching fields entirely.
      (is (= [[(nth-key 5) {:c 25}]
              [(nth-key 10) {:c 30}]
              [(nth-key 25) {:c 45}]
              [(nth-key 30) {:c 50}]]
             (index/read-all store idxA #{:c}))))
    (testing "read-batch"
      (is (thrown? Exception
            (index/read-batch store {:data/type :foo} nil nil)))
      ;; Missing keys (80) are silently skipped.
      (is (= (records 5 8 23)
             (index/read-batch
               store idxA nil
               #{(nth-key 8) (nth-key 5) (nth-key 23) (nth-key 80)})))
      (is (= [[(nth-key 12) {:b 88}]
              [(nth-key 21) {:b 79}]]
             (index/read-batch
               store idxA #{:b}
               #{(nth-key 12) (nth-key 21) (nth-key 22)}))))
    (testing "read-range"
      (is (thrown? Exception
            (index/read-range store {:data/type :foo} nil nil nil)))
      ;; nil bounds mean an unbounded scan in that direction.
      (is (= (records 4 5 6 7 8 10 11 12 13 14 17 18 21 23 24 25 30 31 32)
             (index/read-range store idxA nil nil nil)))
      (is (= [[(nth-key 6) {:b 94}]
              [(nth-key 12) {:b 88}]
              [(nth-key 18) {:b 82}]
              [(nth-key 21) {:b 79}]
              [(nth-key 24) {:b 76}]
              [(nth-key 30) {:b 70}]]
             (index/read-range store idxA #{:b} nil nil)))
      (is (= (records 4 5 6 7 8 10)
             (index/read-range store idxA nil nil (nth-key 10))))
      (is (= (records 21 23 24 25 30 31 32)
             (index/read-range
               store idxA nil
               (nth-key 20) nil)))
      (is (= [[(nth-key 5) {:c 25}]
              [(nth-key 10) {:c 30}]
              [(nth-key 25) {:c 45}]
              [(nth-key 30) {:c 50}]]
             (index/read-range store idxA #{:c} (nth-key 5) (nth-key 30)))))))
(deftest empty-root-updates
  ;; update-tree against a nil root: invalid node types throw, pure
  ;; tombstones are a no-op, and insertions build a fresh partition.
  (let [store (mdag/init-store :types graph/codec-types)
        root nil]
    (testing "bad input"
      (is (thrown? Exception (index/update-tree store params {:data/type :foo}
                                                (records 1)))))
    (testing "unchanged contents"
      (is (nil? (index/update-tree store params root [])))
      (is (nil? (index/update-tree store params root (tombstones 0)))))
    (testing "insertion"
      ;; Tombstones for absent keys are ignored; only records 4 and 5 land.
      (let [root' (index/update-tree store params nil
                                     (concat (tombstones 1 2 3)
                                             (records 4 5)
                                             (tombstones 6 7 8)))]
        (is (= :merkle-db/partition (:data/type root')))
        (is (= 2 (::record/count root')))
        (is (= (nth-key 4) (::record/first-key root')))
        (is (= (nth-key 5) (::record/last-key root')))
        (is (= (records 4 5) (index/read-all store root' nil)))))))
(deftest partition-root-updates
  ;; update-tree when the root is a single partition: identity on no-ops,
  ;; nil on full deletion, and partition/index results on smaller changes.
  (let [store (mdag/init-store :types graph/codec-types)
        root (part/from-records store params (records 4 5 6))]
    (testing "unchanged contents"
      (is (identical? root (index/update-tree store params root [])))
      (is (identical? root (index/update-tree store params root (tombstones 1 2 7)))))
    (testing "full deletion"
      (is (nil? (index/update-tree store params root (tombstones 4 5 6)))))
    (testing "underflow"
      (let [root' (index/update-tree store params root (tombstones 4 6))]
        (is (= :merkle-db/partition (:data/type root')))
        (is (= 1 (::record/count root')))
        (is (= (nth-key 5) (::record/first-key root')))
        (is (= (nth-key 5) (::record/last-key root')))
        (is (= (records 5) (index/read-all store root' nil)))))
    (testing "update"
      ;; Inserted records replace existing ones wholesale at the same key.
      (let [root' (index/update-tree store params root
                                     [[(nth-key 3) {:x 123}]
                                      [(nth-key 5) {:y 456}]
                                      [(nth-key 7) {:z 789}]])]
        (is (= :merkle-db/partition (:data/type root')))
        (is (= 5 (::record/count root')))
        (is (= (nth-key 3) (::record/first-key root')))
        (is (= (nth-key 7) (::record/last-key root')))
        (is (= [[(nth-key 3) {:x 123}]
                [(nth-key 4) {:a 4}]
                [(nth-key 5) {:y 456}]
                [(nth-key 6) {:a 6, :b 94}]
                [(nth-key 7) {:z 789}]]
               (index/read-all store root' nil)))))
    (testing "overflow"
      ;; Exceeding the partition limit promotes the root to an index node.
      (let [root' (index/update-tree store params root
                                     (records 1 2 3 8 9))]
        (is-index root' 1 2 8 1 9)
        (is (= (records 1 2 3 4 5 6 8 9)
               (index/read-all store root' nil)))))))
(deftest index-tree-noop-update
  ;; Changes that do not alter stored data must leave the root unchanged.
  (with-index-fixture
    (is (identical? idxA (index/update-tree store params idxA [])))
    ; TODO: identical? would be a stronger guarantee here
    (is (= idxA (index/update-tree store params idxA
                                   (tombstones 0))))
    (is (= idxA (index/update-tree store params idxA
                                   (records 5 10 14 23 30))))))
(deftest index-tree-insert-2-parts
  ;; Inserting records below and between the fixture keys grows the left
  ;; subtree to four partitions while the right side is untouched.
  (with-index-fixture
    (let [root (index/update-tree store params idxA
                                  (records 0 1 2 3 9 15 16))]
      (is-index root 2 2 26 0 32)
      (is (= (records 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 21 23
                      24 25 30 31 32)
             (index/read-all store root nil)))
      (is-index (nth-child store root 0) 1 4 19 0 18)
      (is-index (nth-child store root 1) 1 2 7 21 32))))
(deftest index-tree-remove-part-from-B
  ;; Deleting all of part1 removes one child from subtree B; part0 and
  ;; part2 are reused node-for-node and idxC is untouched.
  (with-index-fixture
    (let [root (index/update-tree store params idxA
                                  (tombstones 7 8 10 11))]
      (is-index root 2 2 15 4 32)
      (is (= (records 4 5 6 12 13 14 17 18 21 23 24 25 30 31 32)
             (index/read-all store root nil)))
      (let [lchild (nth-child store root 0)]
        (is-index lchild 1 2 8 4 18)
        (is (= part0 (nth-child store lchild 0)))
        (is (= part2 (nth-child store lchild 1))))
      (is (= idxC (nth-child store root 1))))))
(deftest index-tree-underflow-first-part-in-B
  ;; Deleting record 6 underflows part0; its remnant merges forward,
  ;; leaving part2 as the (rewritten) subtree's last child.
  (with-index-fixture
    (let [root (index/update-tree store params idxA
                                  (tombstones 6))]
      (is-index root 2 2 18 4 32)
      (is (= (records 4 5 7 8 10 11 12 13 14 17 18 21 23 24 25 30 31 32)
             (index/read-all store root nil)))
      (let [lchild (nth-child store root 0)]
        (is-index lchild 1 3 11 4 18)
        (is (= part2 (nth-child store lchild 2))))
      (is (= idxC (nth-child store root 1))))))
(deftest index-tree-underflow-last-part-in-B
  ;; Deleting most of part2 underflows it; the remnant merges backward,
  ;; leaving part0 as the subtree's first child.
  (with-index-fixture
    (let [root (index/update-tree store params idxA
                                  (tombstones 12 14 18))]
      (is-index root 2 2 16 4 32)
      (is (= (records 4 5 6 7 8 10 11 13 17 21 23 24 25 30 31 32)
             (index/read-all store root nil)))
      (let [lchild (nth-child store root 0)]
        (is-index lchild 1 3 9 4 17)
        (is (= part0 (nth-child store lchild 0))))
      (is (= idxC (nth-child store root 1))))))
(deftest index-tree-carry-part-to-C
  ;; Deleting part0 and part2 leaves only part1 on the left; it is carried
  ;; into the surviving index, collapsing the tree to one level.
  (with-index-fixture
    (let [root (index/update-tree store params idxA
                                  (tombstones 4 5 6 12 13 14 17 18))]
      (is-index root 1 3 11 7 32)
      (is (= (records 7 8 10 11 21 23 24 25 30 31 32)
             (index/read-all store root nil)))
      (is (= part1 (nth-child store root 0)))
      (is (= part3 (nth-child store root 1)))
      (is (= part4 (nth-child store root 2))))))
(deftest index-tree-carry-records-to-C
  ;; The left side is reduced to two loose records (8 and 14), which are
  ;; carried forward into the surviving index; part4 is reused unchanged.
  (with-index-fixture
    (let [root (index/update-tree store params idxA
                                  (tombstones 4 5 6 7 10 11 12 13 17 18))]
      (is-index root 1 3 9 8 32)
      (is (= (records 8 14 21 23 24 25 30 31 32)
             (index/read-all store root nil)))
      (is (= part4 (nth-child store root 2))))))
(deftest index-tree-delete-subtree-B
  ;; Deleting everything under B leaves idxC as the new root, unchanged.
  (with-index-fixture
    (let [root (index/update-tree store params idxA
                                  (tombstones 4 5 6 7 8 10 11 12 13 14 17 18))]
      (is (= idxC root)))))
(deftest index-tree-delete-subtree-C
  ;; Deleting everything under C leaves idxB as the new root, unchanged.
  (with-index-fixture
    (let [root (index/update-tree store params idxA
                                  (tombstones 21 23 24 25 30 31 32))]
      (is (= idxB root)))))
(deftest index-tree-delete-to-part
  ;; Only part3's records survive, so the whole tree collapses to that
  ;; single, unchanged partition node.
  (with-index-fixture
    (let [root (index/update-tree store params idxA
                                  (tombstones 4 5 6
                                              7 8 10 11
                                              12 13 14 17 18
                                              30 31 32))]
      (is (= part3 root)))))
(deftest index-tree-delete-to-underflow
  ;; Only two records survive, fewer than any existing partition held, so
  ;; the result is a freshly-written partition root.
  (with-index-fixture
    (let [root (index/update-tree store params idxA
                                  (tombstones 4 5 6
                                              7 8 10 11
                                              12 13 14 17 18
                                              21 25
                                              30 31 32))]
      (is (= :merkle-db/partition (:data/type root)))
      (is (= 2 (::record/count root)))
      (is (= (nth-key 23) (::record/first-key root)))
      (is (= (nth-key 24) (::record/last-key root)))
      (is (= (records 23 24)
             (index/read-all store root nil))))))
(deftest index-tree-carry-back-part
  ;; Deleting part3 leaves part4 alone on the right; it is carried
  ;; backward, collapsing the tree to one index over four partitions.
  (with-index-fixture
    (let [root (index/update-tree store params idxA
                                  (tombstones 21 23 24 25))]
      (is-index root 1 4 15 4 32)
      (is (= part0 (nth-child store root 0)))
      (is (= part1 (nth-child store root 1)))
      (is (= part2 (nth-child store root 2)))
      (is (= part4 (nth-child store root 3)))
      (is (= (records 4 5 6 7 8 10 11 12 13 14 17 18 30 31 32)
             (index/read-all store root nil))))))
(deftest index-tree-carry-back-records
(with-index-fixture
(let [root (index/update-tree store params idxA
(tombstones 21 23 24 25 31))]
(is-index root 1 4 14 4 32)
(is (= (records 4 5 6 7 8 10 11 12 13 14 17 18 30 32)
(index/read-all store root nil)))
(is (= part0 (nth-child store root 0)))
(is (= part1 (nth-child store root 1))))))
(deftest index-tree-delete-all
(with-index-fixture
(let [root (index/update-tree store params idxA
(tombstones 4 5 6 7 8 10 11 12 13 14 17 18
21 23 24 25 30 31 32))]
(is (nil? root)))))
(deftest index-tree-adopt-subtree
(with-index-fixture
; ..W..
; / \
; A X
; / \ / \
; B C Y Z
; /|\ / \ / \ / \
; 012 3 4 5 6 7 8
(let [part5 (part/from-records store params (records 35 36 37))
part6 (part/from-records store params (records 40 42 44))
part7 (part/from-records store params (records 45 46 49))
part8 (part/from-records store params (records 51 52 53))
idxY (index/build-tree store params [part5 part6])
idxZ (index/build-tree store params [part7 part8])
idxX (index/build-tree store params [idxY idxZ])
idxW (index/build-tree store params [idxA idxX])]
(testing "carry subtree forward"
; ..X'.
; / | \
; C Y Z
; / \ / \ / \
; 3 4 5 6 7 8
(let [root (index/update-tree
store params idxW
(tombstones 4 5 6
7 8 10 11
12 13 14 17 18))]
(is-index root 2 3 19 21 53)
(is (= idxC (nth-child store root 0)))
(is (= idxY (nth-child store root 1)))
(is (= idxZ (nth-child store root 2)))))
(testing "carry subtree backward"
; ..A'.
; / | \
; B C Z
; / \ / \ / \
; 012 3 4 7 8
(let [root (index/update-tree
store params idxW
(tombstones 35 36 37 40 42 44))]
(is-index root 2 3 25 4 53)
(is (= idxB (nth-child store root 0)))
(is (= idxC (nth-child store root 1)))
(is (= idxZ (nth-child store root 2))))))))
;; ## Generative Tests
(defmacro timer
[label & body]
`(let [start# (System/nanoTime)
result# (do ~@body)
elapsed# (/ (- (System/nanoTime) start#) 1e6)]
(printf "%s: %.2f ms\n" ~label elapsed#)
(flush)
result#))
#_
(deftest ^:generative index-updates
(let [field-keys #{:PI:KEY:<KEY>END_PI}]
(checking "tree updates" 50
[[families fan-out part-limit [rkeys ukeys dkeys]]
(gen/tuple
(tcgen/sub-map {:ab #{:a :b}, :cd #{:c :d}})
(gen/large-integer* {:min 4, :max 32})
(gen/large-integer* {:min 5, :max 500})
(gen/bind
(gen/large-integer* {:min 10, :max 5000})
(fn [n]
(let [all-keys (map #(key/encode key/long-lexicoder %) (range n))]
(gen/tuple
(tcgen/subsequence all-keys)
(gen/fmap
(fn [fracs]
(->> (map list fracs all-keys)
(filter #(< 0.85 (first %)))
(map second)))
(apply gen/tuple (repeat (count all-keys) (gen/double* {:min 0.0, :max 1.0}))))
(gen/fmap
(fn [fracs]
(->> (map list fracs all-keys)
(filter #(< 0.85 (first %)))
(map second)))
(apply gen/tuple (repeat (count all-keys) (gen/double* {:min 0.0, :max 1.0})))))))))]
(printf "\n===============\n")
(printf "%d records, %d updates, %d deletions\n"
(count rkeys) (count ukeys) (count dkeys))
(flush)
(let [store (mdag/init-store :types graph/codec-types)
params {::record/families families
::index/fan-out fan-out
::part/limit part-limit}
records (map-indexed #(vector %2 {:a %1}) rkeys)
updates (map-indexed #(vector %2 {:b %1}) ukeys)
deletions (map vector dkeys (repeat ::patch/tombstone))
changes (patch/patch-seq deletions updates)
root (let [parts (timer "partition records"
(part/partition-records store params records))]
(timer "build tree"
(index/build-tree store params parts)))
root' (timer "update tree"
(index/update-tree store params root (vec changes)))
expected-data (patch/patch-seq changes records)]
(is (= expected-data (index/read-all store root' nil)))
(timer "check-asserts"
(validate/check-asserts
(validate/run!
store
(::node/id (meta root'))
validate/validate-data-tree
(assoc params ::record/count (count expected-data)))))
(printf "---------------\n")
(flush)))))
|
[
{
"context": "ill-be-performed\n {:name \"My Field\"\n :database-type nil\n ",
"end": 2342,
"score": 0.8081822395324707,
"start": 2334,
"tag": "NAME",
"value": "My Field"
},
{
"context": "base-position 0}\n {:name \"My Field\"\n :database-type \"Integer\"\n ",
"end": 2502,
"score": 0.7659721374511719,
"start": 2494,
"tag": "NAME",
"value": "My Field"
},
{
"context": "ill-be-performed\n {:name \"My Field\"\n :database-type nil\n ",
"end": 2917,
"score": 0.8295401930809021,
"start": 2909,
"tag": "NAME",
"value": "My Field"
},
{
"context": "base-position 0}\n {:name \"My Field\"\n :database-type \"NULL\"\n ",
"end": 3077,
"score": 0.6857817769050598,
"start": 3069,
"tag": "NAME",
"value": "My Field"
},
{
"context": "ill-be-performed\n {:name \"My Field\"\n :database-type \"Integer\"\n ",
"end": 3441,
"score": 0.9327347278594971,
"start": 3433,
"tag": "NAME",
"value": "My Field"
},
{
"context": "base-position 0}\n {:name \"My Field\"\n :database-type \"Integer\"\n ",
"end": 3643,
"score": 0.9448994994163513,
"start": 3635,
"tag": "NAME",
"value": "My Field"
}
] |
c#-metabase/test/metabase/sync/sync_metadata/fields/sync_metadata_test.clj
|
hanakhry/Crime_Admin
| 0 |
(ns metabase.sync.sync-metadata.fields.sync-metadata-test
(:require [clojure.test :refer :all]
[metabase.models.table :refer [Table]]
[metabase.sync.sync-metadata.fields.sync-metadata :as sync-metadata]
[toucan.db :as db]
[toucan.util.test :as tt]))
(defn- updates-that-will-be-performed
  "Run the private field-metadata sync step against a temporary Table and
  return the DB writes it would have issued, as [model-name id updates-map]
  triples, without touching the database. db/update! is redefined to record
  its arguments into an atom instead of executing."
  [new-metadata-from-sync metadata-in-application-db]
  (tt/with-temp Table [table]
    (let [update-operations (atom [])]
      (with-redefs [db/update! (fn [model id updates]
                                 (swap! update-operations conj [(name model) id updates]))]
        (#'sync-metadata/update-field-metadata-if-needed!
         table
         new-metadata-from-sync
         metadata-in-application-db)
        @update-operations))))
(deftest database-type-changed-test
  ;; A changed database-type must produce exactly one Field update.
  (testing "test that if database-type changes we will update it in the DB"
    (is (= [["Field" 1 {:database_type "Integer"}]]
           (updates-that-will-be-performed
            {:name "My Field"
             :database-type "Integer"
             :base-type :type/Integer
             :database-position 0}
            {:name "My Field"
             :database-type "NULL"
             :base-type :type/Integer
             :id 1
             :database-position 0})))))
(deftest no-op-test
  ;; Identical sync and application-DB metadata must issue zero writes.
  (testing "no changes should be made (i.e., no calls to `update!`) if nothing changes"
    (is (= []
           (updates-that-will-be-performed
            {:name "My Field"
             :database-type "Integer"
             :base-type :type/Integer
             :database-position 0}
            {:name "My Field"
             :database-type "Integer"
             :base-type :type/Integer
             :id 1
             :database-position 0})))))
(deftest nil-database-type-test
  ;; A nil database-type from sync is normalized to the string "NULL".
  (testing (str "test that if `database-type` comes back as `nil` in the metadata from the sync process, we won't try "
                "to set a `nil` value in the DB -- this is against the rules -- we should set `NULL` instead. See "
                "`TableMetadataField` schema.")
    (is (= [["Field" 1 {:database_type "NULL"}]]
           (updates-that-will-be-performed
            {:name "My Field"
             :database-type nil
             :base-type :type/Integer
             :database-position 0}
            {:name "My Field"
             :database-type "Integer"
             :base-type :type/Integer
             :database-position 0
             :id 1}))))
  ;; ...and nil vs. an already-stored "NULL" is treated as unchanged.
  (testing (str "if `database-type` comes back as `nil` and was already saved in application DB as `NULL` no changes "
                "should be made")
    (is (= []
           (updates-that-will-be-performed
            {:name "My Field"
             :database-type nil
             :base-type :type/Integer
             :database-position 0}
            {:name "My Field"
             :database-type "NULL"
             :base-type :type/Integer
             :id 1
             :database-position 0})))))
(deftest dont-overwrite-semantic-type-test
  ;; A nil semantic-type from sync must not clobber a stored semantic_type.
  (testing "We should not override non-nil `semantic_type`s"
    (is (= []
           (updates-that-will-be-performed
            {:name "My Field"
             :database-type "Integer"
             :base-type :type/Integer
             :semantic-type nil
             :database-position 0}
            {:name "My Field"
             :database-type "Integer"
             :base-type :type/Integer
             :semantic-type :type/Price
             :id 1
             :database-position 0})))))
|
79612
|
(ns metabase.sync.sync-metadata.fields.sync-metadata-test
(:require [clojure.test :refer :all]
[metabase.models.table :refer [Table]]
[metabase.sync.sync-metadata.fields.sync-metadata :as sync-metadata]
[toucan.db :as db]
[toucan.util.test :as tt]))
(defn- updates-that-will-be-performed [new-metadata-from-sync metadata-in-application-db]
(tt/with-temp Table [table]
(let [update-operations (atom [])]
(with-redefs [db/update! (fn [model id updates]
(swap! update-operations conj [(name model) id updates]))]
(#'sync-metadata/update-field-metadata-if-needed!
table
new-metadata-from-sync
metadata-in-application-db)
@update-operations))))
(deftest database-type-changed-test
(testing "test that if database-type changes we will update it in the DB"
(is (= [["Field" 1 {:database_type "Integer"}]]
(updates-that-will-be-performed
{:name "My Field"
:database-type "Integer"
:base-type :type/Integer
:database-position 0}
{:name "My Field"
:database-type "NULL"
:base-type :type/Integer
:id 1
:database-position 0})))))
(deftest no-op-test
(testing "no changes should be made (i.e., no calls to `update!`) if nothing changes"
(is (= []
(updates-that-will-be-performed
{:name "My Field"
:database-type "Integer"
:base-type :type/Integer
:database-position 0}
{:name "My Field"
:database-type "Integer"
:base-type :type/Integer
:id 1
:database-position 0})))))
(deftest nil-database-type-test
(testing (str "test that if `database-type` comes back as `nil` in the metadata from the sync process, we won't try "
"to set a `nil` value in the DB -- this is against the rules -- we should set `NULL` instead. See "
"`TableMetadataField` schema.")
(is (= [["Field" 1 {:database_type "NULL"}]]
(updates-that-will-be-performed
{:name "<NAME>"
:database-type nil
:base-type :type/Integer
:database-position 0}
{:name "<NAME>"
:database-type "Integer"
:base-type :type/Integer
:database-position 0
:id 1}))))
(testing (str "if `database-type` comes back as `nil` and was already saved in application DB as `NULL` no changes "
"should be made")
(is (= []
(updates-that-will-be-performed
{:name "<NAME>"
:database-type nil
:base-type :type/Integer
:database-position 0}
{:name "<NAME>"
:database-type "NULL"
:base-type :type/Integer
:id 1
:database-position 0})))))
(deftest dont-overwrite-semantic-type-test
(testing "We should not override non-nil `semantic_type`s"
(is (= []
(updates-that-will-be-performed
{:name "<NAME>"
:database-type "Integer"
:base-type :type/Integer
:semantic-type nil
:database-position 0}
{:name "<NAME>"
:database-type "Integer"
:base-type :type/Integer
:semantic-type :type/Price
:id 1
:database-position 0})))))
| true |
(ns metabase.sync.sync-metadata.fields.sync-metadata-test
(:require [clojure.test :refer :all]
[metabase.models.table :refer [Table]]
[metabase.sync.sync-metadata.fields.sync-metadata :as sync-metadata]
[toucan.db :as db]
[toucan.util.test :as tt]))
(defn- updates-that-will-be-performed [new-metadata-from-sync metadata-in-application-db]
(tt/with-temp Table [table]
(let [update-operations (atom [])]
(with-redefs [db/update! (fn [model id updates]
(swap! update-operations conj [(name model) id updates]))]
(#'sync-metadata/update-field-metadata-if-needed!
table
new-metadata-from-sync
metadata-in-application-db)
@update-operations))))
(deftest database-type-changed-test
(testing "test that if database-type changes we will update it in the DB"
(is (= [["Field" 1 {:database_type "Integer"}]]
(updates-that-will-be-performed
{:name "My Field"
:database-type "Integer"
:base-type :type/Integer
:database-position 0}
{:name "My Field"
:database-type "NULL"
:base-type :type/Integer
:id 1
:database-position 0})))))
(deftest no-op-test
(testing "no changes should be made (i.e., no calls to `update!`) if nothing changes"
(is (= []
(updates-that-will-be-performed
{:name "My Field"
:database-type "Integer"
:base-type :type/Integer
:database-position 0}
{:name "My Field"
:database-type "Integer"
:base-type :type/Integer
:id 1
:database-position 0})))))
(deftest nil-database-type-test
(testing (str "test that if `database-type` comes back as `nil` in the metadata from the sync process, we won't try "
"to set a `nil` value in the DB -- this is against the rules -- we should set `NULL` instead. See "
"`TableMetadataField` schema.")
(is (= [["Field" 1 {:database_type "NULL"}]]
(updates-that-will-be-performed
{:name "PI:NAME:<NAME>END_PI"
:database-type nil
:base-type :type/Integer
:database-position 0}
{:name "PI:NAME:<NAME>END_PI"
:database-type "Integer"
:base-type :type/Integer
:database-position 0
:id 1}))))
(testing (str "if `database-type` comes back as `nil` and was already saved in application DB as `NULL` no changes "
"should be made")
(is (= []
(updates-that-will-be-performed
{:name "PI:NAME:<NAME>END_PI"
:database-type nil
:base-type :type/Integer
:database-position 0}
{:name "PI:NAME:<NAME>END_PI"
:database-type "NULL"
:base-type :type/Integer
:id 1
:database-position 0})))))
(deftest dont-overwrite-semantic-type-test
(testing "We should not override non-nil `semantic_type`s"
(is (= []
(updates-that-will-be-performed
{:name "PI:NAME:<NAME>END_PI"
:database-type "Integer"
:base-type :type/Integer
:semantic-type nil
:database-position 0}
{:name "PI:NAME:<NAME>END_PI"
:database-type "Integer"
:base-type :type/Integer
:semantic-type :type/Price
:id 1
:database-position 0})))))
|
[
{
"context": ";\n; Copyright (c) Ludger Solbach. All rights reserved.\n; The use and distributio",
"end": 34,
"score": 0.9998500347137451,
"start": 20,
"tag": "NAME",
"value": "Ludger Solbach"
}
] |
data/train/clojure/61710df59a9081caeb4c210cdaf84a178cfec27acobertura.clj
|
harshp8l/deep-learning-lang-detection
| 84 |
;
; Copyright (c) Ludger Solbach. All rights reserved.
; The use and distribution terms for this software are covered by the
; Eclipse Public License 1.0 (http://opensource.org/licenses/eclipse-1.0.php)
; which can be found in the file license.txt at the root of this distribution.
; By using this software in any fashion, you are agreeing to be bound by
; the terms of this license.
; You must not remove this notice, or any other, from this software.
;
(ns baumeister.plugin.cobertura
(:use [clojure.java.io :exclude [delete-file]]
[clojure.string :only [join split]]
[org.soulspace.clj file]
[baumeister.utils ant-utils files log]
[baumeister.config registry]))
;; Register the Cobertura Ant tasks from the antlib descriptor bundled in
;; the cobertura jar, then wrap the ones this plugin uses as Clojure fns.
(ant-taskdef {:resource "net.sourceforge.cobertura.ant.antlib.xml"})
;(ant-taskdef {:name cobertura-instrument :classname "net.sourceforge.cobertura.ant.InstrumentTask"})
;(ant-taskdef {:name cobertura-report :classname "net.sourceforge.cobertura.ant.ReportTask"})
;(ant-taskdef {:name cobertura-merge :classname "net.sourceforge.cobertura.ant.MergeTask"})
;(ant-taskdef {:name cobertura-check :classname "net.sourceforge.cobertura.ant.CheckTask"})
(define-ant-task ant-cobertura-instrument cobertura-instrument)
(define-ant-task ant-cobertura-report cobertura-report)
(define-ant-type ant-ignore net.sourceforge.cobertura.ant.Ignore)
;; Teach the ant-utils nesting machinery how to attach an <ignore> element
;; (created via the task's own factory method) to cobertura-instrument.
(defmethod add-nested [:baumeister.utils.ant-utils/cobertura-instrument
                       net.sourceforge.cobertura.ant.Ignore]
  [_ task regex] (doto (.createIgnore task) (.setRegex regex)))
(defn instrument-task
  "Instrument the compiled (non-test) classes into the instrumented build
  dir, recording coverage data in the configured data file. log4j and antlr
  classes are excluded from instrumentation."
  []
  (ant-cobertura-instrument {:toDir (param :build.instrumented.dir)
                             :datafile (param :cobertura-data-file)}
                            (ant-ignore {:regex "org.apache.log4j.*"})
                            (ant-ignore {:regex "antlr.*"})
                            (ant-fileset {:dir (param :build-classes-dir)
                                          :includes "**/*.class" :excludes "**/*Test.class"})))
(defn report-task
  "Generate the XML coverage report from the recorded data file, resolving
  sources from the module's (colon-separated) source path."
  []
  (ant-cobertura-report {:destdir (param :cobertura-report-dir) :format "xml"
                         :datafile (param :cobertura-data-file)}
                        (ant-fileset {:dir (param :module-dir) :includes (join " " (split (source-path) #":"))})))
(defn cobertura-junit
  "Run the unit tests under the Cobertura-instrumented classpath via the
  Ant junit task so coverage data accumulates in the configured data file.
  Test outcomes are surfaced through the unittest.error Ant property."
  [class-path test-dir report-dir]
  (log :debug class-path test-dir)
  (ant-junit {:fork (param :junit-fork) :forkMode (param :junit-fork-mode)
              :maxmemory (param :junit-max-memory)
              :printsummary (param :junit-print-summary)
              :errorProperty "unittest.error"
              :failureProperty "unittest.error"}
             (ant-path class-path)
             (ant-variable {:key "base.dir" :value (param :module-dir)})
             ;; Cobertura's runtime reads this system property to find the
             ;; data file it should append coverage to.
             (ant-variable {:key "net.sourceforge.cobertura.datafile" :value (param :cobertura-data-file)})
             (ant-formatter {:type "brief" :useFile "false"})
             {:todir report-dir
              :fileset (ant-fileset {:dir test-dir :includes "**/*Test.class" :excludes "junit/**/*Test.class,**/Abstract*.class"})}))
(def cobertura-run-classpath
  "Classpath used when running tests against instrumented classes."
  (class-path [(param :build-cobertura-dir)]))
(defn cobertura-clean
  "Remove the cobertura build and report directories."
  []
  (log :info "cleaning cobertura...")
  (doseq [dir [(param :build-cobertura-dir) (param :cobertura-report-dir)]]
    (delete-file (as-file dir))))
(defn cobertura-init
  "Create the cobertura report and build directories."
  []
  (log :info "initializing cobertura...")
  (doseq [dir [(param :cobertura-report-dir) (param :build-cobertura-dir)]]
    (create-dir (as-file dir))))
(defn cobertura-pre-coverage
  "Pre-coverage step: instrument the compiled classes."
  []
  (log :info "pre-coverage cobertura...")
  (instrument-task))
(defn cobertura-coverage
  "Coverage step: run the unit tests on the instrumented classpath."
  []
  (log :info "coverage cobertura...")
  (cobertura-junit cobertura-run-classpath (param :build-unittest-classes-dir) (param :unittest-unittest-report-dir))) ; add unittest classpath
(defn cobertura-post-coverage
  "Post-coverage step: write the XML coverage report."
  []
  (log :info "post-coverage cobertura...")
  (report-task))
(def config
  "Baumeister plugin descriptor: parameter defaults plus the lifecycle
  steps this plugin hooks (clean, init, and the three coverage phases)."
  {:params [[:build-cobertura-dir "${build-dir}/cobertura"]
            [:cobertura-data-file "${build-cobertura-dir}/cobertura.ser"]
            [:cobertura-report-dir "${build-report-dir}/cobertura"]]
   :steps [[:clean cobertura-clean]
           [:init cobertura-init]
           [:pre-coverage cobertura-pre-coverage]
           [:coverage cobertura-coverage]
           [:post-coverage cobertura-post-coverage]]
   :functions []})
|
441
|
;
; Copyright (c) <NAME>. All rights reserved.
; The use and distribution terms for this software are covered by the
; Eclipse Public License 1.0 (http://opensource.org/licenses/eclipse-1.0.php)
; which can be found in the file license.txt at the root of this distribution.
; By using this software in any fashion, you are agreeing to be bound by
; the terms of this license.
; You must not remove this notice, or any other, from this software.
;
(ns baumeister.plugin.cobertura
(:use [clojure.java.io :exclude [delete-file]]
[clojure.string :only [join split]]
[org.soulspace.clj file]
[baumeister.utils ant-utils files log]
[baumeister.config registry]))
(ant-taskdef {:resource "net.sourceforge.cobertura.ant.antlib.xml"})
;(ant-taskdef {:name cobertura-instrument :classname "net.sourceforge.cobertura.ant.InstrumentTask"})
;(ant-taskdef {:name cobertura-report :classname "net.sourceforge.cobertura.ant.ReportTask"})
;(ant-taskdef {:name cobertura-merge :classname "net.sourceforge.cobertura.ant.MergeTask"})
;(ant-taskdef {:name cobertura-check :classname "net.sourceforge.cobertura.ant.CheckTask"})
(define-ant-task ant-cobertura-instrument cobertura-instrument)
(define-ant-task ant-cobertura-report cobertura-report)
(define-ant-type ant-ignore net.sourceforge.cobertura.ant.Ignore)
(defmethod add-nested [:baumeister.utils.ant-utils/cobertura-instrument
net.sourceforge.cobertura.ant.Ignore]
[_ task regex] (doto (.createIgnore task) (.setRegex regex)))
(defn instrument-task []
(ant-cobertura-instrument {:toDir (param :build.instrumented.dir)
:datafile (param :cobertura-data-file)}
(ant-ignore {:regex "org.apache.log4j.*"})
(ant-ignore {:regex "antlr.*"})
(ant-fileset {:dir (param :build-classes-dir)
:includes "**/*.class" :excludes "**/*Test.class"})))
;; Generate the XML coverage report from the recorded coverage data.
;; The fileset enumerates the module's source directories: (source-path)
;; yields a ':'-separated string that is split into space-joined include
;; patterns for the Ant fileset.
(defn report-task []
  (ant-cobertura-report {:destdir (param :cobertura-report-dir) :format "xml"
                         :datafile (param :cobertura-data-file)}
                        (ant-fileset {:dir (param :module-dir) :includes (join " " (split (source-path) #":"))})))
;; Run the unit tests via Ant's junit task against the instrumented classes.
;; The net.sourceforge.cobertura.datafile system property points the
;; instrumented code at the coverage data file to record into. Any test error
;; or failure sets the unittest.error Ant property.
(defn cobertura-junit [class-path test-dir report-dir]
  (log :debug class-path test-dir)
  (ant-junit {:fork (param :junit-fork) :forkMode (param :junit-fork-mode)
              :maxmemory (param :junit-max-memory)
              :printsummary (param :junit-print-summary)
              :errorProperty "unittest.error"
              :failureProperty "unittest.error"}
             (ant-path class-path)
             (ant-variable {:key "base.dir" :value (param :module-dir)})
             (ant-variable {:key "net.sourceforge.cobertura.datafile" :value (param :cobertura-data-file)})
             (ant-formatter {:type "brief" :useFile "false"})
             ;; batchtest: every *Test.class under test-dir except abstract
             ;; bases and junit's own tests; reports land in report-dir.
             {:todir report-dir
              :fileset (ant-fileset {:dir test-dir :includes "**/*Test.class" :excludes "junit/**/*Test.class,**/Abstract*.class"})}))
;; Classpath used when running the unit tests against instrumented classes.
(def cobertura-run-classpath
  (class-path [(param :build-cobertura-dir)]))

(defn cobertura-clean
  "Removes the cobertura working directory and the coverage report directory."
  []
  (log :info "cleaning cobertura...")
  (doseq [dir [(param :build-cobertura-dir) (param :cobertura-report-dir)]]
    (delete-file (as-file dir))))

(defn cobertura-init
  "Creates the coverage report directory and the cobertura working directory."
  []
  (log :info "initializing cobertura...")
  (doseq [dir [(param :cobertura-report-dir) (param :build-cobertura-dir)]]
    (create-dir (as-file dir))))
;; Lifecycle step: instrument the compiled classes before the tests run.
(defn cobertura-pre-coverage []
  (log :info "pre-coverage cobertura...")
  (instrument-task))
;; Lifecycle step: run the unit tests against the instrumented classes.
;; The trailing TODO below is from the original author: the regular
;; unit-test classpath still needs to be appended to the run classpath.
(defn cobertura-coverage []
  (log :info "coverage cobertura...")
  (cobertura-junit cobertura-run-classpath (param :build-unittest-classes-dir) (param :unittest-unittest-report-dir))) ; add unittest classpath
;; Lifecycle step: produce the XML coverage report after the tests ran.
(defn cobertura-post-coverage []
  (log :info "post-coverage cobertura...")
  (report-task))
;; Plugin descriptor consumed by the Baumeister registry: parameter defaults
;; (values use ${...} substitution against other params) and the lifecycle
;; steps this plugin contributes. No extra functions are exported.
(def config
  {:params [[:build-cobertura-dir "${build-dir}/cobertura"]
            [:cobertura-data-file "${build-cobertura-dir}/cobertura.ser"]
            [:cobertura-report-dir "${build-report-dir}/cobertura"]]
   :steps [[:clean cobertura-clean]
           [:init cobertura-init]
           [:pre-coverage cobertura-pre-coverage]
           [:coverage cobertura-coverage]
           [:post-coverage cobertura-post-coverage]]
   :functions []})
| true |
;
; Copyright (c) PI:NAME:<NAME>END_PI. All rights reserved.
; The use and distribution terms for this software are covered by the
; Eclipse Public License 1.0 (http://opensource.org/licenses/eclipse-1.0.php)
; which can be found in the file license.txt at the root of this distribution.
; By using this software in any fashion, you are agreeing to be bound by
; the terms of this license.
; You must not remove this notice, or any other, from this software.
;
(ns baumeister.plugin.cobertura
(:use [clojure.java.io :exclude [delete-file]]
[clojure.string :only [join split]]
[org.soulspace.clj file]
[baumeister.utils ant-utils files log]
[baumeister.config registry]))
(ant-taskdef {:resource "net.sourceforge.cobertura.ant.antlib.xml"})
;(ant-taskdef {:name cobertura-instrument :classname "net.sourceforge.cobertura.ant.InstrumentTask"})
;(ant-taskdef {:name cobertura-report :classname "net.sourceforge.cobertura.ant.ReportTask"})
;(ant-taskdef {:name cobertura-merge :classname "net.sourceforge.cobertura.ant.MergeTask"})
;(ant-taskdef {:name cobertura-check :classname "net.sourceforge.cobertura.ant.CheckTask"})
(define-ant-task ant-cobertura-instrument cobertura-instrument)
(define-ant-task ant-cobertura-report cobertura-report)
(define-ant-type ant-ignore net.sourceforge.cobertura.ant.Ignore)
(defmethod add-nested [:baumeister.utils.ant-utils/cobertura-instrument
net.sourceforge.cobertura.ant.Ignore]
[_ task regex] (doto (.createIgnore task) (.setRegex regex)))
(defn instrument-task []
(ant-cobertura-instrument {:toDir (param :build.instrumented.dir)
:datafile (param :cobertura-data-file)}
(ant-ignore {:regex "org.apache.log4j.*"})
(ant-ignore {:regex "antlr.*"})
(ant-fileset {:dir (param :build-classes-dir)
:includes "**/*.class" :excludes "**/*Test.class"})))
(defn report-task []
(ant-cobertura-report {:destdir (param :cobertura-report-dir) :format "xml"
:datafile (param :cobertura-data-file)}
(ant-fileset {:dir (param :module-dir) :includes (join " " (split (source-path) #":"))})))
(defn cobertura-junit [class-path test-dir report-dir]
(log :debug class-path test-dir)
(ant-junit {:fork (param :junit-fork) :forkMode (param :junit-fork-mode)
:maxmemory (param :junit-max-memory)
:printsummary (param :junit-print-summary)
:errorProperty "unittest.error"
:failureProperty "unittest.error"}
(ant-path class-path)
(ant-variable {:key "base.dir" :value (param :module-dir)})
(ant-variable {:key "net.sourceforge.cobertura.datafile" :value (param :cobertura-data-file)})
(ant-formatter {:type "brief" :useFile "false"})
{:todir report-dir
:fileset (ant-fileset {:dir test-dir :includes "**/*Test.class" :excludes "junit/**/*Test.class,**/Abstract*.class"})}))
(def cobertura-run-classpath
(class-path [(param :build-cobertura-dir)]))
(defn cobertura-clean []
(log :info "cleaning cobertura...")
(delete-file (as-file (param :build-cobertura-dir)))
(delete-file (as-file (param :cobertura-report-dir))))
(defn cobertura-init []
(log :info "initializing cobertura...")
(create-dir (as-file (param :cobertura-report-dir)))
(create-dir (as-file (param :build-cobertura-dir))))
(defn cobertura-pre-coverage []
(log :info "pre-coverage cobertura...")
(instrument-task))
(defn cobertura-coverage []
(log :info "coverage cobertura...")
(cobertura-junit cobertura-run-classpath (param :build-unittest-classes-dir) (param :unittest-unittest-report-dir))) ; add unittest classpath
(defn cobertura-post-coverage []
(log :info "post-coverage cobertura...")
(report-task))
(def config
{:params [[:build-cobertura-dir "${build-dir}/cobertura"]
[:cobertura-data-file "${build-cobertura-dir}/cobertura.ser"]
[:cobertura-report-dir "${build-report-dir}/cobertura"]]
:steps [[:clean cobertura-clean]
[:init cobertura-init]
[:pre-coverage cobertura-pre-coverage]
[:coverage cobertura-coverage]
[:post-coverage cobertura-post-coverage]]
:functions []})
|
[
{
"context": "keypairs (\"identities\")\n\n(def nodes\n {:n1 {:sec \"SCECGNDJ6ZKTLHC2EBQSQ7LWATWIRBZCIWUHTD23SR3QCB4JQEY4DMJL\"\n :pub \"GBMEX3FCTKJVM25IXTRJZKSD6Z3HJXAJ6X",
"end": 834,
"score": 0.8638879656791687,
"start": 778,
"tag": "KEY",
"value": "SCECGNDJ6ZKTLHC2EBQSQ7LWATWIRBZCIWUHTD23SR3QCB4JQEY4DMJL"
},
{
"context": "LWATWIRBZCIWUHTD23SR3QCB4JQEY4DMJL\"\n :pub \"GBMEX3FCTKJVM25IXTRJZKSD6Z3HJXAJ6XGZN5F3WAO4UIN6RKXL3EIC\"}\n :n2 {:sec \"SAS66K2FZKXYCKXQIPV5H6SNK6WG3FRSB",
"end": 906,
"score": 0.9997410774230957,
"start": 850,
"tag": "KEY",
"value": "GBMEX3FCTKJVM25IXTRJZKSD6Z3HJXAJ6XGZN5F3WAO4UIN6RKXL3EIC"
},
{
"context": "3HJXAJ6XGZN5F3WAO4UIN6RKXL3EIC\"}\n :n2 {:sec \"SAS66K2FZKXYCKXQIPV5H6SNK6WG3FRSBVEBXY4KOCSTHLZLT6E2XJBW\"\n :pub \"",
"end": 945,
"score": 0.5695430040359497,
"start": 926,
"tag": "KEY",
"value": "66K2FZKXYCKXQIPV5H6"
},
{
"context": "6RKXL3EIC\"}\n :n2 {:sec \"SAS66K2FZKXYCKXQIPV5H6SNK6WG3FRSBVEBXY4KOCSTHLZLT6E2XJBW\"\n :pub \"GCB6H5QKFTUISFST4CDT2XFERDQV4TZPT3",
"end": 979,
"score": 0.5998526215553284,
"start": 947,
"tag": "KEY",
"value": "K6WG3FRSBVEBXY4KOCSTHLZLT6E2XJBW"
},
{
"context": "SNK6WG3FRSBVEBXY4KOCSTHLZLT6E2XJBW\"\n :pub \"GCB6H5QKFTUISFST4CDT2XFERDQV4TZPT3BQCMT53NBGVTQNLXYBYYMP\"}\n :n3 {:sec \"SDLBP3HWZBHMJ26JNKN55DALEHZJQVT2B",
"end": 1051,
"score": 0.999741792678833,
"start": 995,
"tag": "KEY",
"value": "GCB6H5QKFTUISFST4CDT2XFERDQV4TZPT3BQCMT53NBGVTQNLXYBYYMP"
},
{
"context": "ERDQV4TZPT3BQCMT53NBGVTQNLXYBYYMP\"}\n :n3 {:sec \"SDLBP3HWZBHMJ26JNKN55DALEHZJQVT2BSNB2I6KISZYHY6KOGPYLJSK\"\n :pub \"GCS5GQQCVOWOILCD7QBUGYMAALK372MXFG",
"end": 1124,
"score": 0.7184578776359558,
"start": 1068,
"tag": "KEY",
"value": "SDLBP3HWZBHMJ26JNKN55DALEHZJQVT2BSNB2I6KISZYHY6KOGPYLJSK"
},
{
"context": "ALEHZJQVT2BSNB2I6KISZYHY6KOGPYLJSK\"\n :pub \"GCS5GQQCVOWOILCD7QBUGYMAALK372MXFGACHFA2G2YXPOTC6IHYVYOP\"}\n :n4 {:sec \"SBNJ7KIFJLNOFRWXUYVLG2CAEORXE3XK3",
"end": 1196,
"score": 0.9997520446777344,
"start": 1140,
"tag": "KEY",
"value": "GCS5GQQCVOWOILCD7QBUGYMAALK372MXFGACHFA2G2YXPOTC6IHYVYOP"
},
{
"context": " \"SBNJ7KIFJLNOFRWXUYVLG2CAEORXE3XK3WQXSWXOKXERZ3B2W6HRNH7P\"\n :pub \"GDQ4LOJEICOCFW34LK4WYIUM5UPNA6ONI4",
"end": 1269,
"score": 0.9995982050895691,
"start": 1261,
"tag": "KEY",
"value": "W6HRNH7P"
},
{
"context": "CAEORXE3XK3WQXSWXOKXERZ3B2W6HRNH7P\"\n :pub \"GDQ4LOJEICOCFW34LK4WYIUM5UPNA6ONI43QSZ5MLEFZDYOVL5IISQH3\"}\n :n5 {:sec \"SBTQU46R47J2CDU65E32AITEN6XE7QETQ",
"end": 1341,
"score": 0.9996334910392761,
"start": 1285,
"tag": "KEY",
"value": "GDQ4LOJEICOCFW34LK4WYIUM5UPNA6ONI43QSZ5MLEFZDYOVL5IISQH3"
},
{
"context": "TEN6XE7QETQLILAB2LGG3AZ6AE4ZWVBSMB\"\n :pub \"GB3WGEHMELIWXSYCNOQ3OZ4CTSKGSNDYIMZYVG6OHNDYABLESPZI7CJQ\"}})\n\n(def nodenames (sort (keys nodes)))\n\n(defn n",
"end": 1486,
"score": 0.9996083378791809,
"start": 1430,
"tag": "KEY",
"value": "GB3WGEHMELIWXSYCNOQ3OZ4CTSKGSNDYIMZYVG6OHNDYABLESPZI7CJQ"
}
] |
src/jepsen_stellar_core/core.clj
|
graydon/jepsen-stellar-core
| 1 |
(ns jepsen-stellar-core.core
(:use [clojure.core.strint])
(:require [clj-http.client :as http]
[clojure.java.io :as io]
[clojure.string :as str]
[clojure.set :as set]
[clojure.tools.logging :refer [warn info debug]]
[jepsen.os.debian :as debian]
[jepsen.control.net :as net]
[jepsen [client :as client]
[core :as jepsen]
[db :as db]
[tests :as tests]
[control :as c :refer [|]]
[checker :as checker]
[nemesis :as nemesis]
[model :as model]
[generator :as gen]
[util :refer [timeout meh log-op]]]))
;; Bunch of random node keypairs ("identities")
;; Each node gets a fixed Stellar keypair: :sec is the validation seed
;; written into that node's config, :pub is shared with the other nodes as a
;; trusted validator key. These are throwaway test identities.
(def nodes
  {:n1 {:sec "SCECGNDJ6ZKTLHC2EBQSQ7LWATWIRBZCIWUHTD23SR3QCB4JQEY4DMJL"
        :pub "GBMEX3FCTKJVM25IXTRJZKSD6Z3HJXAJ6XGZN5F3WAO4UIN6RKXL3EIC"}
   :n2 {:sec "SAS66K2FZKXYCKXQIPV5H6SNK6WG3FRSBVEBXY4KOCSTHLZLT6E2XJBW"
        :pub "GCB6H5QKFTUISFST4CDT2XFERDQV4TZPT3BQCMT53NBGVTQNLXYBYYMP"}
   :n3 {:sec "SDLBP3HWZBHMJ26JNKN55DALEHZJQVT2BSNB2I6KISZYHY6KOGPYLJSK"
        :pub "GCS5GQQCVOWOILCD7QBUGYMAALK372MXFGACHFA2G2YXPOTC6IHYVYOP"}
   :n4 {:sec "SBNJ7KIFJLNOFRWXUYVLG2CAEORXE3XK3WQXSWXOKXERZ3B2W6HRNH7P"
        :pub "GDQ4LOJEICOCFW34LK4WYIUM5UPNA6ONI43QSZ5MLEFZDYOVL5IISQH3"}
   :n5 {:sec "SBTQU46R47J2CDU65E32AITEN6XE7QETQLILAB2LGG3AZ6AE4ZWVBSMB"
        :pub "GB3WGEHMELIWXSYCNOQ3OZ4CTSKGSNDYIMZYVG6OHNDYABLESPZI7CJQ"}})
;; Node keywords in stable sorted order (:n1 ... :n5).
(def nodenames (sort (keys nodes)))
;; Map an arbitrary non-negative integer onto a node name, round-robin.
(defn nodenum [n]
  (nth nodenames (rem n (count nodenames))))
;; Version of stellar-core to install; dynamic so tests can rebind it.
(def ^:dynamic *stellar-core-version*
  "0.0.1-132-4654e395")
;; Base URL of the stellar-core release bucket.
(def stellar-core-deb-url-path
  "https://s3.amazonaws.com/stellar.org/releases/stellar-core")
;; Debian package filename for the pinned version (strint interpolation).
(defn stellar-core-deb []
  (<< "stellar-core-~{*stellar-core-version*}_amd64.deb"))
;; Full download URL for that package.
(defn stellar-core-deb-url []
  (<< "~{stellar-core-deb-url-path}/~(stellar-core-deb)"))
;; Basic mechanisms for talking to a stellar-core node's test port
(def ten-million 10000000)
(def ^:dynamic *node-host* "localhost")
(def ^:dynamic *node-port* 11626)
(defn get-json
  "GET /`path` on the current node's test port and parse the JSON body.
  An optional `qp` map is sent as query parameters."
  [path & [qp]]
  (let [url  (<< "http://~{*node-host*}:~{*node-port*}/~{path}")
        base {:as :json}]
    (http/get url (if qp (assoc base :query-params qp) base))))
;; Thin accessors over the node's HTTP test interface.
;; The :info blob from /info.
(defn node-info []
  (-> (get-json "info") :body :info))
;; Current ledger sequence number from /info.
(defn ledger-num []
  (-> (node-info) :ledger :num))
;; The :state field of /info.
(defn node-status []
  (:state (node-info)))
;; Metrics map from /metrics.
(defn node-metrics []
  (-> (get-json "metrics") :body :metrics))
;; Submit a test transaction described by query-param map qp via /testtx.
(defn test-tx [qp]
  (-> (get-json "testtx" qp) :body))
;; Look up a named test account via /testacc.
(defn get-account [who]
  (-> (get-json "testacc" {:name who}) :body))
;; An account exists iff its lookup reports a sequence number.
(defn has-account [who]
  (-> (get-account who) (contains? :seqnum)))
;; Query-parameter builders for the /testtx endpoint, plus thin wrappers
;; that submit the corresponding transactions.

(defn payment-qp
  "Query params for a payment of `amount` stroops from `from` to `to`."
  [from to amount]
  {:from from
   :to to
   :amount amount})

(defn create-account-qp
  "Params creating `to` funded with 10,000 * ten-million stroops from root."
  [to]
  ;; was (:create 'true): quoting the boolean literal just yields the boolean
  ;; back, so use the literal directly.
  (assoc (payment-qp "root" to (* 10000 ten-million))
         :create true))

(defn create-account-from-qp
  "Params creating `to` funded with 100 * ten-million stroops from `from`."
  [from to]
  (assoc (payment-qp from to (* 100 ten-million))
         :create true))

;; Submit a plain payment transaction.
(defn do-payment [from to amount]
  (test-tx (payment-qp from to amount)))

;; Create and fund `to` from an intermediate account `from`.
(defn do-create-account-from [from to]
  (test-tx (create-account-from-qp from to)))

;; Create and fund `to` from the root account.
(defn do-create-account [to]
  (test-tx (create-account-qp to)))
(defn retry-until
  "Keep trying the provided `:f` function until the `:until` function
  passes, or `:ledgers` expire, or `:retries` retries expire. Logs
  some waiting-for-close noise after the first 20 retries, and slows
  retrying down a bit. Returns `:ok` if it passed, `:fail` if anything
  timed out."
  [& {:keys [ledgers retries f until]
      :or {ledgers 100
           retries 100
           f #()                ; default action: no-op
           until #(or false)}}] ; default predicate: never satisfied
  ;; NOTE: `first`/`last` below shadow clojure.core/first and /last inside
  ;; this function body -- intentional, but easy to misread.
  (let [first (ledger-num)
        last (+ first ledgers)]
    (loop [retried 0]
      (let [curr (ledger-num)]
        (f)
        (cond
          (until) :ok
          (> curr last) :fail       ; ledger budget exhausted
          (> retried retries) :fail ; retry budget exhausted
          ;; after 20 quick attempts, slow down and log progress each try
          (> retried 20)
          (do
            (Thread/sleep 800)
            (info (<< "awaiting close: node ~{*node-host*}, ledger ~{curr}, status ~(node-status)"))
            (recur (inc retried)))
          true
          (do
            (Thread/sleep 300)
            (recur (inc retried))))))))
(defn install!
  "Install stellar-core and its dependencies on a node."
  [node]
  ;; Each dependency is checked under both the plain and :amd64-qualified
  ;; package names because dpkg versions report installed packages
  ;; differently.
  (when-not (or
             (debian/installed? :libpq5)
             (debian/installed? :libpq5:amd64))
    (debian/update!)
    (debian/install '(:libpq5:amd64)))
  (when-not (or
             (debian/installed? :libsqlite3-0)
             (debian/installed? :libsqlite3-0:amd64))
    (debian/update!)
    (debian/install '(:libsqlite3-0:amd64)))
  ;; (Re)install stellar-core unless the exact pinned version is present.
  (when-not (and
             (or
              ; Different dpkg versions report this slightly differently
              (debian/installed? :stellar-core:amd64)
              (debian/installed? :stellar-core))
             (= (debian/installed-version :stellar-core)
                *stellar-core-version*))
    ;; --no-clobber avoids re-downloading an already-fetched .deb.
    (c/exec :wget :--no-clobber (stellar-core-deb-url))
    (meh (debian/uninstall! :stellar-core))
    (c/exec :dpkg :-i (stellar-core-deb))))
(defn configure!
  "Install keys, init control and config files on a node.
  Pubkeys are needed to scp history between nodes' history stores."
  [node]
  (let [self (nodes node)
        others (vec (sort (filter #(not= %1 node) (keys nodes))))]
    ;; Shared ssh identity so nodes can scp history archives to each other.
    (c/upload '("/root/.ssh/known_hosts"
                "/root/.ssh/id_rsa"
                "/root/.ssh/id_rsa.pub")
              "/root/.ssh" :mode 0600)
    ;; Install the init script from bundled resources.
    (c/exec :echo
            (slurp (io/resource "stellar-core"))
            :> "/etc/init.d/stellar-core")
    (c/exec :chmod :0755 "/etc/init.d/stellar-core")
    ;; Render stellar-core.cfg from the bundled template: substitute this
    ;; node's validation seed (%VALIDATION_SEED%), every node's public key
    ;; (%PUBKEYn%), this node's own name (%SELF%), and the other nodes'
    ;; names (%OTHERn%, 1-based index into the sorted `others` vector).
    (c/exec :echo
            (-> (io/resource "stellar-core.cfg")
                slurp
                (str/replace #"%VALIDATION_SEED%" (:sec self))
                (str/replace #"%PUBKEY(\d)%"
                             (fn [[_ n]] ((nodes (keyword (str "n" n))) :pub)))
                (str/replace #"%SELF%" (name node))
                (str/replace #"%OTHER(\d)%"
                             (fn [[_ n]] (name (others (- (read-string n) 1))))))
            :> "stellar-core.cfg")))
(defn wipe!
  "Stop stellar-core (ignoring failure if already stopped) and remove all
  node state: init script, ssh identity, history, buckets, database, config
  and log."
  []
  (meh (c/exec :service :stellar-core :stop))
  (let [key-files ["/etc/init.d/stellar-core"
                   "/root/.ssh/known_hosts"
                   "/root/.ssh/id_rsa"
                   "/root/.ssh/id_rsa.pub"]]
    (apply c/exec :rm :-f key-files))
  (apply c/exec :rm :-rf
         [:history :buckets :stellar.db :stellar-core.cfg :stellar-core.log]))
(defn initialize!
  "Initialize history and database on a node."
  [node]
  ;; Flags are passed straight to the stellar-core CLI: create this node's
  ;; named history archive, reset the database, and set the force-SCP flag.
  ;; NOTE(review): exact --forcescp semantics come from the stellar-core
  ;; binary -- confirm against its CLI docs if behavior matters here.
  (c/exec :stellar-core :--conf :stellar-core.cfg :--newhist node)
  (c/exec :stellar-core :--conf :stellar-core.cfg :--newdb)
  (c/exec :stellar-core :--conf :stellar-core.cfg :--forcescp))
(defn db
  "Standard db/DB reification, for a single node."
  []
  (reify db/DB
    (setup! [db test node]
      ;; Start from a clean slate, then install, configure, initialize and
      ;; launch the service.
      (wipe!)
      (install! node)
      (configure! node)
      (initialize! node)
      (c/exec :service :stellar-core :start))
    (teardown! [db test node]
      ;; meh swallows the error if the service is already stopped.
      (meh (c/exec :service :stellar-core :stop)))
    ))
;; We dynamically track the max-account to read-back during any test.
;; This number only ever increases; it's harmless if it's "too high",
;; we just do a bunch of unsuccessful queries to nonexistent high values
;; at the end of a test.
(def max-account (atom 0))
;; Account name keyword for index n, e.g. 7 -> :account7.
(defn account-id [n] (keyword (<< "account~{n}")))
(defn client
  "Standard client for `:add` and `:read` operations, modeled by `model/set`.
  An `{:type :invoke :f :add :value n}` operation causes this client
  to add an account named `account~{n}` from a funding-account called
  `(nodenum n)`. That is, `account0`, `account5`, `account10`, etc. are all
  funded from an intermediate account called `n1`.
  Splitting the adds up with intermediate accounts allows them to proceed without
  interfering with one another's sequence numbers; if we didn't do this, most adds
  would fail (harmlessly, but it's a waste of requests; we want to test adds, not
  rejections).
  "
  [node]
  (reify client/Client
    ;; setup! rebinds the client to the node it will talk to (the `node`
    ;; parameter here shadows the constructor argument).
    (setup! [this test node] (client node))
    (teardown! [this test])
    (invoke! [this test op]
      (case (:f op)
        ;; :setup -- create the intermediate funding account named after the
        ;; node at index (:value op), funded from root; retry until visible.
        :setup
        (binding [*node-host* (name node)]
          (let [n (nth nodenames (:value op))]
            (assoc op :type (retry-until
                             :f #(do-create-account n)
                             :until #(has-account n)))))
        ;; :read -- probe account0..account@max-account and report the set of
        ;; indices whose accounts exist.
        :read
        (binding [*node-host* (name node)]
          (assoc op
                 :type :ok,
                 :value (apply sorted-set
                               (filter #(has-account (account-id %1))
                                       (range (inc @max-account))))))
        ;; :add -- create account~{v} from its round-robin intermediate
        ;; funding account; retry until the account is visible.
        :add
        (binding [*node-host* (name node)]
          (let [v (:value op)
                id (account-id v)
                src (nodenum v)]
            (assoc op :type (retry-until
                             :f #(do-create-account-from src id)
                             :until #(has-account id)))))
        ))))
(defn setup
  "Generator that invokes :setup on every node, once, single-threaded"
  []
  ;; For each node index, emit exactly one :setup op pinned (via gen/on) to
  ;; the process with the same index; the per-node generators run in order.
  (gen/singlethreaded
   (apply gen/concat
          (map (fn [node] (gen/on (fn [process] (= process node))
                                  (gen/once {:type :invoke :f :setup :value node})))
               (range (count nodenames))))))
(defn adds
  "Generator that emits :add operations for sequential integers."
  []
  ;; Side effect: bump max-account as ops are (lazily) realized, so the
  ;; final :read knows how many accounts to probe for.
  (->> (range)
       (map (fn [x]
              (swap! max-account (fn [e] (max e x)))
              {:type :invoke, :f :add, :value x}))
       gen/seq))
(defn recover
  "A generator which stops the nemesis and allows some time for recovery."
  []
  (gen/nemesis
   (gen/phases
    (gen/once {:type :info, :f :stop})
    (gen/sleep 20))))
(defn read-once
  "A generator which reads exactly once."
  []
  (gen/clients
   (gen/once {:type :invoke, :f :read})))
(defn random-subset
  "Return a random subset of `coll`: a uniformly chosen size between 0 and
  (count coll) inclusive, drawn from a shuffled copy."
  [coll]
  (let [size (rand-int (inc (count coll)))]
    (take size (shuffle coll))))
(defn damaged-net-nemesis
  "Induces network damage on random subset of nodes"
  [damage & [nodes]]
  (reify client/Client
    ;; setup! picks a fresh random subset of the test's nodes, heals it, and
    ;; returns a NEW nemesis closing over that subset -- the `nodes` let
    ;; binding shadows the constructor argument, and invoke!/teardown! on the
    ;; returned value use the recursive call's `nodes`, not this one's.
    (setup! [this test _]
      (let [nodes (random-subset (:nodes test))]
        (c/on-many nodes (meh (net/fast)))
        (damaged-net-nemesis damage nodes)))
    (invoke! [this test op]
      (case (:f op)
        ;; :start -- apply the damage fn (e.g. net/flaky, net/slow).
        :start
        (do
          (c/on-many nodes (damage))
          (assoc op :value (str "network damaged on " (pr-str nodes))))
        ;; :stop -- restore fast networking on the same nodes.
        :stop
        (do
          (c/on-many nodes (meh (net/fast)))
          (assoc op :value (str "healed network on " (pr-str nodes))))
        ))
    (teardown! [this test]
      (c/on-many nodes (meh (net/fast)))
      this)))
;; Concrete nemeses over the two kinds of damage jepsen.control.net offers.
(defn flaky-net-nemesis [] (damaged-net-nemesis net/flaky))
(defn slow-net-nemesis [] (damaged-net-nemesis net/slow))
(defn simple-test
  "All our tests follow a simple phase structure: they `:setup` up a new network,
  run for 10 minutes `:add`ing sequential accounts and running nemesis disruptions
  for 20 seconds every minute (with 40s to recover from each). The network
  is then allowed to fully heal, and a final `:read` is performed."
  []
  (assoc tests/noop-test
         :name "stellar-core"
         :os debian/os
         :db (db)
         :client (client nil)   ; node is bound later via the client's setup!
         :model (model/set)
         :generator (gen/phases
                     (setup)
                     (->> (adds)
                          (gen/stagger 1/10)   ; jittered add rate
                          (gen/delay 1)
                          ;; nemesis cycle: 40s calm, 20s of disruption
                          (gen/nemesis
                           (gen/seq (cycle
                                     [(gen/sleep 40)
                                      {:type :info :f :start}
                                      (gen/sleep 20)
                                      {:type :info :f :stop}])))
                          (gen/time-limit 600))   ; 10-minute add phase
                     (recover)
                     (read-once))
         :nemesis nemesis/noop
         :checker checker/set))
;; Concrete test scenarios: each takes simple-test and swaps in a nemesis.
(defn hammer-test
  "Randomly pauses (STOP) and resumes (CONT) nodes on the network"
  []
  (assoc (simple-test)
         :name "hammer-test"
         :nemesis (nemesis/hammer-time :stellar-core)))
(defn bridge-test
  "Randomly partitions network into [2 nodes] <-> bridge-node <-> [2 nodes]"
  []
  (assoc (simple-test)
         :name "bridge-test"
         :nemesis (nemesis/partitioner (comp nemesis/bridge shuffle))))
(defn majorities-ring-test
  "Cuts links such that each node sees a different majority"
  []
  (assoc (simple-test)
         :name "majorities-ring-test"
         :nemesis (nemesis/partition-majorities-ring)))
(defn random-split-test
  "Randomly partitions network into halves"
  []
  (assoc (simple-test)
         :name "random-split-test"
         :nemesis (nemesis/partition-random-halves)))
(defn random-isolate-test
  "Randomly split a single node off the group"
  []
  (assoc (simple-test)
         :name "random-isolate-test"
         :nemesis (nemesis/partition-random-node)))
(defn flaky-net-test
  "Make network connections randomly flaky"
  []
  (assoc (simple-test)
         :name "flaky-net-test"
         :nemesis (flaky-net-nemesis)))
(defn slow-net-test
  "Make network connections randomly slow"
  []
  (assoc (simple-test)
         :name "slow-net-test"
         :nemesis (slow-net-nemesis)))
|
91072
|
(ns jepsen-stellar-core.core
(:use [clojure.core.strint])
(:require [clj-http.client :as http]
[clojure.java.io :as io]
[clojure.string :as str]
[clojure.set :as set]
[clojure.tools.logging :refer [warn info debug]]
[jepsen.os.debian :as debian]
[jepsen.control.net :as net]
[jepsen [client :as client]
[core :as jepsen]
[db :as db]
[tests :as tests]
[control :as c :refer [|]]
[checker :as checker]
[nemesis :as nemesis]
[model :as model]
[generator :as gen]
[util :refer [timeout meh log-op]]]))
;; Bunch of random node keypairs ("identities")
(def nodes
{:n1 {:sec "<KEY>"
:pub "<KEY>"}
:n2 {:sec "SAS<KEY>SN<KEY>"
:pub "<KEY>"}
:n3 {:sec "<KEY>"
:pub "<KEY>"}
:n4 {:sec "SBNJ7KIFJLNOFRWXUYVLG2CAEORXE3XK3WQXSWXOKXERZ3B2<KEY>"
:pub "<KEY>"}
:n5 {:sec "SBTQU46R47J2CDU65E32AITEN6XE7QETQLILAB2LGG3AZ6AE4ZWVBSMB"
:pub "<KEY>"}})
(def nodenames (sort (keys nodes)))
(defn nodenum [n]
(nth nodenames (rem n (count nodenames))))
(def ^:dynamic *stellar-core-version*
"0.0.1-132-4654e395")
(def stellar-core-deb-url-path
"https://s3.amazonaws.com/stellar.org/releases/stellar-core")
(defn stellar-core-deb []
(<< "stellar-core-~{*stellar-core-version*}_amd64.deb"))
(defn stellar-core-deb-url []
(<< "~{stellar-core-deb-url-path}/~(stellar-core-deb)"))
;; Basic mechanisms for talking to a stellar-core node's test port
(def ten-million 10000000)
(def ^:dynamic *node-host* "localhost")
(def ^:dynamic *node-port* 11626)
(defn get-json [path & [qp]]
(let [params {:as :json}]
(http/get (<< "http://~{*node-host*}:~{*node-port*}/~{path}")
(if qp
(assoc params :query-params qp)
params))))
(defn node-info []
(-> (get-json "info") :body :info))
(defn ledger-num []
(-> (node-info) :ledger :num))
(defn node-status []
(:state (node-info)))
(defn node-metrics []
(-> (get-json "metrics") :body :metrics))
(defn test-tx [qp]
(-> (get-json "testtx" qp) :body))
(defn get-account [who]
(-> (get-json "testacc" {:name who}) :body))
(defn has-account [who]
(-> (get-account who) (contains? :seqnum)))
(defn payment-qp [from to amount]
{:from from
:to to
:amount amount})
(defn create-account-qp [to]
(assoc (payment-qp "root" to (* 10000 ten-million))
:create 'true))
(defn create-account-from-qp [from to]
(assoc (payment-qp from to (* 100 ten-million))
:create 'true))
(defn do-payment [from to amount]
(test-tx (payment-qp from to amount)))
(defn do-create-account-from [from to]
(test-tx (create-account-from-qp from to)))
(defn do-create-account [to]
(test-tx (create-account-qp to)))
(defn retry-until
"Keep trying the provided `:f` function until the `:until` function
passes, or `:ledgers` expire, or `:retries` retries expire. Logs
some waiting-for-close noise after the first 20 retries, and slows
retrying down a bit. Returns `:ok` if it passed, `:fail` if anything
timed out."
[& {:keys [ledgers retries f until]
:or {ledgers 100
retries 100
f #()
until #(or false)}}]
(let [first (ledger-num)
last (+ first ledgers)]
(loop [retried 0]
(let [curr (ledger-num)]
(f)
(cond
(until) :ok
(> curr last) :fail
(> retried retries) :fail
(> retried 20)
(do
(Thread/sleep 800)
(info (<< "awaiting close: node ~{*node-host*}, ledger ~{curr}, status ~(node-status)"))
(recur (inc retried)))
true
(do
(Thread/sleep 300)
(recur (inc retried))))))))
(defn install!
"Install stellar-core and its dependencies on a node."
[node]
(when-not (or
(debian/installed? :libpq5)
(debian/installed? :libpq5:amd64))
(debian/update!)
(debian/install '(:libpq5:amd64)))
(when-not (or
(debian/installed? :libsqlite3-0)
(debian/installed? :libsqlite3-0:amd64))
(debian/update!)
(debian/install '(:libsqlite3-0:amd64)))
(when-not (and
(or
; Different dpkg versions report this slightly differently
(debian/installed? :stellar-core:amd64)
(debian/installed? :stellar-core))
(= (debian/installed-version :stellar-core)
*stellar-core-version*))
(c/exec :wget :--no-clobber (stellar-core-deb-url))
(meh (debian/uninstall! :stellar-core))
(c/exec :dpkg :-i (stellar-core-deb))))
(defn configure!
"Install keys, init control and config files on a node.
Pubkeys are needed to scp history between nodes' history stores."
[node]
(let [self (nodes node)
others (vec (sort (filter #(not= %1 node) (keys nodes))))]
(c/upload '("/root/.ssh/known_hosts"
"/root/.ssh/id_rsa"
"/root/.ssh/id_rsa.pub")
"/root/.ssh" :mode 0600)
(c/exec :echo
(slurp (io/resource "stellar-core"))
:> "/etc/init.d/stellar-core")
(c/exec :chmod :0755 "/etc/init.d/stellar-core")
(c/exec :echo
(-> (io/resource "stellar-core.cfg")
slurp
(str/replace #"%VALIDATION_SEED%" (:sec self))
(str/replace #"%PUBKEY(\d)%"
(fn [[_ n]] ((nodes (keyword (str "n" n))) :pub)))
(str/replace #"%SELF%" (name node))
(str/replace #"%OTHER(\d)%"
(fn [[_ n]] (name (others (- (read-string n) 1))))))
:> "stellar-core.cfg")))
(defn wipe!
"Wipe a node's state, keys and config files."
[]
(meh (c/exec :service :stellar-core :stop))
(c/exec :rm :-f
"/etc/init.d/stellar-core"
"/root/.ssh/known_hosts"
"/root/.ssh/id_rsa"
"/root/.ssh/id_rsa.pub")
(c/exec :rm :-rf :history :buckets :stellar.db :stellar-core.cfg :stellar-core.log))
(defn initialize!
"Initialize history and database on a node."
[node]
(c/exec :stellar-core :--conf :stellar-core.cfg :--newhist node)
(c/exec :stellar-core :--conf :stellar-core.cfg :--newdb)
(c/exec :stellar-core :--conf :stellar-core.cfg :--forcescp))
(defn db
"Standard db/DB reification, for a single node."
[]
(reify db/DB
(setup! [db test node]
(wipe!)
(install! node)
(configure! node)
(initialize! node)
(c/exec :service :stellar-core :start))
(teardown! [db test node]
(meh (c/exec :service :stellar-core :stop)))
))
;; We dynamically track the max-account to read-back during any test.
;; This number only ever increases; it's harmless if it's "too high",
;; we just do a bunch of unsuccessful queries to nonexistent high values
;; at the end of a test.
(def max-account (atom 0))
(defn account-id [n] (keyword (<< "account~{n}")))
(defn client
"Standard client for `:add` and `:read` operations, modeled by `model/set`.
An `{:type :invoke :f :add :value n}` operation causes this client
to add an account named `account~{n}` from a funding-account called
`(nodenum n)`. That is, `account0`, `account5`, `account10`, etc. are all
funded from an intermediate account called `n1`.
Splitting the adds up with intermediate accounts allows them to proceed without
interfering with one another's sequence numbers; if we didn't do this, most adds
would fail (harmlessly, but it's a waste of requests; we want to test adds, not
rejections).
"
[node]
(reify client/Client
(setup! [this test node] (client node))
(teardown! [this test])
(invoke! [this test op]
(case (:f op)
:setup
(binding [*node-host* (name node)]
(let [n (nth nodenames (:value op))]
(assoc op :type (retry-until
:f #(do-create-account n)
:until #(has-account n)))))
:read
(binding [*node-host* (name node)]
(assoc op
:type :ok,
:value (apply sorted-set
(filter #(has-account (account-id %1))
(range (inc @max-account))))))
:add
(binding [*node-host* (name node)]
(let [v (:value op)
id (account-id v)
src (nodenum v)]
(assoc op :type (retry-until
:f #(do-create-account-from src id)
:until #(has-account id)))))
))))
(defn setup
"Generator that invokes :setup on every node, once, single-threaded"
[]
(gen/singlethreaded
(apply gen/concat
(map (fn [node] (gen/on (fn [process] (= process node))
(gen/once {:type :invoke :f :setup :value node})))
(range (count nodenames))))))
(defn adds
"Generator that emits :add operations for sequential integers."
[]
(->> (range)
(map (fn [x]
(swap! max-account (fn [e] (max e x)))
{:type :invoke, :f :add, :value x}))
gen/seq))
(defn recover
"A generator which stops the nemesis and allows some time for recovery."
[]
(gen/nemesis
(gen/phases
(gen/once {:type :info, :f :stop})
(gen/sleep 20))))
(defn read-once
"A generator which reads exactly once."
[]
(gen/clients
(gen/once {:type :invoke, :f :read})))
(defn random-subset
"Return a random subset of a collection"
[coll]
(take (rand-int (inc (count coll))) (shuffle coll)))
(defn damaged-net-nemesis
"Induces network damage on random subset of nodes"
[damage & [nodes]]
(reify client/Client
(setup! [this test _]
(let [nodes (random-subset (:nodes test))]
(c/on-many nodes (meh (net/fast)))
(damaged-net-nemesis damage nodes)))
(invoke! [this test op]
(case (:f op)
:start
(do
(c/on-many nodes (damage))
(assoc op :value (str "network damaged on " (pr-str nodes))))
:stop
(do
(c/on-many nodes (meh (net/fast)))
(assoc op :value (str "healed network on " (pr-str nodes))))
))
(teardown! [this test]
(c/on-many nodes (meh (net/fast)))
this)))
(defn flaky-net-nemesis [] (damaged-net-nemesis net/flaky))
(defn slow-net-nemesis [] (damaged-net-nemesis net/slow))
(defn simple-test
"All our tests follow a simple phase structure: they `:setup` up a new network,
run for 10 minutes `:add`ing sequential accounts and running nemesis disruptions
for 20 seconds every minute (with 40s to recover from each). The network
is then allowed to fully heal, and a final `:read` is performed."
[]
(assoc tests/noop-test
:name "stellar-core"
:os debian/os
:db (db)
:client (client nil)
:model (model/set)
:generator (gen/phases
(setup)
(->> (adds)
(gen/stagger 1/10)
(gen/delay 1)
(gen/nemesis
(gen/seq (cycle
[(gen/sleep 40)
{:type :info :f :start}
(gen/sleep 20)
{:type :info :f :stop}])))
(gen/time-limit 600))
(recover)
(read-once))
:nemesis nemesis/noop
:checker checker/set))
(defn hammer-test
"Randomly pauses (STOP) and resumes (CONT) nodes on the network"
[]
(assoc (simple-test)
:name "hammer-test"
:nemesis (nemesis/hammer-time :stellar-core)))
(defn bridge-test
"Randomly partitions network into [2 nodes] <-> bridge-node <-> [2 nodes]"
[]
(assoc (simple-test)
:name "bridge-test"
:nemesis (nemesis/partitioner (comp nemesis/bridge shuffle))))
(defn majorities-ring-test
"Cuts links such that each node sees a different majority"
[]
(assoc (simple-test)
:name "majorities-ring-test"
:nemesis (nemesis/partition-majorities-ring)))
(defn random-split-test
"Randomly partitions network into halves"
[]
(assoc (simple-test)
:name "random-split-test"
:nemesis (nemesis/partition-random-halves)))
(defn random-isolate-test
"Randomly split a single node off the group"
[]
(assoc (simple-test)
:name "random-isolate-test"
:nemesis (nemesis/partition-random-node)))
(defn flaky-net-test
"Make network connections randomly flaky"
[]
(assoc (simple-test)
:name "flaky-net-test"
:nemesis (flaky-net-nemesis)))
(defn slow-net-test
"Make network connections randomly slow"
[]
(assoc (simple-test)
:name "slow-net-test"
:nemesis (slow-net-nemesis)))
| true |
(ns jepsen-stellar-core.core
(:use [clojure.core.strint])
(:require [clj-http.client :as http]
[clojure.java.io :as io]
[clojure.string :as str]
[clojure.set :as set]
[clojure.tools.logging :refer [warn info debug]]
[jepsen.os.debian :as debian]
[jepsen.control.net :as net]
[jepsen [client :as client]
[core :as jepsen]
[db :as db]
[tests :as tests]
[control :as c :refer [|]]
[checker :as checker]
[nemesis :as nemesis]
[model :as model]
[generator :as gen]
[util :refer [timeout meh log-op]]]))
;; Bunch of random node keypairs ("identities")
(def nodes
{:n1 {:sec "PI:KEY:<KEY>END_PI"
:pub "PI:KEY:<KEY>END_PI"}
:n2 {:sec "SASPI:KEY:<KEY>END_PISNPI:KEY:<KEY>END_PI"
:pub "PI:KEY:<KEY>END_PI"}
:n3 {:sec "PI:KEY:<KEY>END_PI"
:pub "PI:KEY:<KEY>END_PI"}
:n4 {:sec "SBNJ7KIFJLNOFRWXUYVLG2CAEORXE3XK3WQXSWXOKXERZ3B2PI:KEY:<KEY>END_PI"
:pub "PI:KEY:<KEY>END_PI"}
:n5 {:sec "SBTQU46R47J2CDU65E32AITEN6XE7QETQLILAB2LGG3AZ6AE4ZWVBSMB"
:pub "PI:KEY:<KEY>END_PI"}})
(def nodenames (sort (keys nodes)))
(defn nodenum [n]
(nth nodenames (rem n (count nodenames))))
(def ^:dynamic *stellar-core-version*
"0.0.1-132-4654e395")
(def stellar-core-deb-url-path
"https://s3.amazonaws.com/stellar.org/releases/stellar-core")
(defn stellar-core-deb []
(<< "stellar-core-~{*stellar-core-version*}_amd64.deb"))
(defn stellar-core-deb-url []
(<< "~{stellar-core-deb-url-path}/~(stellar-core-deb)"))
;; Basic mechanisms for talking to a stellar-core node's test port
(def ten-million 10000000)
(def ^:dynamic *node-host* "localhost")
(def ^:dynamic *node-port* 11626)
(defn get-json [path & [qp]]
(let [params {:as :json}]
(http/get (<< "http://~{*node-host*}:~{*node-port*}/~{path}")
(if qp
(assoc params :query-params qp)
params))))
(defn node-info []
(-> (get-json "info") :body :info))
(defn ledger-num []
(-> (node-info) :ledger :num))
(defn node-status []
(:state (node-info)))
(defn node-metrics []
(-> (get-json "metrics") :body :metrics))
(defn test-tx [qp]
(-> (get-json "testtx" qp) :body))
(defn get-account [who]
(-> (get-json "testacc" {:name who}) :body))
(defn has-account [who]
(-> (get-account who) (contains? :seqnum)))
(defn payment-qp [from to amount]
{:from from
:to to
:amount amount})
(defn create-account-qp [to]
(assoc (payment-qp "root" to (* 10000 ten-million))
:create 'true))
(defn create-account-from-qp [from to]
(assoc (payment-qp from to (* 100 ten-million))
:create 'true))
(defn do-payment [from to amount]
(test-tx (payment-qp from to amount)))
(defn do-create-account-from [from to]
(test-tx (create-account-from-qp from to)))
(defn do-create-account [to]
(test-tx (create-account-qp to)))
(defn retry-until
"Keep trying the provided `:f` function until the `:until` function
passes, or `:ledgers` expire, or `:retries` retries expire. Logs
some waiting-for-close noise after the first 20 retries, and slows
retrying down a bit. Returns `:ok` if it passed, `:fail` if anything
timed out."
[& {:keys [ledgers retries f until]
:or {ledgers 100
retries 100
f #()
until #(or false)}}]
(let [first (ledger-num)
last (+ first ledgers)]
(loop [retried 0]
(let [curr (ledger-num)]
(f)
(cond
(until) :ok
(> curr last) :fail
(> retried retries) :fail
(> retried 20)
(do
(Thread/sleep 800)
(info (<< "awaiting close: node ~{*node-host*}, ledger ~{curr}, status ~(node-status)"))
(recur (inc retried)))
true
(do
(Thread/sleep 300)
(recur (inc retried))))))))
(defn install!
"Install stellar-core and its dependencies on a node."
[node]
(when-not (or
(debian/installed? :libpq5)
(debian/installed? :libpq5:amd64))
(debian/update!)
(debian/install '(:libpq5:amd64)))
(when-not (or
(debian/installed? :libsqlite3-0)
(debian/installed? :libsqlite3-0:amd64))
(debian/update!)
(debian/install '(:libsqlite3-0:amd64)))
(when-not (and
(or
; Different dpkg versions report this slightly differently
(debian/installed? :stellar-core:amd64)
(debian/installed? :stellar-core))
(= (debian/installed-version :stellar-core)
*stellar-core-version*))
(c/exec :wget :--no-clobber (stellar-core-deb-url))
(meh (debian/uninstall! :stellar-core))
(c/exec :dpkg :-i (stellar-core-deb))))
(defn configure!
"Install keys, init control and config files on a node.
Pubkeys are needed to scp history between nodes' history stores."
[node]
(let [self (nodes node)
others (vec (sort (filter #(not= %1 node) (keys nodes))))]
(c/upload '("/root/.ssh/known_hosts"
"/root/.ssh/id_rsa"
"/root/.ssh/id_rsa.pub")
"/root/.ssh" :mode 0600)
(c/exec :echo
(slurp (io/resource "stellar-core"))
:> "/etc/init.d/stellar-core")
(c/exec :chmod :0755 "/etc/init.d/stellar-core")
(c/exec :echo
(-> (io/resource "stellar-core.cfg")
slurp
(str/replace #"%VALIDATION_SEED%" (:sec self))
(str/replace #"%PUBKEY(\d)%"
(fn [[_ n]] ((nodes (keyword (str "n" n))) :pub)))
(str/replace #"%SELF%" (name node))
(str/replace #"%OTHER(\d)%"
(fn [[_ n]] (name (others (- (read-string n) 1))))))
:> "stellar-core.cfg")))
(defn wipe!
"Wipe a node's state, keys and config files."
[]
(meh (c/exec :service :stellar-core :stop))
(c/exec :rm :-f
"/etc/init.d/stellar-core"
"/root/.ssh/known_hosts"
"/root/.ssh/id_rsa"
"/root/.ssh/id_rsa.pub")
(c/exec :rm :-rf :history :buckets :stellar.db :stellar-core.cfg :stellar-core.log))
(defn initialize!
"Initialize history and database on a node."
[node]
(c/exec :stellar-core :--conf :stellar-core.cfg :--newhist node)
(c/exec :stellar-core :--conf :stellar-core.cfg :--newdb)
(c/exec :stellar-core :--conf :stellar-core.cfg :--forcescp))
(defn db
"Standard db/DB reification, for a single node."
[]
(reify db/DB
(setup! [db test node]
(wipe!)
(install! node)
(configure! node)
(initialize! node)
(c/exec :service :stellar-core :start))
(teardown! [db test node]
(meh (c/exec :service :stellar-core :stop)))
))
;; We dynamically track the max-account to read-back during any test.
;; This number only ever increases; it's harmless if it's "too high",
;; we just do a bunch of unsuccessful queries to nonexistent high values
;; at the end of a test.
(def max-account (atom 0))
(defn account-id [n] (keyword (<< "account~{n}")))
(defn client
"Standard client for `:add` and `:read` operations, modeled by `model/set`.
An `{:type :invoke :f :add :value n}` operation causes this client
to add an account named `account~{n}` from a funding-account called
`(nodenum n)`. That is, `account0`, `account5`, `account10`, etc. are all
funded from an intermediate account called `n1`.
Splitting the adds up with intermediate accounts allows them to proceed without
interfering with one another's sequence numbers; if we didn't do this, most adds
would fail (harmlessly, but it's a waste of requests; we want to test adds, not
rejections).
"
[node]
(reify client/Client
(setup! [this test node] (client node))
(teardown! [this test])
(invoke! [this test op]
(case (:f op)
:setup
(binding [*node-host* (name node)]
(let [n (nth nodenames (:value op))]
(assoc op :type (retry-until
:f #(do-create-account n)
:until #(has-account n)))))
:read
(binding [*node-host* (name node)]
(assoc op
:type :ok,
:value (apply sorted-set
(filter #(has-account (account-id %1))
(range (inc @max-account))))))
:add
(binding [*node-host* (name node)]
(let [v (:value op)
id (account-id v)
src (nodenum v)]
(assoc op :type (retry-until
:f #(do-create-account-from src id)
:until #(has-account id)))))
))))
(defn setup
"Generator that invokes :setup on every node, once, single-threaded"
[]
(gen/singlethreaded
(apply gen/concat
(map (fn [node] (gen/on (fn [process] (= process node))
(gen/once {:type :invoke :f :setup :value node})))
(range (count nodenames))))))
(defn adds
"Generator that emits :add operations for sequential integers."
[]
(->> (range)
(map (fn [x]
(swap! max-account (fn [e] (max e x)))
{:type :invoke, :f :add, :value x}))
gen/seq))
(defn recover
"A generator which stops the nemesis and allows some time for recovery."
[]
(gen/nemesis
(gen/phases
(gen/once {:type :info, :f :stop})
(gen/sleep 20))))
(defn read-once
"A generator which reads exactly once."
[]
(gen/clients
(gen/once {:type :invoke, :f :read})))
(defn random-subset
"Return a random subset of a collection"
[coll]
(take (rand-int (inc (count coll))) (shuffle coll)))
(defn damaged-net-nemesis
"Induces network damage on random subset of nodes"
[damage & [nodes]]
(reify client/Client
(setup! [this test _]
(let [nodes (random-subset (:nodes test))]
(c/on-many nodes (meh (net/fast)))
(damaged-net-nemesis damage nodes)))
(invoke! [this test op]
(case (:f op)
:start
(do
(c/on-many nodes (damage))
(assoc op :value (str "network damaged on " (pr-str nodes))))
:stop
(do
(c/on-many nodes (meh (net/fast)))
(assoc op :value (str "healed network on " (pr-str nodes))))
))
(teardown! [this test]
(c/on-many nodes (meh (net/fast)))
this)))
(defn flaky-net-nemesis [] (damaged-net-nemesis net/flaky))
(defn slow-net-nemesis [] (damaged-net-nemesis net/slow))
(defn simple-test
"All our tests follow a simple phase structure: they `:setup` up a new network,
run for 10 minutes `:add`ing sequential accounts and running nemesis disruptions
for 20 seconds every minute (with 40s to recover from each). The network
is then allowed to fully heal, and a final `:read` is performed."
[]
(assoc tests/noop-test
:name "stellar-core"
:os debian/os
:db (db)
:client (client nil)
:model (model/set)
:generator (gen/phases
(setup)
(->> (adds)
(gen/stagger 1/10)
(gen/delay 1)
(gen/nemesis
(gen/seq (cycle
[(gen/sleep 40)
{:type :info :f :start}
(gen/sleep 20)
{:type :info :f :stop}])))
(gen/time-limit 600))
(recover)
(read-once))
:nemesis nemesis/noop
:checker checker/set))
(defn hammer-test
"Randomly pauses (STOP) and resumes (CONT) nodes on the network"
[]
(assoc (simple-test)
:name "hammer-test"
:nemesis (nemesis/hammer-time :stellar-core)))
(defn bridge-test
"Randomly partitions network into [2 nodes] <-> bridge-node <-> [2 nodes]"
[]
(assoc (simple-test)
:name "bridge-test"
:nemesis (nemesis/partitioner (comp nemesis/bridge shuffle))))
(defn majorities-ring-test
"Cuts links such that each node sees a different majority"
[]
(assoc (simple-test)
:name "majorities-ring-test"
:nemesis (nemesis/partition-majorities-ring)))
(defn random-split-test
"Randomly partitions network into halves"
[]
(assoc (simple-test)
:name "random-split-test"
:nemesis (nemesis/partition-random-halves)))
(defn random-isolate-test
"Randomly split a single node off the group"
[]
(assoc (simple-test)
:name "random-isolate-test"
:nemesis (nemesis/partition-random-node)))
(defn flaky-net-test
"Make network connections randomly flaky"
[]
(assoc (simple-test)
:name "flaky-net-test"
:nemesis (flaky-net-nemesis)))
(defn slow-net-test
"Make network connections randomly slow"
[]
(assoc (simple-test)
:name "slow-net-test"
:nemesis (slow-net-nemesis)))
|
[
{
"context": "abels [:customer :person]}\n \"(c {firstName: 'Neo', lastName: 'Anderson'})\" {:ref-id \"c\"\n ",
"end": 2106,
"score": 0.9993467926979065,
"start": 2103,
"tag": "NAME",
"value": "Neo"
},
{
"context": ":person]}\n \"(c {firstName: 'Neo', lastName: 'Anderson'})\" {:ref-id \"c\"\n ",
"end": 2128,
"score": 0.9992109537124634,
"start": 2120,
"tag": "NAME",
"value": "Anderson"
},
{
"context": " :props {:first-name \"Neo\"\n ",
"end": 2224,
"score": 0.9994528889656067,
"start": 2221,
"tag": "NAME",
"value": "Neo"
},
{
"context": " :last_name \"Anderson\"}}\n \"(c:Person:Customer {firstName: 'Neo', l",
"end": 2308,
"score": 0.9996315240859985,
"start": 2300,
"tag": "NAME",
"value": "Anderson"
},
{
"context": "nderson\"}}\n \"(c:Person:Customer {firstName: 'Neo', lastName: 'Anderson'})\" {:ref-id \"c\"\n ",
"end": 2354,
"score": 0.9981800317764282,
"start": 2351,
"tag": "NAME",
"value": "Neo"
},
{
"context": "\"(c:Person:Customer {firstName: 'Neo', lastName: 'Anderson'})\" {:ref-id \"c\"\n ",
"end": 2376,
"score": 0.9990025758743286,
"start": 2368,
"tag": "NAME",
"value": "Anderson"
},
{
"context": " :props {:first-name \"Neo\"\n ",
"end": 2586,
"score": 0.9994159936904907,
"start": 2583,
"tag": "NAME",
"value": "Neo"
},
{
"context": " :last_name \"Anderson\"}})))\n\n(deftest relationship\n (testing \"Cypher r",
"end": 2696,
"score": 0.9997202157974243,
"start": 2688,
"tag": "NAME",
"value": "Anderson"
},
{
"context": ":person]}\n [\"(c:Person:Customer {firstName: 'Neo', lastName: 'Anderson'})\" nil] {:ref-id \"c\"\n ",
"end": 3742,
"score": 0.9997612833976746,
"start": 3739,
"tag": "NAME",
"value": "Neo"
},
{
"context": "\"(c:Person:Customer {firstName: 'Neo', lastName: 'Anderson'})\" nil] {:ref-id \"c\"\n ",
"end": 3764,
"score": 0.9998170137405396,
"start": 3756,
"tag": "NAME",
"value": "Anderson"
},
{
"context": " :props {:first-name \"Neo\"\n ",
"end": 3991,
"score": 0.9998064041137695,
"start": 3988,
"tag": "NAME",
"value": "Neo"
},
{
"context": " :last_name \"Anderson\"}}\n [\"(c:Person:Customer)\" \"(c.firstName = '",
"end": 4097,
"score": 0.9998164772987366,
"start": 4089,
"tag": "NAME",
"value": "Anderson"
},
{
"context": "\"}}\n [\"(c:Person:Customer)\" \"(c.firstName = 'Neo' OR c.lastName = 'Anderson')\"] {:ref-id \"c\"\n ",
"end": 4150,
"score": 0.9997572302818298,
"start": 4147,
"tag": "NAME",
"value": "Neo"
},
{
"context": "Customer)\" \"(c.firstName = 'Neo' OR c.lastName = 'Anderson')\"] {:ref-id \"c\"\n ",
"end": 4177,
"score": 0.9998325109481812,
"start": 4169,
"tag": "NAME",
"value": "Anderson"
},
{
"context": " :props [{:first-name \"Neo\"}\n ",
"end": 4412,
"score": 0.9997982382774353,
"start": 4409,
"tag": "NAME",
"value": "Neo"
},
{
"context": " {:last_name \"Anderson\"}]}\n [\"(c:Person:Customer)\" \"ID(c) = 12\"] {:",
"end": 4526,
"score": 0.9998156428337097,
"start": 4518,
"tag": "NAME",
"value": "Anderson"
}
] |
test/neo4clj/cypher_test.clj
|
WhoNeedszZz/neo4clj
| 0 |
(ns neo4clj.cypher-test
(:require [clojure.test :refer :all]
[neo4clj.cypher :as sut]))
(deftest gen-ref-id
(testing "Generate a unique reference id"
(with-redefs [gensym (fn [] (str "G__123"))]
(is (= "G__123" (sut/gen-ref-id))))))
(deftest where
(testing "Cypher for where parts based on properties"
(let [single-key-props-coll [{:number "12345678"} {:number "87654321"}]
multi-key-props-coll [{:code "+45" :number "12345678"} {:code "+18" :number "87654321"}]
cypher-single-key "G__42.number = '12345678' OR G__42.number = '87654321'"
cypher-multi-keys (str "G__42.code = '+45' AND G__42.number = '12345678'"
" OR "
"G__42.code = '+18' AND G__42.number = '87654321'")]
(are [cypher props]
(= cypher (sut/where "G__42" props))
"G__42.number = '12345678'" {:number "12345678"}
"G__42.code = '+45' AND G__42.number = '12345678'" {:code "+45" :number "12345678"}
cypher-single-key (set single-key-props-coll)
cypher-multi-keys (set multi-key-props-coll)
cypher-single-key single-key-props-coll
cypher-multi-keys multi-key-props-coll
cypher-single-key (apply list single-key-props-coll)
cypher-multi-keys (apply list multi-key-props-coll)))))
(deftest properties
(testing "Cypher representation of property map"
(are [cypher props]
(= cypher (sut/properties props))
nil nil
" {}" {}
" {a: 1, b: 'test', c: TRUE}" {:a 1 :b "test" :c true})))
(deftest labels
(testing "Generating a Cypher representaiton of labels"
(are [cypher labels]
(= cypher (sut/labels labels))
"" []
":Address" [:address]
":Base:Address" [:address :base])))
(deftest node
(testing "Cypher representation of a node"
(are [cypher-parts node]
(= cypher-parts (sut/node node))
"(n)" {:ref-id "n"}
"(p:Person)" {:ref-id "p" :labels [:person]}
"(c:Person:Customer)" {:ref-id "c" :labels [:customer :person]}
"(c {firstName: 'Neo', lastName: 'Anderson'})" {:ref-id "c"
:props {:first-name "Neo"
:last_name "Anderson"}}
"(c:Person:Customer {firstName: 'Neo', lastName: 'Anderson'})" {:ref-id "c"
:labels [:customer :person]
:props {:first-name "Neo"
:last_name "Anderson"}})))
(deftest relationship
(testing "Cypher representation of a relationship"
(are [cypher rel]
(= cypher (sut/relationship "(p:Person)" "(c:Company)" rel))
"(p:Person)-[]->(c:Company)" {}
"(p:Person)-[r]->(c:Company)" {:ref-id "r"}
"(p:Person)-[:EMPLOYEE]->(c:Company)" {:type :employee}
"(p:Person)-[r:FORMER_EMPLOYEE]->(c:Company)" {:ref-id "r" :type :former-employee}
"(p:Person)-[r {hiredAt: 2008}]->(c:Company)" {:ref-id "r" :props {:hired-at 2008}}
"(p:Person)-[r:EMPLOYEE {hiredAt: 2008}]->(c:Company)" {:ref-id "r" :type :employee :props {:hired-at 2008}})))
(deftest lookup
(testing "Cypher representation of a lookup entity including where parts"
(are [cypher-parts lookup]
(= cypher-parts (sut/lookup lookup))
["(n)" nil] {:ref-id "n"}
["(n)" "ID(n) = 12"] {:ref-id "n" :id 12}
["(p:Person)" nil] {:ref-id "p" :labels [:person]}
["(c:Person:Customer)" nil] {:ref-id "c" :labels [:customer :person]}
["(c:Person:Customer {firstName: 'Neo', lastName: 'Anderson'})" nil] {:ref-id "c"
:labels [:customer :person]
:props {:first-name "Neo"
:last_name "Anderson"}}
["(c:Person:Customer)" "(c.firstName = 'Neo' OR c.lastName = 'Anderson')"] {:ref-id "c"
:labels [:customer :person]
:props [{:first-name "Neo"}
{:last_name "Anderson"}]}
["(c:Person:Customer)" "ID(c) = 12"] {:ref-id "c" :labels [:customer :person] :id 12})))
|
1911
|
(ns neo4clj.cypher-test
(:require [clojure.test :refer :all]
[neo4clj.cypher :as sut]))
(deftest gen-ref-id
(testing "Generate a unique reference id"
(with-redefs [gensym (fn [] (str "G__123"))]
(is (= "G__123" (sut/gen-ref-id))))))
(deftest where
(testing "Cypher for where parts based on properties"
(let [single-key-props-coll [{:number "12345678"} {:number "87654321"}]
multi-key-props-coll [{:code "+45" :number "12345678"} {:code "+18" :number "87654321"}]
cypher-single-key "G__42.number = '12345678' OR G__42.number = '87654321'"
cypher-multi-keys (str "G__42.code = '+45' AND G__42.number = '12345678'"
" OR "
"G__42.code = '+18' AND G__42.number = '87654321'")]
(are [cypher props]
(= cypher (sut/where "G__42" props))
"G__42.number = '12345678'" {:number "12345678"}
"G__42.code = '+45' AND G__42.number = '12345678'" {:code "+45" :number "12345678"}
cypher-single-key (set single-key-props-coll)
cypher-multi-keys (set multi-key-props-coll)
cypher-single-key single-key-props-coll
cypher-multi-keys multi-key-props-coll
cypher-single-key (apply list single-key-props-coll)
cypher-multi-keys (apply list multi-key-props-coll)))))
(deftest properties
(testing "Cypher representation of property map"
(are [cypher props]
(= cypher (sut/properties props))
nil nil
" {}" {}
" {a: 1, b: 'test', c: TRUE}" {:a 1 :b "test" :c true})))
(deftest labels
(testing "Generating a Cypher representaiton of labels"
(are [cypher labels]
(= cypher (sut/labels labels))
"" []
":Address" [:address]
":Base:Address" [:address :base])))
(deftest node
(testing "Cypher representation of a node"
(are [cypher-parts node]
(= cypher-parts (sut/node node))
"(n)" {:ref-id "n"}
"(p:Person)" {:ref-id "p" :labels [:person]}
"(c:Person:Customer)" {:ref-id "c" :labels [:customer :person]}
"(c {firstName: '<NAME>', lastName: '<NAME>'})" {:ref-id "c"
:props {:first-name "<NAME>"
:last_name "<NAME>"}}
"(c:Person:Customer {firstName: '<NAME>', lastName: '<NAME>'})" {:ref-id "c"
:labels [:customer :person]
:props {:first-name "<NAME>"
:last_name "<NAME>"}})))
(deftest relationship
(testing "Cypher representation of a relationship"
(are [cypher rel]
(= cypher (sut/relationship "(p:Person)" "(c:Company)" rel))
"(p:Person)-[]->(c:Company)" {}
"(p:Person)-[r]->(c:Company)" {:ref-id "r"}
"(p:Person)-[:EMPLOYEE]->(c:Company)" {:type :employee}
"(p:Person)-[r:FORMER_EMPLOYEE]->(c:Company)" {:ref-id "r" :type :former-employee}
"(p:Person)-[r {hiredAt: 2008}]->(c:Company)" {:ref-id "r" :props {:hired-at 2008}}
"(p:Person)-[r:EMPLOYEE {hiredAt: 2008}]->(c:Company)" {:ref-id "r" :type :employee :props {:hired-at 2008}})))
(deftest lookup
(testing "Cypher representation of a lookup entity including where parts"
(are [cypher-parts lookup]
(= cypher-parts (sut/lookup lookup))
["(n)" nil] {:ref-id "n"}
["(n)" "ID(n) = 12"] {:ref-id "n" :id 12}
["(p:Person)" nil] {:ref-id "p" :labels [:person]}
["(c:Person:Customer)" nil] {:ref-id "c" :labels [:customer :person]}
["(c:Person:Customer {firstName: '<NAME>', lastName: '<NAME>'})" nil] {:ref-id "c"
:labels [:customer :person]
:props {:first-name "<NAME>"
:last_name "<NAME>"}}
["(c:Person:Customer)" "(c.firstName = '<NAME>' OR c.lastName = '<NAME>')"] {:ref-id "c"
:labels [:customer :person]
:props [{:first-name "<NAME>"}
{:last_name "<NAME>"}]}
["(c:Person:Customer)" "ID(c) = 12"] {:ref-id "c" :labels [:customer :person] :id 12})))
| true |
(ns neo4clj.cypher-test
(:require [clojure.test :refer :all]
[neo4clj.cypher :as sut]))
(deftest gen-ref-id
(testing "Generate a unique reference id"
(with-redefs [gensym (fn [] (str "G__123"))]
(is (= "G__123" (sut/gen-ref-id))))))
(deftest where
(testing "Cypher for where parts based on properties"
(let [single-key-props-coll [{:number "12345678"} {:number "87654321"}]
multi-key-props-coll [{:code "+45" :number "12345678"} {:code "+18" :number "87654321"}]
cypher-single-key "G__42.number = '12345678' OR G__42.number = '87654321'"
cypher-multi-keys (str "G__42.code = '+45' AND G__42.number = '12345678'"
" OR "
"G__42.code = '+18' AND G__42.number = '87654321'")]
(are [cypher props]
(= cypher (sut/where "G__42" props))
"G__42.number = '12345678'" {:number "12345678"}
"G__42.code = '+45' AND G__42.number = '12345678'" {:code "+45" :number "12345678"}
cypher-single-key (set single-key-props-coll)
cypher-multi-keys (set multi-key-props-coll)
cypher-single-key single-key-props-coll
cypher-multi-keys multi-key-props-coll
cypher-single-key (apply list single-key-props-coll)
cypher-multi-keys (apply list multi-key-props-coll)))))
(deftest properties
(testing "Cypher representation of property map"
(are [cypher props]
(= cypher (sut/properties props))
nil nil
" {}" {}
" {a: 1, b: 'test', c: TRUE}" {:a 1 :b "test" :c true})))
(deftest labels
(testing "Generating a Cypher representaiton of labels"
(are [cypher labels]
(= cypher (sut/labels labels))
"" []
":Address" [:address]
":Base:Address" [:address :base])))
(deftest node
(testing "Cypher representation of a node"
(are [cypher-parts node]
(= cypher-parts (sut/node node))
"(n)" {:ref-id "n"}
"(p:Person)" {:ref-id "p" :labels [:person]}
"(c:Person:Customer)" {:ref-id "c" :labels [:customer :person]}
"(c {firstName: 'PI:NAME:<NAME>END_PI', lastName: 'PI:NAME:<NAME>END_PI'})" {:ref-id "c"
:props {:first-name "PI:NAME:<NAME>END_PI"
:last_name "PI:NAME:<NAME>END_PI"}}
"(c:Person:Customer {firstName: 'PI:NAME:<NAME>END_PI', lastName: 'PI:NAME:<NAME>END_PI'})" {:ref-id "c"
:labels [:customer :person]
:props {:first-name "PI:NAME:<NAME>END_PI"
:last_name "PI:NAME:<NAME>END_PI"}})))
(deftest relationship
(testing "Cypher representation of a relationship"
(are [cypher rel]
(= cypher (sut/relationship "(p:Person)" "(c:Company)" rel))
"(p:Person)-[]->(c:Company)" {}
"(p:Person)-[r]->(c:Company)" {:ref-id "r"}
"(p:Person)-[:EMPLOYEE]->(c:Company)" {:type :employee}
"(p:Person)-[r:FORMER_EMPLOYEE]->(c:Company)" {:ref-id "r" :type :former-employee}
"(p:Person)-[r {hiredAt: 2008}]->(c:Company)" {:ref-id "r" :props {:hired-at 2008}}
"(p:Person)-[r:EMPLOYEE {hiredAt: 2008}]->(c:Company)" {:ref-id "r" :type :employee :props {:hired-at 2008}})))
(deftest lookup
(testing "Cypher representation of a lookup entity including where parts"
(are [cypher-parts lookup]
(= cypher-parts (sut/lookup lookup))
["(n)" nil] {:ref-id "n"}
["(n)" "ID(n) = 12"] {:ref-id "n" :id 12}
["(p:Person)" nil] {:ref-id "p" :labels [:person]}
["(c:Person:Customer)" nil] {:ref-id "c" :labels [:customer :person]}
["(c:Person:Customer {firstName: 'PI:NAME:<NAME>END_PI', lastName: 'PI:NAME:<NAME>END_PI'})" nil] {:ref-id "c"
:labels [:customer :person]
:props {:first-name "PI:NAME:<NAME>END_PI"
:last_name "PI:NAME:<NAME>END_PI"}}
["(c:Person:Customer)" "(c.firstName = 'PI:NAME:<NAME>END_PI' OR c.lastName = 'PI:NAME:<NAME>END_PI')"] {:ref-id "c"
:labels [:customer :person]
:props [{:first-name "PI:NAME:<NAME>END_PI"}
{:last_name "PI:NAME:<NAME>END_PI"}]}
["(c:Person:Customer)" "ID(c) = 12"] {:ref-id "c" :labels [:customer :person] :id 12})))
|
[
{
"context": "ctures\n (let [p1 (make-instance president [\"gw\" \"George\" \"Washington\" 57] :save false)\n p2 (make-i",
"end": 942,
"score": 0.9988752007484436,
"start": 936,
"tag": "NAME",
"value": "George"
},
{
"context": "(let [p1 (make-instance president [\"gw\" \"George\" \"Washington\" 57] :save false)\n p2 (make-instance presi",
"end": 955,
"score": 0.9768953323364258,
"start": 945,
"tag": "NAME",
"value": "Washington"
},
{
"context": "false)\n p2 (make-instance president [\"ja\" \"John\" \"Adams\" 62] :save false)\n p3 (make-instan",
"end": 1021,
"score": 0.9997465014457703,
"start": 1017,
"tag": "NAME",
"value": "John"
},
{
"context": " p2 (make-instance president [\"ja\" \"John\" \"Adams\" 62] :save false)\n p3 (make-instance presi",
"end": 1029,
"score": 0.9991105198860168,
"start": 1024,
"tag": "NAME",
"value": "Adams"
},
{
"context": "false)\n p3 (make-instance president [\"tj\" \"Thomas\" \"Jefferson\" 58] :save false)\n p4 (make-in",
"end": 1097,
"score": 0.9962217211723328,
"start": 1091,
"tag": "NAME",
"value": "Thomas"
},
{
"context": " p3 (make-instance president [\"tj\" \"Thomas\" \"Jefferson\" 58] :save false)\n p4 (make-instance presi",
"end": 1109,
"score": 0.998910665512085,
"start": 1100,
"tag": "NAME",
"value": "Jefferson"
},
{
"context": "false)\n p4 (make-instance president [\"jm\" \"James\" \"Madison\" 58] :save false)]\n (is (= (p1 :logi",
"end": 1176,
"score": 0.9997162818908691,
"start": 1171,
"tag": "NAME",
"value": "James"
},
{
"context": " p4 (make-instance president [\"jm\" \"James\" \"Madison\" 58] :save false)]\n (is (= (p1 :login) \"gw\"))\n",
"end": 1186,
"score": 0.995337963104248,
"start": 1179,
"tag": "NAME",
"value": "Madison"
},
{
"context": "= (p1 :login) \"gw\"))\n (is (= (p2 :first-name) \"John\"))\n (is (= (p3 :age) 58))\n (is (nil? (p4 :b",
"end": 1269,
"score": 0.9995039105415344,
"start": 1265,
"tag": "NAME",
"value": "John"
},
{
"context": "lt shelf\"\n (make-instance president [\"gw\" \"George\" \"Washington\" 57] :cupboard @cb)\n (verify-",
"end": 3982,
"score": 0.999607503414154,
"start": 3976,
"tag": "NAME",
"value": "George"
},
{
"context": "\n (make-instance president [\"gw\" \"George\" \"Washington\" 57] :cupboard @cb)\n (verify-shelf *defaul",
"end": 3995,
"score": 0.9974391460418701,
"start": 3985,
"tag": "NAME",
"value": "Washington"
},
{
"context": "nt shelf\"\n (make-instance president [\"ja\" \"John\" \"Adams\" 62] :cupboard @cb :shelf-name \"president",
"end": 4162,
"score": 0.9998044967651367,
"start": 4158,
"tag": "NAME",
"value": "John"
},
{
"context": "f\"\n (make-instance president [\"ja\" \"John\" \"Adams\" 62] :cupboard @cb :shelf-name \"presidents\")\n ",
"end": 4170,
"score": 0.9995420575141907,
"start": 4165,
"tag": "NAME",
"value": "Adams"
},
{
"context": "n\n (make-instance president [\"tj\" \"Thomas\" \"Jefferson\" 58]\n ",
"end": 5940,
"score": 0.9956287145614624,
"start": 5938,
"tag": "NAME",
"value": "tj"
},
{
"context": " (make-instance president [\"tj\" \"Thomas\" \"Jefferson\" 58]\n ",
"end": 5949,
"score": 0.9997407793998718,
"start": 5943,
"tag": "NAME",
"value": "Thomas"
},
{
"context": " (make-instance president [\"tj\" \"Thomas\" \"Jefferson\" 58]\n :cupb",
"end": 5961,
"score": 0.999767541885376,
"start": 5952,
"tag": "NAME",
"value": "Jefferson"
},
{
"context": "n\n (make-instance president [\"tj\" \"Thomas\" \"Jefferson\" 58]\n ",
"end": 6137,
"score": 0.978495180606842,
"start": 6135,
"tag": "NAME",
"value": "tj"
},
{
"context": " (make-instance president [\"tj\" \"Thomas\" \"Jefferson\" 58]\n ",
"end": 6146,
"score": 0.9996768236160278,
"start": 6140,
"tag": "NAME",
"value": "Thomas"
},
{
"context": " (make-instance president [\"tj\" \"Thomas\" \"Jefferson\" 58]\n :cupb",
"end": 6158,
"score": 0.9997453689575195,
"start": 6149,
"tag": "NAME",
"value": "Jefferson"
},
{
"context": " (reset! p1 (make-instance president [\"gw\" \"George\" \"Washington\" 57]))\n (reset! p2 (make-ins",
"end": 8149,
"score": 0.9933443069458008,
"start": 8143,
"tag": "NAME",
"value": "George"
},
{
"context": "eset! p1 (make-instance president [\"gw\" \"George\" \"Washington\" 57]))\n (reset! p2 (make-instance preside",
"end": 8162,
"score": 0.9531822204589844,
"start": 8152,
"tag": "NAME",
"value": "Washington"
},
{
"context": " (reset! p2 (make-instance president [\"ja\" \"John\" \"Adams\" 62]))\n (reset! p3 (make-instance",
"end": 8226,
"score": 0.9998449087142944,
"start": 8222,
"tag": "NAME",
"value": "John"
},
{
"context": "(reset! p2 (make-instance president [\"ja\" \"John\" \"Adams\" 62]))\n (reset! p3 (make-instance preside",
"end": 8234,
"score": 0.9988940954208374,
"start": 8229,
"tag": "NAME",
"value": "Adams"
},
{
"context": " (reset! p3 (make-instance president [\"tj\" \"Thomas\" \"Jefferson\" 58]))\n (reset! p4 (make-inst",
"end": 8300,
"score": 0.9966574907302856,
"start": 8294,
"tag": "NAME",
"value": "Thomas"
},
{
"context": "eset! p3 (make-instance president [\"tj\" \"Thomas\" \"Jefferson\" 58]))\n (reset! p4 (make-instance preside",
"end": 8312,
"score": 0.9983585476875305,
"start": 8303,
"tag": "NAME",
"value": "Jefferson"
},
{
"context": " (reset! p4 (make-instance president [\"jm\" \"James\" \"Madison\" 58])))\n\n (testing \"ability to bu",
"end": 8377,
"score": 0.9998151659965515,
"start": 8372,
"tag": "NAME",
"value": "James"
},
{
"context": "reset! p4 (make-instance president [\"jm\" \"James\" \"Madison\" 58])))\n\n (testing \"ability to build a stru",
"end": 8387,
"score": 0.9988812208175659,
"start": 8380,
"tag": "NAME",
"value": "Madison"
},
{
"context": "board*)))\n (is (= @p3 (retrieve :login \"tj\")))\n (is (= @p4 (retrieve :login \"jm\"))",
"end": 8979,
"score": 0.5471889972686768,
"start": 8977,
"tag": "NAME",
"value": "tj"
},
{
"context": " (reset! p1 (make-instance president [\"gw\" \"George\" \"Washington\" 57] :cupboard cb))\n (reset!",
"end": 9469,
"score": 0.9992926120758057,
"start": 9463,
"tag": "NAME",
"value": "George"
},
{
"context": "eset! p1 (make-instance president [\"gw\" \"George\" \"Washington\" 57] :cupboard cb))\n (reset! p2 (make-ins",
"end": 9482,
"score": 0.9738411903381348,
"start": 9472,
"tag": "NAME",
"value": "Washington"
},
{
"context": " (reset! p2 (make-instance president [\"ja\" \"John\" \"Adams\" 62] :cupboard cb))\n (reset! p3 (",
"end": 9559,
"score": 0.9998307228088379,
"start": 9555,
"tag": "NAME",
"value": "John"
},
{
"context": "(reset! p2 (make-instance president [\"ja\" \"John\" \"Adams\" 62] :cupboard cb))\n (reset! p3 (make-ins",
"end": 9567,
"score": 0.9939372539520264,
"start": 9562,
"tag": "NAME",
"value": "Adams"
},
{
"context": " (reset! p3 (make-instance president [\"tj\" \"Thomas\" \"Jefferson\" 58] :cupboard cb))\n (reset! ",
"end": 9646,
"score": 0.9993139505386353,
"start": 9640,
"tag": "NAME",
"value": "Thomas"
},
{
"context": "eset! p3 (make-instance president [\"tj\" \"Thomas\" \"Jefferson\" 58] :cupboard cb))\n (reset! p4 (make-ins",
"end": 9658,
"score": 0.9993257522583008,
"start": 9649,
"tag": "NAME",
"value": "Jefferson"
},
{
"context": " (reset! p4 (make-instance president [\"jm\" \"James\" \"Madison\" 58] :cupboard cb)))\n (with-open-",
"end": 9736,
"score": 0.9998061060905457,
"start": 9731,
"tag": "NAME",
"value": "James"
},
{
"context": "reset! p4 (make-instance president [\"jm\" \"James\" \"Madison\" 58] :cupboard cb)))\n (with-open-cupboard [",
"end": 9746,
"score": 0.996561586856842,
"start": 9739,
"tag": "NAME",
"value": "Madison"
},
{
"context": "board cb)))\n (is (= @p2 (retrieve :login \"ja\" :cupboard cb)))\n (is (= @p3 (retrieve :l",
"end": 9916,
"score": 0.6191811561584473,
"start": 9914,
"tag": "NAME",
"value": "ja"
},
{
"context": "board cb)))\n (is (= @p3 (retrieve :login \"tj\" :cupboard cb)))\n (is (= @p4 (retrieve :l",
"end": 9974,
"score": 0.5769141316413879,
"start": 9972,
"tag": "NAME",
"value": "tj"
},
{
"context": "-cupboard [*cupboard-path*]\n (let [p1 {:login \"gw\" :first-name \"George\" :last-name \"Washington\" :ag",
"end": 10278,
"score": 0.7531513571739197,
"start": 10276,
"tag": "USERNAME",
"value": "gw"
},
{
"context": "ard-path*]\n (let [p1 {:login \"gw\" :first-name \"George\" :last-name \"Washington\" :age 57 :bank-acct nil}]",
"end": 10299,
"score": 0.9997190833091736,
"start": 10293,
"tag": "NAME",
"value": "George"
},
{
"context": "[p1 {:login \"gw\" :first-name \"George\" :last-name \"Washington\" :age 57 :bank-acct nil}]\n\n (testing \"basic ",
"end": 10323,
"score": 0.9994111061096191,
"start": 10313,
"tag": "NAME",
"value": "Washington"
},
{
"context": "-txn []\n (make-instance president [\"gw\" \"George\" \"Washington\" 57])\n (is (= (retrieve :lo",
"end": 10456,
"score": 0.9967432022094727,
"start": 10450,
"tag": "NAME",
"value": "George"
},
{
"context": " (make-instance president [\"gw\" \"George\" \"Washington\" 57])\n (is (= (retrieve :login \"gw\") p1)",
"end": 10469,
"score": 0.731084942817688,
"start": 10459,
"tag": "NAME",
"value": "Washington"
},
{
"context": "\"\n (make-instance president [\"ja\" \"John\" \"Adams\" 62]))))\n (is (empty? (retrieve :l",
"end": 10685,
"score": 0.999793529510498,
"start": 10681,
"tag": "NAME",
"value": "John"
},
{
"context": " (make-instance president [\"ja\" \"John\" \"Adams\" 62]))))\n (is (empty? (retrieve :login \"gw",
"end": 10693,
"score": 0.9995148777961731,
"start": 10688,
"tag": "NAME",
"value": "Adams"
},
{
"context": "c true]\n (make-instance president [\"gw\" \"George\" \"Washington\" 57])\n (commit)\n (",
"end": 10881,
"score": 0.9989142417907715,
"start": 10875,
"tag": "NAME",
"value": "George"
},
{
"context": " (make-instance president [\"gw\" \"George\" \"Washington\" 57])\n (commit)\n (is (thrown-wi",
"end": 10894,
"score": 0.8802573680877686,
"start": 10884,
"tag": "NAME",
"value": "Washington"
},
{
"context": "\"\n (make-instance president [\"ja\" \"John\" \"Adams\" 62]))))\n (is (empty? (retrieve :l",
"end": 11063,
"score": 0.9998087286949158,
"start": 11059,
"tag": "NAME",
"value": "John"
},
{
"context": " (make-instance president [\"ja\" \"John\" \"Adams\" 62]))))\n (is (empty? (retrieve :login \"ja",
"end": 11071,
"score": 0.9994657039642334,
"start": 11066,
"tag": "NAME",
"value": "Adams"
},
{
"context": " removal\"\n (make-instance president [\"aj\" \"Andrew\" \"Johnson\"] :shelf-name \"presidents\")\n (wi",
"end": 11262,
"score": 0.9998049736022949,
"start": 11256,
"tag": "NAME",
"value": "Andrew"
},
{
"context": "\n (make-instance president [\"aj\" \"Andrew\" \"Johnson\"] :shelf-name \"presidents\")\n (with-txn []\n",
"end": 11272,
"score": 0.999691367149353,
"start": 11265,
"tag": "NAME",
"value": "Johnson"
},
{
"context": "helf-name \"presidents\")\n {:login \"aj\" :first-name \"Andrew\" :last-name \"Johnson\"\n ",
"end": 11415,
"score": 0.9197734594345093,
"start": 11413,
"tag": "NAME",
"value": "aj"
},
{
"context": "ents\")\n {:login \"aj\" :first-name \"Andrew\" :last-name \"Johnson\"\n :bank-acc",
"end": 11436,
"score": 0.9997777938842773,
"start": 11430,
"tag": "NAME",
"value": "Andrew"
},
{
"context": " {:login \"aj\" :first-name \"Andrew\" :last-name \"Johnson\"\n :bank-acct nil :age nil}))\n ",
"end": 11457,
"score": 0.999679446220398,
"start": 11450,
"tag": "NAME",
"value": "Johnson"
},
{
"context": "xn [txn1]\n (make-instance president [\"gw\" \"George\" \"Washington\" 57] :txn txn1)\n (rollback tx",
"end": 11869,
"score": 0.9992274045944214,
"start": 11863,
"tag": "NAME",
"value": "George"
},
{
"context": "\n (make-instance president [\"gw\" \"George\" \"Washington\" 57] :txn txn1)\n (rollback txn1))\n (i",
"end": 11882,
"score": 0.8919404745101929,
"start": 11872,
"tag": "NAME",
"value": "Washington"
},
{
"context": "try\n (let [gw (make-instance president [\"gw\" \"George\" \"Washington\" 57] :cupboard cb)\n ja (ma",
"end": 12342,
"score": 0.9997884631156921,
"start": 12336,
"tag": "NAME",
"value": "George"
},
{
"context": "(let [gw (make-instance president [\"gw\" \"George\" \"Washington\" 57] :cupboard cb)\n ja (make-instance p",
"end": 12355,
"score": 0.9994770288467407,
"start": 12345,
"tag": "NAME",
"value": "Washington"
},
{
"context": "cb)\n ja (make-instance president [\"ja\" \"John\" \"Adams\" 62] :cupboard cb)\n done-1 (ato",
"end": 12425,
"score": 0.9998651742935181,
"start": 12421,
"tag": "NAME",
"value": "John"
},
{
"context": " ja (make-instance president [\"ja\" \"John\" \"Adams\" 62] :cupboard cb)\n done-1 (atom false)",
"end": 12433,
"score": 0.9996082782745361,
"start": 12428,
"tag": "NAME",
"value": "Adams"
},
{
"context": "\n (make-instance president [\"gw\" \"George\" \"Washington\" 57] :cupboard c)\n (",
"end": 15445,
"score": 0.9991434216499329,
"start": 15439,
"tag": "NAME",
"value": "George"
},
{
"context": " (make-instance president [\"gw\" \"George\" \"Washington\" 57] :cupboard c)\n (make-instance",
"end": 15458,
"score": 0.9986759424209595,
"start": 15448,
"tag": "NAME",
"value": "Washington"
},
{
"context": "\n (make-instance president [\"ja\" \"John\" \"Adams\" 62] :cupboard c)))\n (send a2 (fn [_",
"end": 15530,
"score": 0.9998463988304138,
"start": 15526,
"tag": "NAME",
"value": "John"
},
{
"context": " (make-instance president [\"ja\" \"John\" \"Adams\" 62] :cupboard c)))\n (send a2 (fn [_]\n ",
"end": 15538,
"score": 0.9989433884620667,
"start": 15533,
"tag": "NAME",
"value": "Adams"
},
{
"context": "\n (make-instance president [\"tj\" \"Thomas\" \"Jefferson\" 58] :cupboard c)\n (m",
"end": 15637,
"score": 0.9935962557792664,
"start": 15631,
"tag": "NAME",
"value": "Thomas"
},
{
"context": " (make-instance president [\"tj\" \"Thomas\" \"Jefferson\" 58] :cupboard c)\n (make-instance",
"end": 15649,
"score": 0.9985429644584656,
"start": 15640,
"tag": "NAME",
"value": "Jefferson"
},
{
"context": "\n (make-instance president [\"jm\" \"James\" \"Madison\" 58] :cupboard c)))\n (await a1 a2)",
"end": 15722,
"score": 0.9998237490653992,
"start": 15717,
"tag": "NAME",
"value": "James"
},
{
"context": " (make-instance president [\"jm\" \"James\" \"Madison\" 58] :cupboard c)))\n (await a1 a2)\n (is",
"end": 15732,
"score": 0.9982016086578369,
"start": 15725,
"tag": "NAME",
"value": "Madison"
},
{
"context": "2-02-22\")\n gw1 {:login \"gw\" :first-name \"George\" :last-name \"Washington\"\n :age 57 :",
"end": 15976,
"score": 0.9995924830436707,
"start": 15970,
"tag": "NAME",
"value": "George"
},
{
"context": "gw1 {:login \"gw\" :first-name \"George\" :last-name \"Washington\"\n :age 57 :bank-acct nil}\n ",
"end": 16000,
"score": 0.9990599155426025,
"start": 15990,
"tag": "NAME",
"value": "Washington"
},
{
"context": "acct nil}\n gw2 {:login \"gw\" :first-name \"George\" :last-name \"Washington\"\n :age 57 :",
"end": 16087,
"score": 0.9998086094856262,
"start": 16081,
"tag": "NAME",
"value": "George"
},
{
"context": "gw2 {:login \"gw\" :first-name \"George\" :last-name \"Washington\"\n :age 57 :bank-acct 1}\n g",
"end": 16111,
"score": 0.9989868998527527,
"start": 16101,
"tag": "NAME",
"value": "Washington"
},
{
"context": "k-acct 1}\n gw3 {:login \"gw\" :first-name \"George\" :last-name \"Washington\"\n :age 57 :",
"end": 16196,
"score": 0.9998013973236084,
"start": 16190,
"tag": "NAME",
"value": "George"
},
{
"context": "gw3 {:login \"gw\" :first-name \"George\" :last-name \"Washington\"\n :age 57 :bank-acct 1 :birthday da",
"end": 16220,
"score": 0.9994884729385376,
"start": 16210,
"tag": "NAME",
"value": "Washington"
},
{
"context": "5-10-30\")\n ja1 {:login \"ja\" :first-name \"John\" :last-name \"Adams\" :age 62 :bank-acct nil}\n ",
"end": 16364,
"score": 0.9998378753662109,
"start": 16360,
"tag": "NAME",
"value": "John"
},
{
"context": " ja1 {:login \"ja\" :first-name \"John\" :last-name \"Adams\" :age 62 :bank-acct nil}\n ja2 {:login \"j",
"end": 16383,
"score": 0.999057948589325,
"start": 16378,
"tag": "NAME",
"value": "Adams"
},
{
"context": "acct nil}\n ja2 {:login \"ja\" :first-name \"John\" :last-name \"Adams\" :age 62 :bank-acct 2}\n ",
"end": 16453,
"score": 0.9998354315757751,
"start": 16449,
"tag": "NAME",
"value": "John"
},
{
"context": " ja2 {:login \"ja\" :first-name \"John\" :last-name \"Adams\" :age 62 :bank-acct 2}\n ja3 {:login \"ja\"",
"end": 16472,
"score": 0.9988579750061035,
"start": 16467,
"tag": "NAME",
"value": "Adams"
},
{
"context": "k-acct 2}\n ja3 {:login \"ja\" :first-name \"John\" :last-name \"Adams\" :age 62 :bank-acct 2\n ",
"end": 16540,
"score": 0.9998304843902588,
"start": 16536,
"tag": "NAME",
"value": "John"
},
{
"context": " ja3 {:login \"ja\" :first-name \"John\" :last-name \"Adams\" :age 62 :bank-acct 2\n :birthday da",
"end": 16559,
"score": 0.9971811175346375,
"start": 16554,
"tag": "NAME",
"value": "Adams"
},
{
"context": "3-04-13\")\n tj1 {:login \"tj\" :first-name \"Thomas\" :last-name \"Jefferson\" :age 58 :bank-acct nil}\n ",
"end": 16705,
"score": 0.9995333552360535,
"start": 16699,
"tag": "NAME",
"value": "Thomas"
},
{
"context": "tj1 {:login \"tj\" :first-name \"Thomas\" :last-name \"Jefferson\" :age 58 :bank-acct nil}\n tj2 {:login \"t",
"end": 16728,
"score": 0.9996870160102844,
"start": 16719,
"tag": "NAME",
"value": "Jefferson"
},
{
"context": "acct nil}\n tj2 {:login \"tj\" :first-name \"Thomas\" :last-name \"Jefferson\" :age 58 :bank-acct 3}\n ",
"end": 16800,
"score": 0.9995603561401367,
"start": 16794,
"tag": "NAME",
"value": "Thomas"
},
{
"context": "tj2 {:login \"tj\" :first-name \"Thomas\" :last-name \"Jefferson\" :age 58 :bank-acct 3}\n tj3 {:login \"tj\"",
"end": 16823,
"score": 0.9996979832649231,
"start": 16814,
"tag": "NAME",
"value": "Jefferson"
},
{
"context": "k-acct 3}\n tj3 {:login \"tj\" :first-name \"Thomas\" :last-name \"Jefferson\" :age 58 :bank-acct 3\n ",
"end": 16893,
"score": 0.9995905160903931,
"start": 16887,
"tag": "NAME",
"value": "Thomas"
},
{
"context": "tj3 {:login \"tj\" :first-name \"Thomas\" :last-name \"Jefferson\" :age 58 :bank-acct 3\n :birthday da",
"end": 16916,
"score": 0.999696671962738,
"start": 16907,
"tag": "NAME",
"value": "Jefferson"
},
{
"context": " (let [p (atom (make-instance president [\"gw\" \"George\" \"Washington\" 57]))]\n (is (= (retrieve :",
"end": 17087,
"score": 0.9997662305831909,
"start": 17081,
"tag": "NAME",
"value": "George"
},
{
"context": "[p (atom (make-instance president [\"gw\" \"George\" \"Washington\" 57]))]\n (is (= (retrieve :login \"gw\") g",
"end": 17100,
"score": 0.9925899505615234,
"start": 17090,
"tag": "NAME",
"value": "Washington"
},
{
"context": " (let [p (atom (make-instance president [\"ja\" \"John\" \"Adams\" 62]\n ",
"end": 17566,
"score": 0.9998685121536255,
"start": 17562,
"tag": "NAME",
"value": "John"
},
{
"context": "t [p (atom (make-instance president [\"ja\" \"John\" \"Adams\" 62]\n :she",
"end": 17574,
"score": 0.9995089769363403,
"start": 17569,
"tag": "NAME",
"value": "Adams"
},
{
"context": " (let [p (atom (make-instance president [\"tj\" \"Thomas\" \"Jefferson\" 58]))]\n (is (= (retrieve :l",
"end": 18207,
"score": 0.9990442395210266,
"start": 18201,
"tag": "NAME",
"value": "Thomas"
},
{
"context": "[p (atom (make-instance president [\"tj\" \"Thomas\" \"Jefferson\" 58]))]\n (is (= (retrieve :login \"tj\") t",
"end": 18219,
"score": 0.9996563792228699,
"start": 18210,
"tag": "NAME",
"value": "Jefferson"
},
{
"context": "th*]\n (let [p1 (make-instance president [\"gw\" \"George\" \"Washington\" 57])\n p2 (make-instance pr",
"end": 18667,
"score": 0.9961322546005249,
"start": 18661,
"tag": "NAME",
"value": "George"
},
{
"context": "(let [p1 (make-instance president [\"gw\" \"George\" \"Washington\" 57])\n p2 (make-instance president [\"ja\"",
"end": 18680,
"score": 0.9288381338119507,
"start": 18670,
"tag": "NAME",
"value": "Washington"
},
{
"context": "57])\n p2 (make-instance president [\"ja\" \"John\" \"Adams\" 62])\n p3 (make-instance preside",
"end": 18736,
"score": 0.9998782277107239,
"start": 18732,
"tag": "NAME",
"value": "John"
},
{
"context": " p2 (make-instance president [\"ja\" \"John\" \"Adams\" 62])\n p3 (make-instance president [\"tj\"",
"end": 18744,
"score": 0.9995734691619873,
"start": 18739,
"tag": "NAME",
"value": "Adams"
},
{
"context": "62])\n p3 (make-instance president [\"tj\" \"Thomas\" \"Jefferson\" 58])\n p4 (make-instance pre",
"end": 18802,
"score": 0.9981731176376343,
"start": 18796,
"tag": "NAME",
"value": "Thomas"
},
{
"context": " p3 (make-instance president [\"tj\" \"Thomas\" \"Jefferson\" 58])\n p4 (make-instance president [\"jm1",
"end": 18814,
"score": 0.9995254874229431,
"start": 18805,
"tag": "NAME",
"value": "Jefferson"
},
{
"context": "8])\n p4 (make-instance president [\"jm1\" \"James\" \"Madison\" 58])\n p5 (make-instance presi",
"end": 18872,
"score": 0.9997828602790833,
"start": 18867,
"tag": "NAME",
"value": "James"
},
{
"context": " p4 (make-instance president [\"jm1\" \"James\" \"Madison\" 58])\n p5 (make-instance president [\"jm2",
"end": 18882,
"score": 0.99930739402771,
"start": 18875,
"tag": "NAME",
"value": "Madison"
},
{
"context": "8])\n p5 (make-instance president [\"jm2\" \"James\" \"Monroe\" 59])\n p6 (make-instance presid",
"end": 18940,
"score": 0.9998456239700317,
"start": 18935,
"tag": "NAME",
"value": "James"
},
{
"context": " p5 (make-instance president [\"jm2\" \"James\" \"Monroe\" 59])\n p6 (make-instance president [\"jqa",
"end": 18949,
"score": 0.9997729659080505,
"start": 18943,
"tag": "NAME",
"value": "Monroe"
},
{
"context": "9])\n p6 (make-instance president [\"jqa\" \"John\" \"Adams\" 58])\n p7 (make-instance preside",
"end": 19006,
"score": 0.9998720288276672,
"start": 19002,
"tag": "NAME",
"value": "John"
},
{
"context": " p6 (make-instance president [\"jqa\" \"John\" \"Adams\" 58])\n p7 (make-instance president [\"aj\"",
"end": 19014,
"score": 0.9996366500854492,
"start": 19009,
"tag": "NAME",
"value": "Adams"
},
{
"context": "58])\n p7 (make-instance president [\"aj\" \"Andrew\" \"Jackson\" 62])\n p8 (make-instance presi",
"end": 19072,
"score": 0.9996964931488037,
"start": 19066,
"tag": "NAME",
"value": "Andrew"
},
{
"context": " p7 (make-instance president [\"aj\" \"Andrew\" \"Jackson\" 62])\n p8 (make-instance president [\"mvb",
"end": 19082,
"score": 0.9996727705001831,
"start": 19075,
"tag": "NAME",
"value": "Jackson"
},
{
"context": "2])\n p8 (make-instance president [\"mvb\" \"Martin\" \"Van Buren\" 55])\n p9 (make-instance pre",
"end": 19141,
"score": 0.999742865562439,
"start": 19135,
"tag": "NAME",
"value": "Martin"
},
{
"context": " p8 (make-instance president [\"mvb\" \"Martin\" \"Van Buren\" 55])\n p9 (make-instance president [\"whh",
"end": 19153,
"score": 0.9997300505638123,
"start": 19144,
"tag": "NAME",
"value": "Van Buren"
},
{
"context": "5])\n p9 (make-instance president [\"whh\" \"William\" \"Harrison\" 68])\n p10 (make-instance pre",
"end": 19213,
"score": 0.9997358918190002,
"start": 19206,
"tag": "NAME",
"value": "William"
},
{
"context": " p9 (make-instance president [\"whh\" \"William\" \"Harrison\" 68])\n p10 (make-instance president [\"jt",
"end": 19224,
"score": 0.9997515678405762,
"start": 19216,
"tag": "NAME",
"value": "Harrison"
},
{
"context": "8])\n p10 (make-instance president [\"jt\" \"John\" \"Tyler\" 51])]\n\n (testing \"no-clause query (",
"end": 19281,
"score": 0.9998618960380554,
"start": 19277,
"tag": "NAME",
"value": "John"
},
{
"context": " p10 (make-instance president [\"jt\" \"John\" \"Tyler\" 51])]\n\n (testing \"no-clause query (list ful",
"end": 19289,
"score": 0.9995540380477905,
"start": 19284,
"tag": "NAME",
"value": "Tyler"
},
{
"context": " (is (= (set (query (< :age 60) (= :first-name \"John\"))) #{p6 p10}))\n (is (= (set (query (= :fi",
"end": 20194,
"score": 0.9984744191169739,
"start": 20190,
"tag": "NAME",
"value": "John"
},
{
"context": "p10}))\n (is (= (set (query (= :first-name \"John\"))) #{p2 p6 p10}))\n (is (= (count (query (",
"end": 20258,
"score": 0.999267041683197,
"start": 20254,
"tag": "NAME",
"value": "John"
},
{
"context": "backs\"\n (query (< :age 60) (= :first-name \"John\")\n :callback #(passoc! % :first-",
"end": 20452,
"score": 0.9995967745780945,
"start": 20448,
"tag": "NAME",
"value": "John"
},
{
"context": " :callback #(passoc! % :first-name \"Jack\"))\n (is (= (retrieve :login \"ja\") p2))\n ",
"end": 20512,
"score": 0.9998260140419006,
"start": 20508,
"tag": "NAME",
"value": "Jack"
},
{
"context": "(= (retrieve :login \"jqa\") (assoc p6 :first-name \"Jack\"))))\n\n (testing \"making sure natural joins a",
"end": 20625,
"score": 0.9998375773429871,
"start": 20621,
"tag": "NAME",
"value": "Jack"
},
{
"context": " '(cupboard.core/query (= :age 58) (= :last-name \"Adams\")\n :callbac",
"end": 20804,
"score": 0.9998009204864502,
"start": 20799,
"tag": "NAME",
"value": "Adams"
},
{
"context": " :callback #(passoc! % :first-name \"John Quincy\")))]\n (is (= (first (first (rest (rest (",
"end": 20892,
"score": 0.9998576045036316,
"start": 20881,
"tag": "NAME",
"value": "John Quincy"
},
{
"context": "join)))\n (query (= :age 58) (= :last-name \"Adams\")\n :callback #(passoc! % :first-",
"end": 21070,
"score": 0.9997785687446594,
"start": 21065,
"tag": "NAME",
"value": "Adams"
},
{
"context": " :callback #(passoc! % :first-name \"John Quincy\"))\n (is (= (retrieve :login \"jqa\") (assoc ",
"end": 21137,
"score": 0.9998492002487183,
"start": 21126,
"tag": "NAME",
"value": "John Quincy"
},
{
"context": "(= (retrieve :login \"jqa\") (assoc p6 :first-name \"John Quincy\"))))\n\n (testing \"delete as a callback\"\n ",
"end": 21214,
"score": 0.9998340606689453,
"start": 21203,
"tag": "NAME",
"value": "John Quincy"
},
{
"context": "gin \"tj\")))\n (is (nil? (retrieve :login \"jm1\")))\n (is (nil? (retrieve :login \"jqa\"))))\n",
"end": 21386,
"score": 0.9434794783592224,
"start": 21385,
"tag": "USERNAME",
"value": "1"
}
] |
test/test/cupboard/core.clj
|
gcv/cupboard
| 16 |
(ns test.cupboard.core
(:use [clojure test])
(:use [cupboard core utils])
(:require [cupboard.bdb.je :as je]))
;;; ----------------------------------------------------------------------------
;;; fixtures
;;; ----------------------------------------------------------------------------
;; Path of the temporary directory holding the on-disk cupboard environment
;; for the currently running test; bound per-test by the fixture below.
(declare ^:dynamic *cupboard-path*)

(defn fixture-cupboard-path
  "Test fixture: binds *cupboard-path* to a fresh temporary directory, runs
  the test body f, then recursively deletes the directory."
  [f]
  (binding [*cupboard-path* (.getAbsolutePath (make-temp-dir))]
    (f)
    (rmdir-recursive *cupboard-path*)))

;; every deftest in this namespace gets its own scratch directory
(use-fixtures :each fixture-cupboard-path)
;;; ----------------------------------------------------------------------------
;;; tests
;;; ----------------------------------------------------------------------------
;; Persistent entity type (cupboard's defpersist macro): a president record
;; with five slots.  :login and :bank-acct carry unique indexes; :first-name,
;; :last-name, and :age carry non-unique (:any) indexes.  The tests below
;; rely on exactly this index layout.
(defpersist president
  ((:login :index :unique)
   (:first-name :index :any)
   (:last-name :index :any)
   (:age :index :any)
   (:bank-acct :index :unique)))
(deftest persistent-structures
  ;; Build unsaved instances (:save false keeps them out of any cupboard) and
  ;; verify slot access plus the index metadata attached by defpersist.
  (let [washington (make-instance president ["gw" "George" "Washington" 57] :save false)
        adams      (make-instance president ["ja" "John" "Adams" 62] :save false)
        jefferson  (make-instance president ["tj" "Thomas" "Jefferson" 58] :save false)
        madison    (make-instance president ["jm" "James" "Madison" 58] :save false)
        adams-meta (meta adams)]
    ;; slot values are readable by keyword lookup on the instance
    (is (= "gw" (washington :login)))
    (is (= "John" (adams :first-name)))
    (is (= 58 (jefferson :age)))
    (is (nil? (madison :bank-acct)))
    ;; index layout declared in defpersist is carried on instance metadata
    (is (= #{:login :bank-acct} (adams-meta :index-uniques)))
    (is (= #{:first-name :last-name :age} (adams-meta :index-anys)))))
(deftest cupboard-basics
  ;; Exercises the full open/write/close/reopen lifecycle of a cupboard on
  ;; disk, then inspects the raw Berkeley DB JE environment to confirm the
  ;; databases cupboard created.
  (let [cb (atom nil)]
    (letfn [;; asserts that the named shelf carries the full president index
            ;; layout (unique: :login :bank-acct; non-unique: :first-name
            ;; :last-name :age) with the expected database names
            (verify-shelf [shelf-name]
              ;; check :login index
              (is (contains? @(:index-unique-dbs (@(:shelves @cb) shelf-name)) :login))
              (is (not (-> @(:index-unique-dbs (@(:shelves @cb) shelf-name))
                           :login :sorted-duplicates)))
              (is (= (-> @(:index-unique-dbs (@(:shelves @cb) shelf-name)) :login :name)
                     (str shelf-name :login)))
              ;; check :bank-acct index
              (is (contains? @(:index-unique-dbs (@(:shelves @cb) shelf-name))
                             :bank-acct))
              (is (not (-> @(:index-unique-dbs (@(:shelves @cb) shelf-name))
                           :bank-acct :sorted-duplicates)))
              ;; check :first-name index
              (is (contains? @(:index-any-dbs (@(:shelves @cb) shelf-name)) :first-name))
              (is (-> @(:index-any-dbs (@(:shelves @cb) shelf-name))
                      :first-name :sorted-duplicates))
              (is (= (-> @(:index-any-dbs (@(:shelves @cb) shelf-name))
                         :first-name :name)
                     (str shelf-name :first-name)))
              ;; check :last-name index
              (is (contains? @(:index-any-dbs (@(:shelves @cb) shelf-name)) :last-name))
              (is (-> @(:index-any-dbs (@(:shelves @cb) shelf-name))
                      :last-name :sorted-duplicates))
              ;; check :age index
              (is (contains? @(:index-any-dbs (@(:shelves @cb) shelf-name)) :age))
              (is (-> @(:index-any-dbs (@(:shelves @cb) shelf-name))
                      :age :sorted-duplicates)))]
      (testing "making an empty cupboard and checking its state"
        (reset! cb (open-cupboard *cupboard-path*))
        (is (not (nil? @(:cupboard-env @cb))))
        (is (not (nil? @(:shelves-db @cb))))
        (is (not (nil? @(:shelves @cb))))
        (is (not (@(:shelves-db @cb) :sorted-duplicates)))
        ;; a fresh cupboard contains only the default shelf, with no indexes
        (is (= (count @(:shelves @cb)) 1))
        (is (= (:name @(:shelves-db @cb)) *shelves-db-name*))
        (is (contains? @(:shelves @cb) *default-shelf-name*))
        (is (empty? @(:index-unique-dbs (@(:shelves @cb) *default-shelf-name*))))
        (is (empty? @(:index-any-dbs (@(:shelves @cb) *default-shelf-name*))))
        (is (not (-> (@(:shelves @cb) *default-shelf-name*) :db :sorted-duplicates))))
      (testing "writing something to the default shelf"
        (make-instance president ["gw" "George" "Washington" 57] :cupboard @cb)
        (verify-shelf *default-shelf-name*))
      (testing "writing something to a different shelf"
        (make-instance president ["ja" "John" "Adams" 62] :cupboard @cb :shelf-name "presidents")
        (verify-shelf "presidents"))
      (testing "closing cupboard"
        (close-cupboard @cb)
        ;; BUG FIX: these checks were bare (= ...) forms whose results were
        ;; discarded (single-argument = is even constantly true), so they
        ;; asserted nothing; wrap them in (is ...) as clearly intended.
        (is (nil? @(:cupboard-env @cb)))
        (is (empty? @(:shelves-db @cb)))
        (is (empty? @(:shelves @cb))))
      (testing "reopening cupboard read-only, and verifying correctness of reopened state"
        (reset! cb (open-cupboard *cupboard-path* :read-only true))
        (verify-shelf *default-shelf-name*)
        (verify-shelf "presidents")
        ;; mutating a read-only cupboard must fail loudly
        (is (thrown-with-msg? RuntimeException #".*[Rr]ead.+[Oo]nly.*"
              (query (= :login "gw")
                     :callback #(delete % :cupboard @cb)
                     :cupboard @cb))))
      (testing "closing cupboard again"
        (close-cupboard @cb)
        ;; BUG FIX: same no-op (= ...) forms as above; now real assertions.
        (is (nil? @(:cupboard-env @cb)))
        (is (empty? @(:shelves-db @cb)))
        (is (empty? @(:shelves @cb))))
      (testing "reopening cupboard, and verifying correctness of reopened state"
        (reset! cb (open-cupboard *cupboard-path*))
        (verify-shelf *default-shelf-name*)
        (verify-shelf "presidents"))
      (testing "deleting shelf"
        (remove-shelf "presidents" :cupboard @cb)
        (verify-shelf *default-shelf-name*)
        (is (not (contains? @(:shelves @cb) "presidents")))
        (is (not (contains? (list-shelves :cupboard @cb) "presidents"))))
      (testing "closing and reopening cupboard, and verifying correctness"
        (close-cupboard @cb)
        (reset! cb (open-cupboard *cupboard-path*))
        (verify-shelf *default-shelf-name*)
        ;; shelf removal must survive a close/reopen cycle
        (is (not (contains? @(:shelves @cb) "presidents"))))
      (testing "checking invalid shelf names"
        ;; BUG FIX: these calls previously passed the atom cb as :cupboard
        ;; instead of the cupboard value @cb (every other call here uses @cb),
        ;; so the thrown? checks could pass because of the bad :cupboard
        ;; argument rather than because of shelf-name validation.
        (is (thrown? RuntimeException
              (make-instance president ["tj" "Thomas" "Jefferson" 58]
                             :cupboard @cb :shelf-name "invalid:name")))
        (is (thrown? RuntimeException
              (make-instance president ["tj" "Thomas" "Jefferson" 58]
                             :cupboard @cb :shelf-name *shelves-db-name*))))
      (close-cupboard @cb)))
  (testing "check the correctness of the cupboard databases"
    ;; bypass cupboard and open the JE environment directly to confirm the
    ;; physical database layout it left behind
    (je/with-db-env [env *cupboard-path*]
      (let [idx-name-age (str *default-shelf-name* :age)
            idx-name-bank-acct (str *default-shelf-name* :bank-acct)
            idx-name-first-name (str *default-shelf-name* :first-name)
            idx-name-last-name (str *default-shelf-name* :last-name)
            idx-name-login (str *default-shelf-name* :login)]
        (testing "checking environment"
          (is (= (set (.getDatabaseNames @(env :env-handle)))
                 #{*shelves-db-name* *default-shelf-name*
                   idx-name-age idx-name-bank-acct idx-name-first-name
                   idx-name-last-name idx-name-login})))
        (testing "checking _shelves"
          ;; the _shelves catalog database lists each index database and its
          ;; duplicate-sorting flag, in lexicographic key order
          (je/with-db [shelves-db env *shelves-db-name*]
            (je/with-db-cursor [cur1 shelves-db]
              (is (= (je/db-cursor-first cur1) [*default-shelf-name* {}]))
              (is (= (je/db-cursor-next cur1)
                     [idx-name-age {:sorted-duplicates true}]))
              (is (= (je/db-cursor-next cur1)
                     [idx-name-bank-acct {:sorted-duplicates false}]))
              (is (= (je/db-cursor-next cur1)
                     [idx-name-first-name {:sorted-duplicates true}]))
              (is (= (je/db-cursor-next cur1)
                     [idx-name-last-name {:sorted-duplicates true}]))
              (is (= (je/db-cursor-next cur1)
                     [idx-name-login {:sorted-duplicates false}]))
              ;; cursor exhausted: exactly the expected entries, no more
              (is (= (je/db-cursor-next cur1) [])))))))))
(deftest basics
  ;; End-to-end smoke test: write four presidents, close the cupboard, then
  ;; reopen it and read them back -- once through the implicit *cupboard*
  ;; dynamic binding and once through an explicitly named cupboard handle.
  (let [cupboard-location (make-temp-dir)
        p1 (atom nil)
        p2 (atom nil)
        p3 (atom nil)
        p4 (atom nil)]
    (testing "default *cupboard*"
      (try
        (with-open-cupboard [cupboard-location]
          (reset! p1 (make-instance president ["gw" "George" "Washington" 57]))
          (reset! p2 (make-instance president ["ja" "John" "Adams" 62]))
          (reset! p3 (make-instance president ["tj" "Thomas" "Jefferson" 58]))
          (reset! p4 (make-instance president ["jm" "James" "Madison" 58])))
        (testing "ability to build a struct-map on retrieve"
          (with-open-cupboard [cupboard-location]
            (is (= @p1 (retrieve :login "gw")))
            ;; :struct asks retrieve to return a struct-map of the given
            ;; persistent type instead of a plain hash-map, with the same
            ;; index metadata attached
            (let [sp1 (retrieve :login "gw" :struct president)]
              (is (= (type sp1) clojure.lang.PersistentStructMap))
              (is (= (meta sp1) (meta @p1))))))
        (testing "plain hash-map retrieval"
          (with-open-cupboard [cupboard-location]
            (is (= @p1 (retrieve :login "gw")))
            (is (= @p2 (retrieve :login "ja" :cupboard *cupboard*)))
            (is (= @p3 (retrieve :login "tj")))
            (is (= @p4 (retrieve :login "jm")))
            (testing ":any index retrieval"
              ;; two presidents share :age 58, so a lookup on the non-unique
              ;; :age index returns both of them
              (is (= (set (retrieve :age 58)) #{@p4 @p3})))
            (testing "deletion"
              (delete @p2)
              (is (nil? (retrieve :login "ja"))))))
        (finally
          ;; always remove the on-disk environment, even when the test fails
          (rmdir-recursive cupboard-location))))
    (testing "explicitly bound cupboard"
      (try
        (with-open-cupboard [cb cupboard-location]
          (reset! p1 (make-instance president ["gw" "George" "Washington" 57] :cupboard cb))
          (reset! p2 (make-instance president ["ja" "John" "Adams" 62] :cupboard cb))
          (reset! p3 (make-instance president ["tj" "Thomas" "Jefferson" 58] :cupboard cb))
          (reset! p4 (make-instance president ["jm" "James" "Madison" 58] :cupboard cb)))
        (with-open-cupboard [cb cupboard-location]
          (is (= @p1 (retrieve :login "gw" :cupboard cb)))
          (is (= @p2 (retrieve :login "ja" :cupboard cb)))
          (is (= @p3 (retrieve :login "tj" :cupboard cb)))
          (is (= @p4 (retrieve :login "jm" :cupboard cb)))
          ;; with no dynamic *cupboard* bound, an implicit retrieve must fail
          (is (thrown? NullPointerException (retrieve :login "jm"))))
        (finally
          (rmdir-recursive cupboard-location))))))
(deftest transaction-basics
  ;; Verifies commit and rollback semantics of with-txn: writes roll back,
  ;; a transaction handle is unusable once committed or rolled back, and
  ;; shelf removal participates in transactions.
  (with-open-cupboard [*cupboard-path*]
    (let [p1 {:login "gw" :first-name "George" :last-name "Washington" :age 57 :bank-acct nil}]
      (testing "basic transactions"
        (with-txn []
          (make-instance president ["gw" "George" "Washington" 57])
          ;; an uncommitted write is visible inside its own transaction
          (is (= (retrieve :login "gw") p1))
          (rollback)
          ;; the transaction is closed after rollback; further writes on it
          ;; must fail
          (is (thrown-with-msg?
               RuntimeException #".*non-open transaction"
               (make-instance president ["ja" "John" "Adams" 62]))))
        ;; nothing from the rolled-back transaction persisted
        (is (empty? (retrieve :login "gw")))
        (is (empty? (retrieve :login "ja")))
        (with-txn [:write-no-sync true]
          (make-instance president ["gw" "George" "Washington" 57])
          (commit)
          ;; likewise closed after an explicit commit
          (is (thrown-with-msg?
               RuntimeException #".*non-open transaction"
               (make-instance president ["ja" "John" "Adams" 62]))))
        ;; only the committed write survived
        (is (empty? (retrieve :login "ja")))
        (is (= (retrieve :login "gw") p1)))
      (testing "transactional shelf removal"
        (make-instance president ["aj" "Andrew" "Johnson"] :shelf-name "presidents")
        (with-txn []
          ;; slots not supplied to make-instance default to nil
          (is (= (retrieve :login "aj" :shelf-name "presidents")
                 {:login "aj" :first-name "Andrew" :last-name "Johnson"
                  :bank-acct nil :age nil}))
          (remove-shelf "presidents")
          (is (not (some #(= % "presidents") (list-shelves))))
          (rollback))
        ;; rollback restores the removed shelf
        (is (some #(= % "presidents") (list-shelves)))))))
(deftest transaction-binding
  ;; A transaction bound to a lexical name (rather than the dynamic default)
  ;; must be usable through the :txn keyword argument and roll back cleanly.
  (with-open-cupboard [*cupboard-path*]
    (testing "lexically bound transaction"
      (with-txn [tx]
        (make-instance president ["gw" "George" "Washington" 57] :txn tx)
        (rollback tx))
      ;; everything written inside the rolled-back transaction is gone
      (is (empty? (retrieve :login "gw"))))))
;;; TODO: These deadlock tests fail, because deadlock detection is difficult to
;;; test. Bugs in implementation are also possible.
;;; NOTE(review): the entire deftest below is disabled with the #_ reader
;;; discard, so it is read by the compiler but never evaluated.
#_ (deftest deadlocks
;; Cannot use with-open-cupboard because Clojure's dynamic variables do not
;; propagate to child threads.
(let [cb (open-cupboard *cupboard-path*)]
(try
(let [gw (make-instance president ["gw" "George" "Washington" 57] :cupboard cb)
ja (make-instance president ["ja" "John" "Adams" 62] :cupboard cb)
done-1 (atom false)
done-2 (atom false)]
;; Two threads update the same two records in opposite order, forcing a
;; deadlock; with-txn is expected to retry per :max-attempts after the
;; configured :retry-delay-msec.
(testing "deadlock resolution, both threads commit"
(.start (Thread. (fn []
(with-txn [:cupboard cb :max-attempts 2 :retry-delay-msec 10]
(passoc! gw :bank-acct 1 :cupboard cb)
(Thread/sleep 50)
(passoc! ja :bank-acct 2 :cupboard cb))
(reset! done-1 true))))
(.start (Thread. (fn []
(with-txn [:cupboard cb :max-attempts 2 :retry-delay-msec 250]
(passoc! ja :bank-acct 3 :cupboard cb)
(Thread/sleep 50)
(passoc! gw :bank-acct 4 :cupboard cb))
(reset! done-2 true))))
;; wait for threads to complete
(loop [i 0]
(when-not (and @done-1 @done-2)
(Thread/sleep 100)
(recur (inc i))))
;; The first thread has a shorter retry delay, so it should win the
;; race. It commits first, then the second thread overwrites the
;; values.
(is (= (retrieve :login "gw" :cupboard cb) (assoc gw :bank-acct 4)))
(is (= (retrieve :login "ja" :cupboard cb) (assoc ja :bank-acct 3))))
(testing "deadlock resolution, one thread rolls back permanently"
(reset! done-1 false)
(reset! done-2 false)
(.start (Thread. (fn []
(with-txn [:cupboard cb :max-attempts 2 :retry-delay-msec 100]
(passoc! gw :bank-acct 5 :cupboard cb)
(Thread/sleep 100)
(passoc! ja :bank-acct 6 :cupboard cb))
(reset! done-1 true))))
;; :max-attempts 1 gives this thread no retries, so its deadlock must
;; surface as a RuntimeException and its writes must never commit.
(.start (Thread. (fn []
(is (thrown? RuntimeException
(try
(with-txn [:cupboard cb :max-attempts 1]
(passoc! ja :bank-acct 7 :cupboard cb)
(Thread/sleep 10)
(passoc! gw :bank-acct 8 :cupboard cb))
(finally
(reset! done-2 true))))))))
;; wait for threads to complete
(loop [i 0]
(when-not (and @done-1 @done-2)
(Thread/sleep 100)
(recur (inc i))))
;; Only the first thread should commit here.
(is (= (retrieve :login "gw" :cupboard cb) (assoc gw :bank-acct 5)))
(is (= (retrieve :login "ja" :cupboard cb) (assoc ja :bank-acct 6)))))
(finally
(close-cupboard cb)))))
;; Two agents write two presidents each to the same cupboard concurrently;
;; after awaiting both, all four records must be present. The cupboard is
;; passed explicitly via :cupboard because dynamic bindings do not propagate
;; to the agent thread pool (see the note in the disabled deadlocks test).
(deftest simple-concurrency
(with-open-cupboard [c *cupboard-path*]
(let [a1 (agent nil)
a2 (agent nil)]
(send a1 (fn [_]
(make-instance president ["gw" "George" "Washington" 57] :cupboard c)
(make-instance president ["ja" "John" "Adams" 62] :cupboard c)))
(send a2 (fn [_]
(make-instance president ["tj" "Thomas" "Jefferson" 58] :cupboard c)
(make-instance president ["jm" "James" "Madison" 58] :cupboard c)))
;; block until both agents have finished their sends
(await a1 a2)
;; all four writes from both threads must have landed
(is (= (shelf-count :cupboard c) 4)))))
;; Verifies persistent assoc/dissoc: passoc! must write the new slot value
;; through to the store and pdissoc! must remove it, on the default shelf,
;; on a named shelf, and with vector (multi-key) operand forms. Each call's
;; return value is fed back via the atom so subsequent calls operate on the
;; latest persisted map.
(deftest passoc!-pdissoc!
(with-open-cupboard [*cupboard-path*]
(let [date-gw (localdate "1732-02-22")
gw1 {:login "gw" :first-name "George" :last-name "Washington"
:age 57 :bank-acct nil}
gw2 {:login "gw" :first-name "George" :last-name "Washington"
:age 57 :bank-acct 1}
gw3 {:login "gw" :first-name "George" :last-name "Washington"
:age 57 :bank-acct 1 :birthday date-gw}
date-ja (localdate "1735-10-30")
ja1 {:login "ja" :first-name "John" :last-name "Adams" :age 62 :bank-acct nil}
ja2 {:login "ja" :first-name "John" :last-name "Adams" :age 62 :bank-acct 2}
ja3 {:login "ja" :first-name "John" :last-name "Adams" :age 62 :bank-acct 2
:birthday date-ja}
date-tj (localdate "1743-04-13")
tj1 {:login "tj" :first-name "Thomas" :last-name "Jefferson" :age 58 :bank-acct nil}
tj2 {:login "tj" :first-name "Thomas" :last-name "Jefferson" :age 58 :bank-acct 3}
tj3 {:login "tj" :first-name "Thomas" :last-name "Jefferson" :age 58 :bank-acct 3
:birthday date-tj}]
(testing "simple passoc!-pdissoc! operations"
(let [p (atom (make-instance president ["gw" "George" "Washington" 57]))]
(is (= (retrieve :login "gw") gw1))
(reset! p (passoc! @p :bank-acct 1))
(is (= (retrieve :login "gw") gw2))
;; :birthday is not a defpersist slot; passoc! must still persist it
(reset! p (passoc! @p :birthday date-gw))
(is (= (retrieve :login "gw") gw3))
(reset! p (pdissoc! @p :birthday))
(is (= (retrieve :login "gw") gw2))))
(testing "passoc!-pdissoc! operations on non-default shelves"
(let [p (atom (make-instance president ["ja" "John" "Adams" 62]
:shelf-name "presidents"))]
(is (= (retrieve :login "ja" :shelf-name "presidents") ja1))
(reset! p (passoc! @p :bank-acct 2))
(is (= (retrieve :login "ja" :shelf-name "presidents") ja2))
(reset! p (passoc! @p :birthday date-ja))
(is (= (retrieve :login "ja" :shelf-name "presidents") ja3))
(reset! p (pdissoc! @p :birthday))
(is (= (retrieve :login "ja" :shelf-name "presidents") ja2))))
(testing "passoc!-pdissoc! operations with multiple operands"
(let [p (atom (make-instance president ["tj" "Thomas" "Jefferson" 58]))]
(is (= (retrieve :login "tj") tj1))
;; vector operand form: several key/value pairs in one call
(reset! p (passoc! @p [:bank-acct 3 :birthday date-tj]))
(is (= (retrieve :login "tj") tj3))
(reset! p (passoc! @p [:nonce1 1 :nonce2 2]))
(reset! p (pdissoc! @p [:birthday :nonce1 :nonce2]))
(is (= (retrieve :login "tj") tj2)))))))
;; Exercises the query macro end to end: no-clause listing, single- and
;; multi-clause filters over unique and :any indexes, :limit, destructive
;; :callback (passoc!, delete), the natural-join macroexpansion, and
;; :struct coercion of results. NOTE: later testing forms depend on the
;; mutations performed by earlier ones, so order matters throughout.
(deftest queries
(with-open-cupboard [*cupboard-path*]
(let [p1 (make-instance president ["gw" "George" "Washington" 57])
p2 (make-instance president ["ja" "John" "Adams" 62])
p3 (make-instance president ["tj" "Thomas" "Jefferson" 58])
p4 (make-instance president ["jm1" "James" "Madison" 58])
p5 (make-instance president ["jm2" "James" "Monroe" 59])
p6 (make-instance president ["jqa" "John" "Adams" 58])
p7 (make-instance president ["aj" "Andrew" "Jackson" 62])
p8 (make-instance president ["mvb" "Martin" "Van Buren" 55])
p9 (make-instance president ["whh" "William" "Harrison" 68])
p10 (make-instance president ["jt" "John" "Tyler" 51])]
(testing "no-clause query (list full database contents)"
(is (= (set (query))
#{p1 p2 p3 p4 p5 p6 p7 p8 p9 p10})))
(testing "basic one-clause query operations"
(is (= (set (query (= :login "gw"))) #{p1}))
(is (= (set (query (= :login "aj"))) #{p7}))
(is (= (set (query (= :age 57))) #{p1}))
;; contradictory clauses on the same attribute must yield nothing
(is (empty? (query (= :age 57) (= :age 62))))
(is (= (set (query (= :age 62))) #{p2 p7}))
(is (= (set (query (<= :age 55))) #{p8 p10}))
(is (= (set (query (< :age 55))) #{p10}))
(is (= (set (query (> :age 60))) #{p2 p7 p9})))
(testing "queries with multiple clauses"
(is (= (set (query (< :age 60) (starts-with :first-name "J"))) #{p4 p5 p6 p10}))
(is (= (set (query (< :age 60) (starts-with :first-name "Ja"))) #{p4 p5}))
(is (= (set (query (< :age 60) (= :first-name "John"))) #{p6 p10}))
(is (= (set (query (= :first-name "John"))) #{p2 p6 p10}))
;; :limit caps the number of results returned
(is (= (count (query (< :age 60) (starts-with :first-name "J") :limit 2)) 2)))
(testing "destructive callbacks"
;; renames jqa and jt (age < 60); ja (age 62) must be untouched
(query (< :age 60) (= :first-name "John")
:callback #(passoc! % :first-name "Jack"))
(is (= (retrieve :login "ja") p2))
(is (= (retrieve :login "jqa") (assoc p6 :first-name "Jack"))))
(testing "making sure natural joins are used wherever possible"
;; two equality clauses should expand into a query-natural-join call;
;; the nested first/rest walk digs that symbol out of the expansion
(let [q (macroexpand-1
'(cupboard.core/query (= :age 58) (= :last-name "Adams")
:callback #(passoc! % :first-name "John Quincy")))]
(is (= (first (first (rest (rest (rest (first (rest q)))))))
'cupboard.core/query-natural-join)))
(query (= :age 58) (= :last-name "Adams")
:callback #(passoc! % :first-name "John Quincy"))
(is (= (retrieve :login "jqa") (assoc p6 :first-name "John Quincy"))))
(testing "delete as a callback"
;; removes tj, jm1, and jqa (the three remaining 58-year-olds)
(query (= :age 58) :callback delete)
(is (nil? (retrieve :login "tj")))
(is (nil? (retrieve :login "jm1")))
(is (nil? (retrieve :login "jqa"))))
(testing "making sure that :struct applied to query works"
(let [everyone (query (> :age 50) :struct president)]
(is (> (count everyone) 0))
(is (every? #(= (type %) clojure.lang.PersistentStructMap) everyone)))))))
|
58386
|
(ns test.cupboard.core
(:use [clojure test])
(:use [cupboard core utils])
(:require [cupboard.bdb.je :as je]))
;;; ----------------------------------------------------------------------------
;;; fixtures
;;; ----------------------------------------------------------------------------
(declare ^:dynamic *cupboard-path*)
(defn fixture-cupboard-path [f]
(binding [*cupboard-path* (.getAbsolutePath (make-temp-dir))]
(f)
(rmdir-recursive *cupboard-path*)))
(use-fixtures :each fixture-cupboard-path)
;;; ----------------------------------------------------------------------------
;;; tests
;;; ----------------------------------------------------------------------------
(defpersist president
((:login :index :unique)
(:first-name :index :any)
(:last-name :index :any)
(:age :index :any)
(:bank-acct :index :unique)))
(deftest persistent-structures
(let [p1 (make-instance president ["gw" "<NAME>" "<NAME>" 57] :save false)
p2 (make-instance president ["ja" "<NAME>" "<NAME>" 62] :save false)
p3 (make-instance president ["tj" "<NAME>" "<NAME>" 58] :save false)
p4 (make-instance president ["jm" "<NAME>" "<NAME>" 58] :save false)]
(is (= (p1 :login) "gw"))
(is (= (p2 :first-name) "<NAME>"))
(is (= (p3 :age) 58))
(is (nil? (p4 :bank-acct)))
(is (= ((meta p2) :index-uniques) #{:login :bank-acct}))
(is (= ((meta p2) :index-anys) #{:first-name :last-name :age}))))
(deftest cupboard-basics
(let [cb (atom nil)]
(letfn [(verify-shelf [shelf-name]
;; check :login index
(is (contains? @(:index-unique-dbs (@(:shelves @cb) shelf-name)) :login))
(is (not (-> @(:index-unique-dbs (@(:shelves @cb) shelf-name))
:login :sorted-duplicates)))
(is (= (-> @(:index-unique-dbs (@(:shelves @cb) shelf-name)) :login :name)
(str shelf-name :login)))
;; check :bank-acct index
(is (contains? @(:index-unique-dbs (@(:shelves @cb) shelf-name))
:bank-acct))
(is (not (-> @(:index-unique-dbs (@(:shelves @cb) shelf-name))
:bank-acct :sorted-duplicates)))
;; check :first-name index
(is (contains? @(:index-any-dbs (@(:shelves @cb) shelf-name)) :first-name))
(is (-> @(:index-any-dbs (@(:shelves @cb) shelf-name))
:first-name :sorted-duplicates))
(is (= (-> @(:index-any-dbs (@(:shelves @cb) shelf-name))
:first-name :name)
(str shelf-name :first-name)))
;; check :last-name index
(is (contains? @(:index-any-dbs (@(:shelves @cb) shelf-name)) :last-name))
(is (-> @(:index-any-dbs (@(:shelves @cb) shelf-name))
:last-name :sorted-duplicates))
;; check :age index
(is (contains? @(:index-any-dbs (@(:shelves @cb) shelf-name)) :age))
(is (-> @(:index-any-dbs (@(:shelves @cb) shelf-name))
:age :sorted-duplicates)))]
(testing "making an empty cupboard and checking its state"
(reset! cb (open-cupboard *cupboard-path*))
(is (not (nil? @(:cupboard-env @cb))))
(is (not (nil? @(:shelves-db @cb))))
(is (not (nil? @(:shelves @cb))))
(is (not (@(:shelves-db @cb) :sorted-duplicates)))
(is (= (count @(:shelves @cb)) 1))
(is (= (:name @(:shelves-db @cb)) *shelves-db-name*))
(is (contains? @(:shelves @cb) *default-shelf-name*))
(is (empty? @(:index-unique-dbs (@(:shelves @cb) *default-shelf-name*))))
(is (empty? @(:index-any-dbs (@(:shelves @cb) *default-shelf-name*))))
(is (not (-> (@(:shelves @cb) *default-shelf-name*) :db :sorted-duplicates))))
(testing "writing something to the default shelf"
(make-instance president ["gw" "<NAME>" "<NAME>" 57] :cupboard @cb)
(verify-shelf *default-shelf-name*))
(testing "writing something to a different shelf"
(make-instance president ["ja" "<NAME>" "<NAME>" 62] :cupboard @cb :shelf-name "presidents")
(verify-shelf "presidents"))
(testing "closing cupboard"
(close-cupboard @cb)
(= (nil? @(:cupboard-env @cb)))
(= (empty? @(:shelves-db @cb)))
(= (empty? @(:shelves @cb))))
(testing "reopening cupboard read-only, and verifying correctness of reopened state"
(reset! cb (open-cupboard *cupboard-path* :read-only true))
(verify-shelf *default-shelf-name*)
(verify-shelf "presidents")
(is (thrown-with-msg? RuntimeException #".*[Rr]ead.+[Oo]nly.*"
(query (= :login "gw")
:callback #(delete % :cupboard @cb)
:cupboard @cb))))
(testing "closing cupboard again"
(close-cupboard @cb)
(= (nil? @(:cupboard-env @cb)))
(= (empty? @(:shelves-db @cb)))
(= (empty? @(:shelves @cb))))
(testing "reopening cupboard, and verifying correctness of reopened state"
(reset! cb (open-cupboard *cupboard-path*))
(verify-shelf *default-shelf-name*)
(verify-shelf "presidents"))
(testing "deleting shelf"
(remove-shelf "presidents" :cupboard @cb)
(verify-shelf *default-shelf-name*)
(is (not (contains? @(:shelves @cb) "presidents")))
(is (not (contains? (list-shelves :cupboard @cb) "presidents"))))
(testing "closing and reopening cupboard, and verifying correctness"
(close-cupboard @cb)
(reset! cb (open-cupboard *cupboard-path*))
(verify-shelf *default-shelf-name*)
(is (not (contains? @(:shelves @cb) "presidents"))))
(testing "checking invalid shelf names"
(is (thrown? RuntimeException
(make-instance president ["<NAME>" "<NAME>" "<NAME>" 58]
:cupboard cb :shelf-name "invalid:name")))
(is (thrown? RuntimeException
(make-instance president ["<NAME>" "<NAME>" "<NAME>" 58]
:cupboard cb :shelf-name *shelves-db-name*))))
(close-cupboard @cb)))
(testing "check the correctness of the cupboard databases"
(je/with-db-env [env *cupboard-path*]
(let [idx-name-age (str *default-shelf-name* :age)
idx-name-bank-acct (str *default-shelf-name* :bank-acct)
idx-name-first-name (str *default-shelf-name* :first-name)
idx-name-last-name (str *default-shelf-name* :last-name)
idx-name-login (str *default-shelf-name* :login)]
(testing "checking environment"
(is (= (set (.getDatabaseNames @(env :env-handle)))
#{*shelves-db-name* *default-shelf-name*
idx-name-age idx-name-bank-acct idx-name-first-name
idx-name-last-name idx-name-login})))
(testing "checking _shelves"
(je/with-db [shelves-db env *shelves-db-name*]
(je/with-db-cursor [cur1 shelves-db]
(is (= (je/db-cursor-first cur1) [*default-shelf-name* {}]))
(is (= (je/db-cursor-next cur1)
[idx-name-age {:sorted-duplicates true}]))
(is (= (je/db-cursor-next cur1)
[idx-name-bank-acct {:sorted-duplicates false}]))
(is (= (je/db-cursor-next cur1)
[idx-name-first-name {:sorted-duplicates true}]))
(is (= (je/db-cursor-next cur1)
[idx-name-last-name {:sorted-duplicates true}]))
(is (= (je/db-cursor-next cur1)
[idx-name-login {:sorted-duplicates false}]))
(is (= (je/db-cursor-next cur1) [])))))))))
(deftest basics
(let [cupboard-location (make-temp-dir)
p1 (atom nil)
p2 (atom nil)
p3 (atom nil)
p4 (atom nil)]
(testing "default *cupboard*"
(try
(with-open-cupboard [cupboard-location]
(reset! p1 (make-instance president ["gw" "<NAME>" "<NAME>" 57]))
(reset! p2 (make-instance president ["ja" "<NAME>" "<NAME>" 62]))
(reset! p3 (make-instance president ["tj" "<NAME>" "<NAME>" 58]))
(reset! p4 (make-instance president ["jm" "<NAME>" "<NAME>" 58])))
(testing "ability to build a struct-map on retrieve"
(with-open-cupboard [cupboard-location]
(is (= @p1 (retrieve :login "gw")))
(let [sp1 (retrieve :login "gw" :struct president)]
(is (= (type sp1) clojure.lang.PersistentStructMap))
(is (= (meta sp1) (meta @p1))))))
(testing "plain hash-map retrieval"
(with-open-cupboard [cupboard-location]
(is (= @p1 (retrieve :login "gw")))
(is (= @p2 (retrieve :login "ja" :cupboard *cupboard*)))
(is (= @p3 (retrieve :login "<NAME>")))
(is (= @p4 (retrieve :login "jm")))
(testing ":any index retrieval"
(is (= (set (retrieve :age 58)) #{@p4 @p3})))
(testing "deletion"
(delete @p2)
(is (nil? (retrieve :login "ja"))))))
(finally
(rmdir-recursive cupboard-location))))
(testing "explicitly bound cupboard"
(try
(with-open-cupboard [cb cupboard-location]
(reset! p1 (make-instance president ["gw" "<NAME>" "<NAME>" 57] :cupboard cb))
(reset! p2 (make-instance president ["ja" "<NAME>" "<NAME>" 62] :cupboard cb))
(reset! p3 (make-instance president ["tj" "<NAME>" "<NAME>" 58] :cupboard cb))
(reset! p4 (make-instance president ["jm" "<NAME>" "<NAME>" 58] :cupboard cb)))
(with-open-cupboard [cb cupboard-location]
(is (= @p1 (retrieve :login "gw" :cupboard cb)))
(is (= @p2 (retrieve :login "<NAME>" :cupboard cb)))
(is (= @p3 (retrieve :login "<NAME>" :cupboard cb)))
(is (= @p4 (retrieve :login "jm" :cupboard cb)))
(is (thrown? NullPointerException (retrieve :login "jm"))))
(finally
(rmdir-recursive cupboard-location))))))
(deftest transaction-basics
(with-open-cupboard [*cupboard-path*]
(let [p1 {:login "gw" :first-name "<NAME>" :last-name "<NAME>" :age 57 :bank-acct nil}]
(testing "basic transactions"
(with-txn []
(make-instance president ["gw" "<NAME>" "<NAME>" 57])
(is (= (retrieve :login "gw") p1))
(rollback)
(is (thrown-with-msg?
RuntimeException #".*non-open transaction"
(make-instance president ["ja" "<NAME>" "<NAME>" 62]))))
(is (empty? (retrieve :login "gw")))
(is (empty? (retrieve :login "ja")))
(with-txn [:write-no-sync true]
(make-instance president ["gw" "<NAME>" "<NAME>" 57])
(commit)
(is (thrown-with-msg?
RuntimeException #".*non-open transaction"
(make-instance president ["ja" "<NAME>" "<NAME>" 62]))))
(is (empty? (retrieve :login "ja")))
(is (= (retrieve :login "gw") p1)))
(testing "transactional shelf removal"
(make-instance president ["aj" "<NAME>" "<NAME>"] :shelf-name "presidents")
(with-txn []
(is (= (retrieve :login "aj" :shelf-name "presidents")
{:login "<NAME>" :first-name "<NAME>" :last-name "<NAME>"
:bank-acct nil :age nil}))
(remove-shelf "presidents")
(is (not (some #(= % "presidents") (list-shelves))))
(rollback))
(is (some #(= % "presidents") (list-shelves)))))))
(deftest transaction-binding
(with-open-cupboard [*cupboard-path*]
(testing "lexically bound transaction"
(with-txn [txn1]
(make-instance president ["gw" "<NAME>" "<NAME>" 57] :txn txn1)
(rollback txn1))
(is (empty? (retrieve :login "gw"))))))
;;; TODO: These deadlock tests fail, because deadlock detection is difficult to
;;; test. Bugs in implementation are also possible.
#_ (deftest deadlocks
;; Cannot use with-open-cupboard because Clojure's dynamic variables do not
;; propagate to child threads.
(let [cb (open-cupboard *cupboard-path*)]
(try
(let [gw (make-instance president ["gw" "<NAME>" "<NAME>" 57] :cupboard cb)
ja (make-instance president ["ja" "<NAME>" "<NAME>" 62] :cupboard cb)
done-1 (atom false)
done-2 (atom false)]
(testing "deadlock resolution, both threads commit"
(.start (Thread. (fn []
(with-txn [:cupboard cb :max-attempts 2 :retry-delay-msec 10]
(passoc! gw :bank-acct 1 :cupboard cb)
(Thread/sleep 50)
(passoc! ja :bank-acct 2 :cupboard cb))
(reset! done-1 true))))
(.start (Thread. (fn []
(with-txn [:cupboard cb :max-attempts 2 :retry-delay-msec 250]
(passoc! ja :bank-acct 3 :cupboard cb)
(Thread/sleep 50)
(passoc! gw :bank-acct 4 :cupboard cb))
(reset! done-2 true))))
;; wait for threads to complete
(loop [i 0]
(when-not (and @done-1 @done-2)
(Thread/sleep 100)
(recur (inc i))))
;; The first thread has a shorter retry delay, so it should win the
;; race. It commits first, then the second thread overwrites the
;; values.
(is (= (retrieve :login "gw" :cupboard cb) (assoc gw :bank-acct 4)))
(is (= (retrieve :login "ja" :cupboard cb) (assoc ja :bank-acct 3))))
(testing "deadlock resolution, one thread rolls back permanently"
(reset! done-1 false)
(reset! done-2 false)
(.start (Thread. (fn []
(with-txn [:cupboard cb :max-attempts 2 :retry-delay-msec 100]
(passoc! gw :bank-acct 5 :cupboard cb)
(Thread/sleep 100)
(passoc! ja :bank-acct 6 :cupboard cb))
(reset! done-1 true))))
(.start (Thread. (fn []
(is (thrown? RuntimeException
(try
(with-txn [:cupboard cb :max-attempts 1]
(passoc! ja :bank-acct 7 :cupboard cb)
(Thread/sleep 10)
(passoc! gw :bank-acct 8 :cupboard cb))
(finally
(reset! done-2 true))))))))
;; wait for threads to complete
(loop [i 0]
(when-not (and @done-1 @done-2)
(Thread/sleep 100)
(recur (inc i))))
;; Only the first thread should commit here.
(is (= (retrieve :login "gw" :cupboard cb) (assoc gw :bank-acct 5)))
(is (= (retrieve :login "ja" :cupboard cb) (assoc ja :bank-acct 6)))))
(finally
(close-cupboard cb)))))
(deftest simple-concurrency
(with-open-cupboard [c *cupboard-path*]
(let [a1 (agent nil)
a2 (agent nil)]
(send a1 (fn [_]
(make-instance president ["gw" "<NAME>" "<NAME>" 57] :cupboard c)
(make-instance president ["ja" "<NAME>" "<NAME>" 62] :cupboard c)))
(send a2 (fn [_]
(make-instance president ["tj" "<NAME>" "<NAME>" 58] :cupboard c)
(make-instance president ["jm" "<NAME>" "<NAME>" 58] :cupboard c)))
(await a1 a2)
(is (= (shelf-count :cupboard c) 4)))))
(deftest passoc!-pdissoc!
(with-open-cupboard [*cupboard-path*]
(let [date-gw (localdate "1732-02-22")
gw1 {:login "gw" :first-name "<NAME>" :last-name "<NAME>"
:age 57 :bank-acct nil}
gw2 {:login "gw" :first-name "<NAME>" :last-name "<NAME>"
:age 57 :bank-acct 1}
gw3 {:login "gw" :first-name "<NAME>" :last-name "<NAME>"
:age 57 :bank-acct 1 :birthday date-gw}
date-ja (localdate "1735-10-30")
ja1 {:login "ja" :first-name "<NAME>" :last-name "<NAME>" :age 62 :bank-acct nil}
ja2 {:login "ja" :first-name "<NAME>" :last-name "<NAME>" :age 62 :bank-acct 2}
ja3 {:login "ja" :first-name "<NAME>" :last-name "<NAME>" :age 62 :bank-acct 2
:birthday date-ja}
date-tj (localdate "1743-04-13")
tj1 {:login "tj" :first-name "<NAME>" :last-name "<NAME>" :age 58 :bank-acct nil}
tj2 {:login "tj" :first-name "<NAME>" :last-name "<NAME>" :age 58 :bank-acct 3}
tj3 {:login "tj" :first-name "<NAME>" :last-name "<NAME>" :age 58 :bank-acct 3
:birthday date-tj}]
(testing "simple passoc!-pdissoc! operations"
(let [p (atom (make-instance president ["gw" "<NAME>" "<NAME>" 57]))]
(is (= (retrieve :login "gw") gw1))
(reset! p (passoc! @p :bank-acct 1))
(is (= (retrieve :login "gw") gw2))
(reset! p (passoc! @p :birthday date-gw))
(is (= (retrieve :login "gw") gw3))
(reset! p (pdissoc! @p :birthday))
(is (= (retrieve :login "gw") gw2))))
(testing "passoc!-pdissoc! operations on non-default shelves"
(let [p (atom (make-instance president ["ja" "<NAME>" "<NAME>" 62]
:shelf-name "presidents"))]
(is (= (retrieve :login "ja" :shelf-name "presidents") ja1))
(reset! p (passoc! @p :bank-acct 2))
(is (= (retrieve :login "ja" :shelf-name "presidents") ja2))
(reset! p (passoc! @p :birthday date-ja))
(is (= (retrieve :login "ja" :shelf-name "presidents") ja3))
(reset! p (pdissoc! @p :birthday))
(is (= (retrieve :login "ja" :shelf-name "presidents") ja2))))
(testing "passoc!-pdissoc! operations with multiple operands"
(let [p (atom (make-instance president ["tj" "<NAME>" "<NAME>" 58]))]
(is (= (retrieve :login "tj") tj1))
(reset! p (passoc! @p [:bank-acct 3 :birthday date-tj]))
(is (= (retrieve :login "tj") tj3))
(reset! p (passoc! @p [:nonce1 1 :nonce2 2]))
(reset! p (pdissoc! @p [:birthday :nonce1 :nonce2]))
(is (= (retrieve :login "tj") tj2)))))))
(deftest queries
(with-open-cupboard [*cupboard-path*]
(let [p1 (make-instance president ["gw" "<NAME>" "<NAME>" 57])
p2 (make-instance president ["ja" "<NAME>" "<NAME>" 62])
p3 (make-instance president ["tj" "<NAME>" "<NAME>" 58])
p4 (make-instance president ["jm1" "<NAME>" "<NAME>" 58])
p5 (make-instance president ["jm2" "<NAME>" "<NAME>" 59])
p6 (make-instance president ["jqa" "<NAME>" "<NAME>" 58])
p7 (make-instance president ["aj" "<NAME>" "<NAME>" 62])
p8 (make-instance president ["mvb" "<NAME>" "<NAME>" 55])
p9 (make-instance president ["whh" "<NAME>" "<NAME>" 68])
p10 (make-instance president ["jt" "<NAME>" "<NAME>" 51])]
(testing "no-clause query (list full database contents)"
(is (= (set (query))
#{p1 p2 p3 p4 p5 p6 p7 p8 p9 p10})))
(testing "basic one-clause query operations"
(is (= (set (query (= :login "gw"))) #{p1}))
(is (= (set (query (= :login "aj"))) #{p7}))
(is (= (set (query (= :age 57))) #{p1}))
(is (empty? (query (= :age 57) (= :age 62))))
(is (= (set (query (= :age 62))) #{p2 p7}))
(is (= (set (query (<= :age 55))) #{p8 p10}))
(is (= (set (query (< :age 55))) #{p10}))
(is (= (set (query (> :age 60))) #{p2 p7 p9})))
(testing "queries with multiple clauses"
(is (= (set (query (< :age 60) (starts-with :first-name "J"))) #{p4 p5 p6 p10}))
(is (= (set (query (< :age 60) (starts-with :first-name "Ja"))) #{p4 p5}))
(is (= (set (query (< :age 60) (= :first-name "<NAME>"))) #{p6 p10}))
(is (= (set (query (= :first-name "<NAME>"))) #{p2 p6 p10}))
(is (= (count (query (< :age 60) (starts-with :first-name "J") :limit 2)) 2)))
(testing "destructive callbacks"
(query (< :age 60) (= :first-name "<NAME>")
:callback #(passoc! % :first-name "<NAME>"))
(is (= (retrieve :login "ja") p2))
(is (= (retrieve :login "jqa") (assoc p6 :first-name "<NAME>"))))
(testing "making sure natural joins are used wherever possible"
(let [q (macroexpand-1
'(cupboard.core/query (= :age 58) (= :last-name "<NAME>")
:callback #(passoc! % :first-name "<NAME>")))]
(is (= (first (first (rest (rest (rest (first (rest q)))))))
'cupboard.core/query-natural-join)))
(query (= :age 58) (= :last-name "<NAME>")
:callback #(passoc! % :first-name "<NAME>"))
(is (= (retrieve :login "jqa") (assoc p6 :first-name "<NAME>"))))
(testing "delete as a callback"
(query (= :age 58) :callback delete)
(is (nil? (retrieve :login "tj")))
(is (nil? (retrieve :login "jm1")))
(is (nil? (retrieve :login "jqa"))))
(testing "making sure that :struct applied to query works"
(let [everyone (query (> :age 50) :struct president)]
(is (> (count everyone) 0))
(is (every? #(= (type %) clojure.lang.PersistentStructMap) everyone)))))))
| true |
(ns test.cupboard.core
(:use [clojure test])
(:use [cupboard core utils])
(:require [cupboard.bdb.je :as je]))
;;; ----------------------------------------------------------------------------
;;; fixtures
;;; ----------------------------------------------------------------------------
(declare ^:dynamic *cupboard-path*)
(defn fixture-cupboard-path [f]
(binding [*cupboard-path* (.getAbsolutePath (make-temp-dir))]
(f)
(rmdir-recursive *cupboard-path*)))
(use-fixtures :each fixture-cupboard-path)
;;; ----------------------------------------------------------------------------
;;; tests
;;; ----------------------------------------------------------------------------
(defpersist president
((:login :index :unique)
(:first-name :index :any)
(:last-name :index :any)
(:age :index :any)
(:bank-acct :index :unique)))
(deftest persistent-structures
(let [p1 (make-instance president ["gw" "PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI" 57] :save false)
p2 (make-instance president ["ja" "PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI" 62] :save false)
p3 (make-instance president ["tj" "PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI" 58] :save false)
p4 (make-instance president ["jm" "PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI" 58] :save false)]
(is (= (p1 :login) "gw"))
(is (= (p2 :first-name) "PI:NAME:<NAME>END_PI"))
(is (= (p3 :age) 58))
(is (nil? (p4 :bank-acct)))
(is (= ((meta p2) :index-uniques) #{:login :bank-acct}))
(is (= ((meta p2) :index-anys) #{:first-name :last-name :age}))))
(deftest cupboard-basics
(let [cb (atom nil)]
(letfn [(verify-shelf [shelf-name]
;; check :login index
(is (contains? @(:index-unique-dbs (@(:shelves @cb) shelf-name)) :login))
(is (not (-> @(:index-unique-dbs (@(:shelves @cb) shelf-name))
:login :sorted-duplicates)))
(is (= (-> @(:index-unique-dbs (@(:shelves @cb) shelf-name)) :login :name)
(str shelf-name :login)))
;; check :bank-acct index
(is (contains? @(:index-unique-dbs (@(:shelves @cb) shelf-name))
:bank-acct))
(is (not (-> @(:index-unique-dbs (@(:shelves @cb) shelf-name))
:bank-acct :sorted-duplicates)))
;; check :first-name index
(is (contains? @(:index-any-dbs (@(:shelves @cb) shelf-name)) :first-name))
(is (-> @(:index-any-dbs (@(:shelves @cb) shelf-name))
:first-name :sorted-duplicates))
(is (= (-> @(:index-any-dbs (@(:shelves @cb) shelf-name))
:first-name :name)
(str shelf-name :first-name)))
;; check :last-name index
(is (contains? @(:index-any-dbs (@(:shelves @cb) shelf-name)) :last-name))
(is (-> @(:index-any-dbs (@(:shelves @cb) shelf-name))
:last-name :sorted-duplicates))
;; check :age index
(is (contains? @(:index-any-dbs (@(:shelves @cb) shelf-name)) :age))
(is (-> @(:index-any-dbs (@(:shelves @cb) shelf-name))
:age :sorted-duplicates)))]
(testing "making an empty cupboard and checking its state"
(reset! cb (open-cupboard *cupboard-path*))
(is (not (nil? @(:cupboard-env @cb))))
(is (not (nil? @(:shelves-db @cb))))
(is (not (nil? @(:shelves @cb))))
(is (not (@(:shelves-db @cb) :sorted-duplicates)))
(is (= (count @(:shelves @cb)) 1))
(is (= (:name @(:shelves-db @cb)) *shelves-db-name*))
(is (contains? @(:shelves @cb) *default-shelf-name*))
(is (empty? @(:index-unique-dbs (@(:shelves @cb) *default-shelf-name*))))
(is (empty? @(:index-any-dbs (@(:shelves @cb) *default-shelf-name*))))
(is (not (-> (@(:shelves @cb) *default-shelf-name*) :db :sorted-duplicates))))
(testing "writing something to the default shelf"
(make-instance president ["gw" "PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI" 57] :cupboard @cb)
(verify-shelf *default-shelf-name*))
(testing "writing something to a different shelf"
(make-instance president ["ja" "PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI" 62] :cupboard @cb :shelf-name "presidents")
(verify-shelf "presidents"))
(testing "closing cupboard"
(close-cupboard @cb)
(= (nil? @(:cupboard-env @cb)))
(= (empty? @(:shelves-db @cb)))
(= (empty? @(:shelves @cb))))
(testing "reopening cupboard read-only, and verifying correctness of reopened state"
(reset! cb (open-cupboard *cupboard-path* :read-only true))
(verify-shelf *default-shelf-name*)
(verify-shelf "presidents")
(is (thrown-with-msg? RuntimeException #".*[Rr]ead.+[Oo]nly.*"
(query (= :login "gw")
:callback #(delete % :cupboard @cb)
:cupboard @cb))))
(testing "closing cupboard again"
(close-cupboard @cb)
(= (nil? @(:cupboard-env @cb)))
(= (empty? @(:shelves-db @cb)))
(= (empty? @(:shelves @cb))))
(testing "reopening cupboard, and verifying correctness of reopened state"
(reset! cb (open-cupboard *cupboard-path*))
(verify-shelf *default-shelf-name*)
(verify-shelf "presidents"))
(testing "deleting shelf"
(remove-shelf "presidents" :cupboard @cb)
(verify-shelf *default-shelf-name*)
(is (not (contains? @(:shelves @cb) "presidents")))
(is (not (contains? (list-shelves :cupboard @cb) "presidents"))))
(testing "closing and reopening cupboard, and verifying correctness"
(close-cupboard @cb)
(reset! cb (open-cupboard *cupboard-path*))
(verify-shelf *default-shelf-name*)
(is (not (contains? @(:shelves @cb) "presidents"))))
(testing "checking invalid shelf names"
(is (thrown? RuntimeException
(make-instance president ["PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI" 58]
:cupboard cb :shelf-name "invalid:name")))
(is (thrown? RuntimeException
(make-instance president ["PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI" 58]
:cupboard cb :shelf-name *shelves-db-name*))))
(close-cupboard @cb)))
(testing "check the correctness of the cupboard databases"
(je/with-db-env [env *cupboard-path*]
(let [idx-name-age (str *default-shelf-name* :age)
idx-name-bank-acct (str *default-shelf-name* :bank-acct)
idx-name-first-name (str *default-shelf-name* :first-name)
idx-name-last-name (str *default-shelf-name* :last-name)
idx-name-login (str *default-shelf-name* :login)]
(testing "checking environment"
(is (= (set (.getDatabaseNames @(env :env-handle)))
#{*shelves-db-name* *default-shelf-name*
idx-name-age idx-name-bank-acct idx-name-first-name
idx-name-last-name idx-name-login})))
(testing "checking _shelves"
(je/with-db [shelves-db env *shelves-db-name*]
(je/with-db-cursor [cur1 shelves-db]
(is (= (je/db-cursor-first cur1) [*default-shelf-name* {}]))
(is (= (je/db-cursor-next cur1)
[idx-name-age {:sorted-duplicates true}]))
(is (= (je/db-cursor-next cur1)
[idx-name-bank-acct {:sorted-duplicates false}]))
(is (= (je/db-cursor-next cur1)
[idx-name-first-name {:sorted-duplicates true}]))
(is (= (je/db-cursor-next cur1)
[idx-name-last-name {:sorted-duplicates true}]))
(is (= (je/db-cursor-next cur1)
[idx-name-login {:sorted-duplicates false}]))
(is (= (je/db-cursor-next cur1) [])))))))))
(deftest basics
(let [cupboard-location (make-temp-dir)
p1 (atom nil)
p2 (atom nil)
p3 (atom nil)
p4 (atom nil)]
(testing "default *cupboard*"
(try
(with-open-cupboard [cupboard-location]
(reset! p1 (make-instance president ["gw" "PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI" 57]))
(reset! p2 (make-instance president ["ja" "PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI" 62]))
(reset! p3 (make-instance president ["tj" "PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI" 58]))
(reset! p4 (make-instance president ["jm" "PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI" 58])))
(testing "ability to build a struct-map on retrieve"
(with-open-cupboard [cupboard-location]
(is (= @p1 (retrieve :login "gw")))
(let [sp1 (retrieve :login "gw" :struct president)]
(is (= (type sp1) clojure.lang.PersistentStructMap))
(is (= (meta sp1) (meta @p1))))))
(testing "plain hash-map retrieval"
(with-open-cupboard [cupboard-location]
(is (= @p1 (retrieve :login "gw")))
(is (= @p2 (retrieve :login "ja" :cupboard *cupboard*)))
(is (= @p3 (retrieve :login "PI:NAME:<NAME>END_PI")))
(is (= @p4 (retrieve :login "jm")))
(testing ":any index retrieval"
(is (= (set (retrieve :age 58)) #{@p4 @p3})))
(testing "deletion"
(delete @p2)
(is (nil? (retrieve :login "ja"))))))
(finally
(rmdir-recursive cupboard-location))))
(testing "explicitly bound cupboard"
(try
(with-open-cupboard [cb cupboard-location]
(reset! p1 (make-instance president ["gw" "PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI" 57] :cupboard cb))
(reset! p2 (make-instance president ["ja" "PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI" 62] :cupboard cb))
(reset! p3 (make-instance president ["tj" "PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI" 58] :cupboard cb))
(reset! p4 (make-instance president ["jm" "PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI" 58] :cupboard cb)))
(with-open-cupboard [cb cupboard-location]
(is (= @p1 (retrieve :login "gw" :cupboard cb)))
(is (= @p2 (retrieve :login "PI:NAME:<NAME>END_PI" :cupboard cb)))
(is (= @p3 (retrieve :login "PI:NAME:<NAME>END_PI" :cupboard cb)))
(is (= @p4 (retrieve :login "jm" :cupboard cb)))
(is (thrown? NullPointerException (retrieve :login "jm"))))
(finally
(rmdir-recursive cupboard-location))))))
(deftest transaction-basics
(with-open-cupboard [*cupboard-path*]
(let [p1 {:login "gw" :first-name "PI:NAME:<NAME>END_PI" :last-name "PI:NAME:<NAME>END_PI" :age 57 :bank-acct nil}]
(testing "basic transactions"
(with-txn []
(make-instance president ["gw" "PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI" 57])
(is (= (retrieve :login "gw") p1))
(rollback)
(is (thrown-with-msg?
RuntimeException #".*non-open transaction"
(make-instance president ["ja" "PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI" 62]))))
(is (empty? (retrieve :login "gw")))
(is (empty? (retrieve :login "ja")))
(with-txn [:write-no-sync true]
(make-instance president ["gw" "PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI" 57])
(commit)
(is (thrown-with-msg?
RuntimeException #".*non-open transaction"
(make-instance president ["ja" "PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI" 62]))))
(is (empty? (retrieve :login "ja")))
(is (= (retrieve :login "gw") p1)))
(testing "transactional shelf removal"
(make-instance president ["aj" "PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI"] :shelf-name "presidents")
(with-txn []
(is (= (retrieve :login "aj" :shelf-name "presidents")
{:login "PI:NAME:<NAME>END_PI" :first-name "PI:NAME:<NAME>END_PI" :last-name "PI:NAME:<NAME>END_PI"
:bank-acct nil :age nil}))
(remove-shelf "presidents")
(is (not (some #(= % "presidents") (list-shelves))))
(rollback))
(is (some #(= % "presidents") (list-shelves)))))))
(deftest transaction-binding
(with-open-cupboard [*cupboard-path*]
(testing "lexically bound transaction"
(with-txn [txn1]
(make-instance president ["gw" "PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI" 57] :txn txn1)
(rollback txn1))
(is (empty? (retrieve :login "gw"))))))
;;; TODO: These deadlock tests fail, because deadlock detection is difficult to
;;; test. Bugs in implementation are also possible.
#_ (deftest deadlocks
;; Cannot use with-open-cupboard because Clojure's dynamic variables do not
;; propagate to child threads.
(let [cb (open-cupboard *cupboard-path*)]
(try
(let [gw (make-instance president ["gw" "PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI" 57] :cupboard cb)
ja (make-instance president ["ja" "PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI" 62] :cupboard cb)
done-1 (atom false)
done-2 (atom false)]
(testing "deadlock resolution, both threads commit"
(.start (Thread. (fn []
(with-txn [:cupboard cb :max-attempts 2 :retry-delay-msec 10]
(passoc! gw :bank-acct 1 :cupboard cb)
(Thread/sleep 50)
(passoc! ja :bank-acct 2 :cupboard cb))
(reset! done-1 true))))
(.start (Thread. (fn []
(with-txn [:cupboard cb :max-attempts 2 :retry-delay-msec 250]
(passoc! ja :bank-acct 3 :cupboard cb)
(Thread/sleep 50)
(passoc! gw :bank-acct 4 :cupboard cb))
(reset! done-2 true))))
;; wait for threads to complete
(loop [i 0]
(when-not (and @done-1 @done-2)
(Thread/sleep 100)
(recur (inc i))))
;; The first thread has a shorter retry delay, so it should win the
;; race. It commits first, then the second thread overwrites the
;; values.
(is (= (retrieve :login "gw" :cupboard cb) (assoc gw :bank-acct 4)))
(is (= (retrieve :login "ja" :cupboard cb) (assoc ja :bank-acct 3))))
(testing "deadlock resolution, one thread rolls back permanently"
(reset! done-1 false)
(reset! done-2 false)
(.start (Thread. (fn []
(with-txn [:cupboard cb :max-attempts 2 :retry-delay-msec 100]
(passoc! gw :bank-acct 5 :cupboard cb)
(Thread/sleep 100)
(passoc! ja :bank-acct 6 :cupboard cb))
(reset! done-1 true))))
(.start (Thread. (fn []
(is (thrown? RuntimeException
(try
(with-txn [:cupboard cb :max-attempts 1]
(passoc! ja :bank-acct 7 :cupboard cb)
(Thread/sleep 10)
(passoc! gw :bank-acct 8 :cupboard cb))
(finally
(reset! done-2 true))))))))
;; wait for threads to complete
(loop [i 0]
(when-not (and @done-1 @done-2)
(Thread/sleep 100)
(recur (inc i))))
;; Only the first thread should commit here.
(is (= (retrieve :login "gw" :cupboard cb) (assoc gw :bank-acct 5)))
(is (= (retrieve :login "ja" :cupboard cb) (assoc ja :bank-acct 6)))))
(finally
(close-cupboard cb)))))
(deftest simple-concurrency
(with-open-cupboard [c *cupboard-path*]
(let [a1 (agent nil)
a2 (agent nil)]
(send a1 (fn [_]
(make-instance president ["gw" "PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI" 57] :cupboard c)
(make-instance president ["ja" "PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI" 62] :cupboard c)))
(send a2 (fn [_]
(make-instance president ["tj" "PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI" 58] :cupboard c)
(make-instance president ["jm" "PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI" 58] :cupboard c)))
(await a1 a2)
(is (= (shelf-count :cupboard c) 4)))))
(deftest passoc!-pdissoc!
(with-open-cupboard [*cupboard-path*]
(let [date-gw (localdate "1732-02-22")
gw1 {:login "gw" :first-name "PI:NAME:<NAME>END_PI" :last-name "PI:NAME:<NAME>END_PI"
:age 57 :bank-acct nil}
gw2 {:login "gw" :first-name "PI:NAME:<NAME>END_PI" :last-name "PI:NAME:<NAME>END_PI"
:age 57 :bank-acct 1}
gw3 {:login "gw" :first-name "PI:NAME:<NAME>END_PI" :last-name "PI:NAME:<NAME>END_PI"
:age 57 :bank-acct 1 :birthday date-gw}
date-ja (localdate "1735-10-30")
ja1 {:login "ja" :first-name "PI:NAME:<NAME>END_PI" :last-name "PI:NAME:<NAME>END_PI" :age 62 :bank-acct nil}
ja2 {:login "ja" :first-name "PI:NAME:<NAME>END_PI" :last-name "PI:NAME:<NAME>END_PI" :age 62 :bank-acct 2}
ja3 {:login "ja" :first-name "PI:NAME:<NAME>END_PI" :last-name "PI:NAME:<NAME>END_PI" :age 62 :bank-acct 2
:birthday date-ja}
date-tj (localdate "1743-04-13")
tj1 {:login "tj" :first-name "PI:NAME:<NAME>END_PI" :last-name "PI:NAME:<NAME>END_PI" :age 58 :bank-acct nil}
tj2 {:login "tj" :first-name "PI:NAME:<NAME>END_PI" :last-name "PI:NAME:<NAME>END_PI" :age 58 :bank-acct 3}
tj3 {:login "tj" :first-name "PI:NAME:<NAME>END_PI" :last-name "PI:NAME:<NAME>END_PI" :age 58 :bank-acct 3
:birthday date-tj}]
(testing "simple passoc!-pdissoc! operations"
(let [p (atom (make-instance president ["gw" "PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI" 57]))]
(is (= (retrieve :login "gw") gw1))
(reset! p (passoc! @p :bank-acct 1))
(is (= (retrieve :login "gw") gw2))
(reset! p (passoc! @p :birthday date-gw))
(is (= (retrieve :login "gw") gw3))
(reset! p (pdissoc! @p :birthday))
(is (= (retrieve :login "gw") gw2))))
(testing "passoc!-pdissoc! operations on non-default shelves"
(let [p (atom (make-instance president ["ja" "PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI" 62]
:shelf-name "presidents"))]
(is (= (retrieve :login "ja" :shelf-name "presidents") ja1))
(reset! p (passoc! @p :bank-acct 2))
(is (= (retrieve :login "ja" :shelf-name "presidents") ja2))
(reset! p (passoc! @p :birthday date-ja))
(is (= (retrieve :login "ja" :shelf-name "presidents") ja3))
(reset! p (pdissoc! @p :birthday))
(is (= (retrieve :login "ja" :shelf-name "presidents") ja2))))
(testing "passoc!-pdissoc! operations with multiple operands"
(let [p (atom (make-instance president ["tj" "PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI" 58]))]
(is (= (retrieve :login "tj") tj1))
(reset! p (passoc! @p [:bank-acct 3 :birthday date-tj]))
(is (= (retrieve :login "tj") tj3))
(reset! p (passoc! @p [:nonce1 1 :nonce2 2]))
(reset! p (pdissoc! @p [:birthday :nonce1 :nonce2]))
(is (= (retrieve :login "tj") tj2)))))))
(deftest queries
(with-open-cupboard [*cupboard-path*]
(let [p1 (make-instance president ["gw" "PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI" 57])
p2 (make-instance president ["ja" "PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI" 62])
p3 (make-instance president ["tj" "PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI" 58])
p4 (make-instance president ["jm1" "PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI" 58])
p5 (make-instance president ["jm2" "PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI" 59])
p6 (make-instance president ["jqa" "PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI" 58])
p7 (make-instance president ["aj" "PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI" 62])
p8 (make-instance president ["mvb" "PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI" 55])
p9 (make-instance president ["whh" "PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI" 68])
p10 (make-instance president ["jt" "PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI" 51])]
(testing "no-clause query (list full database contents)"
(is (= (set (query))
#{p1 p2 p3 p4 p5 p6 p7 p8 p9 p10})))
(testing "basic one-clause query operations"
(is (= (set (query (= :login "gw"))) #{p1}))
(is (= (set (query (= :login "aj"))) #{p7}))
(is (= (set (query (= :age 57))) #{p1}))
(is (empty? (query (= :age 57) (= :age 62))))
(is (= (set (query (= :age 62))) #{p2 p7}))
(is (= (set (query (<= :age 55))) #{p8 p10}))
(is (= (set (query (< :age 55))) #{p10}))
(is (= (set (query (> :age 60))) #{p2 p7 p9})))
(testing "queries with multiple clauses"
(is (= (set (query (< :age 60) (starts-with :first-name "J"))) #{p4 p5 p6 p10}))
(is (= (set (query (< :age 60) (starts-with :first-name "Ja"))) #{p4 p5}))
(is (= (set (query (< :age 60) (= :first-name "PI:NAME:<NAME>END_PI"))) #{p6 p10}))
(is (= (set (query (= :first-name "PI:NAME:<NAME>END_PI"))) #{p2 p6 p10}))
(is (= (count (query (< :age 60) (starts-with :first-name "J") :limit 2)) 2)))
(testing "destructive callbacks"
(query (< :age 60) (= :first-name "PI:NAME:<NAME>END_PI")
:callback #(passoc! % :first-name "PI:NAME:<NAME>END_PI"))
(is (= (retrieve :login "ja") p2))
(is (= (retrieve :login "jqa") (assoc p6 :first-name "PI:NAME:<NAME>END_PI"))))
(testing "making sure natural joins are used wherever possible"
(let [q (macroexpand-1
'(cupboard.core/query (= :age 58) (= :last-name "PI:NAME:<NAME>END_PI")
:callback #(passoc! % :first-name "PI:NAME:<NAME>END_PI")))]
(is (= (first (first (rest (rest (rest (first (rest q)))))))
'cupboard.core/query-natural-join)))
(query (= :age 58) (= :last-name "PI:NAME:<NAME>END_PI")
:callback #(passoc! % :first-name "PI:NAME:<NAME>END_PI"))
(is (= (retrieve :login "jqa") (assoc p6 :first-name "PI:NAME:<NAME>END_PI"))))
(testing "delete as a callback"
(query (= :age 58) :callback delete)
(is (nil? (retrieve :login "tj")))
(is (nil? (retrieve :login "jm1")))
(is (nil? (retrieve :login "jqa"))))
(testing "making sure that :struct applied to query works"
(let [everyone (query (> :age 50) :struct president)]
(is (> (count everyone) 0))
(is (every? #(= (type %) clojure.lang.PersistentStructMap) everyone)))))))
|
[
{
"context": "r functions for manipulating maps\"\n :author \"Sam Aaron\"}\n overtone.helpers.map)\n\n(defn reverse-get\n \"R",
"end": 80,
"score": 0.9998928904533386,
"start": 71,
"tag": "NAME",
"value": "Sam Aaron"
}
] |
src/overtone/helpers/map.clj
|
ABaldwinHunter/overtone
| 3,870 |
(ns
^{:doc "Helper functions for manipulating maps"
:author "Sam Aaron"}
overtone.helpers.map)
(defn reverse-get
"Returns the key of the first val in maps vals that equals
v. Non-deterministic if (vals m) contains duplicates and map isn't
sorted."
[m v]
(let [f (first m)
n (next m)]
(if (= (val f) v)
(key f)
(when n (reverse-get n v)))))
|
63987
|
(ns
^{:doc "Helper functions for manipulating maps"
:author "<NAME>"}
overtone.helpers.map)
(defn reverse-get
"Returns the key of the first val in maps vals that equals
v. Non-deterministic if (vals m) contains duplicates and map isn't
sorted."
[m v]
(let [f (first m)
n (next m)]
(if (= (val f) v)
(key f)
(when n (reverse-get n v)))))
| true |
(ns
^{:doc "Helper functions for manipulating maps"
:author "PI:NAME:<NAME>END_PI"}
overtone.helpers.map)
(defn reverse-get
"Returns the key of the first val in maps vals that equals
v. Non-deterministic if (vals m) contains duplicates and map isn't
sorted."
[m v]
(let [f (first m)
n (next m)]
(if (= (val f) v)
(key f)
(when n (reverse-get n v)))))
|
[
{
"context": "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@\n;; @ Copyright (c) Michael Leachim ",
"end": 124,
"score": 0.9997709393501282,
"start": 109,
"tag": "NAME",
"value": "Michael Leachim"
},
{
"context": " @\n;; @@@@@@ At 2018-09-10 17:34 <[email protected]> @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@\n\n(ns wirefra",
"end": 502,
"score": 0.9999246597290039,
"start": 481,
"tag": "EMAIL",
"value": "[email protected]"
},
{
"context": "-cut.mik-pad-top-0 [:a {:href \"https://github.com/MichaelLeachim/wireframecss\"} \"Github\"]]\n [:p.mik-flush-",
"end": 2105,
"score": 0.9968146085739136,
"start": 2091,
"tag": "USERNAME",
"value": "MichaelLeachim"
},
{
"context": "ith ❤ in Clojure\" [:br]\n \"Copyright © 2018 M.L\" [:br]\n \"All rights reserved\" [:br]]\n ",
"end": 3027,
"score": 0.9870147705078125,
"start": 3024,
"tag": "NAME",
"value": "M.L"
}
] |
src/wireframe/views.clj
|
MichaelLeachim/wireframecss
| 1 |
;; @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
;; @ Copyright (c) Michael Leachim @
;; @ You can find additional information regarding licensing of this work in LICENSE.md @
;; @ You must not remove this notice, or any other, from this software. @
;; @ All rights reserved. @
;; @@@@@@ At 2018-09-10 17:34 <[email protected]> @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
(ns wireframe.views
(:require
[garden.core :as garden]
[hiccup.core :as hiccup]
[wireframe.config :refer [*VIEWS-FONTS*]]
[wireframe.gen-doc :as gen-doc]))
;; [:b "TODO:" [:br]
;; "fix up sizes" [:br]
;; "make download button" [:br]
;; "write readme for the system" [:br]
;; "push this stuff to github"]
(def STYLES
["/pure-min.css" *VIEWS-FONTS*
"/mik/t/wireframe.css"
"/specific.css"])
(defn head []
[:head {:itemtype "http://schema.org/Article", :itemscope "itemscope"}
(concat
[[:meta {:charset "utf-8"}]
[:meta {:name "viewport" :content "width=device-width, initial-scale=1.0"}]
[:meta {:content "IE=edge,chrome=1", :http-equiv "X-UA-Compatible"}]
[:title "Documentation site"]
(for [item STYLES]
[:link {:href item :rel "stylesheet"}])])])
(defn main
[]
(hiccup/html
[:html
(head)
[:body {:style "background:url('/background.png')"}
[:div.mik-tiny-container {:style "background:white;"}
[:header
[:div.pure-u-1.mik-pad-top.mik-pad-bottom
[:h1#top.mik-line-justify.mik-fs-4.mik-cut "W i r e"]
[:h1#top.mik-line-justify.mik-fs-4.mik-cut-angry "f r a m e"]
[:div.mik-fw-1.mik-fs-0
[:p.mik-cut.mik-flush-right "A set of practical, framework agnostic, zero configuration CSS classes"]
[:p.mik-cut.mik-flush-right "for everyday use"]]
[:div.mik-fw-2.mik-fs-0.mik-pad-top-5
[:p.mik-flush-right.mik-cut.mik-pad-top-0 [:a {:href "https://github.com/MichaelLeachim/wireframecss"} "Github"]]
[:p.mik-flush-right.mik-cut "Download"
[:br]
[:a {:href "/mik/f/wireframe.css"} " [ mik- ] "]
[:a {:href "/mik/t/wireframe.css"} " [ color- ]"] [:br]
[:a {:href "/qq/f/wireframe.css"} " [ qq- ] "]
[:a {:href "/qq/t/wireframe.css"} " [ color- ]"] [:br]
[:a {:href "/dd/f/wireframe.css"} " [ dd- ] "]
[:a {:href "/dd/t/wireframe.css" } " [ color- ]"] ]]]]
[:main {:role "main"}
[:p.mik-pad-0.element-color-background "Use " [:b "-angry "] "suffix to override (add " [:b " !important "]
" ) to the selector rules" [:br]
"For example:" [:b " .mik-cut-bottom "] "becomes" [:b " .mik-cut-bottom-angry "]]
gen-doc/datum]
[:footer.mik-fs-0
[:div
[:div.smaller.warm-gray.mik-flush-right.mik-footer "Made with ❤ in Clojure" [:br]
"Copyright © 2018 M.L" [:br]
"All rights reserved" [:br]]
[:div.mik-fs-0
[:a {:href "/sheet.png"} "Sheet background"]
[:a {:href "/background.png"} "Background image"]]]]]]]))
|
348
|
;; @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
;; @ Copyright (c) <NAME> @
;; @ You can find additional information regarding licensing of this work in LICENSE.md @
;; @ You must not remove this notice, or any other, from this software. @
;; @ All rights reserved. @
;; @@@@@@ At 2018-09-10 17:34 <<EMAIL>> @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
(ns wireframe.views
(:require
[garden.core :as garden]
[hiccup.core :as hiccup]
[wireframe.config :refer [*VIEWS-FONTS*]]
[wireframe.gen-doc :as gen-doc]))
;; [:b "TODO:" [:br]
;; "fix up sizes" [:br]
;; "make download button" [:br]
;; "write readme for the system" [:br]
;; "push this stuff to github"]
(def STYLES
["/pure-min.css" *VIEWS-FONTS*
"/mik/t/wireframe.css"
"/specific.css"])
(defn head []
[:head {:itemtype "http://schema.org/Article", :itemscope "itemscope"}
(concat
[[:meta {:charset "utf-8"}]
[:meta {:name "viewport" :content "width=device-width, initial-scale=1.0"}]
[:meta {:content "IE=edge,chrome=1", :http-equiv "X-UA-Compatible"}]
[:title "Documentation site"]
(for [item STYLES]
[:link {:href item :rel "stylesheet"}])])])
(defn main
[]
(hiccup/html
[:html
(head)
[:body {:style "background:url('/background.png')"}
[:div.mik-tiny-container {:style "background:white;"}
[:header
[:div.pure-u-1.mik-pad-top.mik-pad-bottom
[:h1#top.mik-line-justify.mik-fs-4.mik-cut "W i r e"]
[:h1#top.mik-line-justify.mik-fs-4.mik-cut-angry "f r a m e"]
[:div.mik-fw-1.mik-fs-0
[:p.mik-cut.mik-flush-right "A set of practical, framework agnostic, zero configuration CSS classes"]
[:p.mik-cut.mik-flush-right "for everyday use"]]
[:div.mik-fw-2.mik-fs-0.mik-pad-top-5
[:p.mik-flush-right.mik-cut.mik-pad-top-0 [:a {:href "https://github.com/MichaelLeachim/wireframecss"} "Github"]]
[:p.mik-flush-right.mik-cut "Download"
[:br]
[:a {:href "/mik/f/wireframe.css"} " [ mik- ] "]
[:a {:href "/mik/t/wireframe.css"} " [ color- ]"] [:br]
[:a {:href "/qq/f/wireframe.css"} " [ qq- ] "]
[:a {:href "/qq/t/wireframe.css"} " [ color- ]"] [:br]
[:a {:href "/dd/f/wireframe.css"} " [ dd- ] "]
[:a {:href "/dd/t/wireframe.css" } " [ color- ]"] ]]]]
[:main {:role "main"}
[:p.mik-pad-0.element-color-background "Use " [:b "-angry "] "suffix to override (add " [:b " !important "]
" ) to the selector rules" [:br]
"For example:" [:b " .mik-cut-bottom "] "becomes" [:b " .mik-cut-bottom-angry "]]
gen-doc/datum]
[:footer.mik-fs-0
[:div
[:div.smaller.warm-gray.mik-flush-right.mik-footer "Made with ❤ in Clojure" [:br]
"Copyright © 2018 <NAME>" [:br]
"All rights reserved" [:br]]
[:div.mik-fs-0
[:a {:href "/sheet.png"} "Sheet background"]
[:a {:href "/background.png"} "Background image"]]]]]]]))
| true |
;; @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
;; @ Copyright (c) PI:NAME:<NAME>END_PI @
;; @ You can find additional information regarding licensing of this work in LICENSE.md @
;; @ You must not remove this notice, or any other, from this software. @
;; @ All rights reserved. @
;; @@@@@@ At 2018-09-10 17:34 <PI:EMAIL:<EMAIL>END_PI> @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
(ns wireframe.views
(:require
[garden.core :as garden]
[hiccup.core :as hiccup]
[wireframe.config :refer [*VIEWS-FONTS*]]
[wireframe.gen-doc :as gen-doc]))
;; [:b "TODO:" [:br]
;; "fix up sizes" [:br]
;; "make download button" [:br]
;; "write readme for the system" [:br]
;; "push this stuff to github"]
(def STYLES
["/pure-min.css" *VIEWS-FONTS*
"/mik/t/wireframe.css"
"/specific.css"])
(defn head []
[:head {:itemtype "http://schema.org/Article", :itemscope "itemscope"}
(concat
[[:meta {:charset "utf-8"}]
[:meta {:name "viewport" :content "width=device-width, initial-scale=1.0"}]
[:meta {:content "IE=edge,chrome=1", :http-equiv "X-UA-Compatible"}]
[:title "Documentation site"]
(for [item STYLES]
[:link {:href item :rel "stylesheet"}])])])
(defn main
[]
(hiccup/html
[:html
(head)
[:body {:style "background:url('/background.png')"}
[:div.mik-tiny-container {:style "background:white;"}
[:header
[:div.pure-u-1.mik-pad-top.mik-pad-bottom
[:h1#top.mik-line-justify.mik-fs-4.mik-cut "W i r e"]
[:h1#top.mik-line-justify.mik-fs-4.mik-cut-angry "f r a m e"]
[:div.mik-fw-1.mik-fs-0
[:p.mik-cut.mik-flush-right "A set of practical, framework agnostic, zero configuration CSS classes"]
[:p.mik-cut.mik-flush-right "for everyday use"]]
[:div.mik-fw-2.mik-fs-0.mik-pad-top-5
[:p.mik-flush-right.mik-cut.mik-pad-top-0 [:a {:href "https://github.com/MichaelLeachim/wireframecss"} "Github"]]
[:p.mik-flush-right.mik-cut "Download"
[:br]
[:a {:href "/mik/f/wireframe.css"} " [ mik- ] "]
[:a {:href "/mik/t/wireframe.css"} " [ color- ]"] [:br]
[:a {:href "/qq/f/wireframe.css"} " [ qq- ] "]
[:a {:href "/qq/t/wireframe.css"} " [ color- ]"] [:br]
[:a {:href "/dd/f/wireframe.css"} " [ dd- ] "]
[:a {:href "/dd/t/wireframe.css" } " [ color- ]"] ]]]]
[:main {:role "main"}
[:p.mik-pad-0.element-color-background "Use " [:b "-angry "] "suffix to override (add " [:b " !important "]
" ) to the selector rules" [:br]
"For example:" [:b " .mik-cut-bottom "] "becomes" [:b " .mik-cut-bottom-angry "]]
gen-doc/datum]
[:footer.mik-fs-0
[:div
[:div.smaller.warm-gray.mik-flush-right.mik-footer "Made with ❤ in Clojure" [:br]
"Copyright © 2018 PI:NAME:<NAME>END_PI" [:br]
"All rights reserved" [:br]]
[:div.mik-fs-0
[:a {:href "/sheet.png"} "Sheet background"]
[:a {:href "/background.png"} "Background image"]]]]]]]))
|
[
{
"context": "oc \"Scheme interpreter in Clojure\"\n :author \"Evan Flynn\"}\n scheme.core\n (:refer-clojure :exclude [true?",
"end": 68,
"score": 0.9998698830604553,
"start": 58,
"tag": "NAME",
"value": "Evan Flynn"
}
] |
src/scheme/core.clj
|
eflynn/scheme-interpreter
| 0 |
(ns ^{:doc "Scheme interpreter in Clojure"
:author "Evan Flynn"}
scheme.core
(:refer-clojure :exclude [true? false?]))
(defprotocol Expression
"Things to be evaluated."
(value-of [exp env]))
(defrecord ProcExp [vars body]
Expression
(value-of [this env]
(fn [& args]
(value-of body (extend-env* vars args env)))))
(defn report-no-binding-found [search-var]
(throw (Exception. (str "Unbound variable: " search-var))))
(defn empty-env []
{})
(defn apply-env [env search-var]
(if-let [result (env search-var)]
(if (instance? ProcExp result)
(value-of result env)
result)
(report-no-binding-found search-var)))
(defn extend-env* [syms vals old-env]
(loop [env (transient old-env)
syms syms
vals vals]
(if (and syms vals)
(recur (assoc! env (first syms) (first vals))
(next syms)
(next vals))
(persistent! env))))
(def primitive-procedures
{'car first
'cdr rest
'cons cons
'+ +
'- -
'* *
'/ /
'> >
'< <
'= =})
(defn init-env []
primitive-procedures)
(defn true? [x]
(not (= x false)))
(defn false? [x]
(= x false))
(extend-type clojure.lang.Symbol Expression
(value-of [this env] (apply-env env this)))
(extend-type java.lang.Number Expression
(value-of [this env] this))
(defrecord CallExp [operator operands]
Expression
(value-of [this env]
(apply (value-of operator env)
(map #(value-of % env) operands))))
(defrecord IfExp [predicate consequent alternative]
Expression
(value-of [this env]
(value-of (if (true? (value-of predicate env)) consequent alternative) env)))
(defrecord LetExp [var exp body]
Expression
(value-of [this env]
(value-of body (extend-env* (list var)
(list (value-of exp env))
env))))
(defrecord LetRecExp [var exp body]
Expression
(value-of [this env]
(value-of body (extend-env* (list var) (list exp) env))))
(defn parse-expression [datum]
(if-not (list? datum)
datum
(let [head (first datum)
tail (rest datum)]
(case head
let (let [[[[var exp1]] body] tail]
(LetExp. var (parse-expression exp1) (parse-expression body)))
letrec (let [[[[var exp1]] body] tail]
(LetRecExp. var (parse-expression exp1) (parse-expression body)))
quote datum
if (apply ->IfExp (map parse-expression tail))
lambda (let [[vars body] tail]
(ProcExp. vars (parse-expression body)))
(CallExp. (parse-expression head)
(map parse-expression tail))))))
(parse-expression '(letrec ((factorial (lambda (x)
(if (< x 1) 1
(* (factorial (- x 1)) x)))))
(factorial 10)))
(value-of (parse-expression '(letrec ((factorial (lambda (x)
(if (< x 1) 1
(* (factorial (- x 1)) x)))))
(factorial 10))) (init-env))
|
76767
|
(ns ^{:doc "Scheme interpreter in Clojure"
:author "<NAME>"}
scheme.core
(:refer-clojure :exclude [true? false?]))
(defprotocol Expression
"Things to be evaluated."
(value-of [exp env]))
(defrecord ProcExp [vars body]
Expression
(value-of [this env]
(fn [& args]
(value-of body (extend-env* vars args env)))))
(defn report-no-binding-found [search-var]
(throw (Exception. (str "Unbound variable: " search-var))))
(defn empty-env []
{})
(defn apply-env [env search-var]
(if-let [result (env search-var)]
(if (instance? ProcExp result)
(value-of result env)
result)
(report-no-binding-found search-var)))
(defn extend-env* [syms vals old-env]
(loop [env (transient old-env)
syms syms
vals vals]
(if (and syms vals)
(recur (assoc! env (first syms) (first vals))
(next syms)
(next vals))
(persistent! env))))
(def primitive-procedures
{'car first
'cdr rest
'cons cons
'+ +
'- -
'* *
'/ /
'> >
'< <
'= =})
(defn init-env []
primitive-procedures)
(defn true? [x]
(not (= x false)))
(defn false? [x]
(= x false))
(extend-type clojure.lang.Symbol Expression
(value-of [this env] (apply-env env this)))
(extend-type java.lang.Number Expression
(value-of [this env] this))
(defrecord CallExp [operator operands]
Expression
(value-of [this env]
(apply (value-of operator env)
(map #(value-of % env) operands))))
(defrecord IfExp [predicate consequent alternative]
Expression
(value-of [this env]
(value-of (if (true? (value-of predicate env)) consequent alternative) env)))
(defrecord LetExp [var exp body]
Expression
(value-of [this env]
(value-of body (extend-env* (list var)
(list (value-of exp env))
env))))
(defrecord LetRecExp [var exp body]
Expression
(value-of [this env]
(value-of body (extend-env* (list var) (list exp) env))))
(defn parse-expression [datum]
(if-not (list? datum)
datum
(let [head (first datum)
tail (rest datum)]
(case head
let (let [[[[var exp1]] body] tail]
(LetExp. var (parse-expression exp1) (parse-expression body)))
letrec (let [[[[var exp1]] body] tail]
(LetRecExp. var (parse-expression exp1) (parse-expression body)))
quote datum
if (apply ->IfExp (map parse-expression tail))
lambda (let [[vars body] tail]
(ProcExp. vars (parse-expression body)))
(CallExp. (parse-expression head)
(map parse-expression tail))))))
(parse-expression '(letrec ((factorial (lambda (x)
(if (< x 1) 1
(* (factorial (- x 1)) x)))))
(factorial 10)))
(value-of (parse-expression '(letrec ((factorial (lambda (x)
(if (< x 1) 1
(* (factorial (- x 1)) x)))))
(factorial 10))) (init-env))
| true |
(ns ^{:doc "Scheme interpreter in Clojure"
:author "PI:NAME:<NAME>END_PI"}
scheme.core
(:refer-clojure :exclude [true? false?]))
(defprotocol Expression
"Things to be evaluated."
(value-of [exp env]))
(defrecord ProcExp [vars body]
Expression
(value-of [this env]
(fn [& args]
(value-of body (extend-env* vars args env)))))
(defn report-no-binding-found [search-var]
(throw (Exception. (str "Unbound variable: " search-var))))
(defn empty-env []
{})
(defn apply-env [env search-var]
(if-let [result (env search-var)]
(if (instance? ProcExp result)
(value-of result env)
result)
(report-no-binding-found search-var)))
(defn extend-env* [syms vals old-env]
(loop [env (transient old-env)
syms syms
vals vals]
(if (and syms vals)
(recur (assoc! env (first syms) (first vals))
(next syms)
(next vals))
(persistent! env))))
(def primitive-procedures
{'car first
'cdr rest
'cons cons
'+ +
'- -
'* *
'/ /
'> >
'< <
'= =})
(defn init-env []
primitive-procedures)
(defn true? [x]
(not (= x false)))
(defn false? [x]
(= x false))
(extend-type clojure.lang.Symbol Expression
(value-of [this env] (apply-env env this)))
(extend-type java.lang.Number Expression
(value-of [this env] this))
(defrecord CallExp [operator operands]
Expression
(value-of [this env]
(apply (value-of operator env)
(map #(value-of % env) operands))))
(defrecord IfExp [predicate consequent alternative]
Expression
(value-of [this env]
(value-of (if (true? (value-of predicate env)) consequent alternative) env)))
(defrecord LetExp [var exp body]
Expression
(value-of [this env]
(value-of body (extend-env* (list var)
(list (value-of exp env))
env))))
(defrecord LetRecExp [var exp body]
Expression
(value-of [this env]
(value-of body (extend-env* (list var) (list exp) env))))
(defn parse-expression [datum]
  ;; Translate an s-expression into the Expression record tree.  Atoms
  ;; (symbols, numbers) represent themselves; lists dispatch on the head
  ;; symbol, and anything unrecognized parses as an application.
  ;; NOTE(review): the quote case returns the whole (quote x) form, for
  ;; which no Expression implementation exists, so evaluating a quoted
  ;; datum will fail -- confirm the intended semantics.
  (if-not (list? datum)
    datum
    (let [head (first datum)
          tail (rest datum)]
      (case head
        let (let [[[[var exp1]] body] tail]
              (LetExp. var (parse-expression exp1) (parse-expression body)))
        letrec (let [[[[var exp1]] body] tail]
                 (LetRecExp. var (parse-expression exp1) (parse-expression body)))
        quote datum
        if (apply ->IfExp (map parse-expression tail))
        lambda (let [[vars body] tail]
                 (ProcExp. vars (parse-expression body)))
        (CallExp. (parse-expression head)
                  (map parse-expression tail))))))
;; Smoke test: parse, then parse-and-evaluate, factorial of 10
;; (the second form should yield 3628800).
(parse-expression '(letrec ((factorial (lambda (x)
                                         (if (< x 1) 1
                                             (* (factorial (- x 1)) x)))))
                    (factorial 10)))
(value-of (parse-expression '(letrec ((factorial (lambda (x)
                                                   (if (< x 1) 1
                                                       (* (factorial (- x 1)) x)))))
                              (factorial 10))) (init-env))
|
[
{
"context": ";; Copyright © 2015-2022 Esko Luontola\n;; This software is released under the Apache Lic",
"end": 38,
"score": 0.9998839497566223,
"start": 25,
"tag": "NAME",
"value": "Esko Luontola"
}
] |
src/territory_bro/domain/territory.clj
|
3breadt/territory-bro
| 0 |
;; Copyright © 2015-2022 Esko Luontola
;; This software is released under the Apache License 2.0.
;; The license text is at http://www.apache.org/licenses/LICENSE-2.0
(ns territory-bro.domain.territory
(:require [medley.core :refer [dissoc-in]]
[territory-bro.gis.gis-change :as gis-change])
(:import (territory_bro ValidationException)))
;;;; Read model
;; Event projection for the territory read model, dispatched on
;; :event/type; events this model does not care about leave state as-is.
(defmulti projection (fn [_state event]
                       (:event/type event)))

(defmethod projection :default [state _event]
  state)
(defmethod projection :territory.event/territory-defined
  ;; Upsert: territory-defined is emitted for both creation and update,
  ;; so the projection overwrites every data field from the event.
  [state event]
  (update-in state [::territories (:congregation/id event) (:territory/id event)]
             (fn [territory]
               (-> territory
                   (assoc :territory/id (:territory/id event))
                   (assoc :territory/number (:territory/number event))
                   (assoc :territory/addresses (:territory/addresses event))
                   (assoc :territory/region (:territory/region event))
                   (assoc :territory/meta (:territory/meta event))
                   (assoc :territory/location (:territory/location event))))))
(defmethod projection :territory.event/territory-deleted
  ;; Remove the territory entry from the read model (medley's dissoc-in).
  [state event]
  (dissoc-in state [::territories (:congregation/id event) (:territory/id event)]))
;;;; Queries
(defn check-territory-exists
  "Throws a ValidationException with [:no-such-territory cong-id territory-id]
  when the territory is absent from the read model; otherwise returns nil."
  [state cong-id territory-id]
  (when (nil? (get-in state [::territories cong-id territory-id]))
    (throw (ValidationException. [[:no-such-territory cong-id territory-id]]))))
;;;; Write model
(defn- write-model
  "Rebuild the single territory aggregate targeted by command by folding
  the event history through the read-model projection."
  [command events]
  (-> (reduce projection nil events)
      (get-in [::territories (:congregation/id command) (:territory/id command)])))
;;;; Command handlers
;; Command handlers dispatch on :command/type; each returns a vector of
;; events to append, or nil when the command is a no-op.
(defmulti ^:private command-handler (fn [command _territory _injections]
                                      (:command/type command)))
;; Territory attributes copied verbatim between commands and events.
(def ^:private data-keys
  [:territory/number
   :territory/addresses
   :territory/region
   :territory/meta
   :territory/location])
(defmethod command-handler :territory.command/define-territory
  ;; Create a new territory.  Emits territory-defined only when no
  ;; territory with this id exists yet, making replays idempotent.
  [command territory {:keys [check-permit]}]
  (let [cong-id (:congregation/id command)
        territory-id (:territory/id command)]
    (check-permit [:create-territory cong-id])
    (when (nil? territory)
      [(merge {:event/type :territory.event/territory-defined
               :congregation/id cong-id
               :territory/id territory-id}
              (gis-change/event-metadata command)
              (select-keys command data-keys))])))
(defmethod command-handler :territory.command/update-territory
  ;; Re-emit territory-defined with the new data, but only when some
  ;; data field actually changed, to avoid redundant events.
  [command territory {:keys [check-permit]}]
  (let [cong-id (:congregation/id command)
        territory-id (:territory/id command)
        old-data (select-keys territory data-keys)
        new-data (select-keys command data-keys)]
    (check-permit [:update-territory cong-id territory-id])
    (when (not= old-data new-data)
      [(merge {:event/type :territory.event/territory-defined
               :congregation/id cong-id
               :territory/id territory-id}
              (gis-change/event-metadata command)
              new-data)])))
(defmethod command-handler :territory.command/delete-territory
  ;; Emits territory-deleted only when the territory still exists,
  ;; making deletion idempotent.
  [command territory {:keys [check-permit]}]
  (let [cong-id (:congregation/id command)
        territory-id (:territory/id command)]
    (check-permit [:delete-territory cong-id territory-id])
    (when (some? territory)
      [(merge {:event/type :territory.event/territory-deleted
               :congregation/id cong-id
               :territory/id territory-id}
              (gis-change/event-metadata command))])))
(defn handle-command
  "Entry point: fold events into the write model for this command's
  territory, then dispatch on :command/type.  Returns new events or nil."
  [command events injections]
  (command-handler command (write-model command events) injections))
|
52469
|
;; Copyright © 2015-2022 <NAME>
;; This software is released under the Apache License 2.0.
;; The license text is at http://www.apache.org/licenses/LICENSE-2.0
(ns territory-bro.domain.territory
(:require [medley.core :refer [dissoc-in]]
[territory-bro.gis.gis-change :as gis-change])
(:import (territory_bro ValidationException)))
;;;; Read model
(defmulti projection (fn [_state event]
(:event/type event)))
(defmethod projection :default [state _event]
state)
(defmethod projection :territory.event/territory-defined
[state event]
(update-in state [::territories (:congregation/id event) (:territory/id event)]
(fn [territory]
(-> territory
(assoc :territory/id (:territory/id event))
(assoc :territory/number (:territory/number event))
(assoc :territory/addresses (:territory/addresses event))
(assoc :territory/region (:territory/region event))
(assoc :territory/meta (:territory/meta event))
(assoc :territory/location (:territory/location event))))))
(defmethod projection :territory.event/territory-deleted
[state event]
(dissoc-in state [::territories (:congregation/id event) (:territory/id event)]))
;;;; Queries
(defn check-territory-exists [state cong-id territory-id]
(when (nil? (get-in state [::territories cong-id territory-id]))
(throw (ValidationException. [[:no-such-territory cong-id territory-id]]))))
;;;; Write model
(defn- write-model [command events]
(let [state (reduce projection nil events)]
(get-in state [::territories (:congregation/id command) (:territory/id command)])))
;;;; Command handlers
(defmulti ^:private command-handler (fn [command _territory _injections]
(:command/type command)))
(def ^:private data-keys
[:territory/number
:territory/addresses
:territory/region
:territory/meta
:territory/location])
(defmethod command-handler :territory.command/define-territory
[command territory {:keys [check-permit]}]
(let [cong-id (:congregation/id command)
territory-id (:territory/id command)]
(check-permit [:create-territory cong-id])
(when (nil? territory)
[(merge {:event/type :territory.event/territory-defined
:congregation/id cong-id
:territory/id territory-id}
(gis-change/event-metadata command)
(select-keys command data-keys))])))
(defmethod command-handler :territory.command/update-territory
[command territory {:keys [check-permit]}]
(let [cong-id (:congregation/id command)
territory-id (:territory/id command)
old-data (select-keys territory data-keys)
new-data (select-keys command data-keys)]
(check-permit [:update-territory cong-id territory-id])
(when (not= old-data new-data)
[(merge {:event/type :territory.event/territory-defined
:congregation/id cong-id
:territory/id territory-id}
(gis-change/event-metadata command)
new-data)])))
(defmethod command-handler :territory.command/delete-territory
[command territory {:keys [check-permit]}]
(let [cong-id (:congregation/id command)
territory-id (:territory/id command)]
(check-permit [:delete-territory cong-id territory-id])
(when (some? territory)
[(merge {:event/type :territory.event/territory-deleted
:congregation/id cong-id
:territory/id territory-id}
(gis-change/event-metadata command))])))
(defn handle-command [command events injections]
(command-handler command (write-model command events) injections))
| true |
;; Copyright © 2015-2022 PI:NAME:<NAME>END_PI
;; This software is released under the Apache License 2.0.
;; The license text is at http://www.apache.org/licenses/LICENSE-2.0
(ns territory-bro.domain.territory
(:require [medley.core :refer [dissoc-in]]
[territory-bro.gis.gis-change :as gis-change])
(:import (territory_bro ValidationException)))
;;;; Read model
(defmulti projection (fn [_state event]
(:event/type event)))
(defmethod projection :default [state _event]
state)
(defmethod projection :territory.event/territory-defined
[state event]
(update-in state [::territories (:congregation/id event) (:territory/id event)]
(fn [territory]
(-> territory
(assoc :territory/id (:territory/id event))
(assoc :territory/number (:territory/number event))
(assoc :territory/addresses (:territory/addresses event))
(assoc :territory/region (:territory/region event))
(assoc :territory/meta (:territory/meta event))
(assoc :territory/location (:territory/location event))))))
(defmethod projection :territory.event/territory-deleted
[state event]
(dissoc-in state [::territories (:congregation/id event) (:territory/id event)]))
;;;; Queries
(defn check-territory-exists [state cong-id territory-id]
(when (nil? (get-in state [::territories cong-id territory-id]))
(throw (ValidationException. [[:no-such-territory cong-id territory-id]]))))
;;;; Write model
(defn- write-model [command events]
(let [state (reduce projection nil events)]
(get-in state [::territories (:congregation/id command) (:territory/id command)])))
;;;; Command handlers
(defmulti ^:private command-handler (fn [command _territory _injections]
(:command/type command)))
(def ^:private data-keys
[:territory/number
:territory/addresses
:territory/region
:territory/meta
:territory/location])
(defmethod command-handler :territory.command/define-territory
[command territory {:keys [check-permit]}]
(let [cong-id (:congregation/id command)
territory-id (:territory/id command)]
(check-permit [:create-territory cong-id])
(when (nil? territory)
[(merge {:event/type :territory.event/territory-defined
:congregation/id cong-id
:territory/id territory-id}
(gis-change/event-metadata command)
(select-keys command data-keys))])))
(defmethod command-handler :territory.command/update-territory
[command territory {:keys [check-permit]}]
(let [cong-id (:congregation/id command)
territory-id (:territory/id command)
old-data (select-keys territory data-keys)
new-data (select-keys command data-keys)]
(check-permit [:update-territory cong-id territory-id])
(when (not= old-data new-data)
[(merge {:event/type :territory.event/territory-defined
:congregation/id cong-id
:territory/id territory-id}
(gis-change/event-metadata command)
new-data)])))
(defmethod command-handler :territory.command/delete-territory
[command territory {:keys [check-permit]}]
(let [cong-id (:congregation/id command)
territory-id (:territory/id command)]
(check-permit [:delete-territory cong-id territory-id])
(when (some? territory)
[(merge {:event/type :territory.event/territory-deleted
:congregation/id cong-id
:territory/id territory-id}
(gis-change/event-metadata command))])))
(defn handle-command [command events injections]
(command-handler command (write-model command events) injections))
|
[
{
"context": "nch -- Predicate logic\n\n; Copyright (c) 2015 -2021 Burkhardt Renz, THM. All rights reserved.\n; The use and distribu",
"end": 83,
"score": 0.9998636841773987,
"start": 69,
"tag": "NAME",
"value": "Burkhardt Renz"
}
] |
src/lwb/resolution.clj
|
esb-lwb/lwb
| 22 |
; lwb Logic WorkBench -- Predicate logic
; Copyright (c) 2015 -2021 Burkhardt Renz, THM. All rights reserved.
; The use and distribution terms for this software are covered by the
; Eclipse Public License 1.0 (http://opensource.org/licenses/eclipse-1.0.php).
; By using this software in any fashion, you are agreeing to be bound by
; the terms of this license.
(ns lwb.resolution
(:require [clojure.math.combinatorics :as combo])
(:require [lwb.prop.nf :refer [cnf]])
(:require [lwb.prop.sat :refer [cnf->dimacs]])
(:require [clojure.set :as set]))
; We need to remember which pairs of clauses have already been resolved
;; Cache of already-resolved clause pairs.  Keyed by the (unordered) set
;; #{cl1 cl2}, valued by the resolvent.
;;
;; Bug fix: the previous version kept a TRANSIENT map inside the atom and
;; mutated it with (swap! ... assoc!).  Transients are single-use,
;; single-threaded values; swap! may retry its function, and assoc! may
;; return a new object while callers keep using the old one, so this
;; combination can silently corrupt the cache.  A plain persistent map
;; with assoc is correct and safe under swap!.
(def resolved (atom {}))

(defn- reset-resolved
  "Clear the cache of resolved pairs."
  []
  (reset! resolved {}))

(defn- is-resolved?
  "Return the stored resolvent of the (unordered) pair [cl1 cl2], or nil."
  [[cl1 cl2]]
  (get @resolved #{cl1 cl2}))

(defn- add-resolved
  "Record that [cl1 cl2] has been resolved to res."
  [[cl1 cl2] res]
  (swap! resolved assoc #{cl1 cl2} res))
(defn tauto?
  "Is the clause a tautology, i.e. does it contain a complementary
  pair of literals (both l and -l)?"
  [cl]
  (boolean (some #(contains? cl (- %)) cl)))
(defn- resolve-pair
  "Returns the resolvent of two clauses,
   checks resolved if that's necessary;
   throws an exception if the resolvent is the contradiction #{}.

   Returns nil when the pair was already resolved, when no complementary
   literal exists, or when the resolvent is a tautology.  The empty
   resolvent is signalled as an ex-info tagged {:from :lwb}."
  [cl1 cl2]
  (if (is-resolved? [cl1 cl2])
    nil
    (if-let [literal (first (filter #(contains? cl2 (- %)) cl1))]
      (let [res (set/union (disj cl1 literal) (disj cl2 (- literal)))]
        (cond
          (= res #{}) (throw (ex-info "Contradiction found" {:from :lwb}))
          (tauto? res) nil
          :else (do
                  (add-resolved [cl1 cl2] res)
                  res))))))
(defn- resolution
  "Return true if the given set of clauses is satisfiable.

   Saturation loop: resolve every pair of distinct clauses and recurse on
   the enlarged set.  Deriving the empty clause is signalled via an
   ex-info tagged {:from :lwb} and means unsatisfiable; reaching a fixed
   point without contradiction means satisfiable."
  [cl-set]
  (try
    (if-let [pairs (seq (filter #(not= (first %) (second %)) (combo/combinations cl-set 2)))]
      (let [resolvents (doall (remove nil? (map #(apply resolve-pair %) pairs)))
            ext-cl-set (set/union cl-set resolvents)]
        (if (= cl-set ext-cl-set)
          true
          (resolution ext-cl-set))
        )
      true)
    (catch Exception e
      ;; Only the deliberately-thrown contradiction marker means "unsat";
      ;; anything else is a real error and is rethrown.
      (if (= (ex-data e) {:from :lwb})
        false
        (throw e)))))
(defn sat?
  "returns if the given propositional formula is satisfiable."
  [phi]
  ;; Reset the resolved-pairs cache, short-circuit trivially-true CNF,
  ;; otherwise run resolution on the DIMACS clause set.
  (reset-resolved)
  (if (true? (cnf phi))
    true ; trivially true
    (let [phi-d (cnf->dimacs phi)]
      (resolution (:cl-set phi-d)))))
(comment
(sat? '(and (or P)))
(sat? '(and (or P (not P))))
(sat? '(and (or P) (or (not P))))
(sat? '(impl (impl F (impl G H)) (impl (impl F G) (impl F H))))
(sat? '(and (or A B (not C)) (or (not A)) (or A B C) (or A (not B))))
(sat? '(and (or A1 (not A2) A3) (or A2 (not A3) A4)))
)
|
116117
|
; lwb Logic WorkBench -- Predicate logic
; Copyright (c) 2015 -2021 <NAME>, THM. All rights reserved.
; The use and distribution terms for this software are covered by the
; Eclipse Public License 1.0 (http://opensource.org/licenses/eclipse-1.0.php).
; By using this software in any fashion, you are agreeing to be bound by
; the terms of this license.
(ns lwb.resolution
(:require [clojure.math.combinatorics :as combo])
(:require [lwb.prop.nf :refer [cnf]])
(:require [lwb.prop.sat :refer [cnf->dimacs]])
(:require [clojure.set :as set]))
; We need to remember which pairs of clauses have already been resolved
(def resolved (atom (transient {})))
(defn- reset-resolved
[]
(reset! resolved (transient{})))
(defn- is-resolved?
"Is the pair [cl1 cl2] already resolved?"
[[cl1 cl2]]
(get @resolved #{cl1 cl2}))
(defn- add-resolved
"Add the pair [cl1 cl2] and its resolvent to resolved."
[[cl1 cl2] res]
(swap! resolved assoc! #{cl1 cl2} res))
(defn tauto?
"Is the clause a tautology?"
[cl]
(if (seq (filter #(contains? cl (- %)) cl))
true
false))
(defn- resolve-pair
"Returns the resolvent of two clauses,
checks resolved if that's necessary;
throws an exception if the resolvent is the contradiction #{}."
[cl1 cl2]
(if (is-resolved? [cl1 cl2])
nil
(if-let [literal (first (filter #(contains? cl2 (- %)) cl1))]
(let [res (set/union (disj cl1 literal) (disj cl2 (- literal)))]
(cond
(= res #{}) (throw (ex-info "Contradiction found" {:from :lwb}))
(tauto? res) nil
:else (do
(add-resolved [cl1 cl2] res)
res))))))
(defn- resolution
"Return true if the given set of clauses is satisfiable."
[cl-set]
(try
(if-let [pairs (seq (filter #(not= (first %) (second %)) (combo/combinations cl-set 2)))]
(let [resolvents (doall (remove nil? (map #(apply resolve-pair %) pairs)))
ext-cl-set (set/union cl-set resolvents)]
(if (= cl-set ext-cl-set)
true
(resolution ext-cl-set))
)
true)
(catch Exception e
(if (= (ex-data e) {:from :lwb})
false
(throw e)))))
(defn sat?
"returns if the given propositional formula is satisfiable."
[phi]
(reset-resolved)
(if (true? (cnf phi))
true ; trivially true
(let [phi-d (cnf->dimacs phi)]
(resolution (:cl-set phi-d)))))
(comment
(sat? '(and (or P)))
(sat? '(and (or P (not P))))
(sat? '(and (or P) (or (not P))))
(sat? '(impl (impl F (impl G H)) (impl (impl F G) (impl F H))))
(sat? '(and (or A B (not C)) (or (not A)) (or A B C) (or A (not B))))
(sat? '(and (or A1 (not A2) A3) (or A2 (not A3) A4)))
)
| true |
; lwb Logic WorkBench -- Predicate logic
; Copyright (c) 2015 -2021 PI:NAME:<NAME>END_PI, THM. All rights reserved.
; The use and distribution terms for this software are covered by the
; Eclipse Public License 1.0 (http://opensource.org/licenses/eclipse-1.0.php).
; By using this software in any fashion, you are agreeing to be bound by
; the terms of this license.
(ns lwb.resolution
(:require [clojure.math.combinatorics :as combo])
(:require [lwb.prop.nf :refer [cnf]])
(:require [lwb.prop.sat :refer [cnf->dimacs]])
(:require [clojure.set :as set]))
; We need to remember which pairs of clauses have already been resolved
(def resolved (atom (transient {})))
(defn- reset-resolved
[]
(reset! resolved (transient{})))
(defn- is-resolved?
"Is the pair [cl1 cl2] already resolved?"
[[cl1 cl2]]
(get @resolved #{cl1 cl2}))
(defn- add-resolved
"Add the pair [cl1 cl2] and its resolvent to resolved."
[[cl1 cl2] res]
(swap! resolved assoc! #{cl1 cl2} res))
(defn tauto?
"Is the clause a tautology?"
[cl]
(if (seq (filter #(contains? cl (- %)) cl))
true
false))
(defn- resolve-pair
"Returns the resolvent of two clauses,
checks resolved if that's necessary;
throws an exception if the resolvent is the contradiction #{}."
[cl1 cl2]
(if (is-resolved? [cl1 cl2])
nil
(if-let [literal (first (filter #(contains? cl2 (- %)) cl1))]
(let [res (set/union (disj cl1 literal) (disj cl2 (- literal)))]
(cond
(= res #{}) (throw (ex-info "Contradiction found" {:from :lwb}))
(tauto? res) nil
:else (do
(add-resolved [cl1 cl2] res)
res))))))
(defn- resolution
"Return true if the given set of clauses is satisfiable."
[cl-set]
(try
(if-let [pairs (seq (filter #(not= (first %) (second %)) (combo/combinations cl-set 2)))]
(let [resolvents (doall (remove nil? (map #(apply resolve-pair %) pairs)))
ext-cl-set (set/union cl-set resolvents)]
(if (= cl-set ext-cl-set)
true
(resolution ext-cl-set))
)
true)
(catch Exception e
(if (= (ex-data e) {:from :lwb})
false
(throw e)))))
(defn sat?
"returns if the given propositional formula is satisfiable."
[phi]
(reset-resolved)
(if (true? (cnf phi))
true ; trivially true
(let [phi-d (cnf->dimacs phi)]
(resolution (:cl-set phi-d)))))
(comment
(sat? '(and (or P)))
(sat? '(and (or P (not P))))
(sat? '(and (or P) (or (not P))))
(sat? '(impl (impl F (impl G H)) (impl (impl F G) (impl F H))))
(sat? '(and (or A B (not C)) (or (not A)) (or A B C) (or A (not B))))
(sat? '(and (or A1 (not A2) A3) (or A2 (not A3) A4)))
)
|
[
{
"context": "))\n\n(defn footer []\n [:footer \"Copyright (c) 2016 Daehyun Kim\"])\n\n(defn layout [content & opts]\n (html5 {:lang",
"end": 124,
"score": 0.9998481869697571,
"start": 113,
"tag": "NAME",
"value": "Daehyun Kim"
}
] |
src/hareguu/layout.clj
|
hatemogi/hareguu
| 1 |
(ns hareguu.layout
(:use [hiccup.core]
[hiccup.page]))
(defn footer
  "Site-wide footer element."
  []
  (vector :footer "Copyright (c) 2016 Daehyun Kim"))
(defn layout
  "Render a full HTML5 page around content.

  Options are keyword/value pairs (or a single options map):
    :title  page title (defaults to \"Hare and Guu site generator\")
    :css    extra stylesheet URLs to include

  Bug fix: & opts collects the varargs into a SEQ, on which
  (get opts :title ...) always returned the default and (:css opts)
  always returned nil -- the options were silently ignored.  The seq is
  now normalized to a map before lookup; callers are unaffected."
  [content & opts]
  (let [opts (if (map? (first opts))
               (first opts)
               (apply hash-map opts))]
    (html5 {:lang "ko"}
           [:head
            [:meta {:charset "utf-8"}]
            [:meta {:http-equiv "X-UA-Compatible", :content "IE=edge"}]
            [:meta {:name "viewport", :content "width=device-width, initial-scale=1"}]
            [:title (get opts :title "Hare and Guu site generator")]
            (map include-css
                 (list* "//maxcdn.bootstrapcdn.com/bootstrap/3.3.6/css/bootstrap.min.css"
                        "//maxcdn.bootstrapcdn.com/font-awesome/4.5.0/css/font-awesome.min.css"
                        (:css opts)))]
           [:body
            [:nav "navigation"]
            [:main content]
            (footer)
            (include-js "/js/hareguu.js")])))
(defn markdown-div
  "Wrap content in a div flagged for client-side markdown rendering."
  [content]
  (let [attrs {:data-markdown true}]
    [:div attrs content]))
|
48205
|
(ns hareguu.layout
(:use [hiccup.core]
[hiccup.page]))
(defn footer []
[:footer "Copyright (c) 2016 <NAME>"])
(defn layout [content & opts]
(html5 {:lang "ko"}
[:head
[:meta {:charset "utf-8"}]
[:meta {:http-equiv "X-UA-Compatible", :content "IE=edge"}]
[:meta {:name "viewport", :content "width=device-width, initial-scale=1"}]
[:title (get opts :title "Hare and Guu site generator")]
(map include-css
(list* "//maxcdn.bootstrapcdn.com/bootstrap/3.3.6/css/bootstrap.min.css"
"//maxcdn.bootstrapcdn.com/font-awesome/4.5.0/css/font-awesome.min.css"
(:css opts)))]
[:body
[:nav "navigation"]
[:main content]
(footer)
(include-js "/js/hareguu.js")]))
(defn markdown-div [content]
[:div {:data-markdown true} content])
| true |
(ns hareguu.layout
(:use [hiccup.core]
[hiccup.page]))
(defn footer []
[:footer "Copyright (c) 2016 PI:NAME:<NAME>END_PI"])
(defn layout [content & opts]
(html5 {:lang "ko"}
[:head
[:meta {:charset "utf-8"}]
[:meta {:http-equiv "X-UA-Compatible", :content "IE=edge"}]
[:meta {:name "viewport", :content "width=device-width, initial-scale=1"}]
[:title (get opts :title "Hare and Guu site generator")]
(map include-css
(list* "//maxcdn.bootstrapcdn.com/bootstrap/3.3.6/css/bootstrap.min.css"
"//maxcdn.bootstrapcdn.com/font-awesome/4.5.0/css/font-awesome.min.css"
(:css opts)))]
[:body
[:nav "navigation"]
[:main content]
(footer)
(include-js "/js/hareguu.js")]))
(defn markdown-div [content]
[:div {:data-markdown true} content])
|
[
{
"context": ";; Copyright © 2015-2020 Esko Luontola\n;; This software is released under the Apache Lic",
"end": 38,
"score": 0.9998863339424133,
"start": 25,
"tag": "NAME",
"value": "Esko Luontola"
}
] |
src/territory_bro/infra/jwt.clj
|
3breadt/territory-bro
| 2 |
;; Copyright © 2015-2020 Esko Luontola
;; This software is released under the Apache License 2.0.
;; The license text is at http://www.apache.org/licenses/LICENSE-2.0
(ns territory-bro.infra.jwt
(:require [mount.core :as mount]
[territory-bro.infra.config :refer [env]]
[territory-bro.infra.json :as json]
[territory-bro.infra.util :refer [getx]])
(:import (com.auth0.jwk JwkProviderBuilder JwkProvider)
(com.auth0.jwt JWT JWTVerifier$BaseVerification)
(com.auth0.jwt.algorithms Algorithm)
(com.auth0.jwt.interfaces Clock)
(java.nio.charset StandardCharsets)
(java.time Instant)
(java.util Base64 Date)))
;; JWKS provider for the configured Auth0 domain; used to fetch the
;; RSA public keys that signed incoming JWTs.
(mount/defstate ^:dynamic ^JwkProvider jwk-provider
  :start (-> (JwkProviderBuilder. ^String (getx env :auth0-domain))
             (.build)))
(defn- fetch-public-key [^String jwt]
  ;; Read the key id (kid) from the -- not yet verified -- token header
  ;; and fetch the matching public key from the JWKS provider.
  (let [key-id (.getKeyId (JWT/decode jwt))]
    (.getPublicKey (.get jwk-provider key-id))))
(defn- decode-base64url
  "Decode a base64url string into a UTF-8 string."
  [^String base64-str]
  (let [decoder (Base64/getUrlDecoder)
        decoded (.decode decoder base64-str)]
    (String. decoded StandardCharsets/UTF_8)))
(defn validate
  "Verify the JWT's RS256 signature (public key fetched by kid from the
  JWKS provider), issuer and audience, then return the decoded payload
  parsed from JSON as a map.  The verifier's clock comes from (:now env)
  so callers/tests control the notion of \"now\"."
  [^String jwt env]
  (let [public-key (fetch-public-key jwt)
        algorithm (Algorithm/RSA256 public-key nil)
        clock (reify Clock
                (getToday [_]
                  (Date/from ((getx env :now)))))
        verifier (-> (JWT/require algorithm)
                     (.withIssuer (into-array String [(getx env :jwt-issuer)]))
                     (.withAudience (into-array String [(getx env :jwt-audience)]))
                     ;; cast to BaseVerification to reach the clock-taking
                     ;; build overload
                     (->> ^JWTVerifier$BaseVerification (cast JWTVerifier$BaseVerification))
                     (.build clock))]
    (-> (.verify verifier jwt)
        (.getPayload)
        (decode-base64url)
        (json/read-value))))
(defn expired?
  "Has the JWT's :exp (seconds since epoch) passed?  Strict comparison:
  a token whose :exp equals now is NOT yet expired."
  ([jwt]
   (expired? jwt (Instant/now)))
  ([jwt ^Instant now]
   (> (.getEpochSecond now) (:exp jwt))))
|
83887
|
;; Copyright © 2015-2020 <NAME>
;; This software is released under the Apache License 2.0.
;; The license text is at http://www.apache.org/licenses/LICENSE-2.0
(ns territory-bro.infra.jwt
(:require [mount.core :as mount]
[territory-bro.infra.config :refer [env]]
[territory-bro.infra.json :as json]
[territory-bro.infra.util :refer [getx]])
(:import (com.auth0.jwk JwkProviderBuilder JwkProvider)
(com.auth0.jwt JWT JWTVerifier$BaseVerification)
(com.auth0.jwt.algorithms Algorithm)
(com.auth0.jwt.interfaces Clock)
(java.nio.charset StandardCharsets)
(java.time Instant)
(java.util Base64 Date)))
(mount/defstate ^:dynamic ^JwkProvider jwk-provider
:start (-> (JwkProviderBuilder. ^String (getx env :auth0-domain))
(.build)))
(defn- fetch-public-key [^String jwt]
(let [key-id (.getKeyId (JWT/decode jwt))]
(.getPublicKey (.get jwk-provider key-id))))
(defn- decode-base64url [^String base64-str]
(-> (Base64/getUrlDecoder)
(.decode base64-str)
(String. StandardCharsets/UTF_8)))
(defn validate [^String jwt env]
(let [public-key (fetch-public-key jwt)
algorithm (Algorithm/RSA256 public-key nil)
clock (reify Clock
(getToday [_]
(Date/from ((getx env :now)))))
verifier (-> (JWT/require algorithm)
(.withIssuer (into-array String [(getx env :jwt-issuer)]))
(.withAudience (into-array String [(getx env :jwt-audience)]))
(->> ^JWTVerifier$BaseVerification (cast JWTVerifier$BaseVerification))
(.build clock))]
(-> (.verify verifier jwt)
(.getPayload)
(decode-base64url)
(json/read-value))))
(defn expired?
([jwt]
(expired? jwt (Instant/now)))
([jwt ^Instant now]
(< (:exp jwt) (.getEpochSecond now))))
| true |
;; Copyright © 2015-2020 PI:NAME:<NAME>END_PI
;; This software is released under the Apache License 2.0.
;; The license text is at http://www.apache.org/licenses/LICENSE-2.0
(ns territory-bro.infra.jwt
(:require [mount.core :as mount]
[territory-bro.infra.config :refer [env]]
[territory-bro.infra.json :as json]
[territory-bro.infra.util :refer [getx]])
(:import (com.auth0.jwk JwkProviderBuilder JwkProvider)
(com.auth0.jwt JWT JWTVerifier$BaseVerification)
(com.auth0.jwt.algorithms Algorithm)
(com.auth0.jwt.interfaces Clock)
(java.nio.charset StandardCharsets)
(java.time Instant)
(java.util Base64 Date)))
(mount/defstate ^:dynamic ^JwkProvider jwk-provider
:start (-> (JwkProviderBuilder. ^String (getx env :auth0-domain))
(.build)))
(defn- fetch-public-key [^String jwt]
(let [key-id (.getKeyId (JWT/decode jwt))]
(.getPublicKey (.get jwk-provider key-id))))
(defn- decode-base64url [^String base64-str]
(-> (Base64/getUrlDecoder)
(.decode base64-str)
(String. StandardCharsets/UTF_8)))
(defn validate [^String jwt env]
(let [public-key (fetch-public-key jwt)
algorithm (Algorithm/RSA256 public-key nil)
clock (reify Clock
(getToday [_]
(Date/from ((getx env :now)))))
verifier (-> (JWT/require algorithm)
(.withIssuer (into-array String [(getx env :jwt-issuer)]))
(.withAudience (into-array String [(getx env :jwt-audience)]))
(->> ^JWTVerifier$BaseVerification (cast JWTVerifier$BaseVerification))
(.build clock))]
(-> (.verify verifier jwt)
(.getPayload)
(decode-base64url)
(json/read-value))))
(defn expired?
([jwt]
(expired? jwt (Instant/now)))
([jwt ^Instant now]
(< (:exp jwt) (.getEpochSecond now))))
|
[
{
"context": "i.i18n.spec :as spec]))\n\n\n(def local-storage-key \"nuvla.ui.locale\")\n\n\n(reg-event-fx\n ::set-locale\n (fn [{db :db} ",
"end": 249,
"score": 0.9095526337623596,
"start": 234,
"tag": "KEY",
"value": "nuvla.ui.locale"
}
] |
code/src/cljs/sixsq/nuvla/ui/i18n/events.cljs
|
nuvla/ui
| 8 |
(ns sixsq.nuvla.ui.i18n.events
(:require
#_:clj-kondo/ignore
[com.degel.re-frame.storage :as storage]
[re-frame.core :refer [inject-cofx reg-event-fx]]
[sixsq.nuvla.ui.i18n.spec :as spec]))
;; Browser localStorage key under which the chosen UI locale persists.
(def local-storage-key "nuvla.ui.locale")
(reg-event-fx
  ::set-locale
  ;; Store locale-new in app-db (when given) and persist the effective
  ;; locale -- the new one, or the current db value -- to localStorage
  ;; via the :storage/set effect.
  (fn [{db :db} [_ locale-new]]
    (let [locale-db (::spec/locale db)
          locale (or locale-new locale-db)]
      {:db (cond-> db
             locale-new (assoc ::spec/locale locale-new))
       :storage/set {:session? false
                     :name local-storage-key
                     :value locale}})))
(reg-event-fx
  ::get-locale-from-local-storage
  ;; Restore a previously persisted locale; the :storage/get coeffect
  ;; injects the stored value.  No effect when nothing was stored.
  [(inject-cofx :storage/get {:name local-storage-key})]
  (fn [{db :db locale :storage/get}]
    (when-not (empty? locale)
      {:db (assoc db ::spec/locale locale)})))
|
17572
|
(ns sixsq.nuvla.ui.i18n.events
(:require
#_:clj-kondo/ignore
[com.degel.re-frame.storage :as storage]
[re-frame.core :refer [inject-cofx reg-event-fx]]
[sixsq.nuvla.ui.i18n.spec :as spec]))
(def local-storage-key "<KEY>")
(reg-event-fx
::set-locale
(fn [{db :db} [_ locale-new]]
(let [locale-db (::spec/locale db)
locale (or locale-new locale-db)]
{:db (cond-> db
locale-new (assoc ::spec/locale locale-new))
:storage/set {:session? false
:name local-storage-key
:value locale}})))
(reg-event-fx
::get-locale-from-local-storage
[(inject-cofx :storage/get {:name local-storage-key})]
(fn [{db :db locale :storage/get}]
(when-not (empty? locale)
{:db (assoc db ::spec/locale locale)})))
| true |
(ns sixsq.nuvla.ui.i18n.events
(:require
#_:clj-kondo/ignore
[com.degel.re-frame.storage :as storage]
[re-frame.core :refer [inject-cofx reg-event-fx]]
[sixsq.nuvla.ui.i18n.spec :as spec]))
(def local-storage-key "PI:KEY:<KEY>END_PI")
(reg-event-fx
::set-locale
(fn [{db :db} [_ locale-new]]
(let [locale-db (::spec/locale db)
locale (or locale-new locale-db)]
{:db (cond-> db
locale-new (assoc ::spec/locale locale-new))
:storage/set {:session? false
:name local-storage-key
:value locale}})))
(reg-event-fx
::get-locale-from-local-storage
[(inject-cofx :storage/get {:name local-storage-key})]
(fn [{db :db locale :storage/get}]
(when-not (empty? locale)
{:db (assoc db ::spec/locale locale)})))
|
[
{
"context": "(comment\n re-core, Copyright 2013 Ronen Narkis, narkisr.com\n Licensed under the Apache License,",
"end": 47,
"score": 0.999886155128479,
"start": 35,
"tag": "NAME",
"value": "Ronen Narkis"
},
{
"context": "(comment\n re-core, Copyright 2013 Ronen Narkis, narkisr.com\n Licensed under the Apache License,\n Version 2.",
"end": 60,
"score": 0.6771295070648193,
"start": 50,
"tag": "EMAIL",
"value": "arkisr.com"
},
{
"context": " nil nil))\n keypair (CertAndKeyGen. \"RSA\" \"SHA1WithRSA\" nil)\n x500 (X500Name. cname org-unit org ",
"end": 1393,
"score": 0.9347181916236877,
"start": 1382,
"tag": "KEY",
"value": "SHA1WithRSA"
}
] |
src/re_core/ssl.clj
|
celestial-ops/core
| 1 |
(comment
re-core, Copyright 2013 Ronen Narkis, narkisr.com
Licensed under the Apache License,
Version 2.0 (the "License") you may not use this file except in compliance with the License.
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.)
(ns re-core.ssl
"SSL cert generation"
(:import
java.util.Date
java.io.FileOutputStream
java.security.KeyStore
java.security.PrivateKey
java.security.cert.X509Certificate
java.util.Date
sun.security.tools.keytool.CertAndKeyGen
sun.security.x509.X500Name)
(:use
[clojure.core.strint :only (<<)]))
; TODO enable more dynamic options here
;; Fixed parameters of the generated self-signed certificate/keystore.
(def keysize 1024)                    ; RSA key size in bits
(def cname "celesital-ops.local")     ; certificate common name (CN)
(def org-unit "IT")                   ; organizational unit (OU)
(def org "test")                      ; organization (O)
(def city "TA")                       ; locality (L)
(def state "IL")                      ; state/province (ST)
(def country "IL")                    ; country code (C)
(def validity 1096)                   ; certificate validity in days
(def alias- "re-core-ops-jetty")      ; keystore entry alias
(defn generate-store
  "Generates a java keystore file with defined spec.

  Writes a JKS keystore to output containing a freshly generated RSA key
  pair and a self-signed certificate (subject built from the config defs
  above), with both the key entry and the store protected by key-pass.

  NOTE(review): relies on the internal sun.security.* CertAndKeyGen API
  and the weak SHA1WithRSA signature algorithm with a 1024-bit key;
  consider SHA256withRSA / 2048 bits -- confirm before changing."
  [output ^String key-pass]
  (let [keystore (doto (KeyStore/getInstance "JKS") (.load nil nil))
        keypair (CertAndKeyGen. "RSA" "SHA1WithRSA" nil)
        x500 (X500Name. cname org-unit org city state country)
        pass-chars (.toCharArray key-pass)]
    (.generate keypair keysize)
    (let [private-key (.getPrivateKey keypair)
          ;; validity is in days; getSelfCertificate takes seconds
          chain (.getSelfCertificate keypair x500 (Date.) (long (* validity 24 60 60)))]
      (.setKeyEntry keystore alias- private-key pass-chars (into-array X509Certificate [chain]))
      (.store keystore (FileOutputStream. ^String output) pass-chars))))
|
87067
|
(comment
re-core, Copyright 2013 <NAME>, n<EMAIL>
Licensed under the Apache License,
Version 2.0 (the "License") you may not use this file except in compliance with the License.
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.)
(ns re-core.ssl
"SSL cert generation"
(:import
java.util.Date
java.io.FileOutputStream
java.security.KeyStore
java.security.PrivateKey
java.security.cert.X509Certificate
java.util.Date
sun.security.tools.keytool.CertAndKeyGen
sun.security.x509.X500Name)
(:use
[clojure.core.strint :only (<<)]))
; TODO enable more dynamic options here
(def keysize 1024)
(def cname "celesital-ops.local")
(def org-unit "IT")
(def org "test")
(def city "TA")
(def state "IL")
(def country "IL")
(def validity 1096)
(def alias- "re-core-ops-jetty")
(defn generate-store
"Generates a java keystore file with defined spec"
[output ^String key-pass]
(let [keystore (doto (KeyStore/getInstance "JKS") (.load nil nil))
keypair (CertAndKeyGen. "RSA" "<KEY>" nil)
x500 (X500Name. cname org-unit org city state country)
pass-chars (.toCharArray key-pass)]
(.generate keypair keysize)
(let [private-key (.getPrivateKey keypair)
chain (.getSelfCertificate keypair x500 (Date.) (long (* validity 24 60 60)))]
(.setKeyEntry keystore alias- private-key pass-chars (into-array X509Certificate [chain]))
(.store keystore (FileOutputStream. ^String output) pass-chars))))
| true |
(comment
re-core, Copyright 2013 PI:NAME:<NAME>END_PI, nPI:EMAIL:<EMAIL>END_PI
Licensed under the Apache License,
Version 2.0 (the "License") you may not use this file except in compliance with the License.
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.)
(ns re-core.ssl
"SSL cert generation"
(:import
java.util.Date
java.io.FileOutputStream
java.security.KeyStore
java.security.PrivateKey
java.security.cert.X509Certificate
java.util.Date
sun.security.tools.keytool.CertAndKeyGen
sun.security.x509.X500Name)
(:use
[clojure.core.strint :only (<<)]))
; TODO enable more dynamic options here
(def keysize 1024)
(def cname "celesital-ops.local")
(def org-unit "IT")
(def org "test")
(def city "TA")
(def state "IL")
(def country "IL")
(def validity 1096)
(def alias- "re-core-ops-jetty")
(defn generate-store
"Generates a java keystore file with defined spec"
[output ^String key-pass]
(let [keystore (doto (KeyStore/getInstance "JKS") (.load nil nil))
keypair (CertAndKeyGen. "RSA" "PI:KEY:<KEY>END_PI" nil)
x500 (X500Name. cname org-unit org city state country)
pass-chars (.toCharArray key-pass)]
(.generate keypair keysize)
(let [private-key (.getPrivateKey keypair)
chain (.getSelfCertificate keypair x500 (Date.) (long (* validity 24 60 60)))]
(.setKeyEntry keystore alias- private-key pass-chars (into-array X509Certificate [chain]))
(.store keystore (FileOutputStream. ^String output) pass-chars))))
|
[
{
"context": "lm/tuple{\"id\" #elm/integer\"1\" \"name\" #elm/string \"john\"}\n {:id 1 :name \"john\"}))\n\n\n;; 2.2. Instance\n;",
"end": 1316,
"score": 0.9978969097137451,
"start": 1312,
"tag": "NAME",
"value": "john"
},
{
"context": "r\"1\" \"name\" #elm/string \"john\"}\n {:id 1 :name \"john\"}))\n\n\n;; 2.2. Instance\n;;\n;; The Instance express",
"end": 1341,
"score": 0.9989832639694214,
"start": 1337,
"tag": "NAME",
"value": "john"
}
] |
modules/cql/test/blaze/elm/compiler/structured_values_test.clj
|
samply/blaze
| 50 |
(ns blaze.elm.compiler.structured-values-test
"2. Structured Values
Section numbers are according to
https://cql.hl7.org/04-logicalspecification.html."
(:require
[blaze.coll.core :as coll]
[blaze.elm.code-spec]
[blaze.elm.compiler :as c]
[blaze.elm.compiler.core :as core]
[blaze.elm.compiler.test-util :as tu]
[blaze.elm.literal]
[blaze.elm.literal-spec]
[blaze.fhir.spec.type]
[clojure.spec.test.alpha :as st]
[clojure.test :as test :refer [are deftest is testing]]
[juxt.iota :refer [given]])
(:import
[blaze.elm.code Code]))
;; Instrument spec'd fns at load time so reader-literal construction in
;; the tests below is checked too.
(st/instrument)
(tu/instrument-compile)

(defn- fixture [f]
  ;; Per-test fixture: (re)instrument, run the test, then unstrument.
  (st/instrument)
  (tu/instrument-compile)
  (f)
  (st/unstrument))

(test/use-fixtures :each fixture)
;; 2.1. Tuple
;;
;; The Tuple expression allows tuples of any type to be built up as an
;; expression. The tupleType attribute specifies the type of the tuple being
;; built, if any, and the list of tuple elements specify the values for the
;; elements of the tuple. Note that the value of an element may be any
;; expression, including another Tuple.
(deftest compile-tuple-test
  ;; Compiling a Tuple expression yields a map from keywordized element
  ;; names to their evaluated element values.
  (are [elm res] (= res (core/-eval (c/compile {} elm) {} nil nil))
    #elm/tuple{"id" #elm/integer"1"}
    {:id 1}

    #elm/tuple{"id" #elm/integer"1" "name" #elm/string "john"}
    {:id 1 :name "john"}))
;; 2.2. Instance
;;
;; The Instance expression allows class instances of any type to be built up as
;; an expression. The classType attribute specifies the type of the class
;; instance being built, and the list of instance elements specify the values
;; for the elements of the class instance. Note that the value of an element may
;; be any expression, including another Instance.
(deftest compile-instance-test
  ;; Compiling a Code instance yields a blaze.elm.code.Code record carrying
  ;; the given system and code.
  (testing "Code"
    (given (c/compile {} (tu/code "system-134534" "code-134551"))
      type := Code
      :system := "system-134534"
      :code := "code-134551")))
;; 2.3. Property
;;
;; The Property operator returns the value of the property on source specified
;; by the path attribute.
;;
;; If the result of evaluating source is null, the result is null.
;;
;; The path attribute may include qualifiers (.) and indexers ([x]). Indexers
;; must be literal integer values.
;;
;; If the path attribute contains qualifiers or indexers, each qualifier or
;; indexer is traversed to obtain the actual value. If the object of the
;; property access at any point in traversing the path is null, the result is
;; null.
;;
;; If a scope is specified, the name is used to resolve the scope in which the
;; path will be resolved. Scopes can be named by operators such as Filter and
;; ForEach.
;;
;; Property expressions can also be used to access the individual points and
;; closed indicators for interval types using the property names low, high,
;; lowClosed, and highClosed.
(deftest compile-property-test
  ;; Covers the three ways a Property's source can be supplied:
  ;;  * via a named query scope ("R") looked up in the scope map,
  ;;  * via the entity passed directly (:life/single-query-scope),
  ;;  * via a :source expression (expression ref, Tuple, Quantity, Null).
  ;; Each FHIR case is exercised with and without :life/source-type, since
  ;; the compiler may take a typed fast path when the source type is known.
  (testing "with scope"
    (testing "with entity supplied over query context"
      (testing "Patient.identifier"
        (testing "with source-type"
          (let [elm
                {:path "identifier"
                 :scope "R"
                 :type "Property"
                 :life/source-type "{http://hl7.org/fhir}Patient"}
                identifier
                #fhir/Identifier
                 {:system #fhir/uri"foo"
                  :value "bar"}
                entity
                {:fhir/type :fhir/Patient :id "0"
                 :identifier [identifier]}
                expr
                (c/compile
                 {:eval-context "Patient"}
                 elm)
                result (coll/first (core/-eval expr nil nil {"R" entity}))]
            (is (= identifier result))))
        (testing "without source-type"
          (let [elm
                {:path "identifier"
                 :scope "R"
                 :type "Property"}
                identifier
                #fhir/Identifier
                 {:system #fhir/uri"foo"
                  :value "bar"}
                entity
                {:fhir/type :fhir/Patient :id "0"
                 :identifier [identifier]}
                expr
                (c/compile
                 {:eval-context "Patient"}
                 elm)
                result (coll/first (core/-eval expr nil nil {"R" entity}))]
            (is (= identifier result)))))
      (testing "Patient.extension"
        (testing "without source-type"
          (let [elm
                {:path "extension"
                 :scope "R"
                 :type "Property"}
                extension
                #fhir/Extension
                 {:url "foo"
                  :valueString "bar"}
                entity
                {:fhir/type :fhir/Patient :id "0"
                 :extension [extension]}
                expr
                (c/compile
                 {:eval-context "Patient"}
                 elm)
                result (coll/first (core/-eval expr nil nil {"R" entity}))]
            (is (= extension result)))))
      (testing "Patient.gender"
        (testing "with source-type"
          (let [elm
                {:path "gender"
                 :scope "R"
                 :type "Property"
                 :life/source-type "{http://hl7.org/fhir}Patient"}
                entity
                {:fhir/type :fhir/Patient :id "0"
                 :gender #fhir/code"male"}
                expr
                (c/compile
                 {:eval-context "Patient"}
                 elm)]
            (is (= #fhir/code"male" (core/-eval expr nil nil {"R" entity})))))
        (testing "without source-type"
          (let [elm
                {:path "gender"
                 :scope "R"
                 :type "Property"}
                entity
                {:fhir/type :fhir/Patient :id "0"
                 :gender #fhir/code"male"}
                expr
                (c/compile
                 {:eval-context "Patient"}
                 elm)]
            (is (= #fhir/code"male" (core/-eval expr nil nil {"R" entity}))))))
      (testing "Observation.value"
        (testing "with source-type"
          (let [elm
                {:path "value"
                 :scope "R"
                 :type "Property"
                 :life/source-type "{http://hl7.org/fhir}Observation"}
                entity
                {:fhir/type :fhir/Observation :id "0"
                 :value "value-114318"}
                expr
                (c/compile
                 {:eval-context "Patient"}
                 elm)]
            (is (= "value-114318" (core/-eval expr nil nil {"R" entity})))))
        (testing "without source-type"
          (let [elm
                {:path "value"
                 :scope "R"
                 :type "Property"}
                entity
                {:fhir/type :fhir/Observation :id "0"
                 :value "value-114318"}
                expr
                (c/compile
                 {:eval-context "Patient"}
                 elm)]
            (is (= "value-114318" (core/-eval expr nil nil {"R" entity})))))))
    ;; Same cases, but the entity is the scope value itself because the
    ;; query has exactly one scope (:life/single-query-scope "R").
    (testing "with entity supplied directly"
      (testing "Patient.identifier"
        (testing "with source-type"
          (let [elm
                {:path "identifier"
                 :scope "R"
                 :type "Property"
                 :life/source-type "{http://hl7.org/fhir}Patient"}
                identifier
                #fhir/Identifier
                 {:system #fhir/uri"foo"
                  :value "bar"}
                entity
                {:fhir/type :fhir/Patient :id "0"
                 :identifier [identifier]}
                expr
                (c/compile
                 {:eval-context "Patient"
                  :life/single-query-scope "R"}
                 elm)
                result (coll/first (core/-eval expr nil nil entity))]
            (is (= identifier result))))
        (testing "without source-type"
          (let [elm
                {:path "identifier"
                 :scope "R"
                 :type "Property"}
                identifier
                #fhir/Identifier
                 {:system #fhir/uri"foo"
                  :value "bar"}
                entity
                {:fhir/type :fhir/Patient :id "0"
                 :identifier [identifier]}
                expr
                (c/compile
                 {:eval-context "Patient"
                  :life/single-query-scope "R"}
                 elm)
                result (coll/first (core/-eval expr nil nil entity))]
            (is (= identifier result)))))
      (testing "Patient.gender"
        (testing "with source-type"
          (let [elm
                {:path "gender"
                 :scope "R"
                 :type "Property"
                 :life/source-type "{http://hl7.org/fhir}Patient"}
                entity
                {:fhir/type :fhir/Patient :id "0"
                 :gender #fhir/code"male"}
                expr
                (c/compile
                 {:eval-context "Patient"
                  :life/single-query-scope "R"}
                 elm)]
            (is (= #fhir/code"male" (core/-eval expr nil nil entity)))))
        (testing "without source-type"
          (let [elm
                {:path "gender"
                 :scope "R"
                 :type "Property"}
                entity
                {:fhir/type :fhir/Patient :id "0"
                 :gender #fhir/code"male"}
                expr
                (c/compile
                 {:eval-context "Patient"
                  :life/single-query-scope "R"}
                 elm)]
            (is (= #fhir/code"male" (core/-eval expr nil nil entity))))))
      (testing "Observation.value"
        (testing "with source-type"
          (let [elm
                {:path "value"
                 :scope "R"
                 :type "Property"
                 :life/source-type "{http://hl7.org/fhir}Observation"}
                entity
                {:fhir/type :fhir/Observation :id "0"
                 :value "value-114318"}
                expr
                (c/compile
                 {:eval-context "Patient"
                  :life/single-query-scope "R"}
                 elm)]
            (is (= "value-114318" (core/-eval expr nil nil entity)))))
        (testing "without source-type"
          (let [elm
                {:path "value"
                 :scope "R"
                 :type "Property"}
                entity
                {:fhir/type :fhir/Observation :id "0" :value "value-114318"}
                expr
                (c/compile
                 {:eval-context "Patient"
                  :life/single-query-scope "R"}
                 elm)]
            (is (= "value-114318" (core/-eval expr nil nil entity))))))))
  ;; Source supplied as an expression: the referenced definition is resolved
  ;; from :library-context at evaluation time.
  (testing "with source"
    (testing "Patient.identifier"
      (testing "with source-type"
        (let [library {:statements {:def [{:name "Patient"}]}}
              elm
              {:path "identifier"
               :source #elm/expression-ref "Patient"
               :type "Property"
               :life/source-type "{http://hl7.org/fhir}Patient"}
              identifier
              #fhir/Identifier
               {:system #fhir/uri"foo"
                :value "bar"}
              source
              {:fhir/type :fhir/Patient :id "0"
               :identifier [identifier]}
              expr (c/compile {:library library :eval-context "Patient"} elm)
              result (coll/first (core/-eval expr {:library-context {"Patient" source}} nil nil))]
          (is (= identifier result))))
      (testing "without source-type"
        (let [library {:statements {:def [{:name "Patient"}]}}
              elm
              {:path "identifier"
               :source #elm/expression-ref "Patient"
               :type "Property"}
              identifier
              #fhir/Identifier
               {:system #fhir/uri"foo"
                :value "bar"}
              source
              {:fhir/type :fhir/Patient :id "0"
               :identifier [identifier]}
              expr (c/compile {:library library :eval-context "Patient"} elm)
              result (coll/first (core/-eval expr {:library-context {"Patient" source}} nil nil))]
          (is (= identifier result)))))
    (testing "Patient.gender"
      (testing "with source-type"
        (let [library {:statements {:def [{:name "Patient"}]}}
              elm
              {:path "gender"
               :source #elm/expression-ref "Patient"
               :type "Property"
               :life/source-type "{http://hl7.org/fhir}Patient"}
              source
              {:fhir/type :fhir/Patient :id "0"
               :gender #fhir/code"male"}
              expr (c/compile {:library library :eval-context "Patient"} elm)
              result (core/-eval expr {:library-context {"Patient" source}} nil nil)]
          (is (= #fhir/code"male" result))))
      (testing "without source-type"
        (let [library {:statements {:def [{:name "Patient"}]}}
              elm
              {:path "gender"
               :source #elm/expression-ref "Patient"
               :type "Property"}
              source
              {:fhir/type :fhir/Patient :id "0"
               :gender #fhir/code"male"}
              expr (c/compile {:library library :eval-context "Patient"} elm)
              result (core/-eval expr {:library-context {"Patient" source}} nil nil)]
          (is (= #fhir/code"male" result)))))
    (testing "Observation.value"
      (testing "with source-type"
        (let [library {:statements {:def [{:name "Observation"}]}}
              elm
              {:path "value"
               :source #elm/expression-ref "Observation"
               :type "Property"
               :life/source-type "{http://hl7.org/fhir}Observation"}
              source
              {:fhir/type :fhir/Observation :id "0"
               :value "value-114318"}
              expr (c/compile {:library library :eval-context "Patient"} elm)
              result (core/-eval expr {:library-context {"Observation" source}} nil nil)]
          (is (= "value-114318" result))))
      (testing "without source-type"
        (let [library {:statements {:def [{:name "Observation"}]}}
              elm
              {:path "value"
               :source #elm/expression-ref "Observation"
               :type "Property"}
              source
              {:fhir/type :fhir/Observation :id "0"
               :value "value-114318"}
              expr (c/compile {:library library :eval-context "Patient"} elm)
              result (core/-eval expr {:library-context {"Observation" source}} nil nil)]
          (is (= "value-114318" result)))))
    (testing "Tuple"
      (are [elm result]
        (= result (core/-eval (c/compile {:eval-context "Unfiltered"} elm) {} nil nil))
        {:resultTypeName "{urn:hl7-org:elm-types:r1}Integer"
         :path "id"
         :type "Property"
         :source
         {:type "Tuple"
          :resultTypeSpecifier
          {:type "TupleTypeSpecifier"
           :element
           [{:name "id"
             :type {:name "{urn:hl7-org:elm-types:r1}Integer" :type "NamedTypeSpecifier"}}
            {:name "name"
             :type {:name "{urn:hl7-org:elm-types:r1}String" :type "NamedTypeSpecifier"}}]}
          :element
          [{:name "id" :value #elm/integer"1"}]}}
        1))
    ;; Property access on Quantity exposes the value and unit components.
    (testing "Quantity"
      (testing "value"
        (are [elm result]
          (= result (core/-eval (c/compile {:eval-context "Unfiltered"} elm) {} nil nil))
          {:resultTypeName "{urn:hl7-org:elm-types:r1}Decimal"
           :path "value"
           :type "Property"
           :source #elm/quantity[42 "m"]}
          42M))
      (testing "unit"
        (are [elm result]
          (= result (core/-eval (c/compile {:eval-context "Unfiltered"} elm) {} nil nil))
          {:resultTypeName "{urn:hl7-org:elm-types:r1}String"
           :path "unit"
           :type "Property"
           :source #elm/quantity[42 "m"]}
          "m")))
    ;; A null source propagates: the property result is nil.
    (testing "nil"
      (are [elm result]
        (= result (core/-eval (c/compile {:eval-context "Unfiltered"} elm) {} nil nil))
        {:path "value"
         :type "Property"
         :source {:type "Null"}}
        nil))))
|
47710
|
(ns blaze.elm.compiler.structured-values-test
"2. Structured Values
Section numbers are according to
https://cql.hl7.org/04-logicalspecification.html."
(:require
[blaze.coll.core :as coll]
[blaze.elm.code-spec]
[blaze.elm.compiler :as c]
[blaze.elm.compiler.core :as core]
[blaze.elm.compiler.test-util :as tu]
[blaze.elm.literal]
[blaze.elm.literal-spec]
[blaze.fhir.spec.type]
[clojure.spec.test.alpha :as st]
[clojure.test :as test :refer [are deftest is testing]]
[juxt.iota :refer [given]])
(:import
[blaze.elm.code Code]))
(st/instrument)
(tu/instrument-compile)
(defn- fixture
  "Wraps a test run: turns spec instrumentation and compile instrumentation
  on before the tests execute and removes spec instrumentation afterwards."
  [run-tests]
  (st/instrument)
  (tu/instrument-compile)
  (run-tests)
  (st/unstrument))
(test/use-fixtures :each fixture)
;; 2.1. Tuple
;;
;; The Tuple expression allows tuples of any type to be built up as an
;; expression. The tupleType attribute specifies the type of the tuple being
;; built, if any, and the list of tuple elements specify the values for the
;; elements of the tuple. Note that the value of an element may be any
;; expression, including another Tuple.
(deftest compile-tuple-test
  ;; A compiled Tuple evaluates to a plain Clojure map whose keys are the
  ;; keywordized element names.
  (are [elm res] (= res (core/-eval (c/compile {} elm) {} nil nil))
    #elm/tuple{"id" #elm/integer"1"}
    {:id 1}
    #elm/tuple{"id" #elm/integer"1" "name" #elm/string "john"}
    {:id 1 :name "john"}))
;; 2.2. Instance
;;
;; The Instance expression allows class instances of any type to be built up as
;; an expression. The classType attribute specifies the type of the class
;; instance being built, and the list of instance elements specify the values
;; for the elements of the class instance. Note that the value of an element may
;; be any expression, including another Instance.
(deftest compile-instance-test
  ;; Compiling a Code instance yields a blaze.elm.code.Code record carrying
  ;; the given system and code.
  (testing "Code"
    (given (c/compile {} (tu/code "system-134534" "code-134551"))
      type := Code
      :system := "system-134534"
      :code := "code-134551")))
;; 2.3. Property
;;
;; The Property operator returns the value of the property on source specified
;; by the path attribute.
;;
;; If the result of evaluating source is null, the result is null.
;;
;; The path attribute may include qualifiers (.) and indexers ([x]). Indexers
;; must be literal integer values.
;;
;; If the path attribute contains qualifiers or indexers, each qualifier or
;; indexer is traversed to obtain the actual value. If the object of the
;; property access at any point in traversing the path is null, the result is
;; null.
;;
;; If a scope is specified, the name is used to resolve the scope in which the
;; path will be resolved. Scopes can be named by operators such as Filter and
;; ForEach.
;;
;; Property expressions can also be used to access the individual points and
;; closed indicators for interval types using the property names low, high,
;; lowClosed, and highClosed.
(deftest compile-property-test
  ;; Covers the three ways a Property's source can be supplied:
  ;;  * via a named query scope ("R") looked up in the scope map,
  ;;  * via the entity passed directly (:life/single-query-scope),
  ;;  * via a :source expression (expression ref, Tuple, Quantity, Null).
  ;; Each FHIR case is exercised with and without :life/source-type, since
  ;; the compiler may take a typed fast path when the source type is known.
  (testing "with scope"
    (testing "with entity supplied over query context"
      (testing "Patient.identifier"
        (testing "with source-type"
          (let [elm
                {:path "identifier"
                 :scope "R"
                 :type "Property"
                 :life/source-type "{http://hl7.org/fhir}Patient"}
                identifier
                #fhir/Identifier
                 {:system #fhir/uri"foo"
                  :value "bar"}
                entity
                {:fhir/type :fhir/Patient :id "0"
                 :identifier [identifier]}
                expr
                (c/compile
                 {:eval-context "Patient"}
                 elm)
                result (coll/first (core/-eval expr nil nil {"R" entity}))]
            (is (= identifier result))))
        (testing "without source-type"
          (let [elm
                {:path "identifier"
                 :scope "R"
                 :type "Property"}
                identifier
                #fhir/Identifier
                 {:system #fhir/uri"foo"
                  :value "bar"}
                entity
                {:fhir/type :fhir/Patient :id "0"
                 :identifier [identifier]}
                expr
                (c/compile
                 {:eval-context "Patient"}
                 elm)
                result (coll/first (core/-eval expr nil nil {"R" entity}))]
            (is (= identifier result)))))
      (testing "Patient.extension"
        (testing "without source-type"
          (let [elm
                {:path "extension"
                 :scope "R"
                 :type "Property"}
                extension
                #fhir/Extension
                 {:url "foo"
                  :valueString "bar"}
                entity
                {:fhir/type :fhir/Patient :id "0"
                 :extension [extension]}
                expr
                (c/compile
                 {:eval-context "Patient"}
                 elm)
                result (coll/first (core/-eval expr nil nil {"R" entity}))]
            (is (= extension result)))))
      (testing "Patient.gender"
        (testing "with source-type"
          (let [elm
                {:path "gender"
                 :scope "R"
                 :type "Property"
                 :life/source-type "{http://hl7.org/fhir}Patient"}
                entity
                {:fhir/type :fhir/Patient :id "0"
                 :gender #fhir/code"male"}
                expr
                (c/compile
                 {:eval-context "Patient"}
                 elm)]
            (is (= #fhir/code"male" (core/-eval expr nil nil {"R" entity})))))
        (testing "without source-type"
          (let [elm
                {:path "gender"
                 :scope "R"
                 :type "Property"}
                entity
                {:fhir/type :fhir/Patient :id "0"
                 :gender #fhir/code"male"}
                expr
                (c/compile
                 {:eval-context "Patient"}
                 elm)]
            (is (= #fhir/code"male" (core/-eval expr nil nil {"R" entity}))))))
      (testing "Observation.value"
        (testing "with source-type"
          (let [elm
                {:path "value"
                 :scope "R"
                 :type "Property"
                 :life/source-type "{http://hl7.org/fhir}Observation"}
                entity
                {:fhir/type :fhir/Observation :id "0"
                 :value "value-114318"}
                expr
                (c/compile
                 {:eval-context "Patient"}
                 elm)]
            (is (= "value-114318" (core/-eval expr nil nil {"R" entity})))))
        (testing "without source-type"
          (let [elm
                {:path "value"
                 :scope "R"
                 :type "Property"}
                entity
                {:fhir/type :fhir/Observation :id "0"
                 :value "value-114318"}
                expr
                (c/compile
                 {:eval-context "Patient"}
                 elm)]
            (is (= "value-114318" (core/-eval expr nil nil {"R" entity})))))))
    ;; Same cases, but the entity is the scope value itself because the
    ;; query has exactly one scope (:life/single-query-scope "R").
    (testing "with entity supplied directly"
      (testing "Patient.identifier"
        (testing "with source-type"
          (let [elm
                {:path "identifier"
                 :scope "R"
                 :type "Property"
                 :life/source-type "{http://hl7.org/fhir}Patient"}
                identifier
                #fhir/Identifier
                 {:system #fhir/uri"foo"
                  :value "bar"}
                entity
                {:fhir/type :fhir/Patient :id "0"
                 :identifier [identifier]}
                expr
                (c/compile
                 {:eval-context "Patient"
                  :life/single-query-scope "R"}
                 elm)
                result (coll/first (core/-eval expr nil nil entity))]
            (is (= identifier result))))
        (testing "without source-type"
          (let [elm
                {:path "identifier"
                 :scope "R"
                 :type "Property"}
                identifier
                #fhir/Identifier
                 {:system #fhir/uri"foo"
                  :value "bar"}
                entity
                {:fhir/type :fhir/Patient :id "0"
                 :identifier [identifier]}
                expr
                (c/compile
                 {:eval-context "Patient"
                  :life/single-query-scope "R"}
                 elm)
                result (coll/first (core/-eval expr nil nil entity))]
            (is (= identifier result)))))
      (testing "Patient.gender"
        (testing "with source-type"
          (let [elm
                {:path "gender"
                 :scope "R"
                 :type "Property"
                 :life/source-type "{http://hl7.org/fhir}Patient"}
                entity
                {:fhir/type :fhir/Patient :id "0"
                 :gender #fhir/code"male"}
                expr
                (c/compile
                 {:eval-context "Patient"
                  :life/single-query-scope "R"}
                 elm)]
            (is (= #fhir/code"male" (core/-eval expr nil nil entity)))))
        (testing "without source-type"
          (let [elm
                {:path "gender"
                 :scope "R"
                 :type "Property"}
                entity
                {:fhir/type :fhir/Patient :id "0"
                 :gender #fhir/code"male"}
                expr
                (c/compile
                 {:eval-context "Patient"
                  :life/single-query-scope "R"}
                 elm)]
            (is (= #fhir/code"male" (core/-eval expr nil nil entity))))))
      (testing "Observation.value"
        (testing "with source-type"
          (let [elm
                {:path "value"
                 :scope "R"
                 :type "Property"
                 :life/source-type "{http://hl7.org/fhir}Observation"}
                entity
                {:fhir/type :fhir/Observation :id "0"
                 :value "value-114318"}
                expr
                (c/compile
                 {:eval-context "Patient"
                  :life/single-query-scope "R"}
                 elm)]
            (is (= "value-114318" (core/-eval expr nil nil entity)))))
        (testing "without source-type"
          (let [elm
                {:path "value"
                 :scope "R"
                 :type "Property"}
                entity
                {:fhir/type :fhir/Observation :id "0" :value "value-114318"}
                expr
                (c/compile
                 {:eval-context "Patient"
                  :life/single-query-scope "R"}
                 elm)]
            (is (= "value-114318" (core/-eval expr nil nil entity))))))))
  ;; Source supplied as an expression: the referenced definition is resolved
  ;; from :library-context at evaluation time.
  (testing "with source"
    (testing "Patient.identifier"
      (testing "with source-type"
        (let [library {:statements {:def [{:name "Patient"}]}}
              elm
              {:path "identifier"
               :source #elm/expression-ref "Patient"
               :type "Property"
               :life/source-type "{http://hl7.org/fhir}Patient"}
              identifier
              #fhir/Identifier
               {:system #fhir/uri"foo"
                :value "bar"}
              source
              {:fhir/type :fhir/Patient :id "0"
               :identifier [identifier]}
              expr (c/compile {:library library :eval-context "Patient"} elm)
              result (coll/first (core/-eval expr {:library-context {"Patient" source}} nil nil))]
          (is (= identifier result))))
      (testing "without source-type"
        (let [library {:statements {:def [{:name "Patient"}]}}
              elm
              {:path "identifier"
               :source #elm/expression-ref "Patient"
               :type "Property"}
              identifier
              #fhir/Identifier
               {:system #fhir/uri"foo"
                :value "bar"}
              source
              {:fhir/type :fhir/Patient :id "0"
               :identifier [identifier]}
              expr (c/compile {:library library :eval-context "Patient"} elm)
              result (coll/first (core/-eval expr {:library-context {"Patient" source}} nil nil))]
          (is (= identifier result)))))
    (testing "Patient.gender"
      (testing "with source-type"
        (let [library {:statements {:def [{:name "Patient"}]}}
              elm
              {:path "gender"
               :source #elm/expression-ref "Patient"
               :type "Property"
               :life/source-type "{http://hl7.org/fhir}Patient"}
              source
              {:fhir/type :fhir/Patient :id "0"
               :gender #fhir/code"male"}
              expr (c/compile {:library library :eval-context "Patient"} elm)
              result (core/-eval expr {:library-context {"Patient" source}} nil nil)]
          (is (= #fhir/code"male" result))))
      (testing "without source-type"
        (let [library {:statements {:def [{:name "Patient"}]}}
              elm
              {:path "gender"
               :source #elm/expression-ref "Patient"
               :type "Property"}
              source
              {:fhir/type :fhir/Patient :id "0"
               :gender #fhir/code"male"}
              expr (c/compile {:library library :eval-context "Patient"} elm)
              result (core/-eval expr {:library-context {"Patient" source}} nil nil)]
          (is (= #fhir/code"male" result)))))
    (testing "Observation.value"
      (testing "with source-type"
        (let [library {:statements {:def [{:name "Observation"}]}}
              elm
              {:path "value"
               :source #elm/expression-ref "Observation"
               :type "Property"
               :life/source-type "{http://hl7.org/fhir}Observation"}
              source
              {:fhir/type :fhir/Observation :id "0"
               :value "value-114318"}
              expr (c/compile {:library library :eval-context "Patient"} elm)
              result (core/-eval expr {:library-context {"Observation" source}} nil nil)]
          (is (= "value-114318" result))))
      (testing "without source-type"
        (let [library {:statements {:def [{:name "Observation"}]}}
              elm
              {:path "value"
               :source #elm/expression-ref "Observation"
               :type "Property"}
              source
              {:fhir/type :fhir/Observation :id "0"
               :value "value-114318"}
              expr (c/compile {:library library :eval-context "Patient"} elm)
              result (core/-eval expr {:library-context {"Observation" source}} nil nil)]
          (is (= "value-114318" result)))))
    (testing "Tuple"
      (are [elm result]
        (= result (core/-eval (c/compile {:eval-context "Unfiltered"} elm) {} nil nil))
        {:resultTypeName "{urn:hl7-org:elm-types:r1}Integer"
         :path "id"
         :type "Property"
         :source
         {:type "Tuple"
          :resultTypeSpecifier
          {:type "TupleTypeSpecifier"
           :element
           [{:name "id"
             :type {:name "{urn:hl7-org:elm-types:r1}Integer" :type "NamedTypeSpecifier"}}
            {:name "name"
             :type {:name "{urn:hl7-org:elm-types:r1}String" :type "NamedTypeSpecifier"}}]}
          :element
          [{:name "id" :value #elm/integer"1"}]}}
        1))
    ;; Property access on Quantity exposes the value and unit components.
    (testing "Quantity"
      (testing "value"
        (are [elm result]
          (= result (core/-eval (c/compile {:eval-context "Unfiltered"} elm) {} nil nil))
          {:resultTypeName "{urn:hl7-org:elm-types:r1}Decimal"
           :path "value"
           :type "Property"
           :source #elm/quantity[42 "m"]}
          42M))
      (testing "unit"
        (are [elm result]
          (= result (core/-eval (c/compile {:eval-context "Unfiltered"} elm) {} nil nil))
          {:resultTypeName "{urn:hl7-org:elm-types:r1}String"
           :path "unit"
           :type "Property"
           :source #elm/quantity[42 "m"]}
          "m")))
    ;; A null source propagates: the property result is nil.
    (testing "nil"
      (are [elm result]
        (= result (core/-eval (c/compile {:eval-context "Unfiltered"} elm) {} nil nil))
        {:path "value"
         :type "Property"
         :source {:type "Null"}}
        nil))))
| true |
(ns blaze.elm.compiler.structured-values-test
"2. Structured Values
Section numbers are according to
https://cql.hl7.org/04-logicalspecification.html."
(:require
[blaze.coll.core :as coll]
[blaze.elm.code-spec]
[blaze.elm.compiler :as c]
[blaze.elm.compiler.core :as core]
[blaze.elm.compiler.test-util :as tu]
[blaze.elm.literal]
[blaze.elm.literal-spec]
[blaze.fhir.spec.type]
[clojure.spec.test.alpha :as st]
[clojure.test :as test :refer [are deftest is testing]]
[juxt.iota :refer [given]])
(:import
[blaze.elm.code Code]))
;; Instrument specs and the compiler at load time, so code evaluated during
;; namespace loading already runs instrumented.
;; NOTE(review): the same two calls are repeated inside the fixture below —
;; presumably intentional (re-instrument before every test); confirm.
(st/instrument)
(tu/instrument-compile)
;; Per-test fixture: (re-)enables spec and compile instrumentation around
;; each test and removes spec instrumentation afterwards.
(defn- fixture [f]
  (st/instrument)
  (tu/instrument-compile)
  (f)
  (st/unstrument))
(test/use-fixtures :each fixture)
;; 2.1. Tuple
;;
;; The Tuple expression allows tuples of any type to be built up as an
;; expression. The tupleType attribute specifies the type of the tuple being
;; built, if any, and the list of tuple elements specify the values for the
;; elements of the tuple. Note that the value of an element may be any
;; expression, including another Tuple.
(deftest compile-tuple-test
  ;; A compiled Tuple evaluates to a plain Clojure map whose keys are the
  ;; keywordized element names.
  ;; NOTE(review): the string values below look like redaction placeholders
  ;; ("PI:NAME:...") — confirm against the original test data before relying
  ;; on them.
  (are [elm res] (= res (core/-eval (c/compile {} elm) {} nil nil))
    #elm/tuple{"id" #elm/integer"1"}
    {:id 1}
    #elm/tuple{"id" #elm/integer"1" "name" #elm/string "PI:NAME:<NAME>END_PI"}
    {:id 1 :name "PI:NAME:<NAME>END_PI"}))
;; 2.2. Instance
;;
;; The Instance expression allows class instances of any type to be built up as
;; an expression. The classType attribute specifies the type of the class
;; instance being built, and the list of instance elements specify the values
;; for the elements of the class instance. Note that the value of an element may
;; be any expression, including another Instance.
(deftest compile-instance-test
  ;; Compiling a Code instance yields a blaze.elm.code.Code record carrying
  ;; the given system and code.
  (testing "Code"
    (given (c/compile {} (tu/code "system-134534" "code-134551"))
      type := Code
      :system := "system-134534"
      :code := "code-134551")))
;; 2.3. Property
;;
;; The Property operator returns the value of the property on source specified
;; by the path attribute.
;;
;; If the result of evaluating source is null, the result is null.
;;
;; The path attribute may include qualifiers (.) and indexers ([x]). Indexers
;; must be literal integer values.
;;
;; If the path attribute contains qualifiers or indexers, each qualifier or
;; indexer is traversed to obtain the actual value. If the object of the
;; property access at any point in traversing the path is null, the result is
;; null.
;;
;; If a scope is specified, the name is used to resolve the scope in which the
;; path will be resolved. Scopes can be named by operators such as Filter and
;; ForEach.
;;
;; Property expressions can also be used to access the individual points and
;; closed indicators for interval types using the property names low, high,
;; lowClosed, and highClosed.
(deftest compile-property-test
(testing "with scope"
(testing "with entity supplied over query context"
(testing "Patient.identifier"
(testing "with source-type"
(let [elm
{:path "identifier"
:scope "R"
:type "Property"
:life/source-type "{http://hl7.org/fhir}Patient"}
identifier
#fhir/Identifier
{:system #fhir/uri"foo"
:value "bar"}
entity
{:fhir/type :fhir/Patient :id "0"
:identifier [identifier]}
expr
(c/compile
{:eval-context "Patient"}
elm)
result (coll/first (core/-eval expr nil nil {"R" entity}))]
(is (= identifier result))))
(testing "without source-type"
(let [elm
{:path "identifier"
:scope "R"
:type "Property"}
identifier
#fhir/Identifier
{:system #fhir/uri"foo"
:value "bar"}
entity
{:fhir/type :fhir/Patient :id "0"
:identifier [identifier]}
expr
(c/compile
{:eval-context "Patient"}
elm)
result (coll/first (core/-eval expr nil nil {"R" entity}))]
(is (= identifier result)))))
(testing "Patient.extension"
(testing "without source-type"
(let [elm
{:path "extension"
:scope "R"
:type "Property"}
extension
#fhir/Extension
{:url "foo"
:valueString "bar"}
entity
{:fhir/type :fhir/Patient :id "0"
:extension [extension]}
expr
(c/compile
{:eval-context "Patient"}
elm)
result (coll/first (core/-eval expr nil nil {"R" entity}))]
(is (= extension result)))))
(testing "Patient.gender"
(testing "with source-type"
(let [elm
{:path "gender"
:scope "R"
:type "Property"
:life/source-type "{http://hl7.org/fhir}Patient"}
entity
{:fhir/type :fhir/Patient :id "0"
:gender #fhir/code"male"}
expr
(c/compile
{:eval-context "Patient"}
elm)]
(is (= #fhir/code"male" (core/-eval expr nil nil {"R" entity})))))
(testing "without source-type"
(let [elm
{:path "gender"
:scope "R"
:type "Property"}
entity
{:fhir/type :fhir/Patient :id "0"
:gender #fhir/code"male"}
expr
(c/compile
{:eval-context "Patient"}
elm)]
(is (= #fhir/code"male" (core/-eval expr nil nil {"R" entity}))))))
(testing "Observation.value"
(testing "with source-type"
(let [elm
{:path "value"
:scope "R"
:type "Property"
:life/source-type "{http://hl7.org/fhir}Observation"}
entity
{:fhir/type :fhir/Observation :id "0"
:value "value-114318"}
expr
(c/compile
{:eval-context "Patient"}
elm)]
(is (= "value-114318" (core/-eval expr nil nil {"R" entity})))))
(testing "without source-type"
(let [elm
{:path "value"
:scope "R"
:type "Property"}
entity
{:fhir/type :fhir/Observation :id "0"
:value "value-114318"}
expr
(c/compile
{:eval-context "Patient"}
elm)]
(is (= "value-114318" (core/-eval expr nil nil {"R" entity})))))))
(testing "with entity supplied directly"
(testing "Patient.identifier"
(testing "with source-type"
(let [elm
{:path "identifier"
:scope "R"
:type "Property"
:life/source-type "{http://hl7.org/fhir}Patient"}
identifier
#fhir/Identifier
{:system #fhir/uri"foo"
:value "bar"}
entity
{:fhir/type :fhir/Patient :id "0"
:identifier [identifier]}
expr
(c/compile
{:eval-context "Patient"
:life/single-query-scope "R"}
elm)
result (coll/first (core/-eval expr nil nil entity))]
(is (= identifier result))))
(testing "without source-type"
(let [elm
{:path "identifier"
:scope "R"
:type "Property"}
identifier
#fhir/Identifier
{:system #fhir/uri"foo"
:value "bar"}
entity
{:fhir/type :fhir/Patient :id "0"
:identifier [identifier]}
expr
(c/compile
{:eval-context "Patient"
:life/single-query-scope "R"}
elm)
result (coll/first (core/-eval expr nil nil entity))]
(is (= identifier result)))))
(testing "Patient.gender"
(testing "with source-type"
(let [elm
{:path "gender"
:scope "R"
:type "Property"
:life/source-type "{http://hl7.org/fhir}Patient"}
entity
{:fhir/type :fhir/Patient :id "0"
:gender #fhir/code"male"}
expr
(c/compile
{:eval-context "Patient"
:life/single-query-scope "R"}
elm)]
(is (= #fhir/code"male" (core/-eval expr nil nil entity)))))
(testing "without source-type"
(let [elm
{:path "gender"
:scope "R"
:type "Property"}
entity
{:fhir/type :fhir/Patient :id "0"
:gender #fhir/code"male"}
expr
(c/compile
{:eval-context "Patient"
:life/single-query-scope "R"}
elm)]
(is (= #fhir/code"male" (core/-eval expr nil nil entity))))))
(testing "Observation.value"
(testing "with source-type"
(let [elm
{:path "value"
:scope "R"
:type "Property"
:life/source-type "{http://hl7.org/fhir}Observation"}
entity
{:fhir/type :fhir/Observation :id "0"
:value "value-114318"}
expr
(c/compile
{:eval-context "Patient"
:life/single-query-scope "R"}
elm)]
(is (= "value-114318" (core/-eval expr nil nil entity)))))
(testing "without source-type"
(let [elm
{:path "value"
:scope "R"
:type "Property"}
entity
{:fhir/type :fhir/Observation :id "0" :value "value-114318"}
expr
(c/compile
{:eval-context "Patient"
:life/single-query-scope "R"}
elm)]
(is (= "value-114318" (core/-eval expr nil nil entity))))))))
(testing "with source"
(testing "Patient.identifier"
(testing "with source-type"
(let [library {:statements {:def [{:name "Patient"}]}}
elm
{:path "identifier"
:source #elm/expression-ref "Patient"
:type "Property"
:life/source-type "{http://hl7.org/fhir}Patient"}
identifier
#fhir/Identifier
{:system #fhir/uri"foo"
:value "bar"}
source
{:fhir/type :fhir/Patient :id "0"
:identifier [identifier]}
expr (c/compile {:library library :eval-context "Patient"} elm)
result (coll/first (core/-eval expr {:library-context {"Patient" source}} nil nil))]
(is (= identifier result))))
(testing "without source-type"
(let [library {:statements {:def [{:name "Patient"}]}}
elm
{:path "identifier"
:source #elm/expression-ref "Patient"
:type "Property"}
identifier
#fhir/Identifier
{:system #fhir/uri"foo"
:value "bar"}
source
{:fhir/type :fhir/Patient :id "0"
:identifier [identifier]}
expr (c/compile {:library library :eval-context "Patient"} elm)
result (coll/first (core/-eval expr {:library-context {"Patient" source}} nil nil))]
(is (= identifier result)))))
(testing "Patient.gender"
(testing "with source-type"
(let [library {:statements {:def [{:name "Patient"}]}}
elm
{:path "gender"
:source #elm/expression-ref "Patient"
:type "Property"
:life/source-type "{http://hl7.org/fhir}Patient"}
source
{:fhir/type :fhir/Patient :id "0"
:gender #fhir/code"male"}
expr (c/compile {:library library :eval-context "Patient"} elm)
result (core/-eval expr {:library-context {"Patient" source}} nil nil)]
(is (= #fhir/code"male" result))))
(testing "without source-type"
(let [library {:statements {:def [{:name "Patient"}]}}
elm
{:path "gender"
:source #elm/expression-ref "Patient"
:type "Property"}
source
{:fhir/type :fhir/Patient :id "0"
:gender #fhir/code"male"}
expr (c/compile {:library library :eval-context "Patient"} elm)
result (core/-eval expr {:library-context {"Patient" source}} nil nil)]
(is (= #fhir/code"male" result)))))
(testing "Observation.value"
(testing "with source-type"
(let [library {:statements {:def [{:name "Observation"}]}}
elm
{:path "value"
:source #elm/expression-ref "Observation"
:type "Property"
:life/source-type "{http://hl7.org/fhir}Observation"}
source
{:fhir/type :fhir/Observation :id "0"
:value "value-114318"}
expr (c/compile {:library library :eval-context "Patient"} elm)
result (core/-eval expr {:library-context {"Observation" source}} nil nil)]
(is (= "value-114318" result))))
(testing "without source-type"
(let [library {:statements {:def [{:name "Observation"}]}}
elm
{:path "value"
:source #elm/expression-ref "Observation"
:type "Property"}
source
{:fhir/type :fhir/Observation :id "0"
:value "value-114318"}
expr (c/compile {:library library :eval-context "Patient"} elm)
result (core/-eval expr {:library-context {"Observation" source}} nil nil)]
(is (= "value-114318" result)))))
(testing "Tuple"
(are [elm result]
(= result (core/-eval (c/compile {:eval-context "Unfiltered"} elm) {} nil nil))
{:resultTypeName "{urn:hl7-org:elm-types:r1}Integer"
:path "id"
:type "Property"
:source
{:type "Tuple"
:resultTypeSpecifier
{:type "TupleTypeSpecifier"
:element
[{:name "id"
:type {:name "{urn:hl7-org:elm-types:r1}Integer" :type "NamedTypeSpecifier"}}
{:name "name"
:type {:name "{urn:hl7-org:elm-types:r1}String" :type "NamedTypeSpecifier"}}]}
:element
[{:name "id" :value #elm/integer"1"}]}}
1))
(testing "Quantity"
(testing "value"
(are [elm result]
(= result (core/-eval (c/compile {:eval-context "Unfiltered"} elm) {} nil nil))
{:resultTypeName "{urn:hl7-org:elm-types:r1}Decimal"
:path "value"
:type "Property"
:source #elm/quantity[42 "m"]}
42M))
(testing "unit"
(are [elm result]
(= result (core/-eval (c/compile {:eval-context "Unfiltered"} elm) {} nil nil))
{:resultTypeName "{urn:hl7-org:elm-types:r1}String"
:path "unit"
:type "Property"
:source #elm/quantity[42 "m"]}
"m")))
(testing "nil"
(are [elm result]
(= result (core/-eval (c/compile {:eval-context "Unfiltered"} elm) {} nil nil))
{:path "value"
:type "Property"
:source {:type "Null"}}
nil))))
|
[
{
"context": " :songwriter [{:name \"John Lennon\"}\n {",
"end": 426,
"score": 0.9998623728752136,
"start": 415,
"tag": "NAME",
"value": "John Lennon"
},
{
"context": " {:name \"Paul McCartney\"}]\n :producer [{:",
"end": 497,
"score": 0.9998871684074402,
"start": 483,
"tag": "NAME",
"value": "Paul McCartney"
},
{
"context": " :producer [{:name \"George Martin\"}]}\n {:name \"Somet",
"end": 566,
"score": 0.9998709559440613,
"start": 553,
"tag": "NAME",
"value": "George Martin"
},
{
"context": " :songwriter [{:name \"George Harrison\"}]}]})\n(deftest trace-test\n (testing \"should ret",
"end": 799,
"score": 0.9998940229415894,
"start": 784,
"tag": "NAME",
"value": "George Harrison"
},
{
"context": "les\"}]\n :songwriter [{:name \"John Lennon\"}\n {:name \"Paul",
"end": 2117,
"score": 0.9998611807823181,
"start": 2106,
"tag": "NAME",
"value": "John Lennon"
},
{
"context": "nnon\"}\n {:name \"Paul McCartney\"}]\n :producer [{:name \"Geo",
"end": 2177,
"score": 0.9998920559883118,
"start": 2163,
"tag": "NAME",
"value": "Paul McCartney"
},
{
"context": "ney\"}]\n :producer [{:name \"George Martin\"}]}\n {:name \"Something\"",
"end": 2237,
"score": 0.9998394846916199,
"start": 2224,
"tag": "NAME",
"value": "George Martin"
},
{
"context": "rge Martin\"}]}\n {:name \"Something\"\n :number 2\n ",
"end": 2286,
"score": 0.9801713824272156,
"start": 2277,
"tag": "NAME",
"value": "Something"
},
{
"context": "les\"}]\n :songwriter [{:name \"George Harrison\"}]}]})))\n\n (testing \"dissoc the third occurrence",
"end": 2440,
"score": 0.9998642802238464,
"start": 2425,
"tag": "NAME",
"value": "George Harrison"
},
{
"context": "les\"}]\n :songwriter [{:name \"John Lennon\"}\n {:name \"Paul",
"end": 2775,
"score": 0.9998664259910583,
"start": 2764,
"tag": "NAME",
"value": "John Lennon"
},
{
"context": "nnon\"}\n {:name \"Paul McCartney\"}]\n :producer [{:name \"Geo",
"end": 2835,
"score": 0.9998914003372192,
"start": 2821,
"tag": "NAME",
"value": "Paul McCartney"
},
{
"context": "ney\"}]\n :producer [{:name \"George Martin\"}]}\n {:name \"Something\"",
"end": 2895,
"score": 0.9998828768730164,
"start": 2882,
"tag": "NAME",
"value": "George Martin"
},
{
"context": "rge Martin\"}]}\n {:name \"Something\"\n :number 2\n ",
"end": 2944,
"score": 0.9820417165756226,
"start": 2935,
"tag": "NAME",
"value": "Something"
},
{
"context": "les\"}]\n :songwriter [{:name \"George Harrison\"}]}]})))\n\n (testing \"dissoc the second occurrenc",
"end": 3098,
"score": 0.9998568296432495,
"start": 3083,
"tag": "NAME",
"value": "George Harrison"
},
{
"context": "les\"}]\n :songwriter [{:name \"John Lennon\"}\n {:name \"Paul",
"end": 3465,
"score": 0.9998817443847656,
"start": 3454,
"tag": "NAME",
"value": "John Lennon"
},
{
"context": "nnon\"}\n {:name \"Paul McCartney\"}]\n :producer [{:name \"Geo",
"end": 3525,
"score": 0.9998904466629028,
"start": 3511,
"tag": "NAME",
"value": "Paul McCartney"
},
{
"context": "ney\"}]\n :producer [{:name \"George Martin\"}]}\n {:name \"Something\"",
"end": 3585,
"score": 0.9998785853385925,
"start": 3572,
"tag": "NAME",
"value": "George Martin"
},
{
"context": "rge Martin\"}]}\n {:name \"Something\"\n :number 2\n ",
"end": 3634,
"score": 0.9848909974098206,
"start": 3625,
"tag": "NAME",
"value": "Something"
},
{
"context": "les\"}]\n :songwriter [{:name \"George Harrison\"}]}]})))\n\n (testing \"dissoc a key that doesn't e",
"end": 3788,
"score": 0.9998815655708313,
"start": 3773,
"tag": "NAME",
"value": "George Harrison"
},
{
"context": "les\"}]\n :songwriter [{:name \"John Lennon\"}\n {:name \"Paul",
"end": 5030,
"score": 0.9998098015785217,
"start": 5019,
"tag": "NAME",
"value": "John Lennon"
},
{
"context": "nnon\"}\n {:name \"Paul McCartney\"}]\n :producer [{:name \"Geo",
"end": 5090,
"score": 0.9998903274536133,
"start": 5076,
"tag": "NAME",
"value": "Paul McCartney"
},
{
"context": "ney\"}]\n :producer [{:name \"George Martin\"}]}\n {:name \"Something\"",
"end": 5150,
"score": 0.9998787045478821,
"start": 5137,
"tag": "NAME",
"value": "George Martin"
},
{
"context": "rge Martin\"}]}\n {:name \"Something\"\n :number 2\n ",
"end": 5199,
"score": 0.9341233372688293,
"start": 5190,
"tag": "NAME",
"value": "Something"
},
{
"context": "les\"}]\n :songwriter [{:name \"George Harrison\"}]}]})))\n\n (testing \"assoc the fourth occurrence",
"end": 5353,
"score": 0.9998784065246582,
"start": 5338,
"tag": "NAME",
"value": "George Harrison"
},
{
"context": "foo\"}]\n :songwriter [{:name \"John Lennon\"}\n {:name \"Paul",
"end": 5735,
"score": 0.9996752142906189,
"start": 5724,
"tag": "NAME",
"value": "John Lennon"
},
{
"context": "nnon\"}\n {:name \"Paul McCartney\"}]\n :producer [{:name \"Geo",
"end": 5795,
"score": 0.9998733401298523,
"start": 5781,
"tag": "NAME",
"value": "Paul McCartney"
},
{
"context": "ney\"}]\n :producer [{:name \"George Martin\"}]}\n {:name \"Something\"",
"end": 5855,
"score": 0.994431734085083,
"start": 5842,
"tag": "NAME",
"value": "George Martin"
},
{
"context": "rge Martin\"}]}\n {:name \"Something\"\n :number 2\n ",
"end": 5904,
"score": 0.8463615775108337,
"start": 5895,
"tag": "NAME",
"value": "Something"
},
{
"context": "les\"}]\n :songwriter [{:name \"George Harrison\"}]}]}))\n\n (testing \"assoc the first occurrence",
"end": 6058,
"score": 0.9998588562011719,
"start": 6043,
"tag": "NAME",
"value": "George Harrison"
},
{
"context": "s\"}]\n :songwriter [{:name \"John Lennon\"}\n {:name \"Pa",
"end": 6462,
"score": 0.9998505711555481,
"start": 6451,
"tag": "NAME",
"value": "John Lennon"
},
{
"context": "on\"}\n {:name \"Paul McCartney\"}]\n :producer [{:name \"G",
"end": 6524,
"score": 0.9998918771743774,
"start": 6510,
"tag": "NAME",
"value": "Paul McCartney"
},
{
"context": "y\"}]\n :producer [{:name \"George Martin\"}]}\n {:name \"Somethin",
"end": 6586,
"score": 0.9997788071632385,
"start": 6573,
"tag": "NAME",
"value": "George Martin"
},
{
"context": "e Martin\"}]}\n {:name \"Something\"\n :number 2\n ",
"end": 6637,
"score": 0.9612893462181091,
"start": 6628,
"tag": "NAME",
"value": "Something"
},
{
"context": "s\"}]\n :songwriter [{:name \"George Harrison\"}]}]})))\n\n (testing \"assoc a key that doesn't ",
"end": 6797,
"score": 0.9998844265937805,
"start": 6782,
"tag": "NAME",
"value": "George Harrison"
},
{
"context": "s\"}]\n :songwriter [{:name \"John Lennon\"}\n {:name \"Pa",
"end": 7233,
"score": 0.9998672008514404,
"start": 7222,
"tag": "NAME",
"value": "John Lennon"
},
{
"context": "on\"}\n {:name \"Paul McCartney\"}]\n :producer [{:name \"G",
"end": 7295,
"score": 0.9998887777328491,
"start": 7281,
"tag": "NAME",
"value": "Paul McCartney"
},
{
"context": "y\"}]\n :producer [{:name \"George Martin\"}]}\n {:name \"Somethin",
"end": 7357,
"score": 0.9998772740364075,
"start": 7344,
"tag": "NAME",
"value": "George Martin"
},
{
"context": "s\"}]\n :songwriter [{:name \"George Harrison\"}]}]}))))\n\n (testing \"throw an assertion if n is",
"end": 7568,
"score": 0.9998899698257446,
"start": 7553,
"tag": "NAME",
"value": "George Harrison"
},
{
"context": "eetle\"\n :producer [{:name \"George Martin\"}]}\n {:name \"Something\"",
"end": 8525,
"score": 0.9998317360877991,
"start": 8512,
"tag": "NAME",
"value": "George Martin"
},
{
"context": "les\"}]\n :songwriter [{:name \"John Lennon\"}\n {:name \"Paul",
"end": 9121,
"score": 0.999885618686676,
"start": 9110,
"tag": "NAME",
"value": "John Lennon"
},
{
"context": "nnon\"}\n {:name \"Paul McCartney\"}]\n :producer [{:name \"Geo",
"end": 9181,
"score": 0.9998928308486938,
"start": 9167,
"tag": "NAME",
"value": "Paul McCartney"
},
{
"context": "ney\"}]\n :producer [{:name \"George Martin\"}]}\n {:name \"Something\"",
"end": 9241,
"score": 0.9998781085014343,
"start": 9228,
"tag": "NAME",
"value": "George Martin"
},
{
"context": "les\"}]\n :songwriter [{:name \"George Harrison\"}]}]}))))\n\n(deftest update-all-test\n(testing \"inc",
"end": 9444,
"score": 0.9998997449874878,
"start": 9429,
"tag": "NAME",
"value": "George Harrison"
},
{
"context": "les\"}]\n :songwriter [{:name \"John Lennon\"}\n {:name \"Paul",
"end": 9865,
"score": 0.9998548626899719,
"start": 9854,
"tag": "NAME",
"value": "John Lennon"
},
{
"context": "nnon\"}\n {:name \"Paul McCartney\"}]\n :producer [{:name \"Geo",
"end": 9925,
"score": 0.9998979568481445,
"start": 9911,
"tag": "NAME",
"value": "Paul McCartney"
},
{
"context": "ney\"}]\n :producer [{:name \"George Martin\"}]}\n {:name \"Something\"",
"end": 9985,
"score": 0.9998810291290283,
"start": 9972,
"tag": "NAME",
"value": "George Martin"
},
{
"context": "rge Martin\"}]}\n {:name \"Something\"\n :number 12\n ",
"end": 10034,
"score": 0.846444845199585,
"start": 10025,
"tag": "NAME",
"value": "Something"
},
{
"context": "les\"}]\n :songwriter [{:name \"George Harrison\"}]}]})))\n\n (testing \"update the value of occurre",
"end": 10189,
"score": 0.9998847842216492,
"start": 10174,
"tag": "NAME",
"value": "George Harrison"
},
{
"context": "les\"}]\n :songwriter [{:name \"John Lennon\"}\n {:name \"Paul",
"end": 11252,
"score": 0.9998626708984375,
"start": 11241,
"tag": "NAME",
"value": "John Lennon"
},
{
"context": "nnon\"}\n {:name \"Paul McCartney\"}]\n :producer [{:name \"Georg",
"end": 11312,
"score": 0.9998644590377808,
"start": 11298,
"tag": "NAME",
"value": "Paul McCartney"
},
{
"context": "rtney\"}]\n :producer [{:name \"George Martin\"}]}\n {:name \"Something\"\n ",
"end": 11370,
"score": 0.9998825192451477,
"start": 11357,
"tag": "NAME",
"value": "George Martin"
},
{
"context": "les\"}]\n :songwriter [{:name \"George Harrison\"}]}]}))))\n",
"end": 11559,
"score": 0.9998916387557983,
"start": 11544,
"tag": "NAME",
"value": "George Harrison"
}
] |
test/tellar/core_test.clj
|
murtaza0xFF/tellar
| 1 |
(ns tellar.core-test
(:require [clojure.test :refer :all]
[tellar.core :refer :all]))
(def nested-structure {:name "Abbey Road"
:artist [{:name "The Beatles"}]
:tracks [{:name "Come Together"
:number 1
:artist [{:name "The Beatles"}]
:songwriter [{:name "John Lennon"}
{:name "Paul McCartney"}]
:producer [{:name "George Martin"}]}
{:name "Something"
:number 2
:artist [{:name "The Beatles"}]
:songwriter [{:name "George Harrison"}]}]})
(deftest trace-test
(testing "should return all the paths leading to the given node"
(is (= (trace nested-structure :name)
[[:name]
[:artist 0 :name]
[:tracks 0 :name]
[:tracks 0 :artist 0 :name]
[:tracks 0 :songwriter 0 :name]
[:tracks 0 :songwriter 1 :name]
[:tracks 0 :producer 0 :name]
[:tracks 1 :name]
[:tracks 1 :artist 0 :name]
[:tracks 1 :songwriter 0 :name]]))
(is (= (trace nested-structure :tracks)
[[:tracks]]))
(is (= (trace nested-structure :artist)
[[:artist] [:tracks 0 :artist] [:tracks 1 :artist]]))
(is (= (trace nested-structure :number)
[[:tracks 0 :number] [:tracks 1 :number]]))
(is (= (trace nested-structure :songwriter)
[[:tracks 0 :songwriter] [:tracks 1 :songwriter]]))
(is (= (trace nested-structure :foo)
[]))))
(deftest dissoc-nth-test
(testing "dissoc the first occurrence of :name"
(is (= (dissoc-nth nested-structure :name 1)
{:artist [{:name "The Beatles"}]
:tracks [{:name "Come Together"
:number 1
:artist [{:name "The Beatles"}]
:songwriter [{:name "John Lennon"}
{:name "Paul McCartney"}]
:producer [{:name "George Martin"}]}
{:name "Something"
:number 2
:artist [{:name "The Beatles"}]
:songwriter [{:name "George Harrison"}]}]})))
(testing "dissoc the third occurrence of :name"
(is (= (dissoc-nth nested-structure :name 3)
{:name "Abbey Road"
:artist [{:name "The Beatles"}]
:tracks [{:number 1
:artist [{:name "The Beatles"}]
:songwriter [{:name "John Lennon"}
{:name "Paul McCartney"}]
:producer [{:name "George Martin"}]}
{:name "Something"
:number 2
:artist [{:name "The Beatles"}]
:songwriter [{:name "George Harrison"}]}]})))
(testing "dissoc the second occurrence of :name"
(is (= (dissoc-nth nested-structure :name 2)
{:name "Abbey Road"
:artist [{}]
:tracks [{:name "Come Together"
:number 1
:artist [{:name "The Beatles"}]
:songwriter [{:name "John Lennon"}
{:name "Paul McCartney"}]
:producer [{:name "George Martin"}]}
{:name "Something"
:number 2
:artist [{:name "The Beatles"}]
:songwriter [{:name "George Harrison"}]}]})))
(testing "dissoc a key that doesn't exist in the map"
(is (= (dissoc-nth nested-structure :foo 2) nested-structure)))
(testing "throw an assertion if n is less than 1"
(is (thrown? AssertionError (assoc-nth nested-structure :name "foo" 0)))))
(deftest dissoc-all-test
(testing "dissoc all occurrences of :name"
(is (= (dissoc-all nested-structure :name)
{:artist [{}]
:tracks [{:number 1
:artist [{}]
:songwriter [{}
{}]
:producer [{}]}
{:number 2
:artist [{}]
:songwriter [{}]}]})))
(testing "dissoc a key that doesn't exist in the map"
(is (= (dissoc-all nested-structure :foo) nested-structure))))
(deftest assoc-nth-test
(testing "assoc the first occurrence of :name"
(is (= (assoc-nth nested-structure :name "foo" 1)
{:name "foo"
:artist [{:name "The Beatles"}]
:tracks [{:name "Come Together"
:number 1
:artist [{:name "The Beatles"}]
:songwriter [{:name "John Lennon"}
{:name "Paul McCartney"}]
:producer [{:name "George Martin"}]}
{:name "Something"
:number 2
:artist [{:name "The Beatles"}]
:songwriter [{:name "George Harrison"}]}]})))
(testing "assoc the fourth occurrence of :name"
(is (= (assoc-nth nested-structure :name "foo" 4)
{:name "Abbey Road"
:artist [{:name "The Beatles"}]
:tracks [{:name "Come Together"
:number 1
:artist [{:name "foo"}]
:songwriter [{:name "John Lennon"}
{:name "Paul McCartney"}]
:producer [{:name "George Martin"}]}
{:name "Something"
:number 2
:artist [{:name "The Beatles"}]
:songwriter [{:name "George Harrison"}]}]}))
(testing "assoc the first occurrence of :artist"
(is (= (assoc-nth nested-structure :artist "Betelgeuse" 1)
{:name "Abbey Road"
:artist "Betelgeuse"
:tracks [{:name "Come Together"
:number 1
:artist [{:name "The Beatles"}]
:songwriter [{:name "John Lennon"}
{:name "Paul McCartney"}]
:producer [{:name "George Martin"}]}
{:name "Something"
:number 2
:artist [{:name "The Beatles"}]
:songwriter [{:name "George Harrison"}]}]})))
(testing "assoc a key that doesn't exist"
(is (= (assoc-nth nested-structure :record-label "the-record-label-for-this-album" 4)
{:name "Abbey Road"
:artist [{:name "The Beatles"}]
:tracks [{:name "Come Together"
:number 1
:artist [{:name "The Beatles"}]
:songwriter [{:name "John Lennon"}
{:name "Paul McCartney"}]
:producer [{:name "George Martin"}]}
{:name "Something"
:number 2
:artist [{:name "The Beatles"}]
:songwriter [{:name "George Harrison"}]}]}))))
(testing "throw an assertion if n is less than 1"
(is (thrown? AssertionError (assoc-nth nested-structure :name "foo" 0)))))
(deftest assoc-all-test
(testing "assoc all occurrences of :foo"
(is (= (assoc-all [[[{:foo 1 :bar 2} {:baz 3}]]] :foo 2)
[[[{:foo 2 :bar 2} {:baz 3}]]])))
(testing "assoc all occurrences of :tracks"
(is (= (assoc-all nested-structure :tracks "foo")
{:name "Abbey Road"
:artist [{:name "The Beatles"}]
:tracks "foo"})))
(testing "assoc all occurrences of :songwriter"
(is (= (assoc-all nested-structure :songwriter "Beetle")
{:name "Abbey Road"
:artist [{:name "The Beatles"}]
:tracks [{:name "Come Together"
:number 1
:artist [{:name "The Beatles"}]
:songwriter "Beetle"
:producer [{:name "George Martin"}]}
{:name "Something"
:number 2
:artist [{:name "The Beatles"}]
:songwriter "Beetle"}]})))
(testing "assoc a key that doesn't exist"
(is (= (assoc-all nested-structure :record "the-record-label-for-this-album")
{:name "Abbey Road"
:artist [{:name "The Beatles"}]
:tracks [{:name "Come Together"
:number 1
:artist [{:name "The Beatles"}]
:songwriter [{:name "John Lennon"}
{:name "Paul McCartney"}]
:producer [{:name "George Martin"}]}
{:name "Something"
:number 2
:artist [{:name "The Beatles"}]
:songwriter [{:name "George Harrison"}]}]}))))
(deftest update-all-test
(testing "increment all occurrences of :number by 10"
(is (= (update-all nested-structure :number + 10)
{:name "Abbey Road"
:artist [{:name "The Beatles"}]
:tracks [{:name "Come Together"
:number 11
:artist [{:name "The Beatles"}]
:songwriter [{:name "John Lennon"}
{:name "Paul McCartney"}]
:producer [{:name "George Martin"}]}
{:name "Something"
:number 12
:artist [{:name "The Beatles"}]
:songwriter [{:name "George Harrison"}]}]})))
(testing "update the value of occurrences of :name to beetle"
(is (= (update-all nested-structure :name (constantly "beetle"))
{:name "beetle"
:artist [{:name "beetle"}]
:tracks [{:name "beetle"
:number 1
:artist [{:name "beetle"}]
:songwriter [{:name "beetle"}
{:name "beetle"}]
:producer [{:name "beetle"}]}
{:name "beetle"
:number 2
:artist [{:name "beetle"}]
:songwriter [{:name "beetle"}]}]})))
(testing "update the value of occurrences of a key that doesn't exist"
(is (= (update-all nested-structure :foo (constantly "beetle"))
{:name "Abbey Road"
:artist [{:name "The Beatles"}]
:tracks [{:name "Come Together"
:number 1
:artist [{:name "The Beatles"}]
:songwriter [{:name "John Lennon"}
{:name "Paul McCartney"}]
:producer [{:name "George Martin"}]}
{:name "Something"
:number 2
:artist [{:name "The Beatles"}]
:songwriter [{:name "George Harrison"}]}]}))))
|
123703
|
(ns tellar.core-test
(:require [clojure.test :refer :all]
[tellar.core :refer :all]))
(def nested-structure {:name "Abbey Road"
:artist [{:name "The Beatles"}]
:tracks [{:name "Come Together"
:number 1
:artist [{:name "The Beatles"}]
:songwriter [{:name "<NAME>"}
{:name "<NAME>"}]
:producer [{:name "<NAME>"}]}
{:name "Something"
:number 2
:artist [{:name "The Beatles"}]
:songwriter [{:name "<NAME>"}]}]})
(deftest trace-test
(testing "should return all the paths leading to the given node"
(is (= (trace nested-structure :name)
[[:name]
[:artist 0 :name]
[:tracks 0 :name]
[:tracks 0 :artist 0 :name]
[:tracks 0 :songwriter 0 :name]
[:tracks 0 :songwriter 1 :name]
[:tracks 0 :producer 0 :name]
[:tracks 1 :name]
[:tracks 1 :artist 0 :name]
[:tracks 1 :songwriter 0 :name]]))
(is (= (trace nested-structure :tracks)
[[:tracks]]))
(is (= (trace nested-structure :artist)
[[:artist] [:tracks 0 :artist] [:tracks 1 :artist]]))
(is (= (trace nested-structure :number)
[[:tracks 0 :number] [:tracks 1 :number]]))
(is (= (trace nested-structure :songwriter)
[[:tracks 0 :songwriter] [:tracks 1 :songwriter]]))
(is (= (trace nested-structure :foo)
[]))))
(deftest dissoc-nth-test
(testing "dissoc the first occurrence of :name"
(is (= (dissoc-nth nested-structure :name 1)
{:artist [{:name "The Beatles"}]
:tracks [{:name "Come Together"
:number 1
:artist [{:name "The Beatles"}]
:songwriter [{:name "<NAME>"}
{:name "<NAME>"}]
:producer [{:name "<NAME>"}]}
{:name "<NAME>"
:number 2
:artist [{:name "The Beatles"}]
:songwriter [{:name "<NAME>"}]}]})))
(testing "dissoc the third occurrence of :name"
(is (= (dissoc-nth nested-structure :name 3)
{:name "Abbey Road"
:artist [{:name "The Beatles"}]
:tracks [{:number 1
:artist [{:name "The Beatles"}]
:songwriter [{:name "<NAME>"}
{:name "<NAME>"}]
:producer [{:name "<NAME>"}]}
{:name "<NAME>"
:number 2
:artist [{:name "The Beatles"}]
:songwriter [{:name "<NAME>"}]}]})))
(testing "dissoc the second occurrence of :name"
(is (= (dissoc-nth nested-structure :name 2)
{:name "Abbey Road"
:artist [{}]
:tracks [{:name "Come Together"
:number 1
:artist [{:name "The Beatles"}]
:songwriter [{:name "<NAME>"}
{:name "<NAME>"}]
:producer [{:name "<NAME>"}]}
{:name "<NAME>"
:number 2
:artist [{:name "The Beatles"}]
:songwriter [{:name "<NAME>"}]}]})))
(testing "dissoc a key that doesn't exist in the map"
(is (= (dissoc-nth nested-structure :foo 2) nested-structure)))
(testing "throw an assertion if n is less than 1"
(is (thrown? AssertionError (assoc-nth nested-structure :name "foo" 0)))))
(deftest dissoc-all-test
(testing "dissoc all occurrences of :name"
(is (= (dissoc-all nested-structure :name)
{:artist [{}]
:tracks [{:number 1
:artist [{}]
:songwriter [{}
{}]
:producer [{}]}
{:number 2
:artist [{}]
:songwriter [{}]}]})))
(testing "dissoc a key that doesn't exist in the map"
(is (= (dissoc-all nested-structure :foo) nested-structure))))
(deftest assoc-nth-test
(testing "assoc the first occurrence of :name"
(is (= (assoc-nth nested-structure :name "foo" 1)
{:name "foo"
:artist [{:name "The Beatles"}]
:tracks [{:name "Come Together"
:number 1
:artist [{:name "The Beatles"}]
:songwriter [{:name "<NAME>"}
{:name "<NAME>"}]
:producer [{:name "<NAME>"}]}
{:name "<NAME>"
:number 2
:artist [{:name "The Beatles"}]
:songwriter [{:name "<NAME>"}]}]})))
(testing "assoc the fourth occurrence of :name"
(is (= (assoc-nth nested-structure :name "foo" 4)
{:name "Abbey Road"
:artist [{:name "The Beatles"}]
:tracks [{:name "Come Together"
:number 1
:artist [{:name "foo"}]
:songwriter [{:name "<NAME>"}
{:name "<NAME>"}]
:producer [{:name "<NAME>"}]}
{:name "<NAME>"
:number 2
:artist [{:name "The Beatles"}]
:songwriter [{:name "<NAME>"}]}]}))
(testing "assoc the first occurrence of :artist"
(is (= (assoc-nth nested-structure :artist "Betelgeuse" 1)
{:name "Abbey Road"
:artist "Betelgeuse"
:tracks [{:name "Come Together"
:number 1
:artist [{:name "The Beatles"}]
:songwriter [{:name "<NAME>"}
{:name "<NAME>"}]
:producer [{:name "<NAME>"}]}
{:name "<NAME>"
:number 2
:artist [{:name "The Beatles"}]
:songwriter [{:name "<NAME>"}]}]})))
(testing "assoc a key that doesn't exist"
(is (= (assoc-nth nested-structure :record-label "the-record-label-for-this-album" 4)
{:name "Abbey Road"
:artist [{:name "The Beatles"}]
:tracks [{:name "Come Together"
:number 1
:artist [{:name "The Beatles"}]
:songwriter [{:name "<NAME>"}
{:name "<NAME>"}]
:producer [{:name "<NAME>"}]}
{:name "Something"
:number 2
:artist [{:name "The Beatles"}]
:songwriter [{:name "<NAME>"}]}]}))))
(testing "throw an assertion if n is less than 1"
(is (thrown? AssertionError (assoc-nth nested-structure :name "foo" 0)))))
(deftest assoc-all-test
(testing "assoc all occurrences of :foo"
(is (= (assoc-all [[[{:foo 1 :bar 2} {:baz 3}]]] :foo 2)
[[[{:foo 2 :bar 2} {:baz 3}]]])))
(testing "assoc all occurrences of :tracks"
(is (= (assoc-all nested-structure :tracks "foo")
{:name "Abbey Road"
:artist [{:name "The Beatles"}]
:tracks "foo"})))
(testing "assoc all occurrences of :songwriter"
(is (= (assoc-all nested-structure :songwriter "Beetle")
{:name "Abbey Road"
:artist [{:name "The Beatles"}]
:tracks [{:name "Come Together"
:number 1
:artist [{:name "The Beatles"}]
:songwriter "Beetle"
:producer [{:name "<NAME>"}]}
{:name "Something"
:number 2
:artist [{:name "The Beatles"}]
:songwriter "Beetle"}]})))
(testing "assoc a key that doesn't exist"
(is (= (assoc-all nested-structure :record "the-record-label-for-this-album")
{:name "Abbey Road"
:artist [{:name "The Beatles"}]
:tracks [{:name "Come Together"
:number 1
:artist [{:name "The Beatles"}]
:songwriter [{:name "<NAME>"}
{:name "<NAME>"}]
:producer [{:name "<NAME>"}]}
{:name "Something"
:number 2
:artist [{:name "The Beatles"}]
:songwriter [{:name "<NAME>"}]}]}))))
(deftest update-all-test
(testing "increment all occurrences of :number by 10"
(is (= (update-all nested-structure :number + 10)
{:name "Abbey Road"
:artist [{:name "The Beatles"}]
:tracks [{:name "Come Together"
:number 11
:artist [{:name "The Beatles"}]
:songwriter [{:name "<NAME>"}
{:name "<NAME>"}]
:producer [{:name "<NAME>"}]}
{:name "<NAME>"
:number 12
:artist [{:name "The Beatles"}]
:songwriter [{:name "<NAME>"}]}]})))
(testing "update the value of occurrences of :name to beetle"
(is (= (update-all nested-structure :name (constantly "beetle"))
{:name "beetle"
:artist [{:name "beetle"}]
:tracks [{:name "beetle"
:number 1
:artist [{:name "beetle"}]
:songwriter [{:name "beetle"}
{:name "beetle"}]
:producer [{:name "beetle"}]}
{:name "beetle"
:number 2
:artist [{:name "beetle"}]
:songwriter [{:name "beetle"}]}]})))
(testing "update the value of occurrences of a key that doesn't exist"
(is (= (update-all nested-structure :foo (constantly "beetle"))
{:name "Abbey Road"
:artist [{:name "The Beatles"}]
:tracks [{:name "Come Together"
:number 1
:artist [{:name "The Beatles"}]
:songwriter [{:name "<NAME>"}
{:name "<NAME>"}]
:producer [{:name "<NAME>"}]}
{:name "Something"
:number 2
:artist [{:name "The Beatles"}]
:songwriter [{:name "<NAME>"}]}]}))))
| true |
(ns tellar.core-test
(:require [clojure.test :refer :all]
[tellar.core :refer :all]))
(def nested-structure {:name "Abbey Road"
:artist [{:name "The Beatles"}]
:tracks [{:name "Come Together"
:number 1
:artist [{:name "The Beatles"}]
:songwriter [{:name "PI:NAME:<NAME>END_PI"}
{:name "PI:NAME:<NAME>END_PI"}]
:producer [{:name "PI:NAME:<NAME>END_PI"}]}
{:name "Something"
:number 2
:artist [{:name "The Beatles"}]
:songwriter [{:name "PI:NAME:<NAME>END_PI"}]}]})
(deftest trace-test
(testing "should return all the paths leading to the given node"
(is (= (trace nested-structure :name)
[[:name]
[:artist 0 :name]
[:tracks 0 :name]
[:tracks 0 :artist 0 :name]
[:tracks 0 :songwriter 0 :name]
[:tracks 0 :songwriter 1 :name]
[:tracks 0 :producer 0 :name]
[:tracks 1 :name]
[:tracks 1 :artist 0 :name]
[:tracks 1 :songwriter 0 :name]]))
(is (= (trace nested-structure :tracks)
[[:tracks]]))
(is (= (trace nested-structure :artist)
[[:artist] [:tracks 0 :artist] [:tracks 1 :artist]]))
(is (= (trace nested-structure :number)
[[:tracks 0 :number] [:tracks 1 :number]]))
(is (= (trace nested-structure :songwriter)
[[:tracks 0 :songwriter] [:tracks 1 :songwriter]]))
(is (= (trace nested-structure :foo)
[]))))
(deftest dissoc-nth-test
(testing "dissoc the first occurrence of :name"
(is (= (dissoc-nth nested-structure :name 1)
{:artist [{:name "The Beatles"}]
:tracks [{:name "Come Together"
:number 1
:artist [{:name "The Beatles"}]
:songwriter [{:name "PI:NAME:<NAME>END_PI"}
{:name "PI:NAME:<NAME>END_PI"}]
:producer [{:name "PI:NAME:<NAME>END_PI"}]}
{:name "PI:NAME:<NAME>END_PI"
:number 2
:artist [{:name "The Beatles"}]
:songwriter [{:name "PI:NAME:<NAME>END_PI"}]}]})))
(testing "dissoc the third occurrence of :name"
(is (= (dissoc-nth nested-structure :name 3)
{:name "Abbey Road"
:artist [{:name "The Beatles"}]
:tracks [{:number 1
:artist [{:name "The Beatles"}]
:songwriter [{:name "PI:NAME:<NAME>END_PI"}
{:name "PI:NAME:<NAME>END_PI"}]
:producer [{:name "PI:NAME:<NAME>END_PI"}]}
{:name "PI:NAME:<NAME>END_PI"
:number 2
:artist [{:name "The Beatles"}]
:songwriter [{:name "PI:NAME:<NAME>END_PI"}]}]})))
(testing "dissoc the second occurrence of :name"
(is (= (dissoc-nth nested-structure :name 2)
{:name "Abbey Road"
:artist [{}]
:tracks [{:name "Come Together"
:number 1
:artist [{:name "The Beatles"}]
:songwriter [{:name "PI:NAME:<NAME>END_PI"}
{:name "PI:NAME:<NAME>END_PI"}]
:producer [{:name "PI:NAME:<NAME>END_PI"}]}
{:name "PI:NAME:<NAME>END_PI"
:number 2
:artist [{:name "The Beatles"}]
:songwriter [{:name "PI:NAME:<NAME>END_PI"}]}]})))
(testing "dissoc a key that doesn't exist in the map"
(is (= (dissoc-nth nested-structure :foo 2) nested-structure)))
(testing "throw an assertion if n is less than 1"
(is (thrown? AssertionError (assoc-nth nested-structure :name "foo" 0)))))
(deftest dissoc-all-test
(testing "dissoc all occurrences of :name"
(is (= (dissoc-all nested-structure :name)
{:artist [{}]
:tracks [{:number 1
:artist [{}]
:songwriter [{}
{}]
:producer [{}]}
{:number 2
:artist [{}]
:songwriter [{}]}]})))
(testing "dissoc a key that doesn't exist in the map"
(is (= (dissoc-all nested-structure :foo) nested-structure))))
(deftest assoc-nth-test
(testing "assoc the first occurrence of :name"
(is (= (assoc-nth nested-structure :name "foo" 1)
{:name "foo"
:artist [{:name "The Beatles"}]
:tracks [{:name "Come Together"
:number 1
:artist [{:name "The Beatles"}]
:songwriter [{:name "PI:NAME:<NAME>END_PI"}
{:name "PI:NAME:<NAME>END_PI"}]
:producer [{:name "PI:NAME:<NAME>END_PI"}]}
{:name "PI:NAME:<NAME>END_PI"
:number 2
:artist [{:name "The Beatles"}]
:songwriter [{:name "PI:NAME:<NAME>END_PI"}]}]})))
(testing "assoc the fourth occurrence of :name"
(is (= (assoc-nth nested-structure :name "foo" 4)
{:name "Abbey Road"
:artist [{:name "The Beatles"}]
:tracks [{:name "Come Together"
:number 1
:artist [{:name "foo"}]
:songwriter [{:name "PI:NAME:<NAME>END_PI"}
{:name "PI:NAME:<NAME>END_PI"}]
:producer [{:name "PI:NAME:<NAME>END_PI"}]}
{:name "PI:NAME:<NAME>END_PI"
:number 2
:artist [{:name "The Beatles"}]
:songwriter [{:name "PI:NAME:<NAME>END_PI"}]}]}))
(testing "assoc the first occurrence of :artist"
(is (= (assoc-nth nested-structure :artist "Betelgeuse" 1)
{:name "Abbey Road"
:artist "Betelgeuse"
:tracks [{:name "Come Together"
:number 1
:artist [{:name "The Beatles"}]
:songwriter [{:name "PI:NAME:<NAME>END_PI"}
{:name "PI:NAME:<NAME>END_PI"}]
:producer [{:name "PI:NAME:<NAME>END_PI"}]}
{:name "PI:NAME:<NAME>END_PI"
:number 2
:artist [{:name "The Beatles"}]
:songwriter [{:name "PI:NAME:<NAME>END_PI"}]}]})))
(testing "assoc a key that doesn't exist"
(is (= (assoc-nth nested-structure :record-label "the-record-label-for-this-album" 4)
{:name "Abbey Road"
:artist [{:name "The Beatles"}]
:tracks [{:name "Come Together"
:number 1
:artist [{:name "The Beatles"}]
:songwriter [{:name "PI:NAME:<NAME>END_PI"}
{:name "PI:NAME:<NAME>END_PI"}]
:producer [{:name "PI:NAME:<NAME>END_PI"}]}
{:name "Something"
:number 2
:artist [{:name "The Beatles"}]
:songwriter [{:name "PI:NAME:<NAME>END_PI"}]}]}))))
(testing "throw an assertion if n is less than 1"
(is (thrown? AssertionError (assoc-nth nested-structure :name "foo" 0)))))
(deftest assoc-all-test
(testing "assoc all occurrences of :foo"
(is (= (assoc-all [[[{:foo 1 :bar 2} {:baz 3}]]] :foo 2)
[[[{:foo 2 :bar 2} {:baz 3}]]])))
(testing "assoc all occurrences of :tracks"
(is (= (assoc-all nested-structure :tracks "foo")
{:name "Abbey Road"
:artist [{:name "The Beatles"}]
:tracks "foo"})))
(testing "assoc all occurrences of :songwriter"
(is (= (assoc-all nested-structure :songwriter "Beetle")
{:name "Abbey Road"
:artist [{:name "The Beatles"}]
:tracks [{:name "Come Together"
:number 1
:artist [{:name "The Beatles"}]
:songwriter "Beetle"
:producer [{:name "PI:NAME:<NAME>END_PI"}]}
{:name "Something"
:number 2
:artist [{:name "The Beatles"}]
:songwriter "Beetle"}]})))
(testing "assoc a key that doesn't exist"
(is (= (assoc-all nested-structure :record "the-record-label-for-this-album")
{:name "Abbey Road"
:artist [{:name "The Beatles"}]
:tracks [{:name "Come Together"
:number 1
:artist [{:name "The Beatles"}]
:songwriter [{:name "PI:NAME:<NAME>END_PI"}
{:name "PI:NAME:<NAME>END_PI"}]
:producer [{:name "PI:NAME:<NAME>END_PI"}]}
{:name "Something"
:number 2
:artist [{:name "The Beatles"}]
:songwriter [{:name "PI:NAME:<NAME>END_PI"}]}]}))))
(deftest update-all-test
(testing "increment all occurrences of :number by 10"
(is (= (update-all nested-structure :number + 10)
{:name "Abbey Road"
:artist [{:name "The Beatles"}]
:tracks [{:name "Come Together"
:number 11
:artist [{:name "The Beatles"}]
:songwriter [{:name "PI:NAME:<NAME>END_PI"}
{:name "PI:NAME:<NAME>END_PI"}]
:producer [{:name "PI:NAME:<NAME>END_PI"}]}
{:name "PI:NAME:<NAME>END_PI"
:number 12
:artist [{:name "The Beatles"}]
:songwriter [{:name "PI:NAME:<NAME>END_PI"}]}]})))
(testing "update the value of occurrences of :name to beetle"
(is (= (update-all nested-structure :name (constantly "beetle"))
{:name "beetle"
:artist [{:name "beetle"}]
:tracks [{:name "beetle"
:number 1
:artist [{:name "beetle"}]
:songwriter [{:name "beetle"}
{:name "beetle"}]
:producer [{:name "beetle"}]}
{:name "beetle"
:number 2
:artist [{:name "beetle"}]
:songwriter [{:name "beetle"}]}]})))
(testing "update the value of occurrences of a key that doesn't exist"
(is (= (update-all nested-structure :foo (constantly "beetle"))
{:name "Abbey Road"
:artist [{:name "The Beatles"}]
:tracks [{:name "Come Together"
:number 1
:artist [{:name "The Beatles"}]
:songwriter [{:name "PI:NAME:<NAME>END_PI"}
{:name "PI:NAME:<NAME>END_PI"}]
:producer [{:name "PI:NAME:<NAME>END_PI"}]}
{:name "Something"
:number 2
:artist [{:name "The Beatles"}]
:songwriter [{:name "PI:NAME:<NAME>END_PI"}]}]}))))
|
[
{
"context": "ayah.client :as ayah]))\n\n(def AYAH_PUBLISHER_KEY \"00000\")\n(def AYAH_SCORING_KEY \"11111\")\n\n(defn get-pag",
"end": 540,
"score": 0.9390770196914673,
"start": 535,
"tag": "KEY",
"value": "00000"
},
{
"context": "H_PUBLISHER_KEY \"00000\")\n(def AYAH_SCORING_KEY \"11111\")\n\n(defn get-page-html\n [show-form message param",
"end": 573,
"score": 0.9984821081161499,
"start": 568,
"tag": "KEY",
"value": "11111"
},
{
"context": "s \"/\")\n (route/not-found \"Not Found\"))\n\n; http://mmcgrana.github.com/2010/07/develop-deploy-clojure-web-app",
"end": 1558,
"score": 0.7818970680236816,
"start": 1550,
"tag": "USERNAME",
"value": "mmcgrana"
}
] |
src/sample/handler.clj
|
davidlloyd/ayah-clj
| 0 |
(ns sample.handler
(:use
[compojure.core :only (defroutes GET POST PUT)]
[hiccup.page :only [html5]]
[compojure.handler :only [api]]
[ring.middleware.multipart-params :only [wrap-multipart-params]]
[ring.middleware.session.memory :only [memory-store]]
[ring.middleware.reload :only (wrap-reload)])
(:require [ring.adapter.jetty :as ring]
[compojure.handler :as handler]
[compojure.route :as route]
[ayah.client :as ayah]))
(def AYAH_PUBLISHER_KEY "00000")
(def AYAH_SCORING_KEY "11111")
(defn get-page-html
[show-form message params]
(html5
[:head
[:title "Are You A Human example"]
]
[:body
[:h1 "Are You A Human Example"]
[:div message]
(if show-form
[:form {:method "POST"}
[:div "First Name:" [:input {:name "firstname" :id "firstname"}]]
[:div "Last Name:" [:input {:name "lastname" :id "lastname"}]]
[:div (ayah/get-publisher-html AYAH_PUBLISHER_KEY)]
[:input {:type "submit" :value "Submit"}]
]
[:h2 "Thank you for submitting the form " (params :firstname) "."])
]))
(defroutes home-routes
(GET "/" []
(get-page-html true "" {}))
(POST "/" [:as {params :params}]
(let [score (ayah/score-result AYAH_SCORING_KEY (params :session_secret))]
(get-page-html (not score) (if score "You are a human!" "Try again.") params))))
(defroutes app-routes
home-routes
; (route/resources "/")
(route/not-found "Not Found"))
; http://mmcgrana.github.com/2010/07/develop-deploy-clojure-web-applications.html
(defn wrap-if [handler pred wrapper & args]
(if pred
(apply wrapper handler args)
handler))
(def app (-> #'app-routes
api
wrap-multipart-params
(wrap-if true ; development?
wrap-reload {:dirs ["src"]})))
(defn start [port]
(ring/run-jetty app {:port port :join? false}))
(defn -main []
(let [port (Integer/parseInt (or (System/getenv "PORT") "8080"))]
(start port)))
|
18875
|
(ns sample.handler
(:use
[compojure.core :only (defroutes GET POST PUT)]
[hiccup.page :only [html5]]
[compojure.handler :only [api]]
[ring.middleware.multipart-params :only [wrap-multipart-params]]
[ring.middleware.session.memory :only [memory-store]]
[ring.middleware.reload :only (wrap-reload)])
(:require [ring.adapter.jetty :as ring]
[compojure.handler :as handler]
[compojure.route :as route]
[ayah.client :as ayah]))
(def AYAH_PUBLISHER_KEY "<KEY>")
(def AYAH_SCORING_KEY "<KEY>")
(defn get-page-html
[show-form message params]
(html5
[:head
[:title "Are You A Human example"]
]
[:body
[:h1 "Are You A Human Example"]
[:div message]
(if show-form
[:form {:method "POST"}
[:div "First Name:" [:input {:name "firstname" :id "firstname"}]]
[:div "Last Name:" [:input {:name "lastname" :id "lastname"}]]
[:div (ayah/get-publisher-html AYAH_PUBLISHER_KEY)]
[:input {:type "submit" :value "Submit"}]
]
[:h2 "Thank you for submitting the form " (params :firstname) "."])
]))
(defroutes home-routes
(GET "/" []
(get-page-html true "" {}))
(POST "/" [:as {params :params}]
(let [score (ayah/score-result AYAH_SCORING_KEY (params :session_secret))]
(get-page-html (not score) (if score "You are a human!" "Try again.") params))))
(defroutes app-routes
home-routes
; (route/resources "/")
(route/not-found "Not Found"))
; http://mmcgrana.github.com/2010/07/develop-deploy-clojure-web-applications.html
(defn wrap-if [handler pred wrapper & args]
(if pred
(apply wrapper handler args)
handler))
(def app (-> #'app-routes
api
wrap-multipart-params
(wrap-if true ; development?
wrap-reload {:dirs ["src"]})))
(defn start [port]
(ring/run-jetty app {:port port :join? false}))
(defn -main []
(let [port (Integer/parseInt (or (System/getenv "PORT") "8080"))]
(start port)))
| true |
(ns sample.handler
(:use
[compojure.core :only (defroutes GET POST PUT)]
[hiccup.page :only [html5]]
[compojure.handler :only [api]]
[ring.middleware.multipart-params :only [wrap-multipart-params]]
[ring.middleware.session.memory :only [memory-store]]
[ring.middleware.reload :only (wrap-reload)])
(:require [ring.adapter.jetty :as ring]
[compojure.handler :as handler]
[compojure.route :as route]
[ayah.client :as ayah]))
(def AYAH_PUBLISHER_KEY "PI:KEY:<KEY>END_PI")
(def AYAH_SCORING_KEY "PI:KEY:<KEY>END_PI")
(defn get-page-html
[show-form message params]
(html5
[:head
[:title "Are You A Human example"]
]
[:body
[:h1 "Are You A Human Example"]
[:div message]
(if show-form
[:form {:method "POST"}
[:div "First Name:" [:input {:name "firstname" :id "firstname"}]]
[:div "Last Name:" [:input {:name "lastname" :id "lastname"}]]
[:div (ayah/get-publisher-html AYAH_PUBLISHER_KEY)]
[:input {:type "submit" :value "Submit"}]
]
[:h2 "Thank you for submitting the form " (params :firstname) "."])
]))
(defroutes home-routes
(GET "/" []
(get-page-html true "" {}))
(POST "/" [:as {params :params}]
(let [score (ayah/score-result AYAH_SCORING_KEY (params :session_secret))]
(get-page-html (not score) (if score "You are a human!" "Try again.") params))))
(defroutes app-routes
home-routes
; (route/resources "/")
(route/not-found "Not Found"))
; http://mmcgrana.github.com/2010/07/develop-deploy-clojure-web-applications.html
(defn wrap-if [handler pred wrapper & args]
(if pred
(apply wrapper handler args)
handler))
(def app (-> #'app-routes
api
wrap-multipart-params
(wrap-if true ; development?
wrap-reload {:dirs ["src"]})))
(defn start [port]
(ring/run-jetty app {:port port :join? false}))
(defn -main []
(let [port (Integer/parseInt (or (System/getenv "PORT") "8080"))]
(start port)))
|
[
{
"context": "ame}\" :get\n {:path-params {\"username\" username }\n :header-params {}\n :qu",
"end": 4070,
"score": 0.8845560550689697,
"start": 4062,
"tag": "USERNAME",
"value": "username"
},
{
"context": " :query-params {\"username\" username \"password\" password }\n :form-params {}\n :",
"end": 4892,
"score": 0.661838948726654,
"start": 4884,
"tag": "PASSWORD",
"value": "password"
}
] |
samples/client/petstore/clojure/src/open_api_petstore/api/user.clj
|
MalcolmScoffable/openapi-generator
| 11,868 |
(ns open-api-petstore.api.user
(:require [open-api-petstore.core :refer [call-api check-required-params with-collection-format *api-context*]]
[clojure.spec.alpha :as s]
[spec-tools.core :as st]
[orchestra.core :refer [defn-spec]]
[open-api-petstore.specs.tag :refer :all]
[open-api-petstore.specs.category :refer :all]
[open-api-petstore.specs.user :refer :all]
[open-api-petstore.specs.pet :refer :all]
[open-api-petstore.specs.order :refer :all]
)
(:import (java.io File)))
(defn-spec create-user-with-http-info any?
"Create user
This can only be done by the logged in user."
([] (create-user-with-http-info nil))
([{:keys [user]} (s/map-of keyword? any?)]
(call-api "/user" :post
{:path-params {}
:header-params {}
:query-params {}
:form-params {}
:body-param user
:content-types []
:accepts []
:auth-names []})))
(defn-spec create-user any?
"Create user
This can only be done by the logged in user."
([] (create-user nil))
([optional-params any?]
(let [res (:data (create-user-with-http-info optional-params))]
(if (:decode-models *api-context*)
(st/decode any? res st/string-transformer)
res))))
(defn-spec create-users-with-array-input-with-http-info any?
"Creates list of users with given input array"
([] (create-users-with-array-input-with-http-info nil))
([{:keys [user]} (s/map-of keyword? any?)]
(call-api "/user/createWithArray" :post
{:path-params {}
:header-params {}
:query-params {}
:form-params {}
:body-param user
:content-types []
:accepts []
:auth-names []})))
(defn-spec create-users-with-array-input any?
"Creates list of users with given input array"
([] (create-users-with-array-input nil))
([optional-params any?]
(let [res (:data (create-users-with-array-input-with-http-info optional-params))]
(if (:decode-models *api-context*)
(st/decode any? res st/string-transformer)
res))))
(defn-spec create-users-with-list-input-with-http-info any?
"Creates list of users with given input array"
([] (create-users-with-list-input-with-http-info nil))
([{:keys [user]} (s/map-of keyword? any?)]
(call-api "/user/createWithList" :post
{:path-params {}
:header-params {}
:query-params {}
:form-params {}
:body-param user
:content-types []
:accepts []
:auth-names []})))
(defn-spec create-users-with-list-input any?
"Creates list of users with given input array"
([] (create-users-with-list-input nil))
([optional-params any?]
(let [res (:data (create-users-with-list-input-with-http-info optional-params))]
(if (:decode-models *api-context*)
(st/decode any? res st/string-transformer)
res))))
(defn-spec delete-user-with-http-info any?
"Delete user
This can only be done by the logged in user."
[username string?]
(check-required-params username)
(call-api "/user/{username}" :delete
{:path-params {"username" username }
:header-params {}
:query-params {}
:form-params {}
:content-types []
:accepts []
:auth-names []}))
(defn-spec delete-user any?
"Delete user
This can only be done by the logged in user."
[username string?]
(let [res (:data (delete-user-with-http-info username))]
(if (:decode-models *api-context*)
(st/decode any? res st/string-transformer)
res)))
(defn-spec get-user-by-name-with-http-info any?
"Get user by user name"
[username string?]
(check-required-params username)
(call-api "/user/{username}" :get
{:path-params {"username" username }
:header-params {}
:query-params {}
:form-params {}
:content-types []
:accepts ["application/json" "application/xml"]
:auth-names []}))
(defn-spec get-user-by-name user-spec
"Get user by user name"
[username string?]
(let [res (:data (get-user-by-name-with-http-info username))]
(if (:decode-models *api-context*)
(st/decode user-spec res st/string-transformer)
res)))
(defn-spec login-user-with-http-info any?
"Logs user into the system"
([] (login-user-with-http-info nil))
([{:keys [username password]} (s/map-of keyword? any?)]
(call-api "/user/login" :get
{:path-params {}
:header-params {}
:query-params {"username" username "password" password }
:form-params {}
:content-types []
:accepts ["application/json" "application/xml"]
:auth-names []})))
(defn-spec login-user string?
"Logs user into the system"
([] (login-user nil))
([optional-params any?]
(let [res (:data (login-user-with-http-info optional-params))]
(if (:decode-models *api-context*)
(st/decode string? res st/string-transformer)
res))))
(defn-spec logout-user-with-http-info any?
"Logs out current logged in user session"
[]
(call-api "/user/logout" :get
{:path-params {}
:header-params {}
:query-params {}
:form-params {}
:content-types []
:accepts []
:auth-names []}))
(defn-spec logout-user any?
"Logs out current logged in user session"
[]
(let [res (:data (logout-user-with-http-info))]
(if (:decode-models *api-context*)
(st/decode any? res st/string-transformer)
res)))
(defn-spec update-user-with-http-info any?
"Updated user
This can only be done by the logged in user."
([username string?, ] (update-user-with-http-info username nil))
([username string?, {:keys [user]} (s/map-of keyword? any?)]
(check-required-params username)
(call-api "/user/{username}" :put
{:path-params {"username" username }
:header-params {}
:query-params {}
:form-params {}
:body-param user
:content-types []
:accepts []
:auth-names []})))
(defn-spec update-user any?
"Updated user
This can only be done by the logged in user."
([username string?, ] (update-user username nil))
([username string?, optional-params any?]
(let [res (:data (update-user-with-http-info username optional-params))]
(if (:decode-models *api-context*)
(st/decode any? res st/string-transformer)
res))))
|
23661
|
(ns open-api-petstore.api.user
(:require [open-api-petstore.core :refer [call-api check-required-params with-collection-format *api-context*]]
[clojure.spec.alpha :as s]
[spec-tools.core :as st]
[orchestra.core :refer [defn-spec]]
[open-api-petstore.specs.tag :refer :all]
[open-api-petstore.specs.category :refer :all]
[open-api-petstore.specs.user :refer :all]
[open-api-petstore.specs.pet :refer :all]
[open-api-petstore.specs.order :refer :all]
)
(:import (java.io File)))
(defn-spec create-user-with-http-info any?
"Create user
This can only be done by the logged in user."
([] (create-user-with-http-info nil))
([{:keys [user]} (s/map-of keyword? any?)]
(call-api "/user" :post
{:path-params {}
:header-params {}
:query-params {}
:form-params {}
:body-param user
:content-types []
:accepts []
:auth-names []})))
(defn-spec create-user any?
"Create user
This can only be done by the logged in user."
([] (create-user nil))
([optional-params any?]
(let [res (:data (create-user-with-http-info optional-params))]
(if (:decode-models *api-context*)
(st/decode any? res st/string-transformer)
res))))
(defn-spec create-users-with-array-input-with-http-info any?
"Creates list of users with given input array"
([] (create-users-with-array-input-with-http-info nil))
([{:keys [user]} (s/map-of keyword? any?)]
(call-api "/user/createWithArray" :post
{:path-params {}
:header-params {}
:query-params {}
:form-params {}
:body-param user
:content-types []
:accepts []
:auth-names []})))
(defn-spec create-users-with-array-input any?
"Creates list of users with given input array"
([] (create-users-with-array-input nil))
([optional-params any?]
(let [res (:data (create-users-with-array-input-with-http-info optional-params))]
(if (:decode-models *api-context*)
(st/decode any? res st/string-transformer)
res))))
(defn-spec create-users-with-list-input-with-http-info any?
"Creates list of users with given input array"
([] (create-users-with-list-input-with-http-info nil))
([{:keys [user]} (s/map-of keyword? any?)]
(call-api "/user/createWithList" :post
{:path-params {}
:header-params {}
:query-params {}
:form-params {}
:body-param user
:content-types []
:accepts []
:auth-names []})))
(defn-spec create-users-with-list-input any?
"Creates list of users with given input array"
([] (create-users-with-list-input nil))
([optional-params any?]
(let [res (:data (create-users-with-list-input-with-http-info optional-params))]
(if (:decode-models *api-context*)
(st/decode any? res st/string-transformer)
res))))
(defn-spec delete-user-with-http-info any?
"Delete user
This can only be done by the logged in user."
[username string?]
(check-required-params username)
(call-api "/user/{username}" :delete
{:path-params {"username" username }
:header-params {}
:query-params {}
:form-params {}
:content-types []
:accepts []
:auth-names []}))
(defn-spec delete-user any?
"Delete user
This can only be done by the logged in user."
[username string?]
(let [res (:data (delete-user-with-http-info username))]
(if (:decode-models *api-context*)
(st/decode any? res st/string-transformer)
res)))
(defn-spec get-user-by-name-with-http-info any?
"Get user by user name"
[username string?]
(check-required-params username)
(call-api "/user/{username}" :get
{:path-params {"username" username }
:header-params {}
:query-params {}
:form-params {}
:content-types []
:accepts ["application/json" "application/xml"]
:auth-names []}))
(defn-spec get-user-by-name user-spec
"Get user by user name"
[username string?]
(let [res (:data (get-user-by-name-with-http-info username))]
(if (:decode-models *api-context*)
(st/decode user-spec res st/string-transformer)
res)))
(defn-spec login-user-with-http-info any?
"Logs user into the system"
([] (login-user-with-http-info nil))
([{:keys [username password]} (s/map-of keyword? any?)]
(call-api "/user/login" :get
{:path-params {}
:header-params {}
:query-params {"username" username "password" <PASSWORD> }
:form-params {}
:content-types []
:accepts ["application/json" "application/xml"]
:auth-names []})))
(defn-spec login-user string?
"Logs user into the system"
([] (login-user nil))
([optional-params any?]
(let [res (:data (login-user-with-http-info optional-params))]
(if (:decode-models *api-context*)
(st/decode string? res st/string-transformer)
res))))
(defn-spec logout-user-with-http-info any?
"Logs out current logged in user session"
[]
(call-api "/user/logout" :get
{:path-params {}
:header-params {}
:query-params {}
:form-params {}
:content-types []
:accepts []
:auth-names []}))
(defn-spec logout-user any?
"Logs out current logged in user session"
[]
(let [res (:data (logout-user-with-http-info))]
(if (:decode-models *api-context*)
(st/decode any? res st/string-transformer)
res)))
(defn-spec update-user-with-http-info any?
"Updated user
This can only be done by the logged in user."
([username string?, ] (update-user-with-http-info username nil))
([username string?, {:keys [user]} (s/map-of keyword? any?)]
(check-required-params username)
(call-api "/user/{username}" :put
{:path-params {"username" username }
:header-params {}
:query-params {}
:form-params {}
:body-param user
:content-types []
:accepts []
:auth-names []})))
(defn-spec update-user any?
"Updated user
This can only be done by the logged in user."
([username string?, ] (update-user username nil))
([username string?, optional-params any?]
(let [res (:data (update-user-with-http-info username optional-params))]
(if (:decode-models *api-context*)
(st/decode any? res st/string-transformer)
res))))
| true |
(ns open-api-petstore.api.user
(:require [open-api-petstore.core :refer [call-api check-required-params with-collection-format *api-context*]]
[clojure.spec.alpha :as s]
[spec-tools.core :as st]
[orchestra.core :refer [defn-spec]]
[open-api-petstore.specs.tag :refer :all]
[open-api-petstore.specs.category :refer :all]
[open-api-petstore.specs.user :refer :all]
[open-api-petstore.specs.pet :refer :all]
[open-api-petstore.specs.order :refer :all]
)
(:import (java.io File)))
(defn-spec create-user-with-http-info any?
"Create user
This can only be done by the logged in user."
([] (create-user-with-http-info nil))
([{:keys [user]} (s/map-of keyword? any?)]
(call-api "/user" :post
{:path-params {}
:header-params {}
:query-params {}
:form-params {}
:body-param user
:content-types []
:accepts []
:auth-names []})))
(defn-spec create-user any?
"Create user
This can only be done by the logged in user."
([] (create-user nil))
([optional-params any?]
(let [res (:data (create-user-with-http-info optional-params))]
(if (:decode-models *api-context*)
(st/decode any? res st/string-transformer)
res))))
(defn-spec create-users-with-array-input-with-http-info any?
"Creates list of users with given input array"
([] (create-users-with-array-input-with-http-info nil))
([{:keys [user]} (s/map-of keyword? any?)]
(call-api "/user/createWithArray" :post
{:path-params {}
:header-params {}
:query-params {}
:form-params {}
:body-param user
:content-types []
:accepts []
:auth-names []})))
(defn-spec create-users-with-array-input any?
"Creates list of users with given input array"
([] (create-users-with-array-input nil))
([optional-params any?]
(let [res (:data (create-users-with-array-input-with-http-info optional-params))]
(if (:decode-models *api-context*)
(st/decode any? res st/string-transformer)
res))))
(defn-spec create-users-with-list-input-with-http-info any?
"Creates list of users with given input array"
([] (create-users-with-list-input-with-http-info nil))
([{:keys [user]} (s/map-of keyword? any?)]
(call-api "/user/createWithList" :post
{:path-params {}
:header-params {}
:query-params {}
:form-params {}
:body-param user
:content-types []
:accepts []
:auth-names []})))
(defn-spec create-users-with-list-input any?
"Creates list of users with given input array"
([] (create-users-with-list-input nil))
([optional-params any?]
(let [res (:data (create-users-with-list-input-with-http-info optional-params))]
(if (:decode-models *api-context*)
(st/decode any? res st/string-transformer)
res))))
(defn-spec delete-user-with-http-info any?
"Delete user
This can only be done by the logged in user."
[username string?]
(check-required-params username)
(call-api "/user/{username}" :delete
{:path-params {"username" username }
:header-params {}
:query-params {}
:form-params {}
:content-types []
:accepts []
:auth-names []}))
(defn-spec delete-user any?
"Delete user
This can only be done by the logged in user."
[username string?]
(let [res (:data (delete-user-with-http-info username))]
(if (:decode-models *api-context*)
(st/decode any? res st/string-transformer)
res)))
(defn-spec get-user-by-name-with-http-info any?
"Get user by user name"
[username string?]
(check-required-params username)
(call-api "/user/{username}" :get
{:path-params {"username" username }
:header-params {}
:query-params {}
:form-params {}
:content-types []
:accepts ["application/json" "application/xml"]
:auth-names []}))
(defn-spec get-user-by-name user-spec
"Get user by user name"
[username string?]
(let [res (:data (get-user-by-name-with-http-info username))]
(if (:decode-models *api-context*)
(st/decode user-spec res st/string-transformer)
res)))
(defn-spec login-user-with-http-info any?
"Logs user into the system"
([] (login-user-with-http-info nil))
([{:keys [username password]} (s/map-of keyword? any?)]
(call-api "/user/login" :get
{:path-params {}
:header-params {}
:query-params {"username" username "password" PI:PASSWORD:<PASSWORD>END_PI }
:form-params {}
:content-types []
:accepts ["application/json" "application/xml"]
:auth-names []})))
(defn-spec login-user string?
"Logs user into the system"
([] (login-user nil))
([optional-params any?]
(let [res (:data (login-user-with-http-info optional-params))]
(if (:decode-models *api-context*)
(st/decode string? res st/string-transformer)
res))))
(defn-spec logout-user-with-http-info any?
"Logs out current logged in user session"
[]
(call-api "/user/logout" :get
{:path-params {}
:header-params {}
:query-params {}
:form-params {}
:content-types []
:accepts []
:auth-names []}))
(defn-spec logout-user any?
"Logs out current logged in user session"
[]
(let [res (:data (logout-user-with-http-info))]
(if (:decode-models *api-context*)
(st/decode any? res st/string-transformer)
res)))
(defn-spec update-user-with-http-info any?
"Updated user
This can only be done by the logged in user."
([username string?, ] (update-user-with-http-info username nil))
([username string?, {:keys [user]} (s/map-of keyword? any?)]
(check-required-params username)
(call-api "/user/{username}" :put
{:path-params {"username" username }
:header-params {}
:query-params {}
:form-params {}
:body-param user
:content-types []
:accepts []
:auth-names []})))
(defn-spec update-user any?
"Updated user
This can only be done by the logged in user."
([username string?, ] (update-user username nil))
([username string?, optional-params any?]
(let [res (:data (update-user-with-http-info username optional-params))]
(if (:decode-models *api-context*)
(st/decode any? res st/string-transformer)
res))))
|
[
{
"context": "(with-noir\n (do\n (session/put! :username \"somebody\")\n (logged-in?) => true)))\n\n\n(fact \"User can",
"end": 524,
"score": 0.9995369911193848,
"start": 516,
"tag": "USERNAME",
"value": "somebody"
},
{
"context": " {:username \"admin\" :password \"password\"})]\n (get (:headers log",
"end": 907,
"score": 0.9992313385009766,
"start": 902,
"tag": "USERNAME",
"value": "admin"
},
{
"context": " {:username \"admin\" :password \"password\"})]\n (get (:headers login-result) \"Location\"",
"end": 928,
"score": 0.9989956617355347,
"start": 920,
"tag": "PASSWORD",
"value": "password"
}
] |
data/train/clojure/cfb3995bc7e94de37b397aae55f9041431965770logintest.clj
|
harshp8l/deep-learning-lang-detection
| 84 |
(ns memjore.views.logintest
(:require [noir.session :as session])
(:use [midje.sweet]
[noir.core :only [url-for]]
[noir.util.test]
[memjore.views.login]))
;;(fact "User sees username and password fields on / page"
;; (send-request "/") )
(fact "User is not logged in when session username is nil"
(with-noir
(do
(session/clear!)
(logged-in?) => false)))
(fact "User is logged in when session username is not nil"
(with-noir
(do
(session/put! :username "somebody")
(logged-in?) => true)))
(fact "User can log out"
(with-noir
(.contains (:body (send-request "/log-out"))
"Logged out") => true))
(fact "When user is logged in, user is redirected to manage home page"
(with-noir
(let [login-result (send-request [:post (url-for login-authentication)]
{:username "admin" :password "password"})]
(get (:headers login-result) "Location") => "/manage/home")))
|
20508
|
(ns memjore.views.logintest
(:require [noir.session :as session])
(:use [midje.sweet]
[noir.core :only [url-for]]
[noir.util.test]
[memjore.views.login]))
;;(fact "User sees username and password fields on / page"
;; (send-request "/") )
(fact "User is not logged in when session username is nil"
(with-noir
(do
(session/clear!)
(logged-in?) => false)))
(fact "User is logged in when session username is not nil"
(with-noir
(do
(session/put! :username "somebody")
(logged-in?) => true)))
(fact "User can log out"
(with-noir
(.contains (:body (send-request "/log-out"))
"Logged out") => true))
(fact "When user is logged in, user is redirected to manage home page"
(with-noir
(let [login-result (send-request [:post (url-for login-authentication)]
{:username "admin" :password "<PASSWORD>"})]
(get (:headers login-result) "Location") => "/manage/home")))
| true |
(ns memjore.views.logintest
(:require [noir.session :as session])
(:use [midje.sweet]
[noir.core :only [url-for]]
[noir.util.test]
[memjore.views.login]))
;;(fact "User sees username and password fields on / page"
;; (send-request "/") )
(fact "User is not logged in when session username is nil"
(with-noir
(do
(session/clear!)
(logged-in?) => false)))
(fact "User is logged in when session username is not nil"
(with-noir
(do
(session/put! :username "somebody")
(logged-in?) => true)))
(fact "User can log out"
(with-noir
(.contains (:body (send-request "/log-out"))
"Logged out") => true))
(fact "When user is logged in, user is redirected to manage home page"
(with-noir
(let [login-result (send-request [:post (url-for login-authentication)]
{:username "admin" :password "PI:PASSWORD:<PASSWORD>END_PI"})]
(get (:headers login-result) "Location") => "/manage/home")))
|
[
{
"context": "generated unique invite-id\"\n (let [email-1 \"[email protected]\"\n email-2 \"[email protected]\"\n ",
"end": 621,
"score": 0.9998940229415894,
"start": 600,
"tag": "EMAIL",
"value": "[email protected]"
},
{
"context": "l-1 \"[email protected]\"\n email-2 \"[email protected]\"\n expiry-days 7\n invite-i",
"end": 667,
"score": 0.9998326301574707,
"start": 645,
"tag": "EMAIL",
"value": "[email protected]"
},
{
"context": "can delete an invite\"\n (let [email-1 \"[email protected]\"\n expiry-days 7\n ",
"end": 1236,
"score": 0.9998990297317505,
"start": 1215,
"tag": "EMAIL",
"value": "[email protected]"
}
] |
test/stonecutter/test/db/invitations.clj
|
d-cent/stonecutter
| 39 |
(ns stonecutter.test.db.invitations
(:require [midje.sweet :refer :all]
[stonecutter.db.invitations :as i]
[stonecutter.db.mongo :as m]
[clauth.store :as cl-store]
[stonecutter.util.time :as time]
[stonecutter.test.util.time :as test-time]
[stonecutter.util.uuid :as uuid]))
(def invitation-store (m/create-memory-store))
(def test-clock (test-time/new-stub-clock 0))
(background
(before :facts (cl-store/reset-store! invitation-store)))
(facts "can store an invite with generated unique invite-id"
(let [email-1 "[email protected]"
email-2 "[email protected]"
expiry-days 7
invite-id-1 (i/generate-invite-id! invitation-store email-1 test-clock expiry-days uuid/uuid)
invite-id-2 (i/generate-invite-id! invitation-store email-2 test-clock expiry-days uuid/uuid)]
(i/fetch-by-id invitation-store invite-id-1) => (just {:email email-1 :invite-id invite-id-1
:_expiry (* expiry-days time/day)})
invite-id-1 =not=> invite-id-2))
(fact "can delete an invite"
(let [email-1 "[email protected]"
expiry-days 7
invite-id-1 (i/generate-invite-id! invitation-store email-1 test-clock expiry-days uuid/uuid)]
(i/fetch-by-id invitation-store invite-id-1) => (just {:email email-1 :invite-id invite-id-1
:_expiry (* expiry-days time/day)})
(i/remove-invite! invitation-store invite-id-1)
(i/fetch-by-id invitation-store invite-id-1) => nil))
|
54206
|
(ns stonecutter.test.db.invitations
(:require [midje.sweet :refer :all]
[stonecutter.db.invitations :as i]
[stonecutter.db.mongo :as m]
[clauth.store :as cl-store]
[stonecutter.util.time :as time]
[stonecutter.test.util.time :as test-time]
[stonecutter.util.uuid :as uuid]))
(def invitation-store (m/create-memory-store))
(def test-clock (test-time/new-stub-clock 0))
(background
(before :facts (cl-store/reset-store! invitation-store)))
(facts "can store an invite with generated unique invite-id"
(let [email-1 "<EMAIL>"
email-2 "<EMAIL>"
expiry-days 7
invite-id-1 (i/generate-invite-id! invitation-store email-1 test-clock expiry-days uuid/uuid)
invite-id-2 (i/generate-invite-id! invitation-store email-2 test-clock expiry-days uuid/uuid)]
(i/fetch-by-id invitation-store invite-id-1) => (just {:email email-1 :invite-id invite-id-1
:_expiry (* expiry-days time/day)})
invite-id-1 =not=> invite-id-2))
(fact "can delete an invite"
(let [email-1 "<EMAIL>"
expiry-days 7
invite-id-1 (i/generate-invite-id! invitation-store email-1 test-clock expiry-days uuid/uuid)]
(i/fetch-by-id invitation-store invite-id-1) => (just {:email email-1 :invite-id invite-id-1
:_expiry (* expiry-days time/day)})
(i/remove-invite! invitation-store invite-id-1)
(i/fetch-by-id invitation-store invite-id-1) => nil))
| true |
(ns stonecutter.test.db.invitations
(:require [midje.sweet :refer :all]
[stonecutter.db.invitations :as i]
[stonecutter.db.mongo :as m]
[clauth.store :as cl-store]
[stonecutter.util.time :as time]
[stonecutter.test.util.time :as test-time]
[stonecutter.util.uuid :as uuid]))
(def invitation-store (m/create-memory-store))
(def test-clock (test-time/new-stub-clock 0))
(background
(before :facts (cl-store/reset-store! invitation-store)))
(facts "can store an invite with generated unique invite-id"
(let [email-1 "PI:EMAIL:<EMAIL>END_PI"
email-2 "PI:EMAIL:<EMAIL>END_PI"
expiry-days 7
invite-id-1 (i/generate-invite-id! invitation-store email-1 test-clock expiry-days uuid/uuid)
invite-id-2 (i/generate-invite-id! invitation-store email-2 test-clock expiry-days uuid/uuid)]
(i/fetch-by-id invitation-store invite-id-1) => (just {:email email-1 :invite-id invite-id-1
:_expiry (* expiry-days time/day)})
invite-id-1 =not=> invite-id-2))
(fact "can delete an invite"
(let [email-1 "PI:EMAIL:<EMAIL>END_PI"
expiry-days 7
invite-id-1 (i/generate-invite-id! invitation-store email-1 test-clock expiry-days uuid/uuid)]
(i/fetch-by-id invitation-store invite-id-1) => (just {:email email-1 :invite-id invite-id-1
:_expiry (* expiry-days time/day)})
(i/remove-invite! invitation-store invite-id-1)
(i/fetch-by-id invitation-store invite-id-1) => nil))
|
[
{
"context": "]\n\n (test-range-query sophia \"test\" :key \"ABC\" :search-type :prefix))\n => []\n )\n\n",
"end": 6237,
"score": 0.5510467290878296,
"start": 6234,
"tag": "KEY",
"value": "ABC"
},
{
"context": "\n\n (test-range-query sophia \"test\" :key \"1000\"\n :search-type :index-sc",
"end": 7173,
"score": 0.5766990780830383,
"start": 7171,
"tag": "KEY",
"value": "00"
},
{
"context": "\n\n (test-range-query sophia \"test\" :key \"1000\"\n :search-type :index-sc",
"end": 7490,
"score": 0.6386842727661133,
"start": 7488,
"tag": "KEY",
"value": "00"
},
{
"context": "]\n\n (test-range-query sophia \"test\" :key \"ABC\"\n :search-type :index-sca",
"end": 7810,
"score": 0.9196354746818542,
"start": 7807,
"tag": "KEY",
"value": "ABC"
},
{
"context": "]\n\n (test-range-query sophia \"test\" :key \"1000\"\n :search-type :index-sca",
"end": 8495,
"score": 0.9678499102592468,
"start": 8491,
"tag": "KEY",
"value": "1000"
},
{
"context": "]\n\n (test-range-query sophia \"test\" :key \"1000\"\n :search-type :index-sca",
"end": 8877,
"score": 0.9489258527755737,
"start": 8873,
"tag": "KEY",
"value": "1000"
}
] |
test/com/brunobonacci/sophia/core_test.clj
|
BrunoBonacci/clj-sophia
| 16 |
(ns com.brunobonacci.sophia.core-test
(:require [clojure.pprint :refer [pprint]]
[clojure.test.check :as tc]
[clojure.test.check.generators :as gen]
[clojure.test.check.properties :as prop]
[midje.sweet :refer :all]
[com.brunobonacci.sophia :as db]
[clojure.string :as str]
[clojure.java.io :as io]
[safely.core :refer [safely]]))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; ;;
;; ----==| U T I L I T I E S |==---- ;;
;; ;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn uuid []
(str (java.util.UUID/randomUUID)))
(defn rand-db-name [name]
(str "/tmp/sophia-" name "-" (uuid)))
(defn rand-db [name]
(db/sophia {:sophia.path (rand-db-name name) :dbs [name]}))
(defn rm-fr
[f & {:keys [force] :or {force true}}]
(let [^java.io.File f (io/file f)]
(if (.isDirectory f)
(run! #(rm-fr % :force force) (.listFiles f)))
(io/delete-file f force)))
(defmacro with-test-database
"bindings => [name init ...]
Evaluates body in a try expression with names bound to the values
of the inits, and a finally clause that calls (.close name) on each
name in reverse order."
[bindings & body]
(assert (vector? bindings) "a vector for its binding")
(assert (even? (count bindings)) "an even number of forms in binding vector")
(cond
(= (count bindings) 0) `(do ~@body)
(symbol? (bindings 0)) `(let ~(subvec bindings 0 2)
(try
(with-test-database ~(subvec bindings 2) ~@body)
(finally
(when-let [path# (-> ~(bindings 0) :config :sophia.path)]
(rm-fr path#)))))
:else (throw (IllegalArgumentException.
"with-test-database only allows Symbols in bindings"))))
(def sequecen-data
(->>
(for [x (range 3)
y (range 10)]
(format "%d%03d" x y))
(map (juxt identity identity))))
(defn load-seqence-data [sophia db]
(doseq [[k v] sequecen-data]
(db/set-value! sophia db k v)))
(defmacro test-range-query
[sophia db & opts]
`(with-open [^java.io.Closeable cursor# (db/cursor ~sophia)]
(doall (db/range-query cursor# ~db ~@opts))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; ;;
;; ----==| T E S T . C H E C K |==---- ;;
;; ;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(def num-tests
(or
(println "TC_NUM_TESTS=" (or (System/getenv "TC_NUM_TESTS") 100))
(Integer/getInteger "test-check.num-tests")
(some-> (System/getenv "TC_NUM_TESTS") Integer/parseInt)
100))
;; workaround for https://dev.clojure.org/jira/browse/CLJ-2334
(def any-non-NaN
"A recursive generator that will generate many different, often nested, values"
(gen/recursive-gen gen/container-type
(gen/one-of [(gen/double* {:NaN? false})
gen/int gen/large-integer
gen/char gen/string gen/ratio
gen/boolean gen/keyword gen/keyword-ns gen/symbol
gen/symbol-ns gen/uuid])))
(def any-value-gen
(gen/frequency [[99 any-non-NaN] [1 (gen/return nil)]]))
(with-test-database [sophia (rand-db "test")]
(let [test
(tc/quick-check
num-tests
(prop/for-all
[key (gen/not-empty gen/string-ascii)
value any-value-gen]
;;(println (format "Testing SET/GET '%s' -> '%s'" key value))
;; set the key
(db/set-value! sophia "test" key value)
;; get the value and check the result
(= value (db/get-value sophia "test" key)))
:max-size 100)]
(pprint test)
(fact "set then get symmetry"
(:result test) => true)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; ;;
;; ----==| R A N G E - Q U E R Y |==---- ;;
;; ;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(facts "range-query - on empty db"
(with-test-database [sophia (rand-db "test")]
(test-range-query sophia "test")) => []
)
(facts "range-query - cursor closed"
(with-test-database [sophia (rand-db "test")
_ (load-seqence-data sophia "test")]
(count
(with-open [^java.io.Closeable cursor (db/cursor sophia)]
(db/range-query cursor "test"))))
=> (throws #"Cursor already closed.")
)
(facts "range-query - full index scan"
(with-test-database [sophia (rand-db "test")
_ (load-seqence-data sophia "test")]
(test-range-query sophia "test")) => sequecen-data
)
(facts "range-query - full index scan - descending order"
(with-test-database [sophia (rand-db "test")
_ (load-seqence-data sophia "test")]
(test-range-query sophia "test" :order :desc))
=> (reverse sequecen-data)
)
(facts "range-query - prefix"
(with-test-database [sophia (rand-db "test")
_ (load-seqence-data sophia "test")]
(test-range-query sophia "test" :key "1" :search-type :prefix))
=> (->> sequecen-data (filter #(str/starts-with? (first %) "1")))
)
(facts "range-query - non matching prefix"
(with-test-database [sophia (rand-db "test")
_ (load-seqence-data sophia "test")]
(test-range-query sophia "test" :key "ABC" :search-type :prefix))
=> []
)
(facts "range-query - prefix - descending order not working with prefix"
(with-test-database [sophia (rand-db "test")
_ (load-seqence-data sophia "test")]
(test-range-query sophia "test" :key "1" :search-type :prefix
:order :desc))
=> (->> sequecen-data (filter #(str/starts-with? (first %) "1")))
)
(facts "range-query - index scan prefix"
(with-test-database [sophia (rand-db "test")
_ (load-seqence-data sophia "test")]
(test-range-query sophia "test" :key "1"
:search-type :index-scan-inclusive))
=> (->> sequecen-data (drop 10))
)
(facts "range-query - index scan inclusive"
(with-test-database [sophia (rand-db "test")
_ (load-seqence-data sophia "test")]
(test-range-query sophia "test" :key "1000"
:search-type :index-scan-inclusive))
=> (->> sequecen-data (drop 10))
)
(facts "range-query - index scan exclusive"
(with-test-database [sophia (rand-db "test")
_ (load-seqence-data sophia "test")]
(test-range-query sophia "test" :key "1000"
:search-type :index-scan-exclusive))
=> (->> sequecen-data (drop 11))
)
(facts "range-query - index scan not matching"
(with-test-database [sophia (rand-db "test")
_ (load-seqence-data sophia "test")]
(test-range-query sophia "test" :key "ABC"
:search-type :index-scan-inclusive))
=> []
)
(facts "range-query - index scan prefix - descending order"
(with-test-database [sophia (rand-db "test")
_ (load-seqence-data sophia "test")]
(test-range-query sophia "test" :key "1"
:search-type :index-scan-inclusive
:order :desc))
=> (->> sequecen-data (take 10) reverse)
)
(facts "range-query - index scan inclusive - descending order"
(with-test-database [sophia (rand-db "test")
_ (load-seqence-data sophia "test")]
(test-range-query sophia "test" :key "1000"
:search-type :index-scan-inclusive
:order :desc))
=> (->> sequecen-data (take 11) reverse)
)
(facts "range-query - index scan exclusive - descending order"
(with-test-database [sophia (rand-db "test")
_ (load-seqence-data sophia "test")]
(test-range-query sophia "test" :key "1000"
:search-type :index-scan-exclusive
:order :desc))
=> (->> sequecen-data (take 10) reverse)
)
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; ;;
;; ----==| T R A N S A C T I O N S |==---- ;;
;; ;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(facts "transaction isolation - write inside a transaction are not visible from outside the tx."
(with-test-database [sophia (rand-db "test")]
(db/set-value! sophia "test" "key1" "value1")
(db/with-transaction [tx (db/begin-transaction sophia)]
;; can get pre-existing keys
(db/get-value tx "test" "key1") => "value1"
;; set a value within the tx
(db/set-value! tx "test" "key2" "value2")
;; can read it back
(db/get-value tx "test" "key2") => "value2"
;; but not visible outside tx
(db/get-value sophia "test" "key2") => nil
)))
(facts "transaction isolation - write inside a transaction are not visible from outside the tx.
including updates"
(with-test-database [sophia (rand-db "test")]
(db/set-value! sophia "test" "key1" "value1")
(db/with-transaction [tx (db/begin-transaction sophia)]
;; can get pre-existing keys
(db/get-value tx "test" "key1") => "value1"
;; set a value within the tx
(db/set-value! tx "test" "key1" "value2")
;; can read it back
(db/get-value tx "test" "key1") => "value2"
;; but not visible outside tx
(db/get-value sophia "test" "key1") => "value1"
)))
(facts "transaction isolation - write inside a transaction are not visible from outside the tx.
including deletes"
(with-test-database [sophia (rand-db "test")]
(db/set-value! sophia "test" "key1" "value1")
(db/with-transaction [tx (db/begin-transaction sophia)]
;; can get pre-existing keys
(db/get-value tx "test" "key1") => "value1"
;; set a value within the tx
(db/delete-key! tx "test" "key1")
;; can read it back
(db/get-value tx "test" "key1") => nil
;; but not visible outside tx
(db/get-value sophia "test" "key1") => "value1"
)))
(facts "transaction isolation - write outside a transaction are not visible from inside the tx."
(with-test-database [sophia (rand-db "test")]
(db/set-value! sophia "test" "key1" "value1")
(db/with-transaction [tx (db/begin-transaction sophia)]
;; can get pre-existing keys
(db/get-value tx "test" "key1") => "value1"
(db/get-value tx "test" "key2") => nil
;; set a value outside the tx
(db/set-value! sophia "test" "key2" "value2")
;; can read it back
(db/get-value sophia "test" "key2") => "value2"
;; but not visible outside tx
(db/get-value tx "test" "key2") => nil
)))
(facts "transaction isolation - write outside a transaction are not visible from inside the tx.
including updates"
(with-test-database [sophia (rand-db "test")]
(db/set-value! sophia "test" "key1" "value1")
(db/with-transaction [tx (db/begin-transaction sophia)]
;; can get pre-existing keys
(db/get-value tx "test" "key1") => "value1"
;; set a value outside the tx
(db/set-value! sophia "test" "key1" "value2")
;; can read it back
(db/get-value sophia "test" "key1") => "value2"
;; but not visible inside tx
(db/get-value tx "test" "key1") => "value1"
)))
(facts "transaction isolation - write outside a transaction are not visible from inside the tx.
including deletes"
(with-test-database [sophia (rand-db "test")]
(db/set-value! sophia "test" "key1" "value1")
(db/with-transaction [tx (db/begin-transaction sophia)]
;; can get pre-existing keys
(db/get-value tx "test" "key1") => "value1"
;; set a value outside the tx
(db/delete-key! sophia "test" "key1")
;; can read it back
(db/get-value sophia "test" "key1") => nil
;; but not visible inside tx
(db/get-value tx "test" "key1") => "value1"
)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; ;;
;; ----==| T R A N S A C T ! A N D F R I E N D S |==---- ;;
;; ;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(facts "transact! - executes the function with a transaction and
attempts to commit at the end. If it fails because the
transaction is aborted for concurrent modification it will
retry the transaction after a while."
(with-test-database [sophia (rand-db "test")]
(db/set-value! sophia "test" "stats" {:counter 0})
(db/transact! sophia
(fn [tx]
(let [u (db/get-value tx "test" "stats")]
(when u
(db/set-value! tx "test" "stats"
(update u :counter inc))))))
=> {:counter 1}
))
(facts "transact! - concurrent update should be retried"
(with-test-database [sophia (rand-db "test")]
(let [abort (atom false)]
(db/set-value! sophia "test" "stats" {:counter 0})
(dotimes [z 3]
(future
(safely
(dotimes [_ 1000]
(db/transact! sophia
(fn [tx]
(when-not @abort
(let [u (db/get-value tx "test" "stats")]
(when u
(db/set-value! tx "test" "stats"
(update u :counter inc))))))))
:on-error
:default nil)))
;; wait a bit
(loop [i 20]
(let [cnt (:counter (db/get-value sophia "test" "stats"))]
(when-not (or (= i 0) (= cnt 3000))
(println "Waiting 1sec... [" cnt "]")
(safely.core/sleep 1000)
(recur (dec i)))))
(reset! abort true)
;; all concurrent updates should now be ok
(db/get-value sophia "test" "stats") => {:counter 3000}
)))
(facts "update-value! executes the function of the
result of the of the key and saves the value
back wrapped in a transact!.
If fails it will retry.
"
(with-test-database [sophia (rand-db "test")]
(db/set-value! sophia "test" "stats" {:counter 0})
(db/update-value! sophia "test" "stats"
update :counter inc)
=> {:counter 1}
))
(facts "update-value! If the key is not present
the function is not executed and nil is returned!
"
(with-test-database [sophia (rand-db "test")]
(db/update-value! sophia "test" "stats"
assoc :counter 1)
=> nil))
(facts "update-value! If the key is not present
the function is not executed and nil is returned!
"
(with-test-database [sophia (rand-db "test")]
(db/upsert-value! sophia "test" "stats"
update :counter (fnil inc 10)))
=> {:counter 11})
(facts "upsert-value! behaves like update if the value
already exists.
"
(with-test-database [sophia (rand-db "test")]
(db/set-value! sophia "test" "stats" {:counter 0})
(db/upsert-value! sophia "test" "stats"
update :counter inc)
=> {:counter 1}
))
|
16336
|
(ns com.brunobonacci.sophia.core-test
(:require [clojure.pprint :refer [pprint]]
[clojure.test.check :as tc]
[clojure.test.check.generators :as gen]
[clojure.test.check.properties :as prop]
[midje.sweet :refer :all]
[com.brunobonacci.sophia :as db]
[clojure.string :as str]
[clojure.java.io :as io]
[safely.core :refer [safely]]))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; ;;
;; ----==| U T I L I T I E S |==---- ;;
;; ;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn uuid []
(str (java.util.UUID/randomUUID)))
(defn rand-db-name [name]
(str "/tmp/sophia-" name "-" (uuid)))
(defn rand-db [name]
(db/sophia {:sophia.path (rand-db-name name) :dbs [name]}))
(defn rm-fr
[f & {:keys [force] :or {force true}}]
(let [^java.io.File f (io/file f)]
(if (.isDirectory f)
(run! #(rm-fr % :force force) (.listFiles f)))
(io/delete-file f force)))
(defmacro with-test-database
"bindings => [name init ...]
Evaluates body in a try expression with names bound to the values
of the inits, and a finally clause that calls (.close name) on each
name in reverse order."
[bindings & body]
(assert (vector? bindings) "a vector for its binding")
(assert (even? (count bindings)) "an even number of forms in binding vector")
(cond
(= (count bindings) 0) `(do ~@body)
(symbol? (bindings 0)) `(let ~(subvec bindings 0 2)
(try
(with-test-database ~(subvec bindings 2) ~@body)
(finally
(when-let [path# (-> ~(bindings 0) :config :sophia.path)]
(rm-fr path#)))))
:else (throw (IllegalArgumentException.
"with-test-database only allows Symbols in bindings"))))
(def sequecen-data
(->>
(for [x (range 3)
y (range 10)]
(format "%d%03d" x y))
(map (juxt identity identity))))
(defn load-seqence-data [sophia db]
(doseq [[k v] sequecen-data]
(db/set-value! sophia db k v)))
(defmacro test-range-query
[sophia db & opts]
`(with-open [^java.io.Closeable cursor# (db/cursor ~sophia)]
(doall (db/range-query cursor# ~db ~@opts))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; ;;
;; ----==| T E S T . C H E C K |==---- ;;
;; ;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(def num-tests
(or
(println "TC_NUM_TESTS=" (or (System/getenv "TC_NUM_TESTS") 100))
(Integer/getInteger "test-check.num-tests")
(some-> (System/getenv "TC_NUM_TESTS") Integer/parseInt)
100))
;; workaround for https://dev.clojure.org/jira/browse/CLJ-2334
(def any-non-NaN
"A recursive generator that will generate many different, often nested, values"
(gen/recursive-gen gen/container-type
(gen/one-of [(gen/double* {:NaN? false})
gen/int gen/large-integer
gen/char gen/string gen/ratio
gen/boolean gen/keyword gen/keyword-ns gen/symbol
gen/symbol-ns gen/uuid])))
(def any-value-gen
(gen/frequency [[99 any-non-NaN] [1 (gen/return nil)]]))
(with-test-database [sophia (rand-db "test")]
(let [test
(tc/quick-check
num-tests
(prop/for-all
[key (gen/not-empty gen/string-ascii)
value any-value-gen]
;;(println (format "Testing SET/GET '%s' -> '%s'" key value))
;; set the key
(db/set-value! sophia "test" key value)
;; get the value and check the result
(= value (db/get-value sophia "test" key)))
:max-size 100)]
(pprint test)
(fact "set then get symmetry"
(:result test) => true)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; ;;
;; ----==| R A N G E - Q U E R Y |==---- ;;
;; ;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(facts "range-query - on empty db"
(with-test-database [sophia (rand-db "test")]
(test-range-query sophia "test")) => []
)
(facts "range-query - cursor closed"
(with-test-database [sophia (rand-db "test")
_ (load-seqence-data sophia "test")]
(count
(with-open [^java.io.Closeable cursor (db/cursor sophia)]
(db/range-query cursor "test"))))
=> (throws #"Cursor already closed.")
)
(facts "range-query - full index scan"
(with-test-database [sophia (rand-db "test")
_ (load-seqence-data sophia "test")]
(test-range-query sophia "test")) => sequecen-data
)
(facts "range-query - full index scan - descending order"
(with-test-database [sophia (rand-db "test")
_ (load-seqence-data sophia "test")]
(test-range-query sophia "test" :order :desc))
=> (reverse sequecen-data)
)
(facts "range-query - prefix"
(with-test-database [sophia (rand-db "test")
_ (load-seqence-data sophia "test")]
(test-range-query sophia "test" :key "1" :search-type :prefix))
=> (->> sequecen-data (filter #(str/starts-with? (first %) "1")))
)
(facts "range-query - non matching prefix"
(with-test-database [sophia (rand-db "test")
_ (load-seqence-data sophia "test")]
(test-range-query sophia "test" :key "<KEY>" :search-type :prefix))
=> []
)
(facts "range-query - prefix - descending order not working with prefix"
(with-test-database [sophia (rand-db "test")
_ (load-seqence-data sophia "test")]
(test-range-query sophia "test" :key "1" :search-type :prefix
:order :desc))
=> (->> sequecen-data (filter #(str/starts-with? (first %) "1")))
)
(facts "range-query - index scan prefix"
(with-test-database [sophia (rand-db "test")
_ (load-seqence-data sophia "test")]
(test-range-query sophia "test" :key "1"
:search-type :index-scan-inclusive))
=> (->> sequecen-data (drop 10))
)
(facts "range-query - index scan inclusive"
(with-test-database [sophia (rand-db "test")
_ (load-seqence-data sophia "test")]
(test-range-query sophia "test" :key "1<KEY>0"
:search-type :index-scan-inclusive))
=> (->> sequecen-data (drop 10))
)
(facts "range-query - index scan exclusive"
(with-test-database [sophia (rand-db "test")
_ (load-seqence-data sophia "test")]
(test-range-query sophia "test" :key "1<KEY>0"
:search-type :index-scan-exclusive))
=> (->> sequecen-data (drop 11))
)
(facts "range-query - index scan not matching"
(with-test-database [sophia (rand-db "test")
_ (load-seqence-data sophia "test")]
(test-range-query sophia "test" :key "<KEY>"
:search-type :index-scan-inclusive))
=> []
)
(facts "range-query - index scan prefix - descending order"
(with-test-database [sophia (rand-db "test")
_ (load-seqence-data sophia "test")]
(test-range-query sophia "test" :key "1"
:search-type :index-scan-inclusive
:order :desc))
=> (->> sequecen-data (take 10) reverse)
)
(facts "range-query - index scan inclusive - descending order"
(with-test-database [sophia (rand-db "test")
_ (load-seqence-data sophia "test")]
(test-range-query sophia "test" :key "<KEY>"
:search-type :index-scan-inclusive
:order :desc))
=> (->> sequecen-data (take 11) reverse)
)
(facts "range-query - index scan exclusive - descending order"
(with-test-database [sophia (rand-db "test")
_ (load-seqence-data sophia "test")]
(test-range-query sophia "test" :key "<KEY>"
:search-type :index-scan-exclusive
:order :desc))
=> (->> sequecen-data (take 10) reverse)
)
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; ;;
;; ----==| T R A N S A C T I O N S |==---- ;;
;; ;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(facts "transaction isolation - write inside a transaction are not visible from outside the tx."
(with-test-database [sophia (rand-db "test")]
(db/set-value! sophia "test" "key1" "value1")
(db/with-transaction [tx (db/begin-transaction sophia)]
;; can get pre-existing keys
(db/get-value tx "test" "key1") => "value1"
;; set a value within the tx
(db/set-value! tx "test" "key2" "value2")
;; can read it back
(db/get-value tx "test" "key2") => "value2"
;; but not visible outside tx
(db/get-value sophia "test" "key2") => nil
)))
(facts "transaction isolation - write inside a transaction are not visible from outside the tx.
including updates"
(with-test-database [sophia (rand-db "test")]
(db/set-value! sophia "test" "key1" "value1")
(db/with-transaction [tx (db/begin-transaction sophia)]
;; can get pre-existing keys
(db/get-value tx "test" "key1") => "value1"
;; set a value within the tx
(db/set-value! tx "test" "key1" "value2")
;; can read it back
(db/get-value tx "test" "key1") => "value2"
;; but not visible outside tx
(db/get-value sophia "test" "key1") => "value1"
)))
(facts "transaction isolation - write inside a transaction are not visible from outside the tx.
including deletes"
(with-test-database [sophia (rand-db "test")]
(db/set-value! sophia "test" "key1" "value1")
(db/with-transaction [tx (db/begin-transaction sophia)]
;; can get pre-existing keys
(db/get-value tx "test" "key1") => "value1"
;; set a value within the tx
(db/delete-key! tx "test" "key1")
;; can read it back
(db/get-value tx "test" "key1") => nil
;; but not visible outside tx
(db/get-value sophia "test" "key1") => "value1"
)))
(facts "transaction isolation - write outside a transaction are not visible from inside the tx."
(with-test-database [sophia (rand-db "test")]
(db/set-value! sophia "test" "key1" "value1")
(db/with-transaction [tx (db/begin-transaction sophia)]
;; can get pre-existing keys
(db/get-value tx "test" "key1") => "value1"
(db/get-value tx "test" "key2") => nil
;; set a value outside the tx
(db/set-value! sophia "test" "key2" "value2")
;; can read it back
(db/get-value sophia "test" "key2") => "value2"
;; but not visible outside tx
(db/get-value tx "test" "key2") => nil
)))
(facts "transaction isolation - write outside a transaction are not visible from inside the tx.
including updates"
(with-test-database [sophia (rand-db "test")]
(db/set-value! sophia "test" "key1" "value1")
(db/with-transaction [tx (db/begin-transaction sophia)]
;; can get pre-existing keys
(db/get-value tx "test" "key1") => "value1"
;; set a value outside the tx
(db/set-value! sophia "test" "key1" "value2")
;; can read it back
(db/get-value sophia "test" "key1") => "value2"
;; but not visible inside tx
(db/get-value tx "test" "key1") => "value1"
)))
(facts "transaction isolation - write outside a transaction are not visible from inside the tx.
including deletes"
(with-test-database [sophia (rand-db "test")]
(db/set-value! sophia "test" "key1" "value1")
(db/with-transaction [tx (db/begin-transaction sophia)]
;; can get pre-existing keys
(db/get-value tx "test" "key1") => "value1"
;; set a value outside the tx
(db/delete-key! sophia "test" "key1")
;; can read it back
(db/get-value sophia "test" "key1") => nil
;; but not visible inside tx
(db/get-value tx "test" "key1") => "value1"
)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; ;;
;; ----==| T R A N S A C T ! A N D F R I E N D S |==---- ;;
;; ;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(facts "transact! - executes the function with a transaction and
attempts to commit at the end. If it fails because the
transaction is aborted for concurrent modification it will
retry the transaction after a while."
(with-test-database [sophia (rand-db "test")]
(db/set-value! sophia "test" "stats" {:counter 0})
(db/transact! sophia
(fn [tx]
(let [u (db/get-value tx "test" "stats")]
(when u
(db/set-value! tx "test" "stats"
(update u :counter inc))))))
=> {:counter 1}
))
(facts "transact! - concurrent update should be retried"
(with-test-database [sophia (rand-db "test")]
(let [abort (atom false)]
(db/set-value! sophia "test" "stats" {:counter 0})
(dotimes [z 3]
(future
(safely
(dotimes [_ 1000]
(db/transact! sophia
(fn [tx]
(when-not @abort
(let [u (db/get-value tx "test" "stats")]
(when u
(db/set-value! tx "test" "stats"
(update u :counter inc))))))))
:on-error
:default nil)))
;; wait a bit
(loop [i 20]
(let [cnt (:counter (db/get-value sophia "test" "stats"))]
(when-not (or (= i 0) (= cnt 3000))
(println "Waiting 1sec... [" cnt "]")
(safely.core/sleep 1000)
(recur (dec i)))))
(reset! abort true)
;; all concurrent updates should now be ok
(db/get-value sophia "test" "stats") => {:counter 3000}
)))
(facts "update-value! executes the function of the
result of the of the key and saves the value
back wrapped in a transact!.
If fails it will retry.
"
(with-test-database [sophia (rand-db "test")]
(db/set-value! sophia "test" "stats" {:counter 0})
(db/update-value! sophia "test" "stats"
update :counter inc)
=> {:counter 1}
))
(facts "update-value! If the key is not present
the function is not executed and nil is returned!
"
(with-test-database [sophia (rand-db "test")]
(db/update-value! sophia "test" "stats"
assoc :counter 1)
=> nil))
(facts "update-value! If the key is not present
the function is not executed and nil is returned!
"
(with-test-database [sophia (rand-db "test")]
(db/upsert-value! sophia "test" "stats"
update :counter (fnil inc 10)))
=> {:counter 11})
(facts "upsert-value! behaves like update if the value
already exists.
"
(with-test-database [sophia (rand-db "test")]
(db/set-value! sophia "test" "stats" {:counter 0})
(db/upsert-value! sophia "test" "stats"
update :counter inc)
=> {:counter 1}
))
| true |
(ns com.brunobonacci.sophia.core-test
(:require [clojure.pprint :refer [pprint]]
[clojure.test.check :as tc]
[clojure.test.check.generators :as gen]
[clojure.test.check.properties :as prop]
[midje.sweet :refer :all]
[com.brunobonacci.sophia :as db]
[clojure.string :as str]
[clojure.java.io :as io]
[safely.core :refer [safely]]))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; ;;
;; ----==| U T I L I T I E S |==---- ;;
;; ;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn uuid []
(str (java.util.UUID/randomUUID)))
(defn rand-db-name [name]
(str "/tmp/sophia-" name "-" (uuid)))
(defn rand-db [name]
(db/sophia {:sophia.path (rand-db-name name) :dbs [name]}))
(defn rm-fr
[f & {:keys [force] :or {force true}}]
(let [^java.io.File f (io/file f)]
(if (.isDirectory f)
(run! #(rm-fr % :force force) (.listFiles f)))
(io/delete-file f force)))
(defmacro with-test-database
"bindings => [name init ...]
Evaluates body in a try expression with names bound to the values
of the inits, and a finally clause that calls (.close name) on each
name in reverse order."
[bindings & body]
(assert (vector? bindings) "a vector for its binding")
(assert (even? (count bindings)) "an even number of forms in binding vector")
(cond
(= (count bindings) 0) `(do ~@body)
(symbol? (bindings 0)) `(let ~(subvec bindings 0 2)
(try
(with-test-database ~(subvec bindings 2) ~@body)
(finally
(when-let [path# (-> ~(bindings 0) :config :sophia.path)]
(rm-fr path#)))))
:else (throw (IllegalArgumentException.
"with-test-database only allows Symbols in bindings"))))
(def sequecen-data
(->>
(for [x (range 3)
y (range 10)]
(format "%d%03d" x y))
(map (juxt identity identity))))
(defn load-seqence-data [sophia db]
(doseq [[k v] sequecen-data]
(db/set-value! sophia db k v)))
(defmacro test-range-query
[sophia db & opts]
`(with-open [^java.io.Closeable cursor# (db/cursor ~sophia)]
(doall (db/range-query cursor# ~db ~@opts))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; ;;
;; ----==| T E S T . C H E C K |==---- ;;
;; ;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(def num-tests
(or
(println "TC_NUM_TESTS=" (or (System/getenv "TC_NUM_TESTS") 100))
(Integer/getInteger "test-check.num-tests")
(some-> (System/getenv "TC_NUM_TESTS") Integer/parseInt)
100))
;; workaround for https://dev.clojure.org/jira/browse/CLJ-2334
(def any-non-NaN
"A recursive generator that will generate many different, often nested, values"
(gen/recursive-gen gen/container-type
(gen/one-of [(gen/double* {:NaN? false})
gen/int gen/large-integer
gen/char gen/string gen/ratio
gen/boolean gen/keyword gen/keyword-ns gen/symbol
gen/symbol-ns gen/uuid])))
(def any-value-gen
(gen/frequency [[99 any-non-NaN] [1 (gen/return nil)]]))
;; Property: for any non-empty ASCII key and any generated value,
;; storing the value and reading it back yields an equal value.
(with-test-database [sophia (rand-db "test")]
  (let [test
        (tc/quick-check
         num-tests
         (prop/for-all
          [key   (gen/not-empty gen/string-ascii)
           value any-value-gen]
          ;;(println (format "Testing SET/GET '%s' -> '%s'" key value))
          ;; set the key
          (db/set-value! sophia "test" key value)
          ;; get the value and check the result
          (= value (db/get-value sophia "test" key)))
         :max-size 100)]
    ;; Print the full quick-check report before asserting, so a failure
    ;; leaves the shrunk counter-example visible in the test output.
    (pprint test)
    (fact "set then get symmetry"
          (:result test) => true)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; ;;
;; ----==| R A N G E - Q U E R Y |==---- ;;
;; ;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; An empty database yields an empty result (not nil, not an error).
(facts "range-query - on empty db"
   (with-test-database [sophia (rand-db "test")]
     (test-range-query sophia "test")) => []
   )
;; Realizing the (lazy) range-query result after with-open has closed the
;; cursor must raise, not silently return partial data.
(facts "range-query - cursor closed"
   (with-test-database [sophia (rand-db "test")
                        _      (load-seqence-data sophia "test")]
     (count
      (with-open [^java.io.Closeable cursor (db/cursor sophia)]
        (db/range-query cursor "test"))))
   => (throws #"Cursor already closed.")
   )
;; With no options a range query returns every pair in ascending key order.
(facts "range-query - full index scan"
   (with-test-database [sophia (rand-db "test")
                        _      (load-seqence-data sophia "test")]
     (test-range-query sophia "test")) => sequecen-data
   )
(facts "range-query - full index scan - descending order"
   (with-test-database [sophia (rand-db "test")
                        _      (load-seqence-data sophia "test")]
     (test-range-query sophia "test" :order :desc))
   => (reverse sequecen-data)
   )
;; Prefix search returns only the keys starting with "1" ("1000".."1009").
(facts "range-query - prefix"
   (with-test-database [sophia (rand-db "test")
                        _      (load-seqence-data sophia "test")]
     (test-range-query sophia "test" :key "1" :search-type :prefix))
   => (->> sequecen-data (filter #(str/starts-with? (first %) "1")))
   )
;; NOTE(review): the original key literal was destroyed by an anonymization
;; placeholder ("PI:KEY:...END_PI"). Any prefix outside #{"0" "1" "2"}
;; preserves the test's intent: no fixture key can match it.
(facts "range-query - non matching prefix"
   (with-test-database [sophia (rand-db "test")
                        _      (load-seqence-data sophia "test")]
     (test-range-query sophia "test" :key "5" :search-type :prefix))
   => []
   )
;; Documents current behavior: :order :desc is ignored when combined with
;; :search-type :prefix — the result is still in ascending order.
(facts "range-query - prefix - descending order not working with prefix"
   (with-test-database [sophia (rand-db "test")
                        _      (load-seqence-data sophia "test")]
     (test-range-query sophia "test" :key "1" :search-type :prefix
                       :order :desc))
   => (->> sequecen-data (filter #(str/starts-with? (first %) "1")))
   )
;; An inclusive index scan from "1" starts at the first key >= "1",
;; i.e. it skips the ten "0xxx" keys.
(facts "range-query - index scan prefix"
   (with-test-database [sophia (rand-db "test")
                        _      (load-seqence-data sophia "test")]
     (test-range-query sophia "test" :key "1"
                       :search-type :index-scan-inclusive))
   => (->> sequecen-data (drop 10))
   )
;; Inclusive scan from "1000" keeps "1000" itself, i.e. everything after the
;; first ten "0xxx" keys. NOTE(review): the key literal was mangled by an
;; anonymization placeholder; the (drop 10) expectation pins it to "1000".
(facts "range-query - index scan inclusive"
   (with-test-database [sophia (rand-db "test")
                        _      (load-seqence-data sophia "test")]
     (test-range-query sophia "test" :key "1000"
                       :search-type :index-scan-inclusive))
   => (->> sequecen-data (drop 10))
   )
;; Exclusive scan from "1000" skips "1000" itself, hence (drop 11).
;; NOTE(review): key literal restored from a mangled anonymization
;; placeholder; the expectation pins it to "1000".
(facts "range-query - index scan exclusive"
   (with-test-database [sophia (rand-db "test")
                        _      (load-seqence-data sophia "test")]
     (test-range-query sophia "test" :key "1000"
                       :search-type :index-scan-exclusive))
   => (->> sequecen-data (drop 11))
   )
;; NOTE(review): the original key literal was destroyed by an anonymization
;; placeholder. Any key greater than the largest fixture key ("2009")
;; preserves the intent: the inclusive scan finds nothing.
(facts "range-query - index scan not matching"
   (with-test-database [sophia (rand-db "test")
                        _      (load-seqence-data sophia "test")]
     (test-range-query sophia "test" :key "9999"
                       :search-type :index-scan-inclusive))
   => []
   )
;; Descending inclusive scan from "1": no key equals "1", so the scan starts
;; at the last key < "1" ("0009") and walks down — the ten "0xxx" keys
;; in reverse order.
(facts "range-query - index scan prefix - descending order"
   (with-test-database [sophia (rand-db "test")
                        _      (load-seqence-data sophia "test")]
     (test-range-query sophia "test" :key "1"
                       :search-type :index-scan-inclusive
                       :order :desc))
   => (->> sequecen-data (take 10) reverse)
   )
;; Descending inclusive scan from "1000" yields "1000" plus the ten "0xxx"
;; keys, walking down — the first eleven fixture pairs reversed.
;; NOTE(review): key literal restored from a mangled anonymization
;; placeholder; the (take 11) expectation pins it to "1000".
(facts "range-query - index scan inclusive - descending order"
   (with-test-database [sophia (rand-db "test")
                        _      (load-seqence-data sophia "test")]
     (test-range-query sophia "test" :key "1000"
                       :search-type :index-scan-inclusive
                       :order :desc))
   => (->> sequecen-data (take 11) reverse)
   )
;; Descending exclusive scan from "1000" skips "1000" and yields the ten
;; "0xxx" keys in reverse. NOTE(review): key literal restored from a mangled
;; anonymization placeholder; the (take 10) expectation pins it to "1000".
(facts "range-query - index scan exclusive - descending order"
   (with-test-database [sophia (rand-db "test")
                        _      (load-seqence-data sophia "test")]
     (test-range-query sophia "test" :key "1000"
                       :search-type :index-scan-exclusive
                       :order :desc))
   => (->> sequecen-data (take 10) reverse)
   )
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; ;;
;; ----==| T R A N S A C T I O N S |==---- ;;
;; ;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; The six facts below verify snapshot-style isolation in both directions:
;; writes (set, update, delete) made inside an open transaction are invisible
;; outside it, and writes made outside are invisible inside it.
(facts "transaction isolation - write inside a transaction are not visible from outside the tx."
   (with-test-database [sophia (rand-db "test")]
     (db/set-value! sophia "test" "key1" "value1")
     (db/with-transaction [tx (db/begin-transaction sophia)]
       ;; can get pre-existing keys
       (db/get-value tx "test" "key1") => "value1"
       ;; set a value within the tx
       (db/set-value! tx "test" "key2" "value2")
       ;; can read it back
       (db/get-value tx "test" "key2") => "value2"
       ;; but not visible outside tx
       (db/get-value sophia "test" "key2") => nil
       )))
(facts "transaction isolation - write inside a transaction are not visible from outside the tx.
        including updates"
   (with-test-database [sophia (rand-db "test")]
     (db/set-value! sophia "test" "key1" "value1")
     (db/with-transaction [tx (db/begin-transaction sophia)]
       ;; can get pre-existing keys
       (db/get-value tx "test" "key1") => "value1"
       ;; set a value within the tx
       (db/set-value! tx "test" "key1" "value2")
       ;; can read it back
       (db/get-value tx "test" "key1") => "value2"
       ;; but not visible outside tx
       (db/get-value sophia "test" "key1") => "value1"
       )))
(facts "transaction isolation - write inside a transaction are not visible from outside the tx.
        including deletes"
   (with-test-database [sophia (rand-db "test")]
     (db/set-value! sophia "test" "key1" "value1")
     (db/with-transaction [tx (db/begin-transaction sophia)]
       ;; can get pre-existing keys
       (db/get-value tx "test" "key1") => "value1"
       ;; set a value within the tx
       (db/delete-key! tx "test" "key1")
       ;; can read it back
       (db/get-value tx "test" "key1") => nil
       ;; but not visible outside tx
       (db/get-value sophia "test" "key1") => "value1"
       )))
(facts "transaction isolation - write outside a transaction are not visible from inside the tx."
   (with-test-database [sophia (rand-db "test")]
     (db/set-value! sophia "test" "key1" "value1")
     (db/with-transaction [tx (db/begin-transaction sophia)]
       ;; can get pre-existing keys
       (db/get-value tx "test" "key1") => "value1"
       (db/get-value tx "test" "key2") => nil
       ;; set a value outside the tx
       (db/set-value! sophia "test" "key2" "value2")
       ;; can read it back
       (db/get-value sophia "test" "key2") => "value2"
       ;; but not visible outside tx
       (db/get-value tx "test" "key2") => nil
       )))
(facts "transaction isolation - write outside a transaction are not visible from inside the tx.
        including updates"
   (with-test-database [sophia (rand-db "test")]
     (db/set-value! sophia "test" "key1" "value1")
     (db/with-transaction [tx (db/begin-transaction sophia)]
       ;; can get pre-existing keys
       (db/get-value tx "test" "key1") => "value1"
       ;; set a value outside the tx
       (db/set-value! sophia "test" "key1" "value2")
       ;; can read it back
       (db/get-value sophia "test" "key1") => "value2"
       ;; but not visible inside tx
       (db/get-value tx "test" "key1") => "value1"
       )))
(facts "transaction isolation - write outside a transaction are not visible from inside the tx.
        including deletes"
   (with-test-database [sophia (rand-db "test")]
     (db/set-value! sophia "test" "key1" "value1")
     (db/with-transaction [tx (db/begin-transaction sophia)]
       ;; can get pre-existing keys
       (db/get-value tx "test" "key1") => "value1"
       ;; set a value outside the tx
       (db/delete-key! sophia "test" "key1")
       ;; can read it back
       (db/get-value sophia "test" "key1") => nil
       ;; but not visible inside tx
       (db/get-value tx "test" "key1") => "value1"
       )))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; ;;
;; ----==| T R A N S A C T ! A N D F R I E N D S |==---- ;;
;; ;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Single-threaded transact!: the update function runs inside a tx and its
;; committed result is returned.
(facts "transact! - executes the function with a transaction and
        attempts to commit at the end. If it fails because the
        transaction is aborted for concurrent modification it will
        retry the transaction after a while."
   (with-test-database [sophia (rand-db "test")]
     (db/set-value! sophia "test" "stats" {:counter 0})
     (db/transact! sophia
                   (fn [tx]
                     (let [u (db/get-value tx "test" "stats")]
                       (when u
                         (db/set-value! tx "test" "stats"
                                        (update u :counter inc))))))
     => {:counter 1}
     ))
;; Concurrent transact!: three futures each perform 1000 increments; the
;; retry-on-conflict behavior must make all 3000 land. The `abort` flag and
;; the 20x1s polling loop bound the test's runtime if something goes wrong.
(facts "transact! - concurrent update should be retried"
   (with-test-database [sophia (rand-db "test")]
     (let [abort (atom false)]
       (db/set-value! sophia "test" "stats" {:counter 0})
       (dotimes [z 3]
         (future
           (safely
            (dotimes [_ 1000]
              (db/transact! sophia
                            (fn [tx]
                              (when-not @abort
                                (let [u (db/get-value tx "test" "stats")]
                                  (when u
                                    (db/set-value! tx "test" "stats"
                                                   (update u :counter inc))))))))
            :on-error
            :default nil)))
       ;; wait a bit
       (loop [i 20]
         (let [cnt (:counter (db/get-value sophia "test" "stats"))]
           (when-not (or (= i 0) (= cnt 3000))
             (println "Waiting 1sec... [" cnt "]")
             (safely.core/sleep 1000)
             (recur (dec i)))))
       (reset! abort true)
       ;; all concurrent updates should now be ok
       (db/get-value sophia "test" "stats") => {:counter 3000}
       )))
;; update-value! on an existing key: the fn is applied and the new value
;; is returned.
(facts "update-value! executes the function of the
        result of the of the key and saves the value
        back wrapped in a transact!.
        If fails it will retry.
        "
   (with-test-database [sophia (rand-db "test")]
     (db/set-value! sophia "test" "stats" {:counter 0})
     (db/update-value! sophia "test" "stats"
                       update :counter inc)
     => {:counter 1}
     ))
;; update-value! on a missing key: no write happens and nil is returned
;; (contrast with upsert-value! below).
(facts "update-value! If the key is not present
        the function is not executed and nil is returned!
        "
   (with-test-database [sophia (rand-db "test")]
     (db/update-value! sophia "test" "stats"
                       assoc :counter 1)
     => nil))
;; NOTE(review): the original description was copy-pasted from the
;; update-value! test above and was factually wrong for upsert-value!:
;; the expectation {:counter 11} shows the fn *is* applied (to nil, with
;; fnil supplying the 10 default) when the key is absent.
(facts "upsert-value! If the key is not present
        the function is applied to a nil value
        and the result is stored!
        "
   (with-test-database [sophia (rand-db "test")]
     (db/upsert-value! sophia "test" "stats"
                       update :counter (fnil inc 10)))
   => {:counter 11})
;; upsert-value! on an existing key behaves exactly like update-value!.
(facts "upsert-value! behaves like update if the value
        already exists.
        "
   (with-test-database [sophia (rand-db "test")]
     (db/set-value! sophia "test" "stats" {:counter 0})
     (db/upsert-value! sophia "test" "stats"
                       update :counter inc)
     => {:counter 1}
     ))
|
[
{
"context": "\n\n (query session get-by-last-name :last-name \\\"Jones\\\")\n\n The query itself may be either the var cre",
"end": 2620,
"score": 0.9989854097366333,
"start": 2615,
"tag": "NAME",
"value": "Jones"
}
] |
src/main/clojure/clara/rules.cljc
|
mrrodriguez/clara-rules
| 0 |
(ns clara.rules
"Forward-chaining rules for Clojure. The primary API is in this namespace."
(:require [clara.rules.engine :as eng]
[schema.core :as s]
[clara.rules.platform :as platform]
#?(:cljs [clara.rules.listener :as l])
#?(:clj [clara.rules.compiler :as com])
#?(:clj [clara.rules.dsl :as dsl]))
#?(:cljs (:require-macros clara.rules)))
(defn insert
  "Inserts one or more facts into a working session. It does not modify the given
   session, but returns a new session with the facts added."
  [session & facts]
  ;; Same engine entry point as insert-all; the varargs are collected into a seq.
  (eng/insert session facts))
(defn insert-all
  "Inserts a sequence of facts into a working session. It does not modify the given
   session, but returns a new session with the facts added."
  [session fact-seq]
  ;; Same engine entry point as insert, but the caller supplies the seq directly.
  (eng/insert session fact-seq))
(defn retract
  "Retracts one or more facts from a working session. It does not modify the given
   session, but returns a new session with the facts retracted."
  [session & facts]
  (eng/retract session facts))
(defn fire-rules
  "Fires all rules in the given session. Once a rule is fired, it is labeled in a fired
   state and will not be re-fired unless facts affecting the rule are added or retracted.
   This function does not modify the given session to mark rules as fired. Instead, it returns
   a new session in which the rules are marked as fired.
   This takes an additional map of options as a second argument. Current options:
   :cancelling true (EXPERIMENTAL, subject to change/removal. Not supported in ClojureScript.):
   Simultaneously propagate insertions and retractions through the rules network, at every step using the insertion and retractions of equals facts to cancel each
   other out and avoid operations deeper in the rules network. The behavior of unconditional insertions and RHS (right-hand side) retractions
   is undefined when this option is enabled and this option should not be used when calling fire-rules can result in these operations.
   Note that this is purely a performance optimization and no guarantees are made at this time on whether a given rule's RHS will be called.
   When this option is used rule RHS code that is executed shouldn't do anything that impacts state other than perform logical insertions."
  ([session] (eng/fire-rules session {}))
  ([session opts] (eng/fire-rules session opts)))
(defn query
  "Runs the given query with the optional given parameters against the session.
   The optional parameters should be in map form. For example, a query call might be:
   (query session get-by-last-name :last-name \"Jones\")
   The query itself may be either the var created by a defquery statement,
   or the actual name of the query.
   "
  [session query & params]
  ;; The flat key/value varargs are paired up into the parameter map here.
  (eng/query session query (apply hash-map params)))
(defn insert!
  "To be executed within a rule's right-hand side, this inserts a new fact or facts into working memory.
   Inserted facts are logical, in that if the support for the insertion is removed, the fact
   will automatically be retracted. For instance, if there is a rule that inserts a \"Cold\" fact
   if a \"Temperature\" fact is below a threshold, and the \"Temperature\" fact that triggered
   the rule is retracted, the \"Cold\" fact the rule inserted is also retracted. This is the underlying
   truth maintenance facility.
   This truth maintenance is also transitive: if a rule depends on some criteria to fire, and a
   criterion becomes invalid, it may retract facts that invalidate other rules, which in turn
   retract their conclusions. This way we can ensure that information inferred by rules is always
   in a consistent state."
  [& facts]
  ;; false => logical insert (subject to truth maintenance).
  (eng/insert-facts! facts false))
(defn insert-all!
  "Behaves the same as insert!, but accepts a sequence of facts to be inserted. This can be simpler and more efficient for
   rules needing to insert multiple facts.
   See the doc in insert! for details on insert behavior."
  [facts]
  (eng/insert-facts! facts false))
(defn insert-unconditional!
  "To be executed within a rule's right-hand side, this inserts a new fact or facts into working memory.
   This differs from insert! in that it is unconditional. The facts inserted will not be retracted
   even if the rule activation doing the insert becomes false.  Most users should prefer the simple insert!
   function as described above, but this function is available for use cases that don't wish to use
   Clara's truth maintenance."
  [& facts]
  ;; true => unconditional insert (bypasses truth maintenance).
  (eng/insert-facts! facts true))
(defn insert-all-unconditional!
  "Behaves the same as insert-unconditional!, but accepts a sequence of facts to be inserted rather than individual facts.
   See the doc in insert-unconditional! for details on unconditional insert behavior."
  [facts]
  (eng/insert-facts! facts true))
(defn retract!
  "To be executed within a rule's right-hand side, this retracts a fact or facts from the working memory.
   Retracting facts from the right-hand side has slightly different semantics than insertion. As described
   in the insert! documentation, inserts are logical and will automatically be retracted if the rule
   that inserted them becomes false. This retract! function does not follow the inverse; retracted items
   are simply removed, and not re-added if the rule that retracted them becomes false.
   The reason for this is that retractions remove information from the knowledge base, and doing truth
   maintenance over retractions would require holding onto all retracted items, which would be an issue
   in some use cases. This retract! method is included to help with certain use cases, but unless you
   have a specific need, it is better to simply do inserts on the rule's right-hand side, and let
   Clara's underlying truth maintenance retract inserted items if their support becomes false."
  [& facts]
  (eng/rhs-retract-facts! facts))
(defn accumulate
  "DEPRECATED. Use clara.rules.accumulators/accum instead.
   Creates a new accumulator based on the given properties:
   * An initial-value to be used with the reduced operations.
   * A reduce-fn that can be used with the Clojure Reducers library to reduce items.
   * An optional combine-fn that can be used with the Clojure Reducers library to combine reduced items.
   * An optional retract-fn that can remove a retracted fact from a previously reduced computation
   * An optional convert-return-fn that converts the reduced data into something useful to the caller.
     Simply uses identity by default.
   "
  ;; The :keys destructuring only documents the accepted options; the body
  ;; uses the whole :as map so unknown keys are passed through unchanged.
  [& {:keys [initial-value reduce-fn combine-fn retract-fn convert-return-fn] :as args}]
  (eng/map->Accumulator
   (merge {;; Default conversion does nothing, so use identity.
           :convert-return-fn identity}
          args)))
#?(:cljs
   ;; Compiled rulebase (cljs only): roots of the alpha and beta networks plus
   ;; the lookup maps (query name->node, node id->node) used by the engine.
   (defrecord Rulebase [alpha-roots beta-roots productions queries production-nodes query-nodes id-to-node]))
#?(:cljs
   (defn- create-get-alphas-fn
     "Returns a function that given a sequence of facts,
      returns a map associating alpha nodes with the facts they accept."
     [fact-type-fn ancestors-fn merged-rules]
     ;; We preserve a map of fact types to alpha nodes for efficiency,
     ;; effectively memoizing this operation.
     ;; NOTE: the cache only grows; entries are never evicted (presumably the
     ;; set of fact types seen by a session is bounded — confirm if using
     ;; dynamically generated types).
     (let [alpha-map (atom {})
           wrapped-fact-type-fn (if (= fact-type-fn type)
                                  type
                                  (fn [fact]
                                    (if (isa? (type fact) :clara.rules.engine/system-type)
                                      ;; Internal system types always use ClojureScript's type mechanism.
                                      (type fact)
                                      ;; All other types defer to the provided function.
                                      (fact-type-fn fact))))
           wrapped-ancestors-fn (fn [fact-type]
                                  (if (isa? fact-type :clara.rules.engine/system-type)
                                    ;; Exclude system types from having ancestors for now
                                    ;; since none of our use-cases require them. If this changes
                                    ;; we may need to define a custom hierarchy for them.
                                    #{}
                                    (ancestors-fn fact-type)))]
       (fn [facts]
         (for [[fact-type facts] (platform/tuned-group-by wrapped-fact-type-fn facts)]
           (if-let [alpha-nodes (get @alpha-map fact-type)]
             ;; If the matching alpha nodes are cached, simply return them.
             [alpha-nodes facts]
             ;; The alpha nodes weren't cached for the type, so get them now.
             (let [ancestors (conj (wrapped-ancestors-fn fact-type) fact-type)
                   ;; Get all alpha nodes for all ancestors.
                   new-nodes (distinct
                              (reduce
                               (fn [coll ancestor]
                                 (concat
                                  coll
                                  (get-in merged-rules [:alpha-roots ancestor])))
                               []
                               ancestors))]
               (swap! alpha-map assoc fact-type new-nodes)
               [new-nodes facts])))))))
#?(:cljs
   (defn- mk-rulebase
     "Builds a Rulebase record (cljs only) from beta-network roots, alpha-node
      definitions and the original production structures."
     [beta-roots alpha-fns productions]
     (let [beta-nodes (for [root beta-roots
                            node (tree-seq :children :children root)]
                        node)
           production-nodes (for [node beta-nodes
                                  :when (= eng/ProductionNode (type node))]
                              node)
           query-nodes (for [node beta-nodes
                             :when (= eng/QueryNode (type node))]
                         node)
           query-map (into {} (for [query-node query-nodes
                                    ;; Queries can be looked up by reference or by name;
                                    entry [[(:query query-node) query-node]
                                           [(:name (:query query-node)) query-node]]]
                                entry))
           ;; Map of node ids to beta nodes.
           id-to-node (into {} (for [node beta-nodes]
                                 [(:id node) node]))
           ;; type, alpha node tuples.
           alpha-nodes (for [{:keys [id type alpha-fn children env]} alpha-fns
                             :let [beta-children (map id-to-node children)]]
                         [type (eng/->AlphaNode id env beta-children alpha-fn type)])
           ;; Merge the alpha nodes into a multi-map
           alpha-map (reduce
                      (fn [alpha-map [type alpha-node]]
                        (update-in alpha-map [type] conj alpha-node))
                      {}
                      alpha-nodes)]
       (map->Rulebase
        {:alpha-roots alpha-map
         :beta-roots beta-roots
         ;; Productions have a RHS; queries do not — split on that.
         :productions (filter :rhs productions)
         :queries (remove :rhs productions)
         :production-nodes production-nodes
         :query-nodes query-map
         :id-to-node id-to-node}))))
#?(:cljs
   (defn assemble-session
     "This is used by tools to create a session; most users won't use this function."
     [beta-roots alpha-fns productions options]
     (let [rulebase (mk-rulebase beta-roots alpha-fns productions)
           transport (eng/LocalTransport.)
           ;; The fact-type uses Clojure's type function unless overridden.
           fact-type-fn (or (get options :fact-type-fn)
                            type)
           ;; The ancestors for a logical type uses Clojurescript's ancestors function unless overridden.
           ancestors-fn (or (get options :ancestors-fn)
                            ancestors)
           ;; Create a function that groups a sequence of facts by the collection
           ;; of alpha nodes they target.
           ;; We cache an alpha-map for facts of a given type to avoid computing
           ;; them for every fact entered.
           get-alphas-fn (create-get-alphas-fn fact-type-fn ancestors-fn rulebase)
           activation-group-sort-fn (eng/options->activation-group-sort-fn options)
           activation-group-fn (eng/options->activation-group-fn options)
           ;; Wrap multiple listeners in a single delegating listener if provided.
           listener (if-let [listeners (:listeners options)]
                      (l/delegating-listener listeners)
                      l/default-listener)]
       (eng/LocalSession. rulebase
                          (eng/local-memory rulebase transport activation-group-sort-fn activation-group-fn get-alphas-fn)
                          transport
                          listener
                          get-alphas-fn
                          ;; NOTE(review): the final [] is the initial value of
                          ;; LocalSession's last constructor argument — confirm
                          ;; its meaning against eng/LocalSession.
                          []))))
#?(:clj
   (extend-type clojure.lang.Symbol
     com/IRuleSource
     (load-rules [sym]
       ;; Find the rules and queries in the namespace, shred them,
       ;; and compile them into a rule base.
       (if (namespace sym)
         ;; The symbol is qualified, so load rules in the qualified symbol.
         (let [resolved (resolve sym)]
           (when (nil? resolved)
             (throw (ex-info (str "Unable to resolve rule source: " sym) {:sym sym})))
           (cond
             ;; The symbol references a rule or query, so just return it
             (or (:query (meta resolved))
                 (:rule (meta resolved))) [@resolved]
             ;; The symbol references a sequence, so return it.
             (sequential? @resolved) @resolved
             ;; :default is not special in cond — it is simply a truthy value
             ;; serving as the else branch.
             :default
             (throw (ex-info (str "The source referenced by " sym " is not valid.") {:sym sym} ))))
         ;; The symbol is not qualified, so treat it as a namespace.
         (->> (ns-interns sym)
              (vals) ; Get the references in the namespace.
              (filter var?)
              (filter (comp (some-fn :rule :query :production-seq) meta)) ; Filter down to rules, queries, and seqs of both.
              ;; If definitions are created dynamically (i.e. are not reflected in an actual code file)
              ;; it is possible that they won't have :line metadata, so we have a default of 0.
              (sort (fn [v1 v2]
                      (compare (or (:line (meta v1)) 0)
                               (or (:line (meta v2)) 0))))
              (mapcat #(if (:production-seq (meta %))
                         (deref %)
                         [(deref %)])))))))
#?(:clj
   (defmacro mk-session
     "Creates a new session using the given rule sources. The resulting session
      is immutable, and can be used with insert, retract, fire-rules, and query functions.
      If no sources are provided, it will attempt to load rules from the caller's namespace,
      which is determined by reading Clojure's *ns* var.
      This will use rules defined with defrule, queries defined with defquery, and sequences
      of rule and/or query structures in vars that are annotated with the metadata ^:production-seq.
      The caller may also specify keyword-style options at the end of the parameters. The following
      options are supported, although most users will either not need these or just the first two:
      * :fact-type-fn, which must have a value of a function used to determine the logical type of a given
        fact. Defaults to Clojure's type function.
      * :cache, indicating whether the session creation can be cached, effectively memoizing mk-session.
        Defaults to true. Callers may wish to set this to false when needing to dynamically reload rules.
      * :ancestors-fn, which returns a collection of ancestors for a given type. Defaults to Clojure's ancestors function. A
        fact of a given type will match any rule that uses one of that type's ancestors.  Note that if the collection is ordered
        this ordering will be maintained by Clara; ordering the ancestors consistently will increase the consistency of overall performance.
      * :activation-group-fn, a function applied to production structures and returns the group they should be activated with.
        It defaults to checking the :salience property, or 0 if none exists.
      * :activation-group-sort-fn, a comparator function used to sort the values returned by the above :activation-group-fn.
        Defaults to >, so rules with a higher salience are executed first.
      * :forms-per-eval - The maximum number of expressions that will be evaluated per call to eval.
        Larger batch sizes should see better performance compared to smaller batch sizes. (Only applicable to Clojure)
        Defaults to 5000, see clara.rules.compiler/forms-per-eval-default for more information.
      * :omit-compile-ctx - When false Clara, in Clojure, retains additional information to improve error messages during
        session deserialization at the cost of additional memory use.
        By default this information is retained until the session is initially compiled and then will be discarded. This
        information might prove useful for debugging compilation errors within the rulebase, eg. rulebase serialization
        (ie. via Clara's durability support).
        Defaults to true, see clara.rules.compiler/omit-compile-ctx-default for more information.
      This is not supported in ClojureScript, since it requires eval to dynamically build a session. ClojureScript
      users must use pre-defined rule sessions using defsession."
     [& args]
     (if (and (seq args) (not (keyword? (first args))))
       `(com/mk-session ~(vec args)) ; At least one namespace given, so use it.
       `(com/mk-session (concat [(ns-name *ns*)] ~(vec args)))))) ; No namespace given, so use the current one.
#?(:clj
   (defmacro defsession
     "Creates a session given a list of sources and keyword-style options, which are typically Clojure namespaces.
      Typical usage would be like this, with a session defined as a var:
      (defsession my-session 'example.namespace)
      That var contains an immutable session that then can be used as a starting point to create sessions with
      caller-provided data. Since the session itself is immutable, it can be safely used from multiple threads
      and will not be modified by callers. So a user might grab it, insert facts, and otherwise
      use it as follows:
      (-> my-session
          (insert (->Temperature 23))
          (fire-rules))"
     [name & sources-and-options]
     ;; When compiling ClojureScript, defer to the cljs macro implementation.
     (if (com/compiling-cljs?)
       `(clara.macros/defsession ~name ~@sources-and-options)
       `(def ~name (com/mk-session ~(vec sources-and-options))))))
#?(:clj
   (defmacro defrule
     "Defines a rule and stores it in the given var. For instance, a simple rule would look like this:
(defrule hvac-approval
  \"HVAC repairs need the appropriate paperwork, so insert
   a validation error if approval is not present.\"
  [WorkOrder (= type :hvac)]
  [:not [ApprovalForm (= formname \"27B-6\")]]
  =>
  (insert! (->ValidationError
            :approval
            \"HVAC repairs must include a 27B-6 form.\")))
  See the [rule authoring documentation](http://www.clara-rules.org/docs/rules/) for details."
     [name & body]
     (if (com/compiling-cljs?)
       `(clara.macros/defrule ~name ~@body)
       ;; An optional leading string in the body becomes the var's :doc.
       (let [doc (if (string? (first body)) (first body) nil)]
         `(def ~(vary-meta name assoc :rule true :doc doc)
            ~(dsl/build-rule name body (meta &form)))))))
#?(:clj
   (defmacro defquery
     "Defines a query and stores it in the given var. For instance, a simple query that accepts no
      parameters would look like this:
(defquery check-job
  \"Checks the job for validation errors.\"
  []
  [?issue <- ValidationError])
See the [query authoring documentation](http://www.clara-rules.org/docs/queries/) for details."
     [name & body]
     (if (com/compiling-cljs?)
       `(clara.macros/defquery ~name ~@body)
       ;; Only the optional leading docstring is extracted here; the full body
       ;; (binding vector included) is handed to dsl/build-query unchanged.
       ;; (The original also computed `binding` and `definition` locals that
       ;; were never used — removed as dead code.)
       (let [doc (if (string? (first body)) (first body) nil)]
         `(def ~(vary-meta name assoc :query true :doc doc)
            ~(dsl/build-query name body (meta &form)))))))
#?(:clj
   (defmacro clear-ns-productions!
     "Ensures that any rule/query definitions which have been cached will be cleared from the associated namespace.
      Rule and query definitions can be cached such that if their definitions are not explicitly overwritten with the same
      name (i.e. deleted or renamed), the stale definitions can be loaded into a session using that namespace on
      reload via the REPL or mechanism such as figwheel. Place (clear-ns-productions!) at the top of any namespace
      defining rules/queries to ensure the cache is cleared properly."
     []
     (if (com/compiling-cljs?)
       `(clara.macros/clear-ns-productions!)
       ;; Clojure path: unmap every var in *ns* tagged as a rule, query, or
       ;; production-seq so stale definitions cannot survive a reload.
       (let [production-syms (->> (ns-interns *ns*)
                                  (filter (comp var? second))
                                  (filter (comp (some-fn :rule :query :production-seq) meta second)) ; Filter down to rules, queries, and seqs of both.
                                  (map first) ; Take the symbols for the rule/query vars
                                  )]
         (doseq [psym production-syms]
           (ns-unmap *ns* psym))))))
|
84220
|
(ns clara.rules
"Forward-chaining rules for Clojure. The primary API is in this namespace."
(:require [clara.rules.engine :as eng]
[schema.core :as s]
[clara.rules.platform :as platform]
#?(:cljs [clara.rules.listener :as l])
#?(:clj [clara.rules.compiler :as com])
#?(:clj [clara.rules.dsl :as dsl]))
#?(:cljs (:require-macros clara.rules)))
(defn insert
"Inserts one or more facts into a working session. It does not modify the given
session, but returns a new session with the facts added."
[session & facts]
(eng/insert session facts))
(defn insert-all
"Inserts a sequence of facts into a working session. It does not modify the given
session, but returns a new session with the facts added."
[session fact-seq]
(eng/insert session fact-seq))
(defn retract
"Retracts a fact from a working session. It does not modify the given session,
but returns a new session with the facts retracted."
[session & facts]
(eng/retract session facts))
(defn fire-rules
"Fires are rules in the given session. Once a rule is fired, it is labeled in a fired
state and will not be re-fired unless facts affecting the rule are added or retracted.
This function does not modify the given session to mark rules as fired. Instead, it returns
a new session in which the rules are marked as fired.
This take an additional map of options as a second argument. Current options:
:cancelling true (EXPERIMENTAL, subject to change/removal. Not supported in ClojureScript.):
Simultaneously propagate insertions and retractions through the rules network, at every step using the insertion and retractions of equals facts to cancel each
other out and avoid operations deeper in the rules network. The behavior of unconditional insertions and RHS (right-hand side) retractions
is undefined when this option is enabled and this option should not be used when calling fire-rules can result in these operations.
Note that this is purely a performance optimization and no guarantees are made at this time on whether a given rule's RHS will be called.
When this option is used rule RHS code that is executed shouldn't do anything that impacts state other than perform logical insertions."
([session] (eng/fire-rules session {}))
([session opts] (eng/fire-rules session opts)))
(defn query
  "Runs the given query with the optional given parameters against the session.
   The optional parameters should be in map form. For example, a query call might be:
   (query session get-by-last-name :last-name \"Jones\")
   The query itself may be either the var created by a defquery statement,
   or the actual name of the query.
   "
  ;; NOTE(review): the example name was replaced by an anonymization
  ;; placeholder; restored to "Jones" per the accompanying entity metadata.
  [session query & params]
  (eng/query session query (apply hash-map params)))
(defn insert!
"To be executed within a rule's right-hand side, this inserts a new fact or facts into working memory.
Inserted facts are logical, in that if the support for the insertion is removed, the fact
will automatically be retracted. For instance, if there is a rule that inserts a \"Cold\" fact
if a \"Temperature\" fact is below a threshold, and the \"Temperature\" fact that triggered
the rule is retracted, the \"Cold\" fact the rule inserted is also retracted. This is the underlying
truth maintenance facillity.
This truth maintenance is also transitive: if a rule depends on some criteria to fire, and a
criterion becomes invalid, it may retract facts that invalidate other rules, which in turn
retract their conclusions. This way we can ensure that information inferred by rules is always
in a consistent state."
[& facts]
(eng/insert-facts! facts false))
(defn insert-all!
"Behaves the same as insert!, but accepts a sequence of facts to be inserted. This can be simpler and more efficient for
rules needing to insert multiple facts.
See the doc in insert! for details on insert behavior.."
[facts]
(eng/insert-facts! facts false))
(defn insert-unconditional!
  "To be executed within a rule's right-hand side, this inserts a new fact or facts into working memory.
   This differs from insert! in that it is unconditional. The facts inserted will not be retracted
   even if the rule activation doing the insert becomes false. Most users should prefer the simple insert!
   function as described above, but this function is available for use cases that don't wish to use
   Clara's truth maintenance."
  [& facts]
  ;; true => unconditional insertion, bypassing truth maintenance.
  (eng/insert-facts! facts true))
(defn insert-all-unconditional!
  "Behaves the same as insert-unconditional!, but accepts a sequence of facts to be inserted rather than individual facts.
   See the doc in insert-unconditional! for details on unconditional insert behavior."
  [facts]
  (eng/insert-facts! facts true))
(defn retract!
  "To be executed within a rule's right-hand side, this retracts a fact or facts from the working memory.
   Retracting facts from the right-hand side has slightly different semantics than insertion. As described
   in the insert! documentation, inserts are logical and will automatically be retracted if the rule
   that inserted them becomes false. This retract! function does not follow the inverse; retracted items
   are simply removed, and not re-added if the rule that retracted them becomes false.
   The reason for this is that retractions remove information from the knowledge base, and doing truth
   maintenance over retractions would require holding onto all retracted items, which would be an issue
   in some use cases. This retract! method is included to help with certain use cases, but unless you
   have a specific need, it is better to simply do inserts on the rule's right-hand side, and let
   Clara's underlying truth maintenance retract inserted items if their support becomes false."
  [& facts]
  (eng/rhs-retract-facts! facts))
(defn accumulate
  "DEPRECATED. Use clara.rules.accumulators/accum instead.
   Creates a new accumulator based on the given properties:
   * An initial-value to be used with the reduced operations.
   * A reduce-fn that can be used with the Clojure Reducers library to reduce items.
   * An optional combine-fn that can be used with the Clojure Reducers library to combine reduced items.
   * An optional retract-fn that can remove a retracted fact from a previously reduced computation
   * An optional convert-return-fn that converts the reduced data into something useful to the caller.
     Simply uses identity by default.
   "
  [& {:keys [initial-value reduce-fn combine-fn retract-fn convert-return-fn] :as args}]
  (eng/map->Accumulator
   (merge {;; Default conversion does nothing, so use identity.
           :convert-return-fn identity}
          args)))
#?(:cljs
   ;; ClojureScript-only record holding the compiled rule network: alpha/beta
   ;; roots, the production (rule/query) structures, and lookup maps used by
   ;; the engine at runtime.
   (defrecord Rulebase [alpha-roots beta-roots productions queries production-nodes query-nodes id-to-node]))
#?(:cljs
   (defn- create-get-alphas-fn
     "Returns a function that given a sequence of facts,
      returns a map associating alpha nodes with the facts they accept."
     [fact-type-fn ancestors-fn merged-rules]
     ;; We preserve a map of fact types to alpha nodes for efficiency,
     ;; effectively memoizing this operation.
     (let [alpha-map (atom {})
           ;; Internal system facts must always be dispatched by their actual
           ;; ClojureScript type, regardless of any user-supplied :fact-type-fn.
           wrapped-fact-type-fn (if (= fact-type-fn type)
                                  type
                                  (fn [fact]
                                    (if (isa? (type fact) :clara.rules.engine/system-type)
                                      ;; Internal system types always use ClojureScript's type mechanism.
                                      (type fact)
                                      ;; All other types defer to the provided function.
                                      (fact-type-fn fact))))
           wrapped-ancestors-fn (fn [fact-type]
                                  (if (isa? fact-type :clara.rules.engine/system-type)
                                    ;; Exclude system types from having ancestors for now
                                    ;; since none of our use-cases require them. If this changes
                                    ;; we may need to define a custom hierarchy for them.
                                    #{}
                                    (ancestors-fn fact-type)))]
       (fn [facts]
         (for [[fact-type facts] (platform/tuned-group-by wrapped-fact-type-fn facts)]
           (if-let [alpha-nodes (get @alpha-map fact-type)]
             ;; If the matching alpha nodes are cached, simply return them.
             [alpha-nodes facts]
             ;; The alpha nodes weren't cached for the type, so get them now.
             (let [ancestors (conj (wrapped-ancestors-fn fact-type) fact-type)
                   ;; Get all alpha nodes for all ancestors.
                   new-nodes (distinct
                              (reduce
                               (fn [coll ancestor]
                                 (concat
                                  coll
                                  (get-in merged-rules [:alpha-roots ancestor])))
                               []
                               ancestors))]
               (swap! alpha-map assoc fact-type new-nodes)
               [new-nodes facts])))))))
#?(:cljs
   (defn- mk-rulebase
     "Builds a Rulebase record from the given beta-network roots, alpha-node
      descriptions, and production (rule and query) structures."
     [beta-roots alpha-fns productions]
     (let [;; Walk the entire beta network to enumerate every node.
           beta-nodes (for [root beta-roots
                            node (tree-seq :children :children root)]
                        node)
           production-nodes (for [node beta-nodes
                                  :when (= eng/ProductionNode (type node))]
                              node)
           query-nodes (for [node beta-nodes
                             :when (= eng/QueryNode (type node))]
                         node)
           query-map (into {} (for [query-node query-nodes
                                    ;; Queries can be looked up by reference or by name;
                                    entry [[(:query query-node) query-node]
                                           [(:name (:query query-node)) query-node]]]
                                entry))
           ;; Map of node ids to beta nodes.
           id-to-node (into {} (for [node beta-nodes]
                                 [(:id node) node]))
           ;; type, alpha node tuples.
           alpha-nodes (for [{:keys [id type alpha-fn children env]} alpha-fns
                             :let [beta-children (map id-to-node children)]]
                         [type (eng/->AlphaNode id env beta-children alpha-fn type)])
           ;; Merge the alpha nodes into a multi-map
           alpha-map (reduce
                      (fn [alpha-map [type alpha-node]]
                        (update-in alpha-map [type] conj alpha-node))
                      {}
                      alpha-nodes)]
       (map->Rulebase
        {:alpha-roots alpha-map
         :beta-roots beta-roots
         ;; Productions with an :rhs are rules; those without are queries.
         :productions (filter :rhs productions)
         :queries (remove :rhs productions)
         :production-nodes production-nodes
         :query-nodes query-map
         :id-to-node id-to-node}))))
#?(:cljs
   (defn assemble-session
     "This is used by tools to create a session; most users won't use this function."
     [beta-roots alpha-fns productions options]
     (let [rulebase (mk-rulebase beta-roots alpha-fns productions)
           transport (eng/LocalTransport.)
           ;; The fact-type uses Clojure's type function unless overridden.
           fact-type-fn (or (get options :fact-type-fn)
                            type)
           ;; The ancestors for a logical type uses Clojurescript's ancestors function unless overridden.
           ancestors-fn (or (get options :ancestors-fn)
                            ancestors)
           ;; Create a function that groups a sequence of facts by the collection
           ;; of alpha nodes they target.
           ;; We cache an alpha-map for facts of a given type to avoid computing
           ;; them for every fact entered.
           get-alphas-fn (create-get-alphas-fn fact-type-fn ancestors-fn rulebase)
           activation-group-sort-fn (eng/options->activation-group-sort-fn options)
           activation-group-fn (eng/options->activation-group-fn options)
           ;; Wrap multiple listeners in a single delegating listener when provided.
           listener (if-let [listeners (:listeners options)]
                      (l/delegating-listener listeners)
                      l/default-listener)]
       (eng/LocalSession. rulebase
                          (eng/local-memory rulebase transport activation-group-sort-fn activation-group-fn get-alphas-fn)
                          transport
                          listener
                          get-alphas-fn
                          []))))
#?(:clj
   (extend-type clojure.lang.Symbol
     com/IRuleSource
     (load-rules [sym]
       ;; Find the rules and queries in the namespace, shred them,
       ;; and compile them into a rule base.
       (if (namespace sym)
         ;; The symbol is qualified, so load rules in the qualified symbol.
         (let [resolved (resolve sym)]
           (when (nil? resolved)
             (throw (ex-info (str "Unable to resolve rule source: " sym) {:sym sym})))
           (cond
             ;; The symbol references a rule or query, so just return it
             (or (:query (meta resolved))
                 (:rule (meta resolved))) [@resolved]
             ;; The symbol references a sequence, so return it.
             (sequential? @resolved) @resolved
             :default
             (throw (ex-info (str "The source referenced by " sym " is not valid.") {:sym sym} ))))
         ;; The symbol is not qualified, so treat it as a namespace.
         (->> (ns-interns sym)
              (vals) ; Get the references in the namespace.
              (filter var?)
              (filter (comp (some-fn :rule :query :production-seq) meta)) ; Filter down to rules, queries, and seqs of both.
              ;; If definitions are created dynamically (i.e. are not reflected in an actual code file)
              ;; it is possible that they won't have :line metadata, so we have a default of 0.
              (sort (fn [v1 v2]
                      (compare (or (:line (meta v1)) 0)
                               (or (:line (meta v2)) 0))))
              (mapcat #(if (:production-seq (meta %))
                         (deref %)
                         [(deref %)])))))))
#?(:clj
   (defmacro mk-session
     "Creates a new session using the given rule sources. The resulting session
      is immutable, and can be used with insert, retract, fire-rules, and query functions.
      If no sources are provided, it will attempt to load rules from the caller's namespace,
      which is determined by reading Clojure's *ns* var.
      This will use rules defined with defrule, queries defined with defquery, and sequences
      of rule and/or query structures in vars that are annotated with the metadata ^:production-seq.
      The caller may also specify keyword-style options at the end of the parameters. Currently several
      options are supported, although most users will either not need these or just the first two:
      * :fact-type-fn, which must have a value of a function used to determine the logical type of a given
        fact. Defaults to Clojure's type function.
      * :cache, indicating whether the session creation can be cached, effectively memoizing mk-session.
        Defaults to true. Callers may wish to set this to false when needing to dynamically reload rules.
      * :ancestors-fn, which returns a collection of ancestors for a given type. Defaults to Clojure's ancestors function. A
        fact of a given type will match any rule that uses one of that type's ancestors. Note that if the collection is ordered
        this ordering will be maintained by Clara; ordering the ancestors consistently will increase the consistency of overall performance.
      * :activation-group-fn, a function applied to production structures and returns the group they should be activated with.
        It defaults to checking the :salience property, or 0 if none exists.
      * :activation-group-sort-fn, a comparator function used to sort the values returned by the above :activation-group-fn.
        Defaults to >, so rules with a higher salience are executed first.
      * :forms-per-eval - The maximum number of expressions that will be evaluated per call to eval.
        Larger batch sizes should see better performance compared to smaller batch sizes. (Only applicable to Clojure)
        Defaults to 5000, see clara.rules.compiler/forms-per-eval-default for more information.
      * :omit-compile-ctx - When false Clara, in Clojure, retains additional information to improve error messages during
        session deserialization at the cost of additional memory use.
        By default this information is retained until the session is initially compiled and then will be discarded. This
        information might prove useful for debugging compilation errors within the rulebase, eg. rulebase serialization
        (ie. via Clara's durability support).
        Defaults to true, see clara.rules.compiler/omit-compile-ctx-default for more information.
      This is not supported in ClojureScript, since it requires eval to dynamically build a session. ClojureScript
      users must use pre-defined rule sessions using defsession."
     [& args]
     (if (and (seq args) (not (keyword? (first args))))
       `(com/mk-session ~(vec args)) ; At least one namespace given, so use it.
       `(com/mk-session (concat [(ns-name *ns*)] ~(vec args)))))) ; No namespace given, so use the current one.
#?(:clj
   (defmacro defsession
     "Creates a session given a list of sources and keyword-style options, which are typically Clojure namespaces.
      Typical usage would be like this, with a session defined as a var:
      (defsession my-session 'example.namespace)
      That var contains an immutable session that then can be used as a starting point to create sessions with
      caller-provided data. Since the session itself is immutable, it can be safely used from multiple threads
      and will not be modified by callers. So a user might grab it, insert facts, and otherwise
      use it as follows:
      (-> my-session
        (insert (->Temperature 23))
        (fire-rules))"
     [name & sources-and-options]
     ;; Delegate to the ClojureScript macro implementation when cross-compiling.
     (if (com/compiling-cljs?)
       `(clara.macros/defsession ~name ~@sources-and-options)
       `(def ~name (com/mk-session ~(vec sources-and-options))))))
#?(:clj
   (defmacro defrule
     "Defines a rule and stores it in the given var. For instance, a simple rule would look like this:
      (defrule hvac-approval
        \"HVAC repairs need the appropriate paperwork, so insert
        a validation error if approval is not present.\"
        [WorkOrder (= type :hvac)]
        [:not [ApprovalForm (= formname \"27B-6\")]]
        =>
        (insert! (->ValidationError
                  :approval
                  \"HVAC repairs must include a 27B-6 form.\")))
      See the [rule authoring documentation](http://www.clara-rules.org/docs/rules/) for details."
     [name & body]
     (if (com/compiling-cljs?)
       `(clara.macros/defrule ~name ~@body)
       ;; Attach :rule metadata and the optional docstring to the var so rule
       ;; sources (see load-rules) can discover it later.
       (let [doc (if (string? (first body)) (first body) nil)]
         `(def ~(vary-meta name assoc :rule true :doc doc)
            ~(dsl/build-rule name body (meta &form)))))))
#?(:clj
   (defmacro defquery
     "Defines a query and stores it in the given var. For instance, a simple query that accepts no
      parameters would look like this:
      (defquery check-job
        \"Checks the job for validation errors.\"
        []
        [?issue <- ValidationError])
      See the [query authoring documentation](http://www.clara-rules.org/docs/queries/) for details."
     [name & body]
     (if (com/compiling-cljs?)
       `(clara.macros/defquery ~name ~@body)
       ;; dsl/build-query receives the full body and parses the optional
       ;; docstring and binding vector itself; we only extract the docstring
       ;; here to attach it to the var metadata. (Previously this let also
       ;; computed unused `binding`/`definition` bindings - dead code removed.)
       (let [doc (if (string? (first body)) (first body) nil)]
         `(def ~(vary-meta name assoc :query true :doc doc)
            ~(dsl/build-query name body (meta &form)))))))
#?(:clj
   (defmacro clear-ns-productions!
     "Ensures that any rule/query definitions which have been cached will be cleared from the associated namespace.
      Rule and query definitions can be cached such that if their definitions are not explicitly overwritten with the same
      name (i.e. deleted or renamed), the stale definitions can be loaded into a session using that namespace on
      reload via the REPL or mechanism such as figwheel. Place (clear-ns-productions!) at the top of any namespace
      defining rules/queries to ensure the cache is cleared properly."
     []
     (if (com/compiling-cljs?)
       `(clara.macros/clear-ns-productions!)
       ;; Unmap every var in *ns* carrying rule/query/production-seq metadata.
       (let [production-syms (->> (ns-interns *ns*)
                                  (filter (comp var? second))
                                  (filter (comp (some-fn :rule :query :production-seq) meta second)) ; Filter down to rules, queries, and seqs of both.
                                  (map first) ; Take the symbols for the rule/query vars
                                  )]
         (doseq [psym production-syms]
           (ns-unmap *ns* psym))))))
| true |
(ns clara.rules
"Forward-chaining rules for Clojure. The primary API is in this namespace."
(:require [clara.rules.engine :as eng]
[schema.core :as s]
[clara.rules.platform :as platform]
#?(:cljs [clara.rules.listener :as l])
#?(:clj [clara.rules.compiler :as com])
#?(:clj [clara.rules.dsl :as dsl]))
#?(:cljs (:require-macros clara.rules)))
(defn insert
  "Inserts one or more facts into a working session. It does not modify the given
   session, but returns a new session with the facts added.
   For inserting a pre-built sequence of facts, see insert-all."
  [session & facts]
  (eng/insert session facts))
(defn insert-all
  "Inserts a sequence of facts into a working session. It does not modify the given
   session, but returns a new session with the facts added."
  [session fact-seq]
  (eng/insert session fact-seq))
(defn retract
  "Retracts a fact from a working session. It does not modify the given session,
   but returns a new session with the facts retracted."
  [session & facts]
  (eng/retract session facts))
(defn fire-rules
  "Fires all rules in the given session. Once a rule is fired, it is labeled in a fired
   state and will not be re-fired unless facts affecting the rule are added or retracted.
   This function does not modify the given session to mark rules as fired. Instead, it returns
   a new session in which the rules are marked as fired.
   This function takes an additional map of options as a second argument. Current options:
   :cancelling true (EXPERIMENTAL, subject to change/removal. Not supported in ClojureScript.):
   Simultaneously propagate insertions and retractions through the rules network, at every step using the insertion and retractions of equal facts to cancel each
   other out and avoid operations deeper in the rules network. The behavior of unconditional insertions and RHS (right-hand side) retractions
   is undefined when this option is enabled and this option should not be used when calling fire-rules can result in these operations.
   Note that this is purely a performance optimization and no guarantees are made at this time on whether a given rule's RHS will be called.
   When this option is used rule RHS code that is executed shouldn't do anything that impacts state other than perform logical insertions."
  ([session] (eng/fire-rules session {}))
  ([session opts] (eng/fire-rules session opts)))
(defn query
  "Runs the given query with the optional given parameters against the session.
   The optional parameters should be in map form. For example, a query call might be:
   (query session get-by-last-name :last-name \"Smith\")
   The query itself may be either the var created by a defquery statement,
   or the actual name of the query.
   "
  [session query & params]
  ;; Params arrive as keyword/value pairs; convert to the map form the engine expects.
  (eng/query session query (apply hash-map params)))
(defn insert!
  "To be executed within a rule's right-hand side, this inserts a new fact or facts into working memory.
   Inserted facts are logical, in that if the support for the insertion is removed, the fact
   will automatically be retracted. For instance, if there is a rule that inserts a \"Cold\" fact
   if a \"Temperature\" fact is below a threshold, and the \"Temperature\" fact that triggered
   the rule is retracted, the \"Cold\" fact the rule inserted is also retracted. This is the underlying
   truth maintenance facility.
   This truth maintenance is also transitive: if a rule depends on some criteria to fire, and a
   criterion becomes invalid, it may retract facts that invalidate other rules, which in turn
   retract their conclusions. This way we can ensure that information inferred by rules is always
   in a consistent state."
  [& facts]
  ;; false => logical (truth-maintained) insertion.
  (eng/insert-facts! facts false))
(defn insert-all!
  "Behaves the same as insert!, but accepts a sequence of facts to be inserted. This can be simpler and more efficient for
   rules needing to insert multiple facts.
   See the doc in insert! for details on insert behavior."
  [facts]
  (eng/insert-facts! facts false))
(defn insert-unconditional!
  "To be executed within a rule's right-hand side, this inserts a new fact or facts into working memory.
   This differs from insert! in that it is unconditional. The facts inserted will not be retracted
   even if the rule activation doing the insert becomes false. Most users should prefer the simple insert!
   function as described above, but this function is available for use cases that don't wish to use
   Clara's truth maintenance."
  [& facts]
  ;; true => unconditional insertion, bypassing truth maintenance.
  (eng/insert-facts! facts true))
(defn insert-all-unconditional!
  "Behaves the same as insert-unconditional!, but accepts a sequence of facts to be inserted rather than individual facts.
   See the doc in insert-unconditional! for details on unconditional insert behavior."
  [facts]
  (eng/insert-facts! facts true))
(defn retract!
  "To be executed within a rule's right-hand side, this retracts a fact or facts from the working memory.
   Retracting facts from the right-hand side has slightly different semantics than insertion. As described
   in the insert! documentation, inserts are logical and will automatically be retracted if the rule
   that inserted them becomes false. This retract! function does not follow the inverse; retracted items
   are simply removed, and not re-added if the rule that retracted them becomes false.
   The reason for this is that retractions remove information from the knowledge base, and doing truth
   maintenance over retractions would require holding onto all retracted items, which would be an issue
   in some use cases. This retract! method is included to help with certain use cases, but unless you
   have a specific need, it is better to simply do inserts on the rule's right-hand side, and let
   Clara's underlying truth maintenance retract inserted items if their support becomes false."
  [& facts]
  (eng/rhs-retract-facts! facts))
(defn accumulate
  "DEPRECATED. Use clara.rules.accumulators/accum instead.
   Creates a new accumulator based on the given properties:
   * An initial-value to be used with the reduced operations.
   * A reduce-fn that can be used with the Clojure Reducers library to reduce items.
   * An optional combine-fn that can be used with the Clojure Reducers library to combine reduced items.
   * An optional retract-fn that can remove a retracted fact from a previously reduced computation
   * An optional convert-return-fn that converts the reduced data into something useful to the caller.
     Simply uses identity by default.
   "
  [& {:keys [initial-value reduce-fn combine-fn retract-fn convert-return-fn] :as args}]
  (eng/map->Accumulator
   (merge {;; Default conversion does nothing, so use identity.
           :convert-return-fn identity}
          args)))
#?(:cljs
   ;; ClojureScript-only record holding the compiled rule network: alpha/beta
   ;; roots, the production (rule/query) structures, and lookup maps used by
   ;; the engine at runtime.
   (defrecord Rulebase [alpha-roots beta-roots productions queries production-nodes query-nodes id-to-node]))
#?(:cljs
   (defn- create-get-alphas-fn
     "Returns a function that given a sequence of facts,
      returns a map associating alpha nodes with the facts they accept."
     [fact-type-fn ancestors-fn merged-rules]
     ;; We preserve a map of fact types to alpha nodes for efficiency,
     ;; effectively memoizing this operation.
     (let [alpha-map (atom {})
           ;; Internal system facts must always be dispatched by their actual
           ;; ClojureScript type, regardless of any user-supplied :fact-type-fn.
           wrapped-fact-type-fn (if (= fact-type-fn type)
                                  type
                                  (fn [fact]
                                    (if (isa? (type fact) :clara.rules.engine/system-type)
                                      ;; Internal system types always use ClojureScript's type mechanism.
                                      (type fact)
                                      ;; All other types defer to the provided function.
                                      (fact-type-fn fact))))
           wrapped-ancestors-fn (fn [fact-type]
                                  (if (isa? fact-type :clara.rules.engine/system-type)
                                    ;; Exclude system types from having ancestors for now
                                    ;; since none of our use-cases require them. If this changes
                                    ;; we may need to define a custom hierarchy for them.
                                    #{}
                                    (ancestors-fn fact-type)))]
       (fn [facts]
         (for [[fact-type facts] (platform/tuned-group-by wrapped-fact-type-fn facts)]
           (if-let [alpha-nodes (get @alpha-map fact-type)]
             ;; If the matching alpha nodes are cached, simply return them.
             [alpha-nodes facts]
             ;; The alpha nodes weren't cached for the type, so get them now.
             (let [ancestors (conj (wrapped-ancestors-fn fact-type) fact-type)
                   ;; Get all alpha nodes for all ancestors.
                   new-nodes (distinct
                              (reduce
                               (fn [coll ancestor]
                                 (concat
                                  coll
                                  (get-in merged-rules [:alpha-roots ancestor])))
                               []
                               ancestors))]
               (swap! alpha-map assoc fact-type new-nodes)
               [new-nodes facts])))))))
#?(:cljs
   (defn- mk-rulebase
     "Builds a Rulebase record from the given beta-network roots, alpha-node
      descriptions, and production (rule and query) structures."
     [beta-roots alpha-fns productions]
     (let [;; Walk the entire beta network to enumerate every node.
           beta-nodes (for [root beta-roots
                            node (tree-seq :children :children root)]
                        node)
           production-nodes (for [node beta-nodes
                                  :when (= eng/ProductionNode (type node))]
                              node)
           query-nodes (for [node beta-nodes
                             :when (= eng/QueryNode (type node))]
                         node)
           query-map (into {} (for [query-node query-nodes
                                    ;; Queries can be looked up by reference or by name;
                                    entry [[(:query query-node) query-node]
                                           [(:name (:query query-node)) query-node]]]
                                entry))
           ;; Map of node ids to beta nodes.
           id-to-node (into {} (for [node beta-nodes]
                                 [(:id node) node]))
           ;; type, alpha node tuples.
           alpha-nodes (for [{:keys [id type alpha-fn children env]} alpha-fns
                             :let [beta-children (map id-to-node children)]]
                         [type (eng/->AlphaNode id env beta-children alpha-fn type)])
           ;; Merge the alpha nodes into a multi-map
           alpha-map (reduce
                      (fn [alpha-map [type alpha-node]]
                        (update-in alpha-map [type] conj alpha-node))
                      {}
                      alpha-nodes)]
       (map->Rulebase
        {:alpha-roots alpha-map
         :beta-roots beta-roots
         ;; Productions with an :rhs are rules; those without are queries.
         :productions (filter :rhs productions)
         :queries (remove :rhs productions)
         :production-nodes production-nodes
         :query-nodes query-map
         :id-to-node id-to-node}))))
#?(:cljs
   (defn assemble-session
     "This is used by tools to create a session; most users won't use this function."
     [beta-roots alpha-fns productions options]
     (let [rulebase (mk-rulebase beta-roots alpha-fns productions)
           transport (eng/LocalTransport.)
           ;; The fact-type uses Clojure's type function unless overridden.
           fact-type-fn (or (get options :fact-type-fn)
                            type)
           ;; The ancestors for a logical type uses Clojurescript's ancestors function unless overridden.
           ancestors-fn (or (get options :ancestors-fn)
                            ancestors)
           ;; Create a function that groups a sequence of facts by the collection
           ;; of alpha nodes they target.
           ;; We cache an alpha-map for facts of a given type to avoid computing
           ;; them for every fact entered.
           get-alphas-fn (create-get-alphas-fn fact-type-fn ancestors-fn rulebase)
           activation-group-sort-fn (eng/options->activation-group-sort-fn options)
           activation-group-fn (eng/options->activation-group-fn options)
           ;; Wrap multiple listeners in a single delegating listener when provided.
           listener (if-let [listeners (:listeners options)]
                      (l/delegating-listener listeners)
                      l/default-listener)]
       (eng/LocalSession. rulebase
                          (eng/local-memory rulebase transport activation-group-sort-fn activation-group-fn get-alphas-fn)
                          transport
                          listener
                          get-alphas-fn
                          []))))
#?(:clj
   (extend-type clojure.lang.Symbol
     com/IRuleSource
     (load-rules [sym]
       ;; Find the rules and queries in the namespace, shred them,
       ;; and compile them into a rule base.
       (if (namespace sym)
         ;; The symbol is qualified, so load rules in the qualified symbol.
         (let [resolved (resolve sym)]
           (when (nil? resolved)
             (throw (ex-info (str "Unable to resolve rule source: " sym) {:sym sym})))
           (cond
             ;; The symbol references a rule or query, so just return it
             (or (:query (meta resolved))
                 (:rule (meta resolved))) [@resolved]
             ;; The symbol references a sequence, so return it.
             (sequential? @resolved) @resolved
             :default
             (throw (ex-info (str "The source referenced by " sym " is not valid.") {:sym sym} ))))
         ;; The symbol is not qualified, so treat it as a namespace.
         (->> (ns-interns sym)
              (vals) ; Get the references in the namespace.
              (filter var?)
              (filter (comp (some-fn :rule :query :production-seq) meta)) ; Filter down to rules, queries, and seqs of both.
              ;; If definitions are created dynamically (i.e. are not reflected in an actual code file)
              ;; it is possible that they won't have :line metadata, so we have a default of 0.
              (sort (fn [v1 v2]
                      (compare (or (:line (meta v1)) 0)
                               (or (:line (meta v2)) 0))))
              (mapcat #(if (:production-seq (meta %))
                         (deref %)
                         [(deref %)])))))))
#?(:clj
   (defmacro mk-session
     "Creates a new session using the given rule sources. The resulting session
      is immutable, and can be used with insert, retract, fire-rules, and query functions.
      If no sources are provided, it will attempt to load rules from the caller's namespace,
      which is determined by reading Clojure's *ns* var.
      This will use rules defined with defrule, queries defined with defquery, and sequences
      of rule and/or query structures in vars that are annotated with the metadata ^:production-seq.
      The caller may also specify keyword-style options at the end of the parameters. Currently several
      options are supported, although most users will either not need these or just the first two:
      * :fact-type-fn, which must have a value of a function used to determine the logical type of a given
        fact. Defaults to Clojure's type function.
      * :cache, indicating whether the session creation can be cached, effectively memoizing mk-session.
        Defaults to true. Callers may wish to set this to false when needing to dynamically reload rules.
      * :ancestors-fn, which returns a collection of ancestors for a given type. Defaults to Clojure's ancestors function. A
        fact of a given type will match any rule that uses one of that type's ancestors. Note that if the collection is ordered
        this ordering will be maintained by Clara; ordering the ancestors consistently will increase the consistency of overall performance.
      * :activation-group-fn, a function applied to production structures and returns the group they should be activated with.
        It defaults to checking the :salience property, or 0 if none exists.
      * :activation-group-sort-fn, a comparator function used to sort the values returned by the above :activation-group-fn.
        Defaults to >, so rules with a higher salience are executed first.
      * :forms-per-eval - The maximum number of expressions that will be evaluated per call to eval.
        Larger batch sizes should see better performance compared to smaller batch sizes. (Only applicable to Clojure)
        Defaults to 5000, see clara.rules.compiler/forms-per-eval-default for more information.
      * :omit-compile-ctx - When false Clara, in Clojure, retains additional information to improve error messages during
        session deserialization at the cost of additional memory use.
        By default this information is retained until the session is initially compiled and then will be discarded. This
        information might prove useful for debugging compilation errors within the rulebase, eg. rulebase serialization
        (ie. via Clara's durability support).
        Defaults to true, see clara.rules.compiler/omit-compile-ctx-default for more information.
      This is not supported in ClojureScript, since it requires eval to dynamically build a session. ClojureScript
      users must use pre-defined rule sessions using defsession."
     [& args]
     (if (and (seq args) (not (keyword? (first args))))
       `(com/mk-session ~(vec args)) ; At least one namespace given, so use it.
       `(com/mk-session (concat [(ns-name *ns*)] ~(vec args)))))) ; No namespace given, so use the current one.
#?(:clj
   (defmacro defsession
     "Creates a session given a list of sources and keyword-style options, which are typically Clojure namespaces.
      Typical usage would be like this, with a session defined as a var:
      (defsession my-session 'example.namespace)
      That var contains an immutable session that then can be used as a starting point to create sessions with
      caller-provided data. Since the session itself is immutable, it can be safely used from multiple threads
      and will not be modified by callers. So a user might grab it, insert facts, and otherwise
      use it as follows:
      (-> my-session
        (insert (->Temperature 23))
        (fire-rules))"
     [name & sources-and-options]
     ;; Delegate to the ClojureScript macro implementation when cross-compiling.
     (if (com/compiling-cljs?)
       `(clara.macros/defsession ~name ~@sources-and-options)
       `(def ~name (com/mk-session ~(vec sources-and-options))))))
#?(:clj
(defmacro defrule
"Defines a rule and stores it in the given var. For instance, a simple rule would look like this:
(defrule hvac-approval
\"HVAC repairs need the appropriate paperwork, so insert
a validation error if approval is not present.\"
[WorkOrder (= type :hvac)]
[:not [ApprovalForm (= formname \"27B-6\")]]
=>
(insert! (->ValidationError
:approval
\"HVAC repairs must include a 27B-6 form.\")))
See the [rule authoring documentation](http://www.clara-rules.org/docs/rules/) for details."
[name & body]
(if (com/compiling-cljs?)
`(clara.macros/defrule ~name ~@body)
(let [doc (if (string? (first body)) (first body) nil)]
`(def ~(vary-meta name assoc :rule true :doc doc)
~(dsl/build-rule name body (meta &form)))))))
#?(:clj
(defmacro defquery
"Defines a query and stored it in the given var. For instance, a simple query that accepts no
parameters would look like this:
(defquery check-job
\"Checks the job for validation errors.\"
[]
[?issue <- ValidationError])
See the [query authoring documentation](http://www.clara-rules.org/docs/queries/) for details."
[name & body]
(if (com/compiling-cljs?)
`(clara.macros/defquery ~name ~@body)
(let [doc (if (string? (first body)) (first body) nil)
binding (if doc (second body) (first body))
definition (if doc (drop 2 body) (rest body) )]
`(def ~(vary-meta name assoc :query true :doc doc)
~(dsl/build-query name body (meta &form)))))))
#?(:clj
(defmacro clear-ns-productions!
"Ensures that any rule/query definitions which have been cached will be cleared from the associated namespace.
Rule and query definitions can be cached such that if their definitions are not explicitly overwritten with the same
name (i.e. deleted or renamed), the stale definitions can be loaded into a session using that namespace on
reload via the REPL or mechanism such as figwheel. Place (clear-ns-productions!) at the top of any namespace
defining rules/queries to ensure the cache is cleared properly."
[]
(if (com/compiling-cljs?)
`(clara.macros/clear-ns-productions!)
(let [production-syms (->> (ns-interns *ns*)
(filter (comp var? second))
(filter (comp (some-fn :rule :query :production-seq) meta second)) ; Filter down to rules, queries, and seqs of both.
(map first) ; Take the symbols for the rule/query vars
)]
(doseq [psym production-syms]
(ns-unmap *ns* psym))))))
|
[
{
"context": "ace\n \"Local interface to bind the server to.\"\n \"127.0.0.1\")\n\n\n(def port\n \"Local port to bind the server to",
"end": 562,
"score": 0.9982959032058716,
"start": 553,
"tag": "IP_ADDRESS",
"value": "127.0.0.1"
},
{
"context": " \"Root token set for the development server.\"\n \"t0p-53cr3t\")\n\n\n(defn start-server!\n \"Start a local Vault de",
"end": 808,
"score": 0.9989892244338989,
"start": 798,
"tag": "PASSWORD",
"value": "t0p-53cr3t"
}
] |
test/vault/integration.clj
|
ieugen/vault-clj
| 52 |
(ns vault.integration
"Integration test support code. Manages running a local Vault server in
development mode in order to truly exercise the client code."
(:require
[clojure.java.io :as io]
[clojure.java.shell :as shell]
[clojure.string :as str]
[vault.client.http]
[vault.core :as vault])
(:import
(java.net
InetSocketAddress
Socket
SocketTimeoutException)
java.util.List
java.util.concurrent.TimeUnit))
;; ## Development Server
(def interface
"Local interface to bind the server to."
"127.0.0.1")
(def port
"Local port to bind the server to."
8201)
(def address
"Local address the development server is bound to."
(str "http://" interface ":" port))
(def root-token
"Root token set for the development server."
"t0p-53cr3t")
(defn start-server!
"Start a local Vault development server process. Returns the child process
object."
^Process
[]
(let [command ["vault" "server" "-dev"
(str "-dev-listen-address=" (str interface ":" port))
(str "-dev-root-token-id=" root-token)
"-dev-no-store-token"]
work-dir (io/file "target/vault")
builder (doto (ProcessBuilder. ^List command)
(.directory work-dir)
(.redirectErrorStream true)
(.redirectOutput (io/file work-dir "vault.log")))]
(.mkdirs work-dir)
(.start builder)))
(defn- port-open?
"Returns true if the given port is open, false otherwise."
[host port]
(let [socket-addr (InetSocketAddress. (str host) (long port))
socket (Socket.)]
(try
(.connect socket socket-addr 10)
true
(catch SocketTimeoutException _
false)
(catch Exception _
false)
(finally
(.close socket)))))
(defn await-server
"Wait until the server port is available, trying up to `n` times, sleeping
for `ms` between each attempt."
[n ms]
(loop [i 0]
(if (< i n)
(when-not (port-open? interface port)
(Thread/sleep ms)
(recur (inc i)))
(throw (ex-info (format "Vault server not available on port %d after %d attempts (%d ms)"
port n (* n ms))
{:address address})))))
(defn stop-server!
"Stop the local development server process."
[^Process proc]
(when (.isAlive proc)
(.destroy proc)
(when-not (.waitFor proc 5 TimeUnit/SECONDS)
(binding [*out* *err*]
(println "Server did not stop cleanly after 5 seconds! Terminating..."))
(.destroyForcibly proc)))
(let [exit (.exitValue proc)]
(when-not (zero? exit)
(binding [*out* *err*]
(println "Vault server exited with code:" exit))))
nil)
;; ## Client Setup
(defn test-client
"Construct a new test client pointed at the local development server."
[]
(doto (vault/new-client address)
(vault/authenticate! :token root-token)))
(defmacro with-dev-server
"Macro which executes the provided body with a development vault server and
initialized test client bound to `client`."
[& body]
`(let [proc# (start-server!)]
(try
(await-server 10 100)
(let [~'client (test-client)]
~@body)
(finally
(stop-server! proc#)))))
;; ## Utilities
(defn cli
"Perform a vault command by shelling out to the command-line client. Useful
for actions which have not been implemented in the Clojure client yet.
Returns the parsed JSON result of the command, or throws an exception if the
command fails."
[& args]
(let [result (shell/with-sh-env {"VAULT_ADDR" address
"VAULT_TOKEN" root-token
"VAULT_FORMAT" "json"}
(apply shell/sh (cons "vault" args)))]
(if (zero? (:exit result))
;; Command succeeded, parse result.
;; TODO: parse json
(:out result)
;; Command failed.
(throw (ex-info (format "vault command failed: %s (%d)"
(str/join " " args)
(:exit result))
{:args args
:exit (:exit result)
:out (:out result)
:err (:err result)})))))
|
38740
|
(ns vault.integration
"Integration test support code. Manages running a local Vault server in
development mode in order to truly exercise the client code."
(:require
[clojure.java.io :as io]
[clojure.java.shell :as shell]
[clojure.string :as str]
[vault.client.http]
[vault.core :as vault])
(:import
(java.net
InetSocketAddress
Socket
SocketTimeoutException)
java.util.List
java.util.concurrent.TimeUnit))
;; ## Development Server
(def interface
"Local interface to bind the server to."
"127.0.0.1")
(def port
"Local port to bind the server to."
8201)
(def address
"Local address the development server is bound to."
(str "http://" interface ":" port))
(def root-token
"Root token set for the development server."
"<PASSWORD>")
(defn start-server!
"Start a local Vault development server process. Returns the child process
object."
^Process
[]
(let [command ["vault" "server" "-dev"
(str "-dev-listen-address=" (str interface ":" port))
(str "-dev-root-token-id=" root-token)
"-dev-no-store-token"]
work-dir (io/file "target/vault")
builder (doto (ProcessBuilder. ^List command)
(.directory work-dir)
(.redirectErrorStream true)
(.redirectOutput (io/file work-dir "vault.log")))]
(.mkdirs work-dir)
(.start builder)))
(defn- port-open?
"Returns true if the given port is open, false otherwise."
[host port]
(let [socket-addr (InetSocketAddress. (str host) (long port))
socket (Socket.)]
(try
(.connect socket socket-addr 10)
true
(catch SocketTimeoutException _
false)
(catch Exception _
false)
(finally
(.close socket)))))
(defn await-server
"Wait until the server port is available, trying up to `n` times, sleeping
for `ms` between each attempt."
[n ms]
(loop [i 0]
(if (< i n)
(when-not (port-open? interface port)
(Thread/sleep ms)
(recur (inc i)))
(throw (ex-info (format "Vault server not available on port %d after %d attempts (%d ms)"
port n (* n ms))
{:address address})))))
(defn stop-server!
"Stop the local development server process."
[^Process proc]
(when (.isAlive proc)
(.destroy proc)
(when-not (.waitFor proc 5 TimeUnit/SECONDS)
(binding [*out* *err*]
(println "Server did not stop cleanly after 5 seconds! Terminating..."))
(.destroyForcibly proc)))
(let [exit (.exitValue proc)]
(when-not (zero? exit)
(binding [*out* *err*]
(println "Vault server exited with code:" exit))))
nil)
;; ## Client Setup
(defn test-client
"Construct a new test client pointed at the local development server."
[]
(doto (vault/new-client address)
(vault/authenticate! :token root-token)))
(defmacro with-dev-server
"Macro which executes the provided body with a development vault server and
initialized test client bound to `client`."
[& body]
`(let [proc# (start-server!)]
(try
(await-server 10 100)
(let [~'client (test-client)]
~@body)
(finally
(stop-server! proc#)))))
;; ## Utilities
(defn cli
"Perform a vault command by shelling out to the command-line client. Useful
for actions which have not been implemented in the Clojure client yet.
Returns the parsed JSON result of the command, or throws an exception if the
command fails."
[& args]
(let [result (shell/with-sh-env {"VAULT_ADDR" address
"VAULT_TOKEN" root-token
"VAULT_FORMAT" "json"}
(apply shell/sh (cons "vault" args)))]
(if (zero? (:exit result))
;; Command succeeded, parse result.
;; TODO: parse json
(:out result)
;; Command failed.
(throw (ex-info (format "vault command failed: %s (%d)"
(str/join " " args)
(:exit result))
{:args args
:exit (:exit result)
:out (:out result)
:err (:err result)})))))
| true |
(ns vault.integration
"Integration test support code. Manages running a local Vault server in
development mode in order to truly exercise the client code."
(:require
[clojure.java.io :as io]
[clojure.java.shell :as shell]
[clojure.string :as str]
[vault.client.http]
[vault.core :as vault])
(:import
(java.net
InetSocketAddress
Socket
SocketTimeoutException)
java.util.List
java.util.concurrent.TimeUnit))
;; ## Development Server
(def interface
"Local interface to bind the server to."
"127.0.0.1")
(def port
"Local port to bind the server to."
8201)
(def address
"Local address the development server is bound to."
(str "http://" interface ":" port))
(def root-token
"Root token set for the development server."
"PI:PASSWORD:<PASSWORD>END_PI")
(defn start-server!
"Start a local Vault development server process. Returns the child process
object."
^Process
[]
(let [command ["vault" "server" "-dev"
(str "-dev-listen-address=" (str interface ":" port))
(str "-dev-root-token-id=" root-token)
"-dev-no-store-token"]
work-dir (io/file "target/vault")
builder (doto (ProcessBuilder. ^List command)
(.directory work-dir)
(.redirectErrorStream true)
(.redirectOutput (io/file work-dir "vault.log")))]
(.mkdirs work-dir)
(.start builder)))
(defn- port-open?
"Returns true if the given port is open, false otherwise."
[host port]
(let [socket-addr (InetSocketAddress. (str host) (long port))
socket (Socket.)]
(try
(.connect socket socket-addr 10)
true
(catch SocketTimeoutException _
false)
(catch Exception _
false)
(finally
(.close socket)))))
(defn await-server
"Wait until the server port is available, trying up to `n` times, sleeping
for `ms` between each attempt."
[n ms]
(loop [i 0]
(if (< i n)
(when-not (port-open? interface port)
(Thread/sleep ms)
(recur (inc i)))
(throw (ex-info (format "Vault server not available on port %d after %d attempts (%d ms)"
port n (* n ms))
{:address address})))))
(defn stop-server!
"Stop the local development server process."
[^Process proc]
(when (.isAlive proc)
(.destroy proc)
(when-not (.waitFor proc 5 TimeUnit/SECONDS)
(binding [*out* *err*]
(println "Server did not stop cleanly after 5 seconds! Terminating..."))
(.destroyForcibly proc)))
(let [exit (.exitValue proc)]
(when-not (zero? exit)
(binding [*out* *err*]
(println "Vault server exited with code:" exit))))
nil)
;; ## Client Setup
(defn test-client
"Construct a new test client pointed at the local development server."
[]
(doto (vault/new-client address)
(vault/authenticate! :token root-token)))
(defmacro with-dev-server
"Macro which executes the provided body with a development vault server and
initialized test client bound to `client`."
[& body]
`(let [proc# (start-server!)]
(try
(await-server 10 100)
(let [~'client (test-client)]
~@body)
(finally
(stop-server! proc#)))))
;; ## Utilities
(defn cli
"Perform a vault command by shelling out to the command-line client. Useful
for actions which have not been implemented in the Clojure client yet.
Returns the parsed JSON result of the command, or throws an exception if the
command fails."
[& args]
(let [result (shell/with-sh-env {"VAULT_ADDR" address
"VAULT_TOKEN" root-token
"VAULT_FORMAT" "json"}
(apply shell/sh (cons "vault" args)))]
(if (zero? (:exit result))
;; Command succeeded, parse result.
;; TODO: parse json
(:out result)
;; Command failed.
(throw (ex-info (format "vault command failed: %s (%d)"
(str/join " " args)
(:exit result))
{:args args
:exit (:exit result)
:out (:out result)
:err (:err result)})))))
|
[
{
"context": "unt-id 1)\n\n(def create-command (d/create-account \"John Doe\" 10))\n\n(def deposit-command (d/deposit 10))\n\n(def",
"end": 416,
"score": 0.999107837677002,
"start": 408,
"tag": "NAME",
"value": "John Doe"
},
{
"context": "e\n (handle create-command nil)\n (is (= {:owner \"John Doe\" :balance 10}\n (load-account))))\n\n(deftes",
"end": 1335,
"score": 0.9986625909805298,
"start": 1327,
"tag": "NAME",
"value": "John Doe"
},
{
"context": "\n (handle deposit-command nil)\n (is (= {:owner \"John Doe\" :balance 20}\n (load-account))))\n\n(deftes",
"end": 1499,
"score": 0.9989463090896606,
"start": 1491,
"tag": "NAME",
"value": "John Doe"
},
{
"context": "ndle deposit-command version)\n (is (= {:owner \"John Doe\" :balance 20}\n (load-account)))))\n\n(def",
"end": 2019,
"score": 0.9858016967773438,
"start": 2011,
"tag": "NAME",
"value": "John Doe"
}
] |
test/org/amitayh/revent_clj/cqrs_test.clj
|
amitayh/revent-clj
| 1 |
(ns org.amitayh.revent-clj.cqrs-test
(:require [clojure.test :refer :all]
[org.amitayh.either :refer :all]
[org.amitayh.revent-clj.cqrs :as cqrs]
[org.amitayh.revent-clj.memory-event-store :as s]
[org.amitayh.revent-clj.banking-domain :as d]
[org.amitayh.revent-clj.repository :as r]))
(def account-id 1)
(def create-command (d/create-account "John Doe" 10))
(def deposit-command (d/deposit 10))
(def ^:dynamic handle)
(def ^:dynamic load-snapshot)
(defn load-account []
(-> account-id load-snapshot first :aggregate))
(defn last-version [[events error]]
(if (nil? error)
(-> events last :version)
nil))
(defn setup-handler [test]
(let [store (s/empty-store)
read-events (partial s/read-events store)
persist-events (partial s/persist-events store s/now)]
(binding [load-snapshot (partial r/load-snapshot read-events d/reducer 100)
handle (fn [command expected-version]
(cqrs/handle
load-snapshot
persist-events
(cqrs/->Command account-id command expected-version)))]
(test))))
(use-fixtures :each setup-handler)
(deftest handle-command-for-new-aggregate
(handle create-command nil)
(is (= {:owner "John Doe" :balance 10}
(load-account))))
(deftest handle-multiple-commands
(handle create-command nil)
(handle deposit-command nil)
(is (= {:owner "John Doe" :balance 20}
(load-account))))
(deftest fail-if-expected-version-is-wrong
(let [result (handle create-command nil)
version (last-version result)
wrong-expected-version (dec version)]
(is (= (handle deposit-command wrong-expected-version)
(failure :invalid-aggregate-version)))))
(deftest succeed-if-expected-version-is-correct
(let [result (handle create-command nil)
version (last-version result)]
(handle deposit-command version)
(is (= {:owner "John Doe" :balance 20}
(load-account)))))
(deftest fail-if-command-is-rejected
(handle create-command nil)
(is (= (handle (d/withdraw 20) nil)
(failure :insufficient-funds))))
|
107357
|
(ns org.amitayh.revent-clj.cqrs-test
(:require [clojure.test :refer :all]
[org.amitayh.either :refer :all]
[org.amitayh.revent-clj.cqrs :as cqrs]
[org.amitayh.revent-clj.memory-event-store :as s]
[org.amitayh.revent-clj.banking-domain :as d]
[org.amitayh.revent-clj.repository :as r]))
(def account-id 1)
(def create-command (d/create-account "<NAME>" 10))
(def deposit-command (d/deposit 10))
(def ^:dynamic handle)
(def ^:dynamic load-snapshot)
(defn load-account []
(-> account-id load-snapshot first :aggregate))
(defn last-version [[events error]]
(if (nil? error)
(-> events last :version)
nil))
(defn setup-handler [test]
(let [store (s/empty-store)
read-events (partial s/read-events store)
persist-events (partial s/persist-events store s/now)]
(binding [load-snapshot (partial r/load-snapshot read-events d/reducer 100)
handle (fn [command expected-version]
(cqrs/handle
load-snapshot
persist-events
(cqrs/->Command account-id command expected-version)))]
(test))))
(use-fixtures :each setup-handler)
(deftest handle-command-for-new-aggregate
(handle create-command nil)
(is (= {:owner "<NAME>" :balance 10}
(load-account))))
(deftest handle-multiple-commands
(handle create-command nil)
(handle deposit-command nil)
(is (= {:owner "<NAME>" :balance 20}
(load-account))))
(deftest fail-if-expected-version-is-wrong
(let [result (handle create-command nil)
version (last-version result)
wrong-expected-version (dec version)]
(is (= (handle deposit-command wrong-expected-version)
(failure :invalid-aggregate-version)))))
(deftest succeed-if-expected-version-is-correct
(let [result (handle create-command nil)
version (last-version result)]
(handle deposit-command version)
(is (= {:owner "<NAME>" :balance 20}
(load-account)))))
(deftest fail-if-command-is-rejected
(handle create-command nil)
(is (= (handle (d/withdraw 20) nil)
(failure :insufficient-funds))))
| true |
(ns org.amitayh.revent-clj.cqrs-test
(:require [clojure.test :refer :all]
[org.amitayh.either :refer :all]
[org.amitayh.revent-clj.cqrs :as cqrs]
[org.amitayh.revent-clj.memory-event-store :as s]
[org.amitayh.revent-clj.banking-domain :as d]
[org.amitayh.revent-clj.repository :as r]))
(def account-id 1)
(def create-command (d/create-account "PI:NAME:<NAME>END_PI" 10))
(def deposit-command (d/deposit 10))
(def ^:dynamic handle)
(def ^:dynamic load-snapshot)
(defn load-account []
(-> account-id load-snapshot first :aggregate))
(defn last-version [[events error]]
(if (nil? error)
(-> events last :version)
nil))
(defn setup-handler [test]
(let [store (s/empty-store)
read-events (partial s/read-events store)
persist-events (partial s/persist-events store s/now)]
(binding [load-snapshot (partial r/load-snapshot read-events d/reducer 100)
handle (fn [command expected-version]
(cqrs/handle
load-snapshot
persist-events
(cqrs/->Command account-id command expected-version)))]
(test))))
(use-fixtures :each setup-handler)
(deftest handle-command-for-new-aggregate
(handle create-command nil)
(is (= {:owner "PI:NAME:<NAME>END_PI" :balance 10}
(load-account))))
(deftest handle-multiple-commands
(handle create-command nil)
(handle deposit-command nil)
(is (= {:owner "PI:NAME:<NAME>END_PI" :balance 20}
(load-account))))
(deftest fail-if-expected-version-is-wrong
(let [result (handle create-command nil)
version (last-version result)
wrong-expected-version (dec version)]
(is (= (handle deposit-command wrong-expected-version)
(failure :invalid-aggregate-version)))))
(deftest succeed-if-expected-version-is-correct
(let [result (handle create-command nil)
version (last-version result)]
(handle deposit-command version)
(is (= {:owner "PI:NAME:<NAME>END_PI" :balance 20}
(load-account)))))
(deftest fail-if-command-is-rejected
(handle create-command nil)
(is (= (handle (d/withdraw 20) nil)
(failure :insufficient-funds))))
|
[
{
"context": "e-list (r/atom\n [{:first-name \"Jonathan\" :last-name \"Dannel\"}\n {:firs",
"end": 242,
"score": 0.9997386336326599,
"start": 234,
"tag": "NAME",
"value": "Jonathan"
},
{
"context": " [{:first-name \"Jonathan\" :last-name \"Dannel\"}\n {:first-name \"Bardia\" :las",
"end": 262,
"score": 0.9993103742599487,
"start": 256,
"tag": "NAME",
"value": "Dannel"
},
{
"context": "name \"Dannel\"}\n {:first-name \"Bardia\" :last-name \"Pourvakil\"}\n {:f",
"end": 306,
"score": 0.9997879862785339,
"start": 300,
"tag": "NAME",
"value": "Bardia"
},
{
"context": " {:first-name \"Bardia\" :last-name \"Pourvakil\"}\n {:first-name \"Conor\" :last",
"end": 329,
"score": 0.9994611740112305,
"start": 320,
"tag": "NAME",
"value": "Pourvakil"
},
{
"context": "e \"Pourvakil\"}\n {:first-name \"Conor\" :last-name \"Sullivan\"}]))\n\n(defonce filter-query",
"end": 372,
"score": 0.9997705221176147,
"start": 367,
"tag": "NAME",
"value": "Conor"
},
{
"context": " {:first-name \"Conor\" :last-name \"Sullivan\"}]))\n\n(defonce filter-query (r/atom \"\"))\n(defonce",
"end": 394,
"score": 0.999738335609436,
"start": 386,
"tag": "NAME",
"value": "Sullivan"
},
{
"context": "m \"\"))\n(defonce active-name (r/atom {:first-name \"Jonathan\" :last-name \"Dannel\"}))\n(defonce updating (r/atom",
"end": 487,
"score": 0.9997846484184265,
"start": 479,
"tag": "NAME",
"value": "Jonathan"
},
{
"context": "-name (r/atom {:first-name \"Jonathan\" :last-name \"Dannel\"}))\n(defonce updating (r/atom {:first-name \"Jonat",
"end": 507,
"score": 0.9986644983291626,
"start": 501,
"tag": "NAME",
"value": "Dannel"
},
{
"context": "annel\"}))\n(defonce updating (r/atom {:first-name \"Jonathan\" :last-name \"Dannel\"}))\n\n(defn select-name [perso",
"end": 560,
"score": 0.9997819662094116,
"start": 552,
"tag": "NAME",
"value": "Jonathan"
},
{
"context": "ating (r/atom {:first-name \"Jonathan\" :last-name \"Dannel\"}))\n\n(defn select-name [person]\n (reset! active-",
"end": 580,
"score": 0.99895179271698,
"start": 574,
"tag": "NAME",
"value": "Dannel"
}
] |
src/seven/components/crud.cljs
|
jonathandannel/seven
| 0 |
(ns seven.components.crud
(:require [reagent.core :as r]
[clojure.string :refer [lower-case]]
[seven.components.ui :refer [component-wrapper]]))
(defonce name-list (r/atom
[{:first-name "Jonathan" :last-name "Dannel"}
{:first-name "Bardia" :last-name "Pourvakil"}
{:first-name "Conor" :last-name "Sullivan"}]))
(defonce filter-query (r/atom ""))
(defonce active-name (r/atom {:first-name "Jonathan" :last-name "Dannel"}))
(defonce updating (r/atom {:first-name "Jonathan" :last-name "Dannel"}))
(defn select-name [person]
(reset! active-name person)
(reset! updating person))
(defn can-create? [n]
(not (some #(= n %) @name-list)))
(defn create-entry []
(let [first-name (@active-name :first-name)
last-name (@active-name :last-name)]
(when (and
(not= first-name "")
(not= last-name "")
(can-create? @active-name))
(swap! name-list conj
{:first-name first-name :last-name last-name}))))
(defn update-entry []
(let [index (.indexOf @name-list @updating)]
(when (can-create? @active-name)
(swap! name-list assoc index @active-name))
(select-name @active-name)))
(defn delete-entry []
(let [filtered (filterv #(not= % @active-name) @name-list)]
(reset! name-list filtered)
(reset! active-name {:first-name "" :last-name ""})))
(defn filter-entry [curr]
(let [query-length (count @filter-query)]
(=
(subs (lower-case (get curr :last-name)) 0 query-length)
(lower-case @filter-query))))
(defn handle-filter-change [e]
(reset! filter-query (-> e .-target .-value))
(let [filtered (filterv #(filter-entry %) @name-list)]
(when (> (count filtered) 0) (select-name (first filtered)))))
(defn main []
[component-wrapper "CRUD"
[:div.rows
[:div.column-is-half
[:div.field.is-flex.is-flex-row.mb-5
[:div.level
[:div.level-left.mr-3
[:label.label "Filter "]]
[:div.level-right
[:div.control
[:input.input.is-primary.pr-1
{:placeholder "Surname"
:on-change handle-filter-change}]]]]]]
[:div.columns
; List
[:div.column.is-half
[:div.menu {:style {:overflow-y "scroll" :height "150px"}}
[:ul.menu-list.pr-2 {:style {:list-style-type "none" :margin 0}}
(doall
(for [{:keys [first-name last-name] :as person}
(filterv #(filter-entry %) @name-list)]
^{:key (str first-name last-name)}
[:li {:on-click #(select-name person)}
[:a
{:class (when (= @active-name person)
"is-active")}
first-name " "
last-name]]))]]]
; Right side edit
[:div.column.is-half
[:div.columns
[:div.column.is-one-quarter.mr-5.is-hidden-mobile
[:div.is-flex.is-flex-direction-column
[:label.label.pt-2 "Name"]
[:label.label.surname "Surname"]]]
[:div.column.auto
[:div.is-flex.is-flex-direction-column
[:input.input.mb-2
{:on-change
#(swap! active-name assoc :first-name (-> % .-target .-value))
:value (@active-name :first-name)}]
[:input.input
{:on-change
#(swap! active-name assoc :last-name (-> % .-target .-value))
:value (@active-name :last-name)}]]]]]]
; Bottom buttons
[:div.is-flex
[:button.button.is-primary.mr-3
{:disabled (not (can-create? @active-name))
:on-click create-entry} "Create"]
[:button.button.is-primary.mr-3
{:on-click update-entry} "Update"]
[:button.button.is-danger.mr-3
{:on-click delete-entry} "Delete"]]]])
|
120633
|
(ns seven.components.crud
(:require [reagent.core :as r]
[clojure.string :refer [lower-case]]
[seven.components.ui :refer [component-wrapper]]))
(defonce name-list (r/atom
[{:first-name "<NAME>" :last-name "<NAME>"}
{:first-name "<NAME>" :last-name "<NAME>"}
{:first-name "<NAME>" :last-name "<NAME>"}]))
(defonce filter-query (r/atom ""))
(defonce active-name (r/atom {:first-name "<NAME>" :last-name "<NAME>"}))
(defonce updating (r/atom {:first-name "<NAME>" :last-name "<NAME>"}))
(defn select-name [person]
(reset! active-name person)
(reset! updating person))
(defn can-create? [n]
(not (some #(= n %) @name-list)))
(defn create-entry []
(let [first-name (@active-name :first-name)
last-name (@active-name :last-name)]
(when (and
(not= first-name "")
(not= last-name "")
(can-create? @active-name))
(swap! name-list conj
{:first-name first-name :last-name last-name}))))
(defn update-entry []
(let [index (.indexOf @name-list @updating)]
(when (can-create? @active-name)
(swap! name-list assoc index @active-name))
(select-name @active-name)))
(defn delete-entry []
(let [filtered (filterv #(not= % @active-name) @name-list)]
(reset! name-list filtered)
(reset! active-name {:first-name "" :last-name ""})))
(defn filter-entry [curr]
(let [query-length (count @filter-query)]
(=
(subs (lower-case (get curr :last-name)) 0 query-length)
(lower-case @filter-query))))
(defn handle-filter-change [e]
(reset! filter-query (-> e .-target .-value))
(let [filtered (filterv #(filter-entry %) @name-list)]
(when (> (count filtered) 0) (select-name (first filtered)))))
(defn main []
[component-wrapper "CRUD"
[:div.rows
[:div.column-is-half
[:div.field.is-flex.is-flex-row.mb-5
[:div.level
[:div.level-left.mr-3
[:label.label "Filter "]]
[:div.level-right
[:div.control
[:input.input.is-primary.pr-1
{:placeholder "Surname"
:on-change handle-filter-change}]]]]]]
[:div.columns
; List
[:div.column.is-half
[:div.menu {:style {:overflow-y "scroll" :height "150px"}}
[:ul.menu-list.pr-2 {:style {:list-style-type "none" :margin 0}}
(doall
(for [{:keys [first-name last-name] :as person}
(filterv #(filter-entry %) @name-list)]
^{:key (str first-name last-name)}
[:li {:on-click #(select-name person)}
[:a
{:class (when (= @active-name person)
"is-active")}
first-name " "
last-name]]))]]]
; Right side edit
[:div.column.is-half
[:div.columns
[:div.column.is-one-quarter.mr-5.is-hidden-mobile
[:div.is-flex.is-flex-direction-column
[:label.label.pt-2 "Name"]
[:label.label.surname "Surname"]]]
[:div.column.auto
[:div.is-flex.is-flex-direction-column
[:input.input.mb-2
{:on-change
#(swap! active-name assoc :first-name (-> % .-target .-value))
:value (@active-name :first-name)}]
[:input.input
{:on-change
#(swap! active-name assoc :last-name (-> % .-target .-value))
:value (@active-name :last-name)}]]]]]]
; Bottom buttons
[:div.is-flex
[:button.button.is-primary.mr-3
{:disabled (not (can-create? @active-name))
:on-click create-entry} "Create"]
[:button.button.is-primary.mr-3
{:on-click update-entry} "Update"]
[:button.button.is-danger.mr-3
{:on-click delete-entry} "Delete"]]]])
| true |
(ns seven.components.crud
(:require [reagent.core :as r]
[clojure.string :refer [lower-case]]
[seven.components.ui :refer [component-wrapper]]))
(defonce name-list (r/atom
[{:first-name "PI:NAME:<NAME>END_PI" :last-name "PI:NAME:<NAME>END_PI"}
{:first-name "PI:NAME:<NAME>END_PI" :last-name "PI:NAME:<NAME>END_PI"}
{:first-name "PI:NAME:<NAME>END_PI" :last-name "PI:NAME:<NAME>END_PI"}]))
(defonce filter-query (r/atom ""))
(defonce active-name (r/atom {:first-name "PI:NAME:<NAME>END_PI" :last-name "PI:NAME:<NAME>END_PI"}))
(defonce updating (r/atom {:first-name "PI:NAME:<NAME>END_PI" :last-name "PI:NAME:<NAME>END_PI"}))
(defn select-name [person]
(reset! active-name person)
(reset! updating person))
(defn can-create? [n]
(not (some #(= n %) @name-list)))
(defn create-entry []
(let [first-name (@active-name :first-name)
last-name (@active-name :last-name)]
(when (and
(not= first-name "")
(not= last-name "")
(can-create? @active-name))
(swap! name-list conj
{:first-name first-name :last-name last-name}))))
(defn update-entry []
(let [index (.indexOf @name-list @updating)]
(when (can-create? @active-name)
(swap! name-list assoc index @active-name))
(select-name @active-name)))
(defn delete-entry []
(let [filtered (filterv #(not= % @active-name) @name-list)]
(reset! name-list filtered)
(reset! active-name {:first-name "" :last-name ""})))
(defn filter-entry [curr]
(let [query-length (count @filter-query)]
(=
(subs (lower-case (get curr :last-name)) 0 query-length)
(lower-case @filter-query))))
(defn handle-filter-change [e]
(reset! filter-query (-> e .-target .-value))
(let [filtered (filterv #(filter-entry %) @name-list)]
(when (> (count filtered) 0) (select-name (first filtered)))))
(defn main []
[component-wrapper "CRUD"
[:div.rows
[:div.column-is-half
[:div.field.is-flex.is-flex-row.mb-5
[:div.level
[:div.level-left.mr-3
[:label.label "Filter "]]
[:div.level-right
[:div.control
[:input.input.is-primary.pr-1
{:placeholder "Surname"
:on-change handle-filter-change}]]]]]]
[:div.columns
; List
[:div.column.is-half
[:div.menu {:style {:overflow-y "scroll" :height "150px"}}
[:ul.menu-list.pr-2 {:style {:list-style-type "none" :margin 0}}
(doall
(for [{:keys [first-name last-name] :as person}
(filterv #(filter-entry %) @name-list)]
^{:key (str first-name last-name)}
[:li {:on-click #(select-name person)}
[:a
{:class (when (= @active-name person)
"is-active")}
first-name " "
last-name]]))]]]
; Right side edit
[:div.column.is-half
[:div.columns
[:div.column.is-one-quarter.mr-5.is-hidden-mobile
[:div.is-flex.is-flex-direction-column
[:label.label.pt-2 "Name"]
[:label.label.surname "Surname"]]]
[:div.column.auto
[:div.is-flex.is-flex-direction-column
[:input.input.mb-2
{:on-change
#(swap! active-name assoc :first-name (-> % .-target .-value))
:value (@active-name :first-name)}]
[:input.input
{:on-change
#(swap! active-name assoc :last-name (-> % .-target .-value))
:value (@active-name :last-name)}]]]]]]
; Bottom buttons
[:div.is-flex
[:button.button.is-primary.mr-3
{:disabled (not (can-create? @active-name))
:on-click create-entry} "Create"]
[:button.button.is-primary.mr-3
{:on-click update-entry} "Update"]
[:button.button.is-danger.mr-3
{:on-click delete-entry} "Delete"]]]])
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.