/*
 * Copyright 2013 Netatmo
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package losty.netatmo.model;

public class Measures {

    private static final double NO_DATA = Double.NaN;

    private long beginTime;
    private double temperature;
    private double minTemp;
    private double maxTemp;
    private double humidity;
    private double pressure;
    private double noise;
    private double CO2;
    private double rain;
    private double sum_rain;
    private double sum_rain_24;
    private double sum_rain_1;
    private double windAngle;
    private double windStrength;
    private double gustAngle;
    private double gustStrength;

    public Measures() {
        beginTime = 0;
        temperature = NO_DATA;
        minTemp = NO_DATA;
        maxTemp = NO_DATA;
        humidity = NO_DATA;
        pressure = NO_DATA;
        noise = NO_DATA;
        CO2 = NO_DATA;
        rain = NO_DATA;
        sum_rain = NO_DATA;
        sum_rain_1 = NO_DATA;
        sum_rain_24 = NO_DATA;
        windAngle = NO_DATA;
        windStrength = NO_DATA;
        gustAngle = NO_DATA;
        gustStrength = NO_DATA;
    }

    public long getBeginTime() { return this.beginTime; }
    public void setBeginTime(final long beginTime) { this.beginTime = beginTime; }

    public double getTemperature() { return temperature; }
    public void setTemperature(final double temperature) { this.temperature = temperature; }

    public double getCO2() { return CO2; }
    public void setCO2(final double CO2) { this.CO2 = CO2; }

    public double getHumidity() { return humidity; }
    public void setHumidity(final double humidity) { this.humidity = humidity; }

    public double getPressure() { return pressure; }
    public void setPressure(final double pressure) { this.pressure = pressure; }

    public double getNoise() { return noise; }
    public void setNoise(final double noise) { this.noise = noise; }

    public double getMinTemp() { return minTemp; }
    public void setMinTemp(final double minTemp) { this.minTemp = minTemp; }

    public double getMaxTemp() { return maxTemp; }
    public void setMaxTemp(final double maxTemp) { this.maxTemp = maxTemp; }

    /**
     * Filled by getPublicData() requests.
     * @return sum_rain_24
     */
    public double getSum_rain_24() { return sum_rain_24; }
    public void setSum_rain_24(final double sum_rain_24) { this.sum_rain_24 = sum_rain_24; }

    /**
     * Filled by getPublicData() requests.
     * @return sum_rain_1
     */
    public double getSum_rain_1() { return sum_rain_1; }
    public void setSum_rain_1(final double sum_rain_1) { this.sum_rain_1 = sum_rain_1; }

    /**
     * Filled by getMeasures() requests with aggregation (scale != max).
     * @return sum_rain
     */
    public double getSum_rain() { return sum_rain; }
    public void setSum_rain(final double sum_rain) { this.sum_rain = sum_rain; }

    /**
     * Filled by getMeasures() requests without aggregation (scale == max). It is also filled for requests
     * with aggregation (scale != max), but it is unclear how that value should be interpreted.
     * @return rain
     */
    public double getRain() { return rain; }
    public void setRain(final double rain) { this.rain = rain; }

    public double getWindAngle() { return windAngle; }
    public void setWindAngle(final double windAngle) { this.windAngle = windAngle; }

    public double getWindStrength() { return windStrength; }
    public void setWindStrength(final double windStrength) { this.windStrength = windStrength; }

    public double getGustAngle() { return gustAngle; }
    public void setGustAngle(final double gustAngle) { this.gustAngle = gustAngle; }

    public double getGustStrength() { return gustStrength; }
    public void setGustStrength(final double gustStrength) { this.gustStrength = gustStrength; }
}
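/*
 * A minimal usage sketch for the Measures bean above. The example class name is illustrative only;
 * it assumes losty.netatmo.model.Measures is on the classpath. Unset fields default to Double.NaN
 * (NO_DATA), so callers should probe them with Double.isNaN(...) before using a value.
 */
import losty.netatmo.model.Measures;

public class MeasuresExample {
    public static void main(String[] args) {
        Measures m = new Measures();
        m.setBeginTime(System.currentTimeMillis() / 1000L);
        m.setTemperature(21.5);
        m.setHumidity(48.0);

        // Pressure was never set, so it is still NO_DATA (NaN).
        if (Double.isNaN(m.getPressure())) {
            System.out.println("No pressure reading for this time slot");
        }
        System.out.println("Temperature: " + m.getTemperature() + " C");
    }
}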
/**********************************************************\ | | | hprose | | | | Official WebSite: http://www.hprose.com/ | | http://www.hprose.org/ | | | \**********************************************************/ /**********************************************************\ * * * HproseTcpServer.java * * * * hprose tcp server class for Java. * * * * LastModified: May 3, 2016 * * Author: Ma Bingyao <[email protected]> * * * \**********************************************************/ package hprose.server; import hprose.common.HproseContext; import hprose.common.HproseMethods; import hprose.io.ByteBufferStream; import hprose.net.Acceptor; import hprose.net.Connection; import hprose.net.ConnectionHandler; import hprose.net.TimeoutType; import hprose.util.concurrent.Action; import hprose.util.concurrent.Promise; import java.io.IOException; import java.lang.reflect.Type; import java.net.Socket; import java.net.URI; import java.net.URISyntaxException; import java.nio.ByteBuffer; import java.nio.channels.SocketChannel; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.RejectedExecutionException; public class HproseTcpServer extends HproseService { private final static ThreadLocal<TcpContext> currentContext = new ThreadLocal<TcpContext>(); private volatile ExecutorService threadPool = null; private volatile int readTimeout = 30000; private volatile int writeTimeout = 30000; private boolean enabledThreadPool = false; private int reactorThreads = 2; private Acceptor acceptor = null; private String host = null; private int port = 0; private final class ServerHandler implements Runnable { private final Connection conn; private final ByteBuffer data; private final Integer id; public ServerHandler(Connection conn, ByteBuffer data, Integer id) { this.conn = conn; this.data = data; this.id = id; } @SuppressWarnings("unchecked") public final void run() { TcpContext context = new TcpContext(HproseTcpServer.this, conn.socketChannel()); currentContext.set(context); Object response; try { response = HproseTcpServer.this.handle(data, context); } catch (Throwable e) { conn.close(); currentContext.remove(); return; } finally { ByteBufferStream.free(data); } if (response instanceof Promise) { ((Promise<ByteBuffer>)response).then(new Action<ByteBuffer>() { public void call(ByteBuffer value) throws Throwable { conn.send(value, id); } }, new Action<Throwable>() { public void call(Throwable e) throws Throwable { conn.close(); } }).complete(new Action<Object>() { public void call(Object o) throws Throwable { currentContext.remove(); } }); } else { try { conn.send((ByteBuffer)response, id); } catch (Throwable e) { conn.close(); } finally { currentContext.remove(); } } } } private final class ServerConnectionHandler implements ConnectionHandler { public void onConnect(Connection conn) {} public void onConnected(Connection conn) { fireAcceptEvent(conn.socketChannel()); } public final void onReceived(Connection conn, ByteBuffer data, Integer id) { ServerHandler handler = new ServerHandler(conn, data, id); if (threadPool != null) { try { threadPool.execute(handler); } catch (RejectedExecutionException e) { conn.close(); } } else { handler.run(); } } public final void onSended(Connection conn, Integer id) {} public final void onClose(Connection conn) { fireCloseEvent(conn.socketChannel()); } public void onError(Connection conn, Exception e) { if (conn == null) { fireErrorEvent(e, null); } } public void onTimeout(Connection conn, TimeoutType type) {} public int 
getReadTimeout() { return readTimeout; } public int getWriteTimeout() { return writeTimeout; } public int getConnectTimeout() { throw new UnsupportedOperationException(); } } public HproseTcpServer(String uri) throws URISyntaxException { URI u = new URI(uri); host = u.getHost(); port = u.getPort(); } public HproseTcpServer(String host, int port) { this.host = host; this.port = port; } public String getHost() { return host; } public void setHost(String value) { host = value; } public int getPort() { return port; } public void setPort(int value) { port = value; } public int getReactorThreads() { return reactorThreads; } public void setReactorThreads(int reactorThreads) { this.reactorThreads = reactorThreads; } public boolean isStarted() { return (acceptor != null); } public void start() throws IOException { if (!isStarted()) { acceptor = new Acceptor(host, port, new ServerConnectionHandler(), reactorThreads); acceptor.start(); } } public void stop() { if (isStarted()) { acceptor.close(); if (threadPool != null && !threadPool.isShutdown()) { try { threadPool.shutdown(); } catch (SecurityException e) { fireErrorEvent(e, null); } } acceptor = null; } } @Override public HproseMethods getGlobalMethods() { if (globalMethods == null) { globalMethods = new HproseTcpMethods(); } return globalMethods; } @Override public void setGlobalMethods(HproseMethods methods) { if (methods instanceof HproseTcpMethods) { this.globalMethods = methods; } else { throw new ClassCastException("methods must be a HproseTcpMethods instance"); } } @Override protected Object[] fixArguments(Type[] argumentTypes, Object[] arguments, ServiceContext context) { int count = arguments.length; TcpContext tcpContext = (TcpContext)context; if (argumentTypes.length != count) { Object[] args = new Object[argumentTypes.length]; System.arraycopy(arguments, 0, args, 0, count); Class<?> argType = (Class<?>) argumentTypes[count]; if (argType.equals(HproseContext.class) || argType.equals(ServiceContext.class)) { args[count] = context; } else if (argType.equals(TcpContext.class)) { args[count] = tcpContext; } else if (argType.equals(SocketChannel.class)) { args[count] = tcpContext.getSocketChannel(); } else if (argType.equals(Socket.class)) { args[count] = tcpContext.getSocket(); } return args; } return arguments; } public static TcpContext getCurrentContext() { return currentContext.get(); } /** * Is enabled thread pool. * This thread pool is not for the service threads, it is for the user service method. * The default value is false. * @return is enabled thread pool */ public boolean isEnabledThreadPool() { return enabledThreadPool; } /** * Set enabled thread pool. * This thread pool is not for the service threads, it is for the user service method. * If your service method takes a long time, or will be blocked, please set this property to be true. * @param value is enabled thread pool */ public void setEnabledThreadPool(boolean value) { if (value && (threadPool == null)) { threadPool = Executors.newCachedThreadPool(); } enabledThreadPool = value; } /** * get the thread pool. * This thread pool is not for the service threads, it is for the user service method. * The default value is null. * @return the thread pool */ public ExecutorService getThreadPool() { return threadPool; } /** * set the thread pool. * This thread pool is not for the service threads, it is for the user service method. * Set it to null will disable thread pool. 
* @param value is the thread pool */ public void setThreadPool(ExecutorService value) { threadPool = value; enabledThreadPool = (value != null); } protected void fireAcceptEvent(SocketChannel channel) { if (event != null && HproseTcpServiceEvent.class.isInstance(event)) { ((HproseTcpServiceEvent)event).onAccept(new TcpContext(this, channel)); } } protected void fireCloseEvent(SocketChannel channel) { if (event != null && HproseTcpServiceEvent.class.isInstance(event)) { ((HproseTcpServiceEvent)event).onClose(new TcpContext(this, channel)); } } public int getReadTimeout() { return readTimeout; } public void setReadTimeout(int readTimeout) { this.readTimeout = readTimeout; } public int getWriteTimeout() { return writeTimeout; } public void setWriteTimeout(int writeTimeout) { this.writeTimeout = writeTimeout; } }
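/*
 * A minimal lifecycle sketch for HproseTcpServer, using only members visible in the class above
 * (constructor, setEnabledThreadPool, setReactorThreads, start, stop). The URI and port are
 * placeholders; service methods would be registered through the add(...) overloads inherited
 * from HproseService, which are not shown in this file.
 */
import hprose.server.HproseTcpServer;

public class TcpServerExample {
    public static void main(String[] args) throws Exception {
        HproseTcpServer server = new HproseTcpServer("tcp://0.0.0.0:4321");
        // Run user service methods on a cached thread pool instead of the reactor threads,
        // which matters if handlers block or take a long time.
        server.setEnabledThreadPool(true);
        server.setReactorThreads(2);
        server.start();
        System.out.println("Listening on " + server.getHost() + ":" + server.getPort());

        // Close the acceptor (and shut down the thread pool) when the JVM exits.
        Runtime.getRuntime().addShutdownHook(new Thread(server::stop));
    }
}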
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.avro.io; import java.io.EOFException; import java.io.IOException; import java.io.InputStream; import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.util.HashMap; import java.util.Map; import java.util.Objects; import java.util.Stack; import org.apache.avro.AvroTypeException; import org.apache.avro.Schema; import org.apache.avro.io.parsing.JsonGrammarGenerator; import org.apache.avro.io.parsing.Parser; import org.apache.avro.io.parsing.Symbol; import org.apache.avro.util.Utf8; import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.core.JsonToken; import com.fasterxml.jackson.databind.util.TokenBuffer; /** * A {@link Decoder} for Avro's JSON data encoding. * </p> * Construct using {@link DecoderFactory}. * </p> * JsonDecoder is not thread-safe. */ public class JsonDecoder extends ParsingDecoder implements Parser.ActionHandler { private JsonParser in; private static JsonFactory jsonFactory = new JsonFactory(); Stack<ReorderBuffer> reorderBuffers = new Stack<>(); ReorderBuffer currentReorderBuffer; private static class ReorderBuffer { public Map<String, TokenBuffer> savedFields = new HashMap<>(); public JsonParser origParser = null; } private JsonDecoder(Symbol root, InputStream in) throws IOException { super(root); configure(in); } private JsonDecoder(Symbol root, String in) throws IOException { super(root); configure(in); } JsonDecoder(Schema schema, InputStream in) throws IOException { this(getSymbol(schema), in); } JsonDecoder(Schema schema, String in) throws IOException { this(getSymbol(schema), in); } private static Symbol getSymbol(Schema schema) { Objects.requireNonNull(schema, "Schema cannot be null"); return new JsonGrammarGenerator().generate(schema); } /** * Reconfigures this JsonDecoder to use the InputStream provided. * <p/> * If the InputStream provided is null, a NullPointerException is thrown. * <p/> * Otherwise, this JsonDecoder will reset its state and then reconfigure its * input. * * @param in The InputStream to read from. Cannot be null. * @throws IOException * @throws NullPointerException if {@code in} is {@code null} * @return this JsonDecoder */ public JsonDecoder configure(InputStream in) throws IOException { Objects.requireNonNull(in, "InputStream cannot be null"); parser.reset(); reorderBuffers.clear(); currentReorderBuffer = null; this.in = jsonFactory.createParser(in); this.in.nextToken(); return this; } /** * Reconfigures this JsonDecoder to use the String provided for input. * <p/> * If the String provided is null, a NullPointerException is thrown. * <p/> * Otherwise, this JsonDecoder will reset its state and then reconfigure its * input. * * @param in The String to read from. 
Cannot be null. * @throws IOException * @throws NullPointerException if {@code in} is {@code null} * @return this JsonDecoder */ public JsonDecoder configure(String in) throws IOException { Objects.requireNonNull(in, "String to read from cannot be null"); parser.reset(); reorderBuffers.clear(); currentReorderBuffer = null; this.in = new JsonFactory().createParser(in); this.in.nextToken(); return this; } private void advance(Symbol symbol) throws IOException { this.parser.processTrailingImplicitActions(); if (in.getCurrentToken() == null && this.parser.depth() == 1) throw new EOFException(); parser.advance(symbol); } @Override public void readNull() throws IOException { advance(Symbol.NULL); if (in.getCurrentToken() == JsonToken.VALUE_NULL) { in.nextToken(); } else { throw error("null"); } } @Override public boolean readBoolean() throws IOException { advance(Symbol.BOOLEAN); JsonToken t = in.getCurrentToken(); if (t == JsonToken.VALUE_TRUE || t == JsonToken.VALUE_FALSE) { in.nextToken(); return t == JsonToken.VALUE_TRUE; } else { throw error("boolean"); } } @Override public int readInt() throws IOException { advance(Symbol.INT); if (in.getCurrentToken().isNumeric()) { int result = in.getIntValue(); in.nextToken(); return result; } else { throw error("int"); } } @Override public long readLong() throws IOException { advance(Symbol.LONG); if (in.getCurrentToken().isNumeric()) { long result = in.getLongValue(); in.nextToken(); return result; } else { throw error("long"); } } @Override public float readFloat() throws IOException { advance(Symbol.FLOAT); if (in.getCurrentToken().isNumeric()) { float result = in.getFloatValue(); in.nextToken(); return result; } else { throw error("float"); } } @Override public double readDouble() throws IOException { advance(Symbol.DOUBLE); if (in.getCurrentToken().isNumeric()) { double result = in.getDoubleValue(); in.nextToken(); return result; } else { throw error("double"); } } @Override public Utf8 readString(Utf8 old) throws IOException { return new Utf8(readString()); } @Override public String readString() throws IOException { advance(Symbol.STRING); if (parser.topSymbol() == Symbol.MAP_KEY_MARKER) { parser.advance(Symbol.MAP_KEY_MARKER); if (in.getCurrentToken() != JsonToken.FIELD_NAME) { throw error("map-key"); } } else { if (in.getCurrentToken() != JsonToken.VALUE_STRING) { throw error("string"); } } String result = in.getText(); in.nextToken(); return result; } @Override public void skipString() throws IOException { advance(Symbol.STRING); if (parser.topSymbol() == Symbol.MAP_KEY_MARKER) { parser.advance(Symbol.MAP_KEY_MARKER); if (in.getCurrentToken() != JsonToken.FIELD_NAME) { throw error("map-key"); } } else { if (in.getCurrentToken() != JsonToken.VALUE_STRING) { throw error("string"); } } in.nextToken(); } @Override public ByteBuffer readBytes(ByteBuffer old) throws IOException { advance(Symbol.BYTES); if (in.getCurrentToken() == JsonToken.VALUE_STRING) { byte[] result = readByteArray(); in.nextToken(); return ByteBuffer.wrap(result); } else { throw error("bytes"); } } private byte[] readByteArray() throws IOException { byte[] result = in.getText().getBytes(StandardCharsets.ISO_8859_1); return result; } @Override public void skipBytes() throws IOException { advance(Symbol.BYTES); if (in.getCurrentToken() == JsonToken.VALUE_STRING) { in.nextToken(); } else { throw error("bytes"); } } private void checkFixed(int size) throws IOException { advance(Symbol.FIXED); Symbol.IntCheckAction top = (Symbol.IntCheckAction) parser.popSymbol(); if (size != 
top.size) { throw new AvroTypeException( "Incorrect length for fixed binary: expected " + top.size + " but received " + size + " bytes."); } } @Override public void readFixed(byte[] bytes, int start, int len) throws IOException { checkFixed(len); if (in.getCurrentToken() == JsonToken.VALUE_STRING) { byte[] result = readByteArray(); in.nextToken(); if (result.length != len) { throw new AvroTypeException("Expected fixed length " + len + ", but got" + result.length); } System.arraycopy(result, 0, bytes, start, len); } else { throw error("fixed"); } } @Override public void skipFixed(int length) throws IOException { checkFixed(length); doSkipFixed(length); } private void doSkipFixed(int length) throws IOException { if (in.getCurrentToken() == JsonToken.VALUE_STRING) { byte[] result = readByteArray(); in.nextToken(); if (result.length != length) { throw new AvroTypeException("Expected fixed length " + length + ", but got" + result.length); } } else { throw error("fixed"); } } @Override protected void skipFixed() throws IOException { advance(Symbol.FIXED); Symbol.IntCheckAction top = (Symbol.IntCheckAction) parser.popSymbol(); doSkipFixed(top.size); } @Override public int readEnum() throws IOException { advance(Symbol.ENUM); Symbol.EnumLabelsAction top = (Symbol.EnumLabelsAction) parser.popSymbol(); if (in.getCurrentToken() == JsonToken.VALUE_STRING) { in.getText(); int n = top.findLabel(in.getText()); if (n >= 0) { in.nextToken(); return n; } throw new AvroTypeException("Unknown symbol in enum " + in.getText()); } else { throw error("fixed"); } } @Override public long readArrayStart() throws IOException { advance(Symbol.ARRAY_START); if (in.getCurrentToken() == JsonToken.START_ARRAY) { in.nextToken(); return doArrayNext(); } else { throw error("array-start"); } } @Override public long arrayNext() throws IOException { advance(Symbol.ITEM_END); return doArrayNext(); } private long doArrayNext() throws IOException { if (in.getCurrentToken() == JsonToken.END_ARRAY) { parser.advance(Symbol.ARRAY_END); in.nextToken(); return 0; } else { return 1; } } @Override public long skipArray() throws IOException { advance(Symbol.ARRAY_START); if (in.getCurrentToken() == JsonToken.START_ARRAY) { in.skipChildren(); in.nextToken(); advance(Symbol.ARRAY_END); } else { throw error("array-start"); } return 0; } @Override public long readMapStart() throws IOException { advance(Symbol.MAP_START); if (in.getCurrentToken() == JsonToken.START_OBJECT) { in.nextToken(); return doMapNext(); } else { throw error("map-start"); } } @Override public long mapNext() throws IOException { advance(Symbol.ITEM_END); return doMapNext(); } private long doMapNext() throws IOException { if (in.getCurrentToken() == JsonToken.END_OBJECT) { in.nextToken(); advance(Symbol.MAP_END); return 0; } else { return 1; } } @Override public long skipMap() throws IOException { advance(Symbol.MAP_START); if (in.getCurrentToken() == JsonToken.START_OBJECT) { in.skipChildren(); in.nextToken(); advance(Symbol.MAP_END); } else { throw error("map-start"); } return 0; } @Override public int readIndex() throws IOException { advance(Symbol.UNION); Symbol.Alternative a = (Symbol.Alternative) parser.popSymbol(); String label; if (in.getCurrentToken() == JsonToken.VALUE_NULL) { label = "null"; } else if (in.getCurrentToken() == JsonToken.START_OBJECT && in.nextToken() == JsonToken.FIELD_NAME) { label = in.getText(); in.nextToken(); parser.pushSymbol(Symbol.UNION_END); } else { throw error("start-union"); } int n = a.findLabel(label); if (n < 0) throw new 
AvroTypeException("Unknown union branch " + label); parser.pushSymbol(a.getSymbol(n)); return n; } @Override public Symbol doAction(Symbol input, Symbol top) throws IOException { if (top instanceof Symbol.FieldAdjustAction) { Symbol.FieldAdjustAction fa = (Symbol.FieldAdjustAction) top; String name = fa.fname; if (currentReorderBuffer != null) { try (TokenBuffer tokenBuffer = currentReorderBuffer.savedFields.get(name)) { if (tokenBuffer != null) { currentReorderBuffer.savedFields.remove(name); currentReorderBuffer.origParser = in; in = tokenBuffer.asParser(); in.nextToken(); return null; } } } if (in.getCurrentToken() == JsonToken.FIELD_NAME) { do { String fn = in.getText(); in.nextToken(); if (name.equals(fn) || fa.aliases.contains(fn)) { return null; } else { if (currentReorderBuffer == null) { currentReorderBuffer = new ReorderBuffer(); } try (TokenBuffer tokenBuffer = new TokenBuffer(in)) { // Moves the parser to the end of the current event e.g. END_OBJECT tokenBuffer.copyCurrentStructure(in); currentReorderBuffer.savedFields.put(fn, tokenBuffer); } in.nextToken(); } } while (in.getCurrentToken() == JsonToken.FIELD_NAME); throw new AvroTypeException("Expected field name not found: " + fa.fname); } } else if (top == Symbol.FIELD_END) { if (currentReorderBuffer != null && currentReorderBuffer.origParser != null) { in = currentReorderBuffer.origParser; currentReorderBuffer.origParser = null; } } else if (top == Symbol.RECORD_START) { if (in.getCurrentToken() == JsonToken.START_OBJECT) { in.nextToken(); reorderBuffers.push(currentReorderBuffer); currentReorderBuffer = null; } else { throw error("record-start"); } } else if (top == Symbol.RECORD_END || top == Symbol.UNION_END) { // AVRO-2034 advance to the end of our object while (in.getCurrentToken() != JsonToken.END_OBJECT) { in.nextToken(); } if (top == Symbol.RECORD_END) { if (currentReorderBuffer != null && !currentReorderBuffer.savedFields.isEmpty()) { throw error("Unknown fields: " + currentReorderBuffer.savedFields.keySet()); } currentReorderBuffer = reorderBuffers.pop(); } // AVRO-2034 advance beyond the end object for the next record. in.nextToken(); } else { throw new AvroTypeException("Unknown action symbol " + top); } return null; } private AvroTypeException error(String type) { return new AvroTypeException("Expected " + type + ". Got " + in.getCurrentToken()); } }
// Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root. package com.yahoo.config.application.api; import com.yahoo.io.IOUtils; import com.yahoo.path.Path; import com.yahoo.vespa.config.util.ConfigUtils; import org.junit.Test; import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; import java.io.StringReader; import java.util.List; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; /** * @author Ulf Lilleengen */ public abstract class ApplicationFileTest { protected void writeAppTo(File destFolder) throws IOException { createFiles(destFolder, "vespa-services.xml", "vespa-hosts.xml"); createFolders(destFolder, "searchdefinitions", "components", "files", "templates"); File sds = new File(destFolder, "searchdefinitions"); createFiles(sds, "sock.sd"); File files = new File(destFolder, "files"); createFiles(files, "foo.json"); IOUtils.writeFile(new File(files, "foo.json"), "foo : foo\n", false); File templates = new File(destFolder, "templates"); createFolders(templates, "basic", "simple_html", "text"); createFiles(templates, "basic/error.templ", "basic/header.templ", "basic/hit.templ", "simple_html/footer.templ", "simple_html/header.templ", "simple_html/hit.templ", "text/error.templ", "text/footer.templ", "text/header.templ", "text/hit.templ", "text/nohits.templ"); File components = new File(destFolder, "components"); createFiles(components, "defs-only.jar", "file.txt"); } private void createFiles(File destFolder, String ... names) throws IOException { for (String name : names) { File f = new File(destFolder, name); assertTrue(f.createNewFile()); IOUtils.writeFile(f, "foo", false); } } private void createFolders(File destFolder, String ... 
names) { for (String name : names) { new File(destFolder, name).mkdirs(); } } @Test public void testApplicationFile() throws Exception { Path p1 = Path.fromString("foo/bar/baz"); ApplicationFile f1 = getApplicationFile(p1); ApplicationFile f2 = getApplicationFile(p1); assertEquals(p1, f1.getPath()); assertEquals(p1, f2.getPath()); } @Test public void testApplicationFileEquals() throws Exception { Path p1 = Path.fromString("foo/bar/baz"); Path p2 = Path.fromString("foo/bar"); ApplicationFile f1 = getApplicationFile(p1); ApplicationFile f2 = getApplicationFile(p2); assertEquals(f1, f1); assertNotEquals(f1, f2); assertNotEquals(f2, f1); assertEquals(f2, f2); } @Test public void testApplicationFileIsDirectory() throws Exception { assertFalse(getApplicationFile(Path.fromString("vespa-services.xml")).isDirectory()); assertTrue(getApplicationFile(Path.fromString("searchdefinitions")).isDirectory()); assertFalse(getApplicationFile(Path.fromString("searchdefinitions/sock.sd")).isDirectory()); assertFalse(getApplicationFile(Path.fromString("doesnotexist")).isDirectory()); } @Test public void testApplicationFileExists() throws Exception { assertTrue(getApplicationFile(Path.fromString("vespa-services.xml")).exists()); assertTrue(getApplicationFile(Path.fromString("searchdefinitions")).exists()); assertTrue(getApplicationFile(Path.fromString("searchdefinitions/sock.sd")).exists()); assertFalse(getApplicationFile(Path.fromString("doesnotexist")).exists()); } @Test public void testApplicationFileReadContent() throws Exception { assertFileContent("foo : foo\n", "files/foo.json"); } @Test (expected = FileNotFoundException.class) public void testApplicationFileReadContentInvalidFile() throws Exception { assertFileContent("foo : foo\n", "doesnotexist"); } @Test public void testApplicationFileCreateDirectory() throws Exception { ApplicationFile file = getApplicationFile(Path.fromString("/notyet/exists/here")); assertFalse(file.exists()); file.createDirectory(); assertTrue(file.exists()); assertTrue(file.isDirectory()); file = getApplicationFile(Path.fromString("myDir")).createDirectory(); assertTrue(file.isDirectory()); file = getApplicationFile(Path.fromString("myDir/sub")).createDirectory(); file = getApplicationFile(Path.fromString("myDir/sub")).createDirectory(); assertTrue(file.isDirectory()); file = getApplicationFile(Path.fromString("searchdefinitions/myDir2/")).createDirectory(); assertTrue(file.isDirectory()); file = getApplicationFile(Path.fromString("myDir3/myDir4/myDir5")).createDirectory(); assertTrue(file.exists()); assertTrue(file.isDirectory()); } @Test (expected = IllegalArgumentException.class) public void testApplicationFileCreateDirectoryOverFile() throws Exception { getApplicationFile(Path.fromString("vespa-services.xml")).createDirectory(); } @Test public void testApplicationFileCreateFile() throws Exception { ApplicationFile file = getApplicationFile(Path.fromString("newfile.txt")); assertFalse(file.exists()); file.writeFile(new StringReader("foobar")); assertTrue(file.exists()); assertFalse(file.isDirectory()); assertEquals("foobar", com.yahoo.io.IOUtils.readAll(file.createReader())); } @Test public void testApplicationFileCreateFileWithPath() throws Exception { ApplicationFile file = getApplicationFile(Path.fromString("subdir/newfile.txt")); assertFalse(file.exists()); file.writeFile(new StringReader("foobar")); assertTrue(file.exists()); assertFalse(file.isDirectory()); assertEquals("foobar", com.yahoo.io.IOUtils.readAll(file.createReader())); } @Test public void 
testApplicationFileListFiles() throws Exception { ApplicationFile file = getApplicationFile(Path.createRoot()); assertTrue(file.exists()); assertTrue(file.isDirectory()); List<ApplicationFile> list = file.listFiles(); assertEquals(6, list.size()); assertTrue(listContains(list, "vespa-services.xml")); assertTrue(listContains(list, "vespa-hosts.xml")); assertTrue(listContains(list, "components/")); assertTrue(listContains(list, "searchdefinitions/")); assertTrue(listContains(list, "templates/")); assertTrue(listContains(list, "files/")); list = getApplicationFile(Path.fromString("templates")).listFiles(false); assertTrue(listContains(list, "templates/basic/")); assertTrue(listContains(list, "templates/simple_html/")); assertTrue(listContains(list, "templates/text/")); list = getApplicationFile(Path.fromString("components")).listFiles(false); assertTrue(listContains(list, "components/defs-only.jar")); assertTrue(listContains(list, "components/file.txt")); list = getApplicationFile(Path.fromString("components")).listFiles(true); assertTrue(listContains(list, "components/defs-only.jar")); assertTrue(listContains(list, "components/file.txt")); list = getApplicationFile(Path.fromString("templates")).listFiles(true); assertEquals(14, list.size()); assertTrue(listContains(list, "templates/basic/")); assertTrue(listContains(list, "templates/basic/error.templ")); assertTrue(listContains(list, "templates/basic/header.templ")); assertTrue(listContains(list, "templates/basic/hit.templ")); assertTrue(listContains(list, "templates/simple_html/")); assertTrue(listContains(list, "templates/simple_html/footer.templ")); assertTrue(listContains(list, "templates/simple_html/header.templ")); assertTrue(listContains(list, "templates/simple_html/hit.templ")); assertTrue(listContains(list, "templates/text/")); assertTrue(listContains(list, "templates/text/error.templ")); assertTrue(listContains(list, "templates/text/footer.templ")); assertTrue(listContains(list, "templates/text/header.templ")); assertTrue(listContains(list, "templates/text/hit.templ")); assertTrue(listContains(list, "templates/text/nohits.templ")); list = getApplicationFile(Path.createRoot()).listFiles(true); assertTrue(listContains(list, "components/")); assertTrue(listContains(list, "files/")); assertTrue(listContains(list, "searchdefinitions/")); assertTrue(listContains(list, "templates/")); assertTrue(listContains(list, "vespa-hosts.xml")); assertTrue(listContains(list, "vespa-services.xml")); assertTrue(listContains(list, "templates/text/")); assertTrue(listContains(list, "templates/text/error.templ")); assertTrue(listContains(list, "templates/text/footer.templ")); assertTrue(listContains(list, "templates/text/header.templ")); assertTrue(listContains(list, "templates/text/hit.templ")); assertTrue(listContains(list, "templates/text/nohits.templ")); list = getApplicationFile(Path.createRoot()).listFiles(new ApplicationFile.PathFilter() { @Override public boolean accept(Path path) { return path.getName().endsWith(".xml"); } }); assertEquals(2, list.size()); assertFalse(listContains(list, "components/")); assertFalse(listContains(list, "files/")); assertFalse(listContains(list, "searchdefinitions/")); assertFalse(listContains(list, "templates/")); assertTrue(listContains(list, "vespa-hosts.xml")); assertTrue(listContains(list, "vespa-services.xml")); } private boolean listContains(List<ApplicationFile> list, String s) { for (ApplicationFile file : list) { String actual = file.getPath().toString(); if (file.isDirectory()) { actual += "/"; } if 
(actual.equals(s)) { return true; } } return false; } @Test public void testApplicationFileCanBeDeleted() throws Exception { ApplicationFile file = getApplicationFile(Path.fromString("file1.txt")); file.writeFile(new StringReader("file1")); assertEquals("file1.txt", file.getPath().getName()); file.delete(); assertFalse(file.exists()); assertFalse(file.isDirectory()); List<ApplicationFile> files = file.listFiles(true); assertTrue(files.isEmpty()); file = getApplicationFile(Path.fromString("subdir/file2.txt")); file.writeFile(new StringReader("file2")); assertEquals("file2.txt", file.getPath().getName()); file.delete(); assertFalse(file.exists()); assertFalse(file.isDirectory()); files = file.listFiles(true); assertTrue(files.isEmpty()); } @Test public void getGetMetaPath() throws Exception { ApplicationFile file = getApplicationFile(Path.fromString("file1.txt")); assertEquals(".meta/file1.txt", file.getMetaPath().toString()); file = getApplicationFile(Path.fromString("dir/file1.txt")); assertEquals("dir/.meta/file1.txt", file.getMetaPath().toString()); file = getApplicationFile(Path.fromString("dir")); assertEquals(".meta/dir", file.getMetaPath().toString()); file = getApplicationFile(Path.fromString("")); assertEquals(".meta/.root", file.getMetaPath().toString()); } @Test public void getGetMetaContent() throws Exception { String testFileName = "file1.txt"; ApplicationFile file = getApplicationFile(Path.fromString(testFileName)); assertEquals(".meta/" + testFileName, file.getMetaPath().toString()); String input = "a"; file.writeFile(new StringReader(input)); assertEquals(ApplicationFile.ContentStatusNew, file.getMetaData().getStatus()); assertEquals(ConfigUtils.getMd5(input), file.getMetaData().getMd5()); testFileName = "foo"; ApplicationFile fooDir = getApplicationFile(Path.fromString(testFileName)); fooDir.createDirectory(); assertEquals(ApplicationFile.ContentStatusNew, fooDir.getMetaData().getStatus()); assertTrue(fooDir.getMetaData().getMd5().isEmpty()); testFileName = "foo/file2.txt"; file = getApplicationFile(Path.fromString(testFileName)); input = "a"; file.writeFile(new StringReader(input)); assertEquals(ApplicationFile.ContentStatusNew, file.getMetaData().getStatus()); assertEquals(ConfigUtils.getMd5(input), file.getMetaData().getMd5()); file.delete(); assertEquals(ApplicationFile.ContentStatusDeleted, file.getMetaData().getStatus()); assertTrue(file.getMetaData().getMd5().isEmpty()); fooDir.delete(); assertEquals(ApplicationFile.ContentStatusDeleted, fooDir.getMetaData().getStatus()); assertTrue(file.getMetaData().getMd5().isEmpty()); // non-existing file testFileName = "non-existing"; file = getApplicationFile(Path.fromString(testFileName)); assertNull(file.getMetaData()); } @Test(expected = RuntimeException.class) public void testApplicationFileCantDeleteDirNotEmpty() throws Exception { getApplicationFile(Path.fromString("searchdefinitions")).delete(); } @Test public void testReadingFromInputStream() throws Exception { String data = IOUtils.readAll(getApplicationFile(Path.fromString("files/foo.json")).createReader()); assertTrue(data.contains("foo : foo")); } private void assertFileContent(String expected, String path) throws Exception { ApplicationFile file = getApplicationFile(Path.fromString(path)); String actual = com.yahoo.io.IOUtils.readAll(file.createReader()); assertEquals(expected, actual); } public abstract ApplicationFile getApplicationFile(Path path) throws Exception; }
/* * Copyright 2006 Project JCows. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jcows.view.vc; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; import java.util.ArrayList; import java.util.List; import org.apache.log4j.Logger; import org.eclipse.swt.SWT; import org.eclipse.swt.events.SelectionAdapter; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.layout.FillLayout; import org.eclipse.swt.layout.GridLayout; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Control; import org.eclipse.swt.widgets.Group; import org.eclipse.swt.widgets.ToolBar; import org.eclipse.swt.widgets.ToolItem; import org.jcows.JCowsException; import org.jcows.controller.JCowsController; import org.jcows.model.vc.IValidator; import org.jcows.model.vc.ParamListItem; import org.jcows.system.Properties; import com.cloudgarden.resource.SWTResourceManager; /** * The <code>VCArray</code> class represents an array of Java types. * This class holds other Visual Components and manages their layout. * <code>VCArray</code> can add or remove (increase or decrease the array size) components.<br/><br/> * For example a <code>VCArray</code> object can add several <code>VCint</code> * objects. An <code>int[5]</code> is represented as a <code>VCArray</code> object * that contains five <code>VCint</code> objects. * * @author Marco Schmid ([email protected]) * @version $LastChangedRevision: 222 $, $LastChangedDate: 2006-11-07 07:35:44 +0000 (Tue, 07 Nov 2006) $ */ public class VCArray extends VC { private static final Logger LOGGER = Logger.getLogger(VCArray.class); private Composite m_compositeGUI; private List<IVC> m_addedVC; private Class m_visualClass; private Composite m_compositeElements; private Composite m_compositeToolBar; private Group m_groupArray=null; private ToolItem m_toolItemAdd=null; private ToolItem m_toolItemRemove=null; private Object m_startValue; private int m_arraypos=-1; /** * Constructs a new instance of this class. * * @param compositeGUI the composite where new components can be drawn. * @param addedVC the list that contains all visual components. * @param paramListItem the correspondung {@link org.jcows.model.vc.ParamListItem} for this object. * @param parent the parent composite. * @param visualClass the visual class that will be instantiated for a new array element. 
*/ public VCArray(final Composite compositeGUI,final List<IVC> addedVC,ParamListItem paramListItem, final Composite parent,final Class visualClass) throws JCowsException { super(paramListItem,parent); m_compositeGUI=compositeGUI; m_addedVC=addedVC; m_visualClass=visualClass; GridLayout layoutGroup=new GridLayout(); layoutGroup.makeColumnsEqualWidth=false; layoutGroup.numColumns=1; FillLayout layoutButtons=new FillLayout(); GridLayout layoutElements=new GridLayout(); layoutElements.makeColumnsEqualWidth=false; layoutElements.numColumns=1; m_groupArray=new Group(this,SWT.NONE); m_groupArray.setLayout(layoutGroup); m_groupArray.setText(paramListItem.getLabel()); /* * Reset the current default label. */ m_label.setText(""); m_compositeToolBar=new Composite(m_groupArray,SWT.NONE); m_compositeToolBar.setLayout(layoutButtons); m_compositeElements=new Composite(m_groupArray,SWT.NONE); m_compositeElements.setLayout(layoutElements); ToolBar toolBar=new ToolBar(m_compositeToolBar,SWT.FLAT); m_toolItemAdd=new ToolItem(toolBar,SWT.NONE); m_toolItemAdd.setText("Add"); m_toolItemAdd.setImage(SWTResourceManager.getImage("resources/add.png")); m_toolItemRemove=new ToolItem(toolBar,SWT.NONE); m_toolItemRemove.setText("Remove"); m_toolItemRemove.setImage(SWTResourceManager.getImage("resources/remove.png")); /* * Add a listener to add components. */ m_toolItemAdd.addSelectionListener(new SelectionAdapter() { public void widgetSelected(SelectionEvent evt) { LOGGER.debug(evt); addArrayElement(); } }); /* * Add a listener to remove components. */ m_toolItemRemove.addSelectionListener(new SelectionAdapter() { public void widgetSelected(SelectionEvent evt) { LOGGER.debug(evt); removeArrayElement(); } }); m_startValue=paramListItem.getVectorData().get(0); /* * Add one array element by default. */ // TODO: is one element really necessary? //addArrayElement(); } public void addArrayElement() { IVC vc=null; try { m_arraypos++; if(m_arraypos>0) m_paramListItem.getVectorData().add(m_startValue); Constructor constructor=m_visualClass.getConstructor(new Class[]{ParamListItem.class,Composite.class,int.class}); vc=(IVC)constructor.newInstance(new Object[]{m_paramListItem,m_compositeElements,m_arraypos}); m_addedVC.add(vc); /* * Updates the request composite size. * In test cases, the reference may be null. */ if(JCowsController.m_mainWindowController!=null) JCowsController.m_mainWindowController.m_mainWindow.scrolledCompositeGUIRequestUpdate(); } catch(SecurityException e) { new JCowsException(Properties.getMessage("error.SecurityException"),e); } catch(IllegalArgumentException e) { m_arraypos--; new JCowsException(Properties.getMessage("error.IllegalArgumentException"),e); } catch(NoSuchMethodException e) { m_arraypos--; new JCowsException(Properties.getMessage("error.NoSuchMethodException"),e); } catch(InstantiationException e) { m_arraypos--; new JCowsException(Properties.getMessage("error.InstantiationException"),e); } catch(IllegalAccessException e) { m_arraypos--; new JCowsException(Properties.getMessage("error.IllegalAccessException"),e); } catch(InvocationTargetException e) { m_arraypos--; new JCowsException(Properties.getMessage("error.InvocationTargetException"),e); } } private void removeArrayElement() { if(m_arraypos>0) { m_paramListItem.getVectorData().remove(m_arraypos); Control[] controls=m_compositeElements.getChildren(); m_addedVC.remove(controls[controls.length-1]); controls[controls.length-1].dispose(); /* * Updates the request composite size. * In test cases, the reference may be null. 
*/ if(JCowsController.m_mainWindowController!=null) JCowsController.m_mainWindowController.m_mainWindow.scrolledCompositeGUIRequestUpdate(); m_arraypos--; } } public Composite getComposite() { return m_groupArray; } public void addValidator(IValidator validator) { /* * Do nothing. */ } public IValidator[] getValidators() { return null; } public String getLabel() { return m_groupArray.getText(); } public void setLabel(String value) { m_groupArray.setText(value); } public boolean validate() { return true; } // TODO: Implement error state in VCArray. public void setErrorState() { } public void setEditable(boolean editable) { m_compositeToolBar.dispose(); m_groupArray.layout(); } }
package rest; import java.sql.SQLException; import javax.ws.rs.Consumes; import javax.ws.rs.DELETE; import javax.ws.rs.GET; import javax.ws.rs.POST; import javax.ws.rs.PUT; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.WebApplicationException; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import tm.RotondAndesException; import tm.RotondAndesTM; import vos.Usuario; @Path(URLS.USUARIO) @Consumes(MediaType.APPLICATION_JSON) @Produces(MediaType.APPLICATION_JSON) public class UsuarioServices extends BaseServices implements URLS { @GET @Path("{usu}-{con}") public Response get(@PathParam("usu") String usu, @PathParam("con") String con) { RotondAndesTM tm = new RotondAndesTM(getPath()); Usuario videos; try { videos = tm.login(con, usu); } catch (RotondAndesException e) { return Response.status(412).entity(doErrorMessage(e)).build(); } catch (SQLException e) { return Response.status(500).entity(doErrorMessage(e)).build(); } return Response.status(200).entity(videos).build(); } @POST public Response add(Usuario data) { RotondAndesTM tm = new RotondAndesTM(getPath()); try { data = tm.creaUsuario(data); } catch (RotondAndesException e) { return Response.status(412).entity(doErrorMessage(e)).build(); } catch (Exception e) { return Response.status(500).entity(doErrorMessage(e)).build(); } return Response.status(200).entity(data).build(); } @PUT @Path("{" + USUARIOID + ": \\d+}") public Response update(Usuario data, @PathParam(USUARIOID) long codigo) { data.setCodigo(codigo); RotondAndesTM tm = new RotondAndesTM(getPath()); try { data = tm.updateUsuario(data); } catch (RotondAndesException ex) { return Response.status(404).entity(doErrorMessage(ex)).build(); } catch (Exception e) { return Response.status(500).entity(doErrorMessage(e)).build(); } return Response.status(200).entity(data).build(); } @DELETE @Path("{" + USUARIOID + ": \\d+}") public Response delete(@PathParam(USUARIOID) long codigo) { Usuario data; RotondAndesTM tm = new RotondAndesTM(getPath()); try { data = tm.deleteUsuario(codigo); } catch (RotondAndesException ex) { return Response.status(404).entity(doErrorMessage(ex)).build(); } catch (Exception e) { return Response.status(500).entity(doErrorMessage(e)).build(); } return Response.status(200).entity(data).build(); } @Path("/{" + USUARIOID + ": \\d+}/" + CLIENTE) public ClienteServices clienteServices(@PathParam(USUARIOID) Long id) { try { return new ClienteServices(context); } catch (Exception e) { throw new WebApplicationException(Response.status(500).entity(doErrorMessage(e)).build()); } } @Path("{" + USUARIOID + ": \\d+}/" + RESTAURANTE) public RestauranteAdminServices restauranteServices(@PathParam(USUARIOID) Long id) { try { return new RestauranteAdminServices(context); } catch (Exception e) { throw new WebApplicationException(Response.status(500).entity(doErrorMessage(e)).build()); } } @Path("{" + USUARIOID + ": \\d+}/" + TIPOCOMIDA) public TipoComidaAdminServices TipoComidaServices() { try { return new TipoComidaAdminServices(context); } catch (Exception e) { throw new WebApplicationException(Response.status(500).entity(doErrorMessage(e)).build()); } } @Path("{" + USUARIOID + ": \\d+}/" + INGREDIENTE) public IngredienteServices ingredinteServices() { try { return new IngredienteServices(context); } catch (Exception e) { throw new WebApplicationException(Response.status(500).entity(doErrorMessage(e)).build()); } } @Path("{" + USUARIOID + ": \\d+}/" + PRODUCTO) public ProductoServices productoServices() 
{ try { return new ProductoServices(context); } catch (Exception e) { throw new WebApplicationException(Response.status(500).entity(doErrorMessage(e)).build()); } } @Path("{" + USUARIOID + ": \\d+}/" + ZONA) public ZonaAdminServices zonaServices() { try { return new ZonaAdminServices(context); } catch (Exception e) { throw new WebApplicationException(Response.status(500).entity(doErrorMessage(e)).build()); } } // @Path(CHANGE+"/{" + REGISTROID + ": \\d+}/" + ZONA) // public ZonaModificationServices getZona(@PathParam(REGISTROID) Long id) { // RotondAndesTM tm = new RotondAndesTM(getPath()); // try { // if (tm.getRegistro(id).getPermisos() != 3) // throw new RotondAndesException("no tiene los permisos necesarios"); // // return new ZonaModificationServices(context); // } catch (RotondAndesException ex) { // throw new // WebApplicationException(Response.status(404).entity(doErrorMessage(ex)).build()); // } catch (Exception e) { // throw new // WebApplicationException(Response.status(500).entity(doErrorMessage(e)).build()); // } // } // // // @Path(CHANGE+"/{" + REGISTROID + ": \\d+}/" + RESERVA) // public ReservaModificationServices getReservas(@PathParam(REGISTROID) Long // id) { // RotondAndesTM tm = new RotondAndesTM(getPath()); // try { // if (tm.getRegistro(id).getPermisos() != 3) // throw new RotondAndesException("No tiene los permisos necesarios"); // // return new ReservaModificationServices(context); // } catch (RotondAndesException ex) { // throw new // WebApplicationException(Response.status(404).entity(doErrorMessage(ex)).build()); // } catch (Exception e) { // throw new // WebApplicationException(Response.status(500).entity(doErrorMessage(e)).build()); // } // } // // @Path(CHANGE+"/{" + REGISTROID + ": \\d+}/" + ESPACIO) // public EspacioModificationServices getEspacio(@PathParam(REGISTROID) Long id) // { // RotondAndesTM tm = new RotondAndesTM(getPath()); // try { // if (tm.getRegistro(id).getPermisos() != 3) // throw new RotondAndesException("No tiene los permisos necesarios"); // // return new EspacioModificationServices(context); // } catch (RotondAndesException ex) { // throw new // WebApplicationException(Response.status(404).entity(doErrorMessage(ex)).build()); // } catch (Exception e) { // throw new // WebApplicationException(Response.status(500).entity(doErrorMessage(e)).build()); // } // } // // @Path(CHANGE+"/{" + REGISTROID + ": \\d+}/" + REPRESENTANTE) // public RepresentateModificationServices // getRepresentante(@PathParam(REGISTROID) Long id) { // RotondAndesTM tm = new RotondAndesTM(getPath()); // try { // if (tm.getRegistro(id).getPermisos() != 3) // throw new RotondAndesException("No tiene los permisos necesarios"); // // return new RepresentateModificationServices(context); // } catch (RotondAndesException ex) { // throw new // WebApplicationException(Response.status(404).entity(doErrorMessage(ex)).build()); // } catch (Exception e) { // throw new // WebApplicationException(Response.status(500).entity(doErrorMessage(e)).build()); // } // } // // @Path(CHANGE+"/{" + REGISTROID + ": \\d+}/" + INGREDIENTE) // public IngredienteModificationServices getIngrediente(@PathParam(REGISTROID) // Long id) { // RotondAndesTM tm = new RotondAndesTM(getPath()); // try { // if (tm.getRegistro(id).getPermisos() != 3) // throw new RotondAndesException("No tiene los permisos necesarios"); // // return new IngredienteModificationServices(context); // } catch (RotondAndesException ex) { // throw new // 
WebApplicationException(Response.status(404).entity(doErrorMessage(ex)).build()); // } catch (Exception e) { // throw new // WebApplicationException(Response.status(500).entity(doErrorMessage(e)).build()); // } // } // // @Path("{" + REGISTROID + ": \\d+}/" + INGREDIENTE) // public IngredienteCreatorServices // getIngredienteCliente(@PathParam(REGISTROID) Long id) { // RotondAndesTM tm = new RotondAndesTM(getPath()); // try { // if (tm.getRegistro(id).getPermisos() != 2) // throw new RotondAndesException("No tiene los permisos necesarios"); // // return new IngredienteCreatorServices(context); // } catch (RotondAndesException ex) { // throw new // WebApplicationException(Response.status(404).entity(doErrorMessage(ex)).build()); // } catch (Exception e) { // throw new // WebApplicationException(Response.status(500).entity(doErrorMessage(e)).build()); // } // } // // @Path(CHANGE+"/{" + REGISTROID + ": \\d+}/" + PRODUCTO) // public ProductoModificationServices getProducto(@PathParam(REGISTROID) Long // id) { // System.out.println("asdas3"); // RotondAndesTM tm = new RotondAndesTM(getPath()); // try { // if (tm.getRegistro(id).getPermisos() != 3) // throw new RotondAndesException("No tiene los permisos necesarios"); // // return new ProductoModificationServices(context); // } catch (RotondAndesException ex) { // throw new // WebApplicationException(Response.status(404).entity(doErrorMessage(ex)).build()); // } catch (Exception e) { // throw new // WebApplicationException(Response.status(500).entity(doErrorMessage(e)).build()); // } // } // // @Path("{" + REGISTROID + ": \\d+}/" + PRODUCTO) // public ProductoCreatorServices getProductoCliente(@PathParam(REGISTROID) Long // id) { // RotondAndesTM tm = new RotondAndesTM(getPath()); // try { // if (tm.getRegistro(id).getPermisos() != 2) // throw new RotondAndesException("No tiene los permisos necesarios"); // // return new ProductoCreatorServices(context); // } catch (RotondAndesException ex) { // throw new // WebApplicationException(Response.status(404).entity(doErrorMessage(ex)).build()); // } catch (Exception e) { // throw new // WebApplicationException(Response.status(500).entity(doErrorMessage(e)).build()); // } // } // // @Path("{" + REGISTROID + ": \\d+}/" + MENU) // public MenuModificationServices getMenu(@PathParam(REGISTROID) Long id) { // RotondAndesTM tm = new RotondAndesTM(getPath()); // try { // if (tm.getRegistro(id).getPermisos() != 2) // throw new RotondAndesException("No tiene los permisos necesarios"); // // return new MenuModificationServices(context); // } catch (RotondAndesException ex) { // throw new // WebApplicationException(Response.status(404).entity(doErrorMessage(ex)).build()); // } catch (Exception e) { // throw new // WebApplicationException(Response.status(500).entity(doErrorMessage(e)).build()); // } // } // // @Path(CHANGE+"/{" + REGISTROID + ": \\d+}/" + PEDIDO) // public PedidoModificationServices getPedido(@PathParam(REGISTROID) Long id) { // RotondAndesTM tm = new RotondAndesTM(getPath()); // try { // if (tm.getRegistro(id).getPermisos() != 3) // throw new RotondAndesException("No tiene los permisos necesarios"); // // return new PedidoModificationServices(context); // } catch (RotondAndesException ex) { // throw new // WebApplicationException(Response.status(404).entity(doErrorMessage(ex)).build()); // } catch (Exception e) { // throw new // WebApplicationException(Response.status(500).entity(doErrorMessage(e)).build()); // } // } // // @Path("{" + REGISTROID + ": \\d+}/" + PEDIDO) // public 
PedidoCreatorServices getPedidoCliente(@PathParam(REGISTROID) Long id) // { // RotondAndesTM tm = new RotondAndesTM(getPath()); // try { // if (tm.getRegistro(id).getPermisos() != 1) // throw new RotondAndesException("No tiene los permisos necesarios"); // // return new PedidoCreatorServices(context); // } catch (RotondAndesException ex) { // throw new // WebApplicationException(Response.status(404).entity(doErrorMessage(ex)).build()); // } catch (Exception e) { // throw new // WebApplicationException(Response.status(500).entity(doErrorMessage(e)).build()); // } // } // // // public void integridad(Registro data) throws RotondAndesException { // if (data.getCodigo() == null) // throw new RotondAndesException("el codigo no puede ser null"); // if (data.getUsuario() == null) // throw new RotondAndesException("el usuario no puede ser null"); // if (data.getContrasenia() == null) // throw new RotondAndesException("la contrasenia no piede ser null"); // if (data.getPermisos() == null) // throw new RotondAndesException("el permiso no puede se null"); // if (data.getUsuario().equals("")) // throw new RotondAndesException("no puede agregar un usuario vacio"); // if (data.getUsuario().contains(" ")) // throw new RotondAndesException("un usuario no puede tener espacios"); // if (data.getUsuario().length() > 100) // throw new RotondAndesException("la cadena ususrio supera el limite permitido // de caracteres"); // if (data.getContrasenia().equals("")) // throw new RotondAndesException("no puede agregar un usuario vacio"); // if (data.getContrasenia().length() > 100) // throw new RotondAndesException("la cadena ususrio supera el limite permitido // de caracteres"); // if (data.getPermisos() <= 0 || data.getPermisos() > 3) // throw new RotondAndesException("el permiso no es valido"); // } }
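/*
 * A hedged client-side sketch for the login endpoint above, using the standard JAX-RS 2.0 client
 * API. The base URL and the "usuarios" path segment are placeholders; the real segment comes from
 * the URLS.USUARIO constant, which is not shown in this file.
 */
import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;

public class UsuarioClientExample {
    public static void main(String[] args) {
        Client client = ClientBuilder.newClient();
        try {
            // GET /{usu}-{con} maps to UsuarioServices.get(usu, con) and returns 200, 412 or 500.
            Response response = client
                .target("http://localhost:8080/RotondAndes/rest") // assumed deployment root
                .path("usuarios")                                 // assumed value of URLS.USUARIO
                .path("admin-secret")                             // fills the "{usu}-{con}" template
                .request(MediaType.APPLICATION_JSON)
                .get();
            System.out.println("HTTP " + response.getStatus());
        } finally {
            client.close();
        }
    }
}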
/* * Copyright 2015 AML Innovation & Consulting LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.amlinv.stats; import java.math.BigDecimal; import java.util.Iterator; import java.util.LinkedList; /** * Missing samples are treated as 0 values. Until at least one sample is recorded, however, the result is treated as * no data. * <p> * Note that adding elements out-of-order is supported, but is tricky and will lead to poor results unless the * values are tightly packed. For example, adding values that pre-date the covered time period has no effect, and * adding values far into the future compared to the rest of the values will lead to premature devaluation of the * entire average until the intermediate values are filled-in. * * In addition, updates anywhere besides the end of the list run in linear time - i.e. O(n). * </p> * * <b>CONCURRENCY WARNING:</b> this class is not safe under concurrency; synchronize all accesses as-needed for * concurrent use. * * Created by art on 5/27/15. */ public class MovingAverage { private final LinkedList<SampleData> samples; private final long sampleTimePeriod; // In milliseconds /** * Total number of slots covered by the average; sampleTimePeriod * maxSlots gives the total time covered by the * average. Virtual slots are included here, meaning slots with no recorded data, so it's not possible to increase * the total time covered by leaving slots empty. */ private final long maxSlots; // Number of slots private BigDecimal accumulator = BigDecimal.ZERO; private boolean full; public MovingAverage(long sampleTimePeriod, long maxSlots) { this.sampleTimePeriod = sampleTimePeriod; this.maxSlots = maxSlots; this.samples = new LinkedList<>(); } /** * Put the given value at the timeslot for the given timestamp, replacing the existing value, if any. * * @param timestamp * @param value */ public void put (long timestamp, long value) { this.update(timestamp, value, false); } /** * Add the given value with the specified timestamp to the average. If data already exists for the same time * slot, the value is added and the average updated. * * @param timestamp * @param value */ public void add (long timestamp, long value) { this.update(timestamp, value, true); } /** * Update the average history and accumulator with the given value at the given timestamp, adding or replacing the * value as specified. * * @param timestamp timestamp assigned to the value. * @param value value to store in the history for the moving average. * @param addInd true => add the value to any existing value; false => replace the current value with the new one. */ protected void update (long timestamp, long value, boolean addInd) { long newSlot = timestamp / sampleTimePeriod; if ( samples.isEmpty() ) { // // First value; simply store it. // samples.add(new SampleData(newSlot, value)); accumulator = accumulator.add(new BigDecimal(value)); } else { // // Determine where this value fits into the history (after the end, before the start, or in the middle). 
// long lastSlot = samples.getLast().slot; if ( newSlot == lastSlot ) { // // Determine the right amount to add to the existing sample. // long updateAmount; if ( addInd ) { updateAmount = value; } else { updateAmount = value - samples.getLast().value; } // // Update by the amount needed. // samples.getLast().value += updateAmount; accumulator = accumulator.add(new BigDecimal(updateAmount)); } else if ( newSlot > lastSlot ) { // // Add at the end and decay out old values at the start. // samples.addLast(new SampleData(newSlot, value)); BigDecimal decayAmount = this.decayOldValues(newSlot); accumulator = accumulator.subtract(decayAmount); accumulator = accumulator.add(BigDecimal.valueOf(value)); } else { long firstSlot = samples.getFirst().slot; if ( newSlot >= firstSlot ) { // // Linear search; start at the end and work back as the most obvious use case is one adding values // at or near the end. // int curPos; curPos = samples.size() - 1; Iterator<SampleData> iter = samples.descendingIterator(); SampleData curSample = iter.next(); while ( curSample.slot > newSlot ) { curSample = iter.next(); curPos--; } long updateAmount; if ( newSlot == curSample.slot ) { // // Found a matching slot; add to it. // if ( addInd ) { updateAmount = value; } else { updateAmount = value - curSample.value; } curSample.value += updateAmount; } else { // // Put the new sample after the last one checked. // updateAmount = value; samples.add(curPos + 1, new SampleData(newSlot, value)); } // // Add the amount of the update to the accumulator. // accumulator = accumulator.add(BigDecimal.valueOf(updateAmount)); } else { if ( newSlot > ( lastSlot - this.maxSlots ) ) { // // New value at the start. // accumulator = accumulator.add(BigDecimal.valueOf(value)); samples.addFirst(new SampleData(newSlot, value)); } else { // Ignore the value - it's before the time period allowed by the moving average } } } } } /** * Return the moving average of the values collected. Early in the moving average, the values will appear strongly * weighted since so few samples are included in the average. * * @return average of the collected values. */ public double getAverage () { if ( samples.size() == 0 ) { return 0.0; } return ( accumulator.doubleValue() / calcNumSlot() ); } /** * Return the moving average of the values collected while assuming 0 values for any slots not yet collected. * Early in the moving average, the values will appear weakly weighted since so few actual values are included in * the average and a large number of 0 values are assumed. * * @return average of the collected values and assumed 0 values to fill out the moving average period. */ public double getFullPeriodAverage () { return accumulator.doubleValue() / maxSlots; } /** * Decay old slots given the new ending slot number. * * @param endSlot the new ending slot number. * @return sum of the values removed from history. */ protected BigDecimal decayOldValues (long endSlot) { BigDecimal result = BigDecimal.ZERO; long newFirstSlot = ( endSlot - this.maxSlots ) + 1; while ( samples.getFirst().slot < newFirstSlot ) { full = true; result = result.add(BigDecimal.valueOf(samples.pop().value)); } return result; } /** * Calculate the number of effective slots used by the history, including "sparse" slots - i.e. slots for which no * data is actually stored. * * @return number of slots to which the accumulated data applies. 
*/ protected long calcNumSlot () { if ( full ) { return this.maxSlots; } return ( samples.getLast().slot - samples.getFirst().slot ) + 1; } protected class SampleData { public long slot; public long value; public SampleData(long slot, long value) { this.slot = slot; this.value = value; } } }
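A minimal usage sketch for the MovingAverage class above, assuming 1000 ms slots and a 60-slot window; the demo class name, timestamps, and values are illustrative and not part of the original source.

// Hypothetical demo class; not part of the original source.
public class MovingAverageDemo {
    public static void main(String[] args) {
        // A 60-slot moving average with 1000 ms per slot (values here are illustrative).
        MovingAverage avg = new MovingAverage(1000, 60);

        long now = System.currentTimeMillis();
        avg.add(now, 10);        // first sample in the current slot
        avg.add(now, 5);         // same slot: added, slot value becomes 15
        avg.put(now + 1000, 7);  // next slot: value is set, replacing any existing value

        // Average over the slots covered so far: (15 + 7) / 2 = 11.0
        System.out.println(avg.getAverage());

        // Average over the full 60-slot window, counting empty slots as 0: 22 / 60
        System.out.println(avg.getFullPeriodAverage());
    }
}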
package org.clinical3PO.common.controller; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.OutputStream; import java.util.ArrayList; import java.util.List; import javax.servlet.ServletContext; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.validation.Valid; import org.clinical3PO.common.environment.EnvironmentType; import org.clinical3PO.common.form.FEArffForm; import org.clinical3PO.common.form.FEMatrixform; import org.clinical3PO.common.form.FEMlFlexForm; import org.clinical3PO.common.form.FEUgeneForm; import org.clinical3PO.common.security.model.User; import org.clinical3PO.model.JobSearchDetails; import org.clinical3PO.model.JobSearchParameter; import org.clinical3PO.services.JobSearchService; import org.clinical3PO.services.constants.JobSearchConstants; import org.clinical3PO.services.constants.SearchOn; import org.clinical3PO.services.constants.SearchScript; import org.clinical3PO.services.dao.model.JobSearch; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.security.core.context.SecurityContextHolder; import org.springframework.stereotype.Controller; import org.springframework.ui.Model; import org.springframework.ui.ModelMap; import org.springframework.validation.BindingResult; import org.springframework.web.bind.annotation.ModelAttribute; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; @Controller @RequestMapping("/FExtraction") public class FeatureExtraction { @Autowired JobSearchService jobSearchService; private static final int BUFFER_SIZE = 4096; private @Autowired ServletContext servletContext; @Autowired private EnvironmentType envType; @RequestMapping(value = "/ML-FLEX", method = RequestMethod.POST) public String getMlFlexObservations(@Valid @ModelAttribute("mlflexForm") FEMlFlexForm mlflexform, BindingResult result, Model model) { if (result.hasErrors()) { return "FEMLFlex"; } assert (jobSearchService != null); User user = (User) SecurityContextHolder.getContext().getAuthentication().getPrincipal(); assert (user != null); List<JobSearchParameter> searchParameters = new ArrayList<JobSearchParameter>(9); searchParameters.add(new JobSearchParameter(JobSearchConstants.CLASSPROPERTY.getSearchKey(), mlflexform.getClassProperty(), 1)); searchParameters.add(new JobSearchParameter(JobSearchConstants.CLASSIFIER.getSearchKey(), mlflexform.getClassificationAlgorithm(),1)); searchParameters.add(new JobSearchParameter(JobSearchConstants.FOLDS.getSearchKey(), mlflexform.getFolds().toString(),1)); searchParameters.add(new JobSearchParameter(JobSearchConstants.NoITERATIONS.getSearchKey(), mlflexform.getNumberOfIterations().toString(),1)); searchParameters.add(new JobSearchParameter(JobSearchConstants.CLASSTIME.getSearchKey(), mlflexform.getClassTime(),1)); searchParameters.add(new JobSearchParameter(JobSearchConstants.STARTDATE.getSearchKey(), mlflexform.getStartDate(),1)); searchParameters.add(new JobSearchParameter(JobSearchConstants.ENDDATE.getSearchKey(), mlflexform.getEndDate(),1)); searchParameters.add(new JobSearchParameter(JobSearchConstants.STARTTIME.getSearchKey(), mlflexform.getStartTime(),1)); searchParameters.add(new JobSearchParameter(JobSearchConstants.ENDTIME.getSearchKey(), mlflexform.getEndTime(),1)); searchParameters.add(new 
JobSearchParameter(JobSearchConstants.NOCLASSPROPERTYFEATURE.getSearchKey(), mlflexform.getNoCPFeatures(),1)); JobSearch jobSearch = new JobSearch(); jobSearch.setSearchBy(user.getId()); jobSearch.setSearchParameters(searchParameters); JobSearchDetails jobSearchDetails = new JobSearchDetails(); jobSearchDetails.setSearchOn(SearchOn.FEMLFLEX.getSearchOn()); jobSearchDetails.setSearchType("FEMlFlex"); jobSearchDetails.setSearchParameters(jobSearch.getSearchParameters().get(0).getValue()); jobSearchDetails.setScriptType(SearchScript.FEMLFLEX.getSearchScript()); jobSearchDetails.setScriptParameters(jobSearchService.getScriptParameters(jobSearch)); jobSearchService.searchJob(jobSearch, jobSearchDetails); return "redirect:/MySearch/"; } @RequestMapping(value = "/ML-FLEX", method = RequestMethod.GET) public String setMlFlexObservations(ModelMap model) { model.addAttribute("mlflexForm", new FEMlFlexForm()); return "FEMLFlex"; } @RequestMapping(value = "/Ugene", method = RequestMethod.POST) public String getUgeneObservations(@Valid @ModelAttribute("feUgeneForm") FEUgeneForm feUgeneForm, BindingResult result,Model model) { if (result.hasErrors()) { return "FEUgene"; } assert (jobSearchService != null); User user = (User) SecurityContextHolder.getContext().getAuthentication().getPrincipal(); assert (user != null); List<JobSearchParameter> searchParameters = new ArrayList<JobSearchParameter>(9); searchParameters.add(new JobSearchParameter(JobSearchConstants.CLASSPROPERTY.getSearchKey(), feUgeneForm.getClassProperty(), 1)); searchParameters.add(new JobSearchParameter(JobSearchConstants.CLASSTIME.getSearchKey(), feUgeneForm.getClassTime(),1)); searchParameters.add(new JobSearchParameter(JobSearchConstants.STARTDATE.getSearchKey(), feUgeneForm.getStartDate(),1)); searchParameters.add(new JobSearchParameter(JobSearchConstants.ENDDATE.getSearchKey(), feUgeneForm.getEndDate(),1)); searchParameters.add(new JobSearchParameter(JobSearchConstants.STARTTIME.getSearchKey(), feUgeneForm.getStartTime(),1)); searchParameters.add(new JobSearchParameter(JobSearchConstants.ENDTIME.getSearchKey(), feUgeneForm.getEndTime(),1)); searchParameters.add(new JobSearchParameter(JobSearchConstants.CLASSIFIER.getSearchKey(), feUgeneForm.getClassificationAlgorithm(),1)); searchParameters.add(new JobSearchParameter(JobSearchConstants.FOLDS.getSearchKey(), feUgeneForm.getFolds().toString(),1)); searchParameters.add(new JobSearchParameter(JobSearchConstants.NoITERATIONS.getSearchKey(), feUgeneForm.getNumberOfIterations().toString(),1)); searchParameters.add(new JobSearchParameter(JobSearchConstants.UGENETYPE.getSearchKey(), feUgeneForm.getUgeneAlgorithm(),1)); searchParameters.add(new JobSearchParameter(JobSearchConstants.NOCLASSPROPERTYFEATURE.getSearchKey(), feUgeneForm.getNoCPFeatures(),1)); JobSearch jobSearch = new JobSearch(); jobSearch.setSearchBy(user.getId()); jobSearch.setSearchParameters(searchParameters); JobSearchDetails jobSearchDetails = new JobSearchDetails(); jobSearchDetails.setSearchOn(SearchOn.FEUGENE.getSearchOn()); jobSearchDetails.setSearchType("FEUGENE"); jobSearchDetails.setSearchParameters(jobSearch.getSearchParameters().get(0).getValue()); jobSearchDetails.setScriptType(SearchScript.FEUGENE.getSearchScript()); jobSearchDetails.setScriptParameters(jobSearchService.getScriptParameters(jobSearch)); jobSearchService.searchJob(jobSearch, jobSearchDetails); return "redirect:/MySearch/"; } @RequestMapping(value = "/Ugene", method = RequestMethod.GET) public String setUgeneObservations(ModelMap model) { 
model.addAttribute("feUgeneForm", new FEUgeneForm()); return "FEUgene"; } @RequestMapping(value = "/Arff", method = RequestMethod.POST) public String getArffObservations(@Valid @ModelAttribute("feArffForm") FEArffForm feArffForm, BindingResult result,Model model) { if (result.hasErrors()) { return "FEArff"; } assert (jobSearchService != null); User user = (User) SecurityContextHolder.getContext().getAuthentication().getPrincipal(); assert (user != null); List<JobSearchParameter> searchParameters = new ArrayList<JobSearchParameter>(9); searchParameters.add(new JobSearchParameter(JobSearchConstants.CLASSPROPERTY.getSearchKey(), feArffForm.getClassProperty(), 1)); searchParameters.add(new JobSearchParameter(JobSearchConstants.CLASSTIME.getSearchKey(), feArffForm.getClassTime(),1)); searchParameters.add(new JobSearchParameter(JobSearchConstants.STARTDATE.getSearchKey(), feArffForm.getStartDate(),1)); searchParameters.add(new JobSearchParameter(JobSearchConstants.ENDDATE.getSearchKey(), feArffForm.getEndDate(),1)); searchParameters.add(new JobSearchParameter(JobSearchConstants.STARTTIME.getSearchKey(), feArffForm.getStartTime(),1)); searchParameters.add(new JobSearchParameter(JobSearchConstants.ENDTIME.getSearchKey(), feArffForm.getEndTime(),1)); searchParameters.add(new JobSearchParameter(JobSearchConstants.NOCLASSPROPERTYFEATURE.getSearchKey(), feArffForm.getNoCPFeatures(),1)); JobSearch jobSearch = new JobSearch(); jobSearch.setSearchBy(user.getId()); jobSearch.setSearchParameters(searchParameters); JobSearchDetails jobSearchDetails = new JobSearchDetails(); jobSearchDetails.setSearchOn(SearchOn.FEARFF.getSearchOn()); jobSearchDetails.setSearchType("FEARFF"); jobSearchDetails.setSearchParameters(jobSearch.getSearchParameters().get(0).getValue()); jobSearchDetails.setScriptType(SearchScript.FEARFF.getSearchScript()); jobSearchDetails.setScriptParameters(jobSearchService.getScriptParameters(jobSearch)); jobSearchService.searchJob(jobSearch, jobSearchDetails); return "redirect:/MySearch/"; } @RequestMapping(value = "/Arff", method = RequestMethod.GET) public String setArffObservations(ModelMap model) { model.addAttribute("feArffForm", new FEArffForm()); return "FEArff"; } @RequestMapping(value = "/UgeneMatrix", method = RequestMethod.POST) public String getUgeneMatrixObservations(@Valid @ModelAttribute("feMatrixForm") FEMatrixform feUgeneForm, BindingResult result,Model model) { if (result.hasErrors()) { return "UgeneMatrix"; } assert (jobSearchService != null); User user = (User) SecurityContextHolder.getContext().getAuthentication().getPrincipal(); assert (user != null); List<JobSearchParameter> searchParameters = new ArrayList<JobSearchParameter>(9); searchParameters.add(new JobSearchParameter(JobSearchConstants.CLASSPROPERTY.getSearchKey(), feUgeneForm.getClassProperty(), 1)); searchParameters.add(new JobSearchParameter(JobSearchConstants.CLASSTIME.getSearchKey(), feUgeneForm.getClassTime(),1)); searchParameters.add(new JobSearchParameter(JobSearchConstants.STARTDATE.getSearchKey(), feUgeneForm.getStartDate(),1)); searchParameters.add(new JobSearchParameter(JobSearchConstants.ENDDATE.getSearchKey(), feUgeneForm.getEndDate(),1)); searchParameters.add(new JobSearchParameter(JobSearchConstants.STARTTIME.getSearchKey(), feUgeneForm.getStartTime(),1)); searchParameters.add(new JobSearchParameter(JobSearchConstants.ENDTIME.getSearchKey(), feUgeneForm.getEndTime(),1)); searchParameters.add(new JobSearchParameter(JobSearchConstants.NOCLASSPROPERTYFEATURE.getSearchKey(), 
feUgeneForm.getNoCPFeatures(),1)); searchParameters.add(new JobSearchParameter(JobSearchConstants.UGENETYPE.getSearchKey(), feUgeneForm.getUgeneAlgorithm(),1)); JobSearch jobSearch = new JobSearch(); jobSearch.setSearchBy(user.getId()); jobSearch.setSearchParameters(searchParameters); JobSearchDetails jobSearchDetails = new JobSearchDetails(); jobSearchDetails.setSearchOn(SearchOn.UGENEMATRIX.getSearchOn()); jobSearchDetails.setSearchType("UGENEMATRIX"); jobSearchDetails.setSearchParameters(jobSearch.getSearchParameters().get(0).getValue()); jobSearchDetails.setScriptType(SearchScript.UGENEMATRIX.getSearchScript()); jobSearchDetails.setScriptParameters(jobSearchService.getScriptParameters(jobSearch)); jobSearchService.searchJob(jobSearch, jobSearchDetails); return "redirect:/MySearch/"; } @RequestMapping(value = "/UgeneMatrix", method = RequestMethod.GET) public String setUgeneMatrixObservations(ModelMap model) { model.addAttribute("feMatrixForm", new FEUgeneForm()); return "UgeneMatrix"; } @RequestMapping(value="/DownloadArffReport/{id}", method = RequestMethod.GET) public void doDownloadArff(@PathVariable("id") String reportID, HttpServletRequest request, HttpServletResponse response) throws IOException { String fileName = "feArffFile"+ reportID + ".tar.gz"; String fullPath = jobSearchService.getHadoopLocalOutputDirectory() + File.separator + fileName; File downloadFile = new File(fullPath); FileInputStream inputStream = new FileInputStream(downloadFile); String mimeType = servletContext.getMimeType(fullPath); if (mimeType == null) { // set to binary type if MIME mapping not found mimeType = "application/octet-stream"; } response.setContentType(mimeType); response.setContentLength((int) downloadFile.length()); String headerKey = "Content-Disposition"; String headerValue = String.format("attachment; filename=\"%s\"", downloadFile.getName()); response.setHeader(headerKey, headerValue); OutputStream outStream = response.getOutputStream(); byte[] buffer = new byte[BUFFER_SIZE]; int bytesRead = -1; while ((bytesRead = inputStream.read(buffer)) != -1) { outStream.write(buffer, 0, bytesRead); } inputStream.close(); outStream.close(); } @RequestMapping(value="/DownloadUgeneReport/{id}", method = RequestMethod.GET) public void doDownload(@PathVariable("id") String reportID, HttpServletRequest request, HttpServletResponse response) throws IOException { String fileName = "feUgeneReport"+ reportID + ".tar.gz"; String fullPath = jobSearchService.getAppDataDirectory() + File.separator + "ugene"+ File.separator + "output" + File.separator + fileName; File downloadFile = new File(fullPath); FileInputStream inputStream = new FileInputStream(downloadFile); String mimeType = servletContext.getMimeType(fullPath); if (mimeType == null) { // set to binary type if MIME mapping not found mimeType = "application/octet-stream"; } response.setContentType(mimeType); response.setContentLength((int) downloadFile.length()); String headerKey = "Content-Disposition"; String headerValue = String.format("attachment; filename=\"%s\"", downloadFile.getName()); response.setHeader(headerKey, headerValue); OutputStream outStream = response.getOutputStream(); byte[] buffer = new byte[BUFFER_SIZE]; int bytesRead = -1; while ((bytesRead = inputStream.read(buffer)) != -1) { outStream.write(buffer, 0, bytesRead); } inputStream.close(); outStream.close(); } }
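The two download handlers above copy the archive to the response with manually closed streams, which can leak the file handle if the copy throws. Below is a sketch of the same copy step using try-with-resources; the helper name is hypothetical and it reuses the controller's existing servletContext and BUFFER_SIZE fields.

// Hypothetical helper illustrating the download step with try-with-resources,
// so both streams are closed even if the copy fails. Not part of the original controller.
private void streamFile(File downloadFile, HttpServletResponse response) throws IOException {
    String mimeType = servletContext.getMimeType(downloadFile.getAbsolutePath());
    if (mimeType == null) {
        mimeType = "application/octet-stream"; // fall back to binary when no MIME mapping exists
    }
    response.setContentType(mimeType);
    response.setContentLength((int) downloadFile.length());
    response.setHeader("Content-Disposition",
            String.format("attachment; filename=\"%s\"", downloadFile.getName()));

    try (FileInputStream in = new FileInputStream(downloadFile);
         OutputStream out = response.getOutputStream()) {
        byte[] buffer = new byte[BUFFER_SIZE];
        int bytesRead;
        while ((bytesRead = in.read(buffer)) != -1) {
            out.write(buffer, 0, bytesRead);
        }
    }
}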
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.beam.runners.spark.structuredstreaming.translation.batch; import static org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions.checkState; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.beam.runners.core.construction.ParDoTranslation; import org.apache.beam.runners.spark.structuredstreaming.metrics.MetricsAccumulator; import org.apache.beam.runners.spark.structuredstreaming.metrics.MetricsContainerStepMapAccumulator; import org.apache.beam.runners.spark.structuredstreaming.translation.TransformTranslator; import org.apache.beam.runners.spark.structuredstreaming.translation.TranslationContext; import org.apache.beam.runners.spark.structuredstreaming.translation.helpers.CoderHelpers; import org.apache.beam.runners.spark.structuredstreaming.translation.helpers.EncoderHelpers; import org.apache.beam.runners.spark.structuredstreaming.translation.helpers.MultiOuputCoder; import org.apache.beam.runners.spark.structuredstreaming.translation.helpers.SideInputBroadcast; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.SerializableCoder; import org.apache.beam.sdk.transforms.DoFn; import org.apache.beam.sdk.transforms.DoFnSchemaInformation; import org.apache.beam.sdk.transforms.PTransform; import org.apache.beam.sdk.transforms.reflect.DoFnSignatures; import org.apache.beam.sdk.transforms.windowing.BoundedWindow; import org.apache.beam.sdk.util.WindowedValue; import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.PCollectionTuple; import org.apache.beam.sdk.values.PCollectionView; import org.apache.beam.sdk.values.PValue; import org.apache.beam.sdk.values.TupleTag; import org.apache.beam.sdk.values.WindowingStrategy; import org.apache.spark.api.java.JavaSparkContext; import org.apache.spark.api.java.function.FilterFunction; import org.apache.spark.api.java.function.MapFunction; import org.apache.spark.sql.Dataset; import scala.Tuple2; /** * TODO: Add support for state and timers. 
* * @param <InputT> * @param <OutputT> */ class ParDoTranslatorBatch<InputT, OutputT> implements TransformTranslator<PTransform<PCollection<InputT>, PCollectionTuple>> { @Override public void translateTransform( PTransform<PCollection<InputT>, PCollectionTuple> transform, TranslationContext context) { String stepName = context.getCurrentTransform().getFullName(); // Check for not supported advanced features // TODO: add support of Splittable DoFn DoFn<InputT, OutputT> doFn = getDoFn(context); checkState( !DoFnSignatures.isSplittable(doFn), "Not expected to directly translate splittable DoFn, should have been overridden: %s", doFn); // TODO: add support of states and timers checkState( !DoFnSignatures.isStateful(doFn), "States and timers are not supported for the moment."); checkState( !DoFnSignatures.requiresTimeSortedInput(doFn), "@RequiresTimeSortedInput is not " + "supported for the moment"); DoFnSchemaInformation doFnSchemaInformation = ParDoTranslation.getSchemaInformation(context.getCurrentTransform()); // Init main variables PValue input = context.getInput(); Dataset<WindowedValue<InputT>> inputDataSet = context.getDataset(input); Map<TupleTag<?>, PValue> outputs = context.getOutputs(); TupleTag<?> mainOutputTag = getTupleTag(context); List<TupleTag<?>> outputTags = new ArrayList<>(outputs.keySet()); WindowingStrategy<?, ?> windowingStrategy = ((PCollection<InputT>) input).getWindowingStrategy(); Coder<InputT> inputCoder = ((PCollection<InputT>) input).getCoder(); Coder<? extends BoundedWindow> windowCoder = windowingStrategy.getWindowFn().windowCoder(); // construct a map from side input to WindowingStrategy so that // the DoFn runner can map main-input windows to side input windows List<PCollectionView<?>> sideInputs = getSideInputs(context); Map<PCollectionView<?>, WindowingStrategy<?, ?>> sideInputStrategies = new HashMap<>(); for (PCollectionView<?> sideInput : sideInputs) { sideInputStrategies.put(sideInput, sideInput.getPCollection().getWindowingStrategy()); } SideInputBroadcast broadcastStateData = createBroadcastSideInputs(sideInputs, context); Map<TupleTag<?>, Coder<?>> outputCoderMap = context.getOutputCoders(); MetricsContainerStepMapAccumulator metricsAccum = MetricsAccumulator.getInstance(); List<TupleTag<?>> additionalOutputTags = new ArrayList<>(); for (TupleTag<?> tag : outputTags) { if (!tag.equals(mainOutputTag)) { additionalOutputTags.add(tag); } } Map<String, PCollectionView<?>> sideInputMapping = ParDoTranslation.getSideInputMapping(context.getCurrentTransform()); @SuppressWarnings("unchecked") DoFnFunction<InputT, OutputT> doFnWrapper = new DoFnFunction( metricsAccum, stepName, doFn, windowingStrategy, sideInputStrategies, context.getSerializableOptions(), additionalOutputTags, mainOutputTag, inputCoder, outputCoderMap, broadcastStateData, doFnSchemaInformation, sideInputMapping); MultiOuputCoder multipleOutputCoder = MultiOuputCoder.of(SerializableCoder.of(TupleTag.class), outputCoderMap, windowCoder); Dataset<Tuple2<TupleTag<?>, WindowedValue<?>>> allOutputs = inputDataSet.mapPartitions(doFnWrapper, EncoderHelpers.fromBeamCoder(multipleOutputCoder)); if (outputs.entrySet().size() > 1) { allOutputs.persist(); for (Map.Entry<TupleTag<?>, PValue> output : outputs.entrySet()) { pruneOutputFilteredByTag(context, allOutputs, output, windowCoder); } } else { Coder<OutputT> outputCoder = ((PCollection<OutputT>) outputs.get(mainOutputTag)).getCoder(); Coder<WindowedValue<?>> windowedValueCoder = (Coder<WindowedValue<?>>) (Coder<?>) 
WindowedValue.getFullCoder(outputCoder, windowCoder); Dataset<WindowedValue<?>> outputDataset = allOutputs.map( (MapFunction<Tuple2<TupleTag<?>, WindowedValue<?>>, WindowedValue<?>>) value -> value._2, EncoderHelpers.fromBeamCoder(windowedValueCoder)); context.putDatasetWildcard(outputs.entrySet().iterator().next().getValue(), outputDataset); } } private static SideInputBroadcast createBroadcastSideInputs( List<PCollectionView<?>> sideInputs, TranslationContext context) { JavaSparkContext jsc = JavaSparkContext.fromSparkContext(context.getSparkSession().sparkContext()); SideInputBroadcast sideInputBroadcast = new SideInputBroadcast(); for (PCollectionView<?> sideInput : sideInputs) { Coder<? extends BoundedWindow> windowCoder = sideInput.getPCollection().getWindowingStrategy().getWindowFn().windowCoder(); Coder<WindowedValue<?>> windowedValueCoder = (Coder<WindowedValue<?>>) (Coder<?>) WindowedValue.getFullCoder(sideInput.getPCollection().getCoder(), windowCoder); Dataset<WindowedValue<?>> broadcastSet = context.getSideInputDataSet(sideInput); List<WindowedValue<?>> valuesList = broadcastSet.collectAsList(); List<byte[]> codedValues = new ArrayList<>(); for (WindowedValue<?> v : valuesList) { codedValues.add(CoderHelpers.toByteArray(v, windowedValueCoder)); } sideInputBroadcast.add( sideInput.getTagInternal().getId(), jsc.broadcast(codedValues), windowedValueCoder); } return sideInputBroadcast; } private List<PCollectionView<?>> getSideInputs(TranslationContext context) { List<PCollectionView<?>> sideInputs; try { sideInputs = ParDoTranslation.getSideInputs(context.getCurrentTransform()); } catch (IOException e) { throw new RuntimeException(e); } return sideInputs; } private TupleTag<?> getTupleTag(TranslationContext context) { TupleTag<?> mainOutputTag; try { mainOutputTag = ParDoTranslation.getMainOutputTag(context.getCurrentTransform()); } catch (IOException e) { throw new RuntimeException(e); } return mainOutputTag; } @SuppressWarnings("unchecked") private DoFn<InputT, OutputT> getDoFn(TranslationContext context) { DoFn<InputT, OutputT> doFn; try { doFn = (DoFn<InputT, OutputT>) ParDoTranslation.getDoFn(context.getCurrentTransform()); } catch (IOException e) { throw new RuntimeException(e); } return doFn; } private void pruneOutputFilteredByTag( TranslationContext context, Dataset<Tuple2<TupleTag<?>, WindowedValue<?>>> allOutputs, Map.Entry<TupleTag<?>, PValue> output, Coder<? extends BoundedWindow> windowCoder) { Dataset<Tuple2<TupleTag<?>, WindowedValue<?>>> filteredDataset = allOutputs.filter(new DoFnFilterFunction(output.getKey())); Coder<WindowedValue<?>> windowedValueCoder = (Coder<WindowedValue<?>>) (Coder<?>) WindowedValue.getFullCoder( ((PCollection<OutputT>) output.getValue()).getCoder(), windowCoder); Dataset<WindowedValue<?>> outputDataset = filteredDataset.map( (MapFunction<Tuple2<TupleTag<?>, WindowedValue<?>>, WindowedValue<?>>) value -> value._2, EncoderHelpers.fromBeamCoder(windowedValueCoder)); context.putDatasetWildcard(output.getValue(), outputDataset); } static class DoFnFilterFunction implements FilterFunction<Tuple2<TupleTag<?>, WindowedValue<?>>> { private final TupleTag<?> key; DoFnFilterFunction(TupleTag<?> key) { this.key = key; } @Override public boolean call(Tuple2<TupleTag<?>, WindowedValue<?>> value) { return value._1.equals(key); } } }
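For context, a minimal pipeline-side sketch of the kind of multi-output ParDo this translator handles, with one main output tag and one additional tag; the tags, element type, and even/odd routing are illustrative, and runner configuration is left to the pipeline options.

// Illustrative multi-output ParDo: one main tag plus one additional tag, the shape
// that ParDoTranslatorBatch prunes per tag when more than one output is present.
import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.transforms.Create;
import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.PCollectionTuple;
import org.apache.beam.sdk.values.TupleTag;
import org.apache.beam.sdk.values.TupleTagList;

public class MultiOutputParDoExample {
  public static void main(String[] args) {
    TupleTag<Integer> evens = new TupleTag<Integer>() {};
    TupleTag<Integer> odds = new TupleTag<Integer>() {};

    Pipeline p = Pipeline.create(PipelineOptionsFactory.fromArgs(args).create());

    PCollectionTuple outputs =
        p.apply(Create.of(1, 2, 3, 4, 5))
            .apply(
                ParDo.of(
                        new DoFn<Integer, Integer>() {
                          @ProcessElement
                          public void processElement(@Element Integer n, MultiOutputReceiver out) {
                            // Route each element to the matching tag.
                            out.get(n % 2 == 0 ? evens : odds).output(n);
                          }
                        })
                    .withOutputTags(evens, TupleTagList.of(odds)));

    PCollection<Integer> evenNumbers = outputs.get(evens);
    PCollection<Integer> oddNumbers = outputs.get(odds);

    p.run().waitUntilFinish();
  }
}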
// Portions copyright 2002, Google, Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.alorma.github.inapp; // This code was converted from code at http://iharder.sourceforge.net/base64/ // Lots of extraneous features were removed. /* The original code said: * <p> * I am placing this code in the Public Domain. Do with it as you will. * This software comes with no guarantees or warranties but with * plenty of well-wishing instead! * Please visit * <a href="http://iharder.net/xmlizable">http://iharder.net/xmlizable</a> * periodically to check for updates or to contribute improvements. * </p> * * @author Robert Harder * @author [email protected] * @version 1.3 */ /** * Base64 converter class. This code is not a complete MIME encoder; * it simply converts binary data to base64 data and back. * <p/> * <p>Note {@link CharBase64} is a GWT-compatible implementation of this * class. */ public class Base64 { /** * Specify encoding (value is {@code true}). */ public final static boolean ENCODE = true; /** * Specify decoding (value is {@code false}). */ public final static boolean DECODE = false; /** * The equals sign (=) as a byte. */ private final static byte EQUALS_SIGN = (byte) '='; /** * The new line character (\n) as a byte. */ private final static byte NEW_LINE = (byte) '\n'; /** * The 64 valid Base64 values. */ private final static byte[] ALPHABET = {(byte) 'A', (byte) 'B', (byte) 'C', (byte) 'D', (byte) 'E', (byte) 'F', (byte) 'G', (byte) 'H', (byte) 'I', (byte) 'J', (byte) 'K', (byte) 'L', (byte) 'M', (byte) 'N', (byte) 'O', (byte) 'P', (byte) 'Q', (byte) 'R', (byte) 'S', (byte) 'T', (byte) 'U', (byte) 'V', (byte) 'W', (byte) 'X', (byte) 'Y', (byte) 'Z', (byte) 'a', (byte) 'b', (byte) 'c', (byte) 'd', (byte) 'e', (byte) 'f', (byte) 'g', (byte) 'h', (byte) 'i', (byte) 'j', (byte) 'k', (byte) 'l', (byte) 'm', (byte) 'n', (byte) 'o', (byte) 'p', (byte) 'q', (byte) 'r', (byte) 's', (byte) 't', (byte) 'u', (byte) 'v', (byte) 'w', (byte) 'x', (byte) 'y', (byte) 'z', (byte) '0', (byte) '1', (byte) '2', (byte) '3', (byte) '4', (byte) '5', (byte) '6', (byte) '7', (byte) '8', (byte) '9', (byte) '+', (byte) '/'}; /** * The 64 valid web safe Base64 values. 
*/ private final static byte[] WEBSAFE_ALPHABET = {(byte) 'A', (byte) 'B', (byte) 'C', (byte) 'D', (byte) 'E', (byte) 'F', (byte) 'G', (byte) 'H', (byte) 'I', (byte) 'J', (byte) 'K', (byte) 'L', (byte) 'M', (byte) 'N', (byte) 'O', (byte) 'P', (byte) 'Q', (byte) 'R', (byte) 'S', (byte) 'T', (byte) 'U', (byte) 'V', (byte) 'W', (byte) 'X', (byte) 'Y', (byte) 'Z', (byte) 'a', (byte) 'b', (byte) 'c', (byte) 'd', (byte) 'e', (byte) 'f', (byte) 'g', (byte) 'h', (byte) 'i', (byte) 'j', (byte) 'k', (byte) 'l', (byte) 'm', (byte) 'n', (byte) 'o', (byte) 'p', (byte) 'q', (byte) 'r', (byte) 's', (byte) 't', (byte) 'u', (byte) 'v', (byte) 'w', (byte) 'x', (byte) 'y', (byte) 'z', (byte) '0', (byte) '1', (byte) '2', (byte) '3', (byte) '4', (byte) '5', (byte) '6', (byte) '7', (byte) '8', (byte) '9', (byte) '-', (byte) '_'}; /** * Translates a Base64 value to either its 6-bit reconstruction value * or a negative number indicating some other meaning. */ private final static byte[] DECODABET = {-9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 0 - 8 -5, -5, // Whitespace: Tab and Linefeed -9, -9, // Decimal 11 - 12 -5, // Whitespace: Carriage Return -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 14 - 26 -9, -9, -9, -9, -9, // Decimal 27 - 31 -5, // Whitespace: Space -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 33 - 42 62, // Plus sign at decimal 43 -9, -9, -9, // Decimal 44 - 46 63, // Slash at decimal 47 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, // Numbers zero through nine -9, -9, -9, // Decimal 58 - 60 -1, // Equals sign at decimal 61 -9, -9, -9, // Decimal 62 - 64 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, // Letters 'A' through 'N' 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, // Letters 'O' through 'Z' -9, -9, -9, -9, -9, -9, // Decimal 91 - 96 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, // Letters 'a' through 'm' 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, // Letters 'n' through 'z' -9, -9, -9, -9, -9 // Decimal 123 - 127 /* ,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 128 - 139 -9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 140 - 152 -9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 153 - 165 -9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 166 - 178 -9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 179 - 191 -9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 192 - 204 -9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 205 - 217 -9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 218 - 230 -9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 231 - 243 -9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9 // Decimal 244 - 255 */ }; /** * The web safe decodabet */ private final static byte[] WEBSAFE_DECODABET = {-9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 0 - 8 -5, -5, // Whitespace: Tab and Linefeed -9, -9, // Decimal 11 - 12 -5, // Whitespace: Carriage Return -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 14 - 26 -9, -9, -9, -9, -9, // Decimal 27 - 31 -5, // Whitespace: Space -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 33 - 44 62, // Dash '-' sign at decimal 45 -9, -9, // Decimal 46-47 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, // Numbers zero through nine -9, -9, -9, // Decimal 58 - 60 -1, // Equals sign at decimal 61 -9, -9, -9, // Decimal 62 - 64 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, // Letters 'A' through 'N' 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, // Letters 'O' through 'Z' -9, -9, -9, -9, // Decimal 91-94 63, // Underscore '_' at decimal 95 -9, // Decimal 96 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, // Letters 
'a' through 'm' 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, // Letters 'n' through 'z' -9, -9, -9, -9, -9 // Decimal 123 - 127 /* ,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 128 - 139 -9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 140 - 152 -9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 153 - 165 -9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 166 - 178 -9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 179 - 191 -9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 192 - 204 -9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 205 - 217 -9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 218 - 230 -9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 231 - 243 -9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9 // Decimal 244 - 255 */ }; // Indicates white space in encoding private final static byte WHITE_SPACE_ENC = -5; // Indicates equals sign in encoding private final static byte EQUALS_SIGN_ENC = -1; /** * Defeats instantiation. */ private Base64() { } /* ******** E N C O D I N G M E T H O D S ******** */ /** * Encodes up to three bytes of the array <var>source</var> * and writes the resulting four Base64 bytes to <var>destination</var>. * The source and destination arrays can be manipulated * anywhere along their length by specifying * <var>srcOffset</var> and <var>destOffset</var>. * This method does not check to make sure your arrays * are large enough to accommodate <var>srcOffset</var> + 3 for * the <var>source</var> array or <var>destOffset</var> + 4 for * the <var>destination</var> array. * The actual number of significant bytes in your array is * given by <var>numSigBytes</var>. * * @param source the array to convert * @param srcOffset the index where conversion begins * @param numSigBytes the number of significant bytes in your array * @param destination the array to hold the conversion * @param destOffset the index where output will be put * @param alphabet is the encoding alphabet * @return the <var>destination</var> array * @since 1.3 */ private static byte[] encode3to4(byte[] source, int srcOffset, int numSigBytes, byte[] destination, int destOffset, byte[] alphabet) { // 1 2 3 // 01234567890123456789012345678901 Bit position // --------000000001111111122222222 Array position from threeBytes // --------| || || || | Six bit groups to index alphabet // >>18 >>12 >> 6 >> 0 Right shift necessary // 0x3f 0x3f 0x3f Additional AND // Create buffer with zero-padding if there are only one or two // significant bytes passed in the array. // We have to shift left 24 in order to flush out the 1's that appear // when Java treats a value as negative that is cast from a byte to an int. int inBuff = (numSigBytes > 0 ? ((source[srcOffset] << 24) >>> 8) : 0) | (numSigBytes > 1 ? ((source[srcOffset + 1] << 24) >>> 16) : 0) | (numSigBytes > 2 ? 
((source[srcOffset + 2] << 24) >>> 24) : 0); switch (numSigBytes) { case 3: destination[destOffset] = alphabet[(inBuff >>> 18)]; destination[destOffset + 1] = alphabet[(inBuff >>> 12) & 0x3f]; destination[destOffset + 2] = alphabet[(inBuff >>> 6) & 0x3f]; destination[destOffset + 3] = alphabet[(inBuff) & 0x3f]; return destination; case 2: destination[destOffset] = alphabet[(inBuff >>> 18)]; destination[destOffset + 1] = alphabet[(inBuff >>> 12) & 0x3f]; destination[destOffset + 2] = alphabet[(inBuff >>> 6) & 0x3f]; destination[destOffset + 3] = EQUALS_SIGN; return destination; case 1: destination[destOffset] = alphabet[(inBuff >>> 18)]; destination[destOffset + 1] = alphabet[(inBuff >>> 12) & 0x3f]; destination[destOffset + 2] = EQUALS_SIGN; destination[destOffset + 3] = EQUALS_SIGN; return destination; default: return destination; } // end switch } // end encode3to4 /** * Encodes a byte array into Base64 notation. * Equivalent to calling * {@code encodeBytes(source, 0, source.length)} * * @param source The data to convert * @since 1.4 */ public static String encode(byte[] source) { return encode(source, 0, source.length, ALPHABET, true); } /** * Encodes a byte array into web safe Base64 notation. * * @param source The data to convert * @param doPadding is {@code true} to pad result with '=' chars * if it does not fall on 3 byte boundaries */ public static String encodeWebSafe(byte[] source, boolean doPadding) { return encode(source, 0, source.length, WEBSAFE_ALPHABET, doPadding); } /** * Encodes a byte array into Base64 notation. * * @param source the data to convert * @param off offset in array where conversion should begin * @param len length of data to convert * @param alphabet the encoding alphabet * @param doPadding is {@code true} to pad result with '=' chars * if it does not fall on 3 byte boundaries * @since 1.4 */ public static String encode(byte[] source, int off, int len, byte[] alphabet, boolean doPadding) { byte[] outBuff = encode(source, off, len, alphabet, Integer.MAX_VALUE); int outLen = outBuff.length; // If doPadding is false, set length to truncate '=' // padding characters while (doPadding == false && outLen > 0) { if (outBuff[outLen - 1] != '=') { break; } outLen -= 1; } return new String(outBuff, 0, outLen); } /** * Encodes a byte array into Base64 notation. * * @param source the data to convert * @param off offset in array where conversion should begin * @param len length of data to convert * @param alphabet is the encoding alphabet * @param maxLineLength maximum length of one line. 
* @return the BASE64-encoded byte array */ public static byte[] encode(byte[] source, int off, int len, byte[] alphabet, int maxLineLength) { int lenDiv3 = (len + 2) / 3; // ceil(len / 3) int len43 = lenDiv3 * 4; byte[] outBuff = new byte[len43 // Main 4:3 + (len43 / maxLineLength)]; // New lines int d = 0; int e = 0; int len2 = len - 2; int lineLength = 0; for (; d < len2; d += 3, e += 4) { // The following block of code is the same as // encode3to4( source, d + off, 3, outBuff, e, alphabet ); // but inlined for faster encoding (~20% improvement) int inBuff = ((source[d + off] << 24) >>> 8) | ((source[d + 1 + off] << 24) >>> 16) | ((source[d + 2 + off] << 24) >>> 24); outBuff[e] = alphabet[(inBuff >>> 18)]; outBuff[e + 1] = alphabet[(inBuff >>> 12) & 0x3f]; outBuff[e + 2] = alphabet[(inBuff >>> 6) & 0x3f]; outBuff[e + 3] = alphabet[(inBuff) & 0x3f]; lineLength += 4; if (lineLength == maxLineLength) { outBuff[e + 4] = NEW_LINE; e++; lineLength = 0; } // end if: end of line } // end for: each piece of array if (d < len) { encode3to4(source, d + off, len - d, outBuff, e, alphabet); lineLength += 4; if (lineLength == maxLineLength) { // Add a last newline outBuff[e + 4] = NEW_LINE; e++; } e += 4; } assert (e == outBuff.length); return outBuff; } /* ******** D E C O D I N G M E T H O D S ******** */ /** * Decodes four bytes from array <var>source</var> * and writes the resulting bytes (up to three of them) * to <var>destination</var>. * The source and destination arrays can be manipulated * anywhere along their length by specifying * <var>srcOffset</var> and <var>destOffset</var>. * This method does not check to make sure your arrays * are large enough to accommodate <var>srcOffset</var> + 4 for * the <var>source</var> array or <var>destOffset</var> + 3 for * the <var>destination</var> array. * This method returns the actual number of bytes that * were converted from the Base64 encoding. * * @param source the array to convert * @param srcOffset the index where conversion begins * @param destination the array to hold the conversion * @param destOffset the index where output will be put * @param decodabet the decodabet for decoding Base64 content * @return the number of decoded bytes converted * @since 1.3 */ private static int decode4to3(byte[] source, int srcOffset, byte[] destination, int destOffset, byte[] decodabet) { // Example: Dk== if (source[srcOffset + 2] == EQUALS_SIGN) { int outBuff = ((decodabet[source[srcOffset]] << 24) >>> 6) | ((decodabet[source[srcOffset + 1]] << 24) >>> 12); destination[destOffset] = (byte) (outBuff >>> 16); return 1; } else if (source[srcOffset + 3] == EQUALS_SIGN) { // Example: DkL= int outBuff = ((decodabet[source[srcOffset]] << 24) >>> 6) | ((decodabet[source[srcOffset + 1]] << 24) >>> 12) | ((decodabet[source[srcOffset + 2]] << 24) >>> 18); destination[destOffset] = (byte) (outBuff >>> 16); destination[destOffset + 1] = (byte) (outBuff >>> 8); return 2; } else { // Example: DkLE int outBuff = ((decodabet[source[srcOffset]] << 24) >>> 6) | ((decodabet[source[srcOffset + 1]] << 24) >>> 12) | ((decodabet[source[srcOffset + 2]] << 24) >>> 18) | ((decodabet[source[srcOffset + 3]] << 24) >>> 24); destination[destOffset] = (byte) (outBuff >> 16); destination[destOffset + 1] = (byte) (outBuff >> 8); destination[destOffset + 2] = (byte) (outBuff); return 3; } } // end decodeToBytes /** * Decodes data from Base64 notation. 
* * @param s the string to decode (decoded in default encoding) * @return the decoded data * @since 1.4 */ public static byte[] decode(String s) throws Base64DecoderException { byte[] bytes = s.getBytes(); return decode(bytes, 0, bytes.length); } /** * Decodes data from web safe Base64 notation. * Web safe encoding uses '-' instead of '+', '_' instead of '/' * * @param s the string to decode (decoded in default encoding) * @return the decoded data */ public static byte[] decodeWebSafe(String s) throws Base64DecoderException { byte[] bytes = s.getBytes(); return decodeWebSafe(bytes, 0, bytes.length); } /** * Decodes Base64 content in byte array format and returns * the decoded byte array. * * @param source The Base64 encoded data * @return decoded data * @throws Base64DecoderException * @since 1.3 */ public static byte[] decode(byte[] source) throws Base64DecoderException { return decode(source, 0, source.length); } /** * Decodes web safe Base64 content in byte array format and returns * the decoded data. * Web safe encoding uses '-' instead of '+', '_' instead of '/' * * @param source the string to decode (decoded in default encoding) * @return the decoded data */ public static byte[] decodeWebSafe(byte[] source) throws Base64DecoderException { return decodeWebSafe(source, 0, source.length); } /** * Decodes Base64 content in byte array format and returns * the decoded byte array. * * @param source the Base64 encoded data * @param off the offset of where to begin decoding * @param len the length of characters to decode * @return decoded data * @throws Base64DecoderException * @since 1.3 */ public static byte[] decode(byte[] source, int off, int len) throws Base64DecoderException { return decode(source, off, len, DECODABET); } /** * Decodes web safe Base64 content in byte array format and returns * the decoded byte array. * Web safe encoding uses '-' instead of '+', '_' instead of '/' * * @param source the Base64 encoded data * @param off the offset of where to begin decoding * @param len the length of characters to decode * @return decoded data */ public static byte[] decodeWebSafe(byte[] source, int off, int len) throws Base64DecoderException { return decode(source, off, len, WEBSAFE_DECODABET); } /** * Decodes Base64 content using the supplied decodabet and returns * the decoded byte array. 
* * @param source the Base64 encoded data * @param off the offset of where to begin decoding * @param len the length of characters to decode * @param decodabet the decodabet for decoding Base64 content * @return decoded data */ public static byte[] decode(byte[] source, int off, int len, byte[] decodabet) throws Base64DecoderException { int len34 = len * 3 / 4; byte[] outBuff = new byte[2 + len34]; // Upper limit on size of output int outBuffPosn = 0; byte[] b4 = new byte[4]; int b4Posn = 0; int i = 0; byte sbiCrop = 0; byte sbiDecode = 0; for (i = 0; i < len; i++) { sbiCrop = (byte) (source[i + off] & 0x7f); // Only the low seven bits sbiDecode = decodabet[sbiCrop]; if (sbiDecode >= WHITE_SPACE_ENC) { // White space Equals sign or better if (sbiDecode >= EQUALS_SIGN_ENC) { // An equals sign (for padding) must not occur at position 0 or 1 // and must be the last byte[s] in the encoded value if (sbiCrop == EQUALS_SIGN) { int bytesLeft = len - i; byte lastByte = (byte) (source[len - 1 + off] & 0x7f); if (b4Posn == 0 || b4Posn == 1) { throw new Base64DecoderException( "invalid padding byte '=' at byte offset " + i); } else if ((b4Posn == 3 && bytesLeft > 2) || (b4Posn == 4 && bytesLeft > 1)) { throw new Base64DecoderException( "padding byte '=' falsely signals end of encoded value " + "at offset " + i); } else if (lastByte != EQUALS_SIGN && lastByte != NEW_LINE) { throw new Base64DecoderException( "encoded value has invalid trailing byte"); } break; } b4[b4Posn++] = sbiCrop; if (b4Posn == 4) { outBuffPosn += decode4to3(b4, 0, outBuff, outBuffPosn, decodabet); b4Posn = 0; } } } else { throw new Base64DecoderException("Bad Base64 input character at " + i + ": " + source[i + off] + "(decimal)"); } } // Because web safe encoding allows non padding base64 encodes, we // need to pad the rest of the b4 buffer with equal signs when // b4Posn != 0. There can be at most 2 equal signs at the end of // four characters, so the b4 buffer must have two or three // characters. This also catches the case where the input is // padded with EQUALS_SIGN if (b4Posn != 0) { if (b4Posn == 1) { throw new Base64DecoderException("single trailing character at offset " + (len - 1)); } b4[b4Posn++] = EQUALS_SIGN; outBuffPosn += decode4to3(b4, 0, outBuff, outBuffPosn, decodabet); } byte[] out = new byte[outBuffPosn]; System.arraycopy(outBuff, 0, out, 0, outBuffPosn); return out; } }
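A small round-trip sketch for the converter above; the demo class name and string contents are illustrative, and the checked Base64DecoderException is the one declared on the decode methods in this class.

// Illustrative round trip using the Base64 class above. Not part of the original file.
public class Base64Demo {
    public static void main(String[] args) throws Base64DecoderException {
        byte[] payload = "hello world".getBytes();

        // Standard alphabet, padded with '='.
        String encoded = Base64.encode(payload);
        byte[] decoded = Base64.decode(encoded);

        // Web-safe alphabet ('-' and '_'), here requested without '=' padding.
        String webSafe = Base64.encodeWebSafe(payload, false);
        byte[] decodedWebSafe = Base64.decodeWebSafe(webSafe);

        System.out.println(encoded);
        System.out.println(new String(decoded));
        System.out.println(webSafe);
        System.out.println(new String(decodedWebSafe));
    }
}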
// Licensed to the Software Freedom Conservancy (SFC) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The SFC licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package org.openqa.selenium.ie; import org.openqa.selenium.Capabilities; import org.openqa.selenium.Platform; import org.openqa.selenium.WebDriverException; import org.openqa.selenium.remote.CapabilityType; import org.openqa.selenium.remote.FileDetector; import org.openqa.selenium.remote.RemoteWebDriver; import org.openqa.selenium.remote.service.DriverCommandExecutor; import java.io.File; public class InternetExplorerDriver extends RemoteWebDriver { /** * Capability that defines whether to ignore the browser zoom level or not. */ public final static String IGNORE_ZOOM_SETTING = "ignoreZoomSetting"; /** * Capability that defines to use whether to use native or javascript events during operations. */ public final static String NATIVE_EVENTS = CapabilityType.HAS_NATIVE_EVENTS; /** * Capability that defines the initial URL to be used when IE is launched. */ public final static String INITIAL_BROWSER_URL = "initialBrowserUrl"; /** * Capability that defines how elements are scrolled into view in the InternetExplorerDriver. */ public final static String ELEMENT_SCROLL_BEHAVIOR = CapabilityType.ELEMENT_SCROLL_BEHAVIOR; /** * Capability that defines which behaviour will be used if an unexpected Alert is found. */ public final static String UNEXPECTED_ALERT_BEHAVIOR = CapabilityType.UNEXPECTED_ALERT_BEHAVIOUR; /** * Capability that defines to use or not cleanup of element cache on document loading. */ public final static String ENABLE_ELEMENT_CACHE_CLEANUP = "enableElementCacheCleanup"; /** * Capability that defines timeout in milliseconds for attaching to new browser window. */ public final static String BROWSER_ATTACH_TIMEOUT = "browserAttachTimeout"; /** * Capability that defines to ignore ot not browser * protected mode settings during starting by IEDriverServer. * * Setting this capability will make your tests unstable and hard to debug. */ public final static String INTRODUCE_FLAKINESS_BY_IGNORING_SECURITY_DOMAINS = "ignoreProtectedModeSettings"; /** * Capability that defines to use persistent hovering or not. */ public final static String ENABLE_PERSISTENT_HOVERING = "enablePersistentHover"; /** * Capability that defines to focus to browser window or not before operation. */ public final static String REQUIRE_WINDOW_FOCUS = "requireWindowFocus"; /** * Capability that defines the location of the file where IEDriverServer * should write log messages to. */ public final static String LOG_FILE = "logFile"; /** * Capability that defines the detalization level the IEDriverServer logs. */ public final static String LOG_LEVEL = "logLevel"; /** * Capability that defines the address of the host adapter on which * the IEDriverServer will listen for commands. 
*/ public final static String HOST = "host"; /** * Capability that defines full path to directory to which will be * extracted supporting files of the IEDriverServer. */ public final static String EXTRACT_PATH = "extractPath"; /** * Capability that defines suppress or not diagnostic output of the IEDriverServer. */ public final static String SILENT = "silent"; /** * Capability that defines launch API of IE used by IEDriverServer. */ public final static String FORCE_CREATE_PROCESS = "ie.forceCreateProcessApi"; /** * Capability that defines to clean or not browser cache before launching IE by IEDriverServer. */ public final static String IE_ENSURE_CLEAN_SESSION = "ie.ensureCleanSession"; /** * Capability that defines setting the proxy information for a single IE process * without affecting the proxy settings of other instances of IE. */ public final static String IE_USE_PER_PROCESS_PROXY = "ie.usePerProcessProxy"; /** * @deprecated Use {@link #IE_USE_PER_PROCESS_PROXY} (the one without the typo); */ @Deprecated public final static String IE_USE_PRE_PROCESS_PROXY = IE_USE_PER_PROCESS_PROXY; /** * Capability that defines used IE CLI switches when {@link #FORCE_CREATE_PROCESS} is enabled. */ public final static String IE_SWITCHES = "ie.browserCommandLineSwitches"; public InternetExplorerDriver() { this(null, null); } /** * @deprecated Use {@link #InternetExplorerDriver(InternetExplorerOptions)} */ @Deprecated public InternetExplorerDriver(Capabilities capabilities) { this(null, capabilities); } public InternetExplorerDriver(InternetExplorerOptions options) { this(null, options); } public InternetExplorerDriver(InternetExplorerDriverService service) { this(service, null); } /** * @deprecated Use {@link #InternetExplorerDriver(InternetExplorerDriverService, InternetExplorerOptions)} */ @Deprecated public InternetExplorerDriver(InternetExplorerDriverService service, Capabilities capabilities) { this(service, new InternetExplorerOptions(capabilities)); } public InternetExplorerDriver( InternetExplorerDriverService service, InternetExplorerOptions options) { if (options == null) { options = new InternetExplorerOptions(); } if (service == null) { service = setupService(options); } run(service, options); } private void run(InternetExplorerDriverService service, Capabilities capabilities) { assertOnWindows(); setCommandExecutor(new DriverCommandExecutor(service)); startSession(capabilities); } @Override public void setFileDetector(FileDetector detector) { throw new WebDriverException( "Setting the file detector only works on remote webdriver instances obtained " + "via RemoteWebDriver"); } protected void assertOnWindows() { Platform current = Platform.getCurrent(); if (!current.is(Platform.WINDOWS)) { throw new WebDriverException( String.format( "You appear to be running %s. 
The IE driver only runs on Windows.", current)); } } private InternetExplorerDriverService setupService(Capabilities caps) { InternetExplorerDriverService.Builder builder = new InternetExplorerDriverService.Builder(); if (caps != null) { if (caps.getCapability(LOG_FILE) != null) { String value = (String) caps.getCapability(LOG_FILE); if (value != null) { builder.withLogFile(new File(value)); } } if (caps.getCapability(LOG_LEVEL) != null) { String value = (String) caps.getCapability(LOG_LEVEL); if (value != null) { builder.withLogLevel(InternetExplorerDriverLogLevel.valueOf(value)); } } if (caps.getCapability(HOST) != null) { String value = (String) caps.getCapability(HOST); if (value != null) { builder.withHost(value); } } if (caps.getCapability(EXTRACT_PATH) != null) { String value = (String) caps.getCapability(EXTRACT_PATH); if (value != null) { builder.withExtractPath(new File(value)); } } if (caps.getCapability(SILENT) != null) { Boolean value = (Boolean) caps.getCapability(SILENT); if (value != null) { builder.withSilent(value); } } } return builder.build(); } }
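A brief usage sketch for the driver above, wiring an explicit service and options; the log file path and target URL are illustrative, and the IEDriverServer binary is assumed to be available on the machine. The builder methods and capability constants are the ones used in the class above.

// Illustrative usage of InternetExplorerDriver with an explicit service and options.
import java.io.File;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.ie.InternetExplorerDriver;
import org.openqa.selenium.ie.InternetExplorerDriverService;
import org.openqa.selenium.ie.InternetExplorerOptions;

public class IeDriverExample {
    public static void main(String[] args) {
        InternetExplorerDriverService service = new InternetExplorerDriverService.Builder()
                .withLogFile(new File("iedriver.log"))   // hypothetical log location
                .withSilent(true)
                .build();

        InternetExplorerOptions options = new InternetExplorerOptions();
        options.setCapability(InternetExplorerDriver.REQUIRE_WINDOW_FOCUS, true);
        options.setCapability(InternetExplorerDriver.IGNORE_ZOOM_SETTING, true);

        WebDriver driver = new InternetExplorerDriver(service, options);
        try {
            driver.get("https://example.com");
            System.out.println(driver.getTitle());
        } finally {
            driver.quit();
        }
    }
}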
/* * Copyright (c) 2011-2013, Peter Abeles. All Rights Reserved. * * This file is part of BoofCV (http://boofcv.org). * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package boofcv.alg.sfm.d2; import boofcv.abst.feature.tracker.PointTrack; import boofcv.abst.feature.tracker.PointTracker; import boofcv.struct.geo.AssociatedPair; import boofcv.struct.image.ImageUInt8; import georegression.struct.InvertibleTransform; import georegression.struct.se.Se2_F32; import org.ddogleg.fitting.modelset.ModelMatcher; import org.junit.Test; import java.util.ArrayList; import java.util.List; import static org.junit.Assert.*; /** * @author Peter Abeles */ public class TestImageMotionPointTrackerKey { /** * Give it a very simple example and see if it computes the correct motion and has the expected behavior * when processing an image */ @Test public void process() { // what the initial transform should be Se2_F32 computed = new Se2_F32(4,5,6); Se2_F32 model = new Se2_F32(); DummyTracker tracker = new DummyTracker(); DummyModelMatcher<Se2_F32> matcher = new DummyModelMatcher<Se2_F32>(computed,5); ImageUInt8 input = new ImageUInt8(20,30); ImageMotionPointTrackerKey<ImageUInt8,Se2_F32> alg = new ImageMotionPointTrackerKey<ImageUInt8,Se2_F32>(tracker,matcher,null,model,1000); // the first time it processes an image it should always return false since no motion can be estimated assertFalse(alg.process(input)); assertFalse(alg.isKeyFrame()); assertEquals(0, tracker.numSpawn); // make the current frame into the keyframe // request that the current frame is a keyframe alg.changeKeyFrame(); assertEquals(0, tracker.numDropAll); assertEquals(1, tracker.numSpawn); assertTrue(alg.isKeyFrame()); // now it should compute some motion assertTrue(alg.process(input)); assertFalse(alg.isKeyFrame()); // no new tracks should have been spawned assertEquals(1, tracker.numSpawn); // test the newly computed results assertEquals(computed.getX(), alg.getKeyToCurr().getX(), 1e-8); assertEquals(computed.getX(), alg.getWorldToCurr().getX(), 1e-8); // see if reset does its job assertEquals(0, tracker.numDropAll); alg.reset(); assertEquals(1, tracker.numDropAll); assertEquals(0, alg.getTotalFramesProcessed() ); assertEquals(0, alg.getKeyToCurr().getX(), 1e-8); assertEquals(0, alg.getWorldToCurr().getX(), 1e-8); } /** * Test the keyframe based on the definition of the keyframe */ @Test public void changeKeyFrame() { Se2_F32 computed = new Se2_F32(4,5,6); Se2_F32 model = new Se2_F32(); DummyTracker tracker = new DummyTracker(); DummyModelMatcher<Se2_F32> matcher = new DummyModelMatcher<Se2_F32>(computed,5); ImageUInt8 input = new ImageUInt8(20,30); ImageMotionPointTrackerKey<ImageUInt8,Se2_F32> alg = new ImageMotionPointTrackerKey<ImageUInt8,Se2_F32>(tracker,matcher,null,model,100); // process twice to change the transforms alg.process(input); alg.changeKeyFrame(); alg.process(input); // sanity check Se2_F32 worldToKey = alg.getWorldToKey(); assertEquals(0, worldToKey.getX(), 1e-8); assertEquals(1, tracker.numSpawn); // invoke the function being tested 
alg.changeKeyFrame(); // the keyframe should be changed and new tracks spawned assertEquals(2, tracker.numSpawn); // worldToKey should now be equal to worldToCurr worldToKey = alg.getWorldToKey(); assertEquals(computed.getX(), worldToKey.getX(), 1e-8); } /** * See if tracks are pruned after not being in inlier set for X time */ @Test public void testPrune() { Se2_F32 computed = new Se2_F32(4,5,6); Se2_F32 model = new Se2_F32(); DummyTracker tracker = new DummyTracker(); DummyModelMatcher<Se2_F32> matcher = new DummyModelMatcher<Se2_F32>(computed,5); ImageUInt8 input = new ImageUInt8(20,30); ImageMotionPointTrackerKey<ImageUInt8,Se2_F32> alg = new ImageMotionPointTrackerKey<ImageUInt8,Se2_F32>(tracker,matcher,null,model,5); // create tracks such that only some of them will be dropped alg.totalFramesProcessed = 9; for( int i = 0; i < 10; i++ ) { PointTrack t = new PointTrack(); AssociatedPairTrack a = new AssociatedPairTrack(); a.lastUsed = i; t.cookie = a; tracker.list.add(t); } // update alg.process(input); // check to see how many were dropped assertEquals(6,tracker.numDropped); } public static class DummyTracker implements PointTracker<ImageUInt8> { public int numSpawn = 0; public int numDropped = 0; public int numDropAll = 0; List<PointTrack> list = new ArrayList<PointTrack>(); List<PointTrack> listSpawned = new ArrayList<PointTrack>(); @Override public void reset() {} @Override public void process(ImageUInt8 image) {} @Override public void spawnTracks() { numSpawn++; listSpawned.clear(); for( int i = 0; i < 5; i++ ){ PointTrack t = new PointTrack(); listSpawned.add(t); list.add(t); } } @Override public void dropAllTracks() { numDropAll++; } @Override public boolean dropTrack(PointTrack track) {numDropped++;return true;} @Override public List<PointTrack> getAllTracks( List<PointTrack> list ) { if( list == null ) list = new ArrayList<PointTrack>(); list.addAll(this.list); return list; } @Override public List<PointTrack> getActiveTracks(List<PointTrack> list) { return getAllTracks(list); } @Override public List<PointTrack> getInactiveTracks(List<PointTrack> list) { if( list == null ) list = new ArrayList<PointTrack>(); return list; } @Override public List<PointTrack> getDroppedTracks(List<PointTrack> list) { return new ArrayList<PointTrack>(); } @Override public List<PointTrack> getNewTracks(List<PointTrack> list) { if( list == null ) list = new ArrayList<PointTrack>(); list.addAll(this.listSpawned); return list; } } public static class DummyModelMatcher<T extends InvertibleTransform> implements ModelMatcher<T,AssociatedPair> { T found; int matchSetSize; public DummyModelMatcher(T found, int matchSetSize) { this.found = found; this.matchSetSize = matchSetSize; } @Override public boolean process(List<AssociatedPair> dataSet) { return true; } @Override public T getModel() { return found; } @Override public List<AssociatedPair> getMatchSet() { List<AssociatedPair> ret = new ArrayList<AssociatedPair>(); for( int i = 0; i < matchSetSize; i++ ) { ret.add( new AssociatedPairTrack()); } return ret; } @Override public int getInputIndex(int matchIndex) { return matchIndex; } @Override public double getError() { return 0; } @Override public int getMinimumSize() { return matchSetSize; } public void setMotion(T se) { found = se; } } }
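/*
 * A minimal, hypothetical driver (not part of the original test suite): it reuses the public
 * DummyTracker and DummyModelMatcher defined in TestImageMotionPointTrackerKey to show the
 * process -> changeKeyFrame -> process flow in isolation, with the same constructor arguments
 * the process() test uses above. The class name and main method are invented for illustration.
 */
package boofcv.alg.sfm.d2;

import boofcv.struct.image.ImageUInt8;
import georegression.struct.se.Se2_F32;

public class ImageMotionPointTrackerKeySketch {
    public static void main(String[] args) {
        // the dummy matcher always "finds" this motion between frames
        Se2_F32 motion = new Se2_F32(4, 5, 6);
        TestImageMotionPointTrackerKey.DummyTracker tracker = new TestImageMotionPointTrackerKey.DummyTracker();
        TestImageMotionPointTrackerKey.DummyModelMatcher<Se2_F32> matcher =
                new TestImageMotionPointTrackerKey.DummyModelMatcher<Se2_F32>(motion, 5);

        ImageMotionPointTrackerKey<ImageUInt8, Se2_F32> alg =
                new ImageMotionPointTrackerKey<ImageUInt8, Se2_F32>(tracker, matcher, null, new Se2_F32(), 1000);

        ImageUInt8 frame = new ImageUInt8(20, 30);
        alg.process(frame);       // first frame: no motion can be estimated yet
        alg.changeKeyFrame();     // make the current frame the keyframe and spawn tracks
        alg.process(frame);       // now keyToCurr/worldToCurr follow the matcher's model
        System.out.println("worldToCurr.x = " + alg.getWorldToCurr().getX());
    }
}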
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.cache.database.tree.util; import java.nio.ByteBuffer; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.internal.pagemem.PageMemory; import org.apache.ignite.internal.pagemem.wal.IgniteWriteAheadLogManager; import org.apache.ignite.internal.pagemem.wal.record.delta.InitNewPageRecord; import org.apache.ignite.internal.processors.cache.database.tree.io.PageIO; import org.apache.ignite.internal.util.GridUnsafe; import static java.lang.Boolean.FALSE; import static java.lang.Boolean.TRUE; /** * Page handler. */ public abstract class PageHandler<X, R> { /** */ private static final PageHandler<Void, Boolean> NO_OP = new PageHandler<Void, Boolean>() { @Override public Boolean run(int cacheId, long pageId, long page, long pageAddr, PageIO io, Boolean walPlc, Void arg, int intArg) throws IgniteCheckedException { return Boolean.TRUE; } }; /** * @param cacheId Cache ID. * @param pageId Page ID. * @param page Page absolute pointer. * @param pageAddr Page address. * @param io IO. * @param walPlc Full page WAL record policy. * @param arg Argument. * @param intArg Argument of type {@code int}. * @return Result. * @throws IgniteCheckedException If failed. */ public abstract R run( int cacheId, long pageId, long page, long pageAddr, PageIO io, Boolean walPlc, X arg, int intArg) throws IgniteCheckedException; /** * @param cacheId Cache ID. * @param pageId Page ID. * @param page Page pointer. * @param pageAddr Page address. * @param arg Argument. * @param intArg Argument of type {@code int}. * @return {@code true} If release. */ public boolean releaseAfterWrite( int cacheId, long pageId, long page, long pageAddr, X arg, int intArg) { return true; } /** * @param pageMem Page memory. * @param cacheId Cache ID. * @param pageId Page ID. * @param lsnr Lock listener. * @param h Handler. * @param arg Argument. * @param intArg Argument of type {@code int}. * @param lockFailed Result in case of lock failure due to page recycling. * @return Handler result. * @throws IgniteCheckedException If failed. */ public static <X, R> R readPage( PageMemory pageMem, int cacheId, long pageId, PageLockListener lsnr, PageHandler<X, R> h, X arg, int intArg, R lockFailed ) throws IgniteCheckedException { long page = pageMem.acquirePage(cacheId, pageId); try { long pageAddr = readLock(pageMem, cacheId, pageId, page, lsnr); if (pageAddr == 0L) return lockFailed; try { PageIO io = PageIO.getPageIO(pageAddr); return h.run(cacheId, pageId, page, pageAddr, io, null, arg, intArg); } finally { readUnlock(pageMem, cacheId, pageId, page, pageAddr, lsnr); } } finally { pageMem.releasePage(cacheId, pageId, page); } } /** * @param pageMem Page memory. * @param cacheId Cache ID. * @param pageId Page ID. 
* @param page Page pointer. * @param lsnr Lock listener. * @param h Handler. * @param arg Argument. * @param intArg Argument of type {@code int}. * @param lockFailed Result in case of lock failure due to page recycling. * @return Handler result. * @throws IgniteCheckedException If failed. */ public static <X, R> R readPage( PageMemory pageMem, int cacheId, long pageId, long page, PageLockListener lsnr, PageHandler<X, R> h, X arg, int intArg, R lockFailed ) throws IgniteCheckedException { long pageAddr = readLock(pageMem, cacheId, pageId, page, lsnr); if (pageAddr == 0L) return lockFailed; try { PageIO io = PageIO.getPageIO(pageAddr); return h.run(cacheId, pageId, page, pageAddr, io, null, arg, intArg); } finally { readUnlock(pageMem, cacheId, pageId, page, pageAddr, lsnr); } } /** * @param pageMem Page memory. * @param cacheId Cache ID. * @param pageId Page ID. * @param page Page pointer. * @param lsnr Lock listener. * @return Page address. */ public static long readLock( PageMemory pageMem, int cacheId, long pageId, long page, PageLockListener lsnr) { lsnr.onBeforeReadLock(cacheId, pageId, page); long pageAddr = pageMem.readLock(cacheId, pageId, page); lsnr.onReadLock(cacheId, pageId, page, pageAddr); return pageAddr; } /** * @param pageMem Page memory. * @param cacheId Cache ID. * @param pageId Page ID. * @param page Page pointer. * @param pageAddr Page address (for-write pointer) * @param lsnr Lock listener. */ public static void readUnlock( PageMemory pageMem, int cacheId, long pageId, long page, long pageAddr, PageLockListener lsnr) { lsnr.onReadUnlock(cacheId, pageId, page, pageAddr); pageMem.readUnlock(cacheId, pageId, page); } /** * @param pageMem Page memory. * @param cacheId Cache ID. * @param pageId Page ID. * @param init IO for new page initialization. * @param wal Write ahead log. * @param lsnr Lock listener. * @throws IgniteCheckedException If failed. */ public static void initPage( PageMemory pageMem, int cacheId, long pageId, PageIO init, IgniteWriteAheadLogManager wal, PageLockListener lsnr ) throws IgniteCheckedException { Boolean res = writePage(pageMem, cacheId, pageId, lsnr, PageHandler.NO_OP, init, wal, null, null, 0, FALSE); assert res != FALSE; } /** * @param pageMem Page memory. * @param cacheId Cache ID. * @param pageId Page ID. * @param lsnr Lock listener. * @param h Handler. * @param init IO for new page initialization or {@code null} if it is an existing page. * @param wal Write ahead log. * @param walPlc Full page WAL record policy. * @param arg Argument. * @param intArg Argument of type {@code int}. * @param lockFailed Result in case of lock failure due to page recycling. * @return Handler result. * @throws IgniteCheckedException If failed. */ public static <X, R> R writePage( PageMemory pageMem, int cacheId, long pageId, PageLockListener lsnr, PageHandler<X, R> h, PageIO init, IgniteWriteAheadLogManager wal, Boolean walPlc, X arg, int intArg, R lockFailed ) throws IgniteCheckedException { boolean releaseAfterWrite = true; long page = pageMem.acquirePage(cacheId, pageId); try { long pageAddr = writeLock(pageMem, cacheId, pageId, page, lsnr, false); if (pageAddr == 0L) return lockFailed; boolean ok = false; try { if (init != null) { // It is a new page and we have to initialize it. 
doInitPage(pageMem, cacheId, pageId, page, pageAddr, init, wal); walPlc = FALSE; } else { init = PageIO.getPageIO(pageAddr); } R res = h.run(cacheId, pageId, page, pageAddr, init, walPlc, arg, intArg); ok = true; return res; } finally { assert PageIO.getCrc(pageAddr) == 0; //TODO GG-11480 if (releaseAfterWrite = h.releaseAfterWrite(cacheId, pageId, page, pageAddr, arg, intArg)) writeUnlock(pageMem, cacheId, pageId, page, pageAddr, lsnr, walPlc, ok); } } finally { if (releaseAfterWrite) pageMem.releasePage(cacheId, pageId, page); } } /** * @param pageMem Page memory. * @param cacheId Cache ID. * @param pageId Page ID. * @param page Page pointer. * @param lsnr Lock listener. * @param h Handler. * @param init IO for new page initialization or {@code null} if it is an existing page. * @param wal Write ahead log. * @param walPlc Full page WAL record policy. * @param arg Argument. * @param intArg Argument of type {@code int}. * @param lockFailed Result in case of lock failure due to page recycling. * @return Handler result. * @throws IgniteCheckedException If failed. */ public static <X, R> R writePage( PageMemory pageMem, int cacheId, long pageId, long page, PageLockListener lsnr, PageHandler<X, R> h, PageIO init, IgniteWriteAheadLogManager wal, Boolean walPlc, X arg, int intArg, R lockFailed ) throws IgniteCheckedException { long pageAddr = writeLock(pageMem, cacheId, pageId, page, lsnr, false); if (pageAddr == 0L) return lockFailed; boolean ok = false; try { if (init != null) { // It is a new page and we have to initialize it. doInitPage(pageMem, cacheId, pageId, page, pageAddr, init, wal); walPlc = FALSE; } else { init = PageIO.getPageIO(pageAddr); } R res = h.run(cacheId, pageId, page, pageAddr, init, walPlc, arg, intArg); ok = true; return res; } finally { assert PageIO.getCrc(pageAddr) == 0; //TODO GG-11480 if (h.releaseAfterWrite(cacheId, pageId, page, pageAddr, arg, intArg)) writeUnlock(pageMem, cacheId, pageId, page, pageAddr, lsnr, walPlc, ok); } } /** * @param pageMem Page memory. * @param cacheId Cache ID. * @param pageId Page ID. * @param page Page pointer. * @param pageAddr Page address. * @param lsnr Lock listener. * @param walPlc Full page WAL record policy. * @param dirty Page is dirty. */ public static void writeUnlock( PageMemory pageMem, int cacheId, long pageId, long page, long pageAddr, PageLockListener lsnr, Boolean walPlc, boolean dirty) { lsnr.onWriteUnlock(cacheId, pageId, page, pageAddr); pageMem.writeUnlock(cacheId, pageId, page, walPlc, dirty); } /** * @param pageMem Page memory. * @param cacheId Cache ID. * @param pageId Page ID. * @param page Page pointer. * @param lsnr Lock listener. * @param tryLock Only try to lock without waiting. * @return Page address or {@code 0} if failed to lock due to recycling. */ public static long writeLock( PageMemory pageMem, int cacheId, long pageId, long page, PageLockListener lsnr, boolean tryLock) { lsnr.onBeforeWriteLock(cacheId, pageId, page); long pageAddr = tryLock ? pageMem.tryWriteLock(cacheId, pageId, page) : pageMem.writeLock(cacheId, pageId, page); lsnr.onWriteLock(cacheId, pageId, page, pageAddr); return pageAddr; } /** * @param pageMem Page memory. * @param cacheId Cache ID. * @param pageId Page ID. * @param page Page pointer. * @param pageAddr Page address. * @param init Initial IO. * @param wal Write ahead log. * @throws IgniteCheckedException If failed. 
*/ private static void doInitPage( PageMemory pageMem, int cacheId, long pageId, long page, long pageAddr, PageIO init, IgniteWriteAheadLogManager wal) throws IgniteCheckedException { assert PageIO.getCrc(pageAddr) == 0; //TODO GG-11480 init.initNewPage(pageAddr, pageId, pageMem.pageSize()); // Here we should never write full page, because it is known to be new. if (isWalDeltaRecordNeeded(pageMem, cacheId, pageId, page, wal, FALSE)) wal.log(new InitNewPageRecord(cacheId, pageId, init.getType(), init.getVersion(), pageId)); } /** * @param pageMem Page memory. * @param cacheId Cache ID. * @param pageId Page ID. * @param page Page pointer. * @param wal Write ahead log. * @param walPlc Full page WAL record policy. * @return {@code true} If we need to make a delta WAL record for the change in this page. */ public static boolean isWalDeltaRecordNeeded( PageMemory pageMem, int cacheId, long pageId, long page, IgniteWriteAheadLogManager wal, Boolean walPlc) { // If the page is clean, then it is either newly allocated or just after checkpoint. // In both cases we have to write full page contents to WAL. return wal != null && !wal.isAlwaysWriteFullPages() && walPlc != TRUE && (walPlc == FALSE || pageMem.isDirty(cacheId, pageId, page)); } /** * @param src Source. * @param dst Destination. * @param srcOff Source offset in bytes. * @param dstOff Destination offset in bytes. * @param cnt Bytes count to copy. */ public static void copyMemory(ByteBuffer src, ByteBuffer dst, long srcOff, long dstOff, long cnt) { byte[] srcArr = src.hasArray() ? src.array() : null; byte[] dstArr = dst.hasArray() ? dst.array() : null; long srcArrOff = src.hasArray() ? src.arrayOffset() + GridUnsafe.BYTE_ARR_OFF : 0; long dstArrOff = dst.hasArray() ? dst.arrayOffset() + GridUnsafe.BYTE_ARR_OFF : 0; long srcPtr = src.isDirect() ? GridUnsafe.bufferAddress(src) : 0; long dstPtr = dst.isDirect() ? GridUnsafe.bufferAddress(dst) : 0; GridUnsafe.copyMemory(srcArr, srcPtr + srcArrOff + srcOff, dstArr, dstPtr + dstArrOff + dstOff, cnt); } /** * Will zero memory in buf * @param buf Buffer. * @param off Offset. * @param len Length. */ public static void zeroMemory(ByteBuffer buf, int off, int len) { if (buf.isDirect()) GridUnsafe.setMemory(GridUnsafe.bufferAddress(buf) + off, len, (byte)0); else { for (int i = off; i < off + len; i++) buf.put(i, (byte)0); //TODO Optimize! } } /** * @param srcAddr Source. * @param dstAddr Destination. * @param srcOff Source offset in bytes. * @param dstOff Destination offset in bytes. * @param cnt Bytes count to copy. */ public static void copyMemory(long srcAddr, long dstAddr, long srcOff, long dstOff, long cnt) { GridUnsafe.copyMemory(null, srcAddr + srcOff, null, dstAddr + dstOff, cnt); } /** * @param addr Address. * @param off Offset. * @param len Length. */ public static void zeroMemory(long addr, int off, int len) { GridUnsafe.setMemory(addr + off, len, (byte)0); } }
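/*
 * A minimal, hypothetical handler (illustration only, not part of Ignite): it reads a page under
 * a read lock via readPage(...) and returns the page type reported by its IO, or -1 when the lock
 * could not be taken because the page was recycled. Only readPage(...) and PageIO.getType() from
 * the class above are used; the surrounding class and method names are invented.
 */
package org.apache.ignite.internal.processors.cache.database.tree.util;

import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.internal.pagemem.PageMemory;
import org.apache.ignite.internal.processors.cache.database.tree.io.PageIO;

class PageTypeReader {
    /** Handler body runs while the read lock is held. */
    private static final PageHandler<Void, Integer> PAGE_TYPE = new PageHandler<Void, Integer>() {
        @Override public Integer run(int cacheId, long pageId, long page, long pageAddr, PageIO io,
            Boolean walPlc, Void arg, int intArg) throws IgniteCheckedException {
            return io.getType();
        }
    };

    /** @return Page type, or {@code -1} if the page could not be locked. */
    static int pageType(PageMemory pageMem, int cacheId, long pageId, PageLockListener lsnr)
        throws IgniteCheckedException {
        return PageHandler.readPage(pageMem, cacheId, pageId, lsnr, PAGE_TYPE, null, 0, -1);
    }
}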
/* * Copyright (c) 2011, Oracle and/or its affiliates. All rights reserved. * ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms. */ /* * Copyright 1999-2004 The Apache Software Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* * $Id: FilterExprIteratorSimple.java,v 1.2.4.2 2005/09/14 19:45:21 jeffsuttor Exp $ */ package com.sun.org.apache.xpath.internal.axes; import com.sun.org.apache.xml.internal.dtm.Axis; import com.sun.org.apache.xml.internal.dtm.DTM; import com.sun.org.apache.xml.internal.utils.PrefixResolver; import com.sun.org.apache.xpath.internal.Expression; import com.sun.org.apache.xpath.internal.ExpressionOwner; import com.sun.org.apache.xpath.internal.VariableStack; import com.sun.org.apache.xpath.internal.XPathContext; import com.sun.org.apache.xpath.internal.XPathVisitor; import com.sun.org.apache.xpath.internal.objects.XNodeSet; /** * Class to use for one-step iteration that doesn't have a predicate, and * doesn't need to set the context. */ public class FilterExprIteratorSimple extends LocPathIterator { static final long serialVersionUID = -6978977187025375579L; /** The contained expression. Should be non-null. * @serial */ private Expression m_expr; /** The result of executing m_expr. Needs to be deep cloned on clone op. */ transient private XNodeSet m_exprObj; private boolean m_mustHardReset = false; private boolean m_canDetachNodeset = true; /** * Create a FilterExprIteratorSimple object. * */ public FilterExprIteratorSimple() { super(null); } /** * Create a FilterExprIteratorSimple object. * */ public FilterExprIteratorSimple(Expression expr) { super(null); m_expr = expr; } /** * Initialize the context values for this expression * after it is cloned. * * @param context The XPath runtime context for this * transformation. */ public void setRoot(int context, Object environment) { super.setRoot(context, environment); m_exprObj = executeFilterExpr(context, m_execContext, getPrefixResolver(), getIsTopLevel(), m_stackFrame, m_expr); } /** * Execute the expression. Meant for reuse by other FilterExpr iterators * that are not derived from this object. */ public static XNodeSet executeFilterExpr(int context, XPathContext xctxt, PrefixResolver prefixResolver, boolean isTopLevel, int stackFrame, Expression expr ) throws com.sun.org.apache.xml.internal.utils.WrappedRuntimeException { PrefixResolver savedResolver = xctxt.getNamespaceContext(); XNodeSet result = null; try { xctxt.pushCurrentNode(context); xctxt.setNamespaceContext(prefixResolver); // The setRoot operation can take place with a reset operation, // and so we may not be in the context of LocPathIterator#nextNode, // so we have to set up the variable context, execute the expression, // and then restore the variable context. if (isTopLevel) { // System.out.println("calling m_expr.execute(getXPathContext())"); VariableStack vars = xctxt.getVarStack(); // These three statements need to be combined into one operation. 
int savedStart = vars.getStackFrame(); vars.setStackFrame(stackFrame); result = (com.sun.org.apache.xpath.internal.objects.XNodeSet) expr.execute(xctxt); result.setShouldCacheNodes(true); // These two statements need to be combined into one operation. vars.setStackFrame(savedStart); } else result = (com.sun.org.apache.xpath.internal.objects.XNodeSet) expr.execute(xctxt); } catch (javax.xml.transform.TransformerException se) { // TODO: Fix... throw new com.sun.org.apache.xml.internal.utils.WrappedRuntimeException(se); } finally { xctxt.popCurrentNode(); xctxt.setNamespaceContext(savedResolver); } return result; } /** * Returns the next node in the set and advances the position of the * iterator in the set. After a NodeIterator is created, the first call * to nextNode() returns the first node in the set. * * @return The next <code>Node</code> in the set being iterated over, or * <code>null</code> if there are no more members in that set. */ public int nextNode() { if(m_foundLast) return DTM.NULL; int next; if (null != m_exprObj) { m_lastFetched = next = m_exprObj.nextNode(); } else m_lastFetched = next = DTM.NULL; // m_lastFetched = next; if (DTM.NULL != next) { m_pos++; return next; } else { m_foundLast = true; return DTM.NULL; } } /** * Detaches the walker from the set which it iterated over, releasing * any computational resources and placing the iterator in the INVALID * state. */ public void detach() { if(m_allowDetach) { super.detach(); m_exprObj.detach(); m_exprObj = null; } } /** * This function is used to fixup variables from QNames to stack frame * indexes at stylesheet build time. * @param vars List of QNames that correspond to variables. This list * should be searched backwards for the first qualified name that * corresponds to the variable reference qname. The position of the * QName in the vector from the start of the vector will be its position * in the stack frame (but variables above the globalsTop value will need * to be offset to the current stack frame). */ public void fixupVariables(java.util.Vector vars, int globalsSize) { super.fixupVariables(vars, globalsSize); m_expr.fixupVariables(vars, globalsSize); } /** * Get the inner contained expression of this filter. */ public Expression getInnerExpression() { return m_expr; } /** * Set the inner contained expression of this filter. */ public void setInnerExpression(Expression expr) { expr.exprSetParent(this); m_expr = expr; } /** * Get the analysis bits for this walker, as defined in the WalkerFactory. * @return One of WalkerFactory#BIT_DESCENDANT, etc. */ public int getAnalysisBits() { if (null != m_expr && m_expr instanceof PathComponent) { return ((PathComponent) m_expr).getAnalysisBits(); } return WalkerFactory.BIT_FILTER; } /** * Returns true if all the nodes in the iteration well be returned in document * order. * Warning: This can only be called after setRoot has been called! * * @return true as a default. */ public boolean isDocOrdered() { return m_exprObj.isDocOrdered(); } class filterExprOwner implements ExpressionOwner { /** * @see ExpressionOwner#getExpression() */ public Expression getExpression() { return m_expr; } /** * @see ExpressionOwner#setExpression(Expression) */ public void setExpression(Expression exp) { exp.exprSetParent(FilterExprIteratorSimple.this); m_expr = exp; } } /** * This will traverse the heararchy, calling the visitor for * each member. If the called visitor method returns * false, the subtree should not be called. * * @param visitor The visitor whose appropriate method will be called. 
*/ public void callPredicateVisitors(XPathVisitor visitor) { m_expr.callVisitors(new filterExprOwner(), visitor); super.callPredicateVisitors(visitor); } /** * @see Expression#deepEquals(Expression) */ public boolean deepEquals(Expression expr) { if (!super.deepEquals(expr)) return false; FilterExprIteratorSimple fet = (FilterExprIteratorSimple) expr; if (!m_expr.deepEquals(fet.m_expr)) return false; return true; } /** * Returns the axis being iterated, if it is known. * * @return Axis.CHILD, etc., or -1 if the axis is not known or is of multiple * types. */ public int getAxis() { if(null != m_exprObj) return m_exprObj.getAxis(); else return Axis.FILTEREDLIST; } }
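/*
 * A minimal, hypothetical consumption loop (illustration only): once setRoot(...) has executed the
 * filter expression, callers drain the iterator by calling nextNode() until it returns DTM.NULL.
 * Only nextNode() and DTM.NULL from the class above are assumed; the helper name is invented.
 */
package com.sun.org.apache.xpath.internal.axes;

import com.sun.org.apache.xml.internal.dtm.DTM;

final class FilterExprIteratorSimpleSketch {
    /** Counts the nodes produced by an already-rooted iterator. */
    static int countNodes(FilterExprIteratorSimple iter) {
        int count = 0;
        for (int node = iter.nextNode(); node != DTM.NULL; node = iter.nextNode())
            count++;
        return count;
    }
}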
package net.dempsy.router.group; import static net.dempsy.util.Functional.chain; import static net.dempsy.utils.test.ConditionPoll.poll; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Consumer; import java.util.function.Supplier; import java.util.stream.Collectors; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import net.dempsy.Infrastructure; import net.dempsy.Manager; import net.dempsy.cluster.ClusterInfoException; import net.dempsy.cluster.ClusterInfoSession; import net.dempsy.cluster.ClusterInfoSessionFactory; import net.dempsy.config.ClusterId; import net.dempsy.messages.KeyedMessageWithType; import net.dempsy.router.BaseRouterTestWithSession; import net.dempsy.router.RoutingInboundManager; import net.dempsy.router.RoutingStrategy; import net.dempsy.router.RoutingStrategy.ContainerAddress; import net.dempsy.router.RoutingStrategyManager; import net.dempsy.router.group.intern.GroupDetails; import net.dempsy.router.shardutils.Utils; import net.dempsy.router.shardutils.Utils.ShardAssignment; import net.dempsy.transport.NodeAddress; import net.dempsy.util.TestInfrastructure; public class TestGroupRoutingStrategy extends BaseRouterTestWithSession { static final Logger LOGGER = LoggerFactory.getLogger(TestGroupRoutingStrategy.class); public TestGroupRoutingStrategy(final Supplier<ClusterInfoSessionFactory> factory, final String disruptorName, final Consumer<ClusterInfoSession> disruptor) { super(LOGGER, factory.get(), disruptor); } @Test public void testInboundSimpleHappyPathRegister() throws Exception { final int numShardsToExpect = Integer.parseInt(Utils.DEFAULT_TOTAL_SHARDS); final RoutingInboundManager manager = new RoutingInboundManager(); try (final RoutingStrategy.Inbound ib = manager .getAssociatedInstance(ClusterGroupInbound.class.getPackage().getName() + ":testInboundSimpleHappyPathRegister");) { final Utils<Object> msutils = new Utils<>(infra, "testInboundSimpleHappyPathRegister", new Object()); assertNotNull(ib); assertTrue(ClusterGroupInbound.Proxy.class.isAssignableFrom(ib.getClass())); ib.setContainerDetails(new ClusterId("test", "test"), new ContainerAddress(new DummyNodeAddress("testInboundSimpleHappyPathRegister"), 0), (l, m) -> {}); ib.start(infra); assertTrue(waitForShards(session, msutils, numShardsToExpect)); } } private static class MutableInt { public int val; } private static void checkForShardDistribution(final ClusterInfoSession session, final Utils<GroupDetails> utils, final int numShardsToExpect, final int numNodes) throws InterruptedException { final MutableInt iters = new MutableInt(); assertTrue(poll(o -> { try { iters.val++; final boolean showLog = LOGGER.isTraceEnabled() && (iters.val % 100 == 0); final List<ShardAssignment<?>> sas = getShardAssignments(utils); final Set<Integer> shards = getCurrentShards(sas); if (shards.size() != numShardsToExpect) { if (showLog) LOGGER.trace("Not all shards available. 
Expecting {} but got {}", numShardsToExpect, shards.size()); return false; } final Map<NodeAddress, AtomicInteger> counts = new HashMap<>(); for (final ShardAssignment<?> sa : sas) { final GroupDetails cur = (GroupDetails) sa.addr; AtomicInteger count = counts.get(cur.node); if (count == null) { count = new AtomicInteger(0); counts.put(cur.node, count); } count.addAndGet(sa.shards.length); } if (counts.size() != numNodes) { if (showLog) LOGGER.trace("Not all nodes registered. {} out of {}", counts.size(), numNodes); return false; } for (final Map.Entry<NodeAddress, AtomicInteger> entry : counts.entrySet()) { if (Math.abs(entry.getValue().get() - (numShardsToExpect / numNodes)) > 1) { if (showLog) LOGGER.trace("Counts for {} is below what's expected. {} is not 1 away from " + (numShardsToExpect / numNodes) + ")", entry.getKey(), entry.getValue()); return false; } } return true; } catch (final ClusterInfoException cie) { return false; } })); } @Test public void testInboundDoubleHappyPathRegister() throws Exception { final int numShardsToExpect = Integer.parseInt(Utils.DEFAULT_TOTAL_SHARDS); final String groupName = "testInboundDoubleHappyPathRegister"; try (final RoutingStrategy.Inbound ib1 = new RoutingInboundManager() .getAssociatedInstance(ClusterGroupInbound.class.getPackage().getName() + ":" + groupName); final RoutingStrategy.Inbound ib2 = new RoutingInboundManager() .getAssociatedInstance(ClusterGroupInbound.class.getPackage().getName() + ":" + groupName);) { final ClusterId clusterId = new ClusterId("test", "test"); final NodeAddress node1Addr = new DummyNodeAddress("node1"); final GroupDetails gd1 = new GroupDetails(groupName, node1Addr); final ContainerAddress node1Ca = new ContainerAddress(node1Addr, 0); final Utils<GroupDetails> utils = new Utils<>(infra, groupName, gd1); ib1.setContainerDetails(clusterId, node1Ca, (l, m) -> {}); ib1.start(infra); final NodeAddress node2Addr = new DummyNodeAddress("node2"); final ContainerAddress node2Ca = new ContainerAddress(node2Addr, 0); ib2.setContainerDetails(clusterId, node2Ca, (l, m) -> {}); try (final ClusterInfoSession session2 = sessFact.createSession();) { ib2.start(new TestInfrastructure(session2, infra.getScheduler())); assertTrue(waitForShards(session, utils, numShardsToExpect)); // if this worked right then numShardsToExpect/2 should be owned by each ... eventually. checkForShardDistribution(session, utils, numShardsToExpect, 2); // disrupt the session. This should cause a reshuffle but not fail disruptor.accept(session2); // everything should settle back checkForShardDistribution(session, utils, numShardsToExpect, 2); // now kill the second session. 
session2.close(); // this will disconnect the second Inbound and so the first should take over // see if we now have 1 session and it has all shards checkForShardDistribution(session, utils, numShardsToExpect, 1); } } } @Test public void testInboundResillience() throws Exception { final int numShardsToExpect = Integer.parseInt(Utils.DEFAULT_TOTAL_SHARDS); final String groupName = "testInboundResillience"; final Manager<RoutingStrategy.Inbound> manager = new RoutingInboundManager(); try (final RoutingStrategy.Inbound ib = manager.getAssociatedInstance(ClusterGroupInbound.class.getPackage().getName() + ":" + groupName);) { final ClusterId clusterId = super.setTestName("testInboundResillience"); final NodeAddress na = new DummyNodeAddress("theOnlyNode"); final ContainerAddress ca = new ContainerAddress(na, 0); final GroupDetails gd = new GroupDetails(groupName, na); final Infrastructure infra = makeInfra(session, sched); final Utils<GroupDetails> msutils = new Utils<>(infra, groupName, gd); ib.setContainerDetails(clusterId, ca, (l, m) -> {}); ib.start(infra); checkForShardDistribution(session, msutils, numShardsToExpect, 1); disruptor.accept(session); checkForShardDistribution(session, msutils, numShardsToExpect, 1); } } @Test public void testInboundWithOutbound() throws Exception { final int numShardsToExpect = Integer.parseInt(Utils.DEFAULT_TOTAL_SHARDS); final String groupName = "testInboundWithOutbound"; final Manager<RoutingStrategy.Inbound> manager = new RoutingInboundManager(); try (final RoutingStrategy.Inbound ib = manager.getAssociatedInstance(ClusterGroupInbound.class.getPackage().getName() + ":" + groupName);) { final ClusterId cid = setTestName("testInboundWithOutbound"); final NodeAddress na = new DummyNodeAddress("here"); final ContainerAddress oca = new ContainerAddress(na, 0); final Infrastructure infra = makeInfra(session, sched); final GroupDetails ogd = new GroupDetails(groupName, na); final Map<String, ContainerAddress> ocaiByCluster = new HashMap<>(); ocaiByCluster.put(cid.clusterName, oca); ogd.fillout(ocaiByCluster); final Utils<GroupDetails> msutils = new Utils<>(infra, groupName, ogd); ib.setContainerDetails(cid, oca, (l, m) -> {}); ib.start(infra); checkForShardDistribution(session, msutils, numShardsToExpect, 1); try (final ClusterInfoSession ses2 = sessFact.createSession(); final RoutingStrategyManager obman = chain(new RoutingStrategyManager(), o -> o.start(makeInfra(ses2, sched))); final RoutingStrategy.Factory obf = obman .getAssociatedInstance(ClusterGroupInbound.class.getPackage().getName() + ":" + groupName);) { obf.start(makeInfra(ses2, sched)); assertTrue(poll(o -> obf.isReady())); final RoutingStrategy.Router ob = obf.getStrategy(cid); assertTrue(poll(o -> obf.isReady())); final KeyedMessageWithType km = new KeyedMessageWithType(new Object(), new Object(), ""); assertTrue(poll(o -> ob.selectDestinationForMessage(km) != null)); final ContainerAddress ca = ob.selectDestinationForMessage(km); assertNotNull(ca); assertEquals("here", ((DummyNodeAddress) ca.node).name); // now disrupt the session session.close(); // the destination should clear until a new one runs // NO: destination will not necessarily clear. 
// poll(o -> ob.selectDestinationForMessage(km) == null); final ContainerAddress nca = new ContainerAddress(new DummyNodeAddress("here-again"), 0); ogd.fillout(ocaiByCluster); try (ClusterInfoSession ses3 = sessFact.createSession(); RoutingStrategy.Inbound ib2 = new RoutingInboundManager() .getAssociatedInstance(ClusterGroupInbound.class.getPackage().getName() + ":" + groupName)) { ib2.setContainerDetails(cid, nca, (l, m) -> {}); ib2.start(makeInfra(ses3, sched)); assertTrue(poll(o -> ob.selectDestinationForMessage(km) != null)); } } } } private static class DummyNodeAddress implements NodeAddress { private static final long serialVersionUID = 1L; public final String name; @SuppressWarnings("unused") private DummyNodeAddress() { name = null; } public DummyNodeAddress(final String name) { this.name = name; } @Override public boolean equals(final Object o) { return name.equals(((DummyNodeAddress) o).name); } @Override public int hashCode() { return name.hashCode(); } @Override public String toString() { return "DummyNodeAddress[ " + name + " ]"; } } @SuppressWarnings("unchecked") private static List<ShardAssignment<?>> getShardAssignments(final Utils<?> utils) throws ClusterInfoException { return (List<ShardAssignment<?>>) utils.persistentGetData(utils.shardsAssignedDir, null); } private static Set<Integer> getCurrentShards(final Utils<?> utils) throws ClusterInfoException { return getCurrentShards(getShardAssignments(utils)); } private static Set<Integer> getCurrentShards(final List<ShardAssignment<?>> sas) { if (sas == null) return new HashSet<>(); final Set<Integer> shards = sas.stream().map(sa -> Arrays.stream(sa.shards) .mapToObj(i -> Integer.valueOf(i))) .flatMap(i -> i) .collect(Collectors.toSet()); return shards; } private static boolean waitForShards(final ClusterInfoSession session, final Utils<?> utils, final int shardCount) throws InterruptedException { return poll(o -> { try { return getCurrentShards(utils).size() == shardCount; } catch (final ClusterInfoException e) { return false; } }); } }
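/*
 * A minimal, hypothetical registration sketch (not an actual test): it shows the bare Inbound
 * lifecycle the tests above exercise -- look up the group strategy by "package:groupName",
 * attach the container details, and start it against an Infrastructure. The caller supplies the
 * Infrastructure and ContainerAddress (the tests above build these from the test harness and
 * DummyNodeAddress); the class and method names here are invented.
 */
package net.dempsy.router.group;

import net.dempsy.Infrastructure;
import net.dempsy.config.ClusterId;
import net.dempsy.router.RoutingInboundManager;
import net.dempsy.router.RoutingStrategy;
import net.dempsy.router.RoutingStrategy.ContainerAddress;

final class GroupInboundSketch {
    static RoutingStrategy.Inbound register(String groupName, Infrastructure infra, ContainerAddress ca) throws Exception {
        RoutingStrategy.Inbound ib = new RoutingInboundManager()
            .getAssociatedInstance(ClusterGroupInbound.class.getPackage().getName() + ":" + groupName);
        ib.setContainerDetails(new ClusterId("test", "test"), ca, (l, m) -> {});
        ib.start(infra);
        return ib;
    }
}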
package edu.psu.compbio.seqcode.gse.seqview.model; import java.util.*; import edu.psu.compbio.seqcode.gse.datasets.alignments.MultiZAlignRegion; import edu.psu.compbio.seqcode.gse.datasets.general.Region; import edu.psu.compbio.seqcode.gse.datasets.species.Genome; import edu.psu.compbio.seqcode.gse.ewok.verbs.MultiZAlignGenerator; import edu.psu.compbio.seqcode.gse.seqview.components.RegionPanel; public class SpeciesAlignModel extends SeqViewModel implements RegionModel, Runnable { private static Object theRegionPanels = null; private Map<Genome,Map<Genome,MultiZAlignGenerator>> generators; private Map<Genome,RegionPanel> regionpanels; private Region region; private Genome currentGenome; private Map<Genome,Region> bestRegion; private Map<Genome,ArrayList<MultiZAlignRegion>> alignedRegions; private boolean newinput; private String alignment; public SpeciesAlignModel() { generators = new Hashtable<Genome,Map<Genome,MultiZAlignGenerator>>(); regionpanels = new Hashtable<Genome,RegionPanel>(); if (theRegionPanels == null) { theRegionPanels = regionpanels; } else { throw new RuntimeException("Sorry, Can't have a second regionpanels"); } bestRegion = new Hashtable<Genome,Region>(); alignedRegions = new Hashtable<Genome,ArrayList<MultiZAlignRegion>>(); newinput = false; alignment = MultiZAlignGenerator.defaultAlignmentPrefix; } public void addRegionPanel(RegionPanel p) { if (!regionpanels.containsKey(p.getGenome())) { p.addModel(this); Hashtable<Genome,MultiZAlignGenerator> map = new Hashtable<Genome,MultiZAlignGenerator>(); for (Genome g : regionpanels.keySet()) { map.put(g, new MultiZAlignGenerator(p.getGenome(), g)); Map<Genome,MultiZAlignGenerator> existing = generators.get(g); existing.put(p.getGenome(), new MultiZAlignGenerator(g, p.getGenome())); } generators.put(p.getGenome(),map); regionpanels.put(p.getGenome(),p); } // System.err.println("RP Keyset at end of addRegionPanel is " + regionpanels.keySet()); } public void removeRegionPanel(RegionPanel p) { p.removeModel(this); Genome g = p.getGenome(); generators.remove(g); // System.err.println("\n\n===============================\nRemoving RP " + p + " which is genome " + g); for (Genome o : generators.keySet()) { generators.get(o).remove(g); } regionpanels.remove(g); bestRegion.remove(g); alignedRegions.remove(g); } public void setAlignment(String alignment) { for (Genome g1 : generators.keySet()) { for (Genome g2 : generators.get(g1).keySet()) { generators.get(g1).get(g2).setAlignPrefix(alignment); } } this.alignment = alignment; } public String getAlignment() {return alignment;} public void setRegion(Region r) { if (newinput) { return; } if (r == bestRegion.get(r.getGenome())) { notifyListeners(); } else { region = r; newinput = true; } } public void resetRegion(Region r) { if (newinput) { return; } region = r; newinput = true; } public synchronized void run() { while (keepRunning()) { try { if (!newinput) { // System.err.println("Waiting... regionpanel keyset is " + regionpanels.keySet()); // System.err.println(" regionpanels is " + regionpanels); // if (regionpanels != theRegionPanels) { // throw new RuntimeException("Someone changed regionpanels before wait"); // } wait(); // System.err.println("Woken... 
regionpanel keyset is " + regionpanels.keySet()); // System.err.println(" regionpanels is " + regionpanels); // if (regionpanels != theRegionPanels) { // throw new RuntimeException("Someone changed regionpanels after wait"); // } } } catch (InterruptedException ex) {} if (newinput) { try { // System.err.println("SAM executing on " + region); Genome g = region.getGenome(); int center = (region.getStart() + region.getEnd())/2; int halfsize = (region.getEnd() - region.getStart())/2; currentGenome = g; // System.err.println("SAM current Genome is " + g + ". this is " + this); // System.err.println("Current regionpanel keyset is " + regionpanels.keySet()); for (Genome o : generators.get(g).keySet()) { if (o.equals(g)) {continue;} // System.err.println(" SAM other genome is " + o); RegionPanel p = regionpanels.get(o); if (p == null) { throw new NullPointerException("No RP for Genome " + o ); } // System.err.println("====== Found RegionPanel for Genome " + o); MultiZAlignRegion best = null; Iterator<MultiZAlignRegion> iter = generators.get(g).get(o).execute(region); ArrayList<MultiZAlignRegion> list = new ArrayList<MultiZAlignRegion>(); while (iter.hasNext()) { MultiZAlignRegion mzar = iter.next(); list.add(mzar); if (mzar.getStart() <= center && mzar.getEnd() >= center && (best == null || mzar.getScore() > best.getScore())) { best = mzar; } } if (best == null) { alignedRegions.put(o,new ArrayList<MultiZAlignRegion>()); } else { ArrayList<MultiZAlignRegion> inWindow = new ArrayList<MultiZAlignRegion>(); float factor = (float)(center - best.getStart()) / (float)(best.getEnd() - best.getStart()); int bestcenter; if (best.getStrand() == '+') { bestcenter = best.getOtherStart() + (int)((best.getOtherEnd() - best.getOtherStart()) * factor); } else { bestcenter = best.getOtherEnd() - (int)((best.getOtherEnd() - best.getOtherStart()) * factor); } Region otherregion = new Region(o, best.getOtherChrom(), bestcenter - halfsize, bestcenter + halfsize); p.setRegion(otherregion); bestRegion.put(o,otherregion); for (MultiZAlignRegion r : list) { if (r.getOtherChrom().equals(otherregion.getChrom()) && ((r.getOtherStart() >= otherregion.getStart() && r.getOtherStart() <= otherregion.getEnd()) || (r.getOtherStart() <= otherregion.getStart() && r.getOtherEnd() >= otherregion.getStart()))) { inWindow.add(r); } } alignedRegions.put(o,inWindow); } } } catch (Exception ex) { ex.printStackTrace(); } newinput = false; notifyListeners(); } } } public boolean connectionOpen(){return true;} public void reconnect(){} public boolean isReady() {return !newinput;} public Region getRegion() {return region;} public Genome getCurrentGenome() {return currentGenome;} public Set<Genome> getGenomes() {return new HashSet(regionpanels.keySet());} public Region getBestRegion(Genome g) {return bestRegion.get(g);} public List<MultiZAlignRegion> getAlignedRegions(Genome g) { return alignedRegions.get(g); } /* return the set of alignment version for the current set of genomes */ public Set<String> getAlignments() { Set<String> genomes = new HashSet<String>(); for (Genome g: getGenomes()) { genomes.add(g.getVersion()); } // System.err.println("Looking for alignment versions for " + genomes); return MultiZAlignGenerator.getAlignmentVersions(genomes); } }
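/*
 * A small, hypothetical helper (illustration only) that isolates the coordinate mapping performed
 * in run() above: the window center is expressed as a fraction of the best-scoring aligned block
 * in the current genome and projected onto the other genome, with the direction flipped for
 * '-' strand alignments. The class and method names are invented.
 */
class AlignCenterMapper {
    // center: window center in the current genome; start/end: block coordinates in the current
    // genome; otherStart/otherEnd: block coordinates in the other genome; strand: '+' or '-'.
    static int mapCenter(int center, int start, int end, int otherStart, int otherEnd, char strand) {
        float factor = (float) (center - start) / (float) (end - start);
        return strand == '+'
            ? otherStart + (int) ((otherEnd - otherStart) * factor)
            : otherEnd - (int) ((otherEnd - otherStart) * factor);
    }
}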
/* ========================================== * JGraphT : a free Java graph-theory library * ========================================== * * Project Info: http://jgrapht.sourceforge.net/ * Project Creator: Barak Naveh (http://sourceforge.net/users/barak_naveh) * * (C) Copyright 2003-2008, by Barak Naveh and Contributors. * * This program and the accompanying materials are dual-licensed under * either * * (a) the terms of the GNU Lesser General Public License version 2.1 * as published by the Free Software Foundation, or (at your option) any * later version. * * or (per the licensee's choosing) * * (b) the terms of the Eclipse Public License v1.0 as published by * the Eclipse Foundation. */ /* ----------------- * CompoundPermutationIter.java * ----------------- * (C) Copyright 2005-2008, by Assaf Lehr and Contributors. * * Original Author: Assaf Lehr * Contributor(s): - * * $Id$ * * Changes * ------- */ package org.jgrapht.experimental.permutation; import java.util.*; import org.jgrapht.util.*; /** * For permutation like this: * * <ol> * <li>1,2 are the same eq.group (numbers) * <li>a,b are og the same eq.group (letters) * <li>'$' is of its own eq. group (signs) Let the order of the group be * (arbitrary): signs,numbers,letters (note that for performance reasons, this * arbitrary order is the worst! see Performance section below) * </ol> * * <p>These are the possible compound perm: [$,1,2,a,b,c] * * <p>[$,1,2,a,c,b] * * <p>[$,1,2,b,a,c] * * <p>[$,1,2,b,c,a] * * <p>[$,1,2,c,a,b] * * <p>[$,1,2,c,b,a] * * <p>[$,2,1,a,b,c] * * <p>[$,2,1,a,c,b] * * <p>[$,2,1,b,a,c] * * <p>[$,2,1,b,c,a] * * <p>[$,2,1,c,a,b] * * <p>[$,2,1,c,b,a] * * <p>The overall number is the product of the factorials of each eq. group * size; in our example : (1!)x(2!)x(3!)=1x2x6=12. Using the constructor with * eq.group sizes and initial order [1,2,3], the result permutations are * retrieved as numbers in an array, where [0,1,2,3,4,5] means [$,1,2,a,b,c]: * * <p>[0,1,2,3,5,4] * * <p>[0,1,2,4,3,5] * * <p>etc. etc., till: * * <p>[0,2,1,5,4,3] means [$,2,1,c,b,a] * * <p> * <p><i>Performance:</i> The implementation tries to advance each time the * group zero, if it does not succeed, it tries the next group (1,2 and so on), * so: try to put the largest group as the first groups, UNLIKE the example. * Performance-wise it is better to do [a,b,c,1,2,$] .The effect is improvement * by constant (for example, by 2) * * @author Assaf * @since May 30, 2005 */ public class CompoundPermutationIter implements ArrayPermutationsIter, Iterator { IntegerPermutationIter [] permArray; /** * on the example 1+2+3=6 */ private int totalPermArraySize; /** * The overall number is the product of the factorial of each eq. group * size. */ private int max; private int iterCounter = 0; /** * For the class example, use [1,2,2]. order matters! (performance-wise too) * * @param equalityGroupsSizesArray */ public CompoundPermutationIter(int [] equalityGroupsSizesArray) { init(equalityGroupsSizesArray); } /** * Creates an IntegerPermutationIter class per equalityGroup with different * integers. * * @param equalityGroupsSizesArray */ private void init(int [] equalityGroupsSizesArray) { this.permArray = new IntegerPermutationIter[equalityGroupsSizesArray.length]; int counter = 0; this.max = 1; // each time , multiply by factorail(eqGroupSize) for ( int eqGroup = 0; eqGroup < equalityGroupsSizesArray.length; eqGroup++) { // create an array of eq.group size filled with values // of counter, counter+1, ... 
counter+size-1 int currGroupSize = equalityGroupsSizesArray[eqGroup]; int [] currArray = new int[currGroupSize]; for (int i = 0; i < currGroupSize; i++) { currArray[i] = counter; counter++; } this.permArray[eqGroup] = new IntegerPermutationIter(currArray); this.permArray[eqGroup].getNext(); // first iteration return the // source // each time , multiply by factorail(eqGroupSize) this.max *= MathUtil.factorial(currGroupSize); } this.totalPermArraySize = counter; // calc max } @Override public Object next() { return getNext(); } /** * Iteration may be one of these two: 1. the last group advances by one * iter, all else stay. 2. the last group cannot advance , so it restarts * but telling the group after it to advance (done recursively till some * group can advance) */ public int [] getNext() { if (this.iterCounter == 0) { // just return it , without change this.iterCounter++; return getPermAsArray(); } int firstGroupCapableOfAdvancing = -1; int currGroupIndex = 0; // while (firstGroupCapableOfAdvancing == -1) { IntegerPermutationIter currGroup = this.permArray[currGroupIndex]; if (currGroup.hasNext()) { currGroup.getNext(); // restart all that we passed on for (int i = 0; i < currGroupIndex; i++) { restartPermutationGroup(i); } firstGroupCapableOfAdvancing = currGroupIndex; } currGroupIndex++; if (currGroupIndex >= this.permArray.length) { break; } } this.iterCounter++; if (firstGroupCapableOfAdvancing == -1) { // nothing found. we finished all iterations return null; } else { int [] tempArray = getPermAsArray(); return tempArray; } } /** * Creates and returns a new array which consists of the eq. group current * permutation arrays. For example, in the 10th iter ([$,2,1,b,c,a]) The * permutations current statuses is [0] [2,1] [4,5,3] so retrieve * [0,2,1,4,5,3] */ public int [] getPermAsArray() { int [] resultArray = new int[this.totalPermArraySize]; int counter = 0; for ( int groupIndex = 0; groupIndex < this.permArray.length; groupIndex++) { int [] currPermArray = this.permArray[groupIndex].getCurrent(); System.arraycopy( currPermArray, 0, resultArray, counter, currPermArray.length); counter += currPermArray.length; } return resultArray; } /** * Restarts by creating a new one instead. * * @param groupIndex */ private void restartPermutationGroup(int groupIndex) { int [] oldPermArray = this.permArray[groupIndex].getCurrent(); Arrays.sort(oldPermArray); this.permArray[groupIndex] = new IntegerPermutationIter(oldPermArray); this.permArray[groupIndex].getNext(); } @Override public boolean hasNext() { boolean result; if (this.iterCounter < this.max) { result = true; } else { result = false; } return result; } public int getMax() { return max; } /* (non-Javadoc) * @see ArrayPermutationsIter#nextPermutation() */ @Override public int [] nextPermutation() { return (int []) next(); } /* (non-Javadoc) * @see ArrayPermutationsIter#hasNextPermutaions() */ @Override public boolean hasNextPermutaions() { return hasNext(); } /** * UNIMPLEMENTED. always throws new UnsupportedOperationException * * @see java.util.Iterator#remove() */ @Override public void remove() { throw new UnsupportedOperationException(); } } // End CompoundPermutationIter.java
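/*
 * A minimal, hypothetical usage sketch (not part of JGraphT): it enumerates the compound
 * permutations for equality groups of sizes 1, 2 and 3 -- the example from the class comment --
 * printing each permutation as an index array. getMax() is 1! * 2! * 3! = 12 here. The class
 * name and main method are invented for illustration.
 */
package org.jgrapht.experimental.permutation;

import java.util.Arrays;

final class CompoundPermutationIterSketch {
    public static void main(String[] args) {
        CompoundPermutationIter iter = new CompoundPermutationIter(new int[] {1, 2, 3});
        System.out.println("expected permutations: " + iter.getMax());
        while (iter.hasNext())
            System.out.println(Arrays.toString(iter.getNext())); // e.g. [0, 1, 2, 3, 4, 5] first
    }
}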
/* * Copyright 2012-2014, Continuuity, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.continuuity.loom.http.handler; import com.continuuity.http.HttpResponder; import com.continuuity.loom.account.Account; import com.continuuity.loom.cluster.Cluster; import com.continuuity.loom.cluster.ClusterJobProgress; import com.continuuity.loom.cluster.ClusterSummary; import com.continuuity.loom.cluster.Node; import com.continuuity.loom.common.conf.Configuration; import com.continuuity.loom.common.conf.Constants; import com.continuuity.loom.http.request.AddServicesRequest; import com.continuuity.loom.http.request.ClusterConfigureRequest; import com.continuuity.loom.http.request.ClusterCreateRequest; import com.continuuity.loom.http.request.ClusterOperationRequest; import com.continuuity.loom.http.request.ClusterStatusResponse; import com.continuuity.loom.layout.InvalidClusterException; import com.continuuity.loom.provisioner.QuotaException; import com.continuuity.loom.scheduler.ClusterAction; import com.continuuity.loom.scheduler.task.ClusterJob; import com.continuuity.loom.scheduler.task.ClusterService; import com.continuuity.loom.scheduler.task.ClusterTask; import com.continuuity.loom.scheduler.task.JobId; import com.continuuity.loom.scheduler.task.MissingClusterException; import com.continuuity.loom.scheduler.task.MissingEntityException; import com.continuuity.loom.scheduler.task.TaskId; import com.continuuity.loom.store.cluster.ClusterStore; import com.continuuity.loom.store.cluster.ClusterStoreService; import com.continuuity.loom.store.cluster.ClusterStoreView; import com.continuuity.loom.store.tenant.TenantStore; import com.google.common.base.Charsets; import com.google.gson.Gson; import com.google.gson.JsonArray; import com.google.gson.JsonObject; import com.google.gson.JsonSyntaxException; import com.google.gson.reflect.TypeToken; import com.google.inject.Inject; import org.jboss.netty.buffer.ChannelBufferInputStream; import org.jboss.netty.handler.codec.http.HttpRequest; import org.jboss.netty.handler.codec.http.HttpResponseStatus; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.ws.rs.DELETE; import javax.ws.rs.GET; import javax.ws.rs.POST; import javax.ws.rs.PUT; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import java.io.IOException; import java.io.InputStreamReader; import java.io.Reader; import java.util.List; import java.util.Set; /** * Handler for performing cluster operations. 
*/ @Path(Constants.API_BASE + "/clusters") public class ClusterHandler extends AbstractAuthHandler { private static final Logger LOG = LoggerFactory.getLogger(ClusterHandler.class); private final ClusterService clusterService; private final ClusterStoreService clusterStoreService; private final ClusterStore clusterStore; private final int maxClusterSize; private final Gson gson; @Inject private ClusterHandler(TenantStore tenantStore, ClusterService clusterService, ClusterStoreService clusterStoreService, Configuration conf, Gson gson) { super(tenantStore); this.clusterService = clusterService; this.clusterStoreService = clusterStoreService; this.clusterStore = clusterStoreService.getSystemView(); this.maxClusterSize = conf.getInt(Constants.MAX_CLUSTER_SIZE); this.gson = gson; } /** * Get all clusters visible to the user. * * @param request Request for clusters. * @param responder Responder for sending the response. */ @GET public void getClusters(HttpRequest request, HttpResponder responder) { Account account = getAndAuthenticateAccount(request, responder); if (account == null) { return; } try { List<ClusterSummary> summaries = clusterService.getClusterSummaries(account); responder.sendJson(HttpResponseStatus.OK, summaries, new TypeToken<List<ClusterSummary>>() {}.getType(), gson); } catch (IOException e) { LOG.error("Exception getting all clusters for account {}.", account); responder.sendError(HttpResponseStatus.INTERNAL_SERVER_ERROR, "Exception getting clusters."); } } /** * Get a specific cluster visible to the user. * * @param request Request for a cluster. * @param responder Responder for sending the response. * @param clusterId Id of the cluster to get. */ @GET @Path("/{cluster-id}") public void getCluster(HttpRequest request, HttpResponder responder, @PathParam("cluster-id") String clusterId) { Account account = getAndAuthenticateAccount(request, responder); if (account == null) { return; } try { ClusterStoreView view = clusterStoreService.getView(account); Cluster cluster = view.getCluster(clusterId); if (cluster == null) { responder.sendError(HttpResponseStatus.NOT_FOUND, "cluster " + clusterId + " not found."); return; } JsonObject jsonObject = gson.toJsonTree(cluster).getAsJsonObject(); // Update cluster Json with node information. Set<Node> clusterNodes = view.getClusterNodes(clusterId); jsonObject.add("nodes", gson.toJsonTree(clusterNodes)); // Add last job message if any ClusterJob clusterJob = clusterStore.getClusterJob(JobId.fromString(cluster.getLatestJobId())); if (clusterJob.getStatusMessage() != null) { jsonObject.addProperty("message", clusterJob.getStatusMessage()); } jsonObject.add("progress", gson.toJsonTree(new ClusterJobProgress(clusterJob))); responder.sendJson(HttpResponseStatus.OK, jsonObject); } catch (IOException e) { responder.sendError(HttpResponseStatus.INTERNAL_SERVER_ERROR, "Exception getting cluster " + clusterId); } } /** * Get the config used by the cluster. * * @param request Request for config of a cluster. * @param responder Responder for sending the response. * @param clusterId Id of the cluster containing the config to get. 
*/ @GET @Path("/{cluster-id}/config") public void getClusterConfig(HttpRequest request, HttpResponder responder, @PathParam("cluster-id") String clusterId) { Account account = getAndAuthenticateAccount(request, responder); if (account == null) { return; } try { Cluster cluster = clusterStoreService.getView(account).getCluster(clusterId); if (cluster == null) { responder.sendError(HttpResponseStatus.NOT_FOUND, "cluster " + clusterId + " not found."); return; } responder.sendJson(HttpResponseStatus.OK, cluster.getConfig()); } catch (IOException e) { responder.sendError(HttpResponseStatus.INTERNAL_SERVER_ERROR, "Exception getting config for cluster " + clusterId); } } /** * Get all services on a specific cluster visible to the user. * * @param request Request for services on a cluster. * @param responder Responder for sending the response. * @param clusterId Id of the cluster containing the services to get. */ @GET @Path("/{cluster-id}/services") public void getClusterServices(HttpRequest request, HttpResponder responder, @PathParam("cluster-id") String clusterId) { Account account = getAndAuthenticateAccount(request, responder); if (account == null) { return; } try { Cluster cluster = clusterStoreService.getView(account).getCluster(clusterId); if (cluster == null) { responder.sendError(HttpResponseStatus.NOT_FOUND, "cluster " + clusterId + " not found."); return; } responder.sendJson(HttpResponseStatus.OK, cluster.getServices()); } catch (IOException e) { responder.sendError(HttpResponseStatus.INTERNAL_SERVER_ERROR, "Exception getting services for cluster " + clusterId); } } /** * Get the status of a specific cluster visible to the user. * * @param request Request for cluster status. * @param responder Responder for sending the response. * @param clusterId Id of the cluster whose status to get. */ @GET @Path("/{cluster-id}/status") public void getClusterStatus(HttpRequest request, HttpResponder responder, @PathParam("cluster-id") String clusterId) { Account account = getAndAuthenticateAccount(request, responder); if (account == null) { return; } try { ClusterStoreView view = clusterStoreService.getView(account); Cluster cluster = view.getCluster(clusterId); if (cluster == null){ responder.sendError(HttpResponseStatus.NOT_FOUND, String.format("cluster %s not found", clusterId)); return; } ClusterJob job = clusterStore.getClusterJob(JobId.fromString(cluster.getLatestJobId())); if (job == null){ responder.sendError(HttpResponseStatus.NOT_FOUND, String.format("job %s not found for cluster %s", cluster.getLatestJobId(), clusterId)); return; } ClusterStatusResponse statusResponse = new ClusterStatusResponse(cluster, job); responder.sendJson(HttpResponseStatus.OK, statusResponse, ClusterStatusResponse.class, gson); } catch (IOException e) { responder.sendError(HttpResponseStatus.INTERNAL_SERVER_ERROR, "Exception getting status of cluster " + clusterId); } } /** * Create a new cluster. Body must include a cluster template and a number of machines. Optionally it can include any * setting that will override the corresponding template default value. * * @param request Request to add a cluster. * @param responder Responder for sending the response. 
*/ @POST public void createCluster(HttpRequest request, HttpResponder responder) { Account account = getAndAuthenticateAccount(request, responder); if (account == null) { return; } Reader reader = new InputStreamReader(new ChannelBufferInputStream(request.getContent()), Charsets.UTF_8); try { ClusterCreateRequest clusterCreateRequest = gson.fromJson(reader, ClusterCreateRequest.class); if (clusterCreateRequest.getNumMachines() > maxClusterSize) { responder.sendError(HttpResponseStatus.BAD_REQUEST, "numMachines above max cluster size " + maxClusterSize); return; } String id = clusterService.requestClusterCreate(clusterCreateRequest, account); JsonObject response = new JsonObject(); response.addProperty("id", id); responder.sendJson(HttpResponseStatus.OK, response); } catch (IllegalAccessException e) { responder.sendError(HttpResponseStatus.FORBIDDEN, "User not authorized to create cluster."); } catch (IllegalArgumentException e) { responder.sendError(HttpResponseStatus.BAD_REQUEST, e.getMessage()); } catch (MissingEntityException e) { responder.sendError(HttpResponseStatus.NOT_FOUND, e.getMessage()); } catch (IOException e) { LOG.error("Exception while trying to create cluster.", e); responder.sendError(HttpResponseStatus.INTERNAL_SERVER_ERROR, "Error requesting cluster create operation."); } catch (QuotaException e) { responder.sendError(HttpResponseStatus.CONFLICT, e.getMessage()); } catch (InvalidClusterException e) { responder.sendError(HttpResponseStatus.BAD_REQUEST, e.getMessage()); } finally { try { reader.close(); } catch (IOException e) { LOG.warn("Exception while closing request reader", e); } } } /** * Delete a specific cluster that is deletable by the user. * * @param request Request to delete cluster. * @param responder Responder for sending the response. * @param clusterId Id of the cluster to delete. 
*/ @DELETE @Path("/{cluster-id}") public void deleteCluster(HttpRequest request, HttpResponder responder, @PathParam("cluster-id") String clusterId) { LOG.debug("Received a request to delete cluster {}", clusterId); Account account = getAndAuthenticateAccount(request, responder); if (account == null) { return; } ClusterOperationRequest deleteRequest; Reader reader = new InputStreamReader(new ChannelBufferInputStream(request.getContent()), Charsets.UTF_8); try { deleteRequest = gson.fromJson(reader, ClusterOperationRequest.class); } catch (Exception e) { responder.sendError(HttpResponseStatus.BAD_REQUEST, "Invalid request body."); return; } try { Cluster cluster = clusterStoreService.getView(account).getCluster(clusterId); if (cluster == null) { responder.sendError(HttpResponseStatus.NOT_FOUND, "cluster " + clusterId + " not found."); return; } if (cluster.getStatus() == Cluster.Status.TERMINATED) { responder.sendStatus(HttpResponseStatus.OK); return; } ClusterJob clusterJob = clusterStore.getClusterJob(JobId.fromString(cluster.getLatestJobId())); // If previous job on a cluster is still underway, don't accept new jobs if (cluster.getStatus() == Cluster.Status.PENDING) { String message = String.format("Job %s is still underway for cluster %s", clusterJob.getJobId(), clusterId); LOG.error(message); responder.sendError(HttpResponseStatus.CONFLICT, message); return; } clusterService.requestClusterDelete(clusterId, account, deleteRequest); responder.sendStatus(HttpResponseStatus.OK); } catch (IOException e) { responder.sendError(HttpResponseStatus.INTERNAL_SERVER_ERROR, "Error deleting cluster."); } catch (IllegalAccessException e) { responder.sendError(HttpResponseStatus.FORBIDDEN, "User unauthorized to perform delete."); } catch (IllegalArgumentException e) { responder.sendError(HttpResponseStatus.BAD_REQUEST, e.getMessage()); } catch (MissingEntityException e) { responder.sendError(HttpResponseStatus.NOT_FOUND, e.getMessage()); } } /** * Abort the cluster operation that is currently running for the given cluster. * * @param request Request to abort the cluster operation. * @param responder Responder for sending the response. * @param clusterId Id of the cluster to abort. */ @POST @Path("/{cluster-id}/abort") public void abortClusterJob(HttpRequest request, HttpResponder responder, @PathParam("cluster-id") String clusterId) { LOG.debug("Received a request to abort job on cluster {}", clusterId); Account account = getAndAuthenticateAccount(request, responder); if (account == null) { return; } try { clusterService.requestAbortJob(clusterId, account); responder.sendStatus(HttpResponseStatus.OK); } catch (MissingClusterException e) { responder.sendError(HttpResponseStatus.NOT_FOUND, "cluster " + clusterId + " not found."); } catch (IllegalStateException e) { responder.sendError(HttpResponseStatus.CONFLICT, "Cannot be aborted at this time."); } catch (IOException e) { responder.sendError(HttpResponseStatus.INTERNAL_SERVER_ERROR, "Error aborting cluster."); } } /** * Changes a cluster parameter like lease time. * * @param request Request to change cluster parameter. * @param responder Responder to send the response. * @param clusterId Id of the cluster to change. 
*/ @POST @Path("/{cluster-id}") public void changeClusterParameter(HttpRequest request, HttpResponder responder, @PathParam("cluster-id") String clusterId) { Account account = getAndAuthenticateAccount(request, responder); if (account == null) { return; } try { JsonObject jsonObject = gson.fromJson(request.getContent().toString(Charsets.UTF_8), JsonObject.class); if (jsonObject == null || !jsonObject.has("expireTime")) { responder.sendError(HttpResponseStatus.BAD_REQUEST, "expire time not specified"); return; } long expireTime = jsonObject.get("expireTime").getAsLong(); clusterService.changeExpireTime(clusterId, account, expireTime); responder.sendStatus(HttpResponseStatus.OK); } catch (IllegalArgumentException e) { responder.sendError(HttpResponseStatus.BAD_REQUEST, e.getMessage()); } catch (JsonSyntaxException e) { LOG.error("Exception while parsing JSON.", e); responder.sendError(HttpResponseStatus.BAD_REQUEST, "Invalid JSON"); } catch (IllegalAccessException e) { responder.sendError(HttpResponseStatus.FORBIDDEN, "User does not have permission to change cluster parameter."); } catch (IOException e) { responder.sendError(HttpResponseStatus.INTERNAL_SERVER_ERROR, "Exception changing cluster parameter."); } } /** * Get the task plan for an operation that has taken place or is currently taking place on a cluster. * * @param request Request for the plan. * @param responder Responder to send the response. * @param clusterId Id of the cluster whose plan we want to get. * @param planId Id of the plan for the cluster. */ @GET @Path("/{cluster-id}/plans/{plan-id}") public void getPlanForJob(HttpRequest request, HttpResponder responder, @PathParam("cluster-id") String clusterId, @PathParam("plan-id") String planId) { Account account = getAndAuthenticateAccount(request, responder); if (account == null) { return; } try { ClusterStoreView view = clusterStoreService.getView(account); Cluster cluster = view.getCluster(clusterId); if (cluster == null) { responder.sendError(HttpResponseStatus.NOT_FOUND, "cluster " + clusterId + " not found."); return; } JobId jobId = JobId.fromString(planId); ClusterJob clusterJob = clusterStore.getClusterJob(jobId); if (!clusterJob.getClusterId().equals(clusterId)) { throw new IllegalArgumentException(String.format("Job %s does not belong to cluster %s", planId, clusterId)); } responder.sendJson(HttpResponseStatus.OK, formatJobPlan(clusterJob)); } catch (IllegalArgumentException e) { LOG.error("Exception get plan {} for cluster {}.", planId, clusterId, e); responder.sendError(HttpResponseStatus.BAD_REQUEST, e.getMessage()); } catch (IOException e) { responder.sendError(HttpResponseStatus.INTERNAL_SERVER_ERROR, "Exception getting plan."); } } /** * Get all plans for cluster operations that have taken place or are currently taking place on a cluster. * * @param request Request for cluster plans. * @param responder Responder for sending the response. * @param clusterId Id of the cluster whose plans we have to fetch. 
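 * <p>
 * For illustration only, each plan in the response is shaped by {@code formatJobPlan} roughly as
 * follows (all values are placeholders; tasks are grouped into stages, and each stage is an array
 * of task objects):
 * </p>
 * <pre>
 * {
 *   "id": "...", "clusterId": "...", "action": "START_SERVICES", "currentStage": 1,
 *   "stages": [ [ { "id": "...", "taskName": "...", "nodeId": "...", "service": "..." } ] ]
 * }
 * </pre>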
*/ @GET @Path("/{cluster-id}/plans") public void getPlansForCluster(HttpRequest request, HttpResponder responder, @PathParam("cluster-id") String clusterId) { Account account = getAndAuthenticateAccount(request, responder); if (account == null) { return; } try { JsonArray jobsJson = new JsonArray(); List<ClusterJob> jobs = clusterStoreService.getView(account).getClusterJobs(clusterId, -1); if (jobs.isEmpty()) { responder.sendError(HttpResponseStatus.NOT_FOUND, "Plans for cluster " + clusterId + " not found."); return; } for (ClusterJob clusterJob : jobs) { jobsJson.add(formatJobPlan(clusterJob)); } responder.sendJson(HttpResponseStatus.OK, jobsJson); } catch (IllegalArgumentException e) { LOG.error("Exception getting plans for cluster {}.", clusterId, e); responder.sendError(HttpResponseStatus.BAD_REQUEST, e.getMessage()); } catch (IOException e) { responder.sendError(HttpResponseStatus.INTERNAL_SERVER_ERROR, "Exception getting cluster plans."); } } /** * Overwrite the config used by an active cluster. The POST body should contain a "config" key containing the new * cluster config. Additionally, the body can contain a "restart" key whose value is true or false, indicating * whether or not cluster services should be restarted along with being reconfigured. If restart is not specified, * it defaults to true. * * @param request Request for config of a cluster. * @param responder Responder for sending the response. * @param clusterId Id of the cluster containing the config to get. */ @PUT @Path("/{cluster-id}/config") public void putClusterConfig(HttpRequest request, HttpResponder responder, @PathParam("cluster-id") String clusterId) { Account account = getAndAuthenticateAccount(request, responder); if (account == null) { return; } ClusterConfigureRequest configRequest; Reader reader = new InputStreamReader(new ChannelBufferInputStream(request.getContent()), Charsets.UTF_8); try { configRequest = gson.fromJson(reader, ClusterConfigureRequest.class); } catch (Exception e) { responder.sendError(HttpResponseStatus.BAD_REQUEST, "Invalid request body."); return; } try { clusterService.requestClusterReconfigure(clusterId, account, configRequest); responder.sendStatus(HttpResponseStatus.OK); } catch (IllegalArgumentException e) { responder.sendError(HttpResponseStatus.BAD_REQUEST, e.getMessage()); } catch (MissingEntityException e) { responder.sendError(HttpResponseStatus.NOT_FOUND, e.getMessage()); } catch (IllegalStateException e) { responder.sendError(HttpResponseStatus.CONFLICT, "Cluster is not in a configurable state."); } catch (IllegalAccessException e) { responder.sendError(HttpResponseStatus.FORBIDDEN, "User is not authorized to perform a reconfigure."); } catch (IOException e) { LOG.error("Exception requesting reconfigure on cluster {}.", clusterId, e); responder.sendError(HttpResponseStatus.INTERNAL_SERVER_ERROR, "Internal error while requesting cluster reconfigure"); } } /** * Add specific services to a cluster. The POST body must be a JSON Object with a 'services' key whose value is a * JSON Array of service names. Services must be compatible with the template used when the cluster was created, * and any dependencies of services to add must either already be on the cluster, or also in the list of services * to add. If any of these rules are violated, a BAD_REQUEST status is returned back. Otherwise, the request to add * services is queued up. * * @param request Request to add services to a cluster. * @param responder Responder for sending the response. 
* @param clusterId Id of the cluster to add services to. */ @POST @Path("/{cluster-id}/services") public void addClusterServices(HttpRequest request, HttpResponder responder, @PathParam("cluster-id") String clusterId) { Account account = getAndAuthenticateAccount(request, responder); if (account == null) { return; } AddServicesRequest addServicesRequest; Reader reader = new InputStreamReader(new ChannelBufferInputStream(request.getContent()), Charsets.UTF_8); try { addServicesRequest = gson.fromJson(reader, AddServicesRequest.class); } catch (Exception e) { responder.sendError(HttpResponseStatus.BAD_REQUEST, "Invalid request body."); return; } try { clusterService.requestAddServices(clusterId, account, addServicesRequest); responder.sendStatus(HttpResponseStatus.OK); } catch (IllegalArgumentException e) { responder.sendError(HttpResponseStatus.BAD_REQUEST, e.getMessage()); } catch (MissingEntityException e) { responder.sendError(HttpResponseStatus.NOT_FOUND, e.getMessage()); } catch (IllegalStateException e) { responder.sendError(HttpResponseStatus.CONFLICT, "Cluster is not in a state where service actions can be performed."); } catch (IllegalAccessException e) { responder.sendError(HttpResponseStatus.FORBIDDEN, "User is not authorized to add services."); } catch (IOException e) { LOG.error("Exception requesting to add services to cluster {}.", clusterId, e); responder.sendError(HttpResponseStatus.INTERNAL_SERVER_ERROR, "Internal error while requesting service action."); } } /** * Starts all services on the cluster, taking into account service dependencies for order of service starts. * * @param request Request to start cluster services. * @param responder Responder for sending the response. * @param clusterId Id of the cluster whose services should be started. */ @POST @Path("/{cluster-id}/services/start") public void startAllClusterServices(HttpRequest request, HttpResponder responder, @PathParam("cluster-id") String clusterId) { requestServiceAction(request, responder, clusterId, null, ClusterAction.START_SERVICES); } /** * Stops all services on the cluster, taking into account service dependencies for order of service stops. * * @param request Request to stop cluster services. * @param responder Responder for sending the response. * @param clusterId Id of the cluster whose services should be stopped. */ @POST @Path("/{cluster-id}/services/stop") public void stopAllClusterServices(HttpRequest request, HttpResponder responder, @PathParam("cluster-id") String clusterId) { requestServiceAction(request, responder, clusterId, null, ClusterAction.STOP_SERVICES); } /** * Restarts all services on the cluster, taking into account service dependencies for order of service stops * and starts. * * @param request Request to restart cluster services. * @param responder Responder for sending the response. * @param clusterId Id of the cluster whose services should be restarted. */ @POST @Path("/{cluster-id}/services/restart") public void restartAllClusterServices(HttpRequest request, HttpResponder responder, @PathParam("cluster-id") String clusterId) { requestServiceAction(request, responder, clusterId, null, ClusterAction.RESTART_SERVICES); } /** * Starts the specified service, plus all services it depends on, on the cluster. * * @param request Request to start cluster service. * @param responder Responder for sending the response. * @param clusterId Id of the cluster whose services should be started. 
*/ @POST @Path("/{cluster-id}/services/{service-id}/start") public void startClusterService(HttpRequest request, HttpResponder responder, @PathParam("cluster-id") String clusterId, @PathParam("service-id") String serviceId) { requestServiceAction(request, responder, clusterId, serviceId, ClusterAction.START_SERVICES); } /** * Stops the specified service on the cluster, plus all services that depend on it, * taking into account service dependencies for order of service stops. * * @param request Request to stop cluster services. * @param responder Responder for sending the response. * @param clusterId Id of the cluster whose services should be stopped. */ @POST @Path("/{cluster-id}/services/{service-id}/stop") public void stopClusterService(HttpRequest request, HttpResponder responder, @PathParam("cluster-id") String clusterId, @PathParam("service-id") String serviceId) { requestServiceAction(request, responder, clusterId, serviceId, ClusterAction.STOP_SERVICES); } /** * Restarts the specified service on the cluster, plus all services that depend on it, * taking into account service dependencies for order of service stops and starts. * * @param request Request to restart cluster service. * @param responder Responder for sending the response. * @param clusterId Id of the cluster whose service should be restarted. */ @POST @Path("/{cluster-id}/services/{service-id}/restart") public void restartClusterService(HttpRequest request, HttpResponder responder, @PathParam("cluster-id") String clusterId, @PathParam("service-id") String serviceId) { requestServiceAction(request, responder, clusterId, serviceId, ClusterAction.RESTART_SERVICES); } /** * Sync the cluster template of the cluster to the current version of the cluster template. The cluster must be * active in order for this to work, and the cluster must be modifiable by the user making the request. * * @param request Request to sync the cluster template. * @param responder Responder for sending the response. * @param clusterId Id of the cluster that should be synced. 
*/
  @POST
  @Path("/{cluster-id}/clustertemplate/sync")
  public void syncClusterTemplate(HttpRequest request, HttpResponder responder,
                                  @PathParam("cluster-id") String clusterId) {
    Account account = getAndAuthenticateAccount(request, responder);
    if (account == null) {
      return;
    }

    try {
      clusterService.syncClusterToCurrentTemplate(clusterId, account);
      responder.sendStatus(HttpResponseStatus.OK);
    } catch (IllegalStateException e) {
      responder.sendError(HttpResponseStatus.CONFLICT, "Cluster is not in a state where the template can be synced");
    } catch (MissingEntityException e) {
      responder.sendError(HttpResponseStatus.NOT_FOUND, e.getMessage());
    } catch (InvalidClusterException e) {
      responder.sendError(HttpResponseStatus.BAD_REQUEST, e.getMessage());
    } catch (IllegalAccessException e) {
      responder.sendError(HttpResponseStatus.FORBIDDEN,
                          "User not authorized to perform template sync on cluster " + clusterId);
    } catch (IOException e) {
      LOG.error("Exception syncing template for cluster {}", clusterId, e);
      responder.sendError(HttpResponseStatus.INTERNAL_SERVER_ERROR, "Internal error while syncing cluster template");
    }
  }

  private void requestServiceAction(HttpRequest request, HttpResponder responder,
                                    String clusterId, String service, ClusterAction action) {
    Account account = getAndAuthenticateAccount(request, responder);
    if (account == null) {
      return;
    }

    ClusterOperationRequest operationRequest;
    Reader reader = new InputStreamReader(new ChannelBufferInputStream(request.getContent()), Charsets.UTF_8);
    try {
      operationRequest = gson.fromJson(reader, ClusterOperationRequest.class);
    } catch (Exception e) {
      responder.sendError(HttpResponseStatus.BAD_REQUEST, "Invalid request body.");
      return;
    }

    try {
      clusterService.requestServiceRuntimeAction(clusterId, account, action, service, operationRequest);
      responder.sendStatus(HttpResponseStatus.OK);
    } catch (MissingEntityException e) {
      responder.sendError(HttpResponseStatus.NOT_FOUND, e.getMessage());
    } catch (IllegalStateException e) {
      responder.sendError(HttpResponseStatus.CONFLICT,
                          "Cluster is not in a state where service actions can be performed.");
    } catch (IllegalAccessException e) {
      responder.sendError(HttpResponseStatus.FORBIDDEN, "User not authorized to perform service action.");
    } catch (IllegalArgumentException e) {
      responder.sendError(HttpResponseStatus.BAD_REQUEST, e.getMessage());
    } catch (IOException e) {
      LOG.error("Exception performing service action for cluster {}", clusterId, e);
      responder.sendError(HttpResponseStatus.INTERNAL_SERVER_ERROR, "Internal error performing service action");
    }
  }

  private JsonObject formatJobPlan(ClusterJob job) throws IOException {
    JsonObject jobJson = new JsonObject();
    jobJson.addProperty("id", job.getJobId());
    jobJson.addProperty("clusterId", job.getClusterId());
    jobJson.addProperty("action", job.getClusterAction().name());
    jobJson.addProperty("currentStage", job.getCurrentStageNumber());

    // Tasks are grouped into stages; each stage is emitted as an array of task objects.
    JsonArray stagesJson = new JsonArray();
    for (Set<String> stage : job.getStagedTasks()) {
      JsonArray stageJson = new JsonArray();
      for (String taskId : stage) {
        ClusterTask task = clusterStore.getClusterTask(TaskId.fromString(taskId));
        JsonObject taskJson = new JsonObject();
        taskJson.addProperty("id", task.getTaskId());
        taskJson.addProperty("taskName", task.getTaskName().name());
        taskJson.addProperty("nodeId", task.getNodeId());
        taskJson.addProperty("service", task.getService());
        stageJson.add(taskJson);
      }
      stagesJson.add(stageJson);
    }
    jobJson.add("stages", stagesJson);

    return jobJson;
  }
}
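/*
 * Illustrative client sketch (not part of ClusterHandler above): shows how the cluster REST
 * endpoints defined by this handler might be exercised over plain HTTP. The host, port, API
 * prefix and request-body field names other than "numMachines" are assumptions, and the
 * account-authentication headers the real server expects are omitted entirely.
 */
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public class ClusterApiClientSketch {

  // Hypothetical base URL; the real prefix comes from Constants.API_BASE.
  private static final String BASE_URL = "http://localhost:8080/v2";

  public static void main(String[] args) throws IOException {
    // GET /clusters -> JSON list of cluster summaries visible to the caller.
    System.out.println(get(BASE_URL + "/clusters"));

    // POST /clusters -> requests cluster creation; the handler rejects the request when
    // numMachines exceeds the configured maximum cluster size.
    String body = "{\"name\":\"example\",\"clusterTemplate\":\"example-template\",\"numMachines\":3}";
    System.out.println(post(BASE_URL + "/clusters", body));
  }

  private static String get(String url) throws IOException {
    HttpURLConnection conn = (HttpURLConnection) new URL(url).openConnection();
    conn.setRequestMethod("GET");
    return readBody(conn);
  }

  private static String post(String url, String json) throws IOException {
    HttpURLConnection conn = (HttpURLConnection) new URL(url).openConnection();
    conn.setRequestMethod("POST");
    conn.setDoOutput(true);
    conn.setRequestProperty("Content-Type", "application/json");
    try (OutputStream out = conn.getOutputStream()) {
      out.write(json.getBytes(StandardCharsets.UTF_8));
    }
    return readBody(conn);
  }

  private static String readBody(HttpURLConnection conn) throws IOException {
    try (InputStream in = conn.getInputStream()) {
      return new String(in.readAllBytes(), StandardCharsets.UTF_8);
    }
  }
}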
package org.apache.maven.lifecycle.internal.builder.multithreaded; /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.PrintStream; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.Map; import java.util.Set; import org.apache.maven.lifecycle.internal.ProjectBuildList; import org.apache.maven.lifecycle.internal.ProjectSegment; /** * @since 3.0 * @author Kristian Rosenvold * <p/> * NOTE: This class is not part of any public api and can be changed or deleted without prior notice. * This class in particular may spontaneusly self-combust and be replaced by a plexus-compliant thread aware * logger implementation at any time. */ @SuppressWarnings( { "SynchronizationOnLocalVariableOrMethodParameter" } ) public class ThreadOutputMuxer { private final Iterator<ProjectSegment> projects; private final ThreadLocal<ProjectSegment> projectBuildThreadLocal = new ThreadLocal<>(); private final Map<ProjectSegment, ByteArrayOutputStream> streams = new HashMap<>(); private final Map<ProjectSegment, PrintStream> printStreams = new HashMap<>(); private final ByteArrayOutputStream defaultOutputStreamForUnknownData = new ByteArrayOutputStream(); private final PrintStream defaultPringStream = new PrintStream( defaultOutputStreamForUnknownData ); private final Set<ProjectSegment> completedBuilds = Collections.synchronizedSet( new HashSet<ProjectSegment>() ); private volatile ProjectSegment currentBuild; private final PrintStream originalSystemOUtStream; private final ConsolePrinter printer; /** * A simple but safe solution for printing to the console. */ class ConsolePrinter implements Runnable { public volatile boolean running; private final ProjectBuildList projectBuildList; ConsolePrinter( ProjectBuildList projectBuildList ) { this.projectBuildList = projectBuildList; } public void run() { running = true; for ( ProjectSegment projectBuild : projectBuildList ) { final PrintStream projectStream = printStreams.get( projectBuild ); ByteArrayOutputStream projectOs = streams.get( projectBuild ); do { synchronized ( projectStream ) { try { projectStream.wait( 100 ); } catch ( InterruptedException e ) { throw new RuntimeException( e ); } try { projectOs.writeTo( originalSystemOUtStream ); } catch ( IOException e ) { throw new RuntimeException( e ); } projectOs.reset(); } } while ( !completedBuilds.contains( projectBuild ) ); } running = false; } /* Wait until we are sure the print-stream thread is running. 
*/ public void waitUntilRunning( boolean expect ) { while ( !running == expect ) { try { Thread.sleep( 10 ); } catch ( InterruptedException e ) { throw new RuntimeException( e ); } } } } public ThreadOutputMuxer( ProjectBuildList segmentChunks, PrintStream originalSystemOut ) { projects = segmentChunks.iterator(); for ( ProjectSegment segmentChunk : segmentChunks ) { final ByteArrayOutputStream value = new ByteArrayOutputStream(); streams.put( segmentChunk, value ); printStreams.put( segmentChunk, new PrintStream( value ) ); } setNext(); this.originalSystemOUtStream = originalSystemOut; System.setOut( new ThreadBoundPrintStream( this.originalSystemOUtStream ) ); printer = new ConsolePrinter( segmentChunks ); new Thread( printer ).start(); printer.waitUntilRunning( true ); } public void close() { printer.waitUntilRunning( false ); System.setOut( this.originalSystemOUtStream ); } private void setNext() { currentBuild = projects.hasNext() ? projects.next() : null; } private boolean ownsRealOutputStream( ProjectSegment projectBuild ) { return projectBuild.equals( currentBuild ); } private PrintStream getThreadBoundPrintStream() { ProjectSegment threadProject = projectBuildThreadLocal.get(); if ( threadProject == null ) { return defaultPringStream; } if ( ownsRealOutputStream( threadProject ) ) { return originalSystemOUtStream; } return printStreams.get( threadProject ); } public void associateThreadWithProjectSegment( ProjectSegment projectBuild ) { projectBuildThreadLocal.set( projectBuild ); } public void setThisModuleComplete( ProjectSegment projectBuild ) { completedBuilds.add( projectBuild ); PrintStream stream = printStreams.get( projectBuild ); synchronized ( stream ) { stream.notifyAll(); } disconnectThreadFromProject(); } private void disconnectThreadFromProject() { projectBuildThreadLocal.remove(); } private class ThreadBoundPrintStream extends PrintStream { public ThreadBoundPrintStream( PrintStream systemOutStream ) { super( systemOutStream ); } private PrintStream getOutputStreamForCurrentThread() { return getThreadBoundPrintStream(); } @Override public void println() { final PrintStream currentStream = getOutputStreamForCurrentThread(); synchronized ( currentStream ) { currentStream.println(); currentStream.notifyAll(); } } @Override public void print( char c ) { final PrintStream currentStream = getOutputStreamForCurrentThread(); synchronized ( currentStream ) { currentStream.print( c ); currentStream.notifyAll(); } } @Override public void println( char x ) { final PrintStream currentStream = getOutputStreamForCurrentThread(); synchronized ( currentStream ) { currentStream.println( x ); currentStream.notifyAll(); } } @Override public void print( double d ) { final PrintStream currentStream = getOutputStreamForCurrentThread(); synchronized ( currentStream ) { currentStream.print( d ); currentStream.notifyAll(); } } @Override public void println( double x ) { final PrintStream currentStream = getOutputStreamForCurrentThread(); synchronized ( currentStream ) { currentStream.println( x ); currentStream.notifyAll(); } } @Override public void print( float f ) { final PrintStream currentStream = getOutputStreamForCurrentThread(); synchronized ( currentStream ) { currentStream.print( f ); currentStream.notifyAll(); } } @Override public void println( float x ) { final PrintStream currentStream = getOutputStreamForCurrentThread(); synchronized ( currentStream ) { currentStream.println( x ); currentStream.notifyAll(); } } @Override public void print( int i ) { final PrintStream 
currentStream = getOutputStreamForCurrentThread();
            synchronized ( currentStream ) { currentStream.print( i ); currentStream.notifyAll(); }
        }

        @Override
        public void println( int x )
        {
            final PrintStream currentStream = getOutputStreamForCurrentThread();
            synchronized ( currentStream ) { currentStream.println( x ); currentStream.notifyAll(); }
        }

        @Override
        public void print( long l )
        {
            final PrintStream currentStream = getOutputStreamForCurrentThread();
            synchronized ( currentStream ) { currentStream.print( l ); currentStream.notifyAll(); }
        }

        @Override
        public void println( long x )
        {
            final PrintStream currentStream = getOutputStreamForCurrentThread();
            synchronized ( currentStream ) { currentStream.println( x ); currentStream.notifyAll(); }
        }

        @Override
        public void print( boolean b )
        {
            final PrintStream currentStream = getOutputStreamForCurrentThread();
            synchronized ( currentStream ) { currentStream.print( b ); currentStream.notifyAll(); }
        }

        @Override
        public void println( boolean x )
        {
            final PrintStream currentStream = getOutputStreamForCurrentThread();
            synchronized ( currentStream ) { currentStream.println( x ); currentStream.notifyAll(); }
        }

        @Override
        public void print( char s[] )
        {
            final PrintStream currentStream = getOutputStreamForCurrentThread();
            synchronized ( currentStream ) { currentStream.print( s ); currentStream.notifyAll(); }
        }

        @Override
        public void println( char x[] )
        {
            final PrintStream currentStream = getOutputStreamForCurrentThread();
            synchronized ( currentStream ) { currentStream.println( x ); currentStream.notifyAll(); }
        }

        @Override
        public void print( Object obj )
        {
            final PrintStream currentStream = getOutputStreamForCurrentThread();
            synchronized ( currentStream ) { currentStream.print( obj ); currentStream.notifyAll(); }
        }

        @Override
        public void println( Object x )
        {
            final PrintStream currentStream = getOutputStreamForCurrentThread();
            synchronized ( currentStream ) { currentStream.println( x ); currentStream.notifyAll(); }
        }

        @Override
        public void print( String s )
        {
            final PrintStream currentStream = getOutputStreamForCurrentThread();
            synchronized ( currentStream ) { currentStream.print( s ); currentStream.notifyAll(); }
        }

        @Override
        public void println( String x )
        {
            final PrintStream currentStream = getOutputStreamForCurrentThread();
            synchronized ( currentStream ) { currentStream.println( x ); currentStream.notifyAll(); }
        }

        @Override
        public void write( byte b[], int off, int len )
        {
            final PrintStream currentStream = getOutputStreamForCurrentThread();
            synchronized ( currentStream ) { currentStream.write( b, off, len ); currentStream.notifyAll(); }
        }

        // close and flush touch only the thread-bound stream; no new output needs to be announced.
        @Override
        public void close()
        {
            getOutputStreamForCurrentThread().close();
        }

        @Override
        public void flush()
        {
            getOutputStreamForCurrentThread().flush();
        }

        @Override
        public void write( int b )
        {
            final PrintStream currentStream = getOutputStreamForCurrentThread();
            synchronized ( currentStream ) { currentStream.write( b ); currentStream.notifyAll(); }
        }

        @Override
        public void write( byte b[] ) throws IOException
        {
            final PrintStream currentStream = getOutputStreamForCurrentThread();
            synchronized ( currentStream ) { currentStream.write( b ); currentStream.notifyAll(); }
        }
    }
}
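/*
 * Simplified, self-contained sketch of the technique ThreadOutputMuxer uses above: System.out is
 * replaced with a PrintStream that picks a per-thread target via a ThreadLocal, so concurrent
 * workers write into private buffers instead of interleaving on the console. Class, method and
 * task names below are illustrative only and are not part of Maven; only println(String) is
 * routed, to keep the sketch short.
 */
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class ThreadBoundOutputSketch {

  private static final ThreadLocal<String> CURRENT_TASK = new ThreadLocal<>();
  private static final Map<String, ByteArrayOutputStream> BUFFERS = new ConcurrentHashMap<>();
  private static final PrintStream REAL_OUT = System.out;

  public static void main(String[] args) throws InterruptedException {
    // Route System.out through a stream that chooses a buffer based on the calling thread.
    System.setOut(new PrintStream(REAL_OUT, true) {
      @Override
      public void println(String line) {
        String task = CURRENT_TASK.get();
        if (task == null) {
          REAL_OUT.println(line);     // unknown thread: pass straight through
        } else {
          buffer(task).println(line); // known worker: capture into its private buffer
        }
      }
    });

    Thread a = worker("task-a");
    Thread b = worker("task-b");
    a.start();
    b.start();
    a.join();
    b.join();

    // Restore the real stream, then replay each task's captured output without interleaving.
    System.setOut(REAL_OUT);
    BUFFERS.forEach((task, buf) -> REAL_OUT.print(buf.toString()));
  }

  private static Thread worker(String task) {
    return new Thread(() -> {
      CURRENT_TASK.set(task);
      System.out.println("hello from " + task);
      System.out.println("goodbye from " + task);
    });
  }

  private static PrintStream buffer(String task) {
    return new PrintStream(BUFFERS.computeIfAbsent(task, t -> new ByteArrayOutputStream()), true);
  }
}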
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.drill.exec.server.options; import java.time.format.DateTimeFormatter; import java.util.Set; import org.apache.drill.shaded.guava.com.google.common.base.Joiner; import org.apache.drill.shaded.guava.com.google.common.collect.Sets; import org.apache.drill.common.exceptions.UserException; import org.apache.drill.common.util.DrillStringUtils; import org.apache.drill.exec.ExecConstants; import org.apache.drill.exec.server.options.OptionValue.Kind; import org.apache.drill.exec.util.ImpersonationUtil; public class TypeValidators { private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TypeValidators.class); public static class NonNegativeLongValidator extends LongValidator { private final long max; public NonNegativeLongValidator(String name, long max, OptionDescription description) { super(name, description); this.max = max; } @Override public void validate(final OptionValue v, final OptionMetaData metaData, final OptionSet manager) { super.validate(v, metaData, manager); if (v.num_val > max || v.num_val < 0) { throw UserException.validationError() .message(String.format("Option %s must be between %d and %d.", getOptionName(), 0, max)) .build(logger); } } } public static class PositiveLongValidator extends LongValidator { protected final long max; public PositiveLongValidator(String name, long max, OptionDescription description) { super(name, description); this.max = max; } @Override public void validate(final OptionValue v, final OptionMetaData metaData, final OptionSet manager) { super.validate(v, metaData, manager); if (v.num_val > max || v.num_val < 1) { throw UserException.validationError() .message(String.format("Option %s must be between %d and %d.", getOptionName(), 1, max)) .build(logger); } } } public static class PowerOfTwoLongValidator extends PositiveLongValidator { public PowerOfTwoLongValidator(String name, long max, OptionDescription description) { super(name, max, description); } @Override public void validate(final OptionValue v, final OptionMetaData metaData, final OptionSet manager) { super.validate(v, metaData, manager); if (!isPowerOfTwo(v.num_val)) { throw UserException.validationError() .message(String.format("Option %s must be a power of two.", getOptionName())) .build(logger); } } private static boolean isPowerOfTwo(long num) { return (num & (num - 1)) == 0; } } public static class RangeDoubleValidator extends DoubleValidator { protected final double min; protected final double max; public RangeDoubleValidator(String name, double min, double max, OptionDescription description) { super(name, description); this.min = min; this.max = max; } @Override public void validate(final OptionValue v, final OptionMetaData metaData, final OptionSet manager) { 
super.validate(v, metaData, manager); if (v.float_val > max || v.float_val < min) { throw UserException.validationError() .message(String.format("Option %s must be between %f and %f.", getOptionName(), min, max)) .build(logger); } } } public static class MinRangeDoubleValidator extends RangeDoubleValidator { private final String maxValidatorName; public MinRangeDoubleValidator(String name, double min, double max, String maxValidatorName, OptionDescription description) { super(name, min, max, description); this.maxValidatorName = maxValidatorName; } @Override public void validate(final OptionValue v, final OptionMetaData metaData, final OptionSet manager) { super.validate(v, metaData, manager); OptionValue maxValue = manager.getOption(maxValidatorName); if (v.float_val > maxValue.float_val) { throw UserException.validationError() .message(String.format("Option %s must be less than or equal to Option %s", getOptionName(), maxValidatorName)) .build(logger); } } } public static class MaxRangeDoubleValidator extends RangeDoubleValidator { private final String minValidatorName; public MaxRangeDoubleValidator(String name, double min, double max, String minValidatorName, OptionDescription description) { super(name, min, max, description); this.minValidatorName = minValidatorName; } @Override public void validate(final OptionValue v, final OptionMetaData metaData, final OptionSet manager) { super.validate(v, metaData, manager); OptionValue minValue = manager.getOption(minValidatorName); if (v.float_val < minValue.float_val) { throw UserException.validationError() .message(String.format("Option %s must be greater than or equal to Option %s", getOptionName(), minValidatorName)) .build(logger); } } } public static class BooleanValidator extends TypeValidator { public BooleanValidator(String name, OptionDescription description) { super(name, Kind.BOOLEAN, description); } } public static class StringValidator extends TypeValidator { public StringValidator(String name, OptionDescription description) { super(name, Kind.STRING, description); } } public static class LongValidator extends TypeValidator { public LongValidator(String name, OptionDescription description) { super(name, Kind.LONG, description); } } public static class DoubleValidator extends TypeValidator { public DoubleValidator(String name, OptionDescription description) { super(name, Kind.DOUBLE, description); } } public static class IntegerValidator extends LongValidator { public IntegerValidator(String name, OptionDescription description) { super(name, description); } @Override public void validate(final OptionValue v, final OptionMetaData metaData, final OptionSet manager) { super.validate(v, metaData, manager); if (v.num_val > Integer.MAX_VALUE || v.num_val < Integer.MIN_VALUE) { throw UserException.validationError() .message(String.format("Option %s does not have a valid integer value", getOptionName())) .build(logger); } } } public static class RangeLongValidator extends LongValidator { private final long min; private final long max; public RangeLongValidator(String name, long min, long max, OptionDescription description) { super(name, description); this.min = min; this.max = max; } @Override public void validate(final OptionValue v, final OptionMetaData metaData, final OptionSet manager) { super.validate(v, metaData, manager); if (v.num_val > max || v.num_val < min) { throw UserException.validationError() .message(String.format("Option %s must be between %d and %d.", getOptionName(), min, max)) .build(logger); } } } /** * Validator that 
checks if the given value is included in a list of acceptable values. Case insensitive. */ public static class EnumeratedStringValidator extends StringValidator { private final Set<String> valuesSet = Sets.newLinkedHashSet(); public EnumeratedStringValidator(String name, OptionDescription description, String... values) { super(name, description); for (String value : values) { valuesSet.add(value.toLowerCase()); } } @Override public void validate(final OptionValue v, final OptionMetaData metaData, final OptionSet manager) { super.validate(v, metaData, manager); if (!valuesSet.contains(v.string_val.toLowerCase())) { throw UserException.validationError() .message(String.format("Option %s must be one of: %s.", getOptionName(), valuesSet)) .build(logger); } } } /** * Unless explicitly changed by the user previously, the admin user * can only be determined at runtime */ public static class AdminUsersValidator extends StringValidator { public final String DEFAULT_ADMIN_USERS = "%drill_process_user%"; public AdminUsersValidator(String name, OptionDescription description) { super(name, description); } public String getAdminUsers(OptionSet optionManager) { String adminUsers = optionManager.getOption(ExecConstants.ADMIN_USERS_VALIDATOR); // if this option has not been changed by the user then return the // process user if (adminUsers.equals(DEFAULT_ADMIN_USERS)) { adminUsers = ImpersonationUtil.getProcessUserName(); } adminUsers = DrillStringUtils.sanitizeCSV(adminUsers); return adminUsers; } } /** * Unless explicitly changed by the user previously, the admin user * groups can only be determined at runtime */ public static class AdminUserGroupsValidator extends StringValidator { public final String DEFAULT_ADMIN_USER_GROUPS = "%drill_process_user_groups%"; public AdminUserGroupsValidator(String name, OptionDescription description) { super(name, description); } public String getAdminUserGroups(OptionSet optionManager) { String adminUserGroups = optionManager.getOption(ExecConstants.ADMIN_USER_GROUPS_VALIDATOR); // if this option has not been changed by the user then return the // process user groups if (adminUserGroups.equals(DEFAULT_ADMIN_USER_GROUPS)) { adminUserGroups = Joiner.on(",").join(ImpersonationUtil.getProcessUserGroupNames()); } adminUserGroups = DrillStringUtils.sanitizeCSV(adminUserGroups); return adminUserGroups; } } /** Max width is a special validator which computes and validates * the maxwidth. If the maxwidth is already set in system/session * the value is returned or else it is computed dynamically based on * the available number of processors and cpu load average */ public static class MaxWidthValidator extends LongValidator{ public MaxWidthValidator(String name, OptionDescription description) { super(name, description); } public int computeMaxWidth(double cpuLoadAverage, long maxWidth) { // if maxwidth is already set return it if (maxWidth != 0) { return (int) maxWidth; } // else compute the value and return else { int availProc = Runtime.getRuntime().availableProcessors(); long maxWidthPerNode = Math.max(1, Math.min(availProc, Math.round(availProc * cpuLoadAverage))); return (int) maxWidthPerNode; } } } /** * Validator that checks if the given DateTime format template is valid. * See {@link DateTimeFormatter} for the acceptable values. 
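 * <p>
 * For example, {@code "yyyy-MM-dd HH:mm:ss"} is accepted, whereas any pattern that
 * {@link DateTimeFormatter#ofPattern(String)} rejects with an {@code IllegalArgumentException}
 * is reported as a validation error by this validator.
 * </p>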
*/ public static class DateTimeFormatValidator extends StringValidator { public DateTimeFormatValidator(String name, OptionDescription description) { super(name, description); } @Override public void validate(OptionValue v, OptionMetaData metaData, OptionSet manager) { super.validate(v, metaData, manager); if (!v.string_val.isEmpty()) { try { DateTimeFormatter.ofPattern(v.string_val); } catch (IllegalArgumentException e) { throw UserException.validationError() .message("'%s' is not a valid DateTime format pattern: %s", v.string_val, e.getMessage()) .build(logger); } } } } public static abstract class TypeValidator extends OptionValidator { private final Kind kind; public TypeValidator(final String name, final Kind kind, final OptionDescription description) { super(name, description); this.kind = kind; } @Override public void validate(final OptionValue v, final OptionMetaData metaData, final OptionSet manager) { if (v.kind != kind) { throw UserException.validationError() .message(String.format("Option %s must be of type %s but you tried to set to %s.", getOptionName(), kind.name(), v.kind.name())) .build(logger); } } @Override public Kind getKind() { return kind; } @Override public String getConfigProperty() { return ExecConstants.bootDefaultFor(getOptionName()); } } }
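/*
 * Standalone sketch (not Drill code) of the power-of-two check used by PowerOfTwoLongValidator
 * above. A positive value is a power of two exactly when a single bit is set, i.e. when
 * (num & (num - 1)) == 0; the validator can rely on PositiveLongValidator having already
 * rejected values below 1, otherwise 0 would pass the raw bit test as well. Names here are
 * illustrative only.
 */
public class PowerOfTwoCheckSketch {

  static boolean isPowerOfTwo(long num) {
    // Clearing the lowest set bit leaves zero only for powers of two (and for zero itself).
    return num > 0 && (num & (num - 1)) == 0;
  }

  public static void main(String[] args) {
    long[] samples = {1, 2, 3, 64, 100, 1024};
    for (long n : samples) {
      System.out.println(n + " -> " + isPowerOfTwo(n)); // 1, 2, 64 and 1024 are powers of two
    }
  }
}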
package com.communote.plugins.api.rest.resource.note; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import javax.ws.rs.core.Request; import javax.ws.rs.core.Response; import javax.ws.rs.core.UriInfo; import com.communote.common.util.PageableList; import com.communote.plugins.api.rest.exception.ExtensionNotSupportedException; import com.communote.plugins.api.rest.exception.ResponseBuildException; import com.communote.plugins.api.rest.resource.DefaultResourceHandler; import com.communote.plugins.api.rest.resource.ResourceHandlerHelper; import com.communote.plugins.api.rest.resource.note.property.PropertyResourceHelper; import com.communote.plugins.api.rest.response.ResponseHelper; import com.communote.plugins.api.rest.service.IllegalRequestParameterException; import com.communote.server.api.ServiceLocator; import com.communote.server.api.core.blog.BlogNotFoundException; import com.communote.server.api.core.note.NoteData; import com.communote.server.api.core.note.NoteRenderContext; import com.communote.server.api.core.note.NoteStoringTO; import com.communote.server.api.core.note.processor.NoteStoringPreProcessorException; import com.communote.server.api.core.security.AuthorizationException; import com.communote.server.core.blog.NoteNotFoundException; import com.communote.server.core.storing.ResourceStoringManagement; import com.communote.server.core.vo.blog.NoteModificationResult; import com.communote.server.core.vo.query.QueryParameters.OrderDirection; import com.communote.server.core.vo.query.config.QueryParametersParameterNameProvider; import com.communote.server.core.vo.query.post.NoteQueryParameters; import com.communote.server.service.NoteService; /** * Is the handler class to provide data for a note resource to the resource class. All the list * parameter are collected in a parameter map expected the <tt>filterHtml</tt>. This value is * evaluated within this class. * * @author Communote GmbH - <a href="http://www.communote.com/">http://www.communote.com/</a> */ public class NoteResourceHandler extends DefaultResourceHandler<CreateNoteParameter, EditNoteParameter, DeleteNoteParameter, GetNoteParameter, GetCollectionNoteParameter> { /** * */ public NoteResourceHandler() { super(new NoteResourceValidator()); } /** * Removes orphaned attachments that were previously uploaded but were not attached to the * created note. * * @param request * the request * @param attribute * key under which the attachments are stored in the session * @param attachmentIds * the attachment IDs that were added to the note * @throws AuthorizationException * Thrown, when the user is not allowed to update the attachments. */ private void cleanupAttachments(Request request, String attribute, Long[] attachmentIds) throws AuthorizationException { Set<Long> uploadedAttachments = ResourceHandlerHelper.getUploadedAttachmentsFromSession( request, attribute); if (uploadedAttachments != null) { // remove all IDs that were saved with note if (attachmentIds != null) { for (Long id : attachmentIds) { uploadedAttachments.remove(id); } } ServiceLocator.findService(ResourceStoringManagement.class).deleteOrphanedAttachments( uploadedAttachments); ResourceHandlerHelper.removeUploadedAttachmentsFromSession(request, attribute); } } /** * Converts the note list data items to note resources. 
* * @param noteListItems * the items to convert * @param notes * collection for adding the converted notes */ private void convertToNoteResources(PageableList<NoteData> noteListItems, Collection<NoteResource> notes) { for (NoteData postListItem : noteListItems) { notes.add(NoteBuildHelper.buildNoteResource(postListItem)); } } /** * Extract the crosspost topic alias into a set * * @param crosspostTopicAliases * the aliases, can be null * @return the set of topic aliases or null if input was null */ private HashSet<String> extractTopicAliases(String[] crosspostTopicAliases) { if (crosspostTopicAliases == null) { return null; } return new HashSet<>(Arrays.asList(crosspostTopicAliases)); } /** * Get the notes filtered by direct message restriction and/or notifications for current user * restrictions and/or followed items restriction * * @param getCollectionNoteParameter * an object that contains all the parameters of the request * @param renderContext * the context for rendering * @param showDirectMessages * whether to show only direct messages * @param showNotesForMe * whether to show only notifications for current user * @param showFollowedItems * whether to show only followed items * @param request * The request. * @return the found notes */ private List<NoteResource> getNotesOfDirectNotifyFollowedFilter( GetCollectionNoteParameter getCollectionNoteParameter, NoteRenderContext renderContext, boolean showDirectMessages, boolean showNotesForMe, boolean showFollowedItems, Request request) { // TODO: KENMEI-3019 if ((showDirectMessages ? 1 : 0) + (showNotesForMe ? 1 : 0) + (showFollowedItems ? 1 : 0) > 1) { // Currently there is no opportunity to set a correct offset for // combined filters. getCollectionNoteParameter.setOffset(0); } HashSet<NoteResource> notes = new HashSet<>(); QueryParametersParameterNameProvider nameProvider = ResourceHandlerHelper .getNameProvider(request); // BE doesn't support OR combination of the 3 conditions so we do separate queries and merge // the results. Need at most 2 queries since notesForMe contain all DMs. if (showNotesForMe || showDirectMessages) { getCollectionNoteParameter.setF_showDirectMessages(!showNotesForMe); getCollectionNoteParameter.setF_showNotesForMe(showNotesForMe); getCollectionNoteParameter.setF_showFollowedItems(false); NoteQueryParameters noteQueryInstance = NoteResourceHelper.configureQueryInstance( getCollectionNoteParameter, nameProvider, renderContext.getLocale()); setSortByDate(getCollectionNoteParameter, noteQueryInstance); convertToNoteResources( NoteResourceHelper.getPageableList(noteQueryInstance, renderContext), notes); } if (showFollowedItems) { getCollectionNoteParameter.setF_showDirectMessages(false); getCollectionNoteParameter.setF_showNotesForMe(false); getCollectionNoteParameter.setF_showFollowedItems(true); NoteQueryParameters noteQueryInstance = NoteResourceHelper.configureQueryInstance( getCollectionNoteParameter, nameProvider, renderContext.getLocale()); setSortByDate(getCollectionNoteParameter, noteQueryInstance); // duplicates won't be added to the set convertToNoteResources( NoteResourceHelper.getPageableList(noteQueryInstance, renderContext), notes); } // sort the notes by date and return at most maxCount items ArrayList<NoteResource> foundNotes = new ArrayList<>(notes); return NoteResourceHelper.sortAndLimitNotes(foundNotes, getCollectionNoteParameter.getMaxCount()); } /** * Create a single note on the server. 
This note can either be a reply or a new note * * @param createNoteParameter * - an object that contains all the parameter that can or have to be used for such a * request * @param requestedMimeType * - is the mime type that indicates which data exchange format to use * @param uriInfo * - this object is created by the request and contains some request specific data * @param sessionId * sessionId * @param request * - javax request * @return an response object containing a http status code and a message * @throws NoteStoringPreProcessorException * Exception * @throws IllegalRequestParameterException * if text value is wrong * @throws AuthorizationException * user is not authorized * @throws BlogNotFoundException * blog was not found * * @throws ResponseBuildException * exception while building the response * @throws ExtensionNotSupportedException * extension is not supported */ @Override public Response handleCreateInternally(CreateNoteParameter createNoteParameter, String requestedMimeType, UriInfo uriInfo, String sessionId, Request request) throws IllegalRequestParameterException, BlogNotFoundException, AuthorizationException, NoteStoringPreProcessorException, ResponseBuildException, ExtensionNotSupportedException { NoteStoringTO noteStoringTO = NoteResourceHelper.buildNoteStoringTO(createNoteParameter); noteStoringTO.setProperties(PropertyResourceHelper .convertPropertyResourcesToStringPropertyTOs(createNoteParameter.getProperties())); NoteModificationResult result; if (createNoteParameter.getParentNoteId() == null) { Set<String> additionalBlogNameIds = extractTopicAliases(createNoteParameter .getCrossPostTopicAliases()); // create an entirely new note result = ServiceLocator.instance().getService(NoteService.class) .createNote(noteStoringTO, additionalBlogNameIds); } else { // create a reply note noteStoringTO.setParentNoteId(createNoteParameter.getParentNoteId()); result = ServiceLocator.instance().getService(NoteService.class) .createNote(noteStoringTO, null); } if (noteStoringTO.isPublish()) { // pass the upload session ID directly because it's not in the request anymore cleanupAttachments(request, createNoteParameter.getAttachmentUploadSessionId(), noteStoringTO.getAttachmentIds()); } return NoteBuildHelper.buildNoteResponse(request, result); } /** * Delete a note on the server * * @param deleteNoteParameter * - an object that contains all the parameter that can or have to be used for such a * request * @param requestedMimeType * - is the mime type that indicates which data exchange format to use * @param uriInfo * - this object is created by the request and contains some request specific data * @param sessionId * sessionId * @param request * - javax request * @return an response object containing a http status code and a message * @throws AuthorizationException * user is not authorized * @throws ResponseBuildException * exception while building the response * @throws ExtensionNotSupportedException * extension is not supported */ @Override public Response handleDeleteInternally(DeleteNoteParameter deleteNoteParameter, String requestedMimeType, UriInfo uriInfo, String sessionId, Request request) throws AuthorizationException, ResponseBuildException, ExtensionNotSupportedException { ServiceLocator.instance().getService(NoteService.class) .deleteNote(deleteNoteParameter.getNoteId(), false, false); return ResponseHelper.buildSuccessResponse(null, request, "restapi.message.resource.note.delete", deleteNoteParameter.getNoteId()); } /** * Changes an existing note on the server * * @param 
editNoteParameter * - an object that contains all the parameter that can or have to be used for such a * request * @param requestedMimeType * - is the mime type that indicates which data exchange format to use * @param uriInfo * - this object is created by the request and contains some request specific data * @param sessionId * sessionId * @param request * - javax request * @return an response object containing a http status code and a message * @throws NoteStoringPreProcessorException * Exception * @throws IllegalRequestParameterException * if text value is wrong * @throws AuthorizationException * user is not authorized * @throws BlogNotFoundException * blog was not found * @throws NoteNotFoundException * note was not found * @throws ResponseBuildException * exception while building the response * @throws ExtensionNotSupportedException * extension is not supported */ @Override public Response handleEditInternally(EditNoteParameter editNoteParameter, String requestedMimeType, UriInfo uriInfo, String sessionId, Request request) throws IllegalRequestParameterException, NoteNotFoundException, AuthorizationException, BlogNotFoundException, NoteStoringPreProcessorException, ResponseBuildException, ExtensionNotSupportedException { NoteModificationResult result; NoteStoringTO noteStoringTO = NoteResourceHelper.buildNoteStoringTO(editNoteParameter); noteStoringTO.setProperties(PropertyResourceHelper .convertPropertyResourcesToStringPropertyTOs(editNoteParameter.getProperties())); Set<String> additionalBlogNameIds = extractTopicAliases(editNoteParameter .getCrossPostTopicAliases()); result = ServiceLocator.instance().getService(NoteService.class) .updateNote(noteStoringTO, editNoteParameter.getNoteId(), additionalBlogNameIds); if (noteStoringTO.isPublish()) { // pass the upload session ID directly because it's not in the request anymore cleanupAttachments(request, editNoteParameter.getAttachmentUploadSessionId(), noteStoringTO.getAttachmentIds()); } return NoteBuildHelper.buildNoteResponse(request, result); } /** * Retrieve a single note from the server * * @param getNoteParameter * - an object that contains all the parameter that can or have to be used for such a * request * @param requestedMimeType * - is the mime type that indicates which data exchange format to use * @param uriInfo * - this object is created by the request and contains some request specific data * @param sessionId * sessionId * @param request * - javax request * @return an response object containing a http status code and a message * @throws NoteNotFoundException * Exception * @throws AuthorizationException * Exception * @throws ResponseBuildException * exception while building the response * @throws ExtensionNotSupportedException * extension is not supported */ @Override public Response handleGetInternally(GetNoteParameter getNoteParameter, String requestedMimeType, UriInfo uriInfo, String sessionId, Request request) throws AuthorizationException, NoteNotFoundException, ResponseBuildException, ExtensionNotSupportedException { // don't beautify because single GETs are usually made to edit the note later on NoteRenderContext context = NoteResourceHelper.createNoteRenderContext( getNoteParameter.getFilterHtml(), false, request); NoteData note = ServiceLocator.instance().getService(NoteService.class) .getNote(getNoteParameter.getNoteId(), context); NoteResource noteResource = NoteBuildHelper.buildNoteResource(note); return ResponseHelper.buildSuccessResponse(noteResource, request); } /** * Get method for the NoteCollection * * 
@param getCollectionNoteParameter * - an object that contains all the parameter that can or have to be used for such a * request * @param requestedMimeType * The requested mimetype. * @param uriInfo * All request information. * @param sessionId * the current session Id. * @param request * - javax request * @return Collection of Notes for the given parameters. * @throws ResponseBuildException * exception while building the response * @throws ExtensionNotSupportedException * extension is not supported */ @Override public Response handleListInternally(GetCollectionNoteParameter getCollectionNoteParameter, String requestedMimeType, UriInfo uriInfo, String sessionId, Request request) throws ResponseBuildException, ExtensionNotSupportedException { Boolean filterHtml = getCollectionNoteParameter.getFilterHtml(); NoteQueryParameters noteQueryInstance; boolean showDirectMessages = (getCollectionNoteParameter.getF_showDirectMessages() == null) ? false : getCollectionNoteParameter.getF_showDirectMessages(); boolean showNotesForMe = (getCollectionNoteParameter.getF_showNotesForMe() == null) ? false : getCollectionNoteParameter.getF_showNotesForMe(); boolean showFollowedItems = (getCollectionNoteParameter.getF_showFollowedItems() == null) ? false : getCollectionNoteParameter.getF_showFollowedItems(); NoteRenderContext renderContext = NoteResourceHelper.createNoteRenderContext(filterHtml, true, request); if (!(showDirectMessages || showNotesForMe || showFollowedItems)) { noteQueryInstance = NoteResourceHelper.configureQueryInstance( getCollectionNoteParameter, ResourceHandlerHelper.getNameProvider(request), renderContext.getLocale()); setSortByDate(getCollectionNoteParameter, noteQueryInstance); PageableList<NoteData> pageableNoteList = NoteResourceHelper.getPageableList( noteQueryInstance, renderContext); Map<String, Object> metaData = ResourceHandlerHelper.generateMetaDataForPaging( getCollectionNoteParameter.getOffset(), getCollectionNoteParameter.getMaxCount(), pageableNoteList.getMinNumberOfElements()); ArrayList<NoteResource> notes = new ArrayList<>(); convertToNoteResources(pageableNoteList, notes); return ResponseHelper.buildSuccessResponse(notes, request, metaData); } else { List<NoteResource> notes = getNotesOfDirectNotifyFollowedFilter( getCollectionNoteParameter, renderContext, showDirectMessages, showNotesForMe, showFollowedItems, request); return ResponseHelper.buildSuccessResponse(notes, request); } } /** * Set sort mode of notes * * @param getCollectionNoteParameter * {@link GetCollectionNoteParameter} * @param noteQueryInstance * {@link NoteQueryParameters} */ private void setSortByDate(GetCollectionNoteParameter getCollectionNoteParameter, NoteQueryParameters noteQueryInstance) { if (getCollectionNoteParameter.getF_discussionId() != null) { noteQueryInstance.setSortByDate(OrderDirection.ASCENDING); } else { noteQueryInstance.setSortByDate(OrderDirection.DESCENDING); } } }
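/*
 * Standalone sketch (not Communote code) of the merge strategy used by
 * getNotesOfDirectNotifyFollowedFilter above: since the backend cannot OR the three filters in a
 * single query, the results of separate queries are collected into a set (dropping duplicates),
 * sorted by creation date and truncated to maxCount. Types and the sort direction below are
 * illustrative only.
 */
import java.util.ArrayList;
import java.util.Comparator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

public class NoteMergeSketch {

  record Note(long id, long creationDate) { }

  static List<Note> mergeSortAndLimit(List<Note> forMeOrDirect, List<Note> followed, int maxCount) {
    Set<Note> merged = new LinkedHashSet<>();
    merged.addAll(forMeOrDirect); // first query: notes for me / direct messages
    merged.addAll(followed);      // second query: followed items; duplicates are dropped by the set
    List<Note> sorted = new ArrayList<>(merged);
    sorted.sort(Comparator.comparingLong(Note::creationDate).reversed()); // newest first
    return sorted.subList(0, Math.min(maxCount, sorted.size()));
  }

  public static void main(String[] args) {
    List<Note> a = List.of(new Note(1, 100), new Note(2, 200));
    List<Note> b = List.of(new Note(2, 200), new Note(3, 300));
    System.out.println(mergeSortAndLimit(a, b, 2)); // note 3 then note 2; the duplicate is dropped
  }
}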
package com.frostwire.jlibtorrent.alerts; import com.frostwire.jlibtorrent.swig.*; /** * @author gubatron * @author aldenml */ public enum AlertType { TORRENT_FINISHED(torrent_finished_alert.alert_type), TORRENT_REMOVED(torrent_removed_alert.alert_type), TORRENT_DELETED(torrent_deleted_alert.alert_type), TORRENT_PAUSED(torrent_paused_alert.alert_type), TORRENT_RESUMED(torrent_resumed_alert.alert_type), TORRENT_CHECKED(torrent_checked_alert.alert_type), TORRENT_ERROR(torrent_error_alert.alert_type), TORRENT_NEED_CERT(torrent_need_cert_alert.alert_type), INCOMING_CONNECTION(incoming_connection_alert.alert_type), ADD_TORRENT(add_torrent_alert.alert_type), SAVE_RESUME_DATA(save_resume_data_alert.alert_type), FASTRESUME_REJECTED(fastresume_rejected_alert.alert_type), BLOCK_FINISHED(block_finished_alert.alert_type), METADATA_RECEIVED(metadata_received_alert.alert_type), METADATA_FAILED(metadata_failed_alert.alert_type), FILE_COMPLETED(file_completed_alert.alert_type), FILE_RENAMED(file_renamed_alert.alert_type), FILE_RENAME_FAILED(file_rename_failed_alert.alert_type), FILE_ERROR(file_error_alert.alert_type), HASH_FAILED(hash_failed_alert.alert_type), PORTMAP(portmap_alert.alert_type), PORTMAP_ERROR(portmap_error_alert.alert_type), PORTMAP_LOG(portmap_log_alert.alert_type), TRACKER_ANNOUNCE(tracker_announce_alert.alert_type), TRACKER_REPLY(tracker_reply_alert.alert_type), TRACKER_WARNING(tracker_warning_alert.alert_type), TRACKER_ERROR(tracker_error_alert.alert_type), READ_PIECE(read_piece_alert.alert_type), STATE_CHANGED(state_changed_alert.alert_type), DHT_REPLY(dht_reply_alert.alert_type), DHT_BOOTSTRAP(dht_bootstrap_alert.alert_type), DHT_GET_PEERS(dht_get_peers_alert.alert_type), EXTERNAL_IP(external_ip_alert.alert_type), LISTEN_SUCCEEDED(listen_succeeded_alert.alert_type), STATE_UPDATE(state_update_alert.alert_type), SESSION_STATS(session_stats_alert.alert_type), SCRAPE_REPLY(scrape_reply_alert.alert_type), SCRAPE_FAILED(scrape_failed_alert.alert_type), LSD_PEER(lsd_peer_alert.alert_type), PEER_BLOCKED(peer_blocked_alert.alert_type), PERFORMANCE(performance_alert.alert_type), PIECE_FINISHED(piece_finished_alert.alert_type), SAVE_RESUME_DATA_FAILED(save_resume_data_failed_alert.alert_type), STATS(stats_alert.alert_type), STORAGE_MOVED(storage_moved_alert.alert_type), TORRENT_DELETE_FAILED(torrent_delete_failed_alert.alert_type), URL_SEED(url_seed_alert.alert_type), INVALID_REQUEST(invalid_request_alert.alert_type), LISTEN_FAILED(listen_failed_alert.alert_type), PEER_BAN(peer_ban_alert.alert_type), PEER_CONNECT(peer_connect_alert.alert_type), PEER_DISCONNECTED(peer_disconnected_alert.alert_type), PEER_ERROR(peer_error_alert.alert_type), PEER_SNUBBED(peer_snubbed_alert.alert_type), PEER_UNSNUBBED(peer_unsnubbed_alert.alert_type), REQUEST_DROPPED(request_dropped_alert.alert_type), UDP_ERROR(udp_error_alert.alert_type), BLOCK_DOWNLOADING(block_downloading_alert.alert_type), BLOCK_TIMEOUT(block_timeout_alert.alert_type), CACHE_FLUSHED(cache_flushed_alert.alert_type), DHT_ANNOUNCE(dht_announce_alert.alert_type), STORAGE_MOVED_FAILED(storage_moved_failed_alert.alert_type), TRACKERID(trackerid_alert.alert_type), UNWANTED_BLOCK(unwanted_block_alert.alert_type), DHT_ERROR(dht_error_alert.alert_type), DHT_PUT(dht_put_alert.alert_type), DHT_MUTABLE_ITEM(dht_mutable_item_alert.alert_type), DHT_IMMUTABLE_ITEM(dht_immutable_item_alert.alert_type), I2P(i2p_alert.alert_type), DHT_OUTGOING_GET_PEERS(dht_outgoing_get_peers_alert.alert_type), LOG(log_alert.alert_type), TORRENT_LOG(torrent_log_alert.alert_type), 
PEER_LOG(peer_log_alert.alert_type), LSD_ERROR(lsd_error_alert.alert_type), DHT_STATS(dht_stats_alert.alert_type), INCOMING_REQUEST(incoming_request_alert.alert_type), DHT_LOG(dht_log_alert.alert_type), DHT_PKT(dht_pkt_alert.alert_type), DHT_GET_PEERS_REPLY(dht_get_peers_reply_alert.alert_type), DHT_DIRECT_RESPONSE(dht_direct_response_alert.alert_type), PICKER_LOG(picker_log_alert.alert_type), SESSION_ERROR(session_error_alert.alert_type), DHT_LIVE_NODES(dht_live_nodes_alert.alert_type), SESSION_STATS_HEADER(session_stats_header_alert.alert_type), DHT_SAMPLE_INFOHASHES(dht_sample_infohashes_alert.alert_type), BLOCK_UPLOADED(block_uploaded_alert.alert_type), ALERTS_DROPPED(alerts_dropped_alert.alert_type), SOCKS5_ALERT(socks5_alert.alert_type), UNKNOWN(-1); private static final AlertType[] TABLE = buildTable(); AlertType(int swigValue) { this.swigValue = swigValue; } private final int swigValue; /** * @return the native swig value */ public int swig() { return swigValue; } /** * @param swigValue the native swig value * @return the API enum alert type */ public static AlertType fromSwig(int swigValue) { return TABLE[swigValue]; } private static AlertType[] buildTable() { AlertType[] arr = new AlertType[Alerts.NUM_ALERT_TYPES]; arr[0] = UNKNOWN; arr[1] = UNKNOWN; arr[2] = UNKNOWN; arr[3] = UNKNOWN; arr[4] = TORRENT_REMOVED; arr[5] = READ_PIECE; arr[6] = FILE_COMPLETED; arr[7] = FILE_RENAMED; arr[8] = FILE_RENAME_FAILED; arr[9] = PERFORMANCE; arr[10] = STATE_CHANGED; arr[11] = TRACKER_ERROR; arr[12] = TRACKER_WARNING; arr[13] = SCRAPE_REPLY; arr[14] = SCRAPE_FAILED; arr[15] = TRACKER_REPLY; arr[16] = DHT_REPLY; arr[17] = TRACKER_ANNOUNCE; arr[18] = HASH_FAILED; arr[19] = PEER_BAN; arr[20] = PEER_UNSNUBBED; arr[21] = PEER_SNUBBED; arr[22] = PEER_ERROR; arr[23] = PEER_CONNECT; arr[24] = PEER_DISCONNECTED; arr[25] = INVALID_REQUEST; arr[26] = TORRENT_FINISHED; arr[27] = PIECE_FINISHED; arr[28] = REQUEST_DROPPED; arr[29] = BLOCK_TIMEOUT; arr[30] = BLOCK_FINISHED; arr[31] = BLOCK_DOWNLOADING; arr[32] = UNWANTED_BLOCK; arr[33] = STORAGE_MOVED; arr[34] = STORAGE_MOVED_FAILED; arr[35] = TORRENT_DELETED; arr[36] = TORRENT_DELETE_FAILED; arr[37] = SAVE_RESUME_DATA; arr[38] = SAVE_RESUME_DATA_FAILED; arr[39] = TORRENT_PAUSED; arr[40] = TORRENT_RESUMED; arr[41] = TORRENT_CHECKED; arr[42] = URL_SEED; arr[43] = FILE_ERROR; arr[44] = METADATA_FAILED; arr[45] = METADATA_RECEIVED; arr[46] = UDP_ERROR; arr[47] = EXTERNAL_IP; arr[48] = LISTEN_FAILED; arr[49] = LISTEN_SUCCEEDED; arr[50] = PORTMAP_ERROR; arr[51] = PORTMAP; arr[52] = PORTMAP_LOG; arr[53] = FASTRESUME_REJECTED; arr[54] = PEER_BLOCKED; arr[55] = DHT_ANNOUNCE; arr[56] = DHT_GET_PEERS; arr[57] = STATS; arr[58] = CACHE_FLUSHED; arr[59] = UNKNOWN; arr[60] = LSD_PEER; arr[61] = TRACKERID; arr[62] = DHT_BOOTSTRAP; arr[63] = UNKNOWN; arr[64] = TORRENT_ERROR; arr[65] = TORRENT_NEED_CERT; arr[66] = INCOMING_CONNECTION; arr[67] = ADD_TORRENT; arr[68] = STATE_UPDATE; arr[69] = UNKNOWN; arr[70] = SESSION_STATS; arr[71] = UNKNOWN; arr[72] = UNKNOWN; arr[73] = DHT_ERROR; arr[74] = DHT_IMMUTABLE_ITEM; arr[75] = DHT_MUTABLE_ITEM; arr[76] = DHT_PUT; arr[77] = I2P; arr[78] = DHT_OUTGOING_GET_PEERS; arr[79] = LOG; arr[80] = TORRENT_LOG; arr[81] = PEER_LOG; arr[82] = LSD_ERROR; arr[83] = DHT_STATS; arr[84] = INCOMING_REQUEST; arr[85] = DHT_LOG; arr[86] = DHT_PKT; arr[87] = DHT_GET_PEERS_REPLY; arr[88] = DHT_DIRECT_RESPONSE; arr[89] = PICKER_LOG; arr[90] = SESSION_ERROR; arr[91] = DHT_LIVE_NODES; arr[92] = SESSION_STATS_HEADER; arr[93] = DHT_SAMPLE_INFOHASHES; arr[94] = 
BLOCK_UPLOADED; arr[95] = ALERTS_DROPPED; arr[96] = SOCKS5_ALERT; return arr; } }
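/*
 * Illustrative sketch, not from jlibtorrent: fromSwig(int) above indexes TABLE directly, so a swig
 * value outside the table bounds would raise ArrayIndexOutOfBoundsException. The snippet below
 * shows the same dense int-to-enum lookup-table idea with a bounds guard, using a hypothetical
 * enum rather than AlertType.
 */
public class EnumLookupExample {

    enum Status {
        OK(0), RETRY(1), FAILED(2), UNKNOWN(-1);

        private final int code;

        Status(int code) {
            this.code = code;
        }

        // Dense table indexed by the native code, built once.
        private static final Status[] TABLE = {OK, RETRY, FAILED};

        /** Bounds-checked lookup: anything outside the table maps to UNKNOWN instead of throwing. */
        static Status fromCode(int code) {
            return (code >= 0 && code < TABLE.length) ? TABLE[code] : UNKNOWN;
        }
    }

    public static void main(String[] args) {
        System.out.println(Status.fromCode(1));   // RETRY
        System.out.println(Status.fromCode(99));  // UNKNOWN
    }
}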
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to you under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.calcite.avatica.remote; import org.apache.calcite.avatica.AvaticaConnection; import org.apache.calcite.avatica.AvaticaStatement; import org.apache.calcite.avatica.ConnectionConfig; import org.apache.calcite.avatica.ConnectionPropertiesImpl; import org.apache.calcite.avatica.ConnectionSpec; import org.apache.calcite.avatica.Meta; import org.apache.calcite.avatica.jdbc.JdbcMeta; import org.apache.calcite.avatica.server.AvaticaJsonHandler; import org.apache.calcite.avatica.server.HttpServer; import org.apache.calcite.avatica.server.Main; import org.apache.calcite.avatica.server.Main.HandlerFactory; import com.google.common.cache.Cache; import org.eclipse.jetty.server.handler.AbstractHandler; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import java.lang.reflect.Field; import java.lang.reflect.Method; import java.net.MalformedURLException; import java.net.URL; import java.sql.Connection; import java.sql.DriverManager; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Random; import static org.hamcrest.CoreMatchers.is; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; /** * Tests that verify that the Driver still functions when requests are randomly bounced between * more than one server. */ public class AlternatingRemoteMetaTest { private static final ConnectionSpec CONNECTION_SPEC = ConnectionSpec.HSQLDB; private static String url; static { try { // Force DriverManager initialization before we hit AlternatingDriver->Driver.<clinit> // Otherwise Driver.<clinit> -> DriverManager.registerDriver -> scan service provider files // causes a deadlock; see [CALCITE-1060] DriverManager.getDrivers(); DriverManager.registerDriver(new AlternatingDriver()); } catch (SQLException e) { throw new RuntimeException(e); } } // Keep a reference to the servers we start to clean them up after private static final List<HttpServer> ACTIVE_SERVERS = new ArrayList<>(); /** Factory that provides a {@link JdbcMeta}. 
*/ public static class FullyRemoteJdbcMetaFactory implements Meta.Factory { private static JdbcMeta instance = null; private static JdbcMeta getInstance() { if (instance == null) { try { instance = new JdbcMeta(CONNECTION_SPEC.url, CONNECTION_SPEC.username, CONNECTION_SPEC.password); } catch (SQLException e) { throw new RuntimeException(e); } } return instance; } @Override public Meta create(List<String> args) { return getInstance(); } } /** * AvaticaHttpClient implementation that randomly chooses among the provided URLs. */ public static class AlternatingAvaticaHttpClient implements AvaticaHttpClient { private final List<AvaticaHttpClientImpl> clients; private final Random r = new Random(); public AlternatingAvaticaHttpClient(List<URL> urls) { //System.out.println("Constructing clients for " + urls); clients = new ArrayList<>(urls.size()); for (URL url : urls) { clients.add(new AvaticaHttpClientImpl(url)); } } public byte[] send(byte[] request) { AvaticaHttpClientImpl client = clients.get(r.nextInt(clients.size())); //System.out.println("URL: " + client.url); return client.send(request); } } /** * Driver implementation {@link AlternatingAvaticaHttpClient}. */ public static class AlternatingDriver extends Driver { public static final String PREFIX = "jdbc:avatica:remote-alternating:"; @Override protected String getConnectStringPrefix() { return PREFIX; } @Override public Meta createMeta(AvaticaConnection connection) { final ConnectionConfig config = connection.config(); final Service service = new RemoteService(getHttpClient(connection, config)); connection.setService(service); return new RemoteMeta(connection, service); } @Override AvaticaHttpClient getHttpClient(AvaticaConnection connection, ConnectionConfig config) { return new AlternatingAvaticaHttpClient(parseUrls(config.url())); } List<URL> parseUrls(String urlStr) { final List<URL> urls = new ArrayList<>(); final char comma = ','; int prevIndex = 0; int index = urlStr.indexOf(comma); if (-1 == index) { try { return Collections.singletonList(new URL(urlStr)); } catch (MalformedURLException e) { throw new RuntimeException(e); } } // String split w/o regex while (-1 != index) { try { urls.add(new URL(urlStr.substring(prevIndex, index))); } catch (MalformedURLException e) { throw new RuntimeException(e); } prevIndex = index + 1; index = urlStr.indexOf(comma, prevIndex); } // Get the last one try { urls.add(new URL(urlStr.substring(prevIndex))); } catch (MalformedURLException e) { throw new RuntimeException(e); } return urls; } } @BeforeClass public static void beforeClass() throws Exception { final String[] mainArgs = new String[] { FullyRemoteJdbcMetaFactory.class.getName() }; // Bind to '0' to pluck an ephemeral port instead of expecting a certain one to be free StringBuilder sb = new StringBuilder(); for (int i = 0; i < 2; i++) { if (sb.length() > 0) { sb.append(","); } HttpServer jsonServer = Main.start(mainArgs, 0, new HandlerFactory() { @Override public AbstractHandler createHandler(Service service) { return new AvaticaJsonHandler(service); } }); ACTIVE_SERVERS.add(jsonServer); sb.append("http://localhost:").append(jsonServer.getPort()); } url = AlternatingDriver.PREFIX + "url=" + sb.toString(); } @AfterClass public static void afterClass() throws Exception { for (HttpServer server : ACTIVE_SERVERS) { if (server != null) { server.stop(); } } } private static Meta getMeta(AvaticaConnection conn) throws Exception { Field f = AvaticaConnection.class.getDeclaredField("meta"); f.setAccessible(true); return (Meta) f.get(conn); } private 
static Meta.ExecuteResult prepareAndExecuteInternal(AvaticaConnection conn, final AvaticaStatement statement, String sql, int maxRowCount) throws Exception { Method m = AvaticaConnection.class.getDeclaredMethod("prepareAndExecuteInternal", AvaticaStatement.class, String.class, long.class); m.setAccessible(true); return (Meta.ExecuteResult) m.invoke(conn, statement, sql, maxRowCount); } private static Connection getConnection(JdbcMeta m, String id) throws Exception { Field f = JdbcMeta.class.getDeclaredField("connectionCache"); f.setAccessible(true); //noinspection unchecked Cache<String, Connection> connectionCache = (Cache<String, Connection>) f.get(m); return connectionCache.getIfPresent(id); } @Test public void testRemoteExecuteMaxRowCount() throws Exception { ConnectionSpec.getDatabaseLock().lock(); try (AvaticaConnection conn = (AvaticaConnection) DriverManager.getConnection(url)) { final AvaticaStatement statement = conn.createStatement(); prepareAndExecuteInternal(conn, statement, "select * from (values ('a', 1), ('b', 2))", 0); ResultSet rs = statement.getResultSet(); int count = 0; while (rs.next()) { count++; } assertEquals("Check maxRowCount=0 and ResultSets is 0 row", count, 0); assertEquals("Check result set meta is still there", rs.getMetaData().getColumnCount(), 2); rs.close(); statement.close(); conn.close(); } finally { ConnectionSpec.getDatabaseLock().unlock(); } } /** Test case for * <a href="https://issues.apache.org/jira/browse/CALCITE-780">[CALCITE-780] * HTTP error 413 when sending a long string to the Avatica server</a>. */ @Test public void testRemoteExecuteVeryLargeQuery() throws Exception { ConnectionSpec.getDatabaseLock().lock(); try { // Before the bug was fixed, a value over 7998 caused an HTTP 413. // 16K bytes, I guess. checkLargeQuery(8); checkLargeQuery(240); checkLargeQuery(8000); checkLargeQuery(240000); } finally { ConnectionSpec.getDatabaseLock().unlock(); } } private void checkLargeQuery(int n) throws Exception { try (AvaticaConnection conn = (AvaticaConnection) DriverManager.getConnection(url)) { final AvaticaStatement statement = conn.createStatement(); final String frenchDisko = "It said human existence is pointless\n" + "As acts of rebellious solidarity\n" + "Can bring sense in this world\n" + "La resistance!\n"; final String sql = "select '" + longString(frenchDisko, n) + "' as s from (values 'x')"; prepareAndExecuteInternal(conn, statement, sql, -1); ResultSet rs = statement.getResultSet(); int count = 0; while (rs.next()) { count++; } assertThat(count, is(1)); rs.close(); statement.close(); conn.close(); } } /** Creates a string of exactly {@code length} characters by concatenating * {@code fragment}. */ private static String longString(String fragment, int length) { assert fragment.length() > 0; final StringBuilder buf = new StringBuilder(); while (buf.length() < length) { buf.append(fragment); } buf.setLength(length); return buf.toString(); } @Test public void testRemoteConnectionProperties() throws Exception { ConnectionSpec.getDatabaseLock().lock(); try (AvaticaConnection conn = (AvaticaConnection) DriverManager.getConnection(url)) { String id = conn.id; final Map<String, ConnectionPropertiesImpl> m = ((RemoteMeta) getMeta(conn)).propsMap; assertFalse("remote connection map should start ignorant", m.containsKey(id)); // force creating a connection object on the remote side. 
try (final Statement stmt = conn.createStatement()) { assertTrue("creating a statement starts a local object.", m.containsKey(id)); assertTrue(stmt.execute("select count(1) from EMP")); } Connection remoteConn = getConnection(FullyRemoteJdbcMetaFactory.getInstance(), id); final boolean defaultRO = remoteConn.isReadOnly(); final boolean defaultAutoCommit = remoteConn.getAutoCommit(); final String defaultCatalog = remoteConn.getCatalog(); final String defaultSchema = remoteConn.getSchema(); conn.setReadOnly(!defaultRO); assertTrue("local changes dirty local state", m.get(id).isDirty()); assertEquals("remote connection has not been touched", defaultRO, remoteConn.isReadOnly()); conn.setAutoCommit(!defaultAutoCommit); assertEquals("remote connection has not been touched", defaultAutoCommit, remoteConn.getAutoCommit()); // further interaction with the connection will force a sync try (final Statement stmt = conn.createStatement()) { assertEquals(!defaultAutoCommit, remoteConn.getAutoCommit()); assertFalse("local values should be clean", m.get(id).isDirty()); } } finally { ConnectionSpec.getDatabaseLock().unlock(); } } @Test public void testQuery() throws Exception { ConnectionSpec.getDatabaseLock().lock(); try (AvaticaConnection conn = (AvaticaConnection) DriverManager.getConnection(url); Statement statement = conn.createStatement()) { assertFalse(statement.execute("SET SCHEMA \"SCOTT\"")); assertFalse( statement.execute( "CREATE TABLE \"FOO\"(\"KEY\" INTEGER NOT NULL, \"VALUE\" VARCHAR(10))")); assertFalse(statement.execute("SET TABLE \"FOO\" READONLY FALSE")); final int numRecords = 1000; for (int i = 0; i < numRecords; i++) { assertFalse(statement.execute("INSERT INTO \"FOO\" VALUES(" + i + ", '" + i + "')")); } // Make sure all the records are there that we expect ResultSet results = statement.executeQuery("SELECT count(KEY) FROM FOO"); assertTrue(results.next()); assertEquals(1000, results.getInt(1)); assertFalse(results.next()); results = statement.executeQuery("SELECT KEY, VALUE FROM FOO ORDER BY KEY ASC"); for (int i = 0; i < numRecords; i++) { assertTrue(results.next()); assertEquals(i, results.getInt(1)); assertEquals(Integer.toString(i), results.getString(2)); } } finally { ConnectionSpec.getDatabaseLock().unlock(); } } @Test public void testSingleUrlParsing() throws Exception { AlternatingDriver d = new AlternatingDriver(); List<URL> urls = d.parseUrls("http://localhost:1234"); assertEquals(Arrays.asList(new URL("http://localhost:1234")), urls); } @Test public void testMultipleUrlParsing() throws Exception { AlternatingDriver d = new AlternatingDriver(); List<URL> urls = d.parseUrls("http://localhost:1234,http://localhost:2345," + "http://localhost:3456"); List<URL> expectedUrls = Arrays.asList(new URL("http://localhost:1234"), new URL("http://localhost:2345"), new URL("http://localhost:3456")); assertEquals(expectedUrls, urls); } } // End AlternatingRemoteMetaTest.java
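/*
 * Illustrative sketch, not part of the Avatica test above: AlternatingDriver.parseUrls splits the
 * comma-separated server list manually ("String split w/o regex"). The equivalent formulation with
 * String.split is shown here for comparison; the class name is hypothetical.
 */
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;

public class UrlListParseExample {

    /** Splits "http://a,http://b" into URL objects, wrapping MalformedURLException unchecked as the test does. */
    static List<URL> parseUrls(String urlStr) {
        List<URL> urls = new ArrayList<>();
        for (String part : urlStr.split(",")) {
            try {
                urls.add(new URL(part));
            } catch (MalformedURLException e) {
                throw new RuntimeException(e);
            }
        }
        return urls;
    }

    public static void main(String[] args) {
        System.out.println(parseUrls("http://localhost:1234,http://localhost:2345"));
    }
}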
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.kafka; import java.util.ArrayList; import java.util.List; import java.util.Properties; import java.util.UUID; import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeUnit; import java.util.regex.Pattern; import org.apache.camel.Processor; import org.apache.camel.ResumeAware; import org.apache.camel.component.kafka.consumer.support.KafkaConsumerResumeStrategy; import org.apache.camel.health.HealthCheckAware; import org.apache.camel.health.HealthCheckHelper; import org.apache.camel.spi.StateRepository; import org.apache.camel.support.BridgeExceptionHandlerToErrorHandler; import org.apache.camel.support.DefaultConsumer; import org.apache.camel.support.service.ServiceHelper; import org.apache.camel.support.service.ServiceSupport; import org.apache.camel.util.ObjectHelper; import org.apache.kafka.clients.consumer.ConsumerConfig; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class KafkaConsumer extends DefaultConsumer implements ResumeAware<KafkaConsumerResumeStrategy>, HealthCheckAware { private static final Logger LOG = LoggerFactory.getLogger(KafkaConsumer.class); protected ExecutorService executor; private final KafkaEndpoint endpoint; private KafkaConsumerHealthCheck consumerHealthCheck; private KafkaHealthCheckRepository healthCheckRepository; // This list helps to work around the infinite loop of KAFKA-1894 private final List<KafkaFetchRecords> tasks = new ArrayList<>(); private volatile boolean stopOffsetRepo; private PollExceptionStrategy pollExceptionStrategy; private KafkaConsumerResumeStrategy resumeStrategy; public KafkaConsumer(KafkaEndpoint endpoint, Processor processor) { super(endpoint, processor); this.endpoint = endpoint; } @Override public void setResumeStrategy(KafkaConsumerResumeStrategy resumeStrategy) { this.resumeStrategy = resumeStrategy; } @Override public KafkaConsumerResumeStrategy getResumeStrategy() { return resumeStrategy; } @Override protected void doBuild() throws Exception { super.doBuild(); if (endpoint.getComponent().getPollExceptionStrategy() != null) { pollExceptionStrategy = endpoint.getComponent().getPollExceptionStrategy(); } else { pollExceptionStrategy = new DefaultPollExceptionStrategy(endpoint.getConfiguration().getPollOnError()); } } @Override public KafkaEndpoint getEndpoint() { return (KafkaEndpoint) super.getEndpoint(); } private String randomUUID() { return UUID.randomUUID().toString(); } Properties getProps() { KafkaConfiguration configuration = endpoint.getConfiguration(); Properties props = configuration.createConsumerProperties(); endpoint.updateClassProperties(props); ObjectHelper.ifNotEmpty(endpoint.getKafkaClientFactory().getBrokers(configuration), v -> 
props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, v)); String groupId = ObjectHelper.supplyIfEmpty(configuration.getGroupId(), this::randomUUID); props.put(ConsumerConfig.GROUP_ID_CONFIG, groupId); ObjectHelper.ifNotEmpty(configuration.getGroupInstanceId(), v -> props.put(ConsumerConfig.GROUP_INSTANCE_ID_CONFIG, v)); return props; } List<KafkaFetchRecords> getTasks() { return tasks; } @Override protected void doStart() throws Exception { LOG.info("Starting Kafka consumer on topic: {} with breakOnFirstError: {}", endpoint.getConfiguration().getTopic(), endpoint.getConfiguration().isBreakOnFirstError()); super.doStart(); // is the offset repository already started? StateRepository<String, String> repo = endpoint.getConfiguration().getOffsetRepository(); if (repo instanceof ServiceSupport) { boolean started = ((ServiceSupport) repo).isStarted(); // if not already started then we would do that and also stop it if (!started) { stopOffsetRepo = true; LOG.debug("Starting OffsetRepository: {}", repo); ServiceHelper.startService(endpoint.getConfiguration().getOffsetRepository()); } } executor = endpoint.createExecutor(); String topic = endpoint.getConfiguration().getTopic(); Pattern pattern = null; if (endpoint.getConfiguration().isTopicIsPattern()) { pattern = Pattern.compile(topic); } BridgeExceptionHandlerToErrorHandler bridge = new BridgeExceptionHandlerToErrorHandler(this); for (int i = 0; i < endpoint.getConfiguration().getConsumersCount(); i++) { KafkaFetchRecords task = new KafkaFetchRecords( this, pollExceptionStrategy, bridge, topic, pattern, i + "", getProps()); executor.submit(task); tasks.add(task); } // health-check is optional so discover and resolve healthCheckRepository = HealthCheckHelper.getHealthCheckRepository(endpoint.getCamelContext(), "camel-kafka", KafkaHealthCheckRepository.class); if (healthCheckRepository != null) { consumerHealthCheck = new KafkaConsumerHealthCheck(this, getRouteId()); healthCheckRepository.addHealthCheck(consumerHealthCheck); } } @Override protected void doStop() throws Exception { LOG.info("Stopping Kafka consumer on topic: {}", endpoint.getConfiguration().getTopic()); if (healthCheckRepository != null && consumerHealthCheck != null) { healthCheckRepository.removeHealthCheck(consumerHealthCheck); consumerHealthCheck = null; } if (executor != null) { if (getEndpoint() != null && getEndpoint().getCamelContext() != null) { // signal kafka consumer to stop for (KafkaFetchRecords task : tasks) { task.stop(); } int timeout = getEndpoint().getConfiguration().getShutdownTimeout(); LOG.debug("Shutting down Kafka consumer worker threads with timeout {} millis", timeout); getEndpoint().getCamelContext().getExecutorServiceManager().shutdownGraceful(executor, timeout); } else { executor.shutdown(); int timeout = endpoint.getConfiguration().getShutdownTimeout(); LOG.debug("Shutting down Kafka consumer worker threads with timeout {} millis", timeout); if (!executor.awaitTermination(timeout, TimeUnit.MILLISECONDS)) { LOG.warn("Shutting down Kafka {} consumer worker threads did not finish within {} millis", tasks.size(), timeout); } } if (!executor.isTerminated()) { tasks.forEach(KafkaFetchRecords::stop); executor.shutdownNow(); } } tasks.clear(); executor = null; if (stopOffsetRepo) { StateRepository<String, String> repo = endpoint.getConfiguration().getOffsetRepository(); LOG.debug("Stopping OffsetRepository: {}", repo); ServiceHelper.stopAndShutdownService(repo); } super.doStop(); } }
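/*
 * Illustrative sketch, not part of the Camel component above: getProps() falls back to a random
 * UUID when no groupId is configured. The same defaulting expressed with only the plain Kafka
 * client constants looks like this; the broker address is a placeholder assumption.
 */
import java.util.Properties;
import java.util.UUID;
import org.apache.kafka.clients.consumer.ConsumerConfig;

public class ConsumerPropsExample {

    static Properties consumerProps(String configuredGroupId) {
        Properties props = new Properties();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // hypothetical broker
        // Default the group id to a random UUID when none was configured, as the component does.
        String groupId = (configuredGroupId == null || configuredGroupId.isEmpty())
                ? UUID.randomUUID().toString()
                : configuredGroupId;
        props.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
        return props;
    }

    public static void main(String[] args) {
        System.out.println(consumerProps(null).getProperty(ConsumerConfig.GROUP_ID_CONFIG));
    }
}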
/** * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for * license information. * * Code generated by Microsoft (R) AutoRest Code Generator. */ package com.microsoft.azure.management.network.v2019_08_01.implementation; import com.microsoft.azure.SubResource; import com.microsoft.azure.management.network.v2019_08_01.DeviceProperties; import com.microsoft.azure.management.network.v2019_08_01.AddressSpace; import com.microsoft.azure.management.network.v2019_08_01.BgpSettings; import com.microsoft.azure.management.network.v2019_08_01.ProvisioningState; import java.util.List; import com.fasterxml.jackson.annotation.JsonProperty; import com.microsoft.rest.serializer.JsonFlatten; import com.microsoft.rest.SkipParentValidation; import com.microsoft.azure.Resource; /** * VpnSite Resource. */ @JsonFlatten @SkipParentValidation public class VpnSiteInner extends Resource { /** * The VirtualWAN to which the vpnSite belongs. */ @JsonProperty(value = "properties.virtualWan") private SubResource virtualWan; /** * The device properties. */ @JsonProperty(value = "properties.deviceProperties") private DeviceProperties deviceProperties; /** * The ip-address for the vpn-site. */ @JsonProperty(value = "properties.ipAddress") private String ipAddress; /** * The key for vpn-site that can be used for connections. */ @JsonProperty(value = "properties.siteKey") private String siteKey; /** * The AddressSpace that contains an array of IP address ranges. */ @JsonProperty(value = "properties.addressSpace") private AddressSpace addressSpace; /** * The set of bgp properties. */ @JsonProperty(value = "properties.bgpProperties") private BgpSettings bgpProperties; /** * The provisioning state of the VPN site resource. Possible values * include: 'Succeeded', 'Updating', 'Deleting', 'Failed'. */ @JsonProperty(value = "properties.provisioningState") private ProvisioningState provisioningState; /** * IsSecuritySite flag. */ @JsonProperty(value = "properties.isSecuritySite") private Boolean isSecuritySite; /** * List of all vpn site links. */ @JsonProperty(value = "properties.vpnSiteLinks") private List<VpnSiteLinkInner> vpnSiteLinks; /** * A unique read-only string that changes whenever the resource is updated. */ @JsonProperty(value = "etag", access = JsonProperty.Access.WRITE_ONLY) private String etag; /** * Resource ID. */ @JsonProperty(value = "id") private String id; /** * Get the VirtualWAN to which the vpnSite belongs. * * @return the virtualWan value */ public SubResource virtualWan() { return this.virtualWan; } /** * Set the VirtualWAN to which the vpnSite belongs. * * @param virtualWan the virtualWan value to set * @return the VpnSiteInner object itself. */ public VpnSiteInner withVirtualWan(SubResource virtualWan) { this.virtualWan = virtualWan; return this; } /** * Get the device properties. * * @return the deviceProperties value */ public DeviceProperties deviceProperties() { return this.deviceProperties; } /** * Set the device properties. * * @param deviceProperties the deviceProperties value to set * @return the VpnSiteInner object itself. */ public VpnSiteInner withDeviceProperties(DeviceProperties deviceProperties) { this.deviceProperties = deviceProperties; return this; } /** * Get the ip-address for the vpn-site. * * @return the ipAddress value */ public String ipAddress() { return this.ipAddress; } /** * Set the ip-address for the vpn-site. * * @param ipAddress the ipAddress value to set * @return the VpnSiteInner object itself. 
*/ public VpnSiteInner withIpAddress(String ipAddress) { this.ipAddress = ipAddress; return this; } /** * Get the key for vpn-site that can be used for connections. * * @return the siteKey value */ public String siteKey() { return this.siteKey; } /** * Set the key for vpn-site that can be used for connections. * * @param siteKey the siteKey value to set * @return the VpnSiteInner object itself. */ public VpnSiteInner withSiteKey(String siteKey) { this.siteKey = siteKey; return this; } /** * Get the AddressSpace that contains an array of IP address ranges. * * @return the addressSpace value */ public AddressSpace addressSpace() { return this.addressSpace; } /** * Set the AddressSpace that contains an array of IP address ranges. * * @param addressSpace the addressSpace value to set * @return the VpnSiteInner object itself. */ public VpnSiteInner withAddressSpace(AddressSpace addressSpace) { this.addressSpace = addressSpace; return this; } /** * Get the set of bgp properties. * * @return the bgpProperties value */ public BgpSettings bgpProperties() { return this.bgpProperties; } /** * Set the set of bgp properties. * * @param bgpProperties the bgpProperties value to set * @return the VpnSiteInner object itself. */ public VpnSiteInner withBgpProperties(BgpSettings bgpProperties) { this.bgpProperties = bgpProperties; return this; } /** * Get the provisioning state of the VPN site resource. Possible values include: 'Succeeded', 'Updating', 'Deleting', 'Failed'. * * @return the provisioningState value */ public ProvisioningState provisioningState() { return this.provisioningState; } /** * Set the provisioning state of the VPN site resource. Possible values include: 'Succeeded', 'Updating', 'Deleting', 'Failed'. * * @param provisioningState the provisioningState value to set * @return the VpnSiteInner object itself. */ public VpnSiteInner withProvisioningState(ProvisioningState provisioningState) { this.provisioningState = provisioningState; return this; } /** * Get isSecuritySite flag. * * @return the isSecuritySite value */ public Boolean isSecuritySite() { return this.isSecuritySite; } /** * Set isSecuritySite flag. * * @param isSecuritySite the isSecuritySite value to set * @return the VpnSiteInner object itself. */ public VpnSiteInner withIsSecuritySite(Boolean isSecuritySite) { this.isSecuritySite = isSecuritySite; return this; } /** * Get list of all vpn site links. * * @return the vpnSiteLinks value */ public List<VpnSiteLinkInner> vpnSiteLinks() { return this.vpnSiteLinks; } /** * Set list of all vpn site links. * * @param vpnSiteLinks the vpnSiteLinks value to set * @return the VpnSiteInner object itself. */ public VpnSiteInner withVpnSiteLinks(List<VpnSiteLinkInner> vpnSiteLinks) { this.vpnSiteLinks = vpnSiteLinks; return this; } /** * Get a unique read-only string that changes whenever the resource is updated. * * @return the etag value */ public String etag() { return this.etag; } /** * Get resource ID. * * @return the id value */ public String id() { return this.id; } /** * Set resource ID. * * @param id the id value to set * @return the VpnSiteInner object itself. */ public VpnSiteInner withId(String id) { this.id = id; return this; } }
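/*
 * Illustrative usage sketch, not a real deployment: because each withXxx setter on VpnSiteInner
 * returns the object itself, a site can be populated in one fluent chain. All field values below
 * are placeholders.
 */
import com.microsoft.azure.management.network.v2019_08_01.implementation.VpnSiteInner;

public class VpnSiteInnerUsageExample {

    public static void main(String[] args) {
        VpnSiteInner site = new VpnSiteInner()
                .withIpAddress("203.0.113.10")      // documentation address range
                .withSiteKey("example-psk")         // hypothetical pre-shared key
                .withIsSecuritySite(Boolean.FALSE);
        System.out.println(site.ipAddress() + ", security site: " + site.isSecuritySite());
    }
}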
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.http; import java.net.URI; import java.net.URISyntaxException; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import org.apache.camel.PollingConsumer; import org.apache.camel.Producer; import org.apache.camel.impl.DefaultPollingEndpoint; import org.apache.camel.spi.HeaderFilterStrategy; import org.apache.camel.spi.HeaderFilterStrategyAware; import org.apache.camel.util.ObjectHelper; import org.apache.commons.httpclient.HttpClient; import org.apache.commons.httpclient.HttpConnectionManager; import org.apache.commons.httpclient.auth.AuthPolicy; import org.apache.commons.httpclient.params.HttpClientParams; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Represents a <a href="http://camel.apache.org/http.html">HTTP endpoint</a> * * @version */ public class HttpEndpoint extends DefaultPollingEndpoint implements HeaderFilterStrategyAware { private static final transient Logger LOG = LoggerFactory.getLogger(HttpEndpoint.class); private HeaderFilterStrategy headerFilterStrategy = new HttpHeaderFilterStrategy(); private HttpBinding binding; private HttpComponent component; private URI httpUri; private HttpClientParams clientParams; private HttpClientConfigurer httpClientConfigurer; private HttpConnectionManager httpConnectionManager; private boolean throwExceptionOnFailure = true; private boolean bridgeEndpoint; private boolean matchOnUriPrefix; private boolean chunked = true; private boolean disableStreamCache; private String proxyHost; private int proxyPort; private String authMethodPriority; private boolean transferException; public HttpEndpoint() { } public HttpEndpoint(String endPointURI, HttpComponent component, URI httpURI) throws URISyntaxException { this(endPointURI, component, httpURI, null); } public HttpEndpoint(String endPointURI, HttpComponent component, URI httpURI, HttpConnectionManager httpConnectionManager) throws URISyntaxException { this(endPointURI, component, httpURI, new HttpClientParams(), httpConnectionManager, null); } public HttpEndpoint(String endPointURI, HttpComponent component, URI httpURI, HttpClientParams clientParams, HttpConnectionManager httpConnectionManager, HttpClientConfigurer clientConfigurer) throws URISyntaxException { super(endPointURI, component); this.component = component; this.httpUri = httpURI; this.clientParams = clientParams; this.httpClientConfigurer = clientConfigurer; this.httpConnectionManager = httpConnectionManager; } public Producer createProducer() throws Exception { return new HttpProducer(this); } public PollingConsumer createPollingConsumer() throws Exception { return new HttpPollingConsumer(this); } /** * Factory method used by producers and consumers to create a new {@link HttpClient} 
instance */ public HttpClient createHttpClient() { ObjectHelper.notNull(clientParams, "clientParams"); ObjectHelper.notNull(httpConnectionManager, "httpConnectionManager"); HttpClient answer = new HttpClient(getClientParams()); // configure http proxy from camelContext if (ObjectHelper.isNotEmpty(getCamelContext().getProperties().get("http.proxyHost")) && ObjectHelper.isNotEmpty(getCamelContext().getProperties().get("http.proxyPort"))) { String host = getCamelContext().getProperties().get("http.proxyHost"); int port = Integer.parseInt(getCamelContext().getProperties().get("http.proxyPort")); if (LOG.isDebugEnabled()) { LOG.debug("CamelContext properties http.proxyHost and http.proxyPort detected. Using http proxy host: " + host + " port: " + port); } answer.getHostConfiguration().setProxy(host, port); } if (proxyHost != null) { if (LOG.isDebugEnabled()) { LOG.debug("Using proxy: " + proxyHost + ":" + proxyPort); } answer.getHostConfiguration().setProxy(proxyHost, proxyPort); } if (authMethodPriority != null) { List<String> authPrefs = new ArrayList<String>(); Iterator it = getCamelContext().getTypeConverter().convertTo(Iterator.class, authMethodPriority); int i = 1; while (it.hasNext()) { Object value = it.next(); AuthMethod auth = getCamelContext().getTypeConverter().convertTo(AuthMethod.class, value); if (auth == null) { throw new IllegalArgumentException("Unknown authMethod: " + value + " in authMethodPriority: " + authMethodPriority); } if (LOG.isDebugEnabled()) { LOG.debug("Using authSchemePriority #" + i + ": " + auth); } authPrefs.add(auth.name()); i++; } if (!authPrefs.isEmpty()) { answer.getParams().setParameter(AuthPolicy.AUTH_SCHEME_PRIORITY, authPrefs); } } answer.setHttpConnectionManager(httpConnectionManager); HttpClientConfigurer configurer = getHttpClientConfigurer(); if (configurer != null) { configurer.configureHttpClient(answer); } return answer; } public void connect(HttpConsumer consumer) throws Exception { component.connect(consumer); } public void disconnect(HttpConsumer consumer) throws Exception { component.disconnect(consumer); } public boolean isLenientProperties() { // true to allow dynamic URI options to be configured and passed to external system for eg. the HttpProducer return true; } public boolean isSingleton() { return true; } // Properties //------------------------------------------------------------------------- /** * Provide access to the client parameters used on new {@link HttpClient} instances * used by producers or consumers of this endpoint. */ public HttpClientParams getClientParams() { return clientParams; } /** * Provide access to the client parameters used on new {@link HttpClient} instances * used by producers or consumers of this endpoint. 
*/ public void setClientParams(HttpClientParams clientParams) { this.clientParams = clientParams; } public HttpClientConfigurer getHttpClientConfigurer() { return httpClientConfigurer; } /** * Register a custom configuration strategy for new {@link HttpClient} instances * created by producers or consumers such as to configure authentication mechanisms etc * * @param httpClientConfigurer the strategy for configuring new {@link HttpClient} instances */ public void setHttpClientConfigurer(HttpClientConfigurer httpClientConfigurer) { this.httpClientConfigurer = httpClientConfigurer; } public HttpBinding getBinding() { if (binding == null) { binding = new DefaultHttpBinding(this); } return binding; } public void setBinding(HttpBinding binding) { this.binding = binding; } public String getPath() { return httpUri.getPath(); } public int getPort() { if (httpUri.getPort() == -1) { if ("https".equals(getProtocol())) { return 443; } else { return 80; } } return httpUri.getPort(); } public String getProtocol() { return httpUri.getScheme(); } public URI getHttpUri() { return httpUri; } public void setHttpUri(URI httpUri) { this.httpUri = httpUri; } public HttpConnectionManager getHttpConnectionManager() { return httpConnectionManager; } public void setHttpConnectionManager(HttpConnectionManager httpConnectionManager) { this.httpConnectionManager = httpConnectionManager; } public HeaderFilterStrategy getHeaderFilterStrategy() { return headerFilterStrategy; } public void setHeaderFilterStrategy(HeaderFilterStrategy headerFilterStrategy) { this.headerFilterStrategy = headerFilterStrategy; } public boolean isThrowExceptionOnFailure() { return throwExceptionOnFailure; } public void setThrowExceptionOnFailure(boolean throwExceptionOnFailure) { this.throwExceptionOnFailure = throwExceptionOnFailure; } public boolean isBridgeEndpoint() { return bridgeEndpoint; } public void setBridgeEndpoint(boolean bridge) { this.bridgeEndpoint = bridge; } public boolean isMatchOnUriPrefix() { return matchOnUriPrefix; } public void setMatchOnUriPrefix(boolean match) { this.matchOnUriPrefix = match; } public boolean isDisableStreamCache() { return this.disableStreamCache; } public void setDisableStreamCache(boolean disable) { this.disableStreamCache = disable; } public boolean isChunked() { return this.chunked; } public void setChunked(boolean chunked) { this.chunked = chunked; } public String getProxyHost() { return proxyHost; } public void setProxyHost(String proxyHost) { this.proxyHost = proxyHost; } public int getProxyPort() { return proxyPort; } public void setProxyPort(int proxyPort) { this.proxyPort = proxyPort; } public String getAuthMethodPriority() { return authMethodPriority; } public void setAuthMethodPriority(String authMethodPriority) { this.authMethodPriority = authMethodPriority; } public boolean isTransferException() { return transferException; } public void setTransferException(boolean transferException) { this.transferException = transferException; } }
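/*
 * Illustrative sketch, not part of the Camel component above: HttpEndpoint.getPort() substitutes
 * the scheme default (443 for https, 80 otherwise) when the endpoint URI carries no explicit port.
 * The same resolution, reduced to a standalone helper over java.net.URI:
 */
import java.net.URI;

public class DefaultPortExample {

    static int resolvePort(URI uri) {
        if (uri.getPort() == -1) {
            return "https".equals(uri.getScheme()) ? 443 : 80;
        }
        return uri.getPort();
    }

    public static void main(String[] args) {
        System.out.println(resolvePort(URI.create("https://example.org/api")));      // 443
        System.out.println(resolvePort(URI.create("http://example.org:8080/api")));  // 8080
    }
}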
package com.docusign.esign.model; import java.util.Objects; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonCreator; import io.swagger.annotations.ApiModel; import io.swagger.annotations.ApiModelProperty; /** * DisplayApplianceDocument */ public class DisplayApplianceDocument { @JsonProperty("attachmentDescription") private String attachmentDescription = null; @JsonProperty("documentId") private String documentId = null; @JsonProperty("documentType") private String documentType = null; @JsonProperty("envelopeId") private String envelopeId = null; @JsonProperty("externalDocumentId") private String externalDocumentId = null; @JsonProperty("latestPDFId") private String latestPDFId = null; @JsonProperty("name") private String name = null; @JsonProperty("pages") private Integer pages = null; public DisplayApplianceDocument attachmentDescription(String attachmentDescription) { this.attachmentDescription = attachmentDescription; return this; } /** * * @return attachmentDescription **/ @ApiModelProperty(example = "null", value = "") public String getAttachmentDescription() { return attachmentDescription; } public void setAttachmentDescription(String attachmentDescription) { this.attachmentDescription = attachmentDescription; } public DisplayApplianceDocument documentId(String documentId) { this.documentId = documentId; return this; } /** * Specifies the document ID number that the tab is placed on. This must refer to an existing Document's ID attribute. * @return documentId **/ @ApiModelProperty(example = "null", value = "Specifies the document ID number that the tab is placed on. This must refer to an existing Document's ID attribute.") public String getDocumentId() { return documentId; } public void setDocumentId(String documentId) { this.documentId = documentId; } public DisplayApplianceDocument documentType(String documentType) { this.documentType = documentType; return this; } /** * * @return documentType **/ @ApiModelProperty(example = "null", value = "") public String getDocumentType() { return documentType; } public void setDocumentType(String documentType) { this.documentType = documentType; } public DisplayApplianceDocument envelopeId(String envelopeId) { this.envelopeId = envelopeId; return this; } /** * The envelope ID of the envelope status that failed to post. 
* @return envelopeId **/ @ApiModelProperty(example = "null", value = "The envelope ID of the envelope status that failed to post.") public String getEnvelopeId() { return envelopeId; } public void setEnvelopeId(String envelopeId) { this.envelopeId = envelopeId; } public DisplayApplianceDocument externalDocumentId(String externalDocumentId) { this.externalDocumentId = externalDocumentId; return this; } /** * * @return externalDocumentId **/ @ApiModelProperty(example = "null", value = "") public String getExternalDocumentId() { return externalDocumentId; } public void setExternalDocumentId(String externalDocumentId) { this.externalDocumentId = externalDocumentId; } public DisplayApplianceDocument latestPDFId(String latestPDFId) { this.latestPDFId = latestPDFId; return this; } /** * * @return latestPDFId **/ @ApiModelProperty(example = "null", value = "") public String getLatestPDFId() { return latestPDFId; } public void setLatestPDFId(String latestPDFId) { this.latestPDFId = latestPDFId; } public DisplayApplianceDocument name(String name) { this.name = name; return this; } /** * * @return name **/ @ApiModelProperty(example = "null", value = "") public String getName() { return name; } public void setName(String name) { this.name = name; } public DisplayApplianceDocument pages(Integer pages) { this.pages = pages; return this; } /** * * @return pages **/ @ApiModelProperty(example = "null", value = "") public Integer getPages() { return pages; } public void setPages(Integer pages) { this.pages = pages; } @Override public boolean equals(java.lang.Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } DisplayApplianceDocument displayApplianceDocument = (DisplayApplianceDocument) o; return Objects.equals(this.attachmentDescription, displayApplianceDocument.attachmentDescription) && Objects.equals(this.documentId, displayApplianceDocument.documentId) && Objects.equals(this.documentType, displayApplianceDocument.documentType) && Objects.equals(this.envelopeId, displayApplianceDocument.envelopeId) && Objects.equals(this.externalDocumentId, displayApplianceDocument.externalDocumentId) && Objects.equals(this.latestPDFId, displayApplianceDocument.latestPDFId) && Objects.equals(this.name, displayApplianceDocument.name) && Objects.equals(this.pages, displayApplianceDocument.pages); } @Override public int hashCode() { return Objects.hash(attachmentDescription, documentId, documentType, envelopeId, externalDocumentId, latestPDFId, name, pages); } @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class DisplayApplianceDocument {\n"); sb.append(" attachmentDescription: ").append(toIndentedString(attachmentDescription)).append("\n"); sb.append(" documentId: ").append(toIndentedString(documentId)).append("\n"); sb.append(" documentType: ").append(toIndentedString(documentType)).append("\n"); sb.append(" envelopeId: ").append(toIndentedString(envelopeId)).append("\n"); sb.append(" externalDocumentId: ").append(toIndentedString(externalDocumentId)).append("\n"); sb.append(" latestPDFId: ").append(toIndentedString(latestPDFId)).append("\n"); sb.append(" name: ").append(toIndentedString(name)).append("\n"); sb.append(" pages: ").append(toIndentedString(pages)).append("\n"); sb.append("}"); return sb.toString(); } /** * Convert the given object to string with each line indented by 4 spaces * (except the first line). 
*/ private String toIndentedString(java.lang.Object o) { if (o == null) { return "null"; } return o.toString().replace("\n", "\n "); } }
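/*
 * Illustrative usage sketch, field values are placeholders: the generated model exposes both
 * fluent mutators and value-based equals/hashCode, so two documents built with the same fields
 * compare equal.
 */
import com.docusign.esign.model.DisplayApplianceDocument;

public class DisplayApplianceDocumentUsageExample {

    public static void main(String[] args) {
        DisplayApplianceDocument a = new DisplayApplianceDocument()
                .documentId("1")
                .name("contract.pdf")   // hypothetical document name
                .pages(3);
        DisplayApplianceDocument b = new DisplayApplianceDocument()
                .documentId("1")
                .name("contract.pdf")
                .pages(3);
        System.out.println(a.equals(b));  // true: equality is field-by-field
        System.out.println(a);            // toString() prints the indented field listing
    }
}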
/* * Copyright 2011 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.gradle.plugins.ear.descriptor.internal; import groovy.lang.Closure; import groovy.util.Node; import groovy.util.XmlParser; import groovy.xml.QName; import org.gradle.api.Action; import org.gradle.api.UncheckedIOException; import org.gradle.api.XmlProvider; import org.gradle.api.internal.DomNode; import org.gradle.api.model.ObjectFactory; import org.gradle.internal.Cast; import org.gradle.internal.IoActions; import org.gradle.internal.UncheckedException; import org.gradle.internal.file.PathToFileResolver; import org.gradle.internal.xml.XmlTransformer; import org.gradle.plugins.ear.descriptor.DeploymentDescriptor; import org.gradle.plugins.ear.descriptor.EarModule; import org.gradle.plugins.ear.descriptor.EarSecurityRole; import org.gradle.plugins.ear.descriptor.EarWebModule; import org.xml.sax.SAXException; import org.xml.sax.SAXNotRecognizedException; import javax.inject.Inject; import java.io.File; import java.io.FileReader; import java.io.IOException; import java.io.Reader; import java.io.Writer; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; public class DefaultDeploymentDescriptor implements DeploymentDescriptor { private static final String ACCESS_EXTERNAL_DTD = "http://javax.xml.XMLConstants/property/accessExternalDTD"; private static final String ALLOW_ANY_EXTERNAL_DTD = "all"; private final XmlTransformer transformer = new XmlTransformer(); private final PathToFileResolver fileResolver; private ObjectFactory objectFactory; private String fileName = "application.xml"; private String version = "6"; private String applicationName; private Boolean initializeInOrder = Boolean.FALSE; private String description; private String displayName; private String libraryDirectory; private Set<EarModule> modules = new LinkedHashSet<EarModule>(); private Set<EarSecurityRole> securityRoles = new LinkedHashSet<EarSecurityRole>(); private Map<String, String> moduleTypeMappings = new LinkedHashMap<String, String>(); @Inject public DefaultDeploymentDescriptor(PathToFileResolver fileResolver, ObjectFactory objectFactory) { this.fileResolver = fileResolver; this.objectFactory = objectFactory; } @Override public String getFileName() { return fileName; } @Override public void setFileName(String fileName) { this.fileName = fileName; readFrom(new File("META-INF", fileName)); } @Override public String getVersion() { return version; } @Override public void setVersion(String version) { this.version = version; } @Override public String getApplicationName() { return applicationName; } @Override public void setApplicationName(String applicationName) { this.applicationName = applicationName; } @Override public Boolean getInitializeInOrder() { return initializeInOrder; } @Override public void setInitializeInOrder(Boolean initializeInOrder) { this.initializeInOrder = initializeInOrder; } @Override public String getDescription() { return 
description; } @Override public void setDescription(String description) { this.description = description; } @Override public String getDisplayName() { return displayName; } @Override public void setDisplayName(String displayName) { this.displayName = displayName; } @Override public String getLibraryDirectory() { return libraryDirectory; } @Override public void setLibraryDirectory(String libraryDirectory) { this.libraryDirectory = libraryDirectory; } @Override public Set<EarModule> getModules() { return modules; } @Override public void setModules(Set<EarModule> modules) { this.modules = modules; } @Override public Set<EarSecurityRole> getSecurityRoles() { return securityRoles; } @Override public void setSecurityRoles(Set<EarSecurityRole> securityRoles) { this.securityRoles = securityRoles; } @Override public Map<String, String> getModuleTypeMappings() { return moduleTypeMappings; } @Override public void setModuleTypeMappings(Map<String, String> moduleTypeMappings) { this.moduleTypeMappings = moduleTypeMappings; } @Override public DefaultDeploymentDescriptor module(EarModule module, String type) { modules.add(module); moduleTypeMappings.put(module.getPath(), type); return this; } @Override public DefaultDeploymentDescriptor module(String path, String type) { return module(new DefaultEarModule(path), type); } @Override public DefaultDeploymentDescriptor webModule(String path, String contextRoot) { modules.add(new DefaultEarWebModule(path, contextRoot)); moduleTypeMappings.put(path, "web"); return this; } @Override public DefaultDeploymentDescriptor securityRole(EarSecurityRole role) { securityRoles.add(role); return this; } @Override public DeploymentDescriptor securityRole(String role) { securityRoles.add(new DefaultEarSecurityRole(role)); return this; } @Override public DeploymentDescriptor securityRole(Action<? super EarSecurityRole> action) { EarSecurityRole role = objectFactory.newInstance(DefaultEarSecurityRole.class); action.execute(role); securityRoles.add(role); return this; } @Override public DeploymentDescriptor withXml(Closure closure) { transformer.addAction(closure); return this; } @Override public DeploymentDescriptor withXml(Action<? super XmlProvider> action) { transformer.addAction(action); return this; } @Override public boolean readFrom(Object path) { if (fileResolver == null) { return false; } File descriptorFile = fileResolver.resolve(path); if (descriptorFile == null || !descriptorFile.exists()) { return false; } try { FileReader reader = new FileReader(descriptorFile); readFrom(reader); return true; } catch (IOException e) { throw new UncheckedIOException(e); } } private static XmlParser createParser() { try { XmlParser parser = new XmlParser(false, true, true); try { // If not set for >= JAXP 1.5 / Java8 won't allow referencing DTDs, e.g. 
// using http URLs, because Groovy's XmlParser requests FEATURE_SECURE_PROCESSING parser.setProperty(ACCESS_EXTERNAL_DTD, ALLOW_ANY_EXTERNAL_DTD); } catch (SAXNotRecognizedException ignore) { // property requires >= JAXP 1.5 / Java8 } return parser; } catch (Exception ex) { throw UncheckedException.throwAsUncheckedException(ex); } } @Override public DeploymentDescriptor readFrom(Reader reader) { try { Node appNode = createParser().parse(reader); version = (String) appNode.attribute("version"); for (final Node child : Cast.<List<Node>>uncheckedCast(appNode.children())) { String childLocalName = localNameOf(child); switch (childLocalName) { case "application-name": applicationName = child.text(); break; case "initialize-in-order": initializeInOrder = Boolean.valueOf(child.text()); break; case "description": description = child.text(); break; case "display-name": displayName = child.text(); break; case "library-directory": libraryDirectory = child.text(); break; case "module": EarModule module = null; for (Node moduleNode : Cast.<List<Node>>uncheckedCast(child.children())) { String moduleNodeLocalName = localNameOf(moduleNode); if (moduleNodeLocalName.equals("web")) { String webUri = childNodeText(moduleNode, "web-uri"); String contextRoot = childNodeText(moduleNode, "context-root"); module = new DefaultEarWebModule(webUri, contextRoot); modules.add(module); moduleTypeMappings.put(module.getPath(), "web"); } else if (moduleNodeLocalName.equals("alt-dd")) { assert module != null; module.setAltDeployDescriptor(moduleNode.text()); } else { module = new DefaultEarModule(moduleNode.text()); modules.add(module); moduleTypeMappings.put(module.getPath(), moduleNodeLocalName); } } break; case "security-role": String roleName = childNodeText(child, "role-name"); String description = childNodeText(child, "description"); securityRoles.add(new DefaultEarSecurityRole(roleName, description)); break; default: withXml(new Action<XmlProvider>() { @Override public void execute(XmlProvider xmlProvider) { xmlProvider.asNode().append(child); } }); break; } } } catch (IOException ex) { throw new UncheckedIOException(ex); } catch (SAXException ex) { throw UncheckedException.throwAsUncheckedException(ex); } finally { IoActions.closeQuietly(reader); } return this; } private static String childNodeText(Node root, String name) { for (Node child : Cast.<List<Node>>uncheckedCast(root.children())) { if (localNameOf(child).equals(name)) { return child.text(); } } return null; } private static String localNameOf(Node node) { return node.name() instanceof QName ? 
((QName) node.name()).getLocalPart() : String.valueOf(node.name()); } @Override public DefaultDeploymentDescriptor writeTo(Object path) { transformer.transform(toXmlNode(), fileResolver.resolve(path)); return this; } @Override public DefaultDeploymentDescriptor writeTo(Writer writer) { transformer.transform(toXmlNode(), writer); return this; } private DomNode toXmlNode() { DomNode root = new DomNode(nodeNameFor("application")); root.attributes().put("version", version); if (!"1.3".equals(version)) { root.attributes().put("xmlns:xsi", "http://www.w3.org/2001/XMLSchema-instance"); } if ("1.3".equals(version)) { root.setPublicId("-//Sun Microsystems, Inc.//DTD J2EE Application 1.3//EN"); root.setSystemId("http://java.sun.com/dtd/application_1_3.dtd"); } else if ("1.4".equals(version)) { root.attributes().put("xsi:schemaLocation", "http://java.sun.com/xml/ns/j2ee http://java.sun.com/xml/ns/j2ee/application_1_4.xsd"); } else if ("5".equals(version) || "6".equals(version)) { root.attributes().put("xsi:schemaLocation", "http://java.sun.com/xml/ns/javaee http://java.sun.com/xml/ns/javaee/application_" + version + ".xsd"); } else if ("7".equals(version)) { root.attributes().put("xsi:schemaLocation", "http://xmlns.jcp.org/xml/ns/javaee http://xmlns.jcp.org/xml/ns/javaee/application_" + version + ".xsd"); } if (applicationName != null) { new Node(root, nodeNameFor("application-name"), applicationName); } if (description != null) { new Node(root, nodeNameFor("description"), description); } if (displayName != null) { new Node(root, nodeNameFor("display-name"), displayName); } if (initializeInOrder != null && initializeInOrder) { new Node(root, nodeNameFor("initialize-in-order"), initializeInOrder); } for (EarModule module : modules) { Node moduleNode = new Node(root, nodeNameFor("module")); module.toXmlNode(moduleNode, moduleNameFor(module)); } if (securityRoles != null) { for (EarSecurityRole role : securityRoles) { Node roleNode = new Node(root, nodeNameFor("security-role")); if (role.getDescription() != null) { new Node(roleNode, nodeNameFor("description"), role.getDescription()); } new Node(roleNode, nodeNameFor("role-name"), role.getRoleName()); } } if (libraryDirectory != null) { new Node(root, nodeNameFor("library-directory"), libraryDirectory); } return root; } private Object moduleNameFor(EarModule module) { String name = moduleTypeMappings.get(module.getPath()); if (name == null) { if (module instanceof EarWebModule) { name = "web"; } else { // assume EJB is the most common kind of EAR deployment name = "ejb"; } } return nodeNameFor(name); } private Object nodeNameFor(String name) { if ("1.3".equals(version)) { return name; } else if ("1.4".equals(version)) { return new QName("http://java.sun.com/xml/ns/j2ee", name); } else if ("5".equals(version) || "6".equals(version)) { return new QName("http://java.sun.com/xml/ns/javaee", name); } else if ("7".equals(version)) { return new QName("http://xmlns.jcp.org/xml/ns/javaee", name); } else { return new QName(name); } } // For tests XmlTransformer getTransformer() { return transformer; } }
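// A minimal usage sketch of the deployment descriptor API above, not part of the
// original sources. The import path is assumed from the standard Gradle Ear plugin
// layout, and the descriptor instance is assumed to be supplied by that plugin;
// module paths and the role name are illustrative placeholders.
import java.io.StringWriter;

import org.gradle.plugins.ear.descriptor.internal.DefaultDeploymentDescriptor; // assumed package

class DeploymentDescriptorUsageSketch {

    // Configures two modules and a security role, then renders application.xml to a String.
    static String render(DefaultDeploymentDescriptor descriptor) {
        descriptor.webModule("my-webapp.war", "/myapp"); // becomes <module><web>...</web></module>
        descriptor.module("my-ejbs.jar", "ejb");         // plain typed <module>
        descriptor.securityRole("admin");                // <security-role><role-name>admin</role-name>

        StringWriter out = new StringWriter();
        descriptor.writeTo(out);                         // serialises via toXmlNode() and the transformer
        return out.toString();
    }
}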
package synergyviewcore.annotations.model; import java.util.ArrayList; import java.util.List; import javax.persistence.Entity; import javax.persistence.FetchType; import javax.persistence.Inheritance; import javax.persistence.InheritanceType; import javax.persistence.Lob; import javax.persistence.ManyToOne; import javax.persistence.OneToMany; import javax.persistence.OneToOne; import synergyviewcore.attributes.model.Attribute; import synergyviewcore.model.PersistenceModelObject; import synergyviewcore.subjects.model.Subject; /** * The Class Annotation. */ @Entity @Inheritance(strategy = InheritanceType.JOINED) public class Annotation extends PersistenceModelObject { /** The Constant PROP_STARTTIME. */ public static final String PROP_STARTTIME = "startTime"; /** The Constant PROP_SUBJECT. */ public static final String PROP_SUBJECT = "subject"; /** The Constant PROP_TEXT. */ public static final String PROP_TEXT = "text"; /** The annotation set. */ @ManyToOne private AnnotationSet annotationSet; /** The attributes. */ @OneToMany(fetch = FetchType.EAGER) private List<Attribute> attributes = new ArrayList<Attribute>(); /** The hr. */ private int hr; /** The mi. */ private int mi; /** The milli sec. */ private int milliSec; // // @Lob // public static final String PROP_IMAGEDATA = "imageData"; // private byte[] imageData; /** The sec. */ private int sec; /** The start time. */ private long startTime; /** The subject. */ @OneToOne private Subject subject; /** The text. */ @Lob private String text; /** * Gets the annotation set. * * @return the annotation set */ public AnnotationSet getAnnotationSet() { return annotationSet; } /** * Gets the attributes. * * @return the attributes */ public List<Attribute> getAttributes() { return attributes; } /** * Gets the formatted start time. * * @return the formatted start time */ public String getFormattedStartTime() { return String.format("%02d:%02d:%02d", hr, mi, sec); } /** * Gets the hr. * * @return the hr */ public int getHr() { return hr; } /** * Gets the mi. * * @return the mi */ public int getMi() { return mi; } /** * Gets the milli sec. * * @return the milli sec */ public int getMilliSec() { return milliSec; } /** * Gets the sec. * * @return the sec */ public int getSec() { return sec; } /** * Gets the start time. * * @return the startTime */ public long getStartTime() { return startTime; } /** * Gets the subject. * * @return the subject */ public Subject getSubject() { return subject; } /** * Gets the text. * * @return the text */ public String getText() { return text; } /** * Sets the annotation set. * * @param annotationSet * the new annotation set */ public void setAnnotationSet(AnnotationSet annotationSet) { this.annotationSet = annotationSet; } /** * Sets the attributes. * * @param attributes * the new attributes */ public void setAttributes(List<Attribute> attributes) { this.attributes = attributes; } /** * Sets the hr. * * @param hr * the new hr */ public void setHr(int hr) { this.hr = hr; } /** * Sets the mi. * * @param mi * the new mi */ public void setMi(int mi) { this.mi = mi; } /** * Sets the milli sec. * * @param milliSec * the new milli sec */ public void setMilliSec(int milliSec) { this.milliSec = milliSec; } /** * Sets the sec. * * @param sec * the new sec */ public void setSec(int sec) { this.sec = sec; } /** * Sets the start time. * * @param startTime * the startTime to set */ public void setStartTime(long startTime) { this.firePropertyChange(PROP_STARTTIME, this.startTime, this.startTime = startTime); } /** * Sets the subject. 
* * @param subject * the new subject */ public void setSubject(Subject subject) { this.firePropertyChange(PROP_SUBJECT, this.subject, this.subject = subject); } /** * Sets the text. * * @param text * the text to set */ public void setText(String text) { this.firePropertyChange(PROP_TEXT, this.text, this.text = text); } // /** // * @param imageData the imageData to set // */ // public void setImageData(byte[] imageData) { // this.firePropertyChange(PROP_IMAGEDATA, this.imageData, this.imageData = // imageData); // } // // /** // * @return the imageData // */ // public byte[] getImageData() { // return imageData; // } // }
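// A minimal usage sketch for the Annotation entity above, not part of the original
// sources. Deriving the hour/minute/second fields from a millisecond offset is an
// assumption about how callers populate them; the text is a placeholder.
import synergyviewcore.annotations.model.Annotation;
import synergyviewcore.annotations.model.AnnotationSet;
import synergyviewcore.subjects.model.Subject;

class AnnotationUsageSketch {

    static Annotation newAnnotation(AnnotationSet set, Subject subject, long startMillis) {
        Annotation annotation = new Annotation();
        annotation.setAnnotationSet(set);
        annotation.setSubject(subject);       // fires PROP_SUBJECT
        annotation.setStartTime(startMillis); // fires PROP_STARTTIME
        annotation.setHr((int) (startMillis / 3600000L));
        annotation.setMi((int) (startMillis / 60000L % 60));
        annotation.setSec((int) (startMillis / 1000L % 60));
        annotation.setMilliSec((int) (startMillis % 1000L));
        annotation.setText("example annotation text"); // fires PROP_TEXT
        return annotation;
    }
}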
// ---------------------------------------------------------------------------- // Copyright (C) 2003 Rafael H. Bordini, Jomi F. Hubner, et al. // // This library is free software; you can redistribute it and/or // modify it under the terms of the GNU Lesser General Public // License as published by the Free Software Foundation; either // version 2.1 of the License, or (at your option) any later version. // // This library is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU // Lesser General Public License for more details. // // You should have received a copy of the GNU Lesser General Public // License along with this library; if not, write to the Free Software // Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA // // To contact the authors: // http://www.inf.ufrgs.br/~bordini // http://www.das.ufsc.br/~jomi // //---------------------------------------------------------------------------- package jason.asSyntax; import jason.asSemantics.Unifier; import jason.asSyntax.parser.as2j; import java.io.StringReader; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.logging.Level; import java.util.logging.Logger; import org.w3c.dom.Document; import org.w3c.dom.Element; /** * A Pred extends a Structure with annotations, e.g.: a(1)[an1,an2]. */ public class Pred extends Structure { private static final long serialVersionUID = 1L; private static Logger logger = Logger.getLogger(Pred.class.getName()); private ListTerm annots; public Pred(String functor) { super(functor); } public Pred(Literal l) { this(l.getNS(), l); } public Pred(Atom namespace, String functor) { super(namespace, functor); } public Pred(Atom namespace, Literal l) { super(namespace, l); if (l.hasAnnot()) { annots = l.getAnnots().cloneLT(); } else { annots = null; } } // used by capply protected Pred(Literal l, Unifier u) { super(l, u); if (l.hasAnnot()) { setAnnots( (ListTerm)l.getAnnots().capply(u) ); } else { annots = null; } } public Pred(String functor, int termsSize) { super(functor, termsSize); } public static Pred parsePred(String sPred) { as2j parser = new as2j(new StringReader(sPred)); try { return parser.pred(); } catch (Exception e) { logger.log(Level.SEVERE, "Error parsing predicate " + sPred, e); return null; } } @Override public boolean isPred() { return true; } @Override public boolean isAtom() { return super.isAtom() && !hasAnnot(); } @Override public boolean isGround() { if (annots == null) { return super.isGround(); } else { return super.isGround() && annots.isGround(); } } /* @Override public boolean apply(Unifier u) { boolean r1 = super.apply(u); boolean r2 = applyAnnots(u); return r1 || r2; } */ /* private final boolean applyAnnots(Unifier u) { boolean r = false; if (annots != null) { // if some annotation has variables that become ground, they need to be replaced in the list to maintain the order List<Term> toAdd = null; Iterator<ListTerm> i = annots.listTermIterator(); while (i.hasNext()) { ListTerm lt = i.next(); if (lt.isTail() && lt.getTail().apply(u)) { // have to test tail before term, since term test may lead to i.remove that remove also the tail r = true; lt.getTerm().apply(u); // apply for the term setAnnots(annots); // sort all annots given from tail ground break; // the iterator is inconsistent } else if (lt.getTerm() != null && lt.getTerm().apply(u)) { r = true; if (toAdd == null) toAdd = new 
ArrayList<Term>(); toAdd.add( lt.getTerm() ); i.remove(); } } if (toAdd != null) for (Term t: toAdd) addAnnot(t); } return r; } */ @Override public Literal setAnnots(ListTerm l) { annots = null; if (l == null) return this; Iterator<ListTerm> i = l.listTermIterator(); while (i.hasNext()) { ListTerm lt = i.next(); if (lt.getTerm() == null) return this; addAnnot(lt.getTerm()); // use addAnnot to sort them if (lt.isTail()) { annots.setTail(lt.getTail()); return this; } } return this; } @Override public boolean addAnnot(Term t) { if (annots == null) annots = new ListTermImpl(); Iterator<ListTerm> i = annots.listTermIterator(); while (i.hasNext()) { ListTerm lt = i.next(); int c = t.compareTo(lt.getTerm()); if (c == 0) { // equals return false; } else if (c < 0) { lt.insert(t); return true; } } return false; } @Override public Literal addAnnots(List<Term> l) { if (l != null) for (Term t : l) addAnnot(t); return this; } @Override public Literal addAnnots(Term ... l) { for (Term t : l) addAnnot(t); return this; } @Override public boolean delAnnot(Term t) { if (annots == null) return false; else return annots.remove(t); // TODO: use the sorted annots to reduce search (as in addAnnot) } @Override public void clearAnnots() { annots = null; } @Override public ListTerm getAnnots() { return annots; } @Override public boolean hasAnnot(Term t) { if (annots == null) return false; // annots are ordered Iterator<ListTerm> i = annots.listTermIterator(); while (i.hasNext()) { ListTerm lt = i.next(); int c = t.compareTo(lt.getTerm()); if (c == 0) { // equals return true; } else if (c < 0) { return false; } } return false; //annots.contains(t); } @Override public Literal getAnnot(String functor) { if (annots == null) return null; // annots are ordered for (Term t: annots) { if (t.isLiteral()) { Literal l = (Literal)t; int c = functor.compareTo(l.getFunctor()); if (c == 0) { // equals return l; } else if (c < 0) { return null; } } } return null; } @Override public boolean hasAnnot() { return annots != null && !annots.isEmpty(); } @Override public boolean hasVar(VarTerm t, Unifier u) { if (super.hasVar(t, u)) return true; if (annots != null) for (Term v: annots) if (v.hasVar(t, u)) return true; return false; } @Override public void countVars(Map<VarTerm, Integer> c) { super.countVars(c); if (annots != null) for (Term t: annots) { t.countVars(c); } } @Override public boolean importAnnots(Literal p) { boolean imported = false; if (p.hasAnnot()) { Iterator<Term> i = p.getAnnots().iterator(); while (i.hasNext()) { Term t = i.next(); // p will only contain the annots actually added (for Event) if (addAnnot(t.clone())) { imported = true; } else { i.remove(); // Remove what is not new from p } } } return imported; } @Override public boolean delAnnots(List<Term> l) { boolean removed = false; if (l != null && this.hasAnnot()) { for (Term t: l) { boolean r = delAnnot(t); removed = removed || r; } } return removed; } @Override public ListTerm getAnnots(String functor) { ListTerm ls = new ListTermImpl(); if (annots != null) { ListTerm tail = ls; for (Term ta : annots) { if (ta.isLiteral()) { if (((Literal)ta).getFunctor().equals(functor)) { tail = tail.append(ta); } } } } return ls; } @Override public boolean hasSubsetAnnot(Literal p) { if (annots == null) return true; if (hasAnnot() && !p.hasAnnot()) return false; // both has annots (annots are ordered) Iterator<Term> i2 = p.getAnnots().iterator(); int c = -1; for (Term myAnnot : annots) { // all my annots should be member of p annots // move i2 until it is >= myAnnot if 
(!i2.hasNext()) return false; while (i2.hasNext()) { Term t = i2.next(); c = myAnnot.compareTo(t); if (c <= 0) break; // found my annot in p's annots OR my annot is not in p's annots, stop searching } if (c != 0) return false; } return true; } @Override public boolean hasSubsetAnnot(Literal p, Unifier u) { if (annots == null) return true; if (!p.hasAnnot()) return false; Term thisTail = null; // since p's annots will be changed, clone the list (but not the terms) ListTerm pAnnots = p.getAnnots().cloneLTShallow(); VarTerm pTail = pAnnots.getTail(); Term pAnnot = null; ListTerm pAnnotsTail = null; Iterator<Term> i2 = pAnnots.iterator(); boolean i2Reset = false; Iterator<ListTerm> i1 = annots.listTermIterator(); // use this iterator to get the tail of the list while (i1.hasNext()) { ListTerm lt = i1.next(); Term annot = lt.getTerm(); if (annot == null) break; if (lt.isTail()) thisTail = lt.getTail(); if (annot.isVar() && !i2Reset) { // when we arrive to the vars in the annots of this, we need to start searching from the begin again i2Reset = true; i2 = pAnnots.iterator(); pAnnot = null; } // search annot in p's annots boolean ok = false; while (true) { if (pAnnot != null && u.unifiesNoUndo(annot, pAnnot)) { ok = true; i2.remove(); pAnnot = i2.next(); break; } else if (pAnnot != null && pAnnot.compareTo(annot) > 0) { break; // quite the loop, the current p annot is greater than this annot, so annot is not in p's annots } else if (i2.hasNext()) { pAnnot = i2.next(); } else { break; } } // if p has a tail, add annot in p's tail if (!ok && pTail != null) { if (pAnnotsTail == null) { pAnnotsTail = (ListTerm)u.get(pTail); if (pAnnotsTail == null) { pAnnotsTail = new ListTermImpl(); u.unifies(pTail, pAnnotsTail); pAnnotsTail = (ListTerm)u.get(pTail); } } pAnnotsTail.add(annot.clone()); ok = true; } if (!ok) return false; } // if this Pred has a Tail, unify it with p remaining annots if (thisTail != null) u.unifies(thisTail, pAnnots); return true; } @Override public void addSource(Term agName) { if (agName != null) addAnnot(createSource(agName)); } @Override public boolean delSource(Term agName) { if (annots != null) return delAnnot(createSource(agName)); else return false; } public static Pred createSource(Term source) { Pred s; if (source.isGround()) { s = new Pred("source",1) { @Override public Term clone() { return this; } @Override public Term capply(Unifier u) { return this; } @Override public boolean isGround() { return true; } @Override public Literal makeVarsAnnon() { return this; } @Override public Literal makeVarsAnnon(Unifier un) { return this; } }; } else { // source is a var, so cannot be optimised s = new Pred("source",1); } s.addTerm(source); return s; } @Override public ListTerm getSources() { ListTerm ls = new ListTermImpl(); if (annots != null) { ListTerm tail = ls; for (Term ta : annots) { if (ta.isStructure()) { Structure tas = (Structure)ta; if (tas.getFunctor().equals("source")) { tail = tail.append(tas.getTerm(0)); } } } } return ls; } @Override public void delSources() { if (annots != null) { Iterator<Term> i = annots.iterator(); while (i.hasNext()) { Term t = i.next(); if (t.isStructure()) { if (((Structure)t).getFunctor().equals("source")) { i.remove(); } } } } } @Override public boolean hasSource() { if (annots != null) { for (Term ta : annots) { if (ta.isStructure()) { if (((Structure)ta).getFunctor().equals("source")) { return true; } } } } return false; } @Override public boolean hasSource(Term agName) { if (annots != null) { return hasAnnot(createSource(agName)); } 
return false; } @Override public Literal makeVarsAnnon(Unifier un) { if (annots != null) { ListTerm lt = annots; while (!lt.isEmpty()) { Term ta = lt.getTerm(); if (ta.isVar()) lt.setTerm(varToReplace(ta, un)); else if (ta instanceof Structure) ((Structure)ta).makeVarsAnnon(un); if (lt.isTail() && lt.getNext().isVar()) { lt.setNext(varToReplace(lt.getNext(), un)); break; } lt = lt.getNext(); } } return super.makeVarsAnnon(un); } @Override public boolean equals(Object o) { if (o == null) return false; if (o == this) return true; if (o instanceof Pred) { final Pred p = (Pred) o; return super.equals(o) && this.hasSubsetAnnot(p) && p.hasSubsetAnnot(this); } else if (o instanceof Atom && !hasAnnot() ) { // if o is some object that extends Atom (e.g. structure), goes to super equals return super.equals(o); // consider super equals only when this has no annots } return false; } public boolean equalsAsStructure(Object p) { // this method must be in this class, do not move (I do not remember why!) return super.equals((Term) p); } @Override public int compareTo(Term t) { int c = super.compareTo(t); if (c != 0) return c; if (t.isPred()) { Pred tAsPred = (Pred)t; if (getAnnots() == null && tAsPred.getAnnots() == null) return 0; if (getAnnots() == null) return -1; if (tAsPred.getAnnots() == null) return 1; Iterator<Term> pai = tAsPred.getAnnots().iterator(); for (Term a : getAnnots()) { c = a.compareTo(pai.next()); if (c != 0) return c; } final int ats = getAnnots().size(); final int ots = tAsPred.getAnnots().size(); if (ats < ots) return -1; if (ats > ots) return 1; } return 0; } @Override public Term capply(Unifier u) { return new Pred(this,u); } public Term clone() { return new Pred(this); } @Override public Literal cloneNS(Atom newnamespace) { return new Pred(newnamespace, this); } public String toStringAsTerm() { return super.toString(); } /** get as XML */ @Override public Element getAsDOM(Document document) { Element u = super.getAsDOM(document); if (hasAnnot()) { Element ea = document.createElement("annotations"); ea.appendChild(getAnnots().getAsDOM(document)); u.appendChild(ea); } return u; } }
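// A minimal usage sketch for the Pred API above, not part of the original sources:
// parsing a predicate with annotations and adding/querying annotations. The literal
// strings are illustrative only.
import jason.asSyntax.Pred;

class PredUsageSketch {

    public static void main(String[] args) {
        Pred p = Pred.parsePred("open(door)[source(bob),degree(high)]");
        if (p == null) {
            return;                                  // parsePred returns null on parse errors
        }

        System.out.println(p.hasAnnot());            // true
        System.out.println(p.getAnnots("degree"));   // annotations with functor "degree"

        p.addAnnot(Pred.parsePred("source(alice)")); // addAnnot keeps annotations sorted
        System.out.println(p.getSources());          // the source(...) arguments as a ListTerm
        System.out.println(p.isGround());            // annotations count towards groundness
    }
}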
/* Licensed to UbiCollab.org under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. UbiCollab.org licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.ubicollab.nomad.home; import java.io.BufferedInputStream; import java.io.FileInputStream; import java.util.ArrayList; import java.util.List; import org.json.JSONObject; import org.ubicollab.nomad.R; import org.ubicollab.nomad.SpaceManager; import org.ubicollab.nomad.auth.AuthorizationException; import org.ubicollab.nomad.auth.FrontController; import org.ubicollab.nomad.rss.RssReader; import org.ubicollab.nomad.space.Space; import org.ubicollab.nomad.util.TabGroupActivity; import android.app.Activity; import android.content.Context; import android.content.Intent; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.os.Bundle; import android.os.Handler; import android.os.Message; import android.util.Log; import android.view.Display; import android.view.Gravity; import android.view.LayoutInflater; import android.view.View; import android.view.View.OnClickListener; import android.view.ViewGroup; import android.widget.AdapterView; import android.widget.AdapterView.OnItemClickListener; import android.widget.ArrayAdapter; import android.widget.Button; import android.widget.ImageButton; import android.widget.ImageView; import android.widget.LinearLayout; import android.widget.ListView; import android.widget.PopupWindow; import android.widget.TextView; import android.widget.Toast; public class HomeScreen extends Activity implements Runnable{ private CameraView camera; private FrontController frontController; private SpaceAdapter spaceAdapter; private SpaceManager spaceManager; private ListView resultList, rssList; private TextView description, currentspaceDesc; private ImageButton space_image; private PopupWindow pw; private TextView more_text; private RssListAdapter adapter; private ArrayList<Space> spaceList; private ImageView current_picture_popup; private LinearLayout rss_layout; private List<JSONObject> jobs; private TextView latest_rss; @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.home); try { frontController = FrontController.getInstance(getApplicationContext()); } catch (AuthorizationException e) { // TODO Auto-generated catch block e.printStackTrace(); } /* * Initializations */ resultList = (ListView) findViewById(R.id.home_spaces_list); description = (TextView) findViewById(R.id.currentplace); space_image = (ImageButton) findViewById(R.id.currentspace_image); currentspaceDesc = (TextView) findViewById(R.id.currentspace_desc); rss_layout = (LinearLayout) findViewById(R.id.home_rss_layout); latest_rss = (TextView) findViewById(R.id.latest_rss); } private void editPic(final int fileName) { // get the instance of the LayoutInflater LayoutInflater inflater = (LayoutInflater) getSystemService(Context.LAYOUT_INFLATER_SERVICE); // inflate our view from the 
corresponding XML file View layout = inflater.inflate(R.layout.popup, (ViewGroup) findViewById(R.id.popup_menu_root), false); Button button_ok = (Button) layout.findViewById(R.id.popup_menu_ok); Button button_cancel = (Button) layout .findViewById(R.id.popup_menu_cancel); current_picture_popup = (ImageView) findViewById(R.id.popup_currentspace_image); // create a 100px width and 200px height popup window Display display = getWindowManager().getDefaultDisplay(); pw = new PopupWindow(layout, display.getWidth() / 4 + display.getWidth() / 2, display.getWidth() / 4 + display.getHeight() / 2, true); // set actions to buttons we have in our popup FileInputStream in = null; BufferedInputStream buf = null; try { in = new FileInputStream("/sdcard/" + fileName + ".jpg"); buf = new BufferedInputStream(in); Bitmap bMap = BitmapFactory.decodeStream(buf); current_picture_popup.setImageBitmap(bMap); if (in != null) { in.close(); } if (buf != null) { buf.close(); } } catch (Exception e) { Log.e("Error reading file", e.toString()); } button_ok.setOnClickListener(new OnClickListener() { @Override public void onClick(View vv) { takePicture(fileName); } }); button_cancel.setOnClickListener(new OnClickListener() { @Override public void onClick(View vv) { pw.dismiss(); } }); // finally show the popup in the center of the window pw.showAtLocation(layout, Gravity.CENTER, 0, 0); } @Override protected void onStart() { super.onStart(); // try { // spaceManager = frontController.getSpaceManager(); // } catch (AuthorizationException e) { // // If not authorized, force the user to login. // frontController.startLoginActivity(); // } try { spaceManager = frontController.getSpaceManager(); } catch (AuthorizationException e) { // TODO Auto-generated catch block e.printStackTrace(); } setupAdapter(); } @Override public void onResume() { super.onResume(); // // Check login and update // if (frontController.isAuthorized() && spaceAdapter != null) { // spaceAdapter.notifyDataSetChanged(); // } else { // frontController.startLoginActivity(); // } setupLightAdapter(); } private void setupLightAdapter() { if (spaceManager != null) { spaceList = spaceManager.getLog(); if (spaceList.isEmpty()) { clearScreen(); } else { openScreen(); if (spaceList.size() > 3) { ArrayList<Space> list = new ArrayList(spaceList.subList(0, 3)); spaceAdapter = new SpaceAdapter(this, R.layout.home_itemview, list); addFooter(); } else { spaceAdapter = new SpaceAdapter(this, R.layout.home_itemview, spaceList); } } // Connect the Adapter to the default View. resultList.setAdapter(spaceAdapter); resultList.setOnItemClickListener(spaceClickedHandler); } } private void setupAdapter() { removeFooter(); if (spaceManager != null) { spaceList = spaceManager.getLog(); if (spaceList.isEmpty()) { clearScreen(); } else { openScreen(); if (spaceList.size() > 3) { spaceAdapter = new SpaceAdapter(this, R.layout.home_itemview, spaceList.subList(0, 3)); addFooter(); } else { spaceAdapter = new SpaceAdapter(this, R.layout.home_itemview, spaceList); } } // Connect the Adapter to the default View. 
resultList.setAdapter(spaceAdapter); resultList.setOnItemClickListener(spaceClickedHandler); } } public void onPause() { super.onPause(); // removeFooter(); } private void clearScreen() { setContentView(R.layout.welcome); } private void openScreen() { setContentView(R.layout.home); int pos = spaceList.size() - 1; resultList = (ListView) findViewById(R.id.home_spaces_list); description = (TextView) findViewById(R.id.currentplace); space_image = (ImageButton) findViewById(R.id.currentspace_image); currentspaceDesc = (TextView) findViewById(R.id.currentspace_desc); rssList = (ListView) findViewById(R.id.updates_list); rssList.setSelector(android.R.color.transparent); rssList.setFocusable(false); rssList.setClickable(false); space_image.setVisibility(View.VISIBLE); final int fileName = Integer.parseInt(spaceList.get(pos).getId()); FileInputStream in; BufferedInputStream buf; try { in = new FileInputStream("/sdcard/" + fileName + ".jpg"); buf = new BufferedInputStream(in); Bitmap bMap = BitmapFactory.decodeStream(buf); space_image.setImageBitmap(bMap); if (in != null) { in.close(); } if (buf != null) { buf.close(); } } catch (Exception e) { Log.e("Error reading file", e.toString()); } //description.setText("Space Description:"); spaceList = spaceManager.getLog(); currentspaceDesc.setText(spaceList.get(pos).getDescription()); Thread thread = new Thread(this); thread.start(); space_image.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { Toast toast = Toast.makeText(getApplicationContext(), "edit pic", Toast.LENGTH_SHORT); toast.show(); editPic(fileName); } }); // space_image.setOnClickListener(new View.OnClickListener() { // @Override // public void onClick(View v) { // Toast toast = Toast.makeText(getApplicationContext(), // "edit pic", Toast.LENGTH_SHORT); // toast.show(); // // editPic(fileName); // } // }); } public void run() { loadRSS(); handler.sendEmptyMessage(0); } private void loadRSS() { jobs = new ArrayList<JSONObject>(); try { jobs = RssReader.getLatestRssFeed(); if(jobs.size()==0){ //add warning as a JSONObject //List<JSONObject> warning = new ArrayList<JSONObject>(); } } catch (Exception e) { Log.e("RSS ERROR", "Error loading RSS Feed Stream >> " + e.getMessage() + " //" + e.toString()); } } private Handler handler = new Handler() { @Override public void handleMessage(Message msg) { Button rss_link = (Button) findViewById(R.id.home_get_rss); if (jobs.size()>5) { rss_link.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { // TODO Auto-generated method stub Toast toast = Toast.makeText(getApplicationContext(), "Go to: " + RssReader.feed.toString(), Toast.LENGTH_SHORT); toast.show(); } }); adapter = new RssListAdapter(getParent(), jobs.subList(0, 5)); rssList.setAdapter(adapter); }else{ adapter = new RssListAdapter(getParent(), jobs); rssList.setAdapter(adapter); } if(jobs.size() == 0){ /// TODO Change the layout } } }; private boolean removeFooter() { return resultList.removeFooterView(more_text); } private void addFooter() { synchronized (resultList) { more_text = new TextView(this); more_text.setText("History"); more_text.setGravity(0x11); more_text.setTextSize(17); more_text.setHeight(40); resultList.addFooterView(more_text); more_text.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { Intent i = new Intent(v.getContext(), Home_History.class); TabGroupActivity parentActivity = (TabGroupActivity) getParent(); parentActivity.startChildActivity("Home_history", i); 
//startActivity(new Intent(v.getContext(), Home_History.class)); } }); } } private void takePicture(int fileName) { Toast toast = Toast.makeText(this, String.valueOf(fileName), Toast.LENGTH_LONG); toast.show(); Intent intent = new Intent(); Bundle filename = new Bundle(); filename.putInt("file name", fileName); // add space id intent.setClass(this, CameraView.class); intent.putExtras(filename); startActivity(intent); // Intent i = new Intent(getApplicationContext(), CameraView.class); // i.putExtra("filename", fileName); // // TabGroupActivity parentActivity = (TabGroupActivity) getParent(); // // parentActivity.startChildActivity("CreateSpaceActivity", i); // startActivity(i); } /* * Starts a new activity based on the space that was pressed. */ private OnItemClickListener spaceClickedHandler = new OnItemClickListener() { public void onItemClick(AdapterView<?> parent, View v, int pos, long id) { if (pos != parent.getBottom()) { Space selected = (Space) parent.getItemAtPosition(pos); spaceManager.setCurrentSpace(selected); spaceManager.insertStatistics(selected); } if (spaceManager != null) { spaceList = spaceManager.getLog(); space_image.setEnabled(true); description.setText("Space Description:"); currentspaceDesc.setText(spaceList.get(pos).getName() .toString()); // Create a new (default) Adapter and fill it with a list of // spaces. spaceAdapter = new SpaceAdapter(getApplicationContext(), R.layout.home_itemview, spaceList); // Connect the Adapter to the default View. resultList.setAdapter(spaceAdapter); // Now hook into our object and set its onItemClickListener // member // to our class handler object. resultList.setOnItemClickListener(spaceClickedHandler); spaceAdapter.notifyDataSetChanged(); setupAdapter(); } } }; /* * A ListAdapter that manages a ListView backed by an array of space * objects. This class expects that the provided resource id references a * single TextView. The TextView is used to generate list items for the list * that this adapter is connected to. */ private class SpaceAdapter extends ArrayAdapter<Space> { private List<Space> items; public SpaceAdapter(Context context, int textViewResourceId, List<Space> items) { super(context, textViewResourceId, items); this.items = items; } @Override public View getView(int position, View convertView, ViewGroup parent) { View v = convertView; if (v == null) { LayoutInflater vi = (LayoutInflater) getSystemService(Context.LAYOUT_INFLATER_SERVICE); v = vi.inflate(R.layout.home_itemview, null); } Space o = items.get(position); if (o != null) { TextView itemName = (TextView) v .findViewById(R.id.home_spaces_list_item_name); if (itemName != null) { itemName.setText(o.getName()); } } return v; } } }
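// A minimal sketch, not part of the original app, showing how the HomeScreen activity
// above could be launched from another Activity in the same application (for example
// after login). The calling activity and navigation flow are assumptions.
import android.app.Activity;
import android.content.Intent;

import org.ubicollab.nomad.home.HomeScreen;

class HomeScreenLauncherSketch {

    static void openHome(Activity caller) {
        Intent intent = new Intent(caller, HomeScreen.class);
        caller.startActivity(intent); // onCreate/onStart/onResume then rebuild the space list
    }
}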
/** * Copyright 2014-2015 Joshua Asbury (@theoriginalbit) * <p/> * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * <p/> * http://www.apache.org/licenses/LICENSE-2.0 * <p/> * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.theoriginalbit.peripheral.turtle; import com.theoriginalbit.peripheral.api.util.TurtleAttackException; import com.theoriginalbit.peripheral.api.util.TurtleDigException; import dan200.computercraft.api.peripheral.IPeripheral; import dan200.computercraft.api.turtle.*; import net.minecraft.block.Block; import net.minecraft.entity.Entity; import net.minecraft.entity.item.EntityItem; import net.minecraft.entity.player.EntityPlayer; import net.minecraft.inventory.IInventory; import net.minecraft.item.ItemStack; import net.minecraft.nbt.NBTTagCompound; import net.minecraft.util.AxisAlignedBB; import net.minecraft.util.ChunkCoordinates; import net.minecraft.util.Facing; import net.minecraft.world.World; import java.util.ArrayList; import java.util.List; import java.util.Random; /** * @author Joshua Asbury (@theoriginalbit) */ public abstract class UpgradeTool implements ITurtleUpgrade { private static final Random rand = new Random(); protected final ItemStack craftingStack; private final int id; private final String name; protected UpgradeTool(int upgradeId, String adjective, ItemStack craftingItemStack) { id = upgradeId; name = adjective; craftingStack = craftingItemStack; } @Override public final int getUpgradeID() { return id; } @Override public final String getUnlocalisedAdjective() { return name; } @Override public final TurtleUpgradeType getType() { return TurtleUpgradeType.Tool; } @Override public final ItemStack getCraftingItem() { return craftingStack; } @Override public final IPeripheral createPeripheral(ITurtleAccess turtle, TurtleSide side) { return null; } @Override public final TurtleCommandResult useTool(ITurtleAccess turtle, TurtleSide side, TurtleVerb verb, int direction) { switch (verb) { case Attack: return attack(turtle, direction); case Dig: return dig(turtle, direction); } return TurtleCommandResult.failure("Unsupported action"); } @Override public final void update(ITurtleAccess turtle, TurtleSide side) { // NO-OP } protected abstract boolean canAttackEntity(Entity entity); protected abstract ArrayList<ItemStack> attackEntity(ITurtleAccess turtle, Entity entity) throws TurtleAttackException; protected abstract boolean canAttackBlock(World world, int x, int y, int z, int dir, EntityPlayer turtle); protected abstract ArrayList<ItemStack> attackBlock(World world, int x, int y, int z, int dir, EntityPlayer turtle) throws TurtleAttackException; protected abstract boolean canHarvestBlock(World world, int x, int y, int z); protected abstract ArrayList<ItemStack> harvestBlock(World world, int x, int y, int z) throws TurtleDigException; protected String getAttackFailureMessage() { return "Nothing to attack"; } protected String getDigFailureMessage() { return "Nothing to dig"; } protected final void store(final ITurtleAccess turtle, final ArrayList<ItemStack> list) { if (list != null) { for (final ItemStack stack : list) { store(turtle, stack); } } } protected final 
void store(final ITurtleAccess turtle, final ItemStack stack) { if (!storeItemStack(turtle, stack)) { ChunkCoordinates coordinates = turtle.getPosition(); int direction = turtle.getDirection(); int x = coordinates.posX + Facing.offsetsXForSide[direction]; int y = coordinates.posY + Facing.offsetsYForSide[direction]; int z = coordinates.posZ + Facing.offsetsZForSide[direction]; spawnItemStackInWorld(stack, turtle.getWorld(), x, y, z); } } protected AxisAlignedBB getEntitySearchAABB(ITurtleAccess turtle, int dir) { final ChunkCoordinates coordinates = turtle.getPosition(); int x = coordinates.posX + Facing.offsetsXForSide[dir]; int y = coordinates.posY + Facing.offsetsYForSide[dir]; int z = coordinates.posZ + Facing.offsetsZForSide[dir]; return AxisAlignedBB.getBoundingBox( x, y, z, x + 1d, y + 1d, z + 1d ); } private TurtleCommandResult attack(ITurtleAccess turtle, int dir) { final World world = turtle.getWorld(); final ChunkCoordinates coordinates = turtle.getPosition(); int x = coordinates.posX + Facing.offsetsXForSide[dir]; int y = coordinates.posY + Facing.offsetsYForSide[dir]; int z = coordinates.posZ + Facing.offsetsZForSide[dir]; final EntityPlayer player = new PlayerTurtle(turtle); @SuppressWarnings("unchecked") final List<Entity> list = world.getEntitiesWithinAABBExcludingEntity( player, getEntitySearchAABB(turtle, dir) ); try { boolean someThingDone = false; for (Entity entity : list) { if (canAttackEntity(entity)) { store(turtle, attackEntity(turtle, entity)); someThingDone = true; } } if (canAttackBlock(world, x, y, z, dir, player)) { store(turtle, attackBlock(world, x, y, z, dir, player)); someThingDone = true; } if (someThingDone) { return TurtleCommandResult.success(); } return TurtleCommandResult.failure(getAttackFailureMessage()); } catch (TurtleAttackException e) { return TurtleCommandResult.failure(e.getMessage()); } } private TurtleCommandResult dig(ITurtleAccess turtle, int dir) { final World world = turtle.getWorld(); final ChunkCoordinates coordinates = turtle.getPosition(); int x = coordinates.posX + Facing.offsetsXForSide[dir]; int y = coordinates.posY + Facing.offsetsYForSide[dir]; int z = coordinates.posZ + Facing.offsetsZForSide[dir]; final Block block = world.getBlock(x, y, z); if (!world.isAirBlock(x, y, z) && block.getBlockHardness(world, x, y, z) > -1f && canHarvestBlock(world, x, y, z)) { try { final ArrayList<ItemStack> result = harvestBlock(world, x, y, z); if (result != null) { store(turtle, result); world.setBlockToAir(x, y, z); world.playAuxSFX(2001, x, y, z, Block.getIdFromBlock(block) + world.getBlockMetadata(x, y, z) * 4096); return TurtleCommandResult.success(); } } catch (TurtleDigException e) { return TurtleCommandResult.failure(e.getMessage()); } } return TurtleCommandResult.failure(getDigFailureMessage()); } private boolean storeItemStack(final ITurtleAccess turtle, ItemStack stack) { final IInventory inventory = turtle.getInventory(); for (int i = 0; i < inventory.getSizeInventory(); ++i) { ItemStack currentStack = inventory.getStackInSlot(i); if (currentStack == null) { inventory.setInventorySlotContents(i, stack.copy()); stack.stackSize = 0; return true; } else if (currentStack.isStackable() && currentStack.isItemEqual(stack)) { int space = currentStack.getMaxStackSize() - currentStack.stackSize; if (space >= stack.stackSize) { currentStack.stackSize += stack.stackSize; stack.stackSize = 0; return true; } else { currentStack.stackSize = currentStack.getMaxStackSize(); stack.stackSize -= space; } } } return false; } private static void 
spawnItemStackInWorld(ItemStack stack, World world, int x, int y, int z) { if (stack != null && stack.stackSize > 0) { float rx = rand.nextFloat() * 0.8f + 0.1f; float ry = rand.nextFloat() * 0.8f + 0.1f; float rz = rand.nextFloat() * 0.8f + 0.1f; EntityItem entityItem = new EntityItem(world, x + rx, y + ry, z + rz, new ItemStack(stack.getItem(), stack.stackSize, stack.getItemDamage())); if (stack.hasTagCompound()) { entityItem.getEntityItem().setTagCompound((NBTTagCompound) stack.getTagCompound().copy()); } float factor = 0.05f; entityItem.motionX = rand.nextGaussian() * factor; entityItem.motionY = rand.nextGaussian() * factor + 0.2f; entityItem.motionZ = rand.nextGaussian() * factor; world.spawnEntityInWorld(entityItem); stack.stackSize = 0; } } }
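// A minimal sketch, not part of the original sources, of subclassing the abstract
// UpgradeTool above. The sketch stays abstract so any remaining ITurtleUpgrade members
// not covered by UpgradeTool (icon rendering, for example) are left to a real
// implementation. Upgrade id, adjective key, crafting item, materials and damage value
// are placeholders; Minecraft 1.7.10-era APIs are assumed, matching the imports above.
import java.util.ArrayList;

import com.theoriginalbit.peripheral.turtle.UpgradeTool;

import dan200.computercraft.api.turtle.ITurtleAccess;
import net.minecraft.block.Block;
import net.minecraft.block.material.Material;
import net.minecraft.entity.Entity;
import net.minecraft.entity.monster.IMob;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.init.Items;
import net.minecraft.item.ItemStack;
import net.minecraft.util.DamageSource;
import net.minecraft.world.World;

abstract class ExampleShovelUpgradeSketch extends UpgradeTool {

    protected ExampleShovelUpgradeSketch() {
        // placeholder upgrade id, adjective key and crafting item
        super(170, "example.adjective.shovel", new ItemStack(Items.iron_shovel));
    }

    @Override
    protected boolean canAttackEntity(Entity entity) {
        return entity instanceof IMob; // only hostile mobs
    }

    @Override
    protected ArrayList<ItemStack> attackEntity(ITurtleAccess turtle, Entity entity) {
        entity.attackEntityFrom(DamageSource.generic, 4.0F);
        return null; // no drops collected here; store() ignores null lists
    }

    @Override
    protected boolean canAttackBlock(World world, int x, int y, int z, int dir, EntityPlayer turtle) {
        return false; // this tool never "attacks" blocks
    }

    @Override
    protected ArrayList<ItemStack> attackBlock(World world, int x, int y, int z, int dir, EntityPlayer turtle) {
        return null;
    }

    @Override
    protected boolean canHarvestBlock(World world, int x, int y, int z) {
        Block block = world.getBlock(x, y, z);
        return block.getMaterial() == Material.ground || block.getMaterial() == Material.sand;
    }

    @Override
    protected ArrayList<ItemStack> harvestBlock(World world, int x, int y, int z) {
        Block block = world.getBlock(x, y, z);
        return block.getDrops(world, x, y, z, world.getBlockMetadata(x, y, z), 0);
    }
}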
/* Copyright 2022 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package io.kubernetes.client.openapi.models; /** Generated */ public interface V2MetricStatusFluent< A extends io.kubernetes.client.openapi.models.V2MetricStatusFluent<A>> extends io.kubernetes.client.fluent.Fluent<A> { /** * This method has been deprecated, please use method buildContainerResource instead. * * @return The buildable object. */ @java.lang.Deprecated public io.kubernetes.client.openapi.models.V2ContainerResourceMetricStatus getContainerResource(); public io.kubernetes.client.openapi.models.V2ContainerResourceMetricStatus buildContainerResource(); public A withContainerResource( io.kubernetes.client.openapi.models.V2ContainerResourceMetricStatus containerResource); public java.lang.Boolean hasContainerResource(); public io.kubernetes.client.openapi.models.V2MetricStatusFluent.ContainerResourceNested<A> withNewContainerResource(); public io.kubernetes.client.openapi.models.V2MetricStatusFluent.ContainerResourceNested<A> withNewContainerResourceLike( io.kubernetes.client.openapi.models.V2ContainerResourceMetricStatus item); public io.kubernetes.client.openapi.models.V2MetricStatusFluent.ContainerResourceNested<A> editContainerResource(); public io.kubernetes.client.openapi.models.V2MetricStatusFluent.ContainerResourceNested<A> editOrNewContainerResource(); public io.kubernetes.client.openapi.models.V2MetricStatusFluent.ContainerResourceNested<A> editOrNewContainerResourceLike( io.kubernetes.client.openapi.models.V2ContainerResourceMetricStatus item); /** * This method has been deprecated, please use method buildExternal instead. * * @return The buildable object. */ @java.lang.Deprecated public io.kubernetes.client.openapi.models.V2ExternalMetricStatus getExternal(); public io.kubernetes.client.openapi.models.V2ExternalMetricStatus buildExternal(); public A withExternal(io.kubernetes.client.openapi.models.V2ExternalMetricStatus external); public java.lang.Boolean hasExternal(); public io.kubernetes.client.openapi.models.V2MetricStatusFluent.ExternalNested<A> withNewExternal(); public io.kubernetes.client.openapi.models.V2MetricStatusFluent.ExternalNested<A> withNewExternalLike(io.kubernetes.client.openapi.models.V2ExternalMetricStatus item); public io.kubernetes.client.openapi.models.V2MetricStatusFluent.ExternalNested<A> editExternal(); public io.kubernetes.client.openapi.models.V2MetricStatusFluent.ExternalNested<A> editOrNewExternal(); public io.kubernetes.client.openapi.models.V2MetricStatusFluent.ExternalNested<A> editOrNewExternalLike(io.kubernetes.client.openapi.models.V2ExternalMetricStatus item); /** * This method has been deprecated, please use method buildObject instead. * * @return The buildable object. 
*/ @java.lang.Deprecated public io.kubernetes.client.openapi.models.V2ObjectMetricStatus getObject(); public io.kubernetes.client.openapi.models.V2ObjectMetricStatus buildObject(); public A withObject(io.kubernetes.client.openapi.models.V2ObjectMetricStatus _object); public java.lang.Boolean hasObject(); public io.kubernetes.client.openapi.models.V2MetricStatusFluent.ObjectNested<A> withNewObject(); public io.kubernetes.client.openapi.models.V2MetricStatusFluent.ObjectNested<A> withNewObjectLike( io.kubernetes.client.openapi.models.V2ObjectMetricStatus item); public io.kubernetes.client.openapi.models.V2MetricStatusFluent.ObjectNested<A> editObject(); public io.kubernetes.client.openapi.models.V2MetricStatusFluent.ObjectNested<A> editOrNewObject(); public io.kubernetes.client.openapi.models.V2MetricStatusFluent.ObjectNested<A> editOrNewObjectLike(io.kubernetes.client.openapi.models.V2ObjectMetricStatus item); /** * This method has been deprecated, please use method buildPods instead. * * @return The buildable object. */ @java.lang.Deprecated public io.kubernetes.client.openapi.models.V2PodsMetricStatus getPods(); public io.kubernetes.client.openapi.models.V2PodsMetricStatus buildPods(); public A withPods(io.kubernetes.client.openapi.models.V2PodsMetricStatus pods); public java.lang.Boolean hasPods(); public io.kubernetes.client.openapi.models.V2MetricStatusFluent.PodsNested<A> withNewPods(); public io.kubernetes.client.openapi.models.V2MetricStatusFluent.PodsNested<A> withNewPodsLike( io.kubernetes.client.openapi.models.V2PodsMetricStatus item); public io.kubernetes.client.openapi.models.V2MetricStatusFluent.PodsNested<A> editPods(); public io.kubernetes.client.openapi.models.V2MetricStatusFluent.PodsNested<A> editOrNewPods(); public io.kubernetes.client.openapi.models.V2MetricStatusFluent.PodsNested<A> editOrNewPodsLike( io.kubernetes.client.openapi.models.V2PodsMetricStatus item); /** * This method has been deprecated, please use method buildResource instead. * * @return The buildable object. */ @java.lang.Deprecated public io.kubernetes.client.openapi.models.V2ResourceMetricStatus getResource(); public io.kubernetes.client.openapi.models.V2ResourceMetricStatus buildResource(); public A withResource(io.kubernetes.client.openapi.models.V2ResourceMetricStatus resource); public java.lang.Boolean hasResource(); public io.kubernetes.client.openapi.models.V2MetricStatusFluent.ResourceNested<A> withNewResource(); public io.kubernetes.client.openapi.models.V2MetricStatusFluent.ResourceNested<A> withNewResourceLike(io.kubernetes.client.openapi.models.V2ResourceMetricStatus item); public io.kubernetes.client.openapi.models.V2MetricStatusFluent.ResourceNested<A> editResource(); public io.kubernetes.client.openapi.models.V2MetricStatusFluent.ResourceNested<A> editOrNewResource(); public io.kubernetes.client.openapi.models.V2MetricStatusFluent.ResourceNested<A> editOrNewResourceLike(io.kubernetes.client.openapi.models.V2ResourceMetricStatus item); public java.lang.String getType(); public A withType(java.lang.String type); public java.lang.Boolean hasType(); /** Method is deprecated. use withType instead. 
*/ @java.lang.Deprecated public A withNewType(java.lang.String original); public interface ContainerResourceNested<N> extends io.kubernetes.client.fluent.Nested<N>, io.kubernetes.client.openapi.models.V2ContainerResourceMetricStatusFluent< io.kubernetes.client.openapi.models.V2MetricStatusFluent.ContainerResourceNested<N>> { public N and(); public N endContainerResource(); } public interface ExternalNested<N> extends io.kubernetes.client.fluent.Nested<N>, io.kubernetes.client.openapi.models.V2ExternalMetricStatusFluent< io.kubernetes.client.openapi.models.V2MetricStatusFluent.ExternalNested<N>> { public N and(); public N endExternal(); } public interface ObjectNested<N> extends io.kubernetes.client.fluent.Nested<N>, io.kubernetes.client.openapi.models.V2ObjectMetricStatusFluent< io.kubernetes.client.openapi.models.V2MetricStatusFluent.ObjectNested<N>> { public N and(); public N endObject(); } public interface PodsNested<N> extends io.kubernetes.client.fluent.Nested<N>, io.kubernetes.client.openapi.models.V2PodsMetricStatusFluent< io.kubernetes.client.openapi.models.V2MetricStatusFluent.PodsNested<N>> { public N and(); public N endPods(); } public interface ResourceNested<N> extends io.kubernetes.client.fluent.Nested<N>, io.kubernetes.client.openapi.models.V2ResourceMetricStatusFluent< io.kubernetes.client.openapi.models.V2MetricStatusFluent.ResourceNested<N>> { public N and(); public N endResource(); } }
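// A minimal usage sketch of the generated fluent API above, not part of the original
// sources. It goes through V2MetricStatusBuilder, which by the convention of these
// generated models implements this Fluent interface; the builder class, withName()
// accessor and field values are assumptions/illustrations.
import io.kubernetes.client.openapi.models.V2MetricStatus;
import io.kubernetes.client.openapi.models.V2MetricStatusBuilder;

class V2MetricStatusFluentSketch {

    static V2MetricStatus cpuResourceStatus() {
        return new V2MetricStatusBuilder()
                .withType("Resource")
                .withNewResource()   // opens a ResourceNested<...> scope
                    .withName("cpu")
                .endResource()       // back to the outer builder
                .build();
    }
}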
/* * Copyright 2013 Netherlands eScience Center * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package nl.esciencecenter.xenon.adaptors.filesystems.local; import static nl.esciencecenter.xenon.utils.LocalFileSystemUtils.isWindows; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.junit.Assume.assumeFalse; import java.util.AbstractMap; import java.util.HashSet; import java.util.Map; import java.util.Set; import org.junit.Test; import nl.esciencecenter.xenon.InvalidCredentialException; import nl.esciencecenter.xenon.InvalidLocationException; import nl.esciencecenter.xenon.XenonException; import nl.esciencecenter.xenon.adaptors.filesystems.FileSystemTestParent; import nl.esciencecenter.xenon.adaptors.filesystems.LocationConfig; import nl.esciencecenter.xenon.credentials.DefaultCredential; import nl.esciencecenter.xenon.credentials.PasswordCredential; import nl.esciencecenter.xenon.filesystems.DirectoryNotEmptyException; import nl.esciencecenter.xenon.filesystems.FileSystem; import nl.esciencecenter.xenon.filesystems.NoSuchPathException; import nl.esciencecenter.xenon.filesystems.Path; import nl.esciencecenter.xenon.filesystems.PathAttributes; import nl.esciencecenter.xenon.filesystems.PosixFilePermission; public class LocalFileSystemTest extends FileSystemTestParent { @Override protected LocationConfig setupLocationConfig(FileSystem fileSystem) { return new LocationConfig() { @Override public Path getExistingPath() { return new Path("/home/xenon/filesystem-test-fixture/links/file0"); } @Override public Map.Entry<Path, Path> getSymbolicLinksToExistingFile() { return new AbstractMap.SimpleEntry<>(new Path("/home/xenon/filesystem-test-fixture/links/link0"), new Path("/home/xenon/filesystem-test-fixture/links/file0")); } @Override public Path getWritableTestDir() { return new Path("/tmp"); } @Override public Path getExpectedWorkingDirectory() { // return new Path(System.getProperty("user.dir")); return new Path("/tmp"); } }; } @Override public FileSystem setupFileSystem() throws XenonException { FileSystem f = FileSystem.create("file"); f.setWorkingDirectory(new Path("/tmp")); return f; } @Test public void test_credential_default() throws XenonException { FileSystem.create("file", null, new DefaultCredential()).close(); } @Test(expected = InvalidCredentialException.class) public void test_credential_wrong() throws XenonException { FileSystem.create("file", null, new PasswordCredential("aap", "noot".toCharArray())); } @Test public void test_location_empty() throws XenonException { FileSystem.create("file", "").close(); } @Test public void test_location_localRoot() throws XenonException { FileSystem.create("file", "/").close(); } @Test(expected = InvalidLocationException.class) public void test_location_wrong() throws XenonException { FileSystem.create("file", "aap").close(); } @Test(expected = NoSuchPathException.class) public void test_deleteLocal_doesNotExist() throws Exception { LocalFileSystem f = (LocalFileSystem) fileSystem; Path doesNotExist = 
testRoot.resolve("foobar"); f.deleteLocal(doesNotExist); } @Test(expected = DirectoryNotEmptyException.class) public void test_deleteLocal_dirNotEmpty() throws Exception { LocalFileSystem f = (LocalFileSystem) fileSystem; generateAndCreateTestDir(); f.deleteLocal(testRoot); } @Test(expected = XenonException.class) public void test_deleteLocal_notAllowed() throws Exception { LocalFileSystem f = (LocalFileSystem) fileSystem; Path notAllowed = new Path("/dev/null"); f.deleteLocal(notAllowed); } @Test public void test_javaPath_empty_workingDirectory() throws Exception { LocalFileSystem f = (LocalFileSystem) fileSystem; assertEquals("/tmp", f.javaPath(new Path("")).toString()); } @Test(expected = XenonException.class) public void test_getAttributes_nonExisting() throws Exception { fileSystem.getAttributes(testRoot.resolve("foo")); } @Test(expected = XenonException.class) public void test_list_nonExisting() throws Exception { fileSystem.list(testRoot.resolve("foo"), true); } @Test(expected = XenonException.class) public void test_writeNotAllowed() throws Exception { fileSystem.writeToFile(new Path("/dev/foo")); } @Test(expected = XenonException.class) public void test_appendNotAllowed() throws Exception { fileSystem.appendToFile(new Path("/dev/foo")); } @Test(expected = XenonException.class) public void test_rename_invalidSource() throws Exception { fileSystem.rename(new Path("/dev/null"), testRoot.resolve("foo")); } @Test(expected = XenonException.class) public void test_createDirectory_invalidPath() throws Exception { fileSystem.createDirectory(new Path("/dev/foo")); } @Test(expected = XenonException.class) public void test_createFile_invalidPath() throws Exception { fileSystem.createFile(new Path("/dev/foo")); } @Test(expected = XenonException.class) public void test_createSymbolicLink_invalidPath() throws Exception { fileSystem.createSymbolicLink(new Path("/dev/foo"), new Path("/dev/null")); } public void test_setPermissionsNotAllowed() throws Exception { Set<PosixFilePermission> permissions = new HashSet<>(); permissions.add(PosixFilePermission.GROUP_EXECUTE); fileSystem.setPosixFilePermissions(new Path("/dev/null"), permissions); } @Test public void test_getAttributes_fileStartingWithDot_HiddenFile() throws Exception { // TODO move to FileSystemTestParent when we can detect // adaptor/filesystem supports hidden files generateAndCreateTestDir(); // assumes location has UNIX-like file system where starts with '.' // means hidden Path path = testDir.resolve(".myhiddenfile"); fileSystem.createFile(path); PathAttributes result = fileSystem.getAttributes(path); assertTrue(result.isHidden()); } @Test public void test_list_hiddenFile() throws Exception { assumeFalse(isWindows()); generateAndCreateTestDir(); // assumes location has UNIX-like file system where starts with '.' // means hidden Path path = testDir.resolve(".myhiddenfile"); fileSystem.createFile(path); Set<PathAttributes> res = listSet(testDir, false); assertTrue("Listing contains hidden file", res.stream().anyMatch(PathAttributes::isHidden)); } @Test public void test_exists_existingDot_returnTrue() throws Exception { testDir = new Path("."); assertTrue(fileSystem.exists(testDir)); } @Test public void test_exists_existingDoubleDot_returnTrue() throws Exception { testDir = new Path(".."); assertTrue(fileSystem.exists(testDir)); } }
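// A minimal non-test sketch, not part of the original sources, of the Xenon calls the
// tests above exercise: create the local filesystem adaptor, set a working directory,
// create a file and read its attributes. The /tmp paths are illustrative.
import nl.esciencecenter.xenon.XenonException;
import nl.esciencecenter.xenon.filesystems.FileSystem;
import nl.esciencecenter.xenon.filesystems.Path;
import nl.esciencecenter.xenon.filesystems.PathAttributes;

class LocalFileSystemUsageSketch {

    public static void main(String[] args) throws XenonException {
        FileSystem fs = FileSystem.create("file");
        fs.setWorkingDirectory(new Path("/tmp"));

        Path file = new Path("/tmp/xenon-usage-sketch.txt");
        if (!fs.exists(file)) {
            fs.createFile(file);
        }

        PathAttributes attributes = fs.getAttributes(file);
        System.out.println("hidden: " + attributes.isHidden());

        fs.close();
    }
}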
package stdlib; /****************************************************************************** * Compilation: javac StdAudio.java * Execution: java StdAudio * Dependencies: none * * Simple library for reading, writing, and manipulating .wav files. * * * Limitations * ----------- * - Does not seem to work properly when reading .wav files from a .jar file. * - Assumes the audio is monaural, with sampling rate of 44,100. * ******************************************************************************/ import javax.sound.sampled.Clip; import java.io.File; import java.io.ByteArrayInputStream; import java.io.InputStream; import java.io.IOException; import java.net.URL; import javax.sound.sampled.AudioFileFormat; import javax.sound.sampled.AudioFormat; import javax.sound.sampled.AudioInputStream; import javax.sound.sampled.AudioSystem; import javax.sound.sampled.DataLine; import javax.sound.sampled.LineUnavailableException; import javax.sound.sampled.SourceDataLine; import javax.sound.sampled.UnsupportedAudioFileException; /** * <i>Standard audio</i>. This class provides a basic capability for * creating, reading, and saving audio. * <p> * The audio format uses a sampling rate of 44,100 (CD quality audio), 16-bit, monaural. * * <p> * For additional documentation, see <a href="http://introcs.cs.princeton.edu/15inout">Section 1.5</a> of * <i>Introduction to Programming in Java: An Interdisciplinary Approach</i> by Robert Sedgewick and Kevin Wayne. * * @author Robert Sedgewick * @author Kevin Wayne */ public final class StdAudio { /** * The sample rate - 44,100 Hz for CD quality audio. */ public static final int SAMPLE_RATE = 44100; private static final int BYTES_PER_SAMPLE = 2; // 16-bit audio private static final int BITS_PER_SAMPLE = 16; // 16-bit audio private static final double MAX_16_BIT = Short.MAX_VALUE; // 32,767 private static final int SAMPLE_BUFFER_SIZE = 4096; private static SourceDataLine line; // to play the sound private static byte[] buffer; // our internal buffer private static int bufferSize = 0; // number of samples currently in internal buffer private StdAudio() { // can not instantiate } // static initializer static { init(); } // open up an audio stream private static void init() { try { // 44,100 samples per second, 16-bit audio, mono, signed PCM, little Endian AudioFormat format = new AudioFormat((float) SAMPLE_RATE, BITS_PER_SAMPLE, 1, true, false); DataLine.Info info = new DataLine.Info(SourceDataLine.class, format); line = (SourceDataLine) AudioSystem.getLine(info); line.open(format, SAMPLE_BUFFER_SIZE * BYTES_PER_SAMPLE); // the internal buffer is a fraction of the actual buffer size, this choice is arbitrary // it gets divided because we can't expect the buffered data to line up exactly with when // the sound card decides to push out its samples. buffer = new byte[SAMPLE_BUFFER_SIZE * BYTES_PER_SAMPLE/3]; } catch (LineUnavailableException e) { System.out.println(e.getMessage()); } // no sound gets made before this call line.start(); } /** * Closes standard audio. */ public static void close() { line.drain(); line.stop(); } /** * Writes one sample (between -1.0 and +1.0) to standard audio. * If the sample is outside the range, it will be clipped. 
* * @param sample the sample to play * @throws IllegalArgumentException if the sample is <tt>Double.NaN</tt> */ public static void play(double sample) { // clip if outside [-1, +1] if (Double.isNaN(sample)) throw new IllegalArgumentException("sample is NaN"); if (sample < -1.0) sample = -1.0; if (sample > +1.0) sample = +1.0; // convert to bytes short s = (short) (MAX_16_BIT * sample); buffer[bufferSize++] = (byte) s; buffer[bufferSize++] = (byte) (s >> 8); // little Endian // send to sound card if buffer is full if (bufferSize >= buffer.length) { line.write(buffer, 0, buffer.length); bufferSize = 0; } } /** * Writes the array of samples (between -1.0 and +1.0) to standard audio. * If a sample is outside the range, it will be clipped. * * @param samples the array of samples to play * @throws IllegalArgumentException if any sample is <tt>Double.NaN</tt> */ public static void play(double[] samples) { if (samples == null) throw new NullPointerException("argument to play() is null"); for (int i = 0; i < samples.length; i++) { play(samples[i]); } } /** * Reads audio samples from a file (in .wav or .au format) and returns * them as a double array with values between -1.0 and +1.0. * * @param filename the name of the audio file * @return the array of samples */ public static double[] read(String filename) { byte[] data = readByte(filename); int n = data.length; double[] d = new double[n/2]; for (int i = 0; i < n/2; i++) { d[i] = ((short) (((data[2*i+1] & 0xFF) << 8) + (data[2*i] & 0xFF))) / ((double) MAX_16_BIT); } return d; } /** * Plays an audio file (in .wav, .mid, or .au format) in a background thread. * * @param filename the name of the audio file */ public static synchronized void play(String filename) { if (filename == null) throw new NullPointerException(); // code adapted from: http://stackoverflow.com/questions/26305/how-can-i-play-sound-in-java try { Clip clip = AudioSystem.getClip(); InputStream is = StdAudio.class.getResourceAsStream(filename); AudioInputStream ais = AudioSystem.getAudioInputStream(is); clip.open(ais); clip.start(); } catch (RuntimeException e) { System.out.println("could not play '" + filename + "'"); throw e; } catch (Exception e) { System.out.println("could not play '" + filename + "'"); e.printStackTrace(); } } /** * Loops an audio file (in .wav, .mid, or .au format) in a background thread. 
* * @param filename the name of the audio file */ public static synchronized void loop(String filename) { if (filename == null) throw new NullPointerException(); // code adapted from: http://stackoverflow.com/questions/26305/how-can-i-play-sound-in-java try { Clip clip = AudioSystem.getClip(); InputStream is = StdAudio.class.getResourceAsStream(filename); AudioInputStream ais = AudioSystem.getAudioInputStream(is); clip.open(ais); clip.loop(Clip.LOOP_CONTINUOUSLY); } catch (RuntimeException e) { System.out.println("could not play '" + filename + "'"); throw e; } catch (Exception e) { System.out.println("could not play '" + filename + "'"); e.printStackTrace(); } } // return data as a byte array private static byte[] readByte(String filename) { byte[] data = null; AudioInputStream ais = null; try { // try to read from file File file = new File(filename); if (file.exists()) { ais = AudioSystem.getAudioInputStream(file); int bytesToRead = ais.available(); data = new byte[bytesToRead]; int bytesRead = ais.read(data); if (bytesToRead != bytesRead) throw new RuntimeException("read only " + bytesRead + " of " + bytesToRead + " bytes"); } // try to read from URL else { URL url = StdAudio.class.getResource(filename); ais = AudioSystem.getAudioInputStream(url); int bytesToRead = ais.available(); data = new byte[bytesToRead]; int bytesRead = ais.read(data); if (bytesToRead != bytesRead) throw new RuntimeException("read only " + bytesRead + " of " + bytesToRead + " bytes"); } } catch (IOException e) { System.out.println(e.getMessage()); throw new RuntimeException("Could not read " + filename); } catch (UnsupportedAudioFileException e) { System.out.println(e.getMessage()); throw new RuntimeException(filename + " in unsupported audio format"); } return data; } /** * Saves the double array as an audio file (using .wav or .au format). * * @param filename the name of the audio file * @param samples the array of samples */ public static void save(String filename, double[] samples) { // assumes 44,100 samples per second // use 16-bit audio, mono, signed PCM, little Endian AudioFormat format = new AudioFormat(SAMPLE_RATE, 16, 1, true, false); byte[] data = new byte[2 * samples.length]; for (int i = 0; i < samples.length; i++) { int temp = (short) (samples[i] * MAX_16_BIT); data[2*i + 0] = (byte) temp; data[2*i + 1] = (byte) (temp >> 8); } // now save the file try { ByteArrayInputStream bais = new ByteArrayInputStream(data); AudioInputStream ais = new AudioInputStream(bais, format, samples.length); if (filename.endsWith(".wav") || filename.endsWith(".WAV")) { AudioSystem.write(ais, AudioFileFormat.Type.WAVE, new File(filename)); } else if (filename.endsWith(".au") || filename.endsWith(".AU")) { AudioSystem.write(ais, AudioFileFormat.Type.AU, new File(filename)); } else { throw new RuntimeException("File format not supported: " + filename); } } catch (IOException e) { System.out.println(e); } } /*************************************************************************** * Unit tests <tt>StdAudio</tt>. 
***************************************************************************/ // create a note (sine wave) of the given frequency (Hz), for the given // duration (seconds) scaled to the given volume (amplitude) private static double[] note(double hz, double duration, double amplitude) { int n = (int) (StdAudio.SAMPLE_RATE * duration); double[] a = new double[n+1]; for (int i = 0; i <= n; i++) a[i] = amplitude * Math.sin(2 * Math.PI * i * hz / StdAudio.SAMPLE_RATE); return a; } /** * Test client - play an A major scale to standard audio. */ public static void main(String[] args) { // 440 Hz for 1 sec double freq = 440.0; for (int i = 0; i <= StdAudio.SAMPLE_RATE; i++) { StdAudio.play(0.5 * Math.sin(2*Math.PI * freq * i / StdAudio.SAMPLE_RATE)); } // scale increments int[] steps = { 0, 2, 4, 5, 7, 9, 11, 12 }; for (int i = 0; i < steps.length; i++) { double hz = 440.0 * Math.pow(2, steps[i] / 12.0); StdAudio.play(note(hz, 1.0, 0.5)); } // need to call this in non-interactive stuff so the program doesn't terminate // until all the sound leaves the speaker. StdAudio.close(); } }
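/*
 * Illustrative sketch (not part of StdAudio): combines the save(), read() and play()
 * methods above into a round trip. The enclosing class and the file name
 * "tone440.wav" are hypothetical; it assumes a writable working directory and an
 * available audio output line (StdAudio's static initializer opens one).
 */
import stdlib.StdAudio;

class StdAudioRoundTripSketch {
    public static void main(String[] args) {
        // one second of a 440 Hz sine wave at half amplitude
        double[] tone = new double[StdAudio.SAMPLE_RATE];
        for (int i = 0; i < tone.length; i++) {
            tone[i] = 0.5 * Math.sin(2 * Math.PI * 440.0 * i / StdAudio.SAMPLE_RATE);
        }
        StdAudio.save("tone440.wav", tone);                // 16-bit, mono, 44,100 Hz WAV
        double[] roundTrip = StdAudio.read("tone440.wav"); // samples back in [-1.0, +1.0]
        StdAudio.play(roundTrip);                          // stream the samples to the sound card
        StdAudio.close();                                  // drain the line before exiting
    }
}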
package co.phoenixlab.common.localization; import java.util.Collection; import java.util.Locale; import java.util.Map; public interface Localizer { /** * Marker text returned by the localizer when a given key cannot be found. * Default value is {@code ##NOT_FOUND##}, can be set via the system * property {@code co.phoenixlab.localizer.notfound} */ String LOCALE_STRING_NOT_FOUND = System.getProperty("co.phoenixlab.localizer.notfound", "##NOT_FOUND##"); /** * Marker text returned by the localizer if the format string is invalid. * Default value is {@code ##INVALID_FORMAT##}, can be set via the system * property {@code co.phoenixlab.localizer.invalidformat} */ String INVALID_FORMAT_STRING = System.getProperty("co.phoenixlab.localizer.invalidformat", "##INVALID_FORMAT##"); /** * Marker text returned by the localizer if no plurality rule matched the given argument * Default value is {@code ##NO_MATCHING_PLURAL##}, can be set via the system * property {@code co.phoenixlab.localizer.nomatchingplural} */ String NO_MATCHING_PLURAL = System.getProperty("co.phoenixlab.localizer.nomatchingplural", "##NO_MATCHING_PLURAL##"); String PREFIX_FLAG_BASE = "@# "; int PREFIX_FLAG_LENGTH = PREFIX_FLAG_BASE.length(); int PREFIX_FLAG_DO_NOT_LOCALIZE_BIT = 0; /** * Prefix flag to prepend to a key to indicate that the key should be taken as-is and not localized. * * @see #doNotLocalize(String) */ String PREFIX_FLAG_DO_NOT_LOCALIZE = internalSetFlagBit(PREFIX_FLAG_BASE, PREFIX_FLAG_DO_NOT_LOCALIZE_BIT); int PREFIX_FLAG_DO_NOT_FORMAT_BIT = 1; /** * Prefix flag to prepend to a key to indicate that the localizer should not attempt to parse and replace * format tokens in the localization string. * * @see #doNotFormat(String) */ String PREFIX_FLAG_DO_NOT_FORMAT = internalSetFlagBit(PREFIX_FLAG_BASE, PREFIX_FLAG_DO_NOT_FORMAT_BIT); /** * Gets the locale for this Localizer. * * @return This Localizer's locale */ Locale getLocale(); /** * Registers a provider to this Localizer. * * @param provider A LocaleStringProvider to register */ void addLocaleStringProvider(LocaleStringProvider provider); /** * Unregisters a provider from this localizer. * * @param provider The LocaleStringProvider to unregister */ void removeLocaleStringProvider(LocaleStringProvider provider); /** * Gets the providers registered with this Localizer. * * @return An unmodifiable Collection view of the LocaleStringProviders registered with this Localizer */ Collection<LocaleStringProvider> getLocaleStringProviders(); /** * Unregisters all providers from this Localizer */ void removeAllLocaleStringProviders(); /** * Register the provided plurality rules to this Localizer * @param rules A map of plurality rules. key: name, value: LocalizerPluralRule */ void registerPluralityRules(Map<String, LocalizerPluralRule> rules); /** * Checks if a given key exists. * * @param key The key to test * @return True if there exists a localization string with the given key, false otherwise */ boolean containsKey(String key); /** * Localizes a given key. This is the zero-arg specialization of {@link #localize(String, Object...)}. * * @param key The key to localize * @return The localized string, or {@link #LOCALE_STRING_NOT_FOUND} if no such key exists * @see #localize(String, Object...) */ String localize(String key); /** * Localizes a given key with the given argument for formatting. This is the one-arg specialization of * {@link #localize(String, Object...)}. 
* * @param key The key to localize * @param arg0 The argument to pass in for formatting * @return The localized string, or {@link #LOCALE_STRING_NOT_FOUND} if no such key exists * @see #localize(String, Object...) */ String localize(String key, Object arg0); /** * Localizes a given key with the given arguments for formatting. This is the two-arg specialization of * {@link #localize(String, Object...)}. * * @param key The key to localize * @param arg0 The first argument to pass in for formatting * @param arg1 The second argument to pass in for formatting * @return The localized string, or {@link #LOCALE_STRING_NOT_FOUND} if no such key exists * @see #localize(String, Object...) */ String localize(String key, Object arg0, Object arg1); /** * Localizes a given key with the given argument for formatting. * <p> * The given key is used to fetch a special format string. Format strings may contain: * <ol> * <li>Other localization keys inside of single square brackets: [full.localization.key]</li> * <li>Relative localization keys inside of single square brackets: [.relative.localization.key] which will * resolve to the given key + the relative key, so given "foo.bar" as the key and "[.baz.biz]" in the * corresponding locale string, the string from "foo.bar.baz.biz" will be substituted.</li> * <li>Argument index, formatting, and pluralization controls inside curly braces (see below)</li> * </ol> * <p> * TODO Curly brace docs * * @param key The key to localize * @param args A varargs of arguments for formatting * @return The localized string, or {@link #LOCALE_STRING_NOT_FOUND} if no such key exists */ String localize(String key, Object... args); /** * Attempts to localize the given key, returning {@code def} if it could not be found. This is the zero-args * specialization for {@link #localizeOrDefault(String, String, Object...)}. * * @param key The key to localize * @param def The default String (does not get localized) to return * @return The localized string, or {@code def} if no such string exists * @see #localizeOrDefault(String, String, Object...) */ String localizeOrDefault(String key, String def); /** * Attempts to localize the given key with the given argument for formatting, returning {@code def} if it could * not be found. This is the one-arg specialization for {@link #localizeOrDefault(String, String, Object...)}. * * @param key The key to localize * @param def The default String (does not get localized) to return * @param arg0 The argument to pass in for formatting * @return The localized string, or {@code def} if no such string exists * @see #localizeOrDefault(String, String, Object...) */ String localizeOrDefault(String key, String def, Object arg0); /** * Attempts to localize the given key with the given arguments for formatting, returning {@code def} if it could * not be found. This is the two-arg specialization for {@link #localizeOrDefault(String, String, Object...)}. * * @param key The key to localize * @param def The default String (does not get localized) to return * @param arg0 The first argument to pass in for formatting * @param arg1 The second argument to pass in for formatting * @return The localized string, or {@code def} if no such string exists * @see #localizeOrDefault(String, String, Object...) */ String localizeOrDefault(String key, String def, Object arg0, Object arg1); /** * Attempts to localize the given key with the given arguments for formatting, returning {@code def} if it could * not be found. 
 *
 * @param key The key to localize
 * @param def The default String (does not get localized) to return
 * @param args A varargs of arguments for formatting
 * @return The localized string, or {@code def} if no such string exists
 * @see #localize(String, Object...)
 */
String localizeOrDefault(String key, String def, Object... args);

/**
 * Flags a string not to be localized by the localizer. The localizer will not attempt to perform a lookup and
 * will not perform the localization step. Other steps may still be performed, depending on the presence of other
 * control flags.
 *
 * @param key The string to prevent localization on
 * @return A key that will not be localized when passed to {@link #localize(String, Object...)} or
 * {@link #localizeOrDefault(String, String, Object...)} and their specializations
 */
static String doNotLocalize(String key) {
    // A flagged key starts with the two-character "@#" header; the third character carries the
    // flag bits, so only the header itself can be compared literally.
    if (key.regionMatches(0, PREFIX_FLAG_BASE, 0, PREFIX_FLAG_LENGTH - 1)) {
        return internalSetFlagBit(key, PREFIX_FLAG_DO_NOT_LOCALIZE_BIT);
    } else {
        return PREFIX_FLAG_DO_NOT_LOCALIZE + key;
    }
}

/**
 * Flags a string not to be formatted by the localizer. The localizer will not attempt to parse and handle
 * formatting tokens in the localized string. Other steps may still be performed, depending on the presence
 * of other control flags.
 *
 * @param key The string to prevent formatting on
 * @return A key that will not be formatted when passed to {@link #localize(String, Object...)} or
 * {@link #localizeOrDefault(String, String, Object...)} and their specializations
 */
static String doNotFormat(String key) {
    if (key.regionMatches(0, PREFIX_FLAG_BASE, 0, PREFIX_FLAG_LENGTH - 1)) {
        return internalSetFlagBit(key, PREFIX_FLAG_DO_NOT_FORMAT_BIT);
    } else {
        return PREFIX_FLAG_DO_NOT_FORMAT + key;
    }
}

/**
 * <strong>INTERNAL METHOD</strong>
 * <p>
 * Sets the bit to 1 at the given position in the flag section of a flagged key.
 *
 * @param current The existing string containing a flag section
 * @param bit The bit to set to 1
 * @return The string with the updated flag section
 */
static String internalSetFlagBit(String current, int bit) {
    if (bit > 4) {
        throw new IllegalArgumentException("Bit must be between 0 and 4");
    }
    char[] chars = current.toCharArray();
    int bits = chars[2];
    bits = bits | 1 << bit;
    chars[2] = (char) bits;
    return new String(chars);
}

/**
 * <strong>INTERNAL METHOD</strong>
 * <p>
 * Gets the value of the bit at the given position in the flag section of a flagged key.
 *
 * @param key The flagged key to check from
 * @param bit The bit number to check
 * @return true if the bit is set, false if not
 */
static boolean internalIsFlagBitSet(String key, int bit) {
    if (bit > 4) {
        throw new IllegalArgumentException("Bit must be between 0 and 4");
    }
    // Compare only the "@#" header; the third character is the (mutated) flag character.
    if (key.length() <= PREFIX_FLAG_LENGTH || !key.regionMatches(0, PREFIX_FLAG_BASE, 0, PREFIX_FLAG_LENGTH - 1)) {
        return false;
    }
    char[] chars = key.toCharArray();
    int bits = chars[2];
    return (bits & (1 << bit)) != 0;
}

/**
 * Removes all flags from a key. If the key is not flagged, then does nothing and returns the provided key.
 *
 * @param key The key to remove all flags from
 * @return The key with all flags removed
 */
static String stripFlags(String key) {
    if (key.regionMatches(0, PREFIX_FLAG_BASE, 0, PREFIX_FLAG_LENGTH - 1) && key.length() > PREFIX_FLAG_LENGTH) {
        // The flag header occupies exactly PREFIX_FLAG_LENGTH characters; everything after it is the key.
        return key.substring(PREFIX_FLAG_LENGTH);
    }
    return key;
}
}
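/*
 * Illustrative sketch (not part of the library): shows the static flag helpers declared on
 * Localizer together with a typical lookup. The key "ui.title", the default "Untitled", the
 * formatting argument, and the concrete Localizer implementation behind the "localizer"
 * parameter are all hypothetical; only the interface itself is defined above.
 */
import co.phoenixlab.common.localization.Localizer;

class LocalizerFlagSketch {
    static void demo(Localizer localizer) {
        // Mark a key so the localizer passes it through without a lookup.
        String raw = Localizer.doNotLocalize("ui.title");
        System.out.println(Localizer.internalIsFlagBitSet(raw, Localizer.PREFIX_FLAG_DO_NOT_LOCALIZE_BIT)); // true
        System.out.println(Localizer.stripFlags(raw)); // back to "ui.title"

        // Ordinary lookup with one formatting argument; falls back to the given
        // default when the key is missing from every registered provider.
        String title = localizer.localizeOrDefault("ui.title", "Untitled", 3);
        System.out.println(title);
    }
}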
/** * Copyright (c) 2013-2019 Contributors to the Eclipse Foundation * * <p> See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.ingest; import java.net.URL; import java.nio.ByteBuffer; import java.util.Iterator; import org.apache.commons.lang.ArrayUtils; import org.locationtech.geowave.adapter.vector.FeatureDataAdapter; import org.locationtech.geowave.adapter.vector.GeoWaveAvroFeatureDataAdapter; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.ingest.avro.GeoWaveAvroFormatPlugin; import org.locationtech.geowave.core.ingest.hdfs.mapreduce.IngestFromHdfsPlugin; import org.locationtech.geowave.core.ingest.hdfs.mapreduce.IngestWithMapper; import org.locationtech.geowave.core.store.CloseableIterator; import org.locationtech.geowave.core.store.CloseableIteratorWrapper; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.data.field.FieldVisibilityHandler; import org.locationtech.geowave.core.store.data.visibility.GlobalVisibilityHandler; import org.locationtech.geowave.core.store.index.CommonIndexValue; import org.locationtech.geowave.core.store.ingest.GeoWaveData; import org.locationtech.geowave.core.store.ingest.LocalFileIngestPlugin; import org.locationtech.jts.geom.Geometry; import org.opengis.feature.simple.SimpleFeature; import org.opengis.feature.simple.SimpleFeatureType; import com.google.common.base.Predicate; import com.google.common.collect.Iterators; public abstract class AbstractSimpleFeatureIngestPlugin<I> implements LocalFileIngestPlugin<SimpleFeature>, IngestFromHdfsPlugin<I, SimpleFeature>, GeoWaveAvroFormatPlugin<I, SimpleFeature>, Persistable { protected CQLFilterOptionProvider filterOptionProvider = new CQLFilterOptionProvider(); protected FeatureSerializationOptionProvider serializationFormatOptionProvider = new FeatureSerializationOptionProvider(); protected TypeNameOptionProvider typeNameProvider = new TypeNameOptionProvider(); protected GeometrySimpOptionProvider simpOptionProvider = new GeometrySimpOptionProvider(); public void setFilterProvider(final CQLFilterOptionProvider filterOptionProvider) { this.filterOptionProvider = filterOptionProvider; } public void setSerializationFormatProvider( final FeatureSerializationOptionProvider serializationFormatOptionProvider) { this.serializationFormatOptionProvider = serializationFormatOptionProvider; } public void setTypeNameProvider(final TypeNameOptionProvider typeNameProvider) { this.typeNameProvider = typeNameProvider; } public void setGeometrySimpOptionProvider(final GeometrySimpOptionProvider geometryProvider) { this.simpOptionProvider = geometryProvider; } @Override public byte[] toBinary() { final byte[] filterBinary = filterOptionProvider.toBinary(); final byte[] typeNameBinary = typeNameProvider.toBinary(); final byte[] simpBinary = simpOptionProvider.toBinary(); final byte[] backingBuffer = new byte[filterBinary.length + typeNameBinary.length + simpBinary.length + VarintUtils.unsignedIntByteLength(filterBinary.length) + 
VarintUtils.unsignedIntByteLength(typeNameBinary.length)]; final ByteBuffer buf = ByteBuffer.wrap(backingBuffer); VarintUtils.writeUnsignedInt(filterBinary.length, buf); buf.put(filterBinary); VarintUtils.writeUnsignedInt(typeNameBinary.length, buf); buf.put(typeNameBinary); buf.put(simpBinary); return ArrayUtils.addAll(serializationFormatOptionProvider.toBinary(), backingBuffer); } @Override public void fromBinary(final byte[] bytes) { final byte[] otherBytes = new byte[bytes.length - 1]; System.arraycopy(bytes, 1, otherBytes, 0, otherBytes.length); final byte[] kryoBytes = new byte[] {bytes[0]}; final ByteBuffer buf = ByteBuffer.wrap(otherBytes); final int filterBinaryLength = VarintUtils.readUnsignedInt(buf); final byte[] filterBinary = ByteArrayUtils.safeRead(buf, filterBinaryLength); final int typeNameBinaryLength = VarintUtils.readUnsignedInt(buf); final byte[] typeNameBinary = ByteArrayUtils.safeRead(buf, typeNameBinaryLength); final byte[] geometrySimpBinary = new byte[buf.remaining()]; buf.get(geometrySimpBinary); serializationFormatOptionProvider = new FeatureSerializationOptionProvider(); serializationFormatOptionProvider.fromBinary(kryoBytes); filterOptionProvider = new CQLFilterOptionProvider(); filterOptionProvider.fromBinary(filterBinary); typeNameProvider = new TypeNameOptionProvider(); typeNameProvider.fromBinary(typeNameBinary); simpOptionProvider = new GeometrySimpOptionProvider(); simpOptionProvider.fromBinary(geometrySimpBinary); } protected DataTypeAdapter<SimpleFeature> newAdapter( final SimpleFeatureType type, final FieldVisibilityHandler<SimpleFeature, Object> fieldVisiblityHandler) { if (serializationFormatOptionProvider.isAvro()) { return new GeoWaveAvroFeatureDataAdapter(type); } return new FeatureDataAdapter(type, fieldVisiblityHandler); } protected abstract SimpleFeatureType[] getTypes(); @Override public DataTypeAdapter<SimpleFeature>[] getDataAdapters(final String globalVisibility) { final FieldVisibilityHandler<SimpleFeature, Object> fieldVisiblityHandler = ((globalVisibility != null) && !globalVisibility.isEmpty()) ? 
new GlobalVisibilityHandler<>(globalVisibility) : null; final SimpleFeatureType[] types = getTypes(); final DataTypeAdapter<SimpleFeature>[] retVal = new DataTypeAdapter[types.length]; for (int i = 0; i < types.length; i++) { retVal[i] = newAdapter(types[i], fieldVisiblityHandler); } return retVal; } @Override public CloseableIterator<GeoWaveData<SimpleFeature>> toGeoWaveData( final URL input, final String[] indexNames, final String globalVisibility) { final CloseableIterator<I> hdfsObjects = toAvroObjects(input); return new CloseableIterator<GeoWaveData<SimpleFeature>>() { CloseableIterator<GeoWaveData<SimpleFeature>> currentIterator = null; GeoWaveData<SimpleFeature> next = null; private void computeNext() { if (next == null) { if (currentIterator != null) { if (currentIterator.hasNext()) { next = currentIterator.next(); return; } else { currentIterator.close(); currentIterator = null; } } while (hdfsObjects.hasNext()) { final I hdfsObject = hdfsObjects.next(); currentIterator = wrapIteratorWithFilters( toGeoWaveDataInternal(hdfsObject, indexNames, globalVisibility)); if (currentIterator.hasNext()) { next = currentIterator.next(); return; } else { currentIterator.close(); currentIterator = null; } } } } @Override public boolean hasNext() { computeNext(); return next != null; } @Override public GeoWaveData<SimpleFeature> next() { computeNext(); final GeoWaveData<SimpleFeature> retVal = next; next = null; return retVal; } @Override public void close() { hdfsObjects.close(); } }; } protected CloseableIterator<GeoWaveData<SimpleFeature>> wrapIteratorWithFilters( final CloseableIterator<GeoWaveData<SimpleFeature>> geowaveData) { final CQLFilterOptionProvider internalFilterProvider; if ((filterOptionProvider != null) && (filterOptionProvider.getCqlFilterString() != null) && !filterOptionProvider.getCqlFilterString().trim().isEmpty()) { internalFilterProvider = filterOptionProvider; } else { internalFilterProvider = null; } final TypeNameOptionProvider internalTypeNameProvider; if ((typeNameProvider != null) && (typeNameProvider.getTypeName() != null) && !typeNameProvider.getTypeName().trim().isEmpty()) { internalTypeNameProvider = typeNameProvider; } else { internalTypeNameProvider = null; } final GeometrySimpOptionProvider internalSimpOptionProvider; if ((simpOptionProvider != null)) { internalSimpOptionProvider = simpOptionProvider; } else { internalSimpOptionProvider = null; } if ((internalFilterProvider != null) || (internalTypeNameProvider != null)) { final Iterator<GeoWaveData<SimpleFeature>> it = Iterators.filter(geowaveData, new Predicate<GeoWaveData<SimpleFeature>>() { @Override public boolean apply(final GeoWaveData<SimpleFeature> input) { if ((internalTypeNameProvider != null) && !internalTypeNameProvider.typeNameMatches(input.getTypeName())) { return false; } if ((internalFilterProvider != null) && !internalFilterProvider.evaluate(input.getValue())) { return false; } if ((internalSimpOptionProvider != null)) { final Geometry simpGeom = internalSimpOptionProvider.simplifyGeometry( (Geometry) input.getValue().getDefaultGeometry()); if (!internalSimpOptionProvider.filterGeometry(simpGeom)) { return false; } input.getValue().setDefaultGeometry(simpGeom); } return true; } }); return new CloseableIteratorWrapper<>(geowaveData, it); } return geowaveData; } protected abstract CloseableIterator<GeoWaveData<SimpleFeature>> toGeoWaveDataInternal( final I hdfsObject, final String[] indexNames, final String globalVisibility); public abstract static class AbstractIngestSimpleFeatureWithMapper<I> 
implements IngestWithMapper<I, SimpleFeature> { protected AbstractSimpleFeatureIngestPlugin<I> parentPlugin; public AbstractIngestSimpleFeatureWithMapper( final AbstractSimpleFeatureIngestPlugin<I> parentPlugin) { this.parentPlugin = parentPlugin; } @Override public DataTypeAdapter<SimpleFeature>[] getDataAdapters(final String globalVisibility) { return parentPlugin.getDataAdapters(globalVisibility); } @Override public CloseableIterator<GeoWaveData<SimpleFeature>> toGeoWaveData( final I input, final String[] indexNames, final String globalVisibility) { return parentPlugin.wrapIteratorWithFilters( parentPlugin.toGeoWaveDataInternal(input, indexNames, globalVisibility)); } @Override public byte[] toBinary() { return parentPlugin.toBinary(); } @Override public void fromBinary(final byte[] bytes) { parentPlugin.fromBinary(bytes); } @Override public Class<? extends CommonIndexValue>[] getSupportedIndexableTypes() { return parentPlugin.getSupportedIndexableTypes(); } } }
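/*
 * Note on the byte layout produced by toBinary() and consumed by fromBinary() above
 * (derived from the two methods as written; not separately documented by the library):
 *
 *   [ 1 byte ]  serialization format option (Avro vs. default), read back as bytes[0]
 *   [ varint ]  length of the CQL filter options, followed by that many bytes
 *   [ varint ]  length of the type name options, followed by that many bytes
 *   [ rest   ]  geometry simplification options (no length prefix; consumes the remainder)
 *
 * fromBinary() therefore relies on FeatureSerializationOptionProvider serializing to exactly
 * one byte; if that provider ever grows, both methods have to change together.
 */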
package org.lemsml.jlems.viz.plot; import java.awt.Graphics2D; import java.awt.event.MouseEvent; import java.awt.event.MouseListener; import java.awt.event.MouseMotionListener; import java.awt.event.MouseWheelEvent; import java.awt.event.MouseWheelListener; import org.lemsml.jlems.core.logging.E; public final class Mouse implements MouseListener, MouseMotionListener, MouseWheelListener { public final static int LEFT = 1; public final static int MIDDLE = 2; public final static int RIGHT = 3; private int button; private int canvasWidth; private int canvasHeight; private int xDown; private int yDown; private int xCurrent; private int yCurrent; private int scrollUnits; private boolean down; // private boolean onCanvas; // private long timeDown; // private long periodDownToDown; private BaseMouseHandler activeHandler; private BaseMouseHandler motionHandler; private final WorldCanvas canvas; private ClickListener clickListener; public Mouse(WorldCanvas c, boolean interactive) { super(); canvas = c; if (interactive) { canvas.addMouseListener(this); canvas.addMouseMotionListener(this); canvas.addMouseWheelListener(this); } } public void setHandler(BaseMouseHandler h) { activeHandler = h; } public void setClickListener(ClickListener cl) { clickListener = cl; } public void detach() { canvas.removeMouseListener(this); canvas.removeMouseMotionListener(this); } private void requestRepaint() { canvas.repaint(); } boolean leftButton() { return (button == LEFT); } boolean middleButton() { return (button == MIDDLE); } boolean rightButton() { return (button == RIGHT); } public void updateCanvasDimensions() { canvasWidth = canvas.getWidth(); canvasHeight = canvas.getHeight(); } int getCanvasWidth() { return canvasWidth; } int getCanvasHeight() { return canvasHeight; } public void mouseEntered(MouseEvent e) { // onCanvas = true; } public void mouseExited(MouseEvent e) { // onCanvas = false; } public void mouseClicked(MouseEvent e) { readPosition(e); if (clickListener != null) { clickListener.pointClicked(e.getX(), e.getY(), MouseUtil.getButton(e)); } } public void mouseMoved(MouseEvent e) { if (down) { // should only get dragged events when down; E.shortWarning("mouse moved when down?? " + e); down = false; return; } readPosition(e); if (motionHandler != null) { if (motionHandler.motionChange(this)) { // TODO this is lazy - the mh should be // allowed to say if it wants a complete repaint or // just an image without itself to paint on. canvas.repaint(); } } } public void mousePressed(MouseEvent e) { down = true; readButton(e); readPosition(e); readPressPosition(e); // long tp = e.getWhen(); // periodDownToDown = tp - timeDown; // timeDown = tp; if (activeHandler != null) { activeHandler.init(this); } if (activeHandler != null) { activeHandler.applyOnDown(this); } } public void mouseDragged(MouseEvent e) { if (!down) { return; } readPosition(e); if (activeHandler != null) { activeHandler.applyOnDrag(this); if (activeHandler.getRepaintStatus() == BaseMouseHandler.FULL) { requestRepaint(); } else if (activeHandler.getRepaintStatus() == BaseMouseHandler.BUFFERED) { // should do some ting more economical here EFF requestRepaint(); } else { // nothing to do... } } } public void mouseReleased(MouseEvent e) { if (!down) { return; } readPosition(e); if (activeHandler != null) { activeHandler.applyOnRelease(this); } down = false; requestRepaint(); canvas.fixRanges(); updateCanvasDimensions(); // EFF ?? 
here } @Override public void mouseWheelMoved(MouseWheelEvent e) { // TODO Auto-generated method stub scrollUnits = e.getWheelRotation(); readPosition(e); if (activeHandler == null) { } else { updateCanvasDimensions(); activeHandler.applyOnScrollWheel(this); if (activeHandler.getRepaintStatus() == BaseMouseHandler.FULL) { requestRepaint(); } else if (activeHandler.getRepaintStatus() == BaseMouseHandler.BUFFERED) { // should do some ting more economical here EFF requestRepaint(); } else { // nothing to do... } } } private void readPosition(MouseEvent e) { xCurrent = e.getX(); yCurrent = e.getY(); } private void readPressPosition(MouseEvent e) { xDown = e.getX(); yDown = e.getY(); } private void readButton(MouseEvent e) { button = MouseUtil.getButton(e); } public int getButton() { return button; } public boolean isDown() { return down; } int getX() { return xCurrent; } int getY() { return yCurrent; } int getScrollUnits() { return scrollUnits; } int getXDown() { return xDown; } int getYDown() { return yDown; } void echoPaint(Graphics2D g) { if (activeHandler != null) { activeHandler.echoPaint(g); activeHandler.setRepaintStatus(BaseMouseHandler.NONE); } else if (motionHandler != null) { motionHandler.echoPaint(g); // activeHandler.setRepaintStatus(MouseHandler.NONE); } } // TODO should these go via mouse?? void boxSelected(int x0, int y0, int x1, int y1) { canvas.boxSelected(x0, y0, x1, y1); } void initializeZoom(int xc, int yc) { canvas.initializeZoom(xc, yc); } void dragZoom(double fx, double fy, int xc, int yc) { canvas.dragZoom(fx, fy, xc, yc); } void zoom(double fac, int xc, int yc) { canvas.zoom(fac, xc, yc); } void zoom(double xfac, double yfac, int xc, int yc) { canvas.zoom(xfac, yfac, xc, yc); } void trialPan(int xfrom, int yfrom, int xto, int yto) { canvas.trialPan(xfrom, yfrom, xto, yto); } void permanentPan(int xfrom, int yfrom, int xto, int yto) { canvas.permanentPan(xfrom, yfrom, xto, yto); } public void dragRollRotate(int pdx, int pdy) { canvas.dragRollRotate(pdx, pdy); } public void dragZRotate(int pdx, int pdy) { canvas.dragZRotate(pdx, pdy); } public void initializeRotation(int ix, int iy) { canvas.initializeRotation(ix, iy); } public void initializeRotation(double x, double y, double z) { canvas.initializeRotation(x, y, z); } public void restoreAA() { canvas.restoreAA(); } }
package com.themastergeneral.ctdpaint.painting.imc; import java.util.List; import java.util.Locale; import com.themastergeneral.ctdpaint.CTDPaint; import com.themastergeneral.ctdpaint.painting.PaintBlack; import com.themastergeneral.ctdpaint.painting.PaintBlue; import com.themastergeneral.ctdpaint.painting.PaintBrown; import com.themastergeneral.ctdpaint.painting.PaintCyan; import com.themastergeneral.ctdpaint.painting.PaintGray; import com.themastergeneral.ctdpaint.painting.PaintLightBlue; import com.themastergeneral.ctdpaint.painting.PaintLime; import com.themastergeneral.ctdpaint.painting.PaintMagenta; import com.themastergeneral.ctdpaint.painting.PaintOrange; import com.themastergeneral.ctdpaint.painting.PaintPink; import com.themastergeneral.ctdpaint.painting.PaintPurple; import com.themastergeneral.ctdpaint.painting.PaintRed; import com.themastergeneral.ctdpaint.painting.PaintSilver; import com.themastergeneral.ctdpaint.painting.PaintWhite; import com.themastergeneral.ctdpaint.painting.PaintYellow; import net.minecraft.block.state.IBlockState; import net.minecraft.nbt.NBTTagCompound; import net.minecraftforge.fml.common.event.FMLInterModComms.IMCMessage; public class IMCHandler { public static final IMCHandler INSTANCE = new IMCHandler(); public void handleIMC(List<IMCMessage> messages) { NBTTagCompound nbt; for (IMCMessage message : messages) { try { if (!message.isNBTMessage()) { continue; } nbt = message.getNBTValue(); String operation = message.key.toLowerCase(Locale.US); switch (operation) { case ADD_BLACK: PaintBlack.instance().addPaint( (IBlockState) nbt.getCompoundTag(PAINTED), (IBlockState) nbt.getCompoundTag(NEWBLOCK)); continue; case REMOVE_BLACK: PaintBlack.instance().removeRecipe( (IBlockState) nbt.getCompoundTag(PAINTED), (IBlockState) nbt.getCompoundTag(NEWBLOCK)); continue; case ADD_BLUE: PaintBlue.instance().addPaint( (IBlockState) nbt.getCompoundTag(PAINTED), (IBlockState) nbt.getCompoundTag(NEWBLOCK)); continue; case REMOVE_BLUE: PaintBlue.instance().removeRecipe( (IBlockState) nbt.getCompoundTag(PAINTED), (IBlockState) nbt.getCompoundTag(NEWBLOCK)); continue; case ADD_BROWN: PaintBrown.instance().addPaint( (IBlockState) nbt.getCompoundTag(PAINTED), (IBlockState) nbt.getCompoundTag(NEWBLOCK)); continue; case REMOVE_BROWN: PaintBrown.instance().removeRecipe( (IBlockState) nbt.getCompoundTag(PAINTED), (IBlockState) nbt.getCompoundTag(NEWBLOCK)); continue; case ADD_CYAN: PaintCyan.instance().addPaint( (IBlockState) nbt.getCompoundTag(PAINTED), (IBlockState) nbt.getCompoundTag(NEWBLOCK)); continue; case REMOVE_CYAN: PaintCyan.instance().removeRecipe( (IBlockState) nbt.getCompoundTag(PAINTED), (IBlockState) nbt.getCompoundTag(NEWBLOCK)); continue; case ADD_GRAY: PaintGray.instance().addPaint( (IBlockState) nbt.getCompoundTag(PAINTED), (IBlockState) nbt.getCompoundTag(NEWBLOCK)); continue; case REMOVE_GRAY: PaintGray.instance().removeRecipe( (IBlockState) nbt.getCompoundTag(PAINTED), (IBlockState) nbt.getCompoundTag(NEWBLOCK)); continue; case ADD_LIGHT_BLUE: PaintLightBlue.instance().addPaint( (IBlockState) nbt.getCompoundTag(PAINTED), (IBlockState) nbt.getCompoundTag(NEWBLOCK)); continue; case REMOVE_LIGHT_BLUE: PaintLightBlue.instance().removeRecipe( (IBlockState) nbt.getCompoundTag(PAINTED), (IBlockState) nbt.getCompoundTag(NEWBLOCK)); continue; case ADD_LIME: PaintLime.instance().addPaint( (IBlockState) nbt.getCompoundTag(PAINTED), (IBlockState) nbt.getCompoundTag(NEWBLOCK)); continue; case REMOVE_LIME: PaintLime.instance().removeRecipe( (IBlockState) 
nbt.getCompoundTag(PAINTED), (IBlockState) nbt.getCompoundTag(NEWBLOCK)); continue; case ADD_MAGENTA: PaintMagenta.instance().addPaint( (IBlockState) nbt.getCompoundTag(PAINTED), (IBlockState) nbt.getCompoundTag(NEWBLOCK)); continue; case REMOVE_MAGENTA: PaintMagenta.instance().removeRecipe( (IBlockState) nbt.getCompoundTag(PAINTED), (IBlockState) nbt.getCompoundTag(NEWBLOCK)); continue; case ADD_ORANGE: PaintOrange.instance().addPaint( (IBlockState) nbt.getCompoundTag(PAINTED), (IBlockState) nbt.getCompoundTag(NEWBLOCK)); continue; case REMOVE_ORANGE: PaintOrange.instance().removeRecipe( (IBlockState) nbt.getCompoundTag(PAINTED), (IBlockState) nbt.getCompoundTag(NEWBLOCK)); continue; case ADD_PINK: PaintPink.instance().addPaint( (IBlockState) nbt.getCompoundTag(PAINTED), (IBlockState) nbt.getCompoundTag(NEWBLOCK)); continue; case REMOVE_PINK: PaintPink.instance().removeRecipe( (IBlockState) nbt.getCompoundTag(PAINTED), (IBlockState) nbt.getCompoundTag(NEWBLOCK)); continue; case ADD_PURPLE: PaintPurple.instance().addPaint( (IBlockState) nbt.getCompoundTag(PAINTED), (IBlockState) nbt.getCompoundTag(NEWBLOCK)); continue; case REMOVE_PURPLE: PaintPurple.instance().removeRecipe( (IBlockState) nbt.getCompoundTag(PAINTED), (IBlockState) nbt.getCompoundTag(NEWBLOCK)); continue; case ADD_RED: PaintRed.instance().addPaint( (IBlockState) nbt.getCompoundTag(PAINTED), (IBlockState) nbt.getCompoundTag(NEWBLOCK)); continue; case REMOVE_RED: PaintRed.instance().removeRecipe( (IBlockState) nbt.getCompoundTag(PAINTED), (IBlockState) nbt.getCompoundTag(NEWBLOCK)); continue; case ADD_SILVER: PaintSilver.instance().addPaint( (IBlockState) nbt.getCompoundTag(PAINTED), (IBlockState) nbt.getCompoundTag(NEWBLOCK)); continue; case REMOVE_SILVER: PaintSilver.instance().removeRecipe( (IBlockState) nbt.getCompoundTag(PAINTED), (IBlockState) nbt.getCompoundTag(NEWBLOCK)); continue; case ADD_WHITE: PaintWhite.instance().addPaint( (IBlockState) nbt.getCompoundTag(PAINTED), (IBlockState) nbt.getCompoundTag(NEWBLOCK)); continue; case REMOVE_WHITE: PaintWhite.instance().removeRecipe( (IBlockState) nbt.getCompoundTag(PAINTED), (IBlockState) nbt.getCompoundTag(NEWBLOCK)); continue; case ADD_YELLOW: PaintYellow.instance().addPaint( (IBlockState) nbt.getCompoundTag(PAINTED), (IBlockState) nbt.getCompoundTag(NEWBLOCK)); continue; case REMOVE_YELLOW: PaintYellow.instance().removeRecipe( (IBlockState) nbt.getCompoundTag(PAINTED), (IBlockState) nbt.getCompoundTag(NEWBLOCK)); continue; } CTDPaint.logger.error("CTD Paint received an invalid IMC from " + message.getSender() + "! 
Key was " + message.key); } catch (Exception e) { CTDPaint.logger.error("CTD Paint received a broken IMC from " + message.getSender() + "!"); e.printStackTrace(); } } } /* * IMC Strings */ static final String PAINTED = "painted"; static final String NEWBLOCK = "newblock"; public static final String ADD_BLACK = "add_black_paint"; public static final String REMOVE_BLACK = "remove_black_paint"; public static final String ADD_BLUE = "add_blue_paint"; public static final String REMOVE_BLUE = "remove_blue_paint"; public static final String ADD_BROWN = "add_brown_paint"; public static final String REMOVE_BROWN = "remove_brown_paint"; public static final String ADD_CYAN = "add_cyan_paint"; public static final String REMOVE_CYAN = "remove_cyan_paint"; public static final String ADD_GRAY = "add_gray_paint"; public static final String REMOVE_GRAY = "remove_gray_paint"; public static final String ADD_GREEN = "add_green_paint"; public static final String REMOVE_GREEN = "remove_green_paint"; public static final String ADD_LIGHT_BLUE = "add_light_blue_paint"; public static final String REMOVE_LIGHT_BLUE = "remove_light_blue_paint"; public static final String ADD_LIME = "add_lime_paint"; public static final String REMOVE_LIME = "remove_lime_paint"; public static final String ADD_MAGENTA = "add_magenta_paint"; public static final String REMOVE_MAGENTA = "remove_magenta_paint"; public static final String ADD_ORANGE = "add_orange_paint"; public static final String REMOVE_ORANGE = "remove_orange_paint"; public static final String ADD_PINK = "add_pink_paint"; public static final String REMOVE_PINK = "remove_pink_paint"; public static final String ADD_PURPLE = "add_purple_paint"; public static final String REMOVE_PURPLE = "remove_purple_paint"; public static final String ADD_RED = "add_red_paint"; public static final String REMOVE_RED = "remove_red_paint"; public static final String ADD_SILVER = "add_silver_paint"; public static final String REMOVE_SILVER = "remove_silver_paint"; public static final String ADD_WHITE = "add_white_paint"; public static final String REMOVE_WHITE = "remove_white_paint"; public static final String ADD_YELLOW = "add_yellow_paint"; public static final String REMOVE_YELLOW = "remove_yellow_paint"; }
/* -*- mode: Java; c-basic-offset: 2; indent-tabs-mode: nil; coding: utf-8-unix -*- * * Copyright (c) 2013 Edugility LLC. * * Permission is hereby granted, free of charge, to any person * obtaining a copy of this software and associated documentation * files (the "Software"), to deal in the Software without * restriction, including without limitation the rights to use, copy, * modify, merge, publish, distribute, sublicense and/or sell copies * of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be * included in all copies or substantial portions of the Software. * * THIS SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER * DEALINGS IN THE SOFTWARE. * * The original copy of this license is available at * http://www.opensource.org/license/mit-license.html. */ package com.edugility.jaxb; import java.io.IOException; import java.io.Serializable; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map.Entry; import java.util.Map; import java.util.Set; import java.util.logging.Level; import java.util.logging.Logger; import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter; import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapters; import javassist.CannotCompileException; import javassist.ClassPool; import javassist.CtClass; import javassist.NotFoundException; import javassist.bytecode.AnnotationsAttribute; import javassist.bytecode.ClassFile; import javassist.bytecode.ConstPool; import javassist.bytecode.annotation.Annotation; import javassist.bytecode.annotation.AnnotationMemberValue; import javassist.bytecode.annotation.ArrayMemberValue; import javassist.bytecode.annotation.ClassMemberValue; import javassist.bytecode.annotation.MemberValue; public class InterfaceDecorator { protected transient Logger logger; public InterfaceDecorator() { super(); this.logger = this.createLogger(); if (this.logger == null) { this.logger = Logger.getLogger(this.getClass().getName()); } } protected Logger createLogger() { return Logger.getLogger(this.getClass().getName()); } public Modification modify(final String interfaceName, final String adapterClassName) throws CannotCompileException, IOException, NotFoundException { if (this.logger != null && this.logger.isLoggable(Level.FINER)) { this.logger.entering(this.getClass().getName(), "modify", interfaceName); } if (interfaceName == null) { throw new IllegalArgumentException("interfaceName", new NullPointerException("interfaceName")); } if (adapterClassName == null) { throw new IllegalArgumentException("adapterClassName", new NullPointerException("adapterClassName")); } ClassPool classPool = this.getClassPool(interfaceName); if (classPool == null) { classPool = ClassPool.getDefault(); } assert classPool != null; final CtClass interfaceCtClass = classPool.getOrNull(interfaceName); if (interfaceCtClass == null) { throw new IllegalArgumentException("interfaceName"); } assert interfaceCtClass != null; final boolean modified = this.installXmlJavaTypeAdapter(interfaceCtClass, adapterClassName); final 
byte[] bytes = interfaceCtClass.toBytecode(); assert bytes != null; assert bytes.length > 0; final Modification returnValue = new Modification(interfaceCtClass, modified ? Modification.Kind.MODIFIED : Modification.Kind.UNMODIFIED, bytes); if (this.logger != null && this.logger.isLoggable(Level.FINER)) { this.logger.exiting(this.getClass().getName(), "modify", returnValue); } return returnValue; } /** * Returns a Javassist {@link ClassPool} that is appropriate for the * supplied class name. * * <p>The default implementation of this method ignores the {@code * className} parameter and returns the return value of {@link * ClassPool#getDefault()}. For nearly all cases, this is the * correct behavior and this method should not be overridden.</p> * * <p>If overrides of this method opt to return {@code null}, the * return value of {@link ClassPool#getDefault()} will be used * internally instead.</p> * * @param className the class name for which the returned {@link * ClassPool} might be appropriate; may be {@code null} and may * safely be ignored; provided for contextual information only * * @return a {@link ClassPool} instance, or {@code null} * * @see ClassPool * * @see ClassPool#getDefault() */ protected ClassPool getClassPool(final String className) { return ClassPool.getDefault(); } /** * Installs an {@link XmlJavaTypeAdapter} annotation on the supplied * {@link CtClass} or modifies an existing one. * * @param interfaceCtClass the {@link CtClass} to decorate; must not * be {@code null} * * @param adapterClassName the name of the {@link * XmlJavaTypeAdapter} class to use as the value for the {@link * XmlJavaTypeAdapter#value()} attribute; must not be {@code null} * * @return {@code true} if the supplied {@link CtClass} was * modified; {@code false} otherwise * * @exception NotFoundException if Javassist couldn't find something */ private final boolean installXmlJavaTypeAdapter(final CtClass interfaceCtClass, final String adapterClassName) throws NotFoundException { if (interfaceCtClass == null) { throw new IllegalArgumentException("interfaceCtClass", new NullPointerException("interfaceCtClass")); } if (adapterClassName == null) { throw new IllegalArgumentException("adapterClassName", new NullPointerException("adapterClassName")); } boolean modified = false; final ClassFile interfaceClassFile = interfaceCtClass.getClassFile(); assert interfaceClassFile != null; final ConstPool constantPool = interfaceClassFile.getConstPool(); assert constantPool != null; AnnotationsAttribute annotationsAttribute = (AnnotationsAttribute)interfaceClassFile.getAttribute(AnnotationsAttribute.visibleTag); if (annotationsAttribute == null) { annotationsAttribute = new AnnotationsAttribute(constantPool, AnnotationsAttribute.visibleTag); interfaceClassFile.addAttribute(annotationsAttribute); assert annotationsAttribute == interfaceClassFile.getAttribute(AnnotationsAttribute.visibleTag); modified = true; } assert annotationsAttribute != null; Annotation adapterAnnotation = annotationsAttribute.getAnnotation(XmlJavaTypeAdapter.class.getName()); if (adapterAnnotation == null) { ClassPool classPool = this.getClassPool(XmlJavaTypeAdapter.class.getName()); if (classPool == null) { classPool = ClassPool.getDefault(); } assert classPool != null; final CtClass xmlJavaTypeAdapterCtClass = classPool.getOrNull(XmlJavaTypeAdapter.class.getName()); assert xmlJavaTypeAdapterCtClass != null; adapterAnnotation = new Annotation(constantPool, xmlJavaTypeAdapterCtClass); modified = true; } else if 
(adapterAnnotation.getMemberValue("value") == null) { final ArrayMemberValue amv = new ArrayMemberValue(constantPool); amv.setValue(new AnnotationMemberValue[0]); adapterAnnotation.addMemberValue("value", amv); modified = true; } assert adapterAnnotation != null; assert adapterAnnotation.getMemberValue("value") != null; modified = this.installXmlJavaTypeAdapter(adapterAnnotation, adapterClassName, constantPool) || modified; /* * You would think this line would be required ONLY in the case * where the annotation itself was not found. But you actually * have to add it to its containing AnnotationsAttribute in ALL * cases. This doesn't make any sense. See * http://stackoverflow.com/questions/8689156/why-does-javassist-insist-on-looking-for-a-default-annotation-value-when-one-is/8689214#8689214 * for details. * * Additionally, you must re-add the annotation as the last * operation here in all cases. Otherwise the changes made by the * installXmlJavaTypeAdapter() method above are not actually made * permanent. */ if (modified) { annotationsAttribute.addAnnotation(adapterAnnotation); } return modified; } private final boolean installXmlJavaTypeAdapter(Annotation adapterAnnotation, final String adapterClassName, final ConstPool constantPool) throws NotFoundException { if (this.logger != null && this.logger.isLoggable(Level.FINER)) { this.logger.entering(this.getClass().getName(), "installXmlJavaTypeAdapter", new Object[] { adapterAnnotation, constantPool }); } if (adapterClassName == null) { throw new IllegalArgumentException("adapterClassName", new NullPointerException("adapterClassName")); } if (!XmlJavaTypeAdapter.class.getName().equals(adapterAnnotation.getTypeName())) { throw new IllegalArgumentException("Wrong annotation: " + adapterAnnotation.getTypeName()); } boolean modified = false; ClassPool classPool = this.getClassPool(XmlJavaTypeAdapter.class.getName()); if (classPool == null) { classPool = ClassPool.getDefault(); } assert classPool != null; final CtClass xmlJavaTypeAdapterCtClass = classPool.get(XmlJavaTypeAdapter.class.getName()); assert xmlJavaTypeAdapterCtClass != null; if (adapterAnnotation != null) { // Preexisting final ClassMemberValue v = (ClassMemberValue)adapterAnnotation.getMemberValue("value"); assert v != null; final String existingClassName = v.getValue(); if (adapterClassName.equals(existingClassName)) { // The annotation is already correctly specified // TODO do something; return? 
return false; } } else { adapterAnnotation = this.newXmlJavaTypeAdapter(constantPool); modified = true; } assert adapterAnnotation != null; assert XmlJavaTypeAdapter.class.getName().equals(adapterAnnotation.getTypeName()); modified = setXmlAdapter(adapterAnnotation, adapterClassName) || modified; assert adapterClassName.equals(((ClassMemberValue)adapterAnnotation.getMemberValue("value")).getValue()); System.out.println("Modified: " + modified); return modified; } private final Annotation newXmlJavaTypeAdapter(final ConstPool constantPool) throws NotFoundException { ClassPool classPool = this.getClassPool(XmlJavaTypeAdapter.class.getName()); if (classPool == null) { classPool = ClassPool.getDefault(); } assert classPool != null; return new Annotation(constantPool, classPool.getOrNull(XmlJavaTypeAdapter.class.getName())); } private static final boolean setXmlAdapter(final Annotation adapterAnnotation, final String adapterClassName) { if (adapterClassName == null) { throw new IllegalArgumentException("adapterClassName", new NullPointerException("adapterClassName")); } if (adapterAnnotation == null) { throw new IllegalArgumentException("adapterAnnotation", new NullPointerException("adapterAnnotation")); } if (!XmlJavaTypeAdapter.class.getName().equals(adapterAnnotation.getTypeName())) { throw new IllegalArgumentException("adapterAnnotation does not represent " + XmlJavaTypeAdapter.class.getName()); } // Retrieve the "holder" for the value() annotation // attribute ("FooToFooImplAdapter.class" in the // following sample: // // @XmlJavaTypeAdapter(type = Foo.class, value = FooToFooImplAdapter.class) // final ClassMemberValue adapterClassHolder = (ClassMemberValue)adapterAnnotation.getMemberValue("value"); assert adapterClassHolder != null; final String old = adapterClassHolder.getValue(); // Set the holder's value, thus installing the // annotation's value() value. adapterClassHolder.setValue(adapterClassName); if (old == null) { return adapterClassName != null; } return !old.equals(adapterClassName); } public static final class Modification implements Serializable { private static final long serialVersionUID = 1L; public static enum Kind { UNMODIFIED, MODIFIED; } private final CtClass interfaceCtClass; private final byte[] bytes; private final Kind kind; private Modification(final CtClass interfaceCtClass, final Kind kind, final byte[] bytes) { super(); if (interfaceCtClass == null) { throw new IllegalArgumentException("interfaceCtClass", new NullPointerException("interfaceCtClass")); } this.interfaceCtClass = interfaceCtClass; assert interfaceCtClass.isFrozen(); if (kind == null) { this.kind = Kind.MODIFIED; } else { this.kind = kind; } if (bytes == null) { this.bytes = new byte[0]; } else { this.bytes = bytes; } } public CtClass getInterfaceCtClass() { return this.interfaceCtClass; } public boolean isModified() { return Kind.MODIFIED.equals(this.getKind()); } public final Kind getKind() { return this.kind; } public final byte[] toByteArray() { return this.bytes; } } }
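/*
 * Illustrative sketch (not part of the library): a minimal use of
 * InterfaceDecorator#modify(String, String). The interface and adapter class names are
 * hypothetical and must be resolvable by the ClassPool; the resulting bytecode would
 * normally be written to disk or handed to a custom ClassLoader (not shown).
 */
import com.edugility.jaxb.InterfaceDecorator;

class InterfaceDecoratorSketch {
    public static void main(String[] args) throws Exception {
        InterfaceDecorator decorator = new InterfaceDecorator();
        InterfaceDecorator.Modification mod =
            decorator.modify("com.example.Foo", "com.example.FooToFooImplAdapter");
        if (mod.isModified()) {
            // The interface, now carrying @XmlJavaTypeAdapter(value = FooToFooImplAdapter.class).
            byte[] bytecode = mod.toByteArray();
            System.out.println("rewrote " + mod.getInterfaceCtClass().getName()
                + " (" + bytecode.length + " bytes)");
        }
    }
}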
/** * Copyright (C) 2011-2012 Typesafe Inc. <http://typesafe.com> */ package com.typesafe.config.impl; import java.io.File; import java.io.IOException; import java.net.MalformedURLException; import java.net.URL; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.EnumMap; import java.util.Iterator; import java.util.List; import java.util.Map; import com.typesafe.config.ConfigException; import com.typesafe.config.ConfigOrigin; import com.typesafe.config.impl.SerializedConfigValue.SerializedField; // it would be cleaner to have a class hierarchy for various origin types, // but was hoping this would be enough simpler to be a little messy. eh. final class SimpleConfigOrigin implements ConfigOrigin { final private String description; final private int lineNumber; final private int endLineNumber; final private OriginType originType; final private String urlOrNull; final private List<String> commentsOrNull; protected SimpleConfigOrigin(String description, int lineNumber, int endLineNumber, OriginType originType, String urlOrNull, List<String> commentsOrNull) { if (description == null) throw new ConfigException.BugOrBroken("description may not be null"); this.description = description; this.lineNumber = lineNumber; this.endLineNumber = endLineNumber; this.originType = originType; this.urlOrNull = urlOrNull; this.commentsOrNull = commentsOrNull; } static SimpleConfigOrigin newSimple(String description) { return new SimpleConfigOrigin(description, -1, -1, OriginType.GENERIC, null, null); } static SimpleConfigOrigin newFile(String filename) { String url; try { url = (new File(filename)).toURI().toURL().toExternalForm(); } catch (MalformedURLException e) { url = null; } return new SimpleConfigOrigin(filename, -1, -1, OriginType.FILE, url, null); } static SimpleConfigOrigin newURL(URL url) { String u = url.toExternalForm(); return new SimpleConfigOrigin(u, -1, -1, OriginType.URL, u, null); } static SimpleConfigOrigin newResource(String resource, URL url) { return new SimpleConfigOrigin(resource, -1, -1, OriginType.RESOURCE, url != null ? url.toExternalForm() : null, null); } static SimpleConfigOrigin newResource(String resource) { return newResource(resource, null); } SimpleConfigOrigin setLineNumber(int lineNumber) { if (lineNumber == this.lineNumber && lineNumber == this.endLineNumber) { return this; } else { return new SimpleConfigOrigin(this.description, lineNumber, lineNumber, this.originType, this.urlOrNull, this.commentsOrNull); } } SimpleConfigOrigin addURL(URL url) { return new SimpleConfigOrigin(this.description, this.lineNumber, this.endLineNumber, this.originType, url != null ? url.toExternalForm() : null, this.commentsOrNull); } SimpleConfigOrigin setComments(List<String> comments) { if (ConfigImplUtil.equalsHandlingNull(comments, this.commentsOrNull)) { return this; } else { return new SimpleConfigOrigin(this.description, this.lineNumber, this.endLineNumber, this.originType, this.urlOrNull, comments); } } @Override public String description() { // not putting the URL in here for files and resources, because people // parsing "file: line" syntax would hit the ":" in the URL. 
if (lineNumber < 0) { return description; } else if (endLineNumber == lineNumber) { return description + ": " + lineNumber; } else { return description + ": " + lineNumber + "-" + endLineNumber; } } @Override public boolean equals(Object other) { if (other instanceof SimpleConfigOrigin) { SimpleConfigOrigin otherOrigin = (SimpleConfigOrigin) other; return this.description.equals(otherOrigin.description) && this.lineNumber == otherOrigin.lineNumber && this.endLineNumber == otherOrigin.endLineNumber && this.originType == otherOrigin.originType && ConfigImplUtil.equalsHandlingNull(this.urlOrNull, otherOrigin.urlOrNull); } else { return false; } } @Override public int hashCode() { int h = 41 * (41 + description.hashCode()); h = 41 * (h + lineNumber); h = 41 * (h + endLineNumber); h = 41 * (h + originType.hashCode()); if (urlOrNull != null) h = 41 * (h + urlOrNull.hashCode()); return h; } @Override public String toString() { // the url is only really useful on top of description for resources if (originType == OriginType.RESOURCE && urlOrNull != null) { return "ConfigOrigin(" + description + "," + urlOrNull + ")"; } else { return "ConfigOrigin(" + description + ")"; } } @Override public String filename() { if (originType == OriginType.FILE) { return description; } else if (urlOrNull != null) { URL url; try { url = new URL(urlOrNull); } catch (MalformedURLException e) { return null; } if (url.getProtocol().equals("file")) { return url.getFile(); } else { return null; } } else { return null; } } @Override public URL url() { if (urlOrNull == null) { return null; } else { try { return new URL(urlOrNull); } catch (MalformedURLException e) { return null; } } } @Override public String resource() { if (originType == OriginType.RESOURCE) { return description; } else { return null; } } @Override public int lineNumber() { return lineNumber; } @Override public List<String> comments() { if (commentsOrNull != null) { return commentsOrNull; } else { return Collections.emptyList(); } } static final String MERGE_OF_PREFIX = "merge of "; private static SimpleConfigOrigin mergeTwo(SimpleConfigOrigin a, SimpleConfigOrigin b) { String mergedDesc; int mergedStartLine; int mergedEndLine; List<String> mergedComments; OriginType mergedType; if (a.originType == b.originType) { mergedType = a.originType; } else { mergedType = OriginType.GENERIC; } // first use the "description" field which has no line numbers // cluttering it. String aDesc = a.description; String bDesc = b.description; if (aDesc.startsWith(MERGE_OF_PREFIX)) aDesc = aDesc.substring(MERGE_OF_PREFIX.length()); if (bDesc.startsWith(MERGE_OF_PREFIX)) bDesc = bDesc.substring(MERGE_OF_PREFIX.length()); if (aDesc.equals(bDesc)) { mergedDesc = aDesc; if (a.lineNumber < 0) mergedStartLine = b.lineNumber; else if (b.lineNumber < 0) mergedStartLine = a.lineNumber; else mergedStartLine = Math.min(a.lineNumber, b.lineNumber); mergedEndLine = Math.max(a.endLineNumber, b.endLineNumber); } else { // this whole merge song-and-dance was intended to avoid this case // whenever possible, but we've lost. Now we have to lose some // structured information and cram into a string. // description() method includes line numbers, so use it instead // of description field. 
String aFull = a.description(); String bFull = b.description(); if (aFull.startsWith(MERGE_OF_PREFIX)) aFull = aFull.substring(MERGE_OF_PREFIX.length()); if (bFull.startsWith(MERGE_OF_PREFIX)) bFull = bFull.substring(MERGE_OF_PREFIX.length()); mergedDesc = MERGE_OF_PREFIX + aFull + "," + bFull; mergedStartLine = -1; mergedEndLine = -1; } String mergedURL; if (ConfigImplUtil.equalsHandlingNull(a.urlOrNull, b.urlOrNull)) { mergedURL = a.urlOrNull; } else { mergedURL = null; } if (ConfigImplUtil.equalsHandlingNull(a.commentsOrNull, b.commentsOrNull)) { mergedComments = a.commentsOrNull; } else { mergedComments = new ArrayList<String>(); if (a.commentsOrNull != null) mergedComments.addAll(a.commentsOrNull); if (b.commentsOrNull != null) mergedComments.addAll(b.commentsOrNull); } return new SimpleConfigOrigin(mergedDesc, mergedStartLine, mergedEndLine, mergedType, mergedURL, mergedComments); } private static int similarity(SimpleConfigOrigin a, SimpleConfigOrigin b) { int count = 0; if (a.originType == b.originType) count += 1; if (a.description.equals(b.description)) { count += 1; // only count these if the description field (which is the file // or resource name) also matches. if (a.lineNumber == b.lineNumber) count += 1; if (a.endLineNumber == b.endLineNumber) count += 1; if (ConfigImplUtil.equalsHandlingNull(a.urlOrNull, b.urlOrNull)) count += 1; } return count; } // this picks the best pair to merge, because the pair has the most in // common. we want to merge two lines in the same file rather than something // else with one of the lines; because two lines in the same file can be // better consolidated. private static SimpleConfigOrigin mergeThree(SimpleConfigOrigin a, SimpleConfigOrigin b, SimpleConfigOrigin c) { if (similarity(a, b) >= similarity(b, c)) { return mergeTwo(mergeTwo(a, b), c); } else { return mergeTwo(a, mergeTwo(b, c)); } } static ConfigOrigin mergeOrigins(ConfigOrigin a, ConfigOrigin b) { return mergeTwo((SimpleConfigOrigin) a, (SimpleConfigOrigin) b); } static ConfigOrigin mergeOrigins(List<? extends AbstractConfigValue> stack) { List<ConfigOrigin> origins = new ArrayList<ConfigOrigin>(stack.size()); for (AbstractConfigValue v : stack) { origins.add(v.origin()); } return mergeOrigins(origins); } static ConfigOrigin mergeOrigins(Collection<? extends ConfigOrigin> stack) { if (stack.isEmpty()) { throw new ConfigException.BugOrBroken("can't merge empty list of origins"); } else if (stack.size() == 1) { return stack.iterator().next(); } else if (stack.size() == 2) { Iterator<? 
extends ConfigOrigin> i = stack.iterator(); return mergeTwo((SimpleConfigOrigin) i.next(), (SimpleConfigOrigin) i.next()); } else { List<SimpleConfigOrigin> remaining = new ArrayList<SimpleConfigOrigin>(); for (ConfigOrigin o : stack) { remaining.add((SimpleConfigOrigin) o); } while (remaining.size() > 2) { SimpleConfigOrigin c = remaining.get(remaining.size() - 1); remaining.remove(remaining.size() - 1); SimpleConfigOrigin b = remaining.get(remaining.size() - 1); remaining.remove(remaining.size() - 1); SimpleConfigOrigin a = remaining.get(remaining.size() - 1); remaining.remove(remaining.size() - 1); SimpleConfigOrigin merged = mergeThree(a, b, c); remaining.add(merged); } // should be down to either 1 or 2 return mergeOrigins(remaining); } } Map<SerializedField, Object> toFields() { Map<SerializedField, Object> m = new EnumMap<SerializedField, Object>(SerializedField.class); m.put(SerializedField.ORIGIN_DESCRIPTION, description); if (lineNumber >= 0) m.put(SerializedField.ORIGIN_LINE_NUMBER, lineNumber); if (endLineNumber >= 0) m.put(SerializedField.ORIGIN_END_LINE_NUMBER, endLineNumber); m.put(SerializedField.ORIGIN_TYPE, originType.ordinal()); if (urlOrNull != null) m.put(SerializedField.ORIGIN_URL, urlOrNull); if (commentsOrNull != null) m.put(SerializedField.ORIGIN_COMMENTS, commentsOrNull); return m; } Map<SerializedField, Object> toFieldsDelta(SimpleConfigOrigin baseOrigin) { Map<SerializedField, Object> baseFields; if (baseOrigin != null) baseFields = baseOrigin.toFields(); else baseFields = Collections.<SerializedField, Object> emptyMap(); return fieldsDelta(baseFields, toFields()); } // Here we're trying to avoid serializing the same info over and over // in the common case that child objects have the same origin fields // as their parent objects. e.g. we don't need to store the source // filename with every single value. static Map<SerializedField, Object> fieldsDelta(Map<SerializedField, Object> base, Map<SerializedField, Object> child) { Map<SerializedField, Object> m = new EnumMap<SerializedField, Object>(child); for (Map.Entry<SerializedField, Object> baseEntry : base.entrySet()) { SerializedField f = baseEntry.getKey(); if (m.containsKey(f) && ConfigImplUtil.equalsHandlingNull(baseEntry.getValue(), m.get(f))) { // if field is unchanged, just remove it so we inherit m.remove(f); } else if (!m.containsKey(f)) { // if field has been removed, we have to add a deletion entry switch (f) { case ORIGIN_DESCRIPTION: throw new ConfigException.BugOrBroken("origin missing description field? 
" + child); case ORIGIN_LINE_NUMBER: m.put(SerializedField.ORIGIN_LINE_NUMBER, -1); break; case ORIGIN_END_LINE_NUMBER: m.put(SerializedField.ORIGIN_END_LINE_NUMBER, -1); break; case ORIGIN_TYPE: throw new ConfigException.BugOrBroken("should always be an ORIGIN_TYPE field"); case ORIGIN_URL: m.put(SerializedField.ORIGIN_NULL_URL, ""); break; case ORIGIN_COMMENTS: m.put(SerializedField.ORIGIN_NULL_COMMENTS, ""); break; case ORIGIN_NULL_URL: // FALL THRU case ORIGIN_NULL_COMMENTS: throw new ConfigException.BugOrBroken( "computing delta, base object should not contain " + f + " " + base); case END_MARKER: case ROOT_VALUE: case ROOT_WAS_CONFIG: case UNKNOWN: case VALUE_DATA: case VALUE_ORIGIN: throw new ConfigException.BugOrBroken("should not appear here: " + f); } } else { // field is in base and child, but differs, so leave it } } return m; } static SimpleConfigOrigin fromFields(Map<SerializedField, Object> m) throws IOException { String description = (String) m.get(SerializedField.ORIGIN_DESCRIPTION); Integer lineNumber = (Integer) m.get(SerializedField.ORIGIN_LINE_NUMBER); Integer endLineNumber = (Integer) m.get(SerializedField.ORIGIN_END_LINE_NUMBER); Number originTypeOrdinal = (Number) m.get(SerializedField.ORIGIN_TYPE); if (originTypeOrdinal == null) throw new IOException("Missing ORIGIN_TYPE field"); OriginType originType = OriginType.values()[originTypeOrdinal.byteValue()]; String urlOrNull = (String) m.get(SerializedField.ORIGIN_URL); @SuppressWarnings("unchecked") List<String> commentsOrNull = (List<String>) m.get(SerializedField.ORIGIN_COMMENTS); return new SimpleConfigOrigin(description, lineNumber != null ? lineNumber : -1, endLineNumber != null ? endLineNumber : -1, originType, urlOrNull, commentsOrNull); } static Map<SerializedField, Object> applyFieldsDelta(Map<SerializedField, Object> base, Map<SerializedField, Object> delta) throws IOException { Map<SerializedField, Object> m = new EnumMap<SerializedField, Object>(delta); for (Map.Entry<SerializedField, Object> baseEntry : base.entrySet()) { SerializedField f = baseEntry.getKey(); if (delta.containsKey(f)) { // delta overrides when keys are in both // "m" should already contain the right thing } else { // base has the key and delta does not. // we inherit from base unless a "NULL" key blocks. switch (f) { case ORIGIN_DESCRIPTION: m.put(f, base.get(f)); break; case ORIGIN_URL: if (delta.containsKey(SerializedField.ORIGIN_NULL_URL)) { m.remove(SerializedField.ORIGIN_NULL_URL); } else { m.put(f, base.get(f)); } break; case ORIGIN_COMMENTS: if (delta.containsKey(SerializedField.ORIGIN_NULL_COMMENTS)) { m.remove(SerializedField.ORIGIN_NULL_COMMENTS); } else { m.put(f, base.get(f)); } break; case ORIGIN_NULL_URL: // FALL THRU case ORIGIN_NULL_COMMENTS: // FALL THRU // base objects shouldn't contain these, should just // lack the field. these are only in deltas. 
throw new ConfigException.BugOrBroken( "applying fields, base object should not contain " + f + " " + base); case ORIGIN_END_LINE_NUMBER: // FALL THRU case ORIGIN_LINE_NUMBER: // FALL THRU case ORIGIN_TYPE: m.put(f, base.get(f)); break; case END_MARKER: case ROOT_VALUE: case ROOT_WAS_CONFIG: case UNKNOWN: case VALUE_DATA: case VALUE_ORIGIN: throw new ConfigException.BugOrBroken("should not appear here: " + f); } } } return m; } static SimpleConfigOrigin fromBase(SimpleConfigOrigin baseOrigin, Map<SerializedField, Object> delta) throws IOException { Map<SerializedField, Object> baseFields; if (baseOrigin != null) baseFields = baseOrigin.toFields(); else baseFields = Collections.<SerializedField, Object> emptyMap(); Map<SerializedField, Object> fields = applyFieldsDelta(baseFields, delta); return fromFields(fields); } }
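/*
 * A minimal, self-contained sketch of the "serialize only the differences" idea behind
 * toFieldsDelta()/applyFieldsDelta() above. The real implementation keys on the package-private
 * SerializedField enum and uses explicit ORIGIN_NULL_* markers for deleted fields; this toy
 * version uses plain String keys and simply drops unchanged entries, so it illustrates the
 * shape of the optimization rather than the actual wire format. Class and key names are invented.
 */
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;

final class OriginDeltaSketch {
    // keep only the entries where the child differs from its parent
    static Map<String, Object> delta(Map<String, Object> base, Map<String, Object> child) {
        Map<String, Object> d = new HashMap<>(child);
        for (Map.Entry<String, Object> e : base.entrySet()) {
            if (d.containsKey(e.getKey()) && Objects.equals(d.get(e.getKey()), e.getValue())) {
                d.remove(e.getKey()); // unchanged, so a reader can inherit it from the parent
            }
        }
        return d;
    }

    // reconstruct the child's fields from the parent's fields plus the stored delta
    static Map<String, Object> apply(Map<String, Object> base, Map<String, Object> delta) {
        Map<String, Object> m = new HashMap<>(base);
        m.putAll(delta);
        return m;
    }

    public static void main(String[] args) {
        Map<String, Object> parent = new HashMap<>();
        parent.put("description", "application.conf");
        parent.put("lineNumber", 1);
        Map<String, Object> child = new HashMap<>(parent);
        child.put("lineNumber", 12);
        Map<String, Object> d = delta(parent, child); // only {lineNumber=12} survives
        System.out.println(apply(parent, d));         // {description=application.conf, lineNumber=12}
    }
}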
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.index.mapper.internal; import com.google.common.collect.Iterables; import org.apache.lucene.document.BinaryDocValuesField; import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.Term; import org.apache.lucene.queries.TermsQuery; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.PrefixQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.RegexpQuery; import org.apache.lucene.util.BytesRef; import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MergeMappingException; import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.query.QueryParseContext; import java.io.IOException; import java.util.Collection; import java.util.Iterator; import java.util.List; import java.util.Map; import static org.elasticsearch.index.mapper.core.TypeParsers.parseField; /** * */ public class IdFieldMapper extends MetadataFieldMapper { public static final String NAME = "_id"; public static final String CONTENT_TYPE = "_id"; public static class Defaults { public static final String NAME = IdFieldMapper.NAME; public static final MappedFieldType FIELD_TYPE = new IdFieldType(); static { FIELD_TYPE.setIndexOptions(IndexOptions.NONE); FIELD_TYPE.setStored(false); FIELD_TYPE.setOmitNorms(true); FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER); FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER); FIELD_TYPE.setNames(new MappedFieldType.Names(NAME)); FIELD_TYPE.freeze(); } public static final String PATH = null; } public static class Builder extends MetadataFieldMapper.Builder<Builder, IdFieldMapper> { private String path = Defaults.PATH; public Builder(MappedFieldType existing) { super(Defaults.NAME, existing == null ? 
Defaults.FIELD_TYPE : existing); indexName = Defaults.NAME; } public Builder path(String path) { this.path = path; return builder; } // if we are indexed we use DOCS @Override protected IndexOptions getDefaultIndexOption() { return IndexOptions.DOCS; } @Override public IdFieldMapper build(BuilderContext context) { setupFieldType(context); return new IdFieldMapper(fieldType, path, context.indexSettings()); } } public static class TypeParser implements Mapper.TypeParser { @Override public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException { if (parserContext.indexVersionCreated().onOrAfter(Version.V_2_0_0_beta1)) { throw new MapperParsingException(NAME + " is not configurable"); } Builder builder = new Builder(parserContext.mapperService().fullName(NAME)); parseField(builder, builder.name, node, parserContext); for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) { Map.Entry<String, Object> entry = iterator.next(); String fieldName = Strings.toUnderscoreCase(entry.getKey()); Object fieldNode = entry.getValue(); if (fieldName.equals("path")) { builder.path(fieldNode.toString()); iterator.remove(); } } return builder; } } static final class IdFieldType extends MappedFieldType { public IdFieldType() { setFieldDataType(new FieldDataType("string")); } protected IdFieldType(IdFieldType ref) { super(ref); } @Override public MappedFieldType clone() { return new IdFieldType(this); } @Override public String typeName() { return CONTENT_TYPE; } @Override public String value(Object value) { if (value == null) { return null; } return value.toString(); } @Override public boolean useTermQueryWithQueryString() { return true; } @Override public Query termQuery(Object value, @Nullable QueryParseContext context) { if (indexOptions() != IndexOptions.NONE || context == null) { return super.termQuery(value, context); } final BytesRef[] uids = Uid.createUidsForTypesAndId(context.queryTypes(), value); return new TermsQuery(UidFieldMapper.NAME, uids); } @Override public Query termsQuery(List values, @Nullable QueryParseContext context) { if (indexOptions() != IndexOptions.NONE || context == null) { return super.termsQuery(values, context); } return new TermsQuery(UidFieldMapper.NAME, Uid.createUidsForTypesAndIds(context.queryTypes(), values)); } @Override public Query prefixQuery(String value, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context) { if (indexOptions() != IndexOptions.NONE || context == null) { return super.prefixQuery(value, method, context); } Collection<String> queryTypes = context.queryTypes(); BooleanQuery query = new BooleanQuery(); for (String queryType : queryTypes) { PrefixQuery prefixQuery = new PrefixQuery(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(queryType, BytesRefs.toBytesRef(value)))); if (method != null) { prefixQuery.setRewriteMethod(method); } query.add(prefixQuery, BooleanClause.Occur.SHOULD); } return query; } @Override public Query regexpQuery(String value, int flags, int maxDeterminizedStates, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context) { if (indexOptions() != IndexOptions.NONE || context == null) { return super.regexpQuery(value, flags, maxDeterminizedStates, method, context); } Collection<String> queryTypes = context.queryTypes(); if (queryTypes.size() == 1) { RegexpQuery regexpQuery = new RegexpQuery(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(Iterables.getFirst(queryTypes, 
null), BytesRefs.toBytesRef(value))), flags, maxDeterminizedStates); if (method != null) { regexpQuery.setRewriteMethod(method); } return regexpQuery; } BooleanQuery query = new BooleanQuery(); for (String queryType : queryTypes) { RegexpQuery regexpQuery = new RegexpQuery(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(queryType, BytesRefs.toBytesRef(value))), flags, maxDeterminizedStates); if (method != null) { regexpQuery.setRewriteMethod(method); } query.add(regexpQuery, BooleanClause.Occur.SHOULD); } return query; } } private final String path; public IdFieldMapper(Settings indexSettings, MappedFieldType existing) { this(idFieldType(indexSettings, existing), Defaults.PATH, indexSettings); } protected IdFieldMapper(MappedFieldType fieldType, String path, Settings indexSettings) { super(NAME, fieldType, Defaults.FIELD_TYPE, indexSettings); this.path = path; } private static MappedFieldType idFieldType(Settings indexSettings, MappedFieldType existing) { if (existing != null) { return existing.clone(); } MappedFieldType fieldType = Defaults.FIELD_TYPE.clone(); boolean pre2x = Version.indexCreated(indexSettings).before(Version.V_2_0_0_beta1); if (pre2x && indexSettings.getAsBoolean("index.mapping._id.indexed", true) == false) { fieldType.setTokenized(false); } return fieldType; } public String path() { return this.path; } @Override public void preParse(ParseContext context) throws IOException { if (context.sourceToParse().id() != null) { context.id(context.sourceToParse().id()); super.parse(context); } } @Override public void postParse(ParseContext context) throws IOException { if (context.id() == null && !context.sourceToParse().flyweight()) { throw new MapperParsingException("No id found while parsing the content source"); } // it either get built in the preParse phase, or get parsed... 
} @Override protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException { XContentParser parser = context.parser(); if (parser.currentName() != null && parser.currentName().equals(Defaults.NAME) && parser.currentToken().isValue()) { // we are in the parse Phase String id = parser.text(); if (context.id() != null && !context.id().equals(id)) { throw new MapperParsingException("Provided id [" + context.id() + "] does not match the content one [" + id + "]"); } context.id(id); } // else we are in the pre/post parse phase if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) { fields.add(new Field(fieldType().names().indexName(), context.id(), fieldType())); } if (fieldType().hasDocValues()) { fields.add(new BinaryDocValuesField(fieldType().names().indexName(), new BytesRef(context.id()))); } } @Override protected String contentType() { return CONTENT_TYPE; } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { if (indexCreatedBefore2x == false) { return builder; } boolean includeDefaults = params.paramAsBoolean("include_defaults", false); // if all are defaults, no sense to write it at all if (!includeDefaults && fieldType().stored() == Defaults.FIELD_TYPE.stored() && fieldType().indexOptions() == Defaults.FIELD_TYPE.indexOptions() && path == Defaults.PATH && hasCustomFieldDataSettings() == false) { return builder; } builder.startObject(CONTENT_TYPE); if (includeDefaults || fieldType().stored() != Defaults.FIELD_TYPE.stored()) { builder.field("store", fieldType().stored()); } if (includeDefaults || fieldType().indexOptions() != Defaults.FIELD_TYPE.indexOptions()) { builder.field("index", indexTokenizeOptionToString(fieldType().indexOptions() != IndexOptions.NONE, fieldType().tokenized())); } if (includeDefaults || path != Defaults.PATH) { builder.field("path", path); } if (includeDefaults || hasCustomFieldDataSettings()) { builder.field("fielddata", (Map) fieldType().fieldDataType().getSettings().getAsMap()); } builder.endObject(); return builder; } @Override public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException { // do nothing here, no merging, but also no exception } }
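/*
 * Illustrative-only sketch of the rewrite performed by IdFieldType above when _id itself is not
 * indexed: an _id query is answered from the indexed _uid field, with one clause per candidate
 * type. The "<type>#<id>" uid layout used below is an assumption made for this example (the real
 * code delegates to Uid.createUidAsBytes), and the standalone class name is invented.
 */
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.PrefixQuery;
import org.apache.lucene.search.Query;

import java.util.Arrays;
import java.util.Collection;

final class IdToUidQuerySketch {
    static Query idPrefixToUidQuery(Collection<String> queryTypes, String idPrefix) {
        BooleanQuery query = new BooleanQuery(); // same pre-Lucene-6 construction style as the mapper
        for (String type : queryTypes) {
            // assumed uid layout "<type>#<id>": an id prefix becomes a per-type uid prefix
            query.add(new PrefixQuery(new Term("_uid", type + "#" + idPrefix)),
                    BooleanClause.Occur.SHOULD);
        }
        return query;
    }

    public static void main(String[] args) {
        System.out.println(idPrefixToUidQuery(Arrays.asList("tweet", "user"), "abc"));
    }
}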
package com.github.pozo.bkkinfo.activities.notificationsettings; import java.util.ArrayList; import java.util.Locale; import java.util.Map.Entry; import com.github.pozo.bkkinfo.R; import com.github.pozo.bkkinfo.activities.BasicPreferenceActivity; import com.github.pozo.bkkinfo.activities.LineColorHelper; import com.github.pozo.bkkinfo.db.DbConnector; import com.github.pozo.bkkinfo.model.Line.Type; import com.github.pozo.bkkinfo.tasks.TruncateDatabaseTask; import com.github.pozo.bkkinfo.tasks.RetriveRequiredLinesTask; import android.os.Bundle; import android.app.Activity; import android.content.Context; import android.content.Intent; import android.graphics.Color; import android.graphics.drawable.GradientDrawable; import android.util.TypedValue; import android.view.Gravity; import android.view.LayoutInflater; import android.view.Menu; import android.view.MenuInflater; import android.view.MenuItem; import android.view.View; import android.view.ViewGroup.LayoutParams; import android.widget.LinearLayout; import android.widget.TableRow; import android.widget.TextView; public class NotificationSettingsActivity extends Activity { public static final String CHECKED = "checked"; private static final int OFFSET = 5; private int[] ids = new int[] { R.id.line_first, R.id.line_second, R.id.line_three, R.id.line_four, R.id.line_five }; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_notification_settings); new RetriveRequiredLinesTask(this).execute(); } public void createTable(ArrayList<String> requiredLines) { LinearLayout ll = (LinearLayout) findViewById(R.id.lines_table); ll.addView(createEmptyTableRow()); for (Entry<Type, String[]> lines : AvailableLines.getLines().entrySet()) { for (int i = 0; i < lines.getValue().length; i += OFFSET) { TableRow tableRow = createTableRow(); ll.addView(tableRow); appendRow(i,lines.getKey().name().toLowerCase(Locale.ENGLISH), lines.getValue(), tableRow, requiredLines); } ll.addView(createEmptyTableRow()); } } private void appendRow(int index, String typename, String[] array, TableRow tableRow, ArrayList<String> requiredLines) { for (int j = 0; j < OFFSET; j++) { if(array.length > j + index) { createLineTextView(typename, array[j + index], tableRow, ids[j], requiredLines); } } } private void createLineTextView(String lineType, String lineName, TableRow tableRow, int cellId, ArrayList<String> requiredLines) { TextView textView = (TextView) tableRow.findViewById(cellId); textView.setText(lineName); textView.setTextColor(LineColorHelper.getTextColorByType(lineType, lineName)); correctTextSize(lineName, textView); GradientDrawable bgShape = (GradientDrawable) textView.getBackground(); final int colorByNameAndType = LineColorHelper.getColorByNameAndType(this, lineName, lineType); bgShape.setColor(colorByNameAndType); if(requiredLines.contains(lineName)) { setLineTextBackgroundDeselected(colorByNameAndType, textView); textView.setTag(CHECKED); } else { setLineTextBackgroundSelected(colorByNameAndType, textView); } textView.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { TextView textView = (TextView) view; String tag = (String) textView.getTag(); DbConnector databaseConnection = DbConnector.getInstance(NotificationSettingsActivity.this); if(tag != null && tag.equals(CHECKED)) { setLineTextBackgroundSelected(colorByNameAndType, view); databaseConnection.removeRequiredLine(textView.getText().toString()); textView.setTag(""); } else { 
setLineTextBackgroundDeselected(colorByNameAndType, view); databaseConnection.addRequiredLine(textView.getText().toString()); textView.setTag(CHECKED); } } }); } private void correctTextSize(String lineName, TextView textView) { if(lineName.length()>=4) { textView.setTextSize(TypedValue.COMPLEX_UNIT_SP, 20f); } if(lineName.length()>=6) { textView.setTextSize(TypedValue.COMPLEX_UNIT_SP, 16f); } } private TableRow createTableRow() { LayoutInflater inflater = (LayoutInflater) getSystemService (Context.LAYOUT_INFLATER_SERVICE); TableRow tableRow = (TableRow) inflater.inflate(R.layout.notification_table_line_row, null); tableRow.setLayoutParams(new LayoutParams(android.view.ViewGroup.LayoutParams.FILL_PARENT, android.view.ViewGroup.LayoutParams.WRAP_CONTENT)); tableRow.setGravity(Gravity.CENTER); for(int id : ids) { TextView textView = (TextView) tableRow.findViewById(id); GradientDrawable bgShape = (GradientDrawable) textView.getBackground(); bgShape.setColor(Color.TRANSPARENT); } return tableRow; } private TableRow createEmptyTableRow() { TableRow tableRow = new TableRow(this); tableRow.setGravity(Gravity.CENTER); tableRow.setPadding(15, 15, 15, 15); return tableRow; } @Override public boolean onCreateOptionsMenu(Menu menu) { MenuInflater inflater = getMenuInflater(); inflater.inflate(R.menu.notifications, menu); return true; } @Override public boolean onOptionsItemSelected(MenuItem item) { // Handle item selection switch (item.getItemId()) { case R.id.remove_selections: new TruncateDatabaseTask(this).execute(); return true; case R.id.settings: String packageName = getPackageName(); Intent notificationSettingsActivity = new Intent(this, BasicPreferenceActivity.class); notificationSettingsActivity.setPackage(packageName); notificationSettingsActivity.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP); startActivity(notificationSettingsActivity); return true; default: return super.onOptionsItemSelected(item); } } private void setLineTextBackgroundSelected(final int colorByNameAndType, View textView) { GradientDrawable drawable = new GradientDrawable(); drawable.setColor(colorByNameAndType); drawable.setStroke(0, Color.BLACK); drawable.setShape(GradientDrawable.RECTANGLE); drawable.setCornerRadius(5f); textView.setBackgroundDrawable(drawable); } private void setLineTextBackgroundDeselected(final int colorByNameAndType, View textView) { GradientDrawable drawable = new GradientDrawable(); drawable.setColor(colorByNameAndType); drawable.setStroke(5, getStrokeColorByBackgroundColor(colorByNameAndType)); drawable.setShape(GradientDrawable.RECTANGLE); drawable.setCornerRadius(5f); textView.setBackgroundDrawable(drawable); } private int getStrokeColorByBackgroundColor(int colorByNameAndType) { if((colorByNameAndType == getResources().getColor(R.color.purple)) || (colorByNameAndType == getResources().getColor(R.color.h6)) || (colorByNameAndType == getResources().getColor(R.color.h7)) || (colorByNameAndType == getResources().getColor(R.color.h8)) || (colorByNameAndType == getResources().getColor(R.color.d12)) || (colorByNameAndType == getResources().getColor(R.color.d13)) || (colorByNameAndType == getResources().getColor(R.color.libego)) || (colorByNameAndType == Color.BLACK)) { return Color.RED; } else { return Color.BLACK; } } }
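/*
 * Plain-Java illustration of the OFFSET-based chunking used by createTable()/appendRow() above:
 * every group of up to five line names fills one table row. The sample line names are made up
 * for the example and no Android types are involved.
 */
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

final class RowChunkingSketch {
    private static final int OFFSET = 5; // five cells per row, matching the activity

    static List<List<String>> chunkIntoRows(String[] lineNames) {
        List<List<String>> rows = new ArrayList<>();
        for (int i = 0; i < lineNames.length; i += OFFSET) {
            rows.add(new ArrayList<>(
                    Arrays.asList(lineNames).subList(i, Math.min(i + OFFSET, lineNames.length))));
        }
        return rows;
    }

    public static void main(String[] args) {
        // -> [[4, 6, 47, 49, 56], [59, 61]]
        System.out.println(chunkIntoRows(new String[]{"4", "6", "47", "49", "56", "59", "61"}));
    }
}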
/* Copyright 2001-2003,2006 The Apache Software Foundation Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.apache.batik.ext.awt.geom; import java.awt.Shape; import java.awt.geom.AffineTransform; import java.awt.geom.Arc2D; import java.awt.geom.GeneralPath; import java.awt.geom.PathIterator; import java.awt.geom.Point2D; import java.awt.geom.Rectangle2D; /** * The <code>ExtendedGeneralPath</code> class represents a geometric * path constructed from straight lines, quadratic and cubic (Bezier) * curves and elliptical arc. This class delegates lines and curves to * an enclosed <code>GeneralPath</code>. Elliptical arc is implemented * using an <code>Arc2D</code> in float precision. * * <p><b>Warning</b> : An elliptical arc may be composed of several * path segments. For futher details, see the SVG Appendix&nbsp;F.6 * * @author <a href="mailto:[email protected]">Thierry Kormann</a> * @version $Id$ */ public class ExtendedGeneralPath implements ExtendedShape, Cloneable { /** The enclosed general path. */ protected GeneralPath path; int numVals = 0; int numSeg = 0; float [] values = null; int [] types = null; float mx, my, cx, cy; /** * Constructs a new <code>ExtendedGeneralPath</code>. */ public ExtendedGeneralPath() { path = new GeneralPath(); } /** * Constructs a new <code>ExtendedGeneralPath</code> with the * specified winding rule to control operations that require the * interior of the path to be defined. */ public ExtendedGeneralPath(int rule) { path = new GeneralPath(rule); } /** * Constructs a new <code>ExtendedGeneralPath</code> object with * the specified winding rule and the specified initial capacity * to store path coordinates. */ public ExtendedGeneralPath(int rule, int initialCapacity) { path = new GeneralPath(rule, initialCapacity); } /** * Constructs a new <code>ExtendedGeneralPath</code> object from * an arbitrary <code>Shape</code> object. */ public ExtendedGeneralPath(Shape s) { this(); append(s, false); } /** * Adds an elliptical arc, defined by two radii, an angle from the * x-axis, a flag to choose the large arc or not, a flag to * indicate if we increase or decrease the angles and the final * point of the arc. * * @param rx the x radius of the ellipse * @param ry the y radius of the ellipse * * @param angle the angle from the x-axis of the current * coordinate system to the x-axis of the ellipse in degrees. * * @param largeArcFlag the large arc flag. If true the arc * spanning less than or equal to 180 degrees is chosen, otherwise * the arc spanning greater than 180 degrees is chosen * * @param sweepFlag the sweep flag. If true the line joining * center to arc sweeps through decreasing angles otherwise it * sweeps through increasing angles * * @param x the absolute x coordinate of the final point of the arc. * @param y the absolute y coordinate of the final point of the arc. 
*/ public synchronized void arcTo(float rx, float ry, float angle, boolean largeArcFlag, boolean sweepFlag, float x, float y) { // Ensure radii are valid if (rx == 0 || ry == 0) { lineTo((float) x, (float) y); return; } checkMoveTo(); // check if prev command was moveto // Get the current (x, y) coordinates of the path double x0 = cx; double y0 = cy; if (x0 == x && y0 == y) { // If the endpoints (x, y) and (x0, y0) are identical, then this // is equivalent to omitting the elliptical arc segment entirely. return; } Arc2D arc = computeArc(x0, y0, rx, ry, angle, largeArcFlag, sweepFlag, x, y); if (arc == null) return; AffineTransform t = AffineTransform.getRotateInstance (Math.toRadians(angle), arc.getCenterX(), arc.getCenterY()); Shape s = t.createTransformedShape(arc); path.append(s, true); makeRoom(7); types [numSeg++] = ExtendedPathIterator.SEG_ARCTO; values[numVals++] = rx; values[numVals++] = ry; values[numVals++] = angle; values[numVals++] = largeArcFlag?1:0; values[numVals++] = sweepFlag?1:0; cx = values[numVals++] = x; cy = values[numVals++] = y; } /** * This constructs an unrotated Arc2D from the SVG specification of an * Elliptical arc. To get the final arc you need to apply a rotation * transform such as: * * AffineTransform.getRotateInstance * (angle, arc.getX()+arc.getWidth()/2, arc.getY()+arc.getHeight()/2); */ public static Arc2D computeArc(double x0, double y0, double rx, double ry, double angle, boolean largeArcFlag, boolean sweepFlag, double x, double y) { // // Elliptical arc implementation based on the SVG specification notes // // Compute the half distance between the current and the final point double dx2 = (x0 - x) / 2.0; double dy2 = (y0 - y) / 2.0; // Convert angle from degrees to radians angle = Math.toRadians(angle % 360.0); double cosAngle = Math.cos(angle); double sinAngle = Math.sin(angle); // // Step 1 : Compute (x1, y1) // double x1 = (cosAngle * dx2 + sinAngle * dy2); double y1 = (-sinAngle * dx2 + cosAngle * dy2); // Ensure radii are large enough rx = Math.abs(rx); ry = Math.abs(ry); double Prx = rx * rx; double Pry = ry * ry; double Px1 = x1 * x1; double Py1 = y1 * y1; // check that radii are large enough double radiiCheck = Px1/Prx + Py1/Pry; if (radiiCheck > 1) { rx = Math.sqrt(radiiCheck) * rx; ry = Math.sqrt(radiiCheck) * ry; Prx = rx * rx; Pry = ry * ry; } // // Step 2 : Compute (cx1, cy1) // double sign = (largeArcFlag == sweepFlag) ? -1 : 1; double sq = ((Prx*Pry)-(Prx*Py1)-(Pry*Px1)) / ((Prx*Py1)+(Pry*Px1)); sq = (sq < 0) ? 0 : sq; double coef = (sign * Math.sqrt(sq)); double cx1 = coef * ((rx * y1) / ry); double cy1 = coef * -((ry * x1) / rx); // // Step 3 : Compute (cx, cy) from (cx1, cy1) // double sx2 = (x0 + x) / 2.0; double sy2 = (y0 + y) / 2.0; double cx = sx2 + (cosAngle * cx1 - sinAngle * cy1); double cy = sy2 + (sinAngle * cx1 + cosAngle * cy1); // // Step 4 : Compute the angleStart (angle1) and the angleExtent (dangle) // double ux = (x1 - cx1) / rx; double uy = (y1 - cy1) / ry; double vx = (-x1 - cx1) / rx; double vy = (-y1 - cy1) / ry; double p, n; // Compute the angle start n = Math.sqrt((ux * ux) + (uy * uy)); p = ux; // (1 * ux) + (0 * uy) sign = (uy < 0) ? -1d : 1d; double angleStart = Math.toDegrees(sign * Math.acos(p / n)); // Compute the angle extent n = Math.sqrt((ux * ux + uy * uy) * (vx * vx + vy * vy)); p = ux * vx + uy * vy; sign = (ux * vy - uy * vx < 0) ? 
-1d : 1d; double angleExtent = Math.toDegrees(sign * Math.acos(p / n)); if(!sweepFlag && angleExtent > 0) { angleExtent -= 360f; } else if (sweepFlag && angleExtent < 0) { angleExtent += 360f; } angleExtent %= 360f; angleStart %= 360f; // // We can now build the resulting Arc2D in double precision // Arc2D.Double arc = new Arc2D.Double(); arc.x = cx - rx; arc.y = cy - ry; arc.width = rx * 2.0; arc.height = ry * 2.0; arc.start = -angleStart; arc.extent = -angleExtent; return arc; } /** * Delegates to the enclosed <code>GeneralPath</code>. */ public synchronized void moveTo(float x, float y) { // Don't add moveto to general path unless there is a reason. makeRoom(2); types [numSeg++] = PathIterator.SEG_MOVETO; cx = mx = values[numVals++] = x; cy = my = values[numVals++] = y; } /** * Delegates to the enclosed <code>GeneralPath</code>. */ public synchronized void lineTo(float x, float y) { checkMoveTo(); // check if prev command was moveto path.lineTo(x, y); makeRoom(2); types [numSeg++] = PathIterator.SEG_LINETO; cx = values[numVals++] = x; cy = values[numVals++] = y; } /** * Delegates to the enclosed <code>GeneralPath</code>. */ public synchronized void quadTo(float x1, float y1, float x2, float y2) { checkMoveTo(); // check if prev command was moveto path.quadTo(x1, y1, x2, y2); makeRoom(4); types [numSeg++] = PathIterator.SEG_QUADTO; values[numVals++] = x1; values[numVals++] = y1; cx = values[numVals++] = x2; cy = values[numVals++] = y2; } /** * Delegates to the enclosed <code>GeneralPath</code>. */ public synchronized void curveTo(float x1, float y1, float x2, float y2, float x3, float y3) { checkMoveTo(); // check if prev command was moveto path.curveTo(x1, y1, x2, y2, x3, y3); makeRoom(6); types [numSeg++] = PathIterator.SEG_CUBICTO; values[numVals++] = x1; values[numVals++] = y1; values[numVals++] = x2; values[numVals++] = y2; cx = values[numVals++] = x3; cy = values[numVals++] = y3; } /** * Delegates to the enclosed <code>GeneralPath</code>. */ public synchronized void closePath() { // Don't double close path. if ((numSeg != 0) && (types[numSeg-1] == PathIterator.SEG_CLOSE)) return; // Only close path if the previous command wasn't a moveto if ((numSeg != 0) && (types[numSeg-1] != PathIterator.SEG_MOVETO)) path.closePath(); makeRoom(0); types [numSeg++] = PathIterator.SEG_CLOSE; cx = mx; cy = my; } /** * Checks if previous command was a moveto command, * skipping a close command (if present). */ protected void checkMoveTo() { if (numSeg == 0) return; switch(types[numSeg-1]) { case PathIterator.SEG_MOVETO: path.moveTo(values[numVals-2], values[numVals-1]); break; case PathIterator.SEG_CLOSE: if (numSeg == 1) return; if (types[numSeg-2] == PathIterator.SEG_MOVETO) path.moveTo(values[numVals-2], values[numVals-1]); break; default: break; } } /** * Delegates to the enclosed <code>GeneralPath</code>. */ public void append(Shape s, boolean connect) { append(s.getPathIterator(new AffineTransform()), connect); } /** * Delegates to the enclosed <code>GeneralPath</code>. */ public void append(PathIterator pi, boolean connect) { while (!pi.isDone()) { double [] vals = new double[6]; int type = pi.currentSegment(vals); pi.next(); if (connect && (numVals != 0)) { if (type == PathIterator.SEG_MOVETO) { double x = vals[0]; double y = vals[1]; if ((x != cx) || (y != cy)) { // Change MOVETO to LINETO. type = PathIterator.SEG_LINETO; } else { // Redundent segment (move to current loc) drop it... 
if (pi.isDone()) break; // Nothing interesting type = pi.currentSegment(vals); pi.next(); } } connect = false; } switch(type) { case PathIterator.SEG_CLOSE: closePath(); break; case PathIterator.SEG_MOVETO: moveTo ((float)vals[0], (float)vals[1]); break; case PathIterator.SEG_LINETO: lineTo ((float)vals[0], (float)vals[1]); break; case PathIterator.SEG_QUADTO: quadTo ((float)vals[0], (float)vals[1], (float)vals[2], (float)vals[3]); break; case PathIterator.SEG_CUBICTO: curveTo((float)vals[0], (float)vals[1], (float)vals[2], (float)vals[3], (float)vals[4], (float)vals[5]); break; } } } /** * Delegates to the enclosed <code>GeneralPath</code>. */ public void append(ExtendedPathIterator epi, boolean connect) { while (!epi.isDone()) { float [] vals = new float[7]; int type = epi.currentSegment(vals); epi.next(); if (connect && (numVals != 0)) { if (type == PathIterator.SEG_MOVETO) { float x = vals[0]; float y = vals[1]; if ((x != cx) || (y != cy)) { // Change MOVETO to LINETO. type = PathIterator.SEG_LINETO; } else { // Redundent segment (move to current loc) drop it... if (epi.isDone()) break; // Nothing interesting type = epi.currentSegment(vals); epi.next(); } } connect = false; } switch(type) { case PathIterator.SEG_CLOSE: closePath(); break; case PathIterator.SEG_MOVETO: moveTo ((float)vals[0], (float)vals[1]); break; case PathIterator.SEG_LINETO: lineTo ((float)vals[0], (float)vals[1]); break; case PathIterator.SEG_QUADTO: quadTo ((float)vals[0], (float)vals[1], (float)vals[2], (float)vals[3]); break; case PathIterator.SEG_CUBICTO: curveTo((float)vals[0], (float)vals[1], (float)vals[2], (float)vals[3], (float)vals[4], (float)vals[5]); break; case ExtendedPathIterator.SEG_ARCTO: arcTo (vals[0], vals[1], vals[2], (vals[3]!=0), (vals[4]!=0), vals[5], vals[6]); break; } } } /** * Delegates to the enclosed <code>GeneralPath</code>. */ public synchronized int getWindingRule() { return path.getWindingRule(); } /** * Delegates to the enclosed <code>GeneralPath</code>. */ public void setWindingRule(int rule) { path.setWindingRule(rule); } /** * Delegates to the enclosed <code>GeneralPath</code>. */ public synchronized Point2D getCurrentPoint() { if (numVals == 0) return null; return new Point2D.Double(cx, cy); } /** * Delegates to the enclosed <code>GeneralPath</code>. */ public synchronized void reset() { path.reset(); numSeg = 0; numVals = 0; } /** * Delegates to the enclosed <code>GeneralPath</code>. */ public void transform(AffineTransform at) { if (at.getType() != AffineTransform.TYPE_IDENTITY) throw new IllegalArgumentException ("ExtendedGeneralPaths can not be transformed"); } /** * Delegates to the enclosed <code>GeneralPath</code>. */ public synchronized Shape createTransformedShape(AffineTransform at) { return path.createTransformedShape(at); } /** * Delegates to the enclosed <code>GeneralPath</code>. */ public java.awt.Rectangle getBounds() { return path.getBounds(); } /** * Delegates to the enclosed <code>GeneralPath</code>. */ public synchronized Rectangle2D getBounds2D() { return path.getBounds2D(); } /** * Delegates to the enclosed <code>GeneralPath</code>. */ public boolean contains(double x, double y) { return path.contains(x, y); } /** * Delegates to the enclosed <code>GeneralPath</code>. */ public boolean contains(Point2D p) { return path.contains(p); } /** * Delegates to the enclosed <code>GeneralPath</code>. */ public boolean contains(double x, double y, double w, double h) { return path.contains(x, y, w, h); } /** * Delegates to the enclosed <code>GeneralPath</code>. 
*/ public boolean contains(Rectangle2D r) { return path.contains(r); } /** * Delegates to the enclosed <code>GeneralPath</code>. */ public boolean intersects(double x, double y, double w, double h) { return path.intersects(x, y, w, h); } /** * Delegates to the enclosed <code>GeneralPath</code>. */ public boolean intersects(Rectangle2D r) { return path.intersects(r); } /** * Delegates to the enclosed <code>GeneralPath</code>. */ public PathIterator getPathIterator(AffineTransform at) { return path.getPathIterator(at); } /** * Delegates to the enclosed <code>GeneralPath</code>. */ public PathIterator getPathIterator(AffineTransform at, double flatness) { return path.getPathIterator(at, flatness); } /** * Delegates to the enclosed <code>GeneralPath</code>. */ public ExtendedPathIterator getExtendedPathIterator() { return new EPI(); } class EPI implements ExtendedPathIterator { int segNum = 0; int valsIdx = 0; public int currentSegment() { return types[segNum]; } public int currentSegment(double[] coords) { int ret = types[segNum]; switch (ret) { case SEG_CLOSE: break; case SEG_MOVETO: case SEG_LINETO: coords[0] = values[valsIdx]; coords[1] = values[valsIdx+1]; break; case SEG_QUADTO: coords[0] = values[valsIdx]; coords[1] = values[valsIdx+1]; coords[2] = values[valsIdx+2]; coords[3] = values[valsIdx+3]; break; case SEG_CUBICTO: coords[0] = values[valsIdx]; coords[1] = values[valsIdx+1]; coords[2] = values[valsIdx+2]; coords[3] = values[valsIdx+3]; coords[4] = values[valsIdx+4]; coords[5] = values[valsIdx+5]; break; case SEG_ARCTO: coords[0] = values[valsIdx]; coords[1] = values[valsIdx+1]; coords[2] = values[valsIdx+2]; coords[3] = values[valsIdx+3]; coords[4] = values[valsIdx+4]; coords[5] = values[valsIdx+5]; coords[6] = values[valsIdx+6]; break; } // System.out.println("Seg: [" + segNum + "] type: " + ret + // " vals: [" + coords[0] + ", " + coords[1] + // "]"); return ret; } public int currentSegment(float[] coords) { int ret = types[segNum]; switch (ret) { case SEG_CLOSE: break; case SEG_MOVETO: case SEG_LINETO: coords[0] = (float)values[valsIdx]; coords[1] = (float)values[valsIdx+1]; break; case SEG_QUADTO: coords[0] = (float)values[valsIdx]; coords[1] = (float)values[valsIdx+1]; coords[2] = (float)values[valsIdx+2]; coords[3] = (float)values[valsIdx+3]; break; case SEG_CUBICTO: coords[0] = (float)values[valsIdx]; coords[1] = (float)values[valsIdx+1]; coords[2] = (float)values[valsIdx+2]; coords[3] = (float)values[valsIdx+3]; coords[4] = (float)values[valsIdx+4]; coords[5] = (float)values[valsIdx+5]; break; case SEG_ARCTO: coords[0] = (float)values[valsIdx]; coords[1] = (float)values[valsIdx+1]; coords[2] = (float)values[valsIdx+2]; coords[3] = (float)values[valsIdx+3]; coords[4] = (float)values[valsIdx+4]; coords[5] = (float)values[valsIdx+5]; coords[6] = (float)values[valsIdx+6]; break; } return ret; } public int getWindingRule() { return path.getWindingRule(); } public boolean isDone() { return segNum == numSeg; } public void next() { int type = types[segNum++]; switch (type) { case SEG_CLOSE: break; case SEG_MOVETO: case SEG_LINETO: valsIdx+=2; break; case SEG_QUADTO: valsIdx+=4; break; case SEG_CUBICTO:valsIdx+=6; break; case SEG_ARCTO: valsIdx+=7; break; } } } /** * Delegates to the enclosed <code>GeneralPath</code>. 
*/ public Object clone() { try { ExtendedGeneralPath result = (ExtendedGeneralPath) super.clone(); result.path = (GeneralPath) path.clone(); result.values = new float[values.length]; System.arraycopy(values, 0, result.values, 0, values.length); result.numVals = numVals; result.types = new int[types.length]; System.arraycopy(types, 0, result.types, 0, types.length); result.numSeg = numSeg; return result; } catch (CloneNotSupportedException ex) {} return null; } private void makeRoom(int numValues) { if (values == null) { values = new float[2*numValues]; types = new int[2]; numVals = 0; numSeg = 0; return; } if ((numVals + numValues) > values.length) { int nlen = values.length*2; if (nlen < (numVals + numValues)) nlen = numVals + numValues; float [] nvals = new float[nlen]; System.arraycopy(values, 0, nvals, 0, numVals); values = nvals; } if (numSeg == types.length) { int [] ntypes = new int[types.length*2]; System.arraycopy(types, 0, ntypes, 0, types.length); types = ntypes; } } }
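/*
 * Small usage sketch for computeArc(...) above: build the unrotated Arc2D for an SVG-style arc
 * segment and then apply the rotation described in its javadoc. The endpoint, radii and angle
 * values are arbitrary sample numbers chosen for the example.
 */
import java.awt.Shape;
import java.awt.geom.AffineTransform;
import java.awt.geom.Arc2D;

import org.apache.batik.ext.awt.geom.ExtendedGeneralPath;

final class ComputeArcSketch {
    public static void main(String[] args) {
        // arc from (0,0) to (100,50); radii 60/40; x-axis rotation 30 degrees; large-arc=false, sweep=true
        Arc2D arc = ExtendedGeneralPath.computeArc(0, 0, 60, 40, 30, false, true, 100, 50);
        if (arc != null) {
            AffineTransform t = AffineTransform.getRotateInstance(
                    Math.toRadians(30), arc.getCenterX(), arc.getCenterY());
            Shape rotated = t.createTransformedShape(arc); // the final elliptical arc segment
            System.out.println(rotated.getBounds2D());
        }
    }
}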
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.operator.scalar; import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.core.JsonParseException; import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.core.JsonToken; import com.fasterxml.jackson.core.io.SerializedString; import com.google.common.base.Charsets; import com.google.common.base.Throwables; import com.google.common.collect.ImmutableList; import io.airlift.slice.DynamicSliceOutput; import io.airlift.slice.Slice; import io.airlift.slice.Slices; import java.io.IOException; import static com.fasterxml.jackson.core.JsonFactory.Feature.CANONICALIZE_FIELD_NAMES; import static com.fasterxml.jackson.core.JsonToken.END_ARRAY; import static com.fasterxml.jackson.core.JsonToken.END_OBJECT; import static com.fasterxml.jackson.core.JsonToken.FIELD_NAME; import static com.fasterxml.jackson.core.JsonToken.START_ARRAY; import static com.fasterxml.jackson.core.JsonToken.START_OBJECT; import static com.fasterxml.jackson.core.JsonToken.VALUE_NULL; import static com.google.common.base.Preconditions.checkNotNull; /** * Extracts values from JSON * <p/> * Supports the following JSON path primitives: * <pre> * $ : Root object * . 
or [] : Child operator * [] : Subscript operator for array * </pre> * <p/> * Supported JSON Path Examples: * <pre> * { "store": { * "book": [ * { "category": "reference", * "author": "Nigel Rees", * "title": "Sayings of the Century", * "price": 8.95, * "contributors": [["Adam", "Levine"], ["Bob", "Strong"]] * }, * { "category": "fiction", * "author": "Evelyn Waugh", * "title": "Sword of Honour", * "price": 12.99, * "isbn": "0-553-21311-3", * "last_owner": null * } * ], * "bicycle": { * "color": "red", * "price": 19.95 * } * } * } * </pre> * <p/> * With only scalar values using dot-notation of path: * <pre> * $.store.book[0].author => Nigel Rees * $.store.bicycle.price => 19.95 * $.store.book[0].isbn => NULL (Doesn't exist becomes java null) * $.store.book[1].last_owner => NULL (json null becomes java null) * $.store.book[0].contributors[0][1] => Levine * </pre> * <p/> * With json values using dot-notation of path: * <pre> * $.store.book[0].author => "Nigel Rees" * $.store.bicycle.price => 19.95 * $.store.book[0].isbn => NULL (Doesn't exist becomes java null) * $.store.book[1].last_owner => null (json null becomes the string "null") * $.store.book[0].contributors[0] => ["Adam", "Levine"] * $.store.bicycle => {"color": "red", "price": 19.95} * </pre> * With only scalar values using bracket-notation of path: * <pre> * $["store"]["book"][0]["author"] => Nigel Rees * $["store"]["bicycle"]["price"] => 19.95 * $["store"]["book"][0]["isbn"] => NULL (Doesn't exist becomes java null) * $["store"]["book"][1]["last_owner"] => NULL (json null becomes java null) * $["store"]["book"][0]["contributors"][0][1] => Levine * </pre> * <p/> * With json values using bracket-notation of path: * <pre> * $["store"]["book"][0]["author"] => "Nigel Rees" * $["store"]["bicycle"]["price"] => 19.95 * $["store"]["book"][0]["isbn"] => NULL (Doesn't exist becomes java null) * $["store"]["book"][1]["last_owner"] => null (json null becomes the string "null") * $["store"]["book"][0]["contributors"][0] => ["Adam", "Levine"] * $["store"]["bicycle"] => {"color": "red", "price": 19.95} * </pre> */ public final class JsonExtract { private static final int ESTIMATED_JSON_OUTPUT_SIZE = 512; private static final JsonFactory JSON_FACTORY = new JsonFactory() .disable(CANONICALIZE_FIELD_NAMES); private JsonExtract() {} public static <T> T extract(Slice jsonInput, JsonExtractor<T> jsonExtractor) { checkNotNull(jsonInput, "jsonInput is null"); try { try (JsonParser jsonParser = JSON_FACTORY.createJsonParser(jsonInput.getInput())) { // Initialize by advancing to first token and make sure it exists if (jsonParser.nextToken() == null) { throw new JsonParseException("Missing starting token", jsonParser.getCurrentLocation()); } return jsonExtractor.extract(jsonParser); } } catch (JsonParseException e) { // Return null if we failed to parse something return null; } catch (IOException e) { throw Throwables.propagate(e); } } public static <T> JsonExtractor<T> generateExtractor(String path, JsonExtractor<T> rootExtractor) { ImmutableList<String> tokens = ImmutableList.copyOf(new JsonPathTokenizer(path)); JsonExtractor<T> jsonExtractor = rootExtractor; for (String token : tokens.reverse()) { jsonExtractor = new ObjectFieldJsonExtractor<>(token, jsonExtractor); } return jsonExtractor; } public interface JsonExtractor<T> { /** * Executes the extraction on the existing content of the JsonParser and outputs the match. 
* <p/> * Notes: * <ul> * <li>JsonParser must be on the FIRST token of the value to be processed when extract is called</li> * <li>INVARIANT: when extract() returns, the current token of the parser will be the LAST token of the value</li> * </ul> * * @return the value, or null if not applicable */ T extract(JsonParser jsonParser) throws IOException; } public static class ObjectFieldJsonExtractor<T> implements JsonExtractor<T> { private final SerializedString fieldName; private final JsonExtractor<? extends T> delegate; private final int index; public ObjectFieldJsonExtractor(String fieldName, JsonExtractor<? extends T> delegate) { this.fieldName = new SerializedString(checkNotNull(fieldName, "fieldName is null")); this.delegate = checkNotNull(delegate, "delegate is null"); this.index = tryParseInt(fieldName, -1); } @Override public T extract(JsonParser jsonParser) throws IOException { if (jsonParser.getCurrentToken() == START_OBJECT) { return processJsonObject(jsonParser); } if (jsonParser.getCurrentToken() == START_ARRAY) { return processJsonArray(jsonParser); } throw new JsonParseException("Expected a JSON object or array", jsonParser.getCurrentLocation()); } public T processJsonObject(JsonParser jsonParser) throws IOException { while (!jsonParser.nextFieldName(fieldName)) { if (!jsonParser.hasCurrentToken()) { throw new JsonParseException("Unexpected end of object", jsonParser.getCurrentLocation()); } if (jsonParser.getCurrentToken() == END_OBJECT) { // Unable to find matching field return null; } jsonParser.skipChildren(); // Skip nested structure if currently at the start of one } jsonParser.nextToken(); // Shift to first token of the value return delegate.extract(jsonParser); } public T processJsonArray(JsonParser jsonParser) throws IOException { int currentIndex = 0; while (true) { JsonToken token = jsonParser.nextToken(); if (token == null) { throw new JsonParseException("Unexpected end of array", jsonParser.getCurrentLocation()); } if (token == END_ARRAY) { // Index out of bounds return null; } if (currentIndex == index) { break; } currentIndex++; jsonParser.skipChildren(); // Skip nested structure if currently at the start of one } return delegate.extract(jsonParser); } } public static class ScalarValueJsonExtractor implements JsonExtractor<Slice> { @Override public Slice extract(JsonParser jsonParser) throws IOException { JsonToken token = jsonParser.getCurrentToken(); if (token == null) { throw new JsonParseException("Unexpected end of value", jsonParser.getCurrentLocation()); } if (!token.isScalarValue() || token == VALUE_NULL) { return null; } return Slices.wrappedBuffer(jsonParser.getText().getBytes(Charsets.UTF_8)); } } public static class JsonValueJsonExtractor implements JsonExtractor<Slice> { @Override public Slice extract(JsonParser jsonParser) throws IOException { if (!jsonParser.hasCurrentToken()) { throw new JsonParseException("Unexpected end of value", jsonParser.getCurrentLocation()); } DynamicSliceOutput dynamicSliceOutput = new DynamicSliceOutput(ESTIMATED_JSON_OUTPUT_SIZE); try (JsonGenerator jsonGenerator = JSON_FACTORY.createJsonGenerator(dynamicSliceOutput)) { jsonGenerator.copyCurrentStructure(jsonParser); } return dynamicSliceOutput.slice(); } } public static class JsonSizeExtractor implements JsonExtractor<Long> { @Override public Long extract(JsonParser jsonParser) throws IOException { if (!jsonParser.hasCurrentToken()) { throw new JsonParseException("Unexpected end of value", jsonParser.getCurrentLocation()); } if (jsonParser.getCurrentToken() == 
START_ARRAY) { long length = 0; while (true) { JsonToken token = jsonParser.nextToken(); if (token == null) { return null; } if (token == END_ARRAY) { return length; } jsonParser.skipChildren(); length++; } } if (jsonParser.getCurrentToken() == START_OBJECT) { long length = 0; while (true) { JsonToken token = jsonParser.nextToken(); if (token == null) { return null; } if (token == END_OBJECT) { return length; } if (token == FIELD_NAME) { length++; } else { jsonParser.skipChildren(); } } } return 0L; } } private static int tryParseInt(String fieldName, int defaultValue) { int index = defaultValue; try { index = Integer.parseInt(fieldName); } catch (NumberFormatException ignored) { } return index; } }
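/*
 * Minimal usage sketch for the extractor API above: build an extractor for a dot-notation path
 * once, then run it over a JSON document. It assumes JsonPathTokenizer accepts the "$.a.b" paths
 * shown in the class javadoc; the sample document and expected output are simplified from that
 * javadoc, and the wrapper class name is invented for the example.
 */
import com.facebook.presto.operator.scalar.JsonExtract;
import com.facebook.presto.operator.scalar.JsonExtract.JsonExtractor;
import com.facebook.presto.operator.scalar.JsonExtract.ScalarValueJsonExtractor;
import com.google.common.base.Charsets;
import io.airlift.slice.Slice;
import io.airlift.slice.Slices;

final class JsonExtractSketch {
    public static void main(String[] args) {
        String json = "{\"store\": {\"bicycle\": {\"color\": \"red\", \"price\": 19.95}}}";
        Slice input = Slices.wrappedBuffer(json.getBytes(Charsets.UTF_8));
        // the path is tokenized and wrapped right-to-left into nested ObjectFieldJsonExtractors
        JsonExtractor<Slice> extractor =
                JsonExtract.generateExtractor("$.store.bicycle.color", new ScalarValueJsonExtractor());
        Slice result = JsonExtract.extract(input, extractor);
        System.out.println(result == null ? "null" : result.toStringUtf8()); // prints: red
    }
}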
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.search.aggregations.bucket.significant; import org.apache.lucene.index.*; import org.apache.lucene.index.FilterAtomicReader.FilterTermsEnum; import org.apache.lucene.util.Bits; import org.apache.lucene.util.BytesRef; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchIllegalArgumentException; import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.BytesRefHash; import org.elasticsearch.common.util.IntArray; import org.elasticsearch.common.util.LongArray; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.search.aggregations.AggregationExecutionException; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.Aggregator.BucketAggregationMode; import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude; import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.ValueSourceAggregatorFactory; import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.search.aggregations.support.bytes.BytesValuesSource; import org.elasticsearch.search.aggregations.support.numeric.NumericValuesSource; import org.elasticsearch.search.internal.ContextIndexSearcher; import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; /** * */ public class SignificantTermsAggregatorFactory extends ValueSourceAggregatorFactory implements Releasable { public static final String EXECUTION_HINT_VALUE_MAP = "map"; public static final String EXECUTION_HINT_VALUE_ORDINALS = "ordinals"; static final int INITIAL_NUM_TERM_FREQS_CACHED = 512; private final int requiredSize; private final int shardSize; private final long minDocCount; private final IncludeExclude includeExclude; private final String executionHint; private String indexedFieldName; private FieldMapper mapper; private IntArray termDocFreqs; private BytesRefHash cachedTermOrds; private BigArrays bigArrays; private TermsEnum termsEnum; private int numberOfAggregatorsCreated = 0; public SignificantTermsAggregatorFactory(String name, ValuesSourceConfig valueSourceConfig, int requiredSize, int shardSize, long minDocCount, IncludeExclude includeExclude, String executionHint) { super(name, SignificantStringTerms.TYPE.name(), valueSourceConfig); this.requiredSize = requiredSize; this.shardSize = shardSize; this.minDocCount = minDocCount; this.includeExclude = includeExclude; this.executionHint = executionHint; if (!valueSourceConfig.unmapped()) { 
this.indexedFieldName = valuesSourceConfig.fieldContext().field(); mapper = SearchContext.current().smartNameFieldMapper(indexedFieldName); } bigArrays = SearchContext.current().bigArrays(); } @Override protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent) { return new UnmappedSignificantTermsAggregator(name, requiredSize, minDocCount, aggregationContext, parent, this); } private static boolean hasParentBucketAggregator(Aggregator parent) { if (parent == null) { return false; } else if (parent.bucketAggregationMode() == BucketAggregationMode.PER_BUCKET) { return true; } return hasParentBucketAggregator(parent.parent()); } @Override protected Aggregator create(ValuesSource valuesSource, long expectedBucketsCount, AggregationContext aggregationContext, Aggregator parent) { numberOfAggregatorsCreated++; if (numberOfAggregatorsCreated == 1) { // Setup a termsEnum for use by first aggregator try { SearchContext searchContext = aggregationContext.searchContext(); ContextIndexSearcher searcher = searchContext.searcher(); Terms terms = MultiFields.getTerms(searcher.getIndexReader(), indexedFieldName); // terms can be null if the choice of field is not found in this index if (terms != null) { termsEnum = terms.iterator(null); } } catch (IOException e) { throw new ElasticsearchException("IOException loading background document frequency info", e); } } else if (numberOfAggregatorsCreated == 2) { // When we have > 1 agg we have possibility of duplicate term frequency lookups and // so introduce a cache in the form of a wrapper around the plain termsEnum created // for use with the first agg if (termsEnum != null) { SearchContext searchContext = aggregationContext.searchContext(); termsEnum = new FrequencyCachingTermsEnumWrapper(termsEnum, searchContext.bigArrays(), true, false); } } long estimatedBucketCount = valuesSource.metaData().maxAtomicUniqueValuesCount(); if (estimatedBucketCount < 0) { // there isn't an estimation available.. 50 should be a good start estimatedBucketCount = 50; } // adding an upper bound on the estimation as some atomic field data in the future (binary doc values) and not // going to know their exact cardinality and will return upper bounds in AtomicFieldData.getNumberUniqueValues() // that may be largely over-estimated.. the value chosen here is arbitrary just to play nice with typical CPU cache // // Another reason is that it may be faster to resize upon growth than to start directly with the appropriate size. // And that all values are not necessarily visited by the matches. 
estimatedBucketCount = Math.min(estimatedBucketCount, 512); if (valuesSource instanceof BytesValuesSource) { if (executionHint != null && !executionHint.equals(EXECUTION_HINT_VALUE_MAP) && !executionHint.equals(EXECUTION_HINT_VALUE_ORDINALS)) { throw new ElasticsearchIllegalArgumentException("execution_hint can only be '" + EXECUTION_HINT_VALUE_MAP + "' or '" + EXECUTION_HINT_VALUE_ORDINALS + "', not " + executionHint); } String execution = executionHint; if (!(valuesSource instanceof BytesValuesSource.WithOrdinals)) { execution = EXECUTION_HINT_VALUE_MAP; } else if (includeExclude != null) { execution = EXECUTION_HINT_VALUE_MAP; } if (execution == null) { if ((valuesSource instanceof BytesValuesSource.WithOrdinals) && !hasParentBucketAggregator(parent)) { execution = EXECUTION_HINT_VALUE_ORDINALS; } else { execution = EXECUTION_HINT_VALUE_MAP; } } assert execution != null; if (execution.equals(EXECUTION_HINT_VALUE_ORDINALS)) { assert includeExclude == null; return new SignificantStringTermsAggregator.WithOrdinals(name, factories, (BytesValuesSource.WithOrdinals) valuesSource, estimatedBucketCount, requiredSize, shardSize, minDocCount, aggregationContext, parent, this); } return new SignificantStringTermsAggregator(name, factories, valuesSource, estimatedBucketCount, requiredSize, shardSize, minDocCount, includeExclude, aggregationContext, parent, this); } if (includeExclude != null) { throw new AggregationExecutionException("Aggregation [" + name + "] cannot support the include/exclude " + "settings as it can only be applied to string values"); } if (valuesSource instanceof NumericValuesSource) { if (((NumericValuesSource) valuesSource).isFloatingPoint()) { throw new UnsupportedOperationException("No support for examining floating point numerics"); } return new SignificantLongTermsAggregator(name, factories, (NumericValuesSource) valuesSource, estimatedBucketCount, requiredSize, shardSize, minDocCount, aggregationContext, parent, this); } throw new AggregationExecutionException("significant_terms aggregation cannot be applied to field [" + valuesSourceConfig.fieldContext().field() + "]. It can only be applied to numeric or string fields."); } public long getBackgroundFrequency(BytesRef termBytes) { assert termsEnum != null; // having failed to find a field in the index we don't expect any calls for frequencies long result = 0; try { if (termsEnum.seekExact(termBytes)) { result = termsEnum.docFreq(); } } catch (IOException e) { throw new ElasticsearchException("IOException loading background document frequency info", e); } return result; } public long getBackgroundFrequency(long term) { BytesRef indexedVal = mapper.indexedValueForSearch(term); return getBackgroundFrequency(indexedVal); } @Override public boolean release() throws ElasticsearchException { try { if (termsEnum instanceof Releasable) { ((Releasable) termsEnum).release(); } } finally { termsEnum = null; } return true; } // A specialist TermsEnum wrapper for use in the repeated look-ups of frequency stats. // TODO factor out as a utility class to replace similar org.elasticsearch.search.suggest.phrase.WordScorer.FrequencyCachingTermsEnumWrapper // This implementation is likely to produce less garbage than WordScorer's impl but will need benchmarking/testing for that use case.
static class FrequencyCachingTermsEnumWrapper extends FilterTermsEnum implements Releasable { int currentTermDocFreq = 0; long currentTermTotalFreq = 0; private IntArray termDocFreqs; private LongArray termTotalFreqs; private BytesRefHash cachedTermOrds; protected BigArrays bigArrays; private boolean cacheDocFreqs; private boolean cacheTotalFreqs; private long currentTermOrd; public FrequencyCachingTermsEnumWrapper(TermsEnum delegate, BigArrays bigArrays, boolean cacheDocFreqs, boolean cacheTotalFreqs) { super(delegate); this.bigArrays = bigArrays; this.cacheDocFreqs = cacheDocFreqs; this.cacheTotalFreqs = cacheTotalFreqs; if (cacheDocFreqs) { termDocFreqs = bigArrays.newIntArray(INITIAL_NUM_TERM_FREQS_CACHED, false); } if (cacheTotalFreqs) { termTotalFreqs = bigArrays.newLongArray(INITIAL_NUM_TERM_FREQS_CACHED, false); } cachedTermOrds = new BytesRefHash(INITIAL_NUM_TERM_FREQS_CACHED, bigArrays); } @Override public boolean seekExact(BytesRef text) throws IOException { currentTermDocFreq = 0; currentTermTotalFreq = 0; currentTermOrd = cachedTermOrds.add(text); if (currentTermOrd < 0) { // already seen, initialize instance data with the cached frequencies currentTermOrd = -1 - currentTermOrd; if (cacheDocFreqs) { currentTermDocFreq = termDocFreqs.get(currentTermOrd); } if (cacheTotalFreqs) { currentTermTotalFreq = termTotalFreqs.get(currentTermOrd); } return true; } else { // cache miss - pre-emptively read and cache the required frequency values if (in.seekExact(text)) { if (cacheDocFreqs) { currentTermDocFreq = in.docFreq(); termDocFreqs = bigArrays.grow(termDocFreqs, currentTermOrd + 1); termDocFreqs.set(currentTermOrd, currentTermDocFreq); } if (cacheTotalFreqs) { currentTermTotalFreq = in.totalTermFreq(); termTotalFreqs = bigArrays.grow(termTotalFreqs, currentTermOrd + 1); termTotalFreqs.set(currentTermOrd, currentTermTotalFreq); } return true; } } return false; } @Override public long totalTermFreq() throws IOException { assert cacheTotalFreqs; return currentTermTotalFreq; } @Override public int docFreq() throws IOException { assert cacheDocFreqs; return currentTermDocFreq; } @Override public void seekExact(long ord) throws IOException { throw new UnsupportedOperationException(); } @Override public DocsEnum docs(Bits liveDocs, DocsEnum reuse, int flags) throws IOException { throw new UnsupportedOperationException(); } @Override public DocsAndPositionsEnum docsAndPositions(Bits liveDocs, DocsAndPositionsEnum reuse, int flags) throws IOException { throw new UnsupportedOperationException(); } public SeekStatus seekCeil(BytesRef text) throws IOException { throw new UnsupportedOperationException(); } @Override public BytesRef next() { throw new UnsupportedOperationException(); } @Override public boolean release() throws ElasticsearchException { try { Releasables.release(cachedTermOrds, termDocFreqs, termTotalFreqs); } finally { cachedTermOrds = null; termDocFreqs = null; termTotalFreqs = null; } return true; } } }
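/*
 * Hedged illustration (not Elasticsearch code): FrequencyCachingTermsEnumWrapper above memoizes
 * docFreq()/totalTermFreq() per term so repeated background-frequency lookups hit a cache instead
 * of seeking the index again. The sketch below shows the same memoization pattern with plain JDK
 * types; the class name, package and sample data are assumptions made for this example.
 */
package example;

import java.util.HashMap;
import java.util.Map;
import java.util.function.ToLongFunction;

public class CachingFrequencyLookup {

    private final ToLongFunction<String> delegate;            // the "expensive" lookup, e.g. an index seek
    private final Map<String, Long> cache = new HashMap<>();  // term -> cached document frequency

    public CachingFrequencyLookup(ToLongFunction<String> delegate) {
        this.delegate = delegate;
    }

    /** Returns the cached frequency, consulting the delegate only on the first lookup of a term. */
    public long docFreq(String term) {
        return cache.computeIfAbsent(term, t -> delegate.applyAsLong(t));
    }

    public static void main(String[] args) {
        CachingFrequencyLookup lookup = new CachingFrequencyLookup(term -> {
            System.out.println("seek: " + term); // printed once per distinct term
            return term.length();                // stand-in for termsEnum.docFreq()
        });
        System.out.println(lookup.docFreq("elasticsearch")); // seek, then 13
        System.out.println(lookup.docFreq("elasticsearch")); // served from the cache, 13
    }
}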
/* * Zed Attack Proxy (ZAP) and its related class files. * * ZAP is an HTTP/HTTPS proxy for assessing web application security. * * Copyright 2013 The ZAP Development Team * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.zaproxy.zap.extension.sse; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.IOException; import java.util.ArrayList; import java.util.Calendar; import java.util.Collections; import java.util.Comparator; import java.util.List; import java.util.concurrent.atomic.AtomicInteger; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.parosproxy.paros.network.HttpMessage; import org.parosproxy.paros.network.HttpRequestHeader; import org.zaproxy.zap.ZapGetMethod; import org.zaproxy.zap.extension.sse.db.ServerSentEventStream; public class EventStreamProxy { private static final Logger logger = LogManager.getLogger(EventStreamProxy.class); private static Comparator<EventStreamObserver> observersComparator; /** Server-Sent Events stream state. */ public enum State { CONNECTING, OPEN, CLOSED, // ready state EXCLUDED, INCLUDED; // no Server-Sent Events state, used for new allow/deny listed streams } // private final HttpMessage message; private final BufferedWriter writer; private EventStreamListener listener; private static AtomicInteger streamIdGenerator = new AtomicInteger(0); private AtomicInteger eventIdGenerator = new AtomicInteger(0); /** * Keep track of the last event ID as it is used by subsequent events until a new one is * retrieved. */ private String lastEventId = ""; /** List of observers that are informed in case of a new event.
*/ private List<EventStreamObserver> observers = new ArrayList<>(); private ServerSentEventStream dataStreamObject; public EventStreamProxy( HttpMessage message, BufferedReader reader, BufferedWriter writer, ZapGetMethod method) { // this.message = message; this.writer = writer; listener = new EventStreamListener(this, reader, method); HttpRequestHeader reqHeader = message.getRequestHeader(); dataStreamObject = new ServerSentEventStream(); dataStreamObject.setId(streamIdGenerator.incrementAndGet()); dataStreamObject.setUrl(reqHeader.getURI().toString()); dataStreamObject.setStartTimestamp(Calendar.getInstance().getTimeInMillis()); dataStreamObject.setHost(reqHeader.getHostName()); dataStreamObject.setPort(reqHeader.getHostPort()); // wait until HistoryReference is saved to database while (message.getHistoryRef() == null) { try { Thread.sleep(5); } catch (InterruptedException e) { logger.warn(e.getMessage(), e); } } dataStreamObject.setHistoryId(message.getHistoryRef().getHistoryId()); } public void start() { // TODO use thread pool (new Thread(listener, "ZAP-SSE-Listener")).start(); notifyStateObservers(State.OPEN); } public void stop() { try { logger.debug("Close Server-Sent Events stream #{}", dataStreamObject.getId()); listener.close(); // closes reader writer.close(); notifyStateObservers(State.CLOSED); dataStreamObject.setEndTimestamp(Calendar.getInstance().getTimeInMillis()); } catch (IOException e) { logger.debug("An exception occurred while stopping the proxy:", e); } // TODO close thread also? } /** * Interprets the event according to {@link * http://www.w3.org/TR/eventsource/#event-stream-interpretation}. Call this method if newline * occurred. Do not call for incomplete events. Discard pending data once the end of file is * reached. * * @param event */ public ServerSentEvent processEvent(final String event) { ServerSentEvent sse = new ServerSentEvent(); sse.setTime(Calendar.getInstance().getTimeInMillis()); sse.setStreamId(dataStreamObject.getId()); sse.setRawEvent(event); int colonIndex; String field; String value; for (String line : event.split("\n")) { colonIndex = line.indexOf(":"); field = null; value = null; if (colonIndex == 0) { // line starts with a colon // according to specification line should be ignored, // but we want to keep track of this column // => comment can be viewed in raw data continue; } else if (colonIndex > -1) { field = line.substring(0, colonIndex); int dataIndex = colonIndex + 1; if (line.charAt(dataIndex) == ' ') { // do not include first whitespace dataIndex++; } value = line.substring(dataIndex); } else { // whole line is used as field name field = line; // the empty string is used as value value = ""; } switch (field) { case ServerSentEvent.FIELD_NAME_EVENT: sse.setEventType(value); break; case ServerSentEvent.FIELD_NAME_DATA: sse.appendData(value); break; case ServerSentEvent.FIELD_NAME_ID: lastEventId = value; break; case ServerSentEvent.FIELD_NAME_RETRY: if (value.matches("^[0-9]+")) { sse.setReconnectionTime(Integer.valueOf(value)); } break; default: // ignore the field break; } } // dispatch the event sse.setLastEventId(lastEventId); if (sse.isDataEmpty()) { sse.setEventType(""); } sse.setId(eventIdGenerator.incrementAndGet()); sse.setStreamId(dataStreamObject.getId()); sse.finishData(); logger.debug("Processed Server-Sent Event {}", sse); boolean doForward = notifyObservers(sse); if (doForward) { forward(sse); } return sse; } private void forward(ServerSentEvent sse) { try { // forward event and trigger processing in client via an empty 
line writer.write(sse.getRawEvent() + "\n\n"); writer.flush(); } catch (IOException e) { logger.warn("Forwarding event {} was not possible due to: {}", sse, e.getMessage(), e); stop(); } } private boolean notifyObservers(ServerSentEvent sse) { boolean doForward = true; synchronized (observers) { for (EventStreamObserver observer : observers) { if (!observer.onServerSentEvent(sse)) { doForward = false; break; } } } return doForward; } /** * Helper to inform observers about a new stream {@link State}. Also called when a stream is no * longer deny listed {@link State#INCLUDED} or vice-versa {@link State#EXCLUDED}. */ protected void notifyStateObservers(State state) { synchronized (observers) { for (EventStreamObserver observer : observers) { observer.onServerSentEventStateChange(state, getStreamValues()); } } } private ServerSentEventStream getStreamValues() { return dataStreamObject; } public void addObserver(EventStreamObserver observer) { synchronized (observers) { observers.add(observer); Collections.sort(observers, getObserversComparator()); } } /** * Returns the comparator used for determining order of notification. * * @return the comparator, created lazily on first use */ private static Comparator<EventStreamObserver> getObserversComparator() { if (observersComparator == null) { observersComparator = (o1, o2) -> { int order1 = o1.getServerSentEventObservingOrder(); int order2 = o2.getServerSentEventObservingOrder(); if (order1 < order2) { return -1; } else if (order1 > order2) { return 1; } return 0; }; } return observersComparator; } }
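/*
 * Hedged sketch (illustrative only): the field/value splitting that processEvent() above applies
 * to each line of an event. Per the eventsource specification, a line starting with a colon is a
 * comment, "field: value" splits on the first colon (one optional leading space of the value is
 * dropped), and a line without a colon is a field name with an empty value. The class name,
 * package and sample event are assumptions made for this example.
 */
package example;

public class SseLineParser {

    /** Returns {field, value} for one event-stream line, or null for a comment line. */
    static String[] splitLine(String line) {
        int colonIndex = line.indexOf(':');
        if (colonIndex == 0) {
            return null; // comment line, e.g. ": keep-alive"
        }
        if (colonIndex > -1) {
            int dataIndex = colonIndex + 1;
            if (dataIndex < line.length() && line.charAt(dataIndex) == ' ') {
                dataIndex++; // drop a single leading space before the value
            }
            return new String[] { line.substring(0, colonIndex), line.substring(dataIndex) };
        }
        return new String[] { line, "" }; // whole line is the field name
    }

    public static void main(String[] args) {
        for (String line : "event: update\ndata: hello\nid: 7\nretry: 3000".split("\n")) {
            String[] fieldValue = splitLine(line);
            if (fieldValue != null) {
                System.out.println(fieldValue[0] + " -> " + fieldValue[1]);
            }
        }
    }
}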
/* ### * IP: GHIDRA * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package ghidra.app.plugin.core.debug.gui.time; import java.awt.BorderLayout; import java.util.Collection; import java.util.Objects; import java.util.function.BiConsumer; import java.util.function.Function; import javax.swing.*; import javax.swing.table.TableColumn; import javax.swing.table.TableColumnModel; import com.google.common.collect.Collections2; import docking.widgets.table.*; import docking.widgets.table.DefaultEnumeratedColumnTableModel.EnumeratedTableColumn; import ghidra.framework.model.DomainObject; import ghidra.trace.model.Trace; import ghidra.trace.model.Trace.TraceSnapshotChangeType; import ghidra.trace.model.TraceDomainObjectListener; import ghidra.trace.model.time.TraceSnapshot; import ghidra.trace.model.time.TraceTimeManager; import ghidra.util.table.GhidraTableFilterPanel; public class DebuggerSnapshotTablePanel extends JPanel { protected enum SnapshotTableColumns implements EnumeratedTableColumn<SnapshotTableColumns, SnapshotRow> { SNAP("Snap", Long.class, SnapshotRow::getSnap), TIMESTAMP("Timestamp", String.class, SnapshotRow::getTimeStamp), // TODO: Use Date type here EVENT_THREAD("Event Thread", String.class, SnapshotRow::getEventThreadName), SCHEDULE("Schedule", String.class, SnapshotRow::getSchedule), DESCRIPTION("Description", String.class, SnapshotRow::getDescription, SnapshotRow::setDescription); private final String header; private final Function<SnapshotRow, ?> getter; private final BiConsumer<SnapshotRow, Object> setter; private final Class<?> cls; <T> SnapshotTableColumns(String header, Class<T> cls, Function<SnapshotRow, T> getter) { this(header, cls, getter, null); } @SuppressWarnings("unchecked") <T> SnapshotTableColumns(String header, Class<T> cls, Function<SnapshotRow, T> getter, BiConsumer<SnapshotRow, T> setter) { this.header = header; this.cls = cls; this.getter = getter; this.setter = (BiConsumer<SnapshotRow, Object>) setter; } @Override public Class<?> getValueClass() { return cls; } @Override public Object getValueOf(SnapshotRow row) { return getter.apply(row); } @Override public String getHeader() { return header; } @Override public boolean isEditable(SnapshotRow row) { return setter != null; } @Override public void setValueOf(SnapshotRow row, Object value) { setter.accept(row, value); } } private class SnapshotListener extends TraceDomainObjectListener { public SnapshotListener() { listenForUntyped(DomainObject.DO_OBJECT_RESTORED, e -> objectRestored()); listenFor(TraceSnapshotChangeType.ADDED, this::snapAdded); listenFor(TraceSnapshotChangeType.CHANGED, this::snapChanged); listenFor(TraceSnapshotChangeType.DELETED, this::snapDeleted); } private void objectRestored() { loadSnapshots(); } private void snapAdded(TraceSnapshot snapshot) { if (snapshot.getKey() < 0 && hideScratch) { return; } SnapshotRow row = new SnapshotRow(currentTrace, snapshot); snapshotTableModel.add(row); if (currentSnap == snapshot.getKey()) { snapshotFilterPanel.setSelectedItem(row); } } private void 
snapChanged(TraceSnapshot snapshot) { if (snapshot.getKey() < 0 && hideScratch) { return; } snapshotTableModel.notifyUpdatedWith(row -> row.getSnapshot() == snapshot); } private void snapDeleted(TraceSnapshot snapshot) { if (snapshot.getKey() < 0 && hideScratch) { return; } snapshotTableModel.deleteWith(row -> row.getSnapshot() == snapshot); } } protected final EnumeratedColumnTableModel<SnapshotRow> snapshotTableModel = new DefaultEnumeratedColumnTableModel<>("Snapshots", SnapshotTableColumns.class); protected final GTable snapshotTable; protected final GhidraTableFilterPanel<SnapshotRow> snapshotFilterPanel; protected boolean hideScratch = true; private Trace currentTrace; private Long currentSnap; protected final SnapshotListener listener = new SnapshotListener(); public DebuggerSnapshotTablePanel() { super(new BorderLayout()); snapshotTable = new GTable(snapshotTableModel); snapshotTable.setSelectionMode(ListSelectionModel.SINGLE_SELECTION); add(new JScrollPane(snapshotTable)); snapshotFilterPanel = new GhidraTableFilterPanel<>(snapshotTable, snapshotTableModel); add(snapshotFilterPanel, BorderLayout.SOUTH); TableColumnModel columnModel = snapshotTable.getColumnModel(); TableColumn snapCol = columnModel.getColumn(SnapshotTableColumns.SNAP.ordinal()); snapCol.setPreferredWidth(40); TableColumn timeCol = columnModel.getColumn(SnapshotTableColumns.TIMESTAMP.ordinal()); timeCol.setPreferredWidth(200); TableColumn etCol = columnModel.getColumn(SnapshotTableColumns.EVENT_THREAD.ordinal()); etCol.setPreferredWidth(40); TableColumn schdCol = columnModel.getColumn(SnapshotTableColumns.SCHEDULE.ordinal()); schdCol.setPreferredWidth(60); TableColumn descCol = columnModel.getColumn(SnapshotTableColumns.DESCRIPTION.ordinal()); descCol.setPreferredWidth(200); } private void addNewListeners() { if (currentTrace == null) { return; } currentTrace.addListener(listener); } private void removeOldListeners() { if (currentTrace == null) { return; } currentTrace.removeListener(listener); } public void setTrace(Trace trace) { if (currentTrace == trace) { return; } removeOldListeners(); currentTrace = trace; addNewListeners(); loadSnapshots(); } public Trace getTrace() { return currentTrace; } public void setHideScratchSnapshots(boolean hideScratch) { if (this.hideScratch == hideScratch) { return; } this.hideScratch = hideScratch; if (hideScratch) { deleteScratchSnapshots(); } else { loadScratchSnapshots(); } } protected void loadSnapshots() { snapshotTableModel.clear(); if (currentTrace == null) { return; } TraceTimeManager manager = currentTrace.getTimeManager(); Collection<? extends TraceSnapshot> snapshots = hideScratch ? manager.getSnapshots(0, true, Long.MAX_VALUE, true) : manager.getAllSnapshots(); snapshotTableModel.addAll(Collections2.transform(snapshots, s -> new SnapshotRow(currentTrace, s))); } protected void deleteScratchSnapshots() { snapshotTableModel.deleteWith(s -> s.getSnap() < 0); } protected void loadScratchSnapshots() { if (currentTrace == null) { return; } TraceTimeManager manager = currentTrace.getTimeManager(); snapshotTableModel.addAll(Collections2.transform( manager.getSnapshots(Long.MIN_VALUE, true, 0, false), s -> new SnapshotRow(currentTrace, s))); } public ListSelectionModel getSelectionModel() { return snapshotTable.getSelectionModel(); } public Long getSelectedSnapshot() { SnapshotRow row = snapshotFilterPanel.getSelectedItem(); return row == null ? 
null : row.getSnap(); } public void setSelectedSnapshot(Long snap) { currentSnap = snap; if (snap == null) { snapshotTable.clearSelection(); return; } SnapshotRow sel = snapshotFilterPanel.getSelectedItem(); Long curSnap = sel == null ? null : sel.getSnap(); if (Objects.equals(curSnap, snap)) { return; } SnapshotRow row = snapshotTableModel.findFirst(r -> r.getSnap() == snap); if (row == null) { snapshotTable.clearSelection(); return; } snapshotFilterPanel.setSelectedItem(row); } }
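/*
 * Hedged illustration (not Ghidra code): SnapshotTableColumns above defines each table column as
 * an enum constant carrying its header, value class and getter. The sketch below shows the same
 * enum-driven column pattern on a plain Swing AbstractTableModel; the Row class, column set,
 * package and sample data are assumptions made for this example.
 */
package example;

import java.util.ArrayList;
import java.util.List;
import java.util.function.Function;

import javax.swing.table.AbstractTableModel;

public class EnumColumnTableExample {

    static class Row {
        final long snap;
        final String description;

        Row(long snap, String description) {
            this.snap = snap;
            this.description = description;
        }
    }

    /** Each constant carries its header, value class and getter, like SnapshotTableColumns. */
    enum Columns {
        SNAP("Snap", Long.class, r -> r.snap),
        DESCRIPTION("Description", String.class, r -> r.description);

        final String header;
        final Class<?> cls;
        final Function<Row, ?> getter;

        <T> Columns(String header, Class<T> cls, Function<Row, T> getter) {
            this.header = header;
            this.cls = cls;
            this.getter = getter;
        }
    }

    static class Model extends AbstractTableModel {
        private final List<Row> rows = new ArrayList<>();

        void add(Row row) {
            rows.add(row);
            fireTableRowsInserted(rows.size() - 1, rows.size() - 1);
        }

        @Override
        public int getRowCount() {
            return rows.size();
        }

        @Override
        public int getColumnCount() {
            return Columns.values().length;
        }

        @Override
        public String getColumnName(int col) {
            return Columns.values()[col].header;
        }

        @Override
        public Class<?> getColumnClass(int col) {
            return Columns.values()[col].cls;
        }

        @Override
        public Object getValueAt(int row, int col) {
            return Columns.values()[col].getter.apply(rows.get(row));
        }
    }

    public static void main(String[] args) {
        Model model = new Model();
        model.add(new Row(0, "initial snapshot"));
        System.out.println(model.getColumnName(1) + " = " + model.getValueAt(0, 1));
    }
}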
/* Copyright 2014-2016 Intel Corporation Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package apple.modelio; import apple.NSObject; import apple.foundation.NSArray; import apple.foundation.NSMethodSignature; import apple.foundation.NSSet; import org.moe.natj.c.ann.FunctionPtr; import org.moe.natj.general.NatJ; import org.moe.natj.general.Pointer; import org.moe.natj.general.ann.Generated; import org.moe.natj.general.ann.Library; import org.moe.natj.general.ann.Mapped; import org.moe.natj.general.ann.NInt; import org.moe.natj.general.ann.NUInt; import org.moe.natj.general.ann.Owned; import org.moe.natj.general.ann.Runtime; import org.moe.natj.general.ptr.VoidPtr; import org.moe.natj.objc.Class; import org.moe.natj.objc.ObjCRuntime; import org.moe.natj.objc.SEL; import org.moe.natj.objc.ann.ObjCClassBinding; import org.moe.natj.objc.ann.Selector; import org.moe.natj.objc.map.ObjCObjectMapper; @Generated @Library("ModelIO") @Runtime(ObjCRuntime.class) @ObjCClassBinding public class MDLPhysicallyPlausibleScatteringFunction extends MDLScatteringFunction { static { NatJ.register(); } @Generated protected MDLPhysicallyPlausibleScatteringFunction(Pointer peer) { super(peer); } @Generated @Selector("accessInstanceVariablesDirectly") public static native boolean accessInstanceVariablesDirectly(); @Generated @Owned @Selector("alloc") public static native MDLPhysicallyPlausibleScatteringFunction alloc(); @Owned @Generated @Selector("allocWithZone:") public static native MDLPhysicallyPlausibleScatteringFunction allocWithZone(VoidPtr zone); @Generated @Selector("automaticallyNotifiesObserversForKey:") public static native boolean automaticallyNotifiesObserversForKey(String key); @Generated @Selector("cancelPreviousPerformRequestsWithTarget:") public static native void cancelPreviousPerformRequestsWithTarget(@Mapped(ObjCObjectMapper.class) Object aTarget); @Generated @Selector("cancelPreviousPerformRequestsWithTarget:selector:object:") public static native void cancelPreviousPerformRequestsWithTargetSelectorObject( @Mapped(ObjCObjectMapper.class) Object aTarget, SEL aSelector, @Mapped(ObjCObjectMapper.class) Object anArgument); @Generated @Selector("classFallbacksForKeyedArchiver") public static native NSArray<String> classFallbacksForKeyedArchiver(); @Generated @Selector("classForKeyedUnarchiver") public static native Class classForKeyedUnarchiver(); @Generated @Selector("debugDescription") public static native String debugDescription_static(); @Generated @Selector("description") public static native String description_static(); @Generated @Selector("hash") @NUInt public static native long hash_static(); @Generated @Selector("instanceMethodForSelector:") @FunctionPtr(name = "call_instanceMethodForSelector_ret") public static native NSObject.Function_instanceMethodForSelector_ret instanceMethodForSelector(SEL aSelector); @Generated @Selector("instanceMethodSignatureForSelector:") public static native NSMethodSignature instanceMethodSignatureForSelector(SEL aSelector); @Generated @Selector("instancesRespondToSelector:") public static 
native boolean instancesRespondToSelector(SEL aSelector); @Generated @Selector("isSubclassOfClass:") public static native boolean isSubclassOfClass(Class aClass); @Generated @Selector("keyPathsForValuesAffectingValueForKey:") public static native NSSet<String> keyPathsForValuesAffectingValueForKey(String key); @Generated @Owned @Selector("new") public static native MDLPhysicallyPlausibleScatteringFunction new_objc(); @Generated @Selector("resolveClassMethod:") public static native boolean resolveClassMethod(SEL sel); @Generated @Selector("resolveInstanceMethod:") public static native boolean resolveInstanceMethod(SEL sel); @Generated @Selector("setVersion:") public static native void setVersion_static(@NInt long aVersion); @Generated @Selector("superclass") public static native Class superclass_static(); @Generated @Selector("version") @NInt public static native long version_static(); /** * shape of specular highlight */ @Generated @Selector("anisotropic") public native MDLMaterialProperty anisotropic(); /** * shape of specular highlight */ @Generated @Selector("anisotropicRotation") public native MDLMaterialProperty anisotropicRotation(); /** * like clear acrylic on a car */ @Generated @Selector("clearcoat") public native MDLMaterialProperty clearcoat(); /** * 0 = satin, 1 = glossy */ @Generated @Selector("clearcoatGloss") public native MDLMaterialProperty clearcoatGloss(); @Generated @Selector("init") public native MDLPhysicallyPlausibleScatteringFunction init(); /** * 0 = dielectric, 1 = metallic */ @Generated @Selector("metallic") public native MDLMaterialProperty metallic(); /** * diffuse and specular response */ @Generated @Selector("roughness") public native MDLMaterialProperty roughness(); /** * like velvet */ @Generated @Selector("sheen") public native MDLMaterialProperty sheen(); /** * bias towards surface color */ @Generated @Selector("sheenTint") public native MDLMaterialProperty sheenTint(); /** * specular value */ @Generated @Selector("specularAmount") public native MDLMaterialProperty specularAmount(); /** * bias towards surface color */ @Generated @Selector("specularTint") public native MDLMaterialProperty specularTint(); /** * diffuse shape */ @Generated @Selector("subsurface") public native MDLMaterialProperty subsurface(); /** * currently 1 */ @Generated @Selector("version") @NInt public native long version(); }
package com.github.onsdigital.api.search; import java.net.URI; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.TreeMap; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.ws.rs.GET; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.exception.ExceptionUtils; import com.github.davidcarboni.restolino.framework.Endpoint; import com.github.davidcarboni.restolino.json.Serialiser; import com.github.onsdigital.configuration.Configuration; import com.github.onsdigital.json.ContentType; import com.github.onsdigital.json.timeseries.Timeseries; import com.github.onsdigital.search.bean.AggregatedSearchResult; import com.mongodb.BasicDBObject; import com.mongodb.DB; import com.mongodb.DBCollection; import com.mongodb.DBCursor; import com.mongodb.DBObject; import com.mongodb.MongoClient; import com.mongodb.MongoClientURI; @Endpoint public class SearchConsole { static String mongoUri = Configuration.getMongoDbUri(); static ExecutorService pool = Executors.newCachedThreadPool(); static String neverHadResults = "neverHadResults"; static String currentlyNoResults = "currentlyNoResults"; static String currentlySomeResults = "currentlySomeResults"; static String alwaysHadResults = "alwaysHadResults"; @GET public Object results(HttpServletRequest request, HttpServletResponse response) throws Exception { Serialiser.getBuilder().setPrettyPrinting(); List<DBObject> queryDocuments = getQueryDocuments(); List<QueryCount> queryCounts = countQueries(queryDocuments); JsonResult result = new JsonResult(); sortQueries(queryCounts, result); return result; } static void save(final String query, final int page, final Object searchResult) { if (Timeseries.class.isAssignableFrom(searchResult.getClass())) { saveTimeseries(query, page, (Timeseries) searchResult); } else { saveSearchResult(query, page, (AggregatedSearchResult) searchResult); } } static class JsonResult { int neverHadResults; int currentlyNoResults; int currentlySomeResults; int alwaysHadResults; Map<String, List<QueryCount>> categories; JsonResult() { // Ordered Map for the sake of human-readability: categories = new LinkedHashMap<>(); categories.put(SearchConsole.neverHadResults, new ArrayList<SearchConsole.QueryCount>()); categories.put(SearchConsole.currentlyNoResults, new ArrayList<SearchConsole.QueryCount>()); categories.put(SearchConsole.currentlySomeResults, new ArrayList<SearchConsole.QueryCount>()); categories.put(SearchConsole.alwaysHadResults, new ArrayList<SearchConsole.QueryCount>()); } } private List<DBObject> getQueryDocuments() throws Exception { List<DBObject> result = new ArrayList<>(); MongoClientURI uri = new MongoClientURI(Configuration.getMongoDbUri()); MongoClient client = null; try { // Connect to the database: client = new MongoClient(uri); DB db = client.getDB(uri.getDatabase()); // Get the collection: DBCollection searchTerms = db.getCollection("searchTerms"); DBCursor docs = searchTerms.find(); while (docs.hasNext()) { result.add(docs.next()); } } catch (Exception e) { System.out.println("Error connecting to MongoDB at: " + mongoUri); System.out.println(ExceptionUtils.getStackTrace(e)); throw e; } finally { if (client != null) { client.close(); } } return 
result; } private List<QueryCount> countQueries(List<DBObject> queryDocuments) { // Start with a map to collate queries that are the same: Map<String, QueryCount> queryCounts = new HashMap<>(); for (DBObject doc : queryDocuments) { // Get the count for this query, creating it if necessary: String query = (String) doc.get("query"); if (StringUtils.equals("inf", query)) { System.out.println("inf"); } System.out.println(); QueryCount count = queryCounts.get(query); if (count == null) { count = new QueryCount(query); queryCounts.put(query, count); } // Increment the times this query has been seen: count.count++; // Record the number of results returned by the query on that // occasion. Records with no date are the oldest, so we use a // default date value: Object resultCount = doc.get("results"); Object resultDate = doc.get("date"); Date nullDate = null; if (resultCount != null) { Date date = null; if (resultDate != null) { date = (Date) resultDate; } else { nullDate = toDate(nullDate); date = nullDate; } if (resultCount instanceof Integer) { count.results.put(date, Long.valueOf(resultCount.toString())); } else { count.results.put(date, (Long) resultCount); } } } return new ArrayList<SearchConsole.QueryCount>(queryCounts.values()); } private void sortQueries(List<QueryCount> queryCounts, JsonResult result) { // Sort into "never any results", "always results", // "currently some results" and "currently no results": for (QueryCount queryCount : queryCounts) { // Work out if this query has never/always/sometimes returned // results: boolean none = false; boolean some = false; Date mostRecent = null; for (Date queryDate : queryCount.results.keySet()) { long numberOfResults = queryCount.results.get(queryDate); if (numberOfResults == 0) { none = true; } else { some = true; } if (mostRecent == null || queryDate.after(mostRecent)) { mostRecent = queryDate; } } if (none && some) { // Are we currently getting some results or no results // for this query? long latestResultCount = queryCount.results.get(mostRecent); if (latestResultCount == 0) { result.currentlyNoResults++; result.categories.get(currentlyNoResults).add(queryCount); } else { result.currentlySomeResults++; result.categories.get(currentlySomeResults).add(queryCount); } } else if (none) { result.neverHadResults++; result.categories.get(neverHadResults).add(queryCount); } else if (some) { result.alwaysHadResults++; result.categories.get(alwaysHadResults).add(queryCount); } } // Sort the lists according to the number of searches: for (List<QueryCount> list : result.categories.values()) { Collections.sort(list); } } /** * Generates an approximation of when a record with a null date was created. * The earliest iteration of this feature didn't include a date. * * @param nullDate * Pass in the last result of this method to generate the next * date in the sequence. 
* @return A date, on or after 2014-12-03 */ private Date toDate(Date nullDate) { Date result; if (nullDate == null) { try { result = new SimpleDateFormat("yyyy-MM-dd").parse("2014-12-03"); } catch (ParseException e) { throw new RuntimeException("If you see this, something amazing just happened."); } } else { result = new Date(nullDate.getTime() + 1000); } return result; } private static void saveTimeseries(String query, int page, Timeseries timeseries) { Search search = new Search(); search.query = query; search.page = page; // Single hit: Result result = new Result(); result.name = timeseries.cdid(); result.description = timeseries.name; result.type = ContentType.timeseries; result.uri = timeseries.uri; search.hits.add(result); save(search); } private static void saveSearchResult(String query, int page, AggregatedSearchResult searchResult) { Search search = new Search(); search.query = query; search.page = page; // Add the hits: for (Map<String, Object> hit : searchResult.getAllResults()) { Result result = new Result(); result.name = hit.get("title").toString(); Object lede = hit.get("lede"); // Timeseries results do not have a lede result.description = lede == null ? "" : lede.toString(); result.type = ContentType.valueOf(hit.get("type").toString()); result.uri = URI.create(hit.get("url").toString()); search.hits.add(result); } save(search); updateNoResults(query, searchResult); } private static void save(final Search search) { // Submit to be saved asynchronously. // This minimises response time and we're not too worried about whether // the data get committed - we're mainly after a sample: pool.execute(new Runnable() { @Override public void run() { MongoClientURI uri = new MongoClientURI(Configuration.getMongoDbUri()); MongoClient client = null; try { // Connect to the database: client = new MongoClient(uri); DB db = client.getDB(uri.getDatabase()); // Get the collection: DBCollection searchTerms = db.getCollection("searchTerms"); // Save the record: searchTerms.insert(search.build()); System.out.println("Total: " + searchTerms.getCount()); } catch (Exception e) { System.out.println("Error connecting to MongoDB at: " + mongoUri); System.out.println(ExceptionUtils.getStackTrace(e)); } finally { if (client != null) { client.close(); } } } }); } private static void updateNoResults(String query, AggregatedSearchResult searchResult) { try { if (searchResult.getNumberOfResults() == 0 && StringUtils.equals(StringUtils.trim(StringUtils.lowerCase(query)), "newport explorers")) { searchResult.contentSearchResult.setNumberOfResults(1); searchResult.setSuggestion("The guys at Fields House"); searchResult.setSuggestionBasedResult(true); Map<String, Object> result = new HashMap<>(); result.put("title", "The Newport Explorers"); result.put("lede", "This prototype (\"Alpha\") ONS website was brought to you by, amongst many other heroes, a band of brothers who left kin and country " + "to make this happen - and it's been great. Here's a bit more about the guys.."); result.put("type", ContentType.unknown); result.put("url", "http://davidcarboni.github.io/newport-explorers/"); searchResult.contentSearchResult.getResults().add(result); } } catch (Throwable t) { // We don't want any exceptions propagated.
System.out.println(ExceptionUtils.getStackTrace(t)); } } static class Search extends BasicDBObject { private static final long serialVersionUID = 2138332036592544966L; String query; int page; long results; // Ordered list of results - enables the ranking to be seen List<Result> hits = new ArrayList<SearchConsole.Result>(); DBObject build() { append("query", query); append("page", page); append("results", hits.size()); append("date", new Date()); for (Result hit : hits) { hit.build(); append("hits", hits); } return this; } } static class Result extends BasicDBObject { static final long serialVersionUID = 7760752367684896714L; String name; String description; URI uri; ContentType type; void build() { append("name", name); append("description", description); append("uri", uri.toString()); append("type", type.toString()); } } static class QueryCount implements Comparable<QueryCount> { int count; String query; Map<Date, Long> results = new TreeMap<>(); Date date; QueryCount(String query) { this.query = query; } @Override public int compareTo(QueryCount o) { return o.count - count; } @Override public int hashCode() { int result = 0; if (query != null) { result = query.hashCode(); } return result; } /** * Ignoring null and incompatible types - not going to happen. */ @Override public boolean equals(Object obj) { return StringUtils.equals(query, ((QueryCount) obj).query); } @Override public String toString() { return query + ":" + count; } } }
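/*
 * Hedged sketch (illustrative, not part of the original sources): the never/always/currently
 * categorisation that sortQueries() above applies to each query's per-date result counts, reduced
 * to a standalone method. The class name, package and sample history are assumptions made for
 * this example; the category labels mirror the static strings defined in SearchConsole.
 */
package example;

import java.util.Date;
import java.util.TreeMap;

public class QueryCategoryExample {

    /** Categorises a date-ordered history of result counts for a single query. */
    static String categorise(TreeMap<Date, Long> results) {
        boolean none = false;
        boolean some = false;
        for (long count : results.values()) {
            if (count == 0) {
                none = true;
            } else {
                some = true;
            }
        }
        if (none && some) {
            // Mixed history: decide on the most recent observation.
            return results.lastEntry().getValue() == 0 ? "currentlyNoResults" : "currentlySomeResults";
        }
        return none ? "neverHadResults" : "alwaysHadResults";
    }

    public static void main(String[] args) {
        TreeMap<Date, Long> history = new TreeMap<>();
        history.put(new Date(1000L), 0L); // older search: no results
        history.put(new Date(2000L), 5L); // most recent search: some results
        System.out.println(categorise(history)); // prints currentlySomeResults
    }
}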
// Copyright 2019 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.android.datatransport.runtime.scheduling.jobscheduling; import static android.os.Build.VERSION_CODES.LOLLIPOP; import static com.google.common.truth.Truth.assertThat; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.argThat; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import androidx.test.core.app.ApplicationProvider; import com.google.android.datatransport.Encoding; import com.google.android.datatransport.runtime.EncodedPayload; import com.google.android.datatransport.runtime.EventInternal; import com.google.android.datatransport.runtime.TransportContext; import com.google.android.datatransport.runtime.backends.BackendRegistry; import com.google.android.datatransport.runtime.backends.BackendResponse; import com.google.android.datatransport.runtime.backends.TransportBackend; import com.google.android.datatransport.runtime.firebase.transport.ClientMetrics; import com.google.android.datatransport.runtime.firebase.transport.LogEventDropped; import com.google.android.datatransport.runtime.scheduling.persistence.ClientHealthMetricsStore; import com.google.android.datatransport.runtime.scheduling.persistence.EventStore; import com.google.android.datatransport.runtime.scheduling.persistence.InMemoryEventStore; import com.google.android.datatransport.runtime.scheduling.persistence.PersistedEvent; import com.google.android.datatransport.runtime.synchronization.SynchronizationGuard; import java.nio.charset.Charset; import java.util.Arrays; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.AdditionalAnswers; import org.robolectric.RobolectricTestRunner; import org.robolectric.annotation.Config; @Config(sdk = {LOLLIPOP}) @RunWith(RobolectricTestRunner.class) public class UploaderTest { private static final SynchronizationGuard guard = new SynchronizationGuard() { @Override public <T> T runCriticalSection(CriticalSection<T> criticalSection) { return criticalSection.execute(); } }; private static final String BACKEND_NAME = "backend1"; private static final String ANOTHER_BACKEND_NAME = "backend1"; private static final String CLIENT_HEALTH_METRICS_LOG_SOURCE = "GDT_CLIENT_METRICS"; private static final TransportContext TRANSPORT_CONTEXT = TransportContext.builder().setBackendName(BACKEND_NAME).build(); private static final TransportContext ANOTHER_TRANSPORT_CONTEXT = TransportContext.builder() .setBackendName(ANOTHER_BACKEND_NAME) .setExtras("foo".getBytes()) .build(); private static final EventInternal EVENT = EventInternal.builder() .setTransportName("42") .setEventMillis(1) .setUptimeMillis(2) .setEncodedPayload( new EncodedPayload(Encoding.of("proto"), "Hello".getBytes(Charset.defaultCharset()))) .addMetadata("key1", "value1") .addMetadata("key2", "value2") 
.build(); private static final EventInternal ANOTHER_EVENT = EventInternal.builder() .setTransportName("43") .setEventMillis(1) .setUptimeMillis(2) .setEncodedPayload( new EncodedPayload(Encoding.of("proto"), "Hello".getBytes(Charset.defaultCharset()))) .addMetadata("key1", "value1") .addMetadata("key2", "value2") .build(); private static final int MANY_EVENT_COUNT = 1000; private final EventStore store = spy(new InMemoryEventStore()); private final EventStore mockStore = mock(EventStore.class); private BackendRegistry mockRegistry = mock(BackendRegistry.class); private TransportBackend mockBackend = mock(TransportBackend.class); private WorkScheduler mockScheduler = mock(WorkScheduler.class); private Runnable mockRunnable = mock(Runnable.class); private ClientHealthMetricsStore mockClientHealthMetricsStore = mock(ClientHealthMetricsStore.class); private Uploader uploader = spy( new Uploader( ApplicationProvider.getApplicationContext(), mockRegistry, store, mockScheduler, Runnable::run, guard, () -> 2, () -> 2, mockClientHealthMetricsStore)); @Before public void setUp() { when(mockRegistry.get(BACKEND_NAME)).thenReturn(mockBackend); store.persist(TRANSPORT_CONTEXT, EVENT); } @Test public void upload_noNetwork() { when(uploader.isNetworkAvailable()).thenReturn(Boolean.FALSE); uploader.upload(TRANSPORT_CONTEXT, 1, mockRunnable); // Scheduler must be called with the attempt number incremented. verify(mockScheduler, times(1)).schedule(TRANSPORT_CONTEXT, 2); verify(mockRunnable, times(1)).run(); } @Test public void upload_yesNetwork() { when(mockBackend.send(any())).thenReturn(BackendResponse.ok(1000)); when(uploader.isNetworkAvailable()).thenReturn(Boolean.TRUE); uploader.upload(TRANSPORT_CONTEXT, 1, mockRunnable); verify(uploader, times(1)).logAndUpdateState(TRANSPORT_CONTEXT, 1); verify(mockRunnable, times(1)).run(); } @Test public void logAndUpdateStatus_okResponse() { when(mockBackend.send(any())).thenReturn(BackendResponse.ok(1000)); uploader.logAndUpdateState(TRANSPORT_CONTEXT, 1); verify(store, times(1)).recordSuccess(any()); verify(store, times(1)).recordNextCallTime(TRANSPORT_CONTEXT, 1002); } @Test public void logAndUpdateStatus_nontransientResponse() { when(mockBackend.send(any())).thenReturn(BackendResponse.fatalError()); uploader.logAndUpdateState(TRANSPORT_CONTEXT, 1); verify(store, times(1)).recordSuccess(any()); } @Test public void logAndUpdateStatus_transientReponse() { when(mockBackend.send(any())).thenReturn(BackendResponse.transientError()); uploader.logAndUpdateState(TRANSPORT_CONTEXT, 1); verify(store, times(1)).recordFailure(any()); verify(mockScheduler, times(1)).schedule(TRANSPORT_CONTEXT, 2, true); } @Test public void upload_singleEvent_withInvalidPayloadResponse_shouldRecordLogEventDroppedDueToInvalidPayload() { when(mockBackend.send(any())).thenReturn(BackendResponse.invalidPayload()); uploader.upload(TRANSPORT_CONTEXT, 1, mockRunnable); verify(mockClientHealthMetricsStore, times(1)) .recordLogEventDropped(1, LogEventDropped.Reason.INVALID_PAYLOD, EVENT.getTransportName()); } @Test public void upload_multipleEvents_withInvalidPayloadResponse_shouldRecordLogEventDroppedDueToInvalidPayload() { store.persist(TRANSPORT_CONTEXT, EVENT); store.persist(TRANSPORT_CONTEXT, ANOTHER_EVENT); when(mockBackend.send(any())).thenReturn(BackendResponse.invalidPayload()); uploader.upload(TRANSPORT_CONTEXT, 1, mockRunnable); verify(mockClientHealthMetricsStore, times(1)) .recordLogEventDropped(2, LogEventDropped.Reason.INVALID_PAYLOD, EVENT.getTransportName()); 
verify(mockClientHealthMetricsStore, times(1)) .recordLogEventDropped( 1, LogEventDropped.Reason.INVALID_PAYLOD, ANOTHER_EVENT.getTransportName()); } @Test public void logAndUpdateStatus_manyEvents_shouldUploadAll() { when(mockBackend.send(any())).thenReturn(BackendResponse.ok(1000)); for (int i = 0; i < MANY_EVENT_COUNT; i++) { store.persist(TRANSPORT_CONTEXT, EVENT); } Iterable<PersistedEvent> persistedEvents = store.loadBatch(TRANSPORT_CONTEXT); uploader.logAndUpdateState(TRANSPORT_CONTEXT, 1); assertThat(store.hasPendingEventsFor(TRANSPORT_CONTEXT)).isFalse(); } @Test public void upload_toFlgServer_shouldIncludeClientHealthMetrics() { final ClientMetrics expectedClientMetrics = ClientMetrics.getDefaultInstance(); when(mockRegistry.get(BACKEND_NAME)).thenReturn(mockBackend); when(mockBackend.send(any())).thenReturn(BackendResponse.ok(1000)); when(mockBackend.decorate(any())).then(AdditionalAnswers.returnsFirstArg()); when(mockClientHealthMetricsStore.loadClientMetrics()).thenReturn(expectedClientMetrics); store.persist(ANOTHER_TRANSPORT_CONTEXT, EVENT); uploader.upload(ANOTHER_TRANSPORT_CONTEXT, 0, mockRunnable); verify(mockClientHealthMetricsStore, times(1)).loadClientMetrics(); verify(mockBackend, times(1)) .send( argThat( (backendRequest -> { for (EventInternal eventInternal : backendRequest.getEvents()) { if (eventInternal.getTransportName().equals(CLIENT_HEALTH_METRICS_LOG_SOURCE) && Arrays.equals( eventInternal.getEncodedPayload().getBytes(), expectedClientMetrics.toByteArray())) { return true; } } return false; }))); } }
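/*
 * Hedged sketch (illustrative only): upload_toFlgServer_shouldIncludeClientHealthMetrics above
 * verifies the outgoing request with Mockito's argThat(...) predicate matcher. The minimal JUnit 4
 * test below shows the same verification style against a plain java.util.List mock; the class
 * name, package and sample value are assumptions made for this example.
 */
package example;

import static org.mockito.ArgumentMatchers.argThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;

import java.util.List;

import org.junit.Test;

public class ArgThatExampleTest {

    @Test
    public void add_isVerifiedWithAPredicateMatcher() {
        @SuppressWarnings("unchecked")
        List<String> mockList = mock(List.class);

        mockList.add("GDT_CLIENT_METRICS");

        // Verify with a predicate over the captured argument, like the backendRequest matcher above.
        verify(mockList).add(argThat(value -> value.startsWith("GDT_")));
    }
}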
/* * Copyright 2000-2016 Vaadin Ltd. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package com.vaadin.client.ui; import java.util.HashSet; import java.util.Iterator; import java.util.Set; import com.google.gwt.dom.client.Element; import com.google.gwt.dom.client.Style.Unit; import com.google.gwt.dom.client.Style.Visibility; import com.google.gwt.event.dom.client.ClickEvent; import com.google.gwt.event.dom.client.ClickHandler; import com.google.gwt.user.client.DOM; import com.google.gwt.user.client.Event; import com.google.gwt.user.client.ui.ComplexPanel; import com.google.gwt.user.client.ui.Widget; import com.vaadin.client.ComponentConnector; import com.vaadin.client.VCaption; import com.vaadin.client.WidgetUtil; import com.vaadin.client.ui.TouchScrollDelegate.TouchScrollHandler; import com.vaadin.shared.ComponentConstants; import com.vaadin.shared.ui.accordion.AccordionState; import com.vaadin.shared.ui.tabsheet.TabState; import com.vaadin.shared.ui.tabsheet.TabsheetServerRpc; import com.vaadin.shared.util.SharedUtil; public class VAccordion extends VTabsheetBase { public static final String CLASSNAME = AccordionState.PRIMARY_STYLE_NAME; private Set<Widget> widgets = new HashSet<>(); private StackItem openTab; /** For internal use only. May be removed or replaced in the future. */ public int selectedItemIndex = -1; private final TouchScrollHandler touchScrollHandler; public VAccordion() { super(CLASSNAME); touchScrollHandler = TouchScrollDelegate.enableTouchScrolling(this); } @Override public void renderTab(TabState tabState, int index) { StackItem item; int itemIndex; if (getWidgetCount() <= index) { // Create stackItem and render caption item = new StackItem(); if (getWidgetCount() == 0) { item.addStyleDependentName("first"); } itemIndex = getWidgetCount(); add(item, getElement()); } else { item = getStackItem(index); itemIndex = index; } item.updateCaption(tabState); item.updateTabStyleName(tabState.styleName); item.setVisible(tabState.visible); item.setId(tabState.id); } @Override public void selectTab(int index) { selectedItemIndex = index; } @Override public void setStylePrimaryName(String style) { super.setStylePrimaryName(style); updateStyleNames(style); } @Override public void setStyleName(String style) { super.setStyleName(style); updateStyleNames(style); } protected void updateStyleNames(String primaryStyleName) { for (Widget w : getChildren()) { if (w instanceof StackItem) { StackItem item = (StackItem) w; item.updateStyleNames(primaryStyleName); } } } /** For internal use only. May be removed or replaced in the future. */ public void open(int itemIndex) { StackItem item = (StackItem) getWidget(itemIndex); boolean alreadyOpen = false; if (openTab != null) { if (openTab.isOpen()) { if (openTab == item) { alreadyOpen = true; } else { openTab.close(); } } } if (!alreadyOpen) { item.open(); activeTabIndex = itemIndex; openTab = item; } } /** For internal use only. May be removed or replaced in the future. 
*/ public void close(StackItem item) { if (!item.isOpen()) { return; } item.close(); activeTabIndex = -1; openTab = null; } public void onSelectTab(StackItem item) { final int index = getWidgetIndex(item); if (index != activeTabIndex && !disabled && !readonly && !disabledTabKeys.contains(tabKeys.get(index))) { addStyleDependentName("loading"); connector.getRpcProxy(TabsheetServerRpc.class) .setSelected(tabKeys.get(index).toString()); } } /** * A StackItem has always two children, Child 0 is a VCaption, Child 1 is * the actual child widget. */ public class StackItem extends ComplexPanel implements ClickHandler { private Widget widget; private String id; public void setHeight(int height) { if (height == -1) { super.setHeight(""); content.getStyle().setHeight(0, Unit.PX); } else { super.setHeight((height + getCaptionHeight()) + "px"); content.getStyle().setHeight(height, Unit.PX); content.getStyle().setTop(getCaptionHeight(), Unit.PX); } } public void setId(String newId) { if (!SharedUtil.equals(newId, id)) { if (id != null) { getElement().removeAttribute("id"); } id = newId; if (id != null && !id.isEmpty()) { getElement().setId(id); } } } public Widget getComponent() { return widget; } @Override public void setVisible(boolean visible) { super.setVisible(visible); } public void setHeightFromWidget() { Widget widget = getChildWidget(); if (widget == null) { return; } int paintableHeight = widget.getElement().getOffsetHeight(); setHeight(paintableHeight); } /** * Returns caption width including padding * * @return */ public int getCaptionWidth() { if (caption == null) { return 0; } int captionWidth = caption.getRequiredWidth(); int padding = WidgetUtil.measureHorizontalPaddingAndBorder( caption.getElement(), 18); return captionWidth + padding; } public void setWidth(int width) { if (width == -1) { super.setWidth(""); } else { super.setWidth(width + "px"); } } public int getHeight() { return getOffsetHeight(); } public int getCaptionHeight() { return captionNode.getOffsetHeight(); } private VCaption caption; private boolean open = false; private Element content = DOM.createDiv(); private Element captionNode = DOM.createDiv(); private String styleName; public StackItem() { setElement(DOM.createDiv()); caption = new VCaption(client); caption.addClickHandler(this); super.add(caption, captionNode); DOM.appendChild(captionNode, caption.getElement()); DOM.appendChild(getElement(), captionNode); DOM.appendChild(getElement(), content); updateStyleNames(VAccordion.this.getStylePrimaryName()); touchScrollHandler.addElement(getContainerElement()); close(); } private void updateStyleNames(String primaryStyleName) { content.removeClassName(getStylePrimaryName() + "-content"); captionNode.removeClassName(getStylePrimaryName() + "-caption"); setStylePrimaryName(primaryStyleName + "-item"); updateTabStyleName(getStylePrimaryName()); captionNode.addClassName(getStylePrimaryName() + "-caption"); content.addClassName(getStylePrimaryName() + "-content"); } @Override public void onBrowserEvent(Event event) { onSelectTab(this); } public com.google.gwt.user.client.Element getContainerElement() { return DOM.asOld(content); } public Widget getChildWidget() { return widget; } public void replaceWidget(Widget newWidget) { if (widget != null) { widgets.remove(widget); if (open) { remove(widget); } } widget = newWidget; widgets.add(newWidget); if (open) { add(widget, content); } } public void open() { add(widget, content); open = true; content.getStyle().setTop(getCaptionHeight(), Unit.PX); content.getStyle().setLeft(0, 
Unit.PX); content.getStyle().clearVisibility(); addStyleDependentName("open"); } public void hide() { content.getStyle().setVisibility(Visibility.HIDDEN); } public void close() { if (widget != null) { remove(widget); } content.getStyle().setVisibility(Visibility.HIDDEN); content.getStyle().setTop(-100000, Unit.PX); content.getStyle().setLeft(-100000, Unit.PX); removeStyleDependentName("open"); setHeight(-1); setWidth(""); open = false; } public boolean isOpen() { return open; } /** * Updates the content of the open tab of the accordion. * * This method is mostly for internal use and may change in future * versions. * * @since 7.2 * @param newWidget * new content */ public void setContent(Widget newWidget) { if (widget == null) { widget = newWidget; widgets.add(newWidget); } else if (widget != newWidget) { replaceWidget(newWidget); } if (isOpen() && isDynamicHeight()) { setHeightFromWidget(); } } @Override public void onClick(ClickEvent event) { onSelectTab(this); } public void updateCaption(TabState tabState) { // TODO need to call this because the caption does not have an owner caption.setCaptionAsHtml(isTabCaptionsAsHtml()); caption.updateCaptionWithoutOwner(tabState.caption, !tabState.enabled, hasAttribute(tabState.description), hasAttribute(tabState.componentError), connector.getResourceUrl( ComponentConstants.ICON_RESOURCE + tabState.key)); } private boolean hasAttribute(String string) { return string != null && !string.trim().isEmpty(); } /** * Updates a tabs stylename from the child UIDL * * @param uidl * The child uidl of the tab */ private void updateTabStyleName(String newStyleName) { if (newStyleName != null && newStyleName.length() != 0) { if (!newStyleName.equals(styleName)) { // If we have a new style name if (styleName != null && styleName.length() != 0) { // Remove old style name if present removeStyleDependentName(styleName); } // Set new style name addStyleDependentName(newStyleName); styleName = newStyleName; } } else if (styleName != null) { // Remove the set stylename if no stylename is present in the // uidl removeStyleDependentName(styleName); styleName = null; } } public int getWidgetWidth() { return DOM.getFirstChild(content).getOffsetWidth(); } public boolean contains(ComponentConnector p) { return (getChildWidget() == p.getWidget()); } public boolean isCaptionVisible() { return caption.isVisible(); } } @Override protected void clearPaintables() { clear(); } @Override public Iterator<Widget> getWidgetIterator() { return widgets.iterator(); } @Override public int getTabCount() { return getWidgetCount(); } @Override public void removeTab(int index) { StackItem item = getStackItem(index); remove(item); if (selectedItemIndex == index) { selectedItemIndex = -1; } touchScrollHandler.removeElement(item.getContainerElement()); } @Override public ComponentConnector getTab(int index) { if (index < getWidgetCount()) { StackItem stackItem = getStackItem(index); if (stackItem == null) { return null; } Widget w = stackItem.getChildWidget(); if (w != null) { return getConnectorForWidget(w); } } return null; } /** For internal use only. May be removed or replaced in the future. */ public StackItem getStackItem(int index) { return (StackItem) getWidget(index); } public Iterable<StackItem> getStackItems() { return (Iterable) getChildren(); } public StackItem getOpenStackItem() { return openTab; } }
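/*
 * Hedged illustration (not Vaadin code): open(int) above keeps at most one StackItem open at a
 * time, closing the previously open item unless the same item is selected again. The sketch below
 * models only that bookkeeping with plain JDK types; the class names, package and sample calls are
 * assumptions made for this example.
 */
package example;

import java.util.ArrayList;
import java.util.List;

public class SingleOpenAccordionExample {

    static class Item {
        final String name;
        boolean open;

        Item(String name) {
            this.name = name;
        }
    }

    private final List<Item> items = new ArrayList<>();
    private Item openItem;

    void add(String name) {
        items.add(new Item(name));
    }

    /** Opens the item at the given index, closing whichever item was open before. */
    void open(int index) {
        Item item = items.get(index);
        if (openItem != null && openItem.open && openItem != item) {
            openItem.open = false; // mirrors openTab.close()
        }
        if (!item.open) {
            item.open = true;
            openItem = item;
        }
    }

    public static void main(String[] args) {
        SingleOpenAccordionExample accordion = new SingleOpenAccordionExample();
        accordion.add("first");
        accordion.add("second");
        accordion.open(0);
        accordion.open(1);
        for (Item item : accordion.items) {
            System.out.println(item.name + " open=" + item.open); // first false, second true
        }
    }
}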
/************************************************************************** * copyright file="ComplexProperty.java" company="Microsoft" * Copyright (c) Microsoft Corporation. All rights reserved. * * Defines the ComplexProperty.java. **************************************************************************/ package microsoft.exchange.webservices.data; import java.util.ArrayList; import java.util.List; /** * Represents a property that can be sent to or retrieved from EWS. * * */ @SuppressWarnings("unchecked") @EditorBrowsable(state = EditorBrowsableState.Never) public abstract class ComplexProperty implements ISelfValidate,ComplexFunctionDelegate { /** The xml namespace. */ private XmlNamespace xmlNamespace = XmlNamespace.Types; /** * Initializes a new instance. */ protected ComplexProperty() { } /** * Gets the namespace. * * @return the namespace. */ protected XmlNamespace getNamespace() { return xmlNamespace; } /** * Sets the namespace. * * @param xmlNamespace * the namespace. */ protected void setNamespace(XmlNamespace xmlNamespace) { this.xmlNamespace = xmlNamespace; } /** * Instance was changed. */ protected void changed() { if (!onChangeList.isEmpty()) { for (IComplexPropertyChangedDelegate change : onChangeList) { change.complexPropertyChanged(this); } } } /** * Sets value of field. * * @param <T> * Field type. * @param field * The field. * @param value * The value. * @return true, if successful */ protected <T> boolean canSetFieldValue(T field, T value) { boolean applyChange; if (field == null) { applyChange = value != null; } else { if (field instanceof Comparable<?>) { Comparable<T> c = (Comparable<T>)field; applyChange = value != null && c.compareTo(value) != 0; } else { applyChange = true; } } return applyChange; } /** * Clears the change log. */ protected void clearChangeLog() { } /** * Reads the attributes from XML. * * @param reader * The reader. * @throws Exception * the exception */ protected void readAttributesFromXml(EwsServiceXmlReader reader) throws Exception { } /** * Reads the text value from XML. * * @param reader * The reader. * @throws Exception * the exception */ protected void readTextValueFromXml(EwsServiceXmlReader reader) throws Exception { } /** * Tries to read element from XML. * * @param reader * The reader. * @return True if element was read. * @throws Exception * the exception */ protected boolean tryReadElementFromXml(EwsServiceXmlReader reader) throws Exception { return false; } /** * Tries to read element from XML to patch this property. * * @param reader The reader. * True if element was read. * */ protected boolean tryReadElementFromXmlToPatch(EwsServiceXmlReader reader) throws Exception { return false; } /** * Writes the attributes to XML. * * @param writer * The writer. * @throws ServiceXmlSerializationException * the service xml serialization exception */ protected void writeAttributesToXml(EwsServiceXmlWriter writer) throws ServiceXmlSerializationException { } /** * Writes elements to XML. * * @param writer * The writer. * @throws Exception * the exception */ protected void writeElementsToXml(EwsServiceXmlWriter writer) throws Exception { } /** * Loads from XML. * * @param reader * The reader. * @param xmlNamespace * the xml namespace * @param xmlElementName * Name of the XML element. 
* @throws Exception * the exception */ protected void loadFromXml(EwsServiceXmlReader reader, XmlNamespace xmlNamespace, String xmlElementName) throws Exception { /*reader.ensureCurrentNodeIsStartElement(xmlNamespace, xmlElementName); this.readAttributesFromXml(reader); if (!reader.isEmptyElement()) { do { reader.read(); switch (reader.getNodeType().nodeType) { case XmlNodeType.START_ELEMENT: if (!this.tryReadElementFromXml(reader)) { reader.skipCurrentElement(); } break; case XmlNodeType.CHARACTERS: this.readTextValueFromXml(reader); break; } } while (!reader.isEndElement(xmlNamespace, xmlElementName)); } else { // Adding this code to skip the END_ELEMENT of an Empty Element. reader.read(); reader.isEndElement(xmlNamespace, xmlElementName); } */ this.internalLoadFromXml(reader, xmlNamespace, xmlElementName); } /** * Loads from XML to update this property. * * @param reader The reader. * @param xmlElementName Name of the XML element. * @throws Exception */ protected void updateFromXml(EwsServiceXmlReader reader, String xmlElementName) throws Exception { this.updateFromXml(reader, this.getNamespace(), xmlElementName); } /** * Loads from XML to update itself. * * @param reader The reader. * @param xmlNamespace The XML namespace. * @param xmlElementName Name of the XML element. */ protected void updateFromXml( EwsServiceXmlReader reader, XmlNamespace xmlNamespace, String xmlElementName) throws Exception { this.internalupdateLoadFromXml(reader, xmlNamespace, xmlElementName); } /** * Loads from XML * @param reader The Reader. * @param xmlNamespace The Xml NameSpace. * @param xmlElementName The Xml ElementName */ private void internalLoadFromXml( EwsServiceXmlReader reader, XmlNamespace xmlNamespace, String xmlElementName) throws Exception { reader.ensureCurrentNodeIsStartElement(xmlNamespace, xmlElementName); this.readAttributesFromXml(reader); if (!reader.isEmptyElement()) { do { reader.read(); switch (reader.getNodeType().nodeType) { case XmlNodeType.START_ELEMENT: if (!this.tryReadElementFromXml(reader)) { reader.skipCurrentElement(); } break; case XmlNodeType.CHARACTERS: this.readTextValueFromXml(reader); break; } } while (!reader.isEndElement(xmlNamespace, xmlElementName)); } else { // Adding this code to skip the END_ELEMENT of an Empty Element. reader.read(); reader.isEndElement(xmlNamespace, xmlElementName); } } private void internalupdateLoadFromXml( EwsServiceXmlReader reader, XmlNamespace xmlNamespace, String xmlElementName) throws Exception { reader.ensureCurrentNodeIsStartElement(xmlNamespace, xmlElementName); this.readAttributesFromXml(reader); if (!reader.isEmptyElement()) { do { reader.read(); switch (reader.getNodeType().nodeType) { case XmlNodeType.START_ELEMENT: if (!this.tryReadElementFromXmlToPatch(reader)) { reader.skipCurrentElement(); } break; case XmlNodeType.CHARACTERS: this.readTextValueFromXml(reader); break; } } while (!reader.isEndElement(xmlNamespace, xmlElementName)); } } /** * Loads from XML. * * @param reader * The reader. * @param xmlElementName * Name of the XML element. * @throws Exception * the exception */ protected void loadFromXml(EwsServiceXmlReader reader, String xmlElementName) throws Exception { this.loadFromXml(reader, this.getNamespace(), xmlElementName); } /** * Writes to XML. * * @param writer * The writer. * @param xmlNamespace * The XML namespace. * @param xmlElementName * Name of the XML element. 
* @throws Exception * the exception */ protected void writeToXml(EwsServiceXmlWriter writer, XmlNamespace xmlNamespace, String xmlElementName) throws Exception { writer.writeStartElement(xmlNamespace, xmlElementName); this.writeAttributesToXml(writer); this.writeElementsToXml(writer); writer.writeEndElement(); } /** * Writes to XML. * * @param writer * The writer. * @param xmlElementName * Name of the XML element. * @throws Exception * the exception */ protected void writeToXml(EwsServiceXmlWriter writer, String xmlElementName) throws Exception { this.writeToXml(writer, this.getNamespace(), xmlElementName); } /** * Change events occur when property changed. */ private List<IComplexPropertyChangedDelegate> onChangeList = new ArrayList<IComplexPropertyChangedDelegate>(); /** * Set event to happen when property changed. * * @param change * change event */ protected void addOnChangeEvent( IComplexPropertyChangedDelegate change) { onChangeList.add(change); } /** * Remove the event from happening when property changed. * * @param change * change event */ protected void removeChangeEvent( IComplexPropertyChangedDelegate change) { onChangeList.remove(change); } /** * Clears change events list. */ protected void clearChangeEvents() { onChangeList.clear(); } /** * Implements ISelfValidate.validate. Validates this instance. * * @throws ServiceValidationException * the service validation exception * @throws Exception * the exception */ public void validate() throws ServiceValidationException, Exception { this.internalValidate(); } /** * Validates this instance. * * @throws ServiceValidationException * the service validation exception * @throws Exception */ protected void internalValidate() throws ServiceValidationException, Exception { } public Boolean func(EwsServiceXmlReader reader) throws Exception { return !this.tryReadElementFromXml(reader); } }
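The canSetFieldValue helper above encodes a small but easy-to-miss rule: a setter should apply a new value (and later fire change events) only when it genuinely differs from the current one, using compareTo when the field is Comparable. The following is a minimal, self-contained sketch of that rule only; the class and member names are invented for illustration and it is not part of the EWS API.

import java.util.Objects;

// Hypothetical sketch of the "only apply a real change" rule used by ComplexProperty.canSetFieldValue.
public class ChangeAwareProperty {
    private Integer value;
    private int changeCount;

    public void setValue(Integer newValue) {
        if (canSetFieldValue(value, newValue)) {
            value = newValue;
            changeCount++;            // stands in for ComplexProperty.changed()
        }
    }

    private static <T> boolean canSetFieldValue(T field, T value) {
        if (field == null) {
            return value != null;                              // null -> non-null counts as a change
        }
        if (field instanceof Comparable<?>) {
            @SuppressWarnings("unchecked")
            Comparable<T> c = (Comparable<T>) field;
            return value != null && c.compareTo(value) != 0;   // equal values are not a change
        }
        return true;                                           // non-comparable fields always count as changed
    }

    public static void main(String[] args) {
        ChangeAwareProperty p = new ChangeAwareProperty();
        p.setValue(1);   // change: null -> 1
        p.setValue(1);   // no change
        p.setValue(2);   // change: 1 -> 2
        System.out.println(p.changeCount);  // prints 2
        Objects.requireNonNull(p);          // keep the import honest in this tiny sketch
    }
}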
/******************************************************************************* * Copyright (c) 2015-2018 Skymind, Inc. * * This program and the accompanying materials are made available under the * terms of the Apache License, Version 2.0 which is available at * https://www.apache.org/licenses/LICENSE-2.0. * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. * * SPDX-License-Identifier: Apache-2.0 ******************************************************************************/ package org.deeplearning4j.datasets.iterator; import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.datasets.iterator.impl.IrisDataSetIterator; import org.junit.Ignore; import org.junit.Test; import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.dataset.DataSet; import org.nd4j.linalg.dataset.api.DataSetPreProcessor; import org.nd4j.linalg.dataset.api.iterator.DataSetIterator; import org.nd4j.linalg.factory.Nd4j; import java.util.List; import static junit.framework.TestCase.assertTrue; import static org.junit.Assert.*; /** * * @author Alex Black */ @Ignore public class TestAsyncIterator extends BaseDL4JTest { @Test public void testBasic() { //Basic test. Make sure it returns the right number of elements, // hasNext() works, etc int size = 13; DataSetIterator baseIter = new TestIterator(size, 0); //async iterator with queue size of 1 DataSetIterator async = new AsyncDataSetIterator(baseIter, 1); for (int i = 0; i < size; i++) { assertTrue(async.hasNext()); DataSet ds = async.next(); assertEquals(ds.getFeatures().getDouble(0), i, 0.0); assertEquals(ds.getLabels().getDouble(0), i, 0.0); } assertFalse(async.hasNext()); async.reset(); assertTrue(async.hasNext()); ((AsyncDataSetIterator) async).shutdown(); //async iterator with queue size of 5 baseIter = new TestIterator(size, 5); async = new AsyncDataSetIterator(baseIter, 5); for (int i = 0; i < size; i++) { assertTrue(async.hasNext()); DataSet ds = async.next(); assertEquals(ds.getFeatures().getDouble(0), i, 0.0); assertEquals(ds.getLabels().getDouble(0), i, 0.0); } assertFalse(async.hasNext()); async.reset(); assertTrue(async.hasNext()); ((AsyncDataSetIterator) async).shutdown(); //async iterator with queue size of 100 baseIter = new TestIterator(size, 100); async = new AsyncDataSetIterator(baseIter, 100); for (int i = 0; i < size; i++) { assertTrue(async.hasNext()); DataSet ds = async.next(); while (ds == null) ds = async.next(); assertEquals(ds.getFeatures().getDouble(0), i, 0.0); assertEquals(ds.getLabels().getDouble(0), i, 0.0); } assertFalse(async.hasNext()); async.reset(); assertTrue(async.hasNext()); ((AsyncDataSetIterator) async).shutdown(); //Test iteration where performance is limited by baseIterator.next() speed baseIter = new TestIterator(size, 1000); async = new AsyncDataSetIterator(baseIter, 5); for (int i = 0; i < size; i++) { assertTrue(async.hasNext()); DataSet ds = async.next(); assertEquals(ds.getFeatures().getDouble(0), i, 0.0); assertEquals(ds.getLabels().getDouble(0), i, 0.0); } assertFalse(async.hasNext()); async.reset(); assertTrue(async.hasNext()); ((AsyncDataSetIterator) async).shutdown(); } @Test public void testInitializeNoNextIter() { DataSetIterator iter = new IrisDataSetIterator(10, 150); while (iter.hasNext()) iter.next(); DataSetIterator async = new 
AsyncDataSetIterator(iter, 2); assertFalse(iter.hasNext()); assertFalse(async.hasNext()); try { iter.next(); fail("Should have thrown NoSuchElementException"); } catch (Exception e) { //OK } async.reset(); int count = 0; while (async.hasNext()) { async.next(); count++; } assertEquals(150 / 10, count); } @Test public void testResetWhileBlocking() { int size = 6; //Test reset while blocking on baseIterator.next() DataSetIterator baseIter = new TestIterator(size, 1000); AsyncDataSetIterator async = new AsyncDataSetIterator(baseIter); async.next(); //Should be waiting on baseIter.next() async.reset(); for (int i = 0; i < 6; i++) { assertTrue(async.hasNext()); DataSet ds = async.next(); assertEquals(ds.getFeatures().getDouble(0), i, 0.0); assertEquals(ds.getLabels().getDouble(0), i, 0.0); } assertFalse(async.hasNext()); async.shutdown(); //Test reset while blocking on blockingQueue.put() baseIter = new TestIterator(size, 0); async = new AsyncDataSetIterator(baseIter); async.next(); async.next(); //Should be waiting on blocingQueue async.reset(); for (int i = 0; i < 6; i++) { assertTrue(async.hasNext()); DataSet ds = async.next(); assertEquals(ds.getFeatures().getDouble(0), i, 0.0); assertEquals(ds.getLabels().getDouble(0), i, 0.0); } assertFalse(async.hasNext()); async.shutdown(); } private static class TestIterator implements DataSetIterator { private int size; private int cursor; private long delayMSOnNext; private TestIterator(int size, long delayMSOnNext) { this.size = size; this.cursor = 0; this.delayMSOnNext = delayMSOnNext; } @Override public DataSet next(int num) { throw new UnsupportedOperationException(); } @Override public int inputColumns() { return 1; } @Override public int totalOutcomes() { return 1; } @Override public boolean resetSupported() { return true; } @Override public boolean asyncSupported() { return false; } @Override public void reset() { cursor = 0; } @Override public int batch() { return 1; } @Override public void setPreProcessor(DataSetPreProcessor preProcessor) { throw new UnsupportedOperationException(); } @Override public DataSetPreProcessor getPreProcessor() { throw new UnsupportedOperationException(); } @Override public List<String> getLabels() { return null; } @Override public boolean hasNext() { return cursor < size; } @Override public DataSet next() { if (delayMSOnNext > 0) { try { Thread.sleep(delayMSOnNext); } catch (InterruptedException e) { throw new RuntimeException(e); } } INDArray features = Nd4j.scalar(cursor); INDArray labels = Nd4j.scalar(cursor); cursor++; return new DataSet(features, labels); } @Override public void remove() {} } }
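The behaviour these tests exercise — a background thread pulling from a slow base iterator into a bounded queue while the consumer reads ahead of it — can be illustrated with a small JDK-only sketch. It is only an analogy for the producer/consumer prefetch idea; it does not reflect how AsyncDataSetIterator is actually implemented, and all names in it are made up.

import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;

// Hypothetical, self-contained illustration of bounded prefetching.
public class PrefetchSketch {
    public static void main(String[] args) throws InterruptedException {
        final int count = 13;                                        // mirrors the test's iterator size
        BlockingQueue<Integer> queue = new LinkedBlockingQueue<>(5); // bounded prefetch buffer

        Thread producer = new Thread(() -> {
            try {
                for (int i = 0; i < count; i++) {
                    Thread.sleep(10);   // stands in for a slow baseIterator.next()
                    queue.put(i);       // blocks when the buffer is full
                }
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
        });
        producer.start();

        for (int i = 0; i < count; i++) {
            int value = queue.take();   // blocks until the producer has an element ready
            System.out.println("consumed " + value);
        }
        producer.join();
    }
}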
/**
 * Write a description of class Model here.
 *
 * @author (your name)
 * @version (a version number or a date)
 */
public class Model {
    // instance variables - replace the example below with your own
    private String firstName;
    private String lastName;
    private int height;
    private double weight;
    private boolean canTravel;
    private boolean smokes;

    // Static variables
    public static final int IN_PER_FOOT = 12;
    public static final int BASE_RATE = 60;
    public static final int TALL_INCHES = 67;
    public static final double THIN_POUNDS = 140.0;
    public static final int TALL_THIN_BONUS = 5;
    public static final int TRAVEL_BONUS = 4;
    public static final int SMOKER_DEDUCTION = 10;

    /**
     * Constructor for objects of class Model
     */
    public Model() {
    }

    /**
     * Overloaded constructor
     *
     * @param _firstName used to initialize the first name field as a String
     * @param _lastName used to initialize the last name field as a String
     * @param _height used to initialize the height field as an int
     * @param _weight used to initialize the weight field as a double
     * @param _canTravel used to initialize whether the model can travel as a boolean
     * @param _smokes used to initialize whether the model smokes as a boolean
     */
    public Model(String _firstName, String _lastName, int _height, double _weight, boolean _canTravel, boolean _smokes) {
        setFirstName(_firstName);
        setLastName(_lastName);
        setHeight(_height);
        setWeight(_weight);
        setCanTravel(_canTravel);
        setSmokes(_smokes);
    }

    /**
     * @param _convertBoolean the boolean to convert
     * @return "Yes" or "No" rather than true or false
     */
    public String booleanConvert(boolean _convertBoolean) {
        if (_convertBoolean) {
            return "Yes";
        }
        return "No";
    }

    /**
     * @param _height used to set the model's height as an int
     */
    public void setHeight(int _height) {
        if (_height >= 0) {
            height = _height;
        } else {
            System.out.println("Your value must not be less than 0");
        }
    }

    /**
     * @param _weight used to set the model's weight as a double
     */
    public void setWeight(double _weight) {
        if (_weight >= 0) {
            weight = _weight;
        } else {
            System.out.println("Your value must not be less than 0");
        }
    }

    /**
     * @param _firstName used to set the model's first name as a String
     */
    public void setFirstName(String _firstName) {
        if (_firstName != null && !_firstName.isEmpty()) {
            firstName = _firstName;
        } else {
            System.out.println("You must input a first name");
        }
    }

    /**
     * @param _lastName used to set the model's last name as a String
     */
    public void setLastName(String _lastName) {
        if (_lastName != null && !_lastName.isEmpty()) {
            lastName = _lastName;
        } else {
            System.out.println("You must input a last name");
        }
    }

    /**
     * @param _canTravel used to set the model's ability to travel as a boolean
     */
    public void setCanTravel(boolean _canTravel) {
        canTravel = _canTravel;
    }

    /**
     * @param _smokes used to set whether the model smokes as a boolean
     */
    public void setSmokes(boolean _smokes) {
        smokes = _smokes;
    }

    /**
     * @return the first name of the model as a String
     */
    public String getFirstName() {
        return firstName;
    }

    /**
     * @return the last name of the model as a String
     */
    public String getLastName() {
        return lastName;
    }

    /**
     * @return the height of the model as an int
     */
    public int getHeight() {
        return height;
    }

    /**
     * @return the weight of the model as a double
     */
    public double getWeight() {
        return weight;
    }

    /**
     * @return whether the model can travel as a boolean
     */
    public boolean isCanTravel() {
        return canTravel;
    }

    /**
     * @return whether the model smokes as a boolean
     */
    public boolean isSmokes() {
        return smokes;
    }

    // Displays the model's details
    public void displayModelDetails() {
        System.out.println("Name: " + firstName + " " + lastName);
        System.out.println("Height: " + convertHeightToFeetInches());
        System.out.println("Weight: " + weight + " pounds");
        System.out.println("Travels: " + booleanConvert(canTravel));
        System.out.println("Smokes: " + booleanConvert(smokes));
        System.out.println("Hourly rate: " + calculatePayPerHour());
    }

    /**
     * @return the model's height converted from inches to feet and inches
     */
    public String convertHeightToFeetInches() {
        return (height / IN_PER_FOOT) + " feet " + (height % IN_PER_FOOT) + " inches";
    }

    /**
     * @return the model's pay per hour
     */
    public int calculatePayPerHour() {
        int payPerHour = BASE_RATE;
        if (TALL_INCHES <= height && THIN_POUNDS >= weight) {
            payPerHour += TALL_THIN_BONUS;
        }
        if (canTravel) {
            payPerHour += TRAVEL_BONUS;
        }
        if (smokes) {
            payPerHour -= SMOKER_DEDUCTION;
        }
        return payPerHour;
    }
}
/* * DBeaver - Universal Database Manager * Copyright (C) 2010-2019 Serge Rider ([email protected]) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jkiss.dbeaver.model.virtual; import org.jkiss.dbeaver.Log; import org.jkiss.dbeaver.model.DBUtils; import org.jkiss.dbeaver.model.data.DBDAttributeTransformerDescriptor; import org.jkiss.dbeaver.model.exec.DBCLogicalOperator; import org.jkiss.dbeaver.model.struct.DBSEntity; import org.jkiss.dbeaver.model.struct.DBSEntityConstraint; import org.jkiss.dbeaver.model.struct.DBSEntityConstraintType; import org.jkiss.dbeaver.runtime.DBWorkbench; import org.jkiss.dbeaver.utils.GeneralUtils; import org.jkiss.utils.ArrayUtils; import org.jkiss.utils.CommonUtils; import org.jkiss.utils.xml.SAXListener; import org.jkiss.utils.xml.SAXReader; import org.jkiss.utils.xml.XMLBuilder; import org.jkiss.utils.xml.XMLException; import org.xml.sax.Attributes; import java.io.IOException; import java.util.Map; /** * DBVModelSerializerLegacy */ @Deprecated class DBVModelSerializerLegacy implements DBVModelSerializer { private static final Log log = Log.getLog(DBVModelSerializerLegacy.class); static void serializeContainer(XMLBuilder xml, DBVContainer object) throws IOException { if (!object.hasValuableData()) { // nothing to save return; } xml.startElement(TAG_CONTAINER); xml.addAttribute(ATTR_NAME, object.getName()); // Containers for (DBVContainer container : object.getContainers()) { serializeContainer(xml, container); } for (DBVEntity entity : object.getEntities()) { if (entity.hasValuableData()) { serializeEntity(xml, entity); } } xml.endElement(); } private static void serializeEntity(XMLBuilder xml, DBVEntity entity) throws IOException { xml.startElement(TAG_ENTITY); xml.addAttribute(ATTR_NAME, entity.getName()); if (!CommonUtils.isEmpty(entity.getDescriptionColumnNames())) { xml.addAttribute(ATTR_DESCRIPTION, entity.getDescriptionColumnNames()); } if (!CommonUtils.isEmpty(entity.getProperties())) { for (Map.Entry<String, Object> prop : entity.getProperties().entrySet()) { xml.startElement(TAG_PROPERTY); xml.addAttribute(ATTR_NAME, prop.getKey()); xml.addAttribute(ATTR_VALUE, CommonUtils.toString(prop.getValue())); xml.endElement(); } } // Attributes for (DBVEntityAttribute attr : CommonUtils.safeCollection(entity.getEntityAttributes())) { if (!attr.hasValuableData()) { continue; } try (final XMLBuilder.Element e3 = xml.startElement(TAG_ATTRIBUTE)) { xml.addAttribute(ATTR_NAME, attr.getName()); final DBVTransformSettings transformSettings = attr.getTransformSettings(); if (transformSettings != null && transformSettings.hasValuableData()) { try (final XMLBuilder.Element e4 = xml.startElement(TAG_TRANSFORM)) { if (!CommonUtils.isEmpty(transformSettings.getCustomTransformer())) { xml.addAttribute(ATTR_CUSTOM, transformSettings.getCustomTransformer()); } for (String id : CommonUtils.safeCollection(transformSettings.getIncludedTransformers())) { try (final XMLBuilder.Element e5 = xml.startElement(TAG_INCLUDE)) { xml.addAttribute(ATTR_ID, id); } } for (String id : 
CommonUtils.safeCollection(transformSettings.getExcludedTransformers())) { try (final XMLBuilder.Element e5 = xml.startElement(TAG_EXCLUDE)) { xml.addAttribute(ATTR_ID, id); } } final Map<String, Object> transformOptions = transformSettings.getTransformOptions(); if (transformOptions != null) { for (Map.Entry<String, Object> prop : transformOptions.entrySet()) { try (final XMLBuilder.Element e5 = xml.startElement(TAG_PROPERTY)) { if (prop.getValue() != null) { xml.addAttribute(ATTR_NAME, prop.getKey()); xml.addAttribute(ATTR_VALUE, CommonUtils.toString(prop.getValue())); } } } } } } if (!CommonUtils.isEmpty(attr.getProperties())) { for (Map.Entry<String, Object> prop : attr.getProperties().entrySet()) { xml.startElement(TAG_PROPERTY); xml.addAttribute(ATTR_NAME, prop.getKey()); xml.addAttribute(ATTR_VALUE, CommonUtils.toString(prop.getValue())); xml.endElement(); } } } } // Constraints for (DBVEntityConstraint c : CommonUtils.safeCollection(entity.getConstraints())) { if (c.hasAttributes()) { xml.startElement(TAG_CONSTRAINT); xml.addAttribute(ATTR_NAME, c.getName()); xml.addAttribute(ATTR_TYPE, c.getConstraintType().getName()); for (DBVEntityConstraintColumn cc : CommonUtils.safeCollection(c.getAttributeReferences(null))) { xml.startElement(TAG_ATTRIBUTE); xml.addAttribute(ATTR_NAME, cc.getAttributeName()); xml.endElement(); } xml.endElement(); } } // Foreign keys for (DBVEntityForeignKey fk : CommonUtils.safeCollection(entity.getForeignKeys())) { xml.startElement(TAG_ASSOCIATION); DBSEntity refEntity = fk.getAssociatedEntity(); xml.addAttribute(ATTR_ENTITY, DBUtils.getObjectFullId(refEntity)); DBSEntityConstraint refConstraint = fk.getReferencedConstraint(); if (refConstraint != null) { xml.addAttribute(ATTR_CONSTRAINT, refConstraint.getName()); } for (DBVEntityForeignKeyColumn cc : CommonUtils.safeCollection(fk.getAttributes())) { xml.startElement(TAG_ATTRIBUTE); xml.addAttribute(ATTR_NAME, cc.getAttributeName()); xml.endElement(); } xml.endElement(); } // Colors if (!CommonUtils.isEmpty(entity.getColorOverrides())) { xml.startElement(TAG_COLORS); for (DBVColorOverride color : entity.getColorOverrides()) { xml.startElement(TAG_COLOR); xml.addAttribute(ATTR_NAME, color.getAttributeName()); xml.addAttribute(ATTR_OPERATOR, color.getOperator().name()); if (color.isRange()) { xml.addAttribute(ATTR_RANGE, true); } if (color.isSingleColumn()) { xml.addAttribute(ATTR_SINGLE_COLUMN, true); } if (color.getColorForeground() != null) { xml.addAttribute(ATTR_FOREGROUND, color.getColorForeground()); } if (color.getColorForeground2() != null) { xml.addAttribute(ATTR_FOREGROUND2, color.getColorForeground2()); } if (color.getColorBackground() != null) { xml.addAttribute(ATTR_BACKGROUND, color.getColorBackground()); } if (color.getColorBackground2() != null) { xml.addAttribute(ATTR_BACKGROUND2, color.getColorBackground2()); } if (!ArrayUtils.isEmpty(color.getAttributeValues())) { for (Object value : color.getAttributeValues()) { if (value == null) { continue; } xml.startElement(TAG_VALUE); xml.addText(GeneralUtils.serializeObject(value)); xml.endElement(); } } xml.endElement(); } xml.endElement(); } xml.endElement(); } static class ModelParser implements SAXListener { private DBVContainer rootContainer; private DBVContainer curContainer = null; private DBVEntity curEntity = null; private DBVEntityAttribute curAttribute = null; private DBVTransformSettings curTransformSettings = null; private DBVEntityConstraint curConstraint; private DBVColorOverride curColor; private boolean colorValue = false; public 
ModelParser(DBVContainer rootContainer) { this.rootContainer = rootContainer; } @Override public void saxStartElement(SAXReader reader, String namespaceURI, String localName, Attributes atts) throws XMLException { switch (localName) { case TAG_CONTAINER: if (curContainer == null) { curContainer = rootContainer; } else { DBVContainer container = new DBVContainer( curContainer, atts.getValue(ATTR_NAME)); curContainer.addContainer(container); curContainer = container; } break; case TAG_ENTITY: curEntity = new DBVEntity( curContainer, atts.getValue(ATTR_NAME), atts.getValue(ATTR_DESCRIPTION)); curContainer.addEntity(curEntity); break; case TAG_PROPERTY: if (curTransformSettings != null) { curTransformSettings.setTransformOption( atts.getValue(ATTR_NAME), atts.getValue(ATTR_VALUE)); } else if (curAttribute != null) { curAttribute.setProperty( atts.getValue(ATTR_NAME), atts.getValue(ATTR_VALUE)); } else if (curEntity != null) { curEntity.setProperty( atts.getValue(ATTR_NAME), atts.getValue(ATTR_VALUE)); } break; case TAG_CONSTRAINT: if (curEntity != null) { curConstraint = new DBVEntityConstraint( curEntity, DBSEntityConstraintType.VIRTUAL_KEY, atts.getValue(ATTR_NAME)); curEntity.addConstraint(curConstraint, false); } break; case TAG_ATTRIBUTE: if (curConstraint != null) { curConstraint.addAttribute(atts.getValue(ATTR_NAME)); } else if (curAttribute != null) { DBVEntityAttribute childAttribute = new DBVEntityAttribute(curEntity, curAttribute, atts.getValue(ATTR_NAME)); curAttribute.addChild(childAttribute); curAttribute = childAttribute; } else if (curEntity != null) { curAttribute = new DBVEntityAttribute(curEntity, null, atts.getValue(ATTR_NAME)); curEntity.addVirtualAttribute(curAttribute, false); } break; case TAG_TRANSFORM: curTransformSettings = new DBVTransformSettings(); curTransformSettings.setCustomTransformer(atts.getValue(ATTR_CUSTOM)); if (curAttribute != null) { curAttribute.setTransformSettings(curTransformSettings); } else if (curEntity != null) { curEntity.setTransformSettings(curTransformSettings); } break; case TAG_INCLUDE: case TAG_EXCLUDE: String transformerId = atts.getValue(ATTR_ID); if (curTransformSettings != null && !CommonUtils.isEmpty(transformerId)) { final DBDAttributeTransformerDescriptor transformer = DBWorkbench.getPlatform().getValueHandlerRegistry().getTransformer(transformerId); if (transformer == null) { log.warn("Transformer '" + transformerId + "' not found"); } else { curTransformSettings.enableTransformer(transformer, TAG_INCLUDE.equals(localName)); } } break; case TAG_COLOR: if (curEntity != null) { try { curColor = new DBVColorOverride( atts.getValue(ATTR_NAME), DBCLogicalOperator.valueOf(atts.getValue(ATTR_OPERATOR)), null, atts.getValue(ATTR_FOREGROUND), atts.getValue(ATTR_BACKGROUND) ); curColor.setRange(CommonUtils.getBoolean(atts.getValue(ATTR_RANGE), false)); curColor.setSingleColumn(CommonUtils.getBoolean(atts.getValue(ATTR_SINGLE_COLUMN), false)); curColor.setColorForeground2(atts.getValue(ATTR_FOREGROUND2)); curColor.setColorBackground2(atts.getValue(ATTR_BACKGROUND2)); curEntity.addColorOverride(curColor); } catch (Throwable e) { log.warn("Error reading color settings", e); } } break; case TAG_VALUE: if (curColor != null) { colorValue = true; } break; } } @Override public void saxText(SAXReader reader, String data) { if (colorValue) { curColor.addAttributeValue(GeneralUtils.deserializeObject(data)); } } @Override public void saxEndElement(SAXReader reader, String namespaceURI, String localName) { switch (localName) { case TAG_CONTAINER: 
curContainer = curContainer.getParentObject();
                    break;
                case TAG_ENTITY:
                    curEntity = null;
                    break;
                case TAG_CONSTRAINT:
                    curConstraint = null;
                    break;
                case TAG_ATTRIBUTE:
                    if (curAttribute != null) {
                        curAttribute = curAttribute.getParent();
                    }
                    break;
                case TAG_TRANSFORM:
                    curTransformSettings = null;
                    break;
                case TAG_COLOR:
                    curColor = null;
                    break;
                case TAG_VALUE:
                    if (curColor != null) {
                        colorValue = false;
                    }
                    break;
            }
        }
    }
}
package com.xzchang.food2fork.view; import android.content.Context; import android.content.Intent; import android.os.Bundle; import android.support.annotation.Nullable; import android.support.v4.view.MenuItemCompat; import android.support.v7.widget.DividerItemDecoration; import android.support.v7.widget.LinearLayoutManager; import android.support.v7.widget.RecyclerView; import android.support.v7.widget.SearchView; import android.view.LayoutInflater; import android.view.Menu; import android.view.MenuInflater; import android.view.MenuItem; import android.view.View; import android.view.ViewGroup; import com.xzchang.food2fork.R; import com.xzchang.food2fork.app.AppComponent; import com.xzchang.food2fork.model.Recipie; import com.xzchang.food2fork.rpc.GetSearchEvent; import com.xzchang.food2fork.rpc.RecipieService; import com.xzchang.food2fork.util.heteroadapter.BindableViewHolder; import com.xzchang.food2fork.util.heteroadapter.HeterogenousAdapter; import com.xzchang.food2fork.util.EndlessRecyclerOnScrollListener; import org.greenrobot.eventbus.Subscribe; import org.greenrobot.eventbus.ThreadMode; import javax.inject.Inject; import butterknife.BindView; import retrofit2.Call; public class RecipieListFragment extends BaseFragment { @Inject RecipieService recipieService; @BindView(R.id.recipie_list) RecyclerView recipieList; private String keyWord; private Call ongoing; private EndlessRecyclerOnScrollListener scrollListener; public static RecipieListFragment newInstance() { return new RecipieListFragment(); } @Override protected boolean hasContent() { return recipieList.getAdapter().getItemCount() > 0; } @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setHasOptionsMenu(true); } private RecipieListAdapter getAdapter() { return (RecipieListAdapter) recipieList.getAdapter(); } @Override protected void setupFragmentComponent(AppComponent appComponent) { appComponent.plus(new RecipieListComponent.RecipieListModule(this)).inject(this); } @Override protected int getLayout() { return R.layout.fragment_recipie_list; } @Nullable @Override public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) { View root = super.onCreateView(inflater, container, savedInstanceState); LinearLayoutManager llm = new LinearLayoutManager(getContext()); recipieList.setLayoutManager(llm); recipieList.addItemDecoration(new DividerItemDecoration(inflater.getContext(), DividerItemDecoration.VERTICAL)); scrollListener = new EndlessRecyclerOnScrollListener(llm) { @Override public void onLoadMore(int currentPage) { recipieService.searchRecipie(keyWord, currentPage); } }; recipieList.addOnScrollListener(scrollListener); return root; } @Override public void onActivityCreated(@Nullable Bundle savedInstanceState) { super.onActivityCreated(savedInstanceState); recipieList.setAdapter(new RecipieListAdapter()); searchRecipie(null); } private void searchRecipie(String keyWord) { this.keyWord = keyWord; getAdapter().clearRecipies(); if (ongoing != null) { ongoing.cancel(); } scrollListener.reset(); recipieService.searchRecipie(keyWord, 1); onStartLoading(); } @Override public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) { super.onCreateOptionsMenu(menu, inflater); inflater.inflate(R.menu.menu_recipie_list, menu); final MenuItem searchItem = menu.findItem(R.id.menu_search); SearchView searchView = (SearchView) MenuItemCompat.getActionView(searchItem); searchView.setOnQueryTextListener(new SearchView.OnQueryTextListener() { 
@Override public boolean onQueryTextSubmit(String query) { searchRecipie(query); return true; } @Override public boolean onQueryTextChange(String newText) { return false; } }); MenuItemCompat.setOnActionExpandListener(searchItem, new MenuItemCompat.OnActionExpandListener() { @Override public boolean onMenuItemActionExpand(MenuItem menuItem) { return true; } @Override public boolean onMenuItemActionCollapse(MenuItem menuItem) { searchRecipie(null); return true; } }); } @Subscribe (threadMode = ThreadMode.MAIN) public void onSearchResult(GetSearchEvent event) { ongoing = null; getAdapter().appendRecipies(event.recipies); onEndLoading(); } private static class RecipieListAdapter extends HeterogenousAdapter<BindableViewHolder> { private void appendRecipies(Recipie[] newRecipies) { // Remove the loading indicator for previous page. if (viewModels.size() > 0 && viewModels.get(viewModels.size() - 1) instanceof ProgressFooterView.ProgressFooterViewModel) { viewModels.remove(viewModels.size() - 1); } for (Recipie r: newRecipies) { viewModels.add(new RecipieItemView.RecipieViewModel(r)); } if (newRecipies.length > 0) { viewModels.add(new ProgressFooterView.ProgressFooterViewModel(null)); notifyDataSetChanged(); } } @Override public BindableViewHolder onCreateViewHolder(ViewGroup parent, int viewType) { View v = LayoutInflater.from(parent.getContext()).inflate(viewType, parent, false); // This is ugly. if (viewType == R.layout.footer_view_loading) { return new ProgressFooterViewHolder(v); } else if (viewType == R.layout.item_view_recipie){ return new RecipieListItemViewHolder(v); } return null; } private void clearRecipies() { int size = viewModels.size(); viewModels.clear(); notifyItemRangeRemoved(0, size); } } public static class ProgressFooterViewHolder extends BindableViewHolder<ProgressFooterView.ProgressFooterViewModel> { public ProgressFooterViewHolder(View itemView) { super(itemView); } } public static class RecipieListItemViewHolder extends BindableViewHolder<RecipieItemView.RecipieViewModel> { public RecipieListItemViewHolder(View itemView) { super(itemView); } @Override public void bind(final RecipieItemView.RecipieViewModel viewModel) { super.bind(viewModel); getView().setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { Context context = view.getContext(); Intent intent = new Intent(context, RecipieDetailActivity.class); intent.putExtra(RecipieDetailFragment.PARAM_RECIPIE_STUB, viewModel.getWrapped()); context.startActivity(intent); } }); } } }
/* * Copyright 2010 Proofpoint, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.proofpoint.log; import com.google.common.io.Files; import org.testng.annotations.AfterMethod; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.List; import static com.google.common.io.Files.createTempDir; import static com.google.common.io.MoreFiles.deleteRecursively; import static com.google.common.io.RecursiveDeleteOption.ALLOW_INSECURE; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertFalse; import static org.testng.Assert.assertTrue; import static org.testng.Assert.fail; public class TestLogging { private File tempDir; @BeforeMethod public void setup() { tempDir = createTempDir(); } @AfterMethod public void tearDown() throws IOException { deleteRecursively(tempDir.toPath(), ALLOW_INSECURE); } @Test public void testRecoverTempFiles() throws IOException { LoggingConfiguration configuration = new LoggingConfiguration(); configuration.setLogPath(new File(tempDir, "launcher.log").getPath()); File logFile1 = new File(tempDir, "test1.log"); Files.touch(logFile1); File logFile2 = new File(tempDir, "test2.log"); Files.touch(logFile2); File tempLogFile1 = new File(tempDir, "temp1.tmp"); Files.touch(tempLogFile1); File tempLogFile2 = new File(tempDir, "temp2.tmp"); Files.touch(tempLogFile2); Logging logging = Logging.initialize(); logging.configure(configuration); assertTrue(logFile1.exists()); assertTrue(logFile2.exists()); assertFalse(tempLogFile1.exists()); assertFalse(tempLogFile2.exists()); assertTrue(new File(tempDir, "temp1.log").exists()); assertTrue(new File(tempDir, "temp2.log").exists()); } @Test public void testPropagatesLevels() { Logging logging = Logging.initialize(); Logger logger = Logger.get("testPropagatesLevels"); logging.setLevel("testPropagatesLevels", Level.ERROR); assertFalse(logger.isDebugEnabled()); assertFalse(logger.isInfoEnabled()); logging.setLevel("testPropagatesLevels", Level.WARN); assertFalse(logger.isDebugEnabled()); assertFalse(logger.isInfoEnabled()); logging.setLevel("testPropagatesLevels", Level.INFO); assertFalse(logger.isDebugEnabled()); assertTrue(logger.isInfoEnabled()); logging.setLevel("testPropagatesLevels", Level.DEBUG); assertTrue(logger.isDebugEnabled()); assertTrue(logger.isInfoEnabled()); logging.setLevel("testPropagatesLevels", Level.TRACE); assertTrue(logger.isDebugEnabled()); assertTrue(logger.isInfoEnabled()); logging.setLevel("testPropagatesLevels", Level.ALL); assertTrue(logger.isDebugEnabled()); assertTrue(logger.isInfoEnabled()); } @Test public void testPropagatesLevelsHierarchical() { Logging logging = Logging.initialize(); Logger logger = Logger.get("testPropagatesLevelsHierarchical.child"); logging.setLevel("testPropagatesLevelsHierarchical", Level.ERROR); assertFalse(logger.isDebugEnabled()); assertFalse(logger.isInfoEnabled()); logging.setLevel("testPropagatesLevelsHierarchical", Level.WARN); 
assertFalse(logger.isDebugEnabled()); assertFalse(logger.isInfoEnabled()); logging.setLevel("testPropagatesLevelsHierarchical", Level.INFO); assertFalse(logger.isDebugEnabled()); assertTrue(logger.isInfoEnabled()); logging.setLevel("testPropagatesLevelsHierarchical", Level.DEBUG); assertTrue(logger.isDebugEnabled()); assertTrue(logger.isInfoEnabled()); logging.setLevel("testPropagatesLevelsHierarchical", Level.TRACE); assertTrue(logger.isDebugEnabled()); assertTrue(logger.isInfoEnabled()); logging.setLevel("testPropagatesLevelsHierarchical", Level.ALL); assertTrue(logger.isDebugEnabled()); assertTrue(logger.isInfoEnabled()); } @Test public void testChildLevelOverridesParent() { Logging logging = Logging.initialize(); Logger logger = Logger.get("testChildLevelOverridesParent.child"); logging.setLevel("testChildLevelOverridesParent", Level.DEBUG); logging.setLevel("testChildLevelOverridesParent.child", Level.ERROR); assertFalse(logger.isDebugEnabled()); assertFalse(logger.isInfoEnabled()); } @Test public void testAddLogTester() { Logging.initialize(); List<String> logRecords = new ArrayList<>(); Logging.addLogTester(TestAddLogTester.class, (level, message, thrown) -> { assertEquals(level, Level.INFO); assertFalse(thrown.isPresent()); logRecords.add(message); }); Logger.get(TestAddLogTester.class).info("test log line"); assertEquals(logRecords.size(), 1); assertEquals(logRecords.get(0), "test log line"); } private static class TestAddLogTester { } @Test public void testAddLogTesterThrown() { Logging.initialize(); List<String> logRecords = new ArrayList<>(); Exception testingException = new Exception(); Logging.addLogTester(TestAddLogTesterThrown.class, (level, message, thrown) -> { assertEquals(level, Level.WARN); assertEquals(thrown.get(), testingException); logRecords.add(message); }); Logger.get(TestAddLogTesterThrown.class).warn(testingException, "test log line"); assertEquals(logRecords.size(), 1); assertEquals(logRecords.get(0), "test log line"); } private static class TestAddLogTesterThrown { } @Test public void testLoggingOutputStream() { Logging.initialize(); List<String> logRecords = new ArrayList<>(); Logging.addLogTester("stdout", (level, message, thrown) -> { assertEquals(level, Level.INFO); assertFalse(thrown.isPresent()); logRecords.add(message); }); System.out.println("test log line %"); assertEquals(logRecords.size(), 1); assertEquals(logRecords.get(0), "test log line %"); } @Test public void testResetLogHandlers() { Logging.initialize(); Logging.addLogTester(TestResetLogHandlers.class, (level, message, thrown) -> { fail("Unexpected call to publish"); }); Logging.resetLogTesters(); Logger.get(TestResetLogHandlers.class).info("test log line"); } private static class TestResetLogHandlers { } }
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.operator.aggregation; import com.facebook.airlift.stats.QuantileDigest; import com.facebook.presto.common.Page; import com.facebook.presto.common.block.Block; import com.facebook.presto.common.type.SqlVarbinary; import com.facebook.presto.common.type.StandardTypes; import com.facebook.presto.common.type.Type; import com.facebook.presto.metadata.FunctionManager; import com.google.common.primitives.Doubles; import com.google.common.primitives.Floats; import org.testng.annotations.Test; import java.util.Arrays; import java.util.List; import java.util.stream.Collectors; import java.util.stream.LongStream; import static com.facebook.presto.block.BlockAssertions.createBlockOfReals; import static com.facebook.presto.block.BlockAssertions.createLongSequenceBlock; import static com.facebook.presto.block.BlockAssertions.createLongsBlock; import static com.facebook.presto.block.BlockAssertions.createRLEBlock; import static com.facebook.presto.block.BlockAssertions.createSequenceBlockOfReal; import static com.facebook.presto.common.type.BigintType.BIGINT; import static com.facebook.presto.common.type.DoubleType.DOUBLE; import static com.facebook.presto.common.type.RealType.REAL; import static com.facebook.presto.common.type.StandardTypes.QDIGEST; import static com.facebook.presto.operator.aggregation.AggregationTestUtils.assertAggregation; import static com.facebook.presto.operator.aggregation.FloatingPointBitsConverterUtil.doubleToSortableLong; import static com.facebook.presto.operator.aggregation.FloatingPointBitsConverterUtil.floatToSortableInt; import static com.facebook.presto.operator.aggregation.TestMergeQuantileDigestFunction.QDIGEST_EQUALITY; import static com.facebook.presto.sql.analyzer.TypeSignatureProvider.fromTypes; import static com.google.common.collect.ImmutableList.toImmutableList; import static java.lang.Double.NaN; public class TestQuantileDigestAggregationFunction extends TestStatisticalDigestAggregationFunction { private static final double STANDARD_ERROR = 0.01; protected double getParameter() { return STANDARD_ERROR; } @Test public void testRealsWithWeights() { testAggregationReal( createBlockOfReals(1.0F, null, 2.0F, null, 3.0F, null, 4.0F, null, 5.0F, null), createRLEBlock(1, 10), 0.01, 1.0F, 2.0F, 3.0F, 4.0F, 5.0F); testAggregationReal( createBlockOfReals(null, null, null, null, null), createRLEBlock(1, 5), NaN); testAggregationReal( createBlockOfReals(-1.0F, -2.0F, -3.0F, -4.0F, -5.0F, -6.0F, -7.0F, -8.0F, -9.0F, -10.0F), createRLEBlock(1, 10), 0.01, -1.0F, -2.0F, -3.0F, -4.0F, -5.0F, -6.0F, -7.0F, -8.0F, -9.0F, -10.0F); testAggregationReal( createBlockOfReals(1.0F, 2.0F, 3.0F, 4.0F, 5.0F, 6.0F, 7.0F, 8.0F, 9.0F, 10.0F), createRLEBlock(1, 10), 0.01, 1.0F, 2.0F, 3.0F, 4.0F, 5.0F, 6.0F, 7.0F, 8.0F, 9.0F, 10.0F); testAggregationReal( createBlockOfReals(), createRLEBlock(1, 0), NaN); testAggregationReal( createBlockOfReals(1.0F), createRLEBlock(1, 1), 0.01, 1.0F); testAggregationReal( 
createSequenceBlockOfReal(-1000, 1000), createRLEBlock(1, 2000), 0.01, Floats.toArray(LongStream.range(-1000, 1000).mapToObj(Float::new).collect(toImmutableList()))); } @Test public void testBigintsWithWeight() { testAggregationBigint( createLongsBlock(1L, null, 2L, null, 3L, null, 4L, null, 5L, null), createRLEBlock(1, 10), 0.01, 1, 2, 3, 4, 5); testAggregationBigint( createLongsBlock(null, null, null, null, null), createRLEBlock(1, 5), NaN); testAggregationBigint( createLongsBlock(-1, -2, -3, -4, -5, -6, -7, -8, -9, -10), createRLEBlock(1, 10), 0.01, -1, -2, -3, -4, -5, -6, -7, -8, -9, -10); testAggregationBigint( createLongsBlock(1, 2, 3, 4, 5, 6, 7, 8, 9, 10), createRLEBlock(1, 10), 0.01, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10); testAggregationBigint( createLongsBlock(new int[] {}), createRLEBlock(1, 0), NaN); testAggregationBigint( createLongsBlock(1), createRLEBlock(1, 1), 0.01, 1); testAggregationBigint( createLongSequenceBlock(-1000, 1000), createRLEBlock(1, 2000), 0.01, LongStream.range(-1000, 1000).toArray()); } @Override protected InternalAggregationFunction getAggregationFunction(Type... type) { FunctionManager functionManager = METADATA.getFunctionManager(); return functionManager.getAggregateFunctionImplementation( functionManager.lookupFunction("qdigest_agg", fromTypes(type))); } private void testAggregationBigint(Block inputBlock, Block weightsBlock, double maxError, long... inputs) { // Test without weights and accuracy testAggregationBigints( getAggregationFunction(BIGINT), new Page(inputBlock), maxError, inputs); // Test with weights and without accuracy testAggregationBigints( getAggregationFunction(BIGINT, BIGINT), new Page(inputBlock, weightsBlock), maxError, inputs); // Test with weights and accuracy testAggregationBigints( getAggregationFunction(BIGINT, BIGINT, DOUBLE), new Page(inputBlock, weightsBlock, createRLEBlock(maxError, inputBlock.getPositionCount())), maxError, inputs); } private void testAggregationReal(Block longsBlock, Block weightsBlock, double maxError, float... inputs) { // Test without weights and accuracy testAggregationReal( getAggregationFunction(REAL), new Page(longsBlock), maxError, inputs); // Test with weights and without accuracy testAggregationReal( getAggregationFunction(REAL, BIGINT), new Page(longsBlock, weightsBlock), maxError, inputs); // Test with weights and accuracy testAggregationReal( getAggregationFunction(REAL, BIGINT, DOUBLE), new Page(longsBlock, weightsBlock, createRLEBlock(maxError, longsBlock.getPositionCount())), maxError, inputs); } private void testAggregationBigints(InternalAggregationFunction function, Page page, double maxError, long... inputs) { // aggregate level assertAggregation(function, QDIGEST_EQUALITY, "test multiple positions", page, getExpectedValueLongs(maxError, inputs)); // test scalars List<Long> rows = Arrays.stream(inputs).sorted().boxed().collect(Collectors.toList()); SqlVarbinary returned = (SqlVarbinary) AggregationTestUtils.aggregation(function, page); assertPercentileWithinError(QDIGEST, StandardTypes.BIGINT, returned, maxError, rows, 0.1, 0.5, 0.9, 0.99); } private void testAggregationReal(InternalAggregationFunction function, Page page, double maxError, float... 
inputs) { assertAggregation(function, QDIGEST_EQUALITY, "test multiple positions", page, getExpectedValuesFloats(maxError, inputs)); // test scalars List<Double> rows = Floats.asList(inputs).stream().sorted().map(Float::doubleValue).collect(Collectors.toList()); SqlVarbinary returned = (SqlVarbinary) AggregationTestUtils.aggregation(function, page); assertPercentileWithinError(QDIGEST, StandardTypes.REAL, returned, maxError, rows, 0.1, 0.5, 0.9, 0.99); } @Override protected void testAggregationDoubles(InternalAggregationFunction function, Page page, double maxError, double... inputs) { assertAggregation(function, QDIGEST_EQUALITY, "test multiple positions", page, getExpectedValueDoubles(maxError, inputs)); // test scalars List<Double> rows = Doubles.asList(inputs).stream().sorted().collect(Collectors.toList()); SqlVarbinary returned = (SqlVarbinary) AggregationTestUtils.aggregation(function, page); assertPercentileWithinError(QDIGEST, StandardTypes.DOUBLE, returned, maxError, rows, 0.1, 0.5, 0.9, 0.99); } private Object getExpectedValueLongs(double maxError, long... values) { if (values.length == 0) { return null; } QuantileDigest qdigest = new QuantileDigest(maxError); Arrays.stream(values).forEach(qdigest::add); return new SqlVarbinary(qdigest.serialize().getBytes()); } @Override protected Object getExpectedValueDoubles(double maxError, double... values) { if (values.length == 0) { return null; } QuantileDigest qdigest = new QuantileDigest(maxError); Arrays.stream(values).forEach(value -> qdigest.add(doubleToSortableLong(value))); return new SqlVarbinary(qdigest.serialize().getBytes()); } private Object getExpectedValuesFloats(double maxError, float... values) { if (values.length == 0) { return null; } QuantileDigest qdigest = new QuantileDigest(maxError); Floats.asList(values).forEach(value -> qdigest.add(floatToSortableInt(value))); return new SqlVarbinary(qdigest.serialize().getBytes()); } }
package net.sourceforge.pmd.lang.rule; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import net.sourceforge.pmd.PropertyDescriptor; import net.sourceforge.pmd.Rule; import net.sourceforge.pmd.RulePriority; import net.sourceforge.pmd.RuleSetReference; import net.sourceforge.pmd.lang.Language; import net.sourceforge.pmd.lang.LanguageVersion; import net.sourceforge.pmd.util.StringUtil; /** * This class represents a Rule which is a reference to Rule defined in another * RuleSet. All details of the Rule are delegated to the underlying referenced * Rule, but those operations which modify overridden aspects of the rule are * explicitly tracked. Modification operations which set a value to the * current underlying value do not override. */ public class RuleReference extends AbstractDelegateRule { private Language language; private LanguageVersion minimumLanguageVersion; private LanguageVersion maximumLanguageVersion; private Boolean deprecated; private String name; private List<PropertyDescriptor<?>> propertyDescriptors; private Map<PropertyDescriptor<?>, Object> propertyValues; private String message; private String description; private List<String> examples; private String externalInfoUrl; private RulePriority priority; private RuleSetReference ruleSetReference; private static final List<PropertyDescriptor<?>> EMPTY_DESCRIPTORS = new ArrayList<PropertyDescriptor<?>>(0); public Language getOverriddenLanguage() { return language; } public RuleReference() { } public RuleReference(Rule theRule, RuleSetReference theRuleSetReference) { setRule(theRule); ruleSetReference = theRuleSetReference; } @Override public void setLanguage(Language language) { // Only override if different than current value, or if already overridden. if (!isSame(language, super.getLanguage()) || this.language != null) { this.language = language; super.setLanguage(language); } } public LanguageVersion getOverriddenMinimumLanguageVersion() { return minimumLanguageVersion; } @Override public void setMinimumLanguageVersion(LanguageVersion minimumLanguageVersion) { // Only override if different than current value, or if already overridden. if (!isSame(minimumLanguageVersion, super.getMinimumLanguageVersion()) || this.minimumLanguageVersion != null) { this.minimumLanguageVersion = minimumLanguageVersion; super.setMinimumLanguageVersion(minimumLanguageVersion); } } public LanguageVersion getOverriddenMaximumLanguageVersion() { return maximumLanguageVersion; } @Override public void setMaximumLanguageVersion(LanguageVersion maximumLanguageVersion) { // Only override if different than current value, or if already overridden. if (!isSame(maximumLanguageVersion, super.getMaximumLanguageVersion()) || this.maximumLanguageVersion != null) { this.maximumLanguageVersion = maximumLanguageVersion; super.setMaximumLanguageVersion(maximumLanguageVersion); } } public Boolean isOverriddenDeprecated() { return deprecated; } @Override public boolean isDeprecated() { return deprecated != null && deprecated.booleanValue(); } @Override public void setDeprecated(boolean deprecated) { // Deprecation does not propagate to the underlying Rule. It is the // Rule reference itself which is being deprecated. this.deprecated = deprecated ? deprecated : null; } public String getOverriddenName() { return name; } @Override public void setName(String name) { // Only override if different than current value, or if already overridden. 
if (!isSame(name, super.getName()) || this.name != null) { this.name = name; super.setName(name); } } public String getOverriddenMessage() { return message; } @Override public void setMessage(String message) { // Only override if different than current value, or if already overridden. if (!isSame(message, super.getMessage()) || this.message != null) { this.message = message; super.setMessage(message); } } public String getOverriddenDescription() { return description; } @Override public void setDescription(String description) { // Only override if different than current value, or if already overridden. if (!isSame(description, super.getDescription()) || this.description != null) { this.description = description; super.setDescription(description); } } public List<String> getOverriddenExamples() { return examples; } @Override public void addExample(String example) { // TODO Meaningful override of examples is hard, because they are merely // a list of strings. How does one indicate override of a particular // value? Via index? Rule.setExample(int, String)? But the XML format // does not provide a means of overriding by index, not unless you took // the position in the XML file to indicate corresponding index to // override. But that means you have to override starting from index 0. // This would be so much easier if examples had to have names, like // properties. // Only override if different than current values. if (!contains(super.getExamples(), example)) { if (examples == null) { examples = new ArrayList<String>(1); } // TODO Fix later. To keep example overrides from being unbounded, we're only going to keep track of the last one. examples.clear(); examples.add(example); super.addExample(example); } } public String getOverriddenExternalInfoUrl() { return externalInfoUrl; } @Override public void setExternalInfoUrl(String externalInfoUrl) { // Only override if different than current value, or if already overridden. if (!isSame(externalInfoUrl, super.getExternalInfoUrl()) || this.externalInfoUrl != null) { this.externalInfoUrl = externalInfoUrl; super.setExternalInfoUrl(externalInfoUrl); } } public RulePriority getOverriddenPriority() { return priority; } @Override public void setPriority(RulePriority priority) { // Only override if different than current value, or if already overridden. if (priority != super.getPriority() || this.priority != null) { this.priority = priority; super.setPriority(priority); } } public List<PropertyDescriptor<?>> getOverriddenPropertyDescriptors() { return propertyDescriptors == null ? EMPTY_DESCRIPTORS : propertyDescriptors; } @Override public void definePropertyDescriptor(PropertyDescriptor<?> propertyDescriptor) throws IllegalArgumentException { // Define on the underlying Rule, where it is impossible to have two // property descriptors with the same name. Therefore, there is no need // to check if the property is already overridden at this level. super.definePropertyDescriptor(propertyDescriptor); if (propertyDescriptors == null) { propertyDescriptors = new ArrayList<PropertyDescriptor<?>>(); } propertyDescriptors.add(propertyDescriptor); } public Map<PropertyDescriptor<?>, Object> getOverriddenPropertiesByPropertyDescriptor() { return propertyValues; } @Override public <T> void setProperty(PropertyDescriptor<T> propertyDescriptor, T value) { // Only override if different than current value. 
if (!isSame(super.getProperty(propertyDescriptor), value)) { if (propertyValues == null) { propertyValues = new HashMap<PropertyDescriptor<?>, Object>(); } propertyValues.put(propertyDescriptor, value); super.setProperty(propertyDescriptor, value); } } public RuleSetReference getRuleSetReference() { return ruleSetReference; } public void setRuleSetReference(RuleSetReference ruleSetReference) { this.ruleSetReference = ruleSetReference; } private static boolean isSame(String s1, String s2) { return StringUtil.isSame(s1, s2, true, false, true); } @SuppressWarnings("PMD.CompareObjectsWithEquals") private static boolean isSame(Object o1, Object o2) { if (o1 instanceof Object[] && o2 instanceof Object[]) { return isSame((Object[])o1, (Object[])o2); } return o1 == o2 || (o1 != null && o2 != null && o1.equals(o2)); } @SuppressWarnings("PMD.UnusedNullCheckInEquals") //TODO: fix UnusedNullCheckInEquals rule for Arrays private static boolean isSame(Object[] a1, Object[] a2) { return a1 == a2 || (a1 != null && a2 != null && Arrays.equals(a1, a2)); } private static boolean contains(Collection<String> collection, String s1) { for (String s2 : collection) { if (isSame(s1, s2)) { return true; } } return false; } public boolean hasDescriptor(PropertyDescriptor<?> descriptor) { return (propertyDescriptors != null && propertyDescriptors.contains(descriptor)) || super.hasDescriptor(descriptor); } public boolean hasOverriddenProperty(PropertyDescriptor<?> descriptor) { return propertyValues != null && propertyValues.containsKey(descriptor); } public boolean usesDefaultValues() { List<PropertyDescriptor<?>> descriptors = getOverriddenPropertyDescriptors(); if (!descriptors.isEmpty()) { return false; } for (PropertyDescriptor<?> desc : descriptors) { if (!isSame(desc.defaultValue(), getProperty(desc))) { return false; } } if (!getRule().usesDefaultValues()) { return false; } return true; } public void useDefaultValueFor(PropertyDescriptor<?> desc) { // not sure if we should go all the way through to the real thing? getRule().useDefaultValueFor(desc); if (propertyValues == null) return; propertyValues.remove(desc); if (propertyDescriptors != null) { propertyDescriptors.remove(desc); } } }
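/*
 * Illustrative sketch (not part of the PMD sources above): shows how the
 * override tracking in RuleReference is expected to behave, based only on the
 * public methods defined in the class above. The Rule and RuleSetReference
 * instances are assumed to come from an already-loaded ruleset; the message
 * and priority values below are examples, assumed to differ from the
 * underlying rule's current values.
 */
package net.sourceforge.pmd.lang.rule.example;

import net.sourceforge.pmd.Rule;
import net.sourceforge.pmd.RulePriority;
import net.sourceforge.pmd.RuleSetReference;
import net.sourceforge.pmd.lang.rule.RuleReference;

public class RuleReferenceOverrideSketch {

    /**
     * Wraps an existing rule and overrides its message and priority.
     * Setting a value equal to the referenced rule's current value is not
     * recorded as an override, so getOverriddenMessage() stays null in that case.
     */
    public static RuleReference overrideMessageAndPriority(Rule rule, RuleSetReference ruleSetRef) {
        RuleReference ref = new RuleReference(rule, ruleSetRef);

        // No-op: same value as the referenced rule, so no override is tracked
        // and getOverriddenMessage() remains null.
        ref.setMessage(rule.getMessage());

        // Real overrides: assuming these differ from the underlying rule's
        // values, they are tracked and reported by the getOverridden* methods.
        ref.setMessage("Custom message for this ruleset");
        ref.setPriority(RulePriority.HIGH);

        return ref;
    }
}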
/* Derby - Class com.pivotal.gemfirexd.internal.impl.tools.ij.util Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ /* * Changes for GemFireXD distributed data platform (some marked by "GemStone changes") * * Portions Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); you * may not use this file except in compliance with the License. You * may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. See accompanying * LICENSE file. */ package com.pivotal.gemfirexd.internal.impl.tools.ij; import com.pivotal.gemfirexd.internal.iapi.tools.i18n.*; import com.pivotal.gemfirexd.internal.shared.common.StopWatch; import com.pivotal.gemfirexd.internal.tools.JDBCDisplayUtil; import scala.tools.jline.console.ConsoleReader; import java.io.BufferedInputStream; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.InputStream; import java.io.IOException; import java.lang.reflect.InvocationTargetException; import java.security.AccessController; import java.security.PrivilegedAction; import java.sql.Connection; import java.sql.DriverManager; import java.sql.SQLException; import java.sql.SQLWarning; import java.sql.Statement; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.Types; import java.util.Properties; import java.util.Vector; import java.util.Locale; /** Methods used to control setup for apps as well as display some internal ij structures. @see com.pivotal.gemfirexd.internal.tools.JDBCDisplayUtil */ public final class util implements java.security.PrivilegedAction { private static boolean IS_AT_LEAST_JDBC2; { boolean isAtLeastJDBC2; try { // Need to test to see if this is // currently JDBC 2 or JSR169. // Checking for BigDecimal doesn't work because // BigDecimal exists in J2ME/CDC/Foundation 1.1 Class.forName("java.sql.Driver"); isAtLeastJDBC2 = true; } catch (Throwable t) { isAtLeastJDBC2 = false; } IS_AT_LEAST_JDBC2 = isAtLeastJDBC2; } private static final Class[] DS_GET_CONN_TYPES = {"".getClass(), "".getClass()}; private util() {} //----------------------------------------------------------------- // Methods for starting up JBMS /** * Find the argument that follows the specified parameter. * * @param param the parameter (e.g. "-p") * @param args the argument list to consider. 
* * @return the argument that follows the parameter, or null if not found */ static public String getArg(String param, String[] args) { int pLocn; Properties p; if (args == null) return null; for (pLocn=0; pLocn<args.length; pLocn++) { if (param.equals(args[pLocn])) break; } if (pLocn >= (args.length-1)) // not found or no file return null; return args[pLocn+1]; } /** ij is started with "-p[r] file OtherArgs"; the file contains properties to control the driver and database used to run ij, and can provide additional system properties. <p> getPropertyArg will look at the args and take out a "-p <file>" pair, reading the file into the system properties. <p> If there was a -p without a following <file>, no action is taken. @exception IOException thrown if file not found @param args the argument list to consider. @return true if a property item was found and loaded. */ static public boolean getPropertyArg(String[] args) throws IOException { String n; InputStream in1; Properties p; if ((n = getArg("-p", args))!= null){ in1 = new FileInputStream(n); in1 = new BufferedInputStream(in1); } else if ((n = getArg("-pr", args)) != null) { in1 = getResourceAsStream(n); if (in1 == null) throw ijException.resourceNotFound(); } else return false; p = System.getProperties(); // Trim off excess whitespace in property file, if any, and // then load those properties into 'p'. util.loadWithTrimmedValues(in1, p); return true; } /** ij is started with "-ca[r] file OtherArgs"; the file contains connection attibute properties to pass to getConnection <p> getConnAttributeArg will look at the args and take out a "-ca[r] <file>" pair and returning the Properties <p> @exception IOException thrown if file not found @param args the argument list to consider. @return properties in the file */ static public Properties getConnAttributeArg(String[] args) throws IOException { String n; InputStream in1; Properties p = new Properties(); if ((n = getArg("-ca", args))!= null){ in1 = new FileInputStream(n); in1 = new BufferedInputStream(in1); } else if ((n = getArg("-car", args)) != null) { in1 = getResourceAsStream(n); if (in1 == null) throw ijException.resourceNotFound(); } else return null; // Trim off excess whitespace in property file, if any, and // then load those properties into 'p'. util.loadWithTrimmedValues(in1, p); return p; } /** Convenience routine to qualify a resource name with "ij.defaultPackageName" if it is not qualified (does not begin with a "/"). @param absolute true means return null if the name is not absolute and false means return partial names. */ static String qualifyResourceName(String resourceName, boolean absolute) { resourceName=resourceName.trim(); if (resourceName.startsWith("/")) { return resourceName; } else { String pName = util.getSystemProperty("ij.defaultResourcePackage").trim(); if (pName == null) return null; if ((pName).endsWith("/")) resourceName = pName+resourceName; else resourceName = pName+"/"+resourceName; if (absolute && !resourceName.startsWith("/")) return null; else return resourceName; } } /** Convenience routine to get a resource as a BufferedInputStream. If the resourceName is not absolute (does not begin with a "/") this qualifies the name with the "ij.defaultResourcePackage" name. @param resourceName the name of the resource @return a buffered stream for the resource if it exists and null otherwise. 
*/ static public InputStream getResourceAsStream(String resourceName) { final Class c = util.class; final String resource = qualifyResourceName(resourceName,true); if (resource == null) return null; InputStream is = (InputStream) AccessController.doPrivileged(new PrivilegedAction() { public Object run() { InputStream locis = c.getResourceAsStream(resource); return locis; } } ); if (is != null) is = new BufferedInputStream(is, utilMain.BUFFEREDFILESIZE); return is; } /** Return the name of the ij command file or null if none is specified. The command file may be proceeded with -f flag on the command line. Alternatively, the command file may be specified without a -f flag. In this case we assume the first unknown argument is the command file. <P> This should only be called after calling invalidArgs. <p> If there is no such argument, a null is returned. @param args the argument list to consider. @return the name of the first argument not preceded by "-p", null if none found. @exception IOException thrown if file not found */ static public String getFileArg(String[] args) throws IOException { String fileName; int fLocn; boolean foundP = false; if (args == null) return null; if ((fileName=getArg("-f",args))!=null) return fileName; // //The first unknown arg is the file for (int ix=0; ix < args.length; ix++) if(args[ix].equals("-f") || args[ix].equals("-fr") || args[ix].equals("-ca") || args[ix].equals("-car") || args[ix].equals("-p") || args[ix].equals("-pr")) ix++; //skip the parameter to these args else return args[ix]; return null; } /** Return the name of a resource containing input commands or null iff none has been specified. */ static public String getInputResourceNameArg(String[] args) { return getArg("-fr", args); } /** Verify the ij line arguments command arguments. Also used to detect --help. @return true if the args are invalid <UL> <LI>Only legal argument provided. <LI>Only specify a quantity once. </UL> */ static public boolean invalidArgs(String[] args) { int countSupported = 0; boolean haveInput = false; for (int ix=0; ix < args.length; ix++) { // //If the arguemnt is a supported flag skip the flags argument if(!haveInput && (args[ix].equals("-f") || args[ix].equals("-fr"))) { haveInput = true; ix++; if (ix >= args.length) return true; } else if ((args[ix].equals("-p") || args[ix].equals("-pr") || args[ix].equals("-ca") || args[ix].equals("-car") )) { // next arg is the file/resource name ix++; if (ix >= args.length) return true; } else if (args[ix].equals("--help")) { return true; } // //Assume the first unknown arg is a file name. else if (!haveInput) { haveInput = true; } else { return true; } } return false; } /** * print a usage message for invocations of main(). */ static void Usage(LocalizedOutput out) { out.println( LocalizedResource.getMessage("IJ_UsageJavaComCloudToolsIjPPropeInput")); out.flush(); } private static final Class[] STRING_P = { "".getClass() }; private static final Class[] INT_P = { Integer.TYPE }; /** * Sets up a data source with values specified in ij.dataSource.* properties or * passed as parameters of this method * * @param ds DataSource object * @param dbName Database Name * @param firstTime If firstTime is false, ij.dataSource.createDatabase and ij.dataSource.databaseName * properties will not be used. The value in parameter dbName will be used instead of * ij.dataSource.databaseName. 
* * @throws Exception */ static public void setupDataSource(Object ds,String dbName,boolean firstTime) throws Exception { // Loop over set methods on Datasource object, if there is a property // then call the method with corresponding value. Call setCreateDatabase based on //parameter create. java.lang.reflect.Method[] methods = ds.getClass().getMethods(); for (int i = 0; i < methods.length; i++) { java.lang.reflect.Method m = methods[i]; String name = m.getName(); if (name.startsWith("set") && (name.length() > "set".length())) { //Check if setCreateDatabase has to be called based on create parameter if(name.equals("setCreateDatabase") && !firstTime) continue; String property = name.substring("set".length()); // setXyyyZwww property = "ij.dataSource."+property.substring(0,1).toLowerCase(java.util.Locale.ENGLISH)+ property.substring(1); // xyyyZwww String value = util.getSystemProperty(property); if(name.equals("setDatabaseName") && !firstTime) value = dbName; if (value != null) { try { // call string method m.invoke(ds, new Object[] {value}); } catch (Throwable ignore) { // failed, assume it's an integer parameter m.invoke(ds, new Object[] {Integer.valueOf(value)}); } } } } } /** * Returns a connection obtained using the DataSource. This method will be called when ij.dataSource * property is set. It uses ij.dataSource.* properties to get details for the connection. * * @param dsName Data Source name * @param user User name * @param password Password * @param dbName Database Name * @param firstTime Indicates if the method is called first time. This is passed to setupDataSource * method. * * @throws SQLException */ public static Connection getDataSourceConnection(String dsName,String user,String password, String dbName,boolean firstTime) throws SQLException{ // Get a new proxied connection through DataSource Object ds = null; // really javax.sql.DataSource try { Class dc = Class.forName(dsName); ds = dc.newInstance(); // set datasource properties setupDataSource(ds,dbName,firstTime); // Java method call "by hand" { con = ds.getConnection(); } // or con = ds.getConnection(user, password) java.lang.reflect.Method m = user == null ? dc.getMethod("getConnection") : dc.getMethod("getConnection", DS_GET_CONN_TYPES); return (java.sql.Connection) m.invoke(ds, user == null ? null : new Object[] {user, password}); } catch (InvocationTargetException ite) { if (ite.getTargetException() instanceof SQLException) throw (SQLException) ite.getTargetException(); ite.printStackTrace(System.out); } catch (Exception e) { e.printStackTrace(System.out); } return null; } /** This will look for the System properties "ij.driver" and "ij.database" and return a java.sql.Connection if it successfully connects. The deprecated driver and database properties are examined first. <p> If no connection was possible, it will return a null. <p> Failure to load the driver class is quietly ignored. @param defaultDriver the driver to use if no property value found @param defaultURL the database URL to use if no property value found @param connInfo Connection attributes to pass to getConnection @return a connection to the defaultURL if possible; null if not. @exception SQLException on failure to connect. @exception ClassNotFoundException on failure to load driver. @exception InstantiationException on failure to load driver. @exception IllegalAccessException on failure to load driver. 
*/ static public Connection startJBMS(String defaultDriver, String defaultURL, Properties connInfo) throws SQLException, ClassNotFoundException, InstantiationException, IllegalAccessException { Connection con = null; String driverName; String databaseURL; // deprecate the non-ij prefix. actually, we should defer to jdbc.drivers... driverName = util.getSystemProperty("driver"); if (driverName == null) driverName = util.getSystemProperty("ij.driver"); if (driverName == null || driverName.length()==0) driverName = defaultDriver; if (driverName != null && IS_AT_LEAST_JDBC2) { util.loadDriver(driverName); } String jdbcProtocol = util.getSystemProperty("ij.protocol"); if (jdbcProtocol != null && IS_AT_LEAST_JDBC2) util.loadDriverIfKnown(jdbcProtocol); String user = util.getSystemProperty("ij.user"); String password = util.getSystemProperty("ij.password"); // deprecate the non-ij prefix name databaseURL = util.getSystemProperty("database"); if (databaseURL == null) databaseURL = util.getSystemProperty("ij.database"); if (databaseURL == null || databaseURL.length()==0) databaseURL = defaultURL; if (databaseURL != null && IS_AT_LEAST_JDBC2) { // add protocol if might help find driver. // if have full URL, load driver for it if (databaseURL.startsWith("jdbc:")) util.loadDriverIfKnown(databaseURL); if (!databaseURL.startsWith("jdbc:") && jdbcProtocol != null) databaseURL = jdbcProtocol+databaseURL; // Update connInfo for ij system properties and // framework network server connInfo = updateConnInfo(user, password,connInfo); // JDBC driver String driver = util.getSystemProperty("driver"); if (driver == null) { driver = "com.pivotal.gemfirexd.jdbc.EmbeddedDriver"; } loadDriver(driver); con = DriverManager.getConnection(databaseURL,connInfo); return con; } // handle datasource property String dsName = util.getSystemProperty("ij.dataSource"); if (dsName == null) return null; //First connection - pass firstTime=true, dbName=null. For database name, //value in ij.dataSource.databaseName will be used. con = getDataSourceConnection(dsName,user,password,null,true); return con; } public static Properties updateConnInfo(String user, String password, Properties connInfo) { String ijGetMessages = util.getSystemProperty("ij.retrieveMessagesFromServerOnGetMessage"); boolean retrieveMessages = false; // For JCC make sure we set it to retrieve messages if (isJCCFramework()) retrieveMessages = true; if (ijGetMessages != null) { if (ijGetMessages.equals("false")) retrieveMessages = false; else retrieveMessages = true; } if (connInfo == null) connInfo = new Properties(); if (retrieveMessages == true) { connInfo.put("retrieveMessagesFromServerOnGetMessage", "true"); } if (user != null) connInfo.put("user",user); if (password != null) connInfo.put("password", password); return connInfo; } /** Utility interface that defaults driver and database to null. @return a connection to the defaultURL if possible; null if not. @exception SQLException on failure to connect. @exception ClassNotFoundException on failure to load driver. @exception InstantiationException on failure to load driver. @exception IllegalAccessException on failure to load driver. 
*/ static public Connection startJBMS() throws SQLException, ClassNotFoundException, InstantiationException, IllegalAccessException { return startJBMS(null,null); } /** Utility interface that defaults connInfo to null <p> @param defaultDriver the driver to use if no property value found @param defaultURL the database URL to use if no property value found @return a connection to the defaultURL if possible; null if not. @exception SQLException on failure to connect. @exception ClassNotFoundException on failure to load driver. @exception InstantiationException on failure to load driver. @exception IllegalAccessException on failure to load driver. */ static public Connection startJBMS(String defaultDriver, String defaultURL) throws SQLException, ClassNotFoundException, InstantiationException, IllegalAccessException { return startJBMS(defaultDriver,defaultURL,null); } //----------------------------------------------------------------- // Methods for displaying and checking results // See com.pivotal.gemfirexd.internal.tools.JDBCDisplayUtil for more general displays. /** Display a vector of strings to the out stream. */ public static void DisplayVector(LocalizedOutput out, Vector v) { int l = v.size(); for (int i=0;i<l;i++) out.println(v.elementAt(i)); } /** Display a vector of statements to the out stream. public static void DisplayVector(AppStreamWriter out, Vector v, Connection conn) throws SQLException { int l = v.size(); AppUI.out.println("SIZE="+l); for (int i=0;i<l;i++) { Object o = v.elementAt(i); if (o instanceof Integer) { // update count JDBCDisplayUtil.DisplayUpdateCount(out,((Integer)o).intValue()); } else { // o instanceof ResultSet JDBCDisplayUtil.DisplayResults(out,(ResultSet)o,conn); ((ResultSet)o).close(); // release the result set } } } */ /** Display a statement that takes parameters by stuffing it with rows from the result set and displaying each result each time through. Deal with autocommit behavior along the way. @exception SQLException thrown on db error @exception ijException thrown on ij error */ public static void DisplayMulti(LocalizedOutput out, PreparedStatement ps, ResultSet rs, Connection conn, ConsoleReader reader /* GemStoneAddition */, StopWatch timer /* GemStoneAddition */) throws SQLException, ijException { boolean autoCommited = false; // mark if autocommit in place boolean exec = false; // mark the first time through boolean anotherUsingRow = false; // remember if there's another row // from using. ResultSetMetaData rsmd = rs.getMetaData(); int numCols = rsmd.getColumnCount(); /* NOTE: We need to close the USING RS first * so that RunTimeStatistic gets info from * the user query. */ anotherUsingRow = rs.next(); while (! autoCommited && anotherUsingRow) { // note the first time through if (!exec) { exec = true; // send a warning if additional results may be lost if (conn.getAutoCommit()) { out.println(LocalizedResource.getMessage("IJ_IjWarniAutocMayCloseUsingResulSet")); autoCommited = true; } } // We need to make sure we pass along the scale, because // setObject assumes a scale of zero (beetle 4365) for (int c=1; c<=numCols; c++) { int sqlType = rsmd.getColumnType(c); if (sqlType == Types.DECIMAL) { if (util.IS_AT_LEAST_JDBC2) { ps.setObject(c,rs.getObject(c), sqlType, rsmd.getScale(c)); } else { // In J2ME there is no object that represents // a DECIMAL value. By default use String to // pass values around, but for integral types // first convert to a integral type from the DECIMAL // because strings like 3.4 are not convertible to // an integral type. 
Of course in JSR169 we have // no way to determine the parameter types, // ParameterMetaData is not supported. // So convert as string, and on a conversion error // try as a long. try { ps.setString(c, rs.getString(c)); } catch (SQLException e) { // 22018 - invalid format if ("22018".equals(e.getSQLState())) ps.setLong(c, rs.getLong(c)); else throw e; } } } else { ps.setObject(c,rs.getObject(c), sqlType); } } // Advance in the USING RS anotherUsingRow = rs.next(); // Close the USING RS when exhausted and appropriate // NOTE: Close before the user query if (! anotherUsingRow || conn.getAutoCommit()) //if no more rows or if auto commit is on, close the resultset { rs.close(); } /* 4. execute the statement against those parameters */ ps.execute(); JDBCDisplayUtil.DisplayResults(out,ps,conn, reader /* GemStoneAddition */, timer /* GemStoneAddition */); /* 5. clear the parameters */ ps.clearParameters(); } if (!exec) { rs.close(); //this means, using clause didn't qualify any rows. Just close the resultset associated with using clause throw ijException.noUsingResults(); } // REMIND: any way to look for more rsUsing rows if autoCommit? // perhaps just document the behavior... } static final String getSystemProperty(String propertyName) { try { if (propertyName.startsWith("ij.") || propertyName.startsWith("gemfirexd.")) { util u = new util(); u.key = propertyName; return (String) java.security.AccessController.doPrivileged(u); } else { return System.getProperty(propertyName); } } catch (SecurityException se) { return null; } } private String key; public final Object run() { return System.getProperty(key); } /** * Read a set of properties from the received input stream, strip * off any excess white space that exists in those property values, * and then add those newly-read properties to the received * Properties object; not explicitly removing the whitespace here can * lead to problems. * * This method exists because of the manner in which the jvm reads * properties from file--extra spaces are ignored after a _key_, but * if they exist at the _end_ of a property decl line (i.e. as part * of a _value_), they are preserved, as outlined in the Java API: * * "Any whitespace after the key is skipped; if the first non- * whitespace character after the key is = or :, then it is ignored * and any whitespace characters after it are also skipped. All * remaining characters on the line become part of the associated * element string." * * Creates final properties set consisting of 'prop' plus all * properties loaded from 'iStr' (with the extra whitespace (if any) * removed from all values), will be returned via the parameter. * * @param iStr An input stream from which the new properties are to be * loaded (should already be initialized). * @param prop A set of properties to which the properties from * iStr will be added (should already be initialized). * * Copied here to avoid dependency on an engine class. **/ private static void loadWithTrimmedValues(InputStream iStr, Properties prop) throws IOException { // load the properties from the received input stream. Properties p = new Properties(); p.load(iStr); // Now, trim off any excess whitespace, if any, and then // add the properties from file to the received Properties // set. for (java.util.Enumeration propKeys = p.propertyNames(); propKeys.hasMoreElements();) { // get the value, trim off the whitespace, then store it // in the received properties object. 
String tmpKey = (String)propKeys.nextElement(); String tmpValue = p.getProperty(tmpKey); tmpValue = tmpValue.trim(); prop.put(tmpKey, tmpValue); } return; } private static final String[][] protocolDrivers = { { "jdbc:derby:net:", "com.ibm.db2.jcc.DB2Driver"}, // GemStone changes BEGIN { "jdbc:gemfirexd://", "com.pivotal.gemfirexd.jdbc.ClientDriver" }, { "jdbc:gemfirexd:", "com.pivotal.gemfirexd.jdbc.EmbeddedDriver" }, /* (original derby code) { "jdbc:derby://", "com.pivotal.gemfirexd.jdbc.ClientDriver"}, { "jdbc:derby:", "com.pivotal.gemfirexd.jdbc.EmbeddedDriver" }, */ // GemStone changes END }; /** Find the appropriate driver and load it, given a JDBC URL. No action if no driver known for a given URL. @param jdbcProtocol the protocol to try. @exception ClassNotFoundException if unable to locate class for driver. @exception InstantiationException if unable to create an instance. @exception IllegalAccessException if driver class constructor not visible. */ public static void loadDriverIfKnown(String jdbcProtocol) throws ClassNotFoundException, InstantiationException, IllegalAccessException { for (int i=0; i < protocolDrivers.length; i++) { if (jdbcProtocol.startsWith(protocolDrivers[i][0])) { loadDriver(protocolDrivers[i][1]); break; // only want the first one } } } /** Load a driver given a class name. @exception ClassNotFoundException if unable to locate class for driver. @exception InstantiationException if unable to create an instance. @exception IllegalAccessException if driver class constructor not visible. */ public static void loadDriver(String driverClass) throws ClassNotFoundException, InstantiationException, IllegalAccessException { Class.forName(driverClass).newInstance(); } /** * Used to determine if this is a JCC testing framework * So that retrieveMessages can be sent. The plan is to have * ij will retrieve messages by default and not look at the testing * frameworks. So, ulitmately this function will look at the driver * rather than the framework. * * @return true if the framework contains Net or JCC. */ private static boolean isJCCFramework() { String framework = util.getSystemProperty("framework"); return ((framework != null) && ((framework.toUpperCase(Locale.ENGLISH).equals("DERBYNET")) || (framework.toUpperCase(Locale.ENGLISH).indexOf("JCC") != -1))); } /** * Selects the current schema from the given connection. * * As there are no way of getting current schema supported by * all major DBMS-es, this method may return null. * * @param theConnection Connection to get current schema for * @return the current schema of the connection, or null if error. */ public static String getSelectedSchema(Connection theConnection) throws SQLException { String schema = null; if (theConnection == null) return null; Statement st = theConnection.createStatement(); try { if(!st.execute("VALUES CURRENT SCHEMA")) return null; ResultSet rs = st.getResultSet(); if(rs==null || !rs.next()) return null; schema = rs.getString(1); } catch(SQLException e) { // There are no standard way of getting schema. // Getting default schema may fail. } finally { st.close(); } return schema; } }
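/*
 * Standalone sketch (not part of the ij sources above): demonstrates the
 * value-trimming behaviour that util.loadWithTrimmedValues() documents.
 * java.util.Properties.load() preserves trailing whitespace in values, so the
 * helper trims each value before copying it into the target Properties.
 * The property key and value used in main() are illustrative only.
 */
package com.pivotal.gemfirexd.internal.impl.tools.ij.example;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.Enumeration;
import java.util.Properties;

public class TrimmedPropertiesSketch {

    /** Loads properties from the stream and trims whitespace around each value. */
    static void loadWithTrimmedValues(InputStream in, Properties target) throws IOException {
        Properties raw = new Properties();
        raw.load(in);
        for (Enumeration<?> keys = raw.propertyNames(); keys.hasMoreElements();) {
            String key = (String) keys.nextElement();
            target.setProperty(key, raw.getProperty(key).trim());
        }
    }

    public static void main(String[] args) throws IOException {
        // Trailing spaces after the value would normally be kept by Properties.load().
        String text = "ij.driver=com.example.SomeDriver   \n";
        Properties p = new Properties();
        loadWithTrimmedValues(new ByteArrayInputStream(text.getBytes(StandardCharsets.ISO_8859_1)), p);
        System.out.println("[" + p.getProperty("ij.driver") + "]"); // printed without trailing spaces
    }
}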
package com.jetbrains.edu.coursecreator.actions; import com.google.gson.FieldNamingPolicy; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import com.google.gson.JsonSyntaxException; import com.intellij.icons.AllIcons; import com.intellij.ide.projectView.ProjectView; import com.intellij.openapi.actionSystem.AnActionEvent; import com.intellij.openapi.actionSystem.CommonDataKeys; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.command.CommandProcessor; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.editor.Document; import com.intellij.openapi.fileChooser.FileChooser; import com.intellij.openapi.fileChooser.FileChooserDescriptor; import com.intellij.openapi.fileEditor.FileDocumentManager; import com.intellij.openapi.project.DumbAwareAction; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.TextRange; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.openapi.vfs.VirtualFileManager; import com.intellij.platform.templates.github.ZipUtil; import com.jetbrains.edu.EduDocumentListener; import com.jetbrains.edu.EduNames; import com.jetbrains.edu.EduUtils; import com.jetbrains.edu.courseFormat.*; import com.jetbrains.edu.coursecreator.CCProjectService; import com.jetbrains.edu.oldCourseFormat.OldCourse; import org.jetbrains.annotations.NotNull; import java.io.*; import java.util.Map; public class CCFromCourseArchive extends DumbAwareAction { private static final Logger LOG = Logger.getInstance(CCFromCourseArchive.class.getName()); public CCFromCourseArchive() { super("Unpack Course Archive", "Unpack Course Archive", AllIcons.FileTypes.Archive); } @Override public void update(@NotNull AnActionEvent e) { e.getPresentation().setEnabledAndVisible(false); //CCProjectService.setCCActionAvailable(e); } @Override public void actionPerformed(@NotNull AnActionEvent e) { final Project project = e.getData(CommonDataKeys.PROJECT); if (project == null) { return; } unpackCourseArchive(project); } private static void unpackCourseArchive(final Project project) { FileChooserDescriptor descriptor = new FileChooserDescriptor(true, true, true, true, true, false); final VirtualFile virtualFile = FileChooser.chooseFile(descriptor, project, null); if (virtualFile == null) { return; } final String basePath = project.getBasePath(); if (basePath == null) return; final CCProjectService service = CCProjectService.getInstance(project); Reader reader = null; try { ZipUtil.unzip(null, new File(basePath), new File(virtualFile.getPath()), null, null, true); reader = new InputStreamReader(new FileInputStream(new File(basePath, EduNames.COURSE_META_FILE))); Gson gson = new GsonBuilder().setFieldNamingPolicy(FieldNamingPolicy.LOWER_CASE_WITH_UNDERSCORES).create(); Course course = gson.fromJson(reader, Course.class); if (course == null || course.getLessons().isEmpty() || StringUtil.isEmptyOrSpaces(course.getLessons().get(0).getName())) { try { reader.close(); } catch (IOException e) { LOG.error(e.getMessage()); } reader = new InputStreamReader(new FileInputStream(new File(basePath, EduNames.COURSE_META_FILE))); OldCourse oldCourse = gson.fromJson(reader, OldCourse.class); course = EduUtils.transformOldCourse(oldCourse); } service.setCourse(course); project.getBaseDir().refresh(false, true); int index = 1; int taskIndex = 1; for (Lesson lesson : course.getLessons()) { final VirtualFile lessonDir = project.getBaseDir().findChild(EduNames.LESSON + 
String.valueOf(index)); lesson.setIndex(index); if (lessonDir == null) continue; for (Task task : lesson.getTaskList()) { final VirtualFile taskDir = lessonDir.findChild(EduNames.TASK + String.valueOf(taskIndex)); task.setIndex(taskIndex); task.setLesson(lesson); if (taskDir == null) continue; for (final Map.Entry<String, TaskFile> entry : task.getTaskFiles().entrySet()) { ApplicationManager.getApplication().runWriteAction(new Runnable() { @Override public void run() { createAnswerFile(project, taskDir, taskDir, entry); } }); } taskIndex += 1; } index += 1; taskIndex = 1; } } catch (FileNotFoundException e) { LOG.error(e.getMessage()); } catch (IOException e) { LOG.error(e.getMessage()); } catch (JsonSyntaxException e) { LOG.error(e.getMessage()); } finally { if (reader != null) { try { reader.close(); } catch (IOException e) { LOG.error(e.getMessage()); } } } synchronize(project); } public static void createAnswerFile(@NotNull final Project project, @NotNull final VirtualFile userFileDir, @NotNull final VirtualFile answerFileDir, @NotNull final Map.Entry<String, TaskFile> taskFileEntry) { final String name = taskFileEntry.getKey(); final TaskFile taskFile = taskFileEntry.getValue(); VirtualFile file = userFileDir.findChild(name); assert file != null; String answerFileName = file.getNameWithoutExtension() + ".answer." + file.getExtension(); VirtualFile answerFile = answerFileDir.findChild(answerFileName); if (answerFile != null) { try { answerFile.delete(project); } catch (IOException e) { LOG.error(e); } } try { answerFile = userFileDir.createChildData(project, answerFileName); } catch (IOException e) { LOG.error(e); } if (answerFile == null) return; final Document originDocument = FileDocumentManager.getInstance().getDocument(file); if (originDocument == null) { return; } final Document document = FileDocumentManager.getInstance().getDocument(answerFile); if (document == null) return; CommandProcessor.getInstance().executeCommand(project, new Runnable() { @Override public void run() { ApplicationManager.getApplication().runWriteAction(new Runnable() { @Override public void run() { document.replaceString(0, document.getTextLength(), originDocument.getCharsSequence()); } }); } }, "Create answer document", "Create answer document"); EduDocumentListener listener = new EduDocumentListener(taskFile, false); document.addDocumentListener(listener); taskFile.sortAnswerPlaceholders(); for (int i = taskFile.getAnswerPlaceholders().size() - 1; i >= 0; i--) { final AnswerPlaceholder answerPlaceholder = taskFile.getAnswerPlaceholders().get(i); replaceAnswerPlaceholder(project, document, answerPlaceholder); } CommandProcessor.getInstance().executeCommand(project, new Runnable() { @Override public void run() { ApplicationManager.getApplication().runWriteAction(new Runnable() { @Override public void run() { FileDocumentManager.getInstance().saveDocument(document); } }); } }, "x", "qwe"); document.removeDocumentListener(listener); } private static void replaceAnswerPlaceholder(@NotNull final Project project, @NotNull final Document document, @NotNull final AnswerPlaceholder answerPlaceholder) { final int offset = answerPlaceholder.getRealStartOffset(document); CommandProcessor.getInstance().executeCommand(project, new Runnable() { @Override public void run() { ApplicationManager.getApplication().runWriteAction(new Runnable() { @Override public void run() { final String text = document.getText(TextRange.create(offset, offset + answerPlaceholder.getLength())); answerPlaceholder.setTaskText(text); final 
VirtualFile hints = project.getBaseDir().findChild(EduNames.HINTS); if (hints != null) { final String hintFile = answerPlaceholder.getHint(); final VirtualFile virtualFile = hints.findChild(hintFile); if (virtualFile != null) { final Document hintDocument = FileDocumentManager.getInstance().getDocument(virtualFile); if (hintDocument != null) { final String hintText = hintDocument.getText(); answerPlaceholder.setHint(hintText); } } } document.replaceString(offset, offset + answerPlaceholder.getLength(), answerPlaceholder.getPossibleAnswer()); FileDocumentManager.getInstance().saveDocument(document); } }); } }, "x", "qwe"); } private static void synchronize(@NotNull final Project project) { VirtualFileManager.getInstance().refreshWithoutFileWatcher(true); ProjectView.getInstance(project).refresh(); } }
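/*
 * Minimal sketch (not part of the plugin sources above): shows the Gson
 * configuration used by unpackCourseArchive() to map snake_case JSON keys
 * from the course metadata file onto camelCase Java fields. The CourseStub
 * type, its fields, and the sample JSON are hypothetical stand-ins, not the
 * real course format.
 */
package com.jetbrains.edu.coursecreator.actions.example;

import com.google.gson.FieldNamingPolicy;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;

public class CourseJsonSketch {

    /** Hypothetical stand-in for the real Course class. */
    static class CourseStub {
        String name;
        String courseDescription; // populated from "course_description"
    }

    public static void main(String[] args) {
        Gson gson = new GsonBuilder()
                .setFieldNamingPolicy(FieldNamingPolicy.LOWER_CASE_WITH_UNDERSCORES)
                .create();
        String json = "{\"name\":\"Intro\",\"course_description\":\"Sample course\"}";
        CourseStub course = gson.fromJson(json, CourseStub.class);
        System.out.println(course.name + ": " + course.courseDescription);
    }
}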
/** * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for * license information. * * Code generated by Microsoft (R) AutoRest Code Generator. */ package com.microsoft.azure.management.network.v2019_07_01.implementation; import com.microsoft.azure.management.network.v2019_07_01.P2SVpnServerConfiguration; import com.microsoft.azure.arm.model.implementation.CreatableUpdatableImpl; import rx.Observable; import java.util.List; import com.microsoft.azure.management.network.v2019_07_01.VpnGatewayTunnelingProtocol; import com.microsoft.azure.management.network.v2019_07_01.P2SVpnServerConfigVpnClientRootCertificate; import com.microsoft.azure.management.network.v2019_07_01.P2SVpnServerConfigVpnClientRevokedCertificate; import com.microsoft.azure.management.network.v2019_07_01.P2SVpnServerConfigRadiusServerRootCertificate; import com.microsoft.azure.management.network.v2019_07_01.P2SVpnServerConfigRadiusClientRootCertificate; import com.microsoft.azure.management.network.v2019_07_01.IpsecPolicy; import com.microsoft.azure.management.network.v2019_07_01.ProvisioningState; import com.microsoft.azure.SubResource; class P2SVpnServerConfigurationImpl extends CreatableUpdatableImpl<P2SVpnServerConfiguration, P2SVpnServerConfigurationInner, P2SVpnServerConfigurationImpl> implements P2SVpnServerConfiguration, P2SVpnServerConfiguration.Definition, P2SVpnServerConfiguration.Update { private final NetworkManager manager; private String resourceGroupName; private String virtualWanName; private String p2SVpnServerConfigurationName; P2SVpnServerConfigurationImpl(String name, NetworkManager manager) { super(name, new P2SVpnServerConfigurationInner()); this.manager = manager; // Set resource name this.p2SVpnServerConfigurationName = name; // } P2SVpnServerConfigurationImpl(P2SVpnServerConfigurationInner inner, NetworkManager manager) { super(inner.name(), inner); this.manager = manager; // Set resource name this.p2SVpnServerConfigurationName = inner.name(); // set resource ancestor and positional variables this.resourceGroupName = IdParsingUtils.getValueFromIdByName(inner.id(), "resourceGroups"); this.virtualWanName = IdParsingUtils.getValueFromIdByName(inner.id(), "virtualWans"); this.p2SVpnServerConfigurationName = IdParsingUtils.getValueFromIdByName(inner.id(), "p2sVpnServerConfigurations"); // } @Override public NetworkManager manager() { return this.manager; } @Override public Observable<P2SVpnServerConfiguration> createResourceAsync() { P2sVpnServerConfigurationsInner client = this.manager().inner().p2sVpnServerConfigurations(); return client.createOrUpdateAsync(this.resourceGroupName, this.virtualWanName, this.p2SVpnServerConfigurationName, this.inner()) .map(innerToFluentMap(this)); } @Override public Observable<P2SVpnServerConfiguration> updateResourceAsync() { P2sVpnServerConfigurationsInner client = this.manager().inner().p2sVpnServerConfigurations(); return client.createOrUpdateAsync(this.resourceGroupName, this.virtualWanName, this.p2SVpnServerConfigurationName, this.inner()) .map(innerToFluentMap(this)); } @Override protected Observable<P2SVpnServerConfigurationInner> getInnerAsync() { P2sVpnServerConfigurationsInner client = this.manager().inner().p2sVpnServerConfigurations(); return client.getAsync(this.resourceGroupName, this.virtualWanName, this.p2SVpnServerConfigurationName); } @Override public boolean isInCreateMode() { return this.inner().id() == null; } @Override public String etag() { return this.inner().etag(); } 
@Override public String id() { return this.inner().id(); } @Override public String name() { return this.inner().name(); } @Override public List<SubResource> p2SVpnGateways() { return this.inner().p2SVpnGateways(); } @Override public List<P2SVpnServerConfigRadiusClientRootCertificate> p2SVpnServerConfigRadiusClientRootCertificates() { return this.inner().p2SVpnServerConfigRadiusClientRootCertificates(); } @Override public List<P2SVpnServerConfigRadiusServerRootCertificate> p2SVpnServerConfigRadiusServerRootCertificates() { return this.inner().p2SVpnServerConfigRadiusServerRootCertificates(); } @Override public String p2SVpnServerConfigurationPropertiesEtag() { return this.inner().p2SVpnServerConfigurationPropertiesEtag(); } @Override public String p2SVpnServerConfigurationPropertiesName() { return this.inner().p2SVpnServerConfigurationPropertiesName(); } @Override public List<P2SVpnServerConfigVpnClientRevokedCertificate> p2SVpnServerConfigVpnClientRevokedCertificates() { return this.inner().p2SVpnServerConfigVpnClientRevokedCertificates(); } @Override public List<P2SVpnServerConfigVpnClientRootCertificate> p2SVpnServerConfigVpnClientRootCertificates() { return this.inner().p2SVpnServerConfigVpnClientRootCertificates(); } @Override public ProvisioningState provisioningState() { return this.inner().provisioningState(); } @Override public String radiusServerAddress() { return this.inner().radiusServerAddress(); } @Override public String radiusServerSecret() { return this.inner().radiusServerSecret(); } @Override public List<IpsecPolicy> vpnClientIpsecPolicies() { return this.inner().vpnClientIpsecPolicies(); } @Override public List<VpnGatewayTunnelingProtocol> vpnProtocols() { return this.inner().vpnProtocols(); } @Override public P2SVpnServerConfigurationImpl withExistingVirtualWan(String resourceGroupName, String virtualWanName) { this.resourceGroupName = resourceGroupName; this.virtualWanName = virtualWanName; return this; } @Override public P2SVpnServerConfigurationImpl withId(String id) { this.inner().withId(id); return this; } @Override public P2SVpnServerConfigurationImpl withName(String name) { this.inner().withName(name); return this; } @Override public P2SVpnServerConfigurationImpl withP2SVpnServerConfigRadiusClientRootCertificates(List<P2SVpnServerConfigRadiusClientRootCertificate> p2SVpnServerConfigRadiusClientRootCertificates) { this.inner().withP2SVpnServerConfigRadiusClientRootCertificates(p2SVpnServerConfigRadiusClientRootCertificates); return this; } @Override public P2SVpnServerConfigurationImpl withP2SVpnServerConfigRadiusServerRootCertificates(List<P2SVpnServerConfigRadiusServerRootCertificate> p2SVpnServerConfigRadiusServerRootCertificates) { this.inner().withP2SVpnServerConfigRadiusServerRootCertificates(p2SVpnServerConfigRadiusServerRootCertificates); return this; } @Override public P2SVpnServerConfigurationImpl withP2SVpnServerConfigurationPropertiesEtag(String p2SVpnServerConfigurationPropertiesEtag) { this.inner().withP2SVpnServerConfigurationPropertiesEtag(p2SVpnServerConfigurationPropertiesEtag); return this; } @Override public P2SVpnServerConfigurationImpl withP2SVpnServerConfigurationPropertiesName(String p2SVpnServerConfigurationPropertiesName) { this.inner().withP2SVpnServerConfigurationPropertiesName(p2SVpnServerConfigurationPropertiesName); return this; } @Override public P2SVpnServerConfigurationImpl withP2SVpnServerConfigVpnClientRevokedCertificates(List<P2SVpnServerConfigVpnClientRevokedCertificate> p2SVpnServerConfigVpnClientRevokedCertificates) { 
this.inner().withP2SVpnServerConfigVpnClientRevokedCertificates(p2SVpnServerConfigVpnClientRevokedCertificates); return this; } @Override public P2SVpnServerConfigurationImpl withP2SVpnServerConfigVpnClientRootCertificates(List<P2SVpnServerConfigVpnClientRootCertificate> p2SVpnServerConfigVpnClientRootCertificates) { this.inner().withP2SVpnServerConfigVpnClientRootCertificates(p2SVpnServerConfigVpnClientRootCertificates); return this; } @Override public P2SVpnServerConfigurationImpl withRadiusServerAddress(String radiusServerAddress) { this.inner().withRadiusServerAddress(radiusServerAddress); return this; } @Override public P2SVpnServerConfigurationImpl withRadiusServerSecret(String radiusServerSecret) { this.inner().withRadiusServerSecret(radiusServerSecret); return this; } @Override public P2SVpnServerConfigurationImpl withVpnClientIpsecPolicies(List<IpsecPolicy> vpnClientIpsecPolicies) { this.inner().withVpnClientIpsecPolicies(vpnClientIpsecPolicies); return this; } @Override public P2SVpnServerConfigurationImpl withVpnProtocols(List<VpnGatewayTunnelingProtocol> vpnProtocols) { this.inner().withVpnProtocols(vpnProtocols); return this; } }
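/*
 * Illustrative sketch (not part of the generated SDK sources above): shows the
 * fluent definition flow this Impl class backs, using only wither methods that
 * appear above. The entry point (networkManager.p2sVpnServerConfigurations()
 * .define(...)) and the stage ordering are assumptions about the generated
 * fluent interfaces for this API version, not verified calls; the resource
 * names are examples.
 */
package com.microsoft.azure.management.network.v2019_07_01.implementation.example;

import java.util.Collections;

import com.microsoft.azure.management.network.v2019_07_01.P2SVpnServerConfiguration;
import com.microsoft.azure.management.network.v2019_07_01.VpnGatewayTunnelingProtocol;
import com.microsoft.azure.management.network.v2019_07_01.implementation.NetworkManager;

public class P2SVpnServerConfigurationSketch {

    /** Defines a P2S VPN server configuration under an existing virtual WAN. */
    public static P2SVpnServerConfiguration defineExample(NetworkManager networkManager) {
        return networkManager
                .p2sVpnServerConfigurations()                      // assumed collection accessor
                .define("example-p2s-config")                      // assumed generated define() entry point
                .withExistingVirtualWan("exampleRg", "exampleWan") // parent resource, as in the Impl above
                .withVpnProtocols(Collections.singletonList(VpnGatewayTunnelingProtocol.IKEV2))
                .create();                                         // blocks until provisioning completes
    }
}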
/** * Copyright (c) 2010 Perforce Software. All rights reserved. */ package com.perforce.p4java.ant.tasks; import java.util.List; import org.apache.tools.ant.BuildException; import com.perforce.p4java.core.IFileDiff; import com.perforce.p4java.core.file.IFileSpec; import com.perforce.p4java.exception.P4JavaError; import com.perforce.p4java.exception.P4JavaException; import com.perforce.p4java.impl.generic.core.file.FileSpec; import com.perforce.p4java.option.server.GetFileDiffsOptions; /** * Run diff (on the server) of two files in the depot. Both files may optionally * include a revision specification; the default is to compare the head * revision. Wildcards may be used, but they must match between file1 and file2. * Note if using clients or labels as file arguments they must be preceded with * a file path e.g. //...@mylabel //...@yourlabel.</p> * * @see PerforceTask * @see ServerTask */ public class Diff2Task extends ClientTask { /** * The first depot file and revision specifier for diff2. */ protected String file1; /** * The second depot file and revision specifier for diff2. */ protected String file2; /** * If not null, it causes diff2 to use the branch view to specify the pairs * of files to compare. If file arguments are also present, they can further * limit the files and specify the revisions for comparison. Note that if * only one file is given, it restricts the right-hand side of the branch * view. */ protected String branch; /** * If true, suppresses the display of the header lines of files whose * content and types are identical and suppresses the actual diff for all * files. Corresponds to the -q flag. */ protected boolean quiet = false; /** * If true, diff even files with non-text (binary) types. Corresponds to the * -t flag. */ protected boolean includeNonTextDiffs = false; /** * If true, use the GNU diff -u format and displays only files that differ. * See the "-u" option in the main diff2 documentation for an explanation. * Corresponds to the -u flag. */ boolean gnuDiffs = false; /** If true, use RCS diff; corresponds to -dn. */ protected boolean rcsDiffs = false; /** * If positive, specifies the number of context diff lines; if zero, lets * server pick context number; if negative, no options are generated. * Corresponds to -dc[n], with -dc generated for diffContext == 0, -dcn for * diffContext > 0, where "n" is of course the value of diffContext. */ protected int diffContext = -1; /** If true, perform summary diff; corresponds to -ds. */ protected boolean summaryDiff = false; /** * If true, do a unified diff; corresponds to -du[n] with -du generated for * unifiedDiff == 0, -dun for unifiedDiff > 0, where "n" is of course the * value of unifiedDiff. */ protected int unifiedDiff = -1; /** If true, ignore whitespace changes; corresponds to -db. */ protected boolean ignoreWhitespaceChanges = false; /** If true, ignore whitespace; corresponds to -dw. */ protected boolean ignoreWhitespace = false; /** If true, ignore line endings; corresponds to -dl. */ protected boolean ignoreLineEndings = false; /** * Left depot file and revision specifier used as input for this Perforce * command. */ protected IFileSpec file1FileSpec; /** * Right depot file and revision specifier used as input for this Perforce * command. */ protected IFileSpec file2FileSpec; /** * Collection of file diffs returned from the Perforce command. */ protected List<IFileDiff> retFileDiffs; /** * Default constructor. 
*/ public Diff2Task() { super(); commandOptions = new GetFileDiffsOptions(quiet, includeNonTextDiffs, gnuDiffs, rcsDiffs, diffContext, summaryDiff, unifiedDiff, ignoreWhitespaceChanges, ignoreWhitespace, ignoreLineEndings); } /** * Sets the file1. * * @param file1 * the new file1 */ public void setFile1(String file1) { this.file1 = file1; } /** * Sets the file2. * * @param file2 * the new file2 */ public void setFile2(String file2) { this.file2 = file2; } /** * Sets the branch. * * @param branch * the new branch */ public void setBranch(String branch) { this.branch = branch; } /** * Sets the quiet. * * @param quiet * the new quiet */ public void setQuiet(boolean quiet) { ((GetFileDiffsOptions) commandOptions).setQuiet(quiet); } /** * Sets the include non text diffs. * * @param includeNonTextDiffs * the new include non text diffs */ public void setIncludeNonTextDiffs(boolean includeNonTextDiffs) { ((GetFileDiffsOptions) commandOptions) .setIncludeNonTextDiffs(includeNonTextDiffs); } /** * Sets the gnu diffs. * * @param gnuDiffs * the new gnu diffs */ public void setGnuDiffs(boolean gnuDiffs) { ((GetFileDiffsOptions) commandOptions).setGnuDiffs(gnuDiffs); } /** * Sets the rcs diffs. * * @param rcsDiffs * the new rcs diffs */ public void setRcsDiffs(boolean rcsDiffs) { ((GetFileDiffsOptions) commandOptions).setRcsDiffs(rcsDiffs); } /** * Sets the diff context. * * @param diffContext * the new diff context */ public void setDiffContext(int diffContext) { ((GetFileDiffsOptions) commandOptions).setDiffContext(diffContext); } /** * Sets the summary diff. * * @param summaryDiff * the new summary diff */ public void setSummaryDiff(boolean summaryDiff) { ((GetFileDiffsOptions) commandOptions).setSummaryDiff(summaryDiff); } /** * Sets the unified diff. * * @param unifiedDiff * the new unified diff */ public void setUnifiedDiff(int unifiedDiff) { ((GetFileDiffsOptions) commandOptions).setUnifiedDiff(unifiedDiff); } /** * Sets the ignore whitespace changes. * * @param ignoreWhitespaceChanges * the new ignore whitespace changes */ public void setIgnoreWhitespaceChanges(boolean ignoreWhitespaceChanges) { ((GetFileDiffsOptions) commandOptions) .setIgnoreWhitespaceChanges(ignoreWhitespaceChanges); } /** * Sets the ignore whitespace. * * @param ignoreWhitespace * the new ignore whitespace */ public void setIgnoreWhitespace(boolean ignoreWhitespace) { ((GetFileDiffsOptions) commandOptions) .setIgnoreWhitespace(ignoreWhitespace); } /** * Sets the ignore line endings. * * @param ignoreLineEndings * the new ignore line endings */ public void setIgnoreLineEndings(boolean ignoreLineEndings) { ((GetFileDiffsOptions) commandOptions) .setIgnoreLineEndings(ignoreLineEndings); } /** * Gets the file1 file spec. * * @return the file1 file spec */ public IFileSpec getFile1FileSpec() { return file1FileSpec; } /** * Gets the file2 file spec. * * @return the file2 file spec */ public IFileSpec getFile2FileSpec() { return file2FileSpec; } /** * Gets the ret file diffs. * * @return the ret file diffs */ public List<IFileDiff> getRetFileDiffs() { return retFileDiffs; } /** * Execute the Perforce diff2 command with file spec 1, file spec 2, branch * and options. Log the returned file diffs. This command is perform on the * server side. * <p> * Run diff on the Perforce server of two files in the depot. 
* <p> * This method corresponds closely to the standard diff2 command, and that * command's documentation should be consulted for the overall and detailed * semantics * * @throws BuildException * the build exception * @see PerforceTask#execP4Command() */ protected void execP4Command() throws BuildException { try { file1FileSpec = new FileSpec(file1); file2FileSpec = new FileSpec(file2); retFileDiffs = getP4Server().getFileDiffs(file1FileSpec, file2FileSpec, branch, ((GetFileDiffsOptions) commandOptions)); logFileDiffs(retFileDiffs); } catch (P4JavaException e) { throw new BuildException(e.getLocalizedMessage(), e, getLocation()); } catch (P4JavaError e) { throw new BuildException(e.getLocalizedMessage(), e, getLocation()); } catch (Throwable t) { throw new BuildException(t.getLocalizedMessage(), t, getLocation()); } } }
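/*
 * Illustrative sketch (not part of the Ant task sources above): configures a
 * Diff2Task programmatically, using only setters defined in the class above.
 * In a real build the same attributes would be set from the Ant build file and
 * Ant would drive execution through the task lifecycle; no Perforce connection
 * is made here, and the depot paths are examples only.
 */
package com.perforce.p4java.ant.tasks.example;

import com.perforce.p4java.ant.tasks.Diff2Task;

public class Diff2TaskConfigSketch {

    public static Diff2Task configure() {
        Diff2Task diff2 = new Diff2Task();
        // Depot paths with optional revision specifiers (head revision by default).
        diff2.setFile1("//depot/main/src/...@100");
        diff2.setFile2("//depot/release/src/...@200");
        diff2.setQuiet(true);                   // -q: suppress identical-file headers and diffs
        diff2.setIncludeNonTextDiffs(false);    // -t would also diff binary file types
        diff2.setUnifiedDiff(3);                // -du3: unified diff with 3 context lines
        diff2.setIgnoreWhitespaceChanges(true); // -db: ignore whitespace changes
        return diff2;
    }
}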
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by http://code.google.com/p/protostuff/ ... DO NOT EDIT! // Generated from protobuf package org.apache.drill.exec.proto.beans; import java.io.Externalizable; import java.io.IOException; import java.io.ObjectInput; import java.io.ObjectOutput; import com.dyuproject.protostuff.GraphIOUtil; import com.dyuproject.protostuff.Input; import com.dyuproject.protostuff.Message; import com.dyuproject.protostuff.Output; import com.dyuproject.protostuff.Schema; public final class DrillServiceInstance implements Externalizable, Message<DrillServiceInstance>, Schema<DrillServiceInstance> { public static Schema<DrillServiceInstance> getSchema() { return DEFAULT_INSTANCE; } public static DrillServiceInstance getDefaultInstance() { return DEFAULT_INSTANCE; } static final DrillServiceInstance DEFAULT_INSTANCE = new DrillServiceInstance(); private String id; private long registrationTimeUTC; private DrillbitEndpoint endpoint; public DrillServiceInstance() { } // getters and setters // id public String getId() { return id; } public DrillServiceInstance setId(String id) { this.id = id; return this; } // registrationTimeUTC public long getRegistrationTimeUTC() { return registrationTimeUTC; } public DrillServiceInstance setRegistrationTimeUTC(long registrationTimeUTC) { this.registrationTimeUTC = registrationTimeUTC; return this; } // endpoint public DrillbitEndpoint getEndpoint() { return endpoint; } public DrillServiceInstance setEndpoint(DrillbitEndpoint endpoint) { this.endpoint = endpoint; return this; } // java serialization public void readExternal(ObjectInput in) throws IOException { GraphIOUtil.mergeDelimitedFrom(in, this, this); } public void writeExternal(ObjectOutput out) throws IOException { GraphIOUtil.writeDelimitedTo(out, this, this); } // message method public Schema<DrillServiceInstance> cachedSchema() { return DEFAULT_INSTANCE; } // schema methods public DrillServiceInstance newMessage() { return new DrillServiceInstance(); } public Class<DrillServiceInstance> typeClass() { return DrillServiceInstance.class; } public String messageName() { return DrillServiceInstance.class.getSimpleName(); } public String messageFullName() { return DrillServiceInstance.class.getName(); } public boolean isInitialized(DrillServiceInstance message) { return true; } public void mergeFrom(Input input, DrillServiceInstance message) throws IOException { for(int number = input.readFieldNumber(this);; number = input.readFieldNumber(this)) { switch(number) { case 0: return; case 1: message.id = input.readString(); break; case 2: message.registrationTimeUTC = input.readInt64(); break; case 3: message.endpoint = input.mergeObject(message.endpoint, DrillbitEndpoint.getSchema()); break; default: input.handleUnknownField(number, this); } } } 
public void writeTo(Output output, DrillServiceInstance message) throws IOException { if(message.id != null) output.writeString(1, message.id, false); if(message.registrationTimeUTC != 0) output.writeInt64(2, message.registrationTimeUTC, false); if(message.endpoint != null) output.writeObject(3, message.endpoint, DrillbitEndpoint.getSchema(), false); } public String getFieldName(int number) { switch(number) { case 1: return "id"; case 2: return "registrationTimeUTC"; case 3: return "endpoint"; default: return null; } } public int getFieldNumber(String name) { final Integer number = __fieldMap.get(name); return number == null ? 0 : number.intValue(); } private static final java.util.HashMap<String,Integer> __fieldMap = new java.util.HashMap<String,Integer>(); static { __fieldMap.put("id", 1); __fieldMap.put("registrationTimeUTC", 2); __fieldMap.put("endpoint", 3); } }
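/*
 * Round-trip sketch (not part of the generated sources above): serializes a
 * DrillServiceInstance through plain Java serialization, which delegates to
 * the protostuff-backed readExternal()/writeExternal() methods defined above.
 * The id and timestamp values are illustrative only.
 */
package org.apache.drill.exec.proto.beans.example;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;

import org.apache.drill.exec.proto.beans.DrillServiceInstance;

public class DrillServiceInstanceRoundTrip {

    public static void main(String[] args) throws Exception {
        DrillServiceInstance original = new DrillServiceInstance()
                .setId("drillbit-1")
                .setRegistrationTimeUTC(System.currentTimeMillis());

        // writeObject() ends up in writeExternal(), which uses GraphIOUtil.
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        try (ObjectOutputStream out = new ObjectOutputStream(bytes)) {
            out.writeObject(original);
        }

        // readObject() calls the no-arg constructor and then readExternal().
        try (ObjectInputStream in = new ObjectInputStream(new ByteArrayInputStream(bytes.toByteArray()))) {
            DrillServiceInstance copy = (DrillServiceInstance) in.readObject();
            System.out.println(copy.getId() + " @ " + copy.getRegistrationTimeUTC());
        }
    }
}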
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.tomcat.lite.io; import java.text.DateFormat; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.Date; import java.util.Locale; import java.util.TimeZone; import java.util.concurrent.ConcurrentHashMap; /** * Utility class to generate HTTP dates. * * @author Remy Maucherat */ public final class FastHttpDateFormat { // -------------------------------------------------------------- Variables protected static final int CACHE_SIZE = Integer.parseInt(System.getProperty("org.apache.tomcat.util.http.FastHttpDateFormat.CACHE_SIZE", "1000")); /** * HTTP date format. */ protected static final SimpleDateFormat format = new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss zzz", Locale.US); /** * The set of SimpleDateFormat formats to use in getDateHeader(). */ protected static final SimpleDateFormat formats[] = { new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss zzz", Locale.US), new SimpleDateFormat("EEEEEE, dd-MMM-yy HH:mm:ss zzz", Locale.US), new SimpleDateFormat("EEE MMMM d HH:mm:ss yyyy", Locale.US) }; protected final static TimeZone gmtZone = TimeZone.getTimeZone("GMT"); /** * GMT timezone - all HTTP dates are on GMT */ static { format.setTimeZone(gmtZone); formats[0].setTimeZone(gmtZone); formats[1].setTimeZone(gmtZone); formats[2].setTimeZone(gmtZone); } /** * Instant on which the currentDate object was generated. */ protected static long currentDateGenerated = 0L; /** * Current formatted date. */ protected static String currentDate = null; /** * Formatter cache. */ protected static final ConcurrentHashMap<Long, String> formatCache = new ConcurrentHashMap<Long, String>(CACHE_SIZE); /** * Parser cache. */ protected static final ConcurrentHashMap<String, Long> parseCache = new ConcurrentHashMap<String, Long>(CACHE_SIZE); // --------------------------------------------------------- Public Methods /** * Get the current date in HTTP format. */ public static final String getCurrentDate() { long now = System.currentTimeMillis(); if ((now - currentDateGenerated) > 1000) { synchronized (format) { if ((now - currentDateGenerated) > 1000) { currentDateGenerated = now; currentDate = format.format(new Date(now)); } } } return currentDate; } /** * Get the HTTP format of the specified date. 
*/ public static final String formatDate (long value, DateFormat threadLocalformat) { Long longValue = new Long(value); String cachedDate = formatCache.get(longValue); if (cachedDate != null) return cachedDate; String newDate = null; Date dateValue = new Date(value); if (threadLocalformat != null) { newDate = threadLocalformat.format(dateValue); updateFormatCache(longValue, newDate); } else { synchronized (formatCache) { synchronized (format) { newDate = format.format(dateValue); } updateFormatCache(longValue, newDate); } } return newDate; } /** * Try to parse the given date as a HTTP date. */ public static final long parseDate(String value, DateFormat[] threadLocalformats) { Long cachedDate = parseCache.get(value); if (cachedDate != null) return cachedDate.longValue(); Long date = null; if (threadLocalformats != null) { date = internalParseDate(value, threadLocalformats); updateParseCache(value, date); } else { synchronized (parseCache) { date = internalParseDate(value, formats); updateParseCache(value, date); } } if (date == null) { return (-1L); } else { return date.longValue(); } } /** * Parse date with given formatters. */ private static final Long internalParseDate (String value, DateFormat[] formats) { Date date = null; for (int i = 0; (date == null) && (i < formats.length); i++) { try { date = formats[i].parse(value); } catch (ParseException e) { ; } } if (date == null) { return null; } return new Long(date.getTime()); } /** * Update cache. */ private static void updateFormatCache(Long key, String value) { if (value == null) { return; } if (formatCache.size() > CACHE_SIZE) { formatCache.clear(); } formatCache.put(key, value); } /** * Update cache. */ private static void updateParseCache(String key, Long value) { if (value == null) { return; } if (parseCache.size() > CACHE_SIZE) { parseCache.clear(); } parseCache.put(key, value); } }
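A caller-side sketch (illustrative only; the class name HttpDateExample and the ThreadLocal setup are assumptions, not part of the class above): passing a per-thread SimpleDateFormat lets formatDate() and parseDate() avoid the shared synchronized formatter.

// Illustrative caller for FastHttpDateFormat.
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Locale;
import java.util.TimeZone;

import org.apache.tomcat.lite.io.FastHttpDateFormat;

public class HttpDateExample {
    // One formatter per thread, so formatDate() skips the shared synchronized path.
    private static final ThreadLocal<DateFormat> FORMAT = ThreadLocal.withInitial(() -> {
        SimpleDateFormat f = new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss zzz", Locale.US);
        f.setTimeZone(TimeZone.getTimeZone("GMT"));
        return f;
    });

    public static void main(String[] args) {
        // Cached and regenerated at most once per second; suitable for a Date response header.
        String now = FastHttpDateFormat.getCurrentDate();

        // Format an arbitrary instant, e.g. a Last-Modified value.
        String lastModified = FastHttpDateFormat.formatDate(System.currentTimeMillis(), FORMAT.get());

        // Parse an If-Modified-Since header; -1 means the value could not be parsed.
        long since = FastHttpDateFormat.parseDate(lastModified, new DateFormat[] { FORMAT.get() });

        System.out.println(now + " / " + lastModified + " / " + since);
    }
}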
package com.zfgc.dbobj; import java.util.ArrayList; import java.util.List; public class FileUploadTempDbObjExample { /** * This field was generated by MyBatis Generator. * This field corresponds to the database table FILE_UPLOAD_TEMP * * @mbggenerated Thu Nov 29 20:27:12 EST 2018 */ protected String orderByClause; /** * This field was generated by MyBatis Generator. * This field corresponds to the database table FILE_UPLOAD_TEMP * * @mbggenerated Thu Nov 29 20:27:12 EST 2018 */ protected boolean distinct; /** * This field was generated by MyBatis Generator. * This field corresponds to the database table FILE_UPLOAD_TEMP * * @mbggenerated Thu Nov 29 20:27:12 EST 2018 */ protected List<Criteria> oredCriteria; /** * This method was generated by MyBatis Generator. * This method corresponds to the database table FILE_UPLOAD_TEMP * * @mbggenerated Thu Nov 29 20:27:12 EST 2018 */ public FileUploadTempDbObjExample() { oredCriteria = new ArrayList<Criteria>(); } /** * This method was generated by MyBatis Generator. * This method corresponds to the database table FILE_UPLOAD_TEMP * * @mbggenerated Thu Nov 29 20:27:12 EST 2018 */ public void setOrderByClause(String orderByClause) { this.orderByClause = orderByClause; } /** * This method was generated by MyBatis Generator. * This method corresponds to the database table FILE_UPLOAD_TEMP * * @mbggenerated Thu Nov 29 20:27:12 EST 2018 */ public String getOrderByClause() { return orderByClause; } /** * This method was generated by MyBatis Generator. * This method corresponds to the database table FILE_UPLOAD_TEMP * * @mbggenerated Thu Nov 29 20:27:12 EST 2018 */ public void setDistinct(boolean distinct) { this.distinct = distinct; } /** * This method was generated by MyBatis Generator. * This method corresponds to the database table FILE_UPLOAD_TEMP * * @mbggenerated Thu Nov 29 20:27:12 EST 2018 */ public boolean isDistinct() { return distinct; } /** * This method was generated by MyBatis Generator. * This method corresponds to the database table FILE_UPLOAD_TEMP * * @mbggenerated Thu Nov 29 20:27:12 EST 2018 */ public List<Criteria> getOredCriteria() { return oredCriteria; } /** * This method was generated by MyBatis Generator. * This method corresponds to the database table FILE_UPLOAD_TEMP * * @mbggenerated Thu Nov 29 20:27:12 EST 2018 */ public void or(Criteria criteria) { oredCriteria.add(criteria); } /** * This method was generated by MyBatis Generator. * This method corresponds to the database table FILE_UPLOAD_TEMP * * @mbggenerated Thu Nov 29 20:27:12 EST 2018 */ public Criteria or() { Criteria criteria = createCriteriaInternal(); oredCriteria.add(criteria); return criteria; } /** * This method was generated by MyBatis Generator. * This method corresponds to the database table FILE_UPLOAD_TEMP * * @mbggenerated Thu Nov 29 20:27:12 EST 2018 */ public Criteria createCriteria() { Criteria criteria = createCriteriaInternal(); if (oredCriteria.size() == 0) { oredCriteria.add(criteria); } return criteria; } /** * This method was generated by MyBatis Generator. * This method corresponds to the database table FILE_UPLOAD_TEMP * * @mbggenerated Thu Nov 29 20:27:12 EST 2018 */ protected Criteria createCriteriaInternal() { Criteria criteria = new Criteria(); return criteria; } /** * This method was generated by MyBatis Generator. 
* This method corresponds to the database table FILE_UPLOAD_TEMP * * @mbggenerated Thu Nov 29 20:27:12 EST 2018 */ public void clear() { oredCriteria.clear(); orderByClause = null; distinct = false; } /** * This class was generated by MyBatis Generator. * This class corresponds to the database table FILE_UPLOAD_TEMP * * @mbggenerated Thu Nov 29 20:27:12 EST 2018 */ protected abstract static class GeneratedCriteria { protected List<Criterion> criteria; protected GeneratedCriteria() { super(); criteria = new ArrayList<Criterion>(); } public boolean isValid() { return criteria.size() > 0; } public List<Criterion> getAllCriteria() { return criteria; } public List<Criterion> getCriteria() { return criteria; } protected void addCriterion(String condition) { if (condition == null) { throw new RuntimeException("Value for condition cannot be null"); } criteria.add(new Criterion(condition)); } protected void addCriterion(String condition, Object value, String property) { if (value == null) { throw new RuntimeException("Value for " + property + " cannot be null"); } criteria.add(new Criterion(condition, value)); } protected void addCriterion(String condition, Object value1, Object value2, String property) { if (value1 == null || value2 == null) { throw new RuntimeException("Between values for " + property + " cannot be null"); } criteria.add(new Criterion(condition, value1, value2)); } public Criteria andUsersIdIsNull() { addCriterion("USERS_ID is null"); return (Criteria) this; } public Criteria andUsersIdIsNotNull() { addCriterion("USERS_ID is not null"); return (Criteria) this; } public Criteria andUsersIdEqualTo(Integer value) { addCriterion("USERS_ID =", value, "usersId"); return (Criteria) this; } public Criteria andUsersIdNotEqualTo(Integer value) { addCriterion("USERS_ID <>", value, "usersId"); return (Criteria) this; } public Criteria andUsersIdGreaterThan(Integer value) { addCriterion("USERS_ID >", value, "usersId"); return (Criteria) this; } public Criteria andUsersIdGreaterThanOrEqualTo(Integer value) { addCriterion("USERS_ID >=", value, "usersId"); return (Criteria) this; } public Criteria andUsersIdLessThan(Integer value) { addCriterion("USERS_ID <", value, "usersId"); return (Criteria) this; } public Criteria andUsersIdLessThanOrEqualTo(Integer value) { addCriterion("USERS_ID <=", value, "usersId"); return (Criteria) this; } public Criteria andUsersIdIn(List<Integer> values) { addCriterion("USERS_ID in", values, "usersId"); return (Criteria) this; } public Criteria andUsersIdNotIn(List<Integer> values) { addCriterion("USERS_ID not in", values, "usersId"); return (Criteria) this; } public Criteria andUsersIdBetween(Integer value1, Integer value2) { addCriterion("USERS_ID between", value1, value2, "usersId"); return (Criteria) this; } public Criteria andUsersIdNotBetween(Integer value1, Integer value2) { addCriterion("USERS_ID not between", value1, value2, "usersId"); return (Criteria) this; } public Criteria andFileNameIsNull() { addCriterion("FILE_NAME is null"); return (Criteria) this; } public Criteria andFileNameIsNotNull() { addCriterion("FILE_NAME is not null"); return (Criteria) this; } public Criteria andFileNameEqualTo(String value) { addCriterion("FILE_NAME =", value, "fileName"); return (Criteria) this; } public Criteria andFileNameNotEqualTo(String value) { addCriterion("FILE_NAME <>", value, "fileName"); return (Criteria) this; } public Criteria andFileNameGreaterThan(String value) { addCriterion("FILE_NAME >", value, "fileName"); return (Criteria) this; } public Criteria 
andFileNameGreaterThanOrEqualTo(String value) { addCriterion("FILE_NAME >=", value, "fileName"); return (Criteria) this; } public Criteria andFileNameLessThan(String value) { addCriterion("FILE_NAME <", value, "fileName"); return (Criteria) this; } public Criteria andFileNameLessThanOrEqualTo(String value) { addCriterion("FILE_NAME <=", value, "fileName"); return (Criteria) this; } public Criteria andFileNameLike(String value) { addCriterion("FILE_NAME like", value, "fileName"); return (Criteria) this; } public Criteria andFileNameNotLike(String value) { addCriterion("FILE_NAME not like", value, "fileName"); return (Criteria) this; } public Criteria andFileNameIn(List<String> values) { addCriterion("FILE_NAME in", values, "fileName"); return (Criteria) this; } public Criteria andFileNameNotIn(List<String> values) { addCriterion("FILE_NAME not in", values, "fileName"); return (Criteria) this; } public Criteria andFileNameBetween(String value1, String value2) { addCriterion("FILE_NAME between", value1, value2, "fileName"); return (Criteria) this; } public Criteria andFileNameNotBetween(String value1, String value2) { addCriterion("FILE_NAME not between", value1, value2, "fileName"); return (Criteria) this; } } /** * This class was generated by MyBatis Generator. * This class corresponds to the database table FILE_UPLOAD_TEMP * * @mbggenerated do_not_delete_during_merge Thu Nov 29 20:27:12 EST 2018 */ public static class Criteria extends GeneratedCriteria { protected Criteria() { super(); } } /** * This class was generated by MyBatis Generator. * This class corresponds to the database table FILE_UPLOAD_TEMP * * @mbggenerated Thu Nov 29 20:27:12 EST 2018 */ public static class Criterion { private String condition; private Object value; private Object secondValue; private boolean noValue; private boolean singleValue; private boolean betweenValue; private boolean listValue; private String typeHandler; public String getCondition() { return condition; } public Object getValue() { return value; } public Object getSecondValue() { return secondValue; } public boolean isNoValue() { return noValue; } public boolean isSingleValue() { return singleValue; } public boolean isBetweenValue() { return betweenValue; } public boolean isListValue() { return listValue; } public String getTypeHandler() { return typeHandler; } protected Criterion(String condition) { super(); this.condition = condition; this.typeHandler = null; this.noValue = true; } protected Criterion(String condition, Object value, String typeHandler) { super(); this.condition = condition; this.value = value; this.typeHandler = typeHandler; if (value instanceof List<?>) { this.listValue = true; } else { this.singleValue = true; } } protected Criterion(String condition, Object value) { this(condition, value, null); } protected Criterion(String condition, Object value, Object secondValue, String typeHandler) { super(); this.condition = condition; this.value = value; this.secondValue = secondValue; this.typeHandler = typeHandler; this.betweenValue = true; } protected Criterion(String condition, Object value, Object secondValue) { this(condition, value, secondValue, null); } } }
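A usage sketch (illustrative; the sketch class name and the mapper mentioned in the trailing comment are hypothetical and not shown in this file): the generated example class accumulates Criteria objects that a MyBatis mapper turns into a dynamic WHERE clause.

// Illustrative construction of a FileUploadTempDbObjExample query object.
import com.zfgc.dbobj.FileUploadTempDbObjExample;

public class FileUploadTempQuerySketch {
    public static void main(String[] args) {
        FileUploadTempDbObjExample example = new FileUploadTempDbObjExample();

        // WHERE USERS_ID = 42 AND FILE_NAME LIKE '%.png'
        example.createCriteria()
               .andUsersIdEqualTo(42)
               .andFileNameLike("%.png");

        // OR USERS_ID IS NULL
        example.or().andUsersIdIsNull();

        example.setOrderByClause("FILE_NAME ASC");
        example.setDistinct(true);

        // A generated MyBatis mapper (hypothetical, not part of this file) would typically accept it:
        // List<FileUploadTempDbObj> rows = fileUploadTempDbObjMapper.selectByExample(example);
    }
}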
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/privacy/dlp/v2beta1/storage.proto package com.google.privacy.dlp.v2beta1; /** * <pre> * A representation of a Datastore kind. * </pre> * * Protobuf type {@code google.privacy.dlp.v2beta1.KindExpression} */ public final class KindExpression extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.privacy.dlp.v2beta1.KindExpression) KindExpressionOrBuilder { // Use KindExpression.newBuilder() to construct. private KindExpression(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private KindExpression() { name_ = ""; } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private KindExpression( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!input.skipField(tag)) { done = true; } break; } case 10: { java.lang.String s = input.readStringRequireUtf8(); name_ = s; break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.privacy.dlp.v2beta1.DlpStorage.internal_static_google_privacy_dlp_v2beta1_KindExpression_descriptor; } protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.privacy.dlp.v2beta1.DlpStorage.internal_static_google_privacy_dlp_v2beta1_KindExpression_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.privacy.dlp.v2beta1.KindExpression.class, com.google.privacy.dlp.v2beta1.KindExpression.Builder.class); } public static final int NAME_FIELD_NUMBER = 1; private volatile java.lang.Object name_; /** * <pre> * The name of the kind. * </pre> * * <code>string name = 1;</code> */ public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } } /** * <pre> * The name of the kind. 
* </pre> * * <code>string name = 1;</code> */ public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!getNameBytes().isEmpty()) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); } } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!getNameBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); } memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.privacy.dlp.v2beta1.KindExpression)) { return super.equals(obj); } com.google.privacy.dlp.v2beta1.KindExpression other = (com.google.privacy.dlp.v2beta1.KindExpression) obj; boolean result = true; result = result && getName() .equals(other.getName()); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.google.privacy.dlp.v2beta1.KindExpression parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.privacy.dlp.v2beta1.KindExpression parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.privacy.dlp.v2beta1.KindExpression parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.privacy.dlp.v2beta1.KindExpression parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.privacy.dlp.v2beta1.KindExpression parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.privacy.dlp.v2beta1.KindExpression parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.privacy.dlp.v2beta1.KindExpression parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.privacy.dlp.v2beta1.KindExpression parseFrom( java.io.InputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.privacy.dlp.v2beta1.KindExpression parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.privacy.dlp.v2beta1.KindExpression parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.privacy.dlp.v2beta1.KindExpression parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.privacy.dlp.v2beta1.KindExpression parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.privacy.dlp.v2beta1.KindExpression prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * A representation of a Datastore kind. 
* </pre> * * Protobuf type {@code google.privacy.dlp.v2beta1.KindExpression} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.privacy.dlp.v2beta1.KindExpression) com.google.privacy.dlp.v2beta1.KindExpressionOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.privacy.dlp.v2beta1.DlpStorage.internal_static_google_privacy_dlp_v2beta1_KindExpression_descriptor; } protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.privacy.dlp.v2beta1.DlpStorage.internal_static_google_privacy_dlp_v2beta1_KindExpression_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.privacy.dlp.v2beta1.KindExpression.class, com.google.privacy.dlp.v2beta1.KindExpression.Builder.class); } // Construct using com.google.privacy.dlp.v2beta1.KindExpression.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } public Builder clear() { super.clear(); name_ = ""; return this; } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.privacy.dlp.v2beta1.DlpStorage.internal_static_google_privacy_dlp_v2beta1_KindExpression_descriptor; } public com.google.privacy.dlp.v2beta1.KindExpression getDefaultInstanceForType() { return com.google.privacy.dlp.v2beta1.KindExpression.getDefaultInstance(); } public com.google.privacy.dlp.v2beta1.KindExpression build() { com.google.privacy.dlp.v2beta1.KindExpression result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public com.google.privacy.dlp.v2beta1.KindExpression buildPartial() { com.google.privacy.dlp.v2beta1.KindExpression result = new com.google.privacy.dlp.v2beta1.KindExpression(this); result.name_ = name_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.privacy.dlp.v2beta1.KindExpression) { return mergeFrom((com.google.privacy.dlp.v2beta1.KindExpression)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.privacy.dlp.v2beta1.KindExpression other) { if (other == com.google.privacy.dlp.v2beta1.KindExpression.getDefaultInstance()) return this; if (!other.getName().isEmpty()) { name_ = other.name_; onChanged(); } onChanged(); return this; } public final boolean 
isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.privacy.dlp.v2beta1.KindExpression parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.google.privacy.dlp.v2beta1.KindExpression) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private java.lang.Object name_ = ""; /** * <pre> * The name of the kind. * </pre> * * <code>string name = 1;</code> */ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * The name of the kind. * </pre> * * <code>string name = 1;</code> */ public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * The name of the kind. * </pre> * * <code>string name = 1;</code> */ public Builder setName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } name_ = value; onChanged(); return this; } /** * <pre> * The name of the kind. * </pre> * * <code>string name = 1;</code> */ public Builder clearName() { name_ = getDefaultInstance().getName(); onChanged(); return this; } /** * <pre> * The name of the kind. * </pre> * * <code>string name = 1;</code> */ public Builder setNameBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); name_ = value; onChanged(); return this; } public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return this; } public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return this; } // @@protoc_insertion_point(builder_scope:google.privacy.dlp.v2beta1.KindExpression) } // @@protoc_insertion_point(class_scope:google.privacy.dlp.v2beta1.KindExpression) private static final com.google.privacy.dlp.v2beta1.KindExpression DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.privacy.dlp.v2beta1.KindExpression(); } public static com.google.privacy.dlp.v2beta1.KindExpression getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<KindExpression> PARSER = new com.google.protobuf.AbstractParser<KindExpression>() { public KindExpression parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new KindExpression(input, extensionRegistry); } }; public static com.google.protobuf.Parser<KindExpression> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<KindExpression> getParserForType() { return PARSER; } public com.google.privacy.dlp.v2beta1.KindExpression getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
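A usage sketch (illustrative only; the class name KindExpressionSketch and the sample kind name are made up): building a KindExpression with the generated builder and re-parsing it from its wire format.

// Illustrative round-trip for the generated KindExpression message.
import com.google.privacy.dlp.v2beta1.KindExpression;

public class KindExpressionSketch {
    public static void main(String[] args) throws Exception {
        // Builder pattern generated by protoc.
        KindExpression kind = KindExpression.newBuilder()
                .setName("Person")
                .build();

        // Serialize to the protobuf wire format and parse it back.
        byte[] wire = kind.toByteArray();
        KindExpression parsed = KindExpression.parseFrom(wire);

        System.out.println(parsed.getName()); // "Person"
    }
}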
import javax.swing.*;
import java.awt.*;
import java.awt.event.*;

public class PlayerFrame extends JFrame implements MouseListener {

    JPanel mpan;
    JButton addBut;
    JLabel labels[] = new JLabel[6];
    JTextField names[] = new JTextField[6];
    JButton colorBut[] = new JButton[6];
    JButton picBut[] = new JButton[6];
    // JLabel pic[]=new JLabel[6];
    int num = 1;
    JButton addHBut = new JButton("Add Human Player");
    JButton addCBut = new JButton("Add Robot Player");
    JButton startBut = new JButton("Start Game");
    JButton cancelBut = new JButton("Cancel");
    JLabel avatar[] = new JLabel[6];
    JFileChooser fileChooser = new JFileChooser();
    JColorChooser colorChooser = new JColorChooser();
    Game game;
    int compcount = 1;

    public PlayerFrame() {
        super("Players");
        setSize(700, 570);
        setResizable(false);
        setDefaultCloseOperation(EXIT_ON_CLOSE);
        getContentPane().setLayout(null);
        Dimension d = Toolkit.getDefaultToolkit().getScreenSize();
        setLocation((int) (d.getWidth() - getWidth()) / 2, (int) (d.getHeight() - getHeight()) / 2);
        setFont(new Font("Tahoma", 2, 18));

        mpan = new JPanel(null);
        mpan.setLayout(null);
        mpan.setBounds(0, 25, getWidth(), getHeight());
        mpan.setVisible(true);

        fileChooser.setDialogTitle("Choose your picture");

        cancelBut.addMouseListener(this);
        startBut.addMouseListener(this);
        addCBut.addMouseListener(this);
        addHBut.addMouseListener(this);

        // Build one row of controls (label, name field, color/picture buttons, avatar) per player slot.
        for (int i = 1; i <= 6; i++) {
            labels[i - 1] = new JLabel("Player " + i + ":");
            labels[i - 1].setBounds(25, ((i - 1) * 75), 100, 25);
            labels[i - 1].setVisible(false);
            mpan.add(labels[i - 1]);

            names[i - 1] = new JTextField();
            names[i - 1].setBounds(100, ((i - 1) * 75), 300, 25);
            names[i - 1].setVisible(false);
            mpan.add(names[i - 1]);

            colorBut[i - 1] = new JButton("Color..");
            colorBut[i - 1].setBounds(405, ((i - 1) * 75), 100, 25);
            colorBut[i - 1].setVisible(false);
            colorBut[i - 1].addMouseListener(this);
            mpan.add(colorBut[i - 1]);

            picBut[i - 1] = new JButton("Picture..");
            picBut[i - 1].setBounds(510, ((i - 1) * 75), 100, 25);
            picBut[i - 1].setVisible(false);
            picBut[i - 1].addMouseListener(this);
            mpan.add(picBut[i - 1]);

            avatar[i - 1] = new JLabel();
            avatar[i - 1].setBounds(615, picBut[i - 1].getY(), 70, 70);
            avatar[i - 1].setVisible(false);
            avatar[i - 1].setBackground(Color.GRAY);
            avatar[i - 1].setOpaque(true);
            mpan.add(avatar[i - 1]);
        }

        // Only the first player row is visible initially.
        labels[0].setVisible(true);
        names[0].setVisible(true);
        colorBut[0].setVisible(true);
        picBut[0].setVisible(true);
        avatar[0].setVisible(true);

        addHBut.setBounds(25, getHeight() - 25 - 25 - 25 - 25, 140, 25);
        addHBut.setVisible(true);
        mpan.add(addHBut);
        addCBut.setBounds(190, getHeight() - 25 - 25 - 25 - 25, 140, 25);
        addCBut.setVisible(true);
        mpan.add(addCBut);
        startBut.setBounds(355, getHeight() - 25 - 25 - 25 - 25, 140, 25);
        startBut.setVisible(true);
        mpan.add(startBut);
        cancelBut.setBounds(520, getHeight() - 25 - 25 - 25 - 25, 140, 25);
        cancelBut.setVisible(true);
        mpan.add(cancelBut);

        getContentPane().add(mpan);
        setVisible(true);
    }

    public void mouseClicked(MouseEvent e) {
        if (e.getSource().equals(addHBut)) {
            if (num < 6) {
                labels[num].setVisible(true);
                names[num].setVisible(true);
                colorBut[num].setVisible(true);
                picBut[num].setVisible(true);
                avatar[num].setVisible(true);
                num++;
            }
        } else if (e.getSource().equals(addCBut)) {
            if (num < 6) {
                labels[num].setVisible(true);
                names[num].setVisible(true);
                names[num].setText("ComputerPlayer" + compcount++);
                colorBut[num].setVisible(true);
                picBut[num].setVisible(true);
                picBut[num].setEnabled(false);
                picBut[num].removeMouseListener(this);
                avatar[num].setVisible(true);
                num++;
            }
        } else if (e.getSource().equals(startBut)) {
            dispose();
            new GameFrame();
            game = new Game(num - 1);
            for (int i = 0; i < 6; i++) {
                // Only add players whose name field is visible and non-empty.
                // (The original compared strings with !=, which is always true for getText().)
                if (names[i].isVisible() && !names[i].getText().isEmpty()) {
                    game.addPlayerToGame(new Player(names[i].getText(), i + 1, avatar[i].getBackground(), avatar[i].getIcon()));
                }
            }
        } else if (e.getSource().equals(cancelBut)) {
            new StartFrame();
            dispose();
        } else {
            // The six color and picture buttons all behave the same way, so handle them in one
            // loop instead of repeating an identical branch per player index.
            for (int i = 0; i < 6; i++) {
                if (e.getSource().equals(colorBut[i])) {
                    avatar[i].setIcon(null);
                    Color c = JColorChooser.showDialog(null, "Choose your color", Color.GREEN);
                    // showDialog returns null when the user cancels; the original c.equals(null)
                    // check would throw a NullPointerException in that case.
                    if (c != null) {
                        avatar[i].setBackground(c);
                    }
                } else if (e.getSource().equals(picBut[i])) {
                    int x = fileChooser.showOpenDialog(null);
                    if (x != JFileChooser.CANCEL_OPTION) {
                        avatar[i].setIcon(new ImageIcon(fileChooser.getSelectedFile().getAbsolutePath()));
                    }
                }
            }
        }
    }

    public void mouseEntered(MouseEvent e) {}
    public void mouseExited(MouseEvent e) {}
    public void mousePressed(MouseEvent e) {}
    public void mouseReleased(MouseEvent e) {}

    public static void main(String[] args) {
        PlayerFrame p = new PlayerFrame();
    }
}
package com.defano.wyldcard.parts.model; import com.defano.wyldcard.WyldCard; import com.defano.wyldcard.parts.Messagable; import com.defano.wyldcard.parts.button.ButtonModel; import com.defano.wyldcard.parts.card.CardLayer; import com.defano.wyldcard.parts.field.FieldModel; import com.defano.wyldcard.parts.stack.StackModel; import com.defano.wyldcard.runtime.context.ExecutionContext; import com.defano.wyldcard.runtime.interpreter.CompilationUnit; import com.defano.wyldcard.runtime.interpreter.Interpreter; import com.defano.wyldcard.util.ThreadUtils; import com.defano.wyldcard.window.WindowBuilder; import com.defano.wyldcard.window.layouts.ButtonPropertyEditor; import com.defano.wyldcard.window.layouts.FieldPropertyEditor; import com.defano.wyldcard.window.layouts.ScriptEditor; import com.defano.hypertalk.ast.expressions.parts.LiteralPartExp; import com.defano.hypertalk.ast.model.*; import com.defano.hypertalk.ast.model.specifiers.CompositePartSpecifier; import com.defano.hypertalk.ast.model.specifiers.PartIdSpecifier; import com.defano.hypertalk.ast.model.specifiers.PartSpecifier; import com.defano.hypertalk.exception.HtException; import com.defano.hypertalk.exception.HtSemanticException; import javax.annotation.PostConstruct; import javax.swing.*; import java.awt.*; import java.util.ArrayList; import java.util.List; import java.util.concurrent.atomic.AtomicReference; /** * A base model object for all HyperCard "parts" that Defines properties common to all part objects. */ public abstract class PartModel extends WyldCardPropertiesModel implements Messagable { public static final String PROP_SCRIPT = "script"; public static final String PROP_ID = "id"; public static final String PROP_NUMBER = "number"; public static final String PROP_NAME = "name"; public static final String PROP_LEFT = "left"; public static final String PROP_TOP = "top"; public static final String PROP_RIGHT = "right"; public static final String PROP_BOTTOM = "bottom"; public static final String PROP_WIDTH = "width"; public static final String PROP_HEIGHT = "height"; public static final String PROP_RECT = "rect"; public static final String PROP_RECTANGLE = "rectangle"; public static final String PROP_TOPLEFT = "topleft"; public static final String PROP_BOTTOMRIGHT = "bottomright"; public static final String PROP_BOTRIGHT = "botright"; public static final String PROP_VISIBLE = "visible"; public static final String PROP_LOC = "loc"; public static final String PROP_LOCATION = "location"; public static final String PROP_CONTENTS = "contents"; public static final String PROP_SCRIPTTEXT = "scripttext"; public static final String PROP_BREAKPOINTS = "breakpoints"; public static final String PROP_CHECKPOINTS = "checkpoints"; private final PartType type; private Owner owner; private int scriptEditorCaretPosition; private transient PartModel parentPartModel; private transient Script script; public PartModel(PartType type, Owner owner, PartModel parentPartModel) { super(); this.type = type; this.owner = owner; this.parentPartModel = parentPartModel; newProperty(PROP_VISIBLE, new Value(true), false); newProperty(PROP_SCRIPTTEXT, new Value(), false); newProperty(PROP_BREAKPOINTS, new Value(), false); initialize(); } /** * Recursively re-establish the parent-child part model relationship. Sets the value returned by * {@link #getParentPartModel()} to the given part model and causes this model to invoke this method on all its * children. 
* <p> * The relationship between a parent and it's child parts are persistent when serialized, but the reverse * relationship (between child and parent) is transient. This is a side effect of the serialization engine being * unable to deal with cycles in the model object graph (a child cannot depend on a parent that also depends on * it.). Thus, as a workaround, we programmatically re-establish the child-to-parent relationship after the stack * has completed deserializing from JSON. * * @param parentPartModel The {@link PartModel} of the parent of this part. Null for models that do not have a * parent part (i.e., stacks and the message box). */ public abstract void relinkParentPartModel(PartModel parentPartModel); @PostConstruct @Override public void initialize() { super.initialize(); // Convert rectangle (consisting of top left and bottom right coordinates) into top, left, height and width newComputedSetterProperty(PROP_RECT, (context, model, propertyName, value) -> { if (value.isRect()) { model.setKnownProperty(context, PROP_LEFT, value.getItemAt(context, 0)); model.setKnownProperty(context, PROP_TOP, value.getItemAt(context, 1)); model.setKnownProperty(context, PROP_HEIGHT, new Value(value.getItemAt(context, 3).longValue() - value.getItemAt(context, 1).longValue())); model.setKnownProperty(context, PROP_WIDTH, new Value(value.getItemAt(context, 2).longValue() - value.getItemAt(context, 0).longValue())); } else { throw new HtSemanticException("Expected a rectangle, but got " + value.toString()); } }); newComputedGetterProperty(PROP_RECT, (context, model, propertyName) -> { Value left = model.getKnownProperty(context, PROP_LEFT); Value top = model.getKnownProperty(context, PROP_TOP); Value height = model.getKnownProperty(context, PROP_HEIGHT); Value width = model.getKnownProperty(context, PROP_WIDTH); return new Value(left.integerValue(), top.integerValue(), left.integerValue() + width.integerValue(), top.integerValue() + height.integerValue()); }); newComputedGetterProperty(PROP_RIGHT, (context, model, propertyName) -> new Value(model.getKnownProperty(context, PROP_LEFT).integerValue() + model.getKnownProperty(context, PROP_WIDTH).integerValue()) ); newComputedSetterProperty(PROP_RIGHT, (context, model, propertyName, value) -> model.setKnownProperty(context, PROP_LEFT, new Value(value.integerValue() - model.getKnownProperty(context, PROP_WIDTH).integerValue())) ); newComputedGetterProperty(PROP_BOTTOM, (context, model, propertyName) -> new Value(model.getKnownProperty(context, PROP_TOP).integerValue() + model.getKnownProperty(context, PROP_HEIGHT).integerValue()) ); newComputedSetterProperty(PROP_BOTTOM, (context, model, propertyName, value) -> model.setKnownProperty(context, PROP_TOP, new Value(value.integerValue() - model.getKnownProperty(context, PROP_HEIGHT).integerValue())) ); newComputedSetterProperty(PROP_TOPLEFT, (context, model, propertyName, value) -> { if (value.isPoint()) { model.setKnownProperty(context, PROP_LEFT, value.getItemAt(context, 0)); model.setKnownProperty(context, PROP_TOP, value.getItemAt(context, 1)); } else { throw new HtSemanticException("Expected a point, but got " + value.toString()); } }); newComputedGetterProperty(PROP_TOPLEFT, (context, model, propertyName) -> new Value(model.getKnownProperty(context, PROP_LEFT).integerValue(), model.getKnownProperty(context, PROP_TOP).integerValue()) ); newComputedSetterProperty(PROP_BOTTOMRIGHT, (context, model, propertyName, value) -> { if (value.isPoint()) { model.setKnownProperty(context, PROP_LEFT, new 
Value(value.getItemAt(context, 0).longValue() - model.getKnownProperty(context, PROP_WIDTH).longValue())); model.setKnownProperty(context, PROP_TOP, new Value(value.getItemAt(context, 1).longValue() - model.getKnownProperty(context, PROP_HEIGHT).longValue())); } else { throw new HtSemanticException("Expected a point, but got " + value.toString()); } }); newPropertyAlias(PROP_BOTTOMRIGHT, PROP_BOTRIGHT); newComputedGetterProperty(PROP_BOTTOMRIGHT, (context, model, propertyName) -> new Value( model.getKnownProperty(context, PROP_LEFT).integerValue() + model.getKnownProperty(context, PROP_WIDTH).integerValue(), model.getKnownProperty(context, PROP_TOP).integerValue() + model.getKnownProperty(context, PROP_HEIGHT).integerValue() ) ); newPropertyAlias(PROP_LOCATION, PROP_LOC); newComputedGetterProperty(PROP_LOCATION, (context, model, propertyName) -> new Value( model.getKnownProperty(context, PROP_LEFT).integerValue() + model.getKnownProperty(context, PROP_WIDTH).integerValue() / 2, model.getKnownProperty(context, PROP_TOP).integerValue() + model.getKnownProperty(context, PROP_HEIGHT).integerValue() / 2 ) ); newComputedSetterProperty(PROP_LOCATION, (context, model, propertyName, value) -> { if (value.isPoint()) { model.setKnownProperty(context, PROP_LEFT, new Value(value.getItemAt(context, 0).longValue() - model.getKnownProperty(context, PROP_WIDTH).longValue() / 2)); model.setKnownProperty(context, PROP_TOP, new Value(value.getItemAt(context, 1).longValue() - model.getKnownProperty(context, PROP_HEIGHT).longValue() / 2)); } else { throw new HtSemanticException("Expected a point, but got " + value.toString()); } }); newPropertyAlias(PROP_RECT, PROP_RECTANGLE); // When breakpoints change, automatically apply them to the script addPropertyChangedObserver((context, model, property, oldValue, newValue) -> { if (property.equalsIgnoreCase(PROP_BREAKPOINTS)) { getScript(context).applyBreakpoints(getBreakpoints()); } }); newPropertyAlias(PROP_BREAKPOINTS, PROP_CHECKPOINTS); newComputedGetterProperty(PROP_SCRIPT, (context, model, propertyName) -> model.getKnownProperty(context, PROP_SCRIPTTEXT)); newComputedSetterProperty(PROP_SCRIPT, (context, model, propertyName, value) -> { model.setKnownProperty(context, PROP_SCRIPTTEXT, value); precompile(context); }); precompile(new ExecutionContext()); } /** * Gets the "default" adjective associated with the given property. That is, the length adjective that is * automatically applied when referring to a property without explicitly specifying an adjective. * <p> * For example, 'the name of btn 1' actually refers to 'the abbreviated name' property. * * @param propertyName The name of the property whose default adjective should be returned. * @return The default adjective. */ public LengthAdjective getDefaultAdjectiveForProperty(String propertyName) { return LengthAdjective.DEFAULT; } /** * Determines if a length adjective may be applied to the given property of this part (i.e., 'the long name'). * * @param propertyName The name of the property * @return True if the property supports length adjectives for this part, false otherwise. 
*/ public boolean isAdjectiveSupportedProperty(String propertyName) { return false; } public Rectangle getRect(ExecutionContext context) { try { Rectangle rect = new Rectangle(); rect.x = getProperty(context, PROP_LEFT).integerValue(); rect.y = getProperty(context, PROP_TOP).integerValue(); rect.height = getProperty(context, PROP_HEIGHT).integerValue(); rect.width = getProperty(context, PROP_WIDTH).integerValue(); return rect; } catch (Exception e) { throw new RuntimeException("Couldn't get geometry for part model.", e); } } public PartType getType() { return type; } private void precompile(ExecutionContext context) { if (hasProperty(PROP_SCRIPTTEXT)) { Interpreter.asyncCompile(CompilationUnit.SCRIPT, getKnownProperty(context, PROP_SCRIPTTEXT).toString(), (scriptText, compiledScript, generatedError) -> { if (generatedError == null) { script = (Script) compiledScript; script.applyBreakpoints(getBreakpoints()); } }); } } public Script getScript(ExecutionContext context) { if (script == null) { try { script = Interpreter.blockingCompile(CompilationUnit.SCRIPT, getScriptText(context)); script.applyBreakpoints(getBreakpoints()); } catch (HtException e) { e.printStackTrace(); } } return script; } public String getScriptText(ExecutionContext context) { return getKnownProperty(context, PROP_SCRIPTTEXT).toString(); } public Owner getOwner() { return owner; } public void setOwner(Owner owner) { this.owner = owner; } public CardLayer getLayer() { switch (owner) { case BACKGROUND: return CardLayer.BACKGROUND_PARTS; case CARD: return CardLayer.CARD_PARTS; default: throw new IllegalStateException("Bug! Not a card layered part: " + owner); } } public int getId(ExecutionContext context) { return getKnownProperty(context, PROP_ID).integerValue(); } public String getName(ExecutionContext context) { return getKnownProperty(context, PROP_NAME).toString(); } public PartSpecifier getPartSpecifier(ExecutionContext context) { return new PartIdSpecifier(getOwner(), getType(), getId(context)); } /** * Gets the value of this part; thus, reads the value of the property returned by {@link #getValueProperty()}. * * @param context The execution context. * @return The value of this property */ public Value getValue(ExecutionContext context) { return getKnownProperty(context, getValueProperty()); } /** * Sets the value of this part; thus, sets the value of the property returned by {@link #getValueProperty()}. * * @param value The value of this part. * @param context The execution context. */ public void setValue(Value value, ExecutionContext context) { try { setProperty(context, getValueProperty(), value); } catch (Exception e) { throw new RuntimeException(e); } } /** * Gets the name of the property that is read or written when a value is placed into the part i.e., ('put "Hello" * into card field 1'). Typically the 'contents' property, but other parts (like fields) may override to provide * a different property (e.g., fields use the 'text' property as their contents). * * @return The name of the part's value property */ public String getValueProperty() { return PROP_CONTENTS; } /** * Gets a part specifier that refers to this part in the stack. If this part is a button or a field, the part * specifier is a {@link CompositePartSpecifier} referring to the button or field on a specific card or background. * * @param context The execution context. * @return A part specifier referring to this part. 
*/ public PartSpecifier getMe(ExecutionContext context) { PartModel parent = getParentPartModel(); PartSpecifier localPart = new PartIdSpecifier(getOwner(), getType(), getId(context)); if (getType() == PartType.BUTTON || getType() == PartType.FIELD) { return new CompositePartSpecifier(context, localPart, new LiteralPartExp(null, parent.getMe(context))); } else { return localPart; } } public PartModel getParentPartModel() { return parentPartModel; } public void setParentPartModel(PartModel parentPartModel) { this.parentPartModel = parentPartModel; } public StackModel getParentStackModel() { if (this instanceof StackModel) { return (StackModel) this; } else if (getParentPartModel() != null) { return getParentPartModel().getParentStackModel(); } else { return null; } } public int getScriptEditorCaretPosition() { return scriptEditorCaretPosition; } public void setScriptEditorCaretPosition(int scriptEditorCaretPosition) { this.scriptEditorCaretPosition = scriptEditorCaretPosition; } public List<Integer> getBreakpoints() { ExecutionContext context = new ExecutionContext(); ArrayList<Integer> breakpoints = new ArrayList<>(); List<Value> breakpointValues = getKnownProperty(context, PROP_BREAKPOINTS).getItems(context); for (Value thisBreakpoint : breakpointValues) { breakpoints.add(thisBreakpoint.integerValue()); } return breakpoints; } /** * Show the script editor for this part. * <p> * Typically invoked when the user has selected and double-control-clicked the part, chosen the appropriate * command from the Objects menu, or invoked the 'edit script of' command. * * @param context The execution context */ public ScriptEditor editScript(ExecutionContext context) { return editScript(context, null); } /** * Show the script editor for this part, positioning the caret in the editor field accordingly. * <p> * Typically invoked when the user has selected and double-control-clicked the part, chosen the appropriate * command from the Objects menu, or invoked the 'edit script of' command. * * @param context The execution context * @param caretPosition The location where the caret should be positioned in the text or null to use the last saved */ public ScriptEditor editScript(ExecutionContext context, Integer caretPosition) { ScriptEditor editor = WyldCard.getInstance().getWindowManager().findScriptEditorForPart(this); // Existing script editor for this part; show it if (editor != null) { SwingUtilities.invokeLater(() -> { editor.setVisible(true); editor.requestFocus(); }); return editor; } // Create new editor else { AtomicReference<ScriptEditor> newEditor = new AtomicReference<>(); ThreadUtils.invokeAndWaitAsNeeded(() -> { newEditor.set(new ScriptEditor()); if (caretPosition != null) { setScriptEditorCaretPosition(caretPosition); } new WindowBuilder<>(newEditor.get()) .withModel(this) .withTitle("Script of " + getName(context)) .ownsMenubar() .resizeable(true) .withLocationStaggeredOver(WyldCard.getInstance().getWindowManager().getWindowForStack(context, context.getCurrentStack()).getWindowPanel()) .build(); }); SwingUtilities.invokeLater(() -> newEditor.get().requestFocus()); return newEditor.get(); } } /** * Show the property editor for this part. * <p> * Typically invoked when the user has selected and double-clicked the part, or chosen the appropriate command from * the Objects menu. * * @param context The execution context. 
*/ public void editProperties(ExecutionContext context) { ThreadUtils.invokeAndWaitAsNeeded(() -> { if (getType() == PartType.FIELD) { new WindowBuilder<>(new FieldPropertyEditor()) .withModel((FieldModel) this) .asModal() .withTitle(getName(context)) .withLocationCenteredOver(WyldCard.getInstance().getWindowManager().getWindowForStack(context, context.getCurrentStack()).getWindowPanel()) .resizeable(false) .build(); } else { new WindowBuilder<>(new ButtonPropertyEditor()) .withModel((ButtonModel) this) .asModal() .withTitle(getName(context)) .withLocationCenteredOver(WyldCard.getInstance().getWindowManager().getWindowForStack(context, context.getCurrentStack()).getWindowPanel()) .resizeable(false) .build(); } }); } }
package net.somethingdreadful.MAL.api.BaseModels.AnimeManga;

import android.app.Activity;
import android.database.Cursor;

import com.google.gson.annotations.SerializedName;

import net.somethingdreadful.MAL.ContentManager;
import net.somethingdreadful.MAL.PrefManager;
import net.somethingdreadful.MAL.R;
import net.somethingdreadful.MAL.account.AccountService;
import net.somethingdreadful.MAL.api.MALApi;
import net.somethingdreadful.MAL.api.MALModels.RecordStub;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.List;

import lombok.Getter;
import lombok.Setter;

public class Manga extends GenericRecord implements Serializable {

    /**
     * Total number of chapters of the manga.
     * <p/>
     * This value is the number of chapters of the manga, or null if unknown.
     */
    @Setter
    @Getter
    private int chapters;

    /**
     * Total number of volumes of the manga.
     * <p/>
     * This value is the number of volumes of the manga, or null if unknown.
     */
    @Setter
    @Getter
    private int volumes;

    /**
     * A list of anime adaptations of this manga (or conversely, anime from which this manga is adapted)
     */
    @Setter
    @Getter
    @SerializedName("anime_adaptations")
    private ArrayList<RecordStub> animeAdaptations;

    /**
     * A list of related manga
     */
    @Setter
    @Getter
    @SerializedName("related_manga")
    private ArrayList<RecordStub> relatedManga;

    /**
     * User's read status of the manga
     * <p/>
     * This is a string that is one of: reading, completed, on-hold, dropped, plan to read
     */
    @Getter
    @SerializedName("read_status")
    private String readStatus;

    /**
     * Number of chapters already read by the user
     */
    @Getter
    @SerializedName("chapters_read")
    private int chaptersRead;

    /**
     * Number of volumes already read by the user.
     */
    @Getter
    @SerializedName("volumes_read")
    private int volumesRead;

    /**
     * The date the user started reading the title
     */
    @Getter
    @SerializedName("reading_start")
    private String readingStart;

    /**
     * The date the user finished reading the title
     */
    @Getter
    @SerializedName("reading_end")
    private String readingEnd;

    /**
     * Set if the user is re-reading the manga
     */
    private boolean rereading;

    /**
     * The number of times the user has re-read the title. (Does not include the first time.)
     */
    @Getter
    @SerializedName("reread_count")
    private int rereadCount;

    /**
     * How much value the user thinks there is in re-reading the series.
*/ @Getter @SerializedName("reread_value") private int rereadValue; public void setAllDirty() { addDirtyField("readStatus"); addDirtyField("chaptersRead"); addDirtyField("volumesRead"); addDirtyField("readingStart"); addDirtyField("readingEnd"); addDirtyField("chapDownloaded"); addDirtyField("rereading"); addDirtyField("rereadCount"); addDirtyField("rereadValue"); } public void setReadStatus(String readStatus) { if (this.readStatus == null || !this.readStatus.equals(readStatus)) { this.readStatus = readStatus; if (!fromCursor) { addDirtyField("readStatus"); checkProgress(); } } } public void setChaptersRead(int chaptersRead) { if (this.chaptersRead != chaptersRead) { this.chaptersRead = chaptersRead; if (!fromCursor) { addDirtyField("chaptersRead"); checkProgress(); } } } public void setVolumesRead(int volumesRead) { if (this.volumesRead != volumesRead) { this.volumesRead = volumesRead; if (!fromCursor) { addDirtyField("volumesRead"); checkProgress(); } } } public void setReadingStart(String readingStart) { if (!fromCursor) addDirtyField("readingStart"); this.readingStart = readingStart; } public void setReadingEnd(String readingEnd) { if (!fromCursor) addDirtyField("readingEnd"); this.readingEnd = readingEnd; } private void setRereading(boolean rereading) { if (!fromCursor) addDirtyField("rereading"); this.rereading = rereading; } public void setRereadCount(int rereadCount) { if (!fromCursor) addDirtyField("rereadCount"); this.rereadCount = rereadCount; } public void setRereadValue(int rereadValue) { if (!fromCursor) addDirtyField("rereadValue"); this.rereadValue = rereadValue; } private void checkProgress() { boolean completed = false; boolean started = false; // Automatically set the status on completed if (getChapters() > 0 && getChaptersRead() == getChapters() && !getDirty().contains("readStatus")) { setReadStatus(GenericRecord.STATUS_COMPLETED); } // Automatically set the max chapters on completed if (getReadStatus() != null && getChapters() > 0 && getReadStatus().equals(GenericRecord.STATUS_COMPLETED) && !getDirty().contains("chaptersRead")) { setChaptersRead(getChapters()); if (getVolumes() > 0) setVolumesRead(getVolumes()); completed = true; } if (completed) { // Automatically set the progress when the record has been finished if (getRereading() || (getRereadCount() > 0)) { setRereadCount(getRereadCount() + 1); setRereading(false); } // Automatically set the end date on completed if it is empty if ((getReadingEnd() == null || getReadingEnd().equals("") || getReadingEnd().equals("0-00-00")) && PrefManager.getAutoDateSetter()) { final Calendar c = Calendar.getInstance(); int year = c.get(Calendar.YEAR); int month = c.get(Calendar.MONTH); int day = c.get(Calendar.DAY_OF_MONTH); setReadingEnd(year + "-" + month + "-" + day); } } if (getReadStatus() != null && getReadStatus().equals(GenericRecord.STATUS_READING) && getChaptersRead() == 0 && !getDirty().contains("readStatus")) { started = true; } // Automatically set the progress when the chapter 1 has been read if (getReadStatus() != null && getReadStatus().equals(GenericRecord.STATUS_PLANTOREAD) && getChaptersRead() == 1 && !getDirty().contains("readStatus")) { setReadStatus(GenericRecord.STATUS_READING); started = true; } // Automatically set the start date on start if it is empty if ((getReadingStart() == null || getReadingStart().equals("") || getReadingStart().equals("0-00-00")) && PrefManager.getAutoDateSetter() && started) { final Calendar c = Calendar.getInstance(); int year = c.get(Calendar.YEAR); int month = 
c.get(Calendar.MONTH) + 1; int day = c.get(Calendar.DAY_OF_MONTH); setReadingStart(year + "-" + month + "-" + day); } } /** * Get the manga status translations */ public String getStatusString(Activity activity) { int array; String[] fixedArray; if (AccountService.isMAL()) { array = R.array.mangaStatus_MAL; fixedArray = activity.getResources().getStringArray(R.array.mangaFixedStatus_MAL); } else { array = R.array.mangaStatus_AL; fixedArray = activity.getResources().getStringArray(R.array.mangaFixedStatus_AL); } return getStringFromResourceArray(activity, array, getStatusInt(fixedArray)); } public String getUserStatusString(Activity activity) { return getStringFromResourceArray(activity, R.array.mediaStatus_User, getUserStatusInt(getReadStatus())); } public void setReadStatus(int id) { setReadStatus(ContentManager.listSortFromInt(id, MALApi.ListType.MANGA)); } public int getStatusInt(String[] fixedStatus) { return Arrays.asList(fixedStatus).indexOf(getStatus()); } public int getProgress(boolean useSecondaryAmount) { return useSecondaryAmount ? getVolumesRead() : getChaptersRead(); } public void setProgress(boolean useSecondaryAmount, int progress) { if (useSecondaryAmount) setVolumesRead(progress); else setChaptersRead(progress); } public boolean getRereading() { return rereading; } public void setRereading(int cv) { rereading = cv == 1; } public static Manga fromCursor(Cursor cursor) { List<String> columnNames = Arrays.asList(cursor.getColumnNames()); Manga result = (Manga) GenericRecord.fromCursor(new Manga(), cursor, columnNames); result.setChapters(cursor.getInt(columnNames.indexOf("chapters"))); result.setVolumes(cursor.getInt(columnNames.indexOf("volumes"))); result.setReadStatus(cursor.getString(columnNames.indexOf("readStatus"))); result.setChaptersRead(cursor.getInt(columnNames.indexOf("chaptersRead"))); result.setVolumesRead(cursor.getInt(columnNames.indexOf("volumesRead"))); result.setReadingStart(cursor.getString(columnNames.indexOf("readingStart"))); result.setReadingEnd(cursor.getString(columnNames.indexOf("readingEnd"))); result.setRereading(cursor.getInt(columnNames.indexOf("rereading"))); result.setRereadCount(cursor.getInt(columnNames.indexOf("rereadCount"))); result.setRereadValue(cursor.getInt(columnNames.indexOf("rereadValue"))); return result; } }
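/*
 * Illustrative usage sketch (not part of the original app sources): shows how the
 * checkProgress() logic in Manga reacts to progress updates. It assumes it runs inside
 * the app (or a Robolectric test) so that PrefManager is backed by real Android
 * preferences; the STATUS_* constants are the GenericRecord ones referenced by Manga itself.
 */
package net.somethingdreadful.MAL.api.BaseModels.AnimeManga;

public class MangaProgressExample {

    /** Marking the last chapter as read auto-flips the status to "completed". */
    static void finishByChapterCount() {
        Manga manga = new Manga();
        manga.setChapters(12);
        manga.setChaptersRead(12);          // triggers checkProgress()
        // manga.getReadStatus() is now GenericRecord.STATUS_COMPLETED
    }

    /** Marking the record completed back-fills the chapter and volume progress. */
    static void finishByStatus() {
        Manga manga = new Manga();
        manga.setChapters(12);
        manga.setVolumes(2);
        manga.setReadStatus(GenericRecord.STATUS_COMPLETED);
        // chaptersRead == 12, volumesRead == 2, and the end date is stamped with
        // today's date when the auto-date-setter preference is enabled.
    }
}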
package org.dtk.util; import java.io.BufferedInputStream; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.DataInputStream; import java.io.File; import java.io.FileFilter; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.util.Iterator; import java.util.Map; import java.util.Set; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.zip.GZIPOutputStream; import java.util.zip.ZipEntry; import java.util.zip.ZipInputStream; import java.util.zip.ZipOutputStream; import javax.ws.rs.WebApplicationException; import javax.ws.rs.core.StreamingOutput; public class FileUtil { public static void writeToZipFile(String path, Map<String, byte[]> files) throws IOException { File file = new File(path); //Make sure destination dir exists. File parentDir = file.getParentFile(); if(!parentDir.exists()){ if(!parentDir.mkdirs()){ throw new IOException("Could not create directory: " + parentDir.getAbsolutePath()); } } ZipOutputStream out = new ZipOutputStream(new FileOutputStream(file)); Iterator<String> keys = files.keySet().iterator(); while (keys.hasNext()) { String filename = keys.next(); byte[] contents = files.get(filename); out.putNextEntry(new ZipEntry(filename)); out.write(contents); out.closeEntry(); } // Complete the ZIP file out.close(); } public static String createTemporaryPackage(Map<String, String> packageModules) { String temporaryPackageId = null; Set<String> modulePaths = packageModules.keySet(); try { File temporaryPackageLocation = createTempDirectory(); for(String modulePath : modulePaths) { File moduleLocation = new File(temporaryPackageLocation, modulePath); writeToFile(moduleLocation.getAbsolutePath(), packageModules.get(modulePath), null, false); } temporaryPackageId = temporaryPackageLocation.getName(); } catch (IOException e) { } return temporaryPackageId; } public static void writeToFile(String path, String contents, String encoding, boolean useGzip) throws IOException { // summary: writes a file if (encoding == null) { encoding = "utf-8"; } File file = new File(path); //Make sure destination dir exists. File parentDir = file.getParentFile(); if(!parentDir.exists()){ if(!parentDir.mkdirs()){ throw new IOException("Could not create directory: " + parentDir.getAbsolutePath()); } } OutputStream outStream = new FileOutputStream(file); if (useGzip) { outStream = new GZIPOutputStream(outStream); } else { } BufferedWriter output = new java.io.BufferedWriter(new OutputStreamWriter(outStream, encoding)); try { output.append(contents); } finally { output.close(); } } /** * Generate a temporary file path by creating a new temporary file and removing. * * @param tempBuildPrefix - Temporary file prefix * @param tempBuildSuffix - Temporary file suffix * @return Temporary file path * @throws IOException - Couldn't create or remove temporary file */ public static String generateTemporaryFilePath(String tempPrefix, String tempSuffix) throws IOException { // Create temporary file path to hold the build File cacheFile = File.createTempFile(tempPrefix, tempSuffix); String cacheFilePath = cacheFile.getAbsolutePath(); // Remove temporary file until the build is completed. The existence of the file is // used to tell the polling request when the build has completed. 
boolean confirmDelete = cacheFile.delete(); if (!confirmDelete) { throw new IOException("Error removing temporary file, " + cacheFilePath); } return cacheFilePath; } public static File createTempDirectory() throws IOException { final File temp = File.createTempFile("dojo_web_builder", null); if(!(temp.delete())) { throw new IOException("Could not delete temp file: " + temp.getAbsolutePath()); } if(!(temp.mkdir())) { throw new IOException("Could not create temp directory: " + temp.getAbsolutePath()); } return (temp); } public static File[] findAllDirectories(File parentDirectory) { File[] childDirectories = null; // Sanity check argument, mustn't be null and must be a directory // to contain child files. if (parentDirectory != null && parentDirectory.isDirectory()) { childDirectories = parentDirectory.listFiles(new FileFilter() { @Override public boolean accept(File pathname) { return pathname.isDirectory(); } }); } return childDirectories; } public static File[] findAllPathSuffixMatches(File parentDirectory, final String pathSuffix) { File[] childDirectories = null; // Sanity check argument, mustn't be null and must be a directory // to contain child files. if (parentDirectory != null && parentDirectory.isDirectory()) { childDirectories = parentDirectory.listFiles(new FileFilter() { @Override public boolean accept(File pathname) { return pathname.getName().endsWith(pathSuffix); } }); } return childDirectories; } public static boolean deleteDirectory(File path) { if (path.exists()) { File[] files = path.listFiles(); for(int i=0; i<files.length; i++) { if(files[i].isDirectory()) { deleteDirectory(files[i]); } else { files[i].delete(); } } } return( path.delete() ); } public static String inflateTemporaryZipFile(InputStream is) throws IOException { File temporaryDir = FileUtil.createTempDirectory(); String temporaryUserAppPath = temporaryDir.getAbsolutePath(); boolean success = FileUtil.inflateZipFile(temporaryUserAppPath, is); if (success) { return temporaryUserAppPath; } else { // Clean up temporary directory. FileUtil.deleteDirectory(temporaryDir); return null; } } // TODO: Must handle errors properly at this stage. public static boolean inflateZipFile(String baseDirectory, InputStream is) { boolean success = false; ZipInputStream zis = new ZipInputStream(is); ZipEntry zipEntry; try { zipEntry = zis.getNextEntry(); while (zipEntry != null) { if (zipEntry.isDirectory()) { File dirFile = new File(baseDirectory, zipEntry.getName()); dirFile.mkdir(); } else { FileOutputStream fout = new FileOutputStream(new File(baseDirectory, zipEntry.getName())); for (int c = zis.read(); c != -1; c = zis.read()) { fout.write(c); } fout.close(); } zis.closeEntry(); zipEntry = zis.getNextEntry(); // Ensure we have successfully inflated at least one entry, removes. success = true; } zis.close(); } catch (IOException e) { e.printStackTrace(); success = false; } return success; } /** * Return streaming output instance for a given file path. * * @param filename - File to stream * @param removeOnFinish - Delete file after streaming? * @return File streaming output. */ public static StreamingOutput streamingFileOutput(final String filename, final boolean removeOnFinish) { return new StreamingOutput() { public void write(OutputStream output) throws IOException, WebApplicationException { // Read file, write to output buffer and close stream... 
File file = new File(filename); BufferedInputStream in = new java.io.BufferedInputStream( new DataInputStream(new FileInputStream(file))); byte[] bytes = new byte[64000]; int bytesRead = in.read(bytes); while (bytesRead != -1) { output.write(bytes, 0, bytesRead); bytesRead = in.read(bytes); } in.close(); // Remove file after streaming completes? if (removeOnFinish) { file.delete(); } } }; } /** * Substitute any environment variables in the file path * for actual values. * * @param filePath - String that may contain environment variables * @return String Resolved file path */ public static String resolveEnvironmentVariables(String filePath) { String resolvedPath = null; // Can't resolve empty values! if (filePath != null) { // Position index to allow copying of non-matching path sections int lastMatch = 0; StringBuilder pathBuilder = new StringBuilder(); Pattern pattern = Pattern.compile("\\%([^%]+)\\%"); Matcher matcher = pattern.matcher(filePath); while(matcher.find()) { // Add all file path sections after last match but before current match. pathBuilder.append(filePath.substring(lastMatch, matcher.start())); // Add resolved environment variable path String envVarName = filePath.substring(matcher.start(1), matcher.end(1)); pathBuilder.append(System.getenv(envVarName)); // Advance the index past the end of the environment variable. lastMatch = matcher.end(); } // Finally, add all remaining path sections & create the full resolved path pathBuilder.append(filePath.substring(lastMatch, filePath.length())); resolvedPath = pathBuilder.toString(); } return resolvedPath; } }
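/*
 * Illustrative sketch (not part of the original sources): a minimal caller for the
 * static helpers in FileUtil above. The paths and contents are made-up example values.
 */
package org.dtk.util;

import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;

public class FileUtilExample {
    public static void main(String[] args) throws IOException {
        // Write a UTF-8 text file, creating parent directories as needed.
        File workDir = FileUtil.createTempDirectory();
        String textPath = new File(workDir, "notes/readme.txt").getAbsolutePath();
        FileUtil.writeToFile(textPath, "hello dojo", null, false);

        // Bundle in-memory contents into a zip archive.
        Map<String, byte[]> entries = new HashMap<String, byte[]>();
        entries.put("app/main.js", "console.log('hi');".getBytes(StandardCharsets.UTF_8));
        FileUtil.writeToZipFile(new File(workDir, "bundle.zip").getAbsolutePath(), entries);

        // Expand %VAR% style environment variables in a configured path.
        System.out.println(FileUtil.resolveEnvironmentVariables("%HOME%/builds"));

        // Remove the temporary directory and everything under it.
        FileUtil.deleteDirectory(workDir);
    }
}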
/** * <copyright> * </copyright> * * $Id$ */ package org.eclipse.bpel4chor.model.pbd.impl; import org.eclipse.bpel4chor.model.pbd.Activity; import org.eclipse.bpel4chor.model.pbd.LoopIteration; import org.eclipse.bpel4chor.model.pbd.PbdPackage; import org.eclipse.emf.common.notify.Notification; import org.eclipse.emf.common.notify.NotificationChain; import org.eclipse.emf.ecore.EClass; import org.eclipse.emf.ecore.InternalEObject; import org.eclipse.emf.ecore.impl.ENotificationImpl; import org.eclipse.emf.ecore.impl.EObjectImpl; /** * <!-- begin-user-doc --> * An implementation of the model object '<em><b>Loop Iteration</b></em>'. * <!-- end-user-doc --> * <p> * The following features are implemented: * <ul> * <li>{@link org.eclipse.bpel4chor.model.pbd.impl.LoopIterationImpl#getLoopIterationCounter <em>Loop Iteration Counter</em>}</li> * <li>{@link org.eclipse.bpel4chor.model.pbd.impl.LoopIterationImpl#getActivity <em>Activity</em>}</li> * </ul> * </p> * * @generated */ public class LoopIterationImpl extends EObjectImpl implements LoopIteration { /** * The default value of the '{@link #getLoopIterationCounter() <em>Loop Iteration Counter</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getLoopIterationCounter() * @generated * @ordered */ protected static final int LOOP_ITERATION_COUNTER_EDEFAULT = 0; /** * The cached value of the '{@link #getLoopIterationCounter() <em>Loop Iteration Counter</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getLoopIterationCounter() * @generated * @ordered */ protected int loopIterationCounter = LOOP_ITERATION_COUNTER_EDEFAULT; /** * The cached value of the '{@link #getActivity() <em>Activity</em>}' containment reference. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getActivity() * @generated * @ordered */ protected Activity activity; /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ protected LoopIterationImpl() { super(); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override protected EClass eStaticClass() { return PbdPackage.Literals.LOOP_ITERATION; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public int getLoopIterationCounter() { return loopIterationCounter; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public void setLoopIterationCounter(int newLoopIterationCounter) { int oldLoopIterationCounter = loopIterationCounter; loopIterationCounter = newLoopIterationCounter; if (eNotificationRequired()) eNotify(new ENotificationImpl(this, Notification.SET, PbdPackage.LOOP_ITERATION__LOOP_ITERATION_COUNTER, oldLoopIterationCounter, loopIterationCounter)); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public Activity getActivity() { return activity; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public NotificationChain basicSetActivity(Activity newActivity, NotificationChain msgs) { Activity oldActivity = activity; activity = newActivity; if (eNotificationRequired()) { ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, PbdPackage.LOOP_ITERATION__ACTIVITY, oldActivity, newActivity); if (msgs == null) msgs = notification; else msgs.add(notification); } return msgs; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public void setActivity(Activity newActivity) { if (newActivity != activity) { NotificationChain msgs = null; if (activity != null) msgs = ((InternalEObject)activity).eInverseRemove(this, 
EOPPOSITE_FEATURE_BASE - PbdPackage.LOOP_ITERATION__ACTIVITY, null, msgs); if (newActivity != null) msgs = ((InternalEObject)newActivity).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - PbdPackage.LOOP_ITERATION__ACTIVITY, null, msgs); msgs = basicSetActivity(newActivity, msgs); if (msgs != null) msgs.dispatch(); } else if (eNotificationRequired()) eNotify(new ENotificationImpl(this, Notification.SET, PbdPackage.LOOP_ITERATION__ACTIVITY, newActivity, newActivity)); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) { switch (featureID) { case PbdPackage.LOOP_ITERATION__ACTIVITY: return basicSetActivity(null, msgs); } return super.eInverseRemove(otherEnd, featureID, msgs); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public Object eGet(int featureID, boolean resolve, boolean coreType) { switch (featureID) { case PbdPackage.LOOP_ITERATION__LOOP_ITERATION_COUNTER: return getLoopIterationCounter(); case PbdPackage.LOOP_ITERATION__ACTIVITY: return getActivity(); } return super.eGet(featureID, resolve, coreType); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public void eSet(int featureID, Object newValue) { switch (featureID) { case PbdPackage.LOOP_ITERATION__LOOP_ITERATION_COUNTER: setLoopIterationCounter((Integer)newValue); return; case PbdPackage.LOOP_ITERATION__ACTIVITY: setActivity((Activity)newValue); return; } super.eSet(featureID, newValue); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public void eUnset(int featureID) { switch (featureID) { case PbdPackage.LOOP_ITERATION__LOOP_ITERATION_COUNTER: setLoopIterationCounter(LOOP_ITERATION_COUNTER_EDEFAULT); return; case PbdPackage.LOOP_ITERATION__ACTIVITY: setActivity((Activity)null); return; } super.eUnset(featureID); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public boolean eIsSet(int featureID) { switch (featureID) { case PbdPackage.LOOP_ITERATION__LOOP_ITERATION_COUNTER: return loopIterationCounter != LOOP_ITERATION_COUNTER_EDEFAULT; case PbdPackage.LOOP_ITERATION__ACTIVITY: return activity != null; } return super.eIsSet(featureID); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public String toString() { if (eIsProxy()) return super.toString(); StringBuffer result = new StringBuffer(super.toString()); result.append(" (loopIterationCounter: "); result.append(loopIterationCounter); result.append(')'); return result.toString(); } } //LoopIterationImpl
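/*
 * Illustrative sketch (not part of the generated sources): exercises the LoopIteration
 * model object above through both its plain setter and the reflective EMF API. The
 * PbdFactory used for construction is the factory EMF normally generates next to this
 * class; it is not shown here, so treat that call as an assumption.
 */
package org.eclipse.bpel4chor.model.pbd.impl;

import org.eclipse.bpel4chor.model.pbd.LoopIteration;
import org.eclipse.bpel4chor.model.pbd.PbdFactory;
import org.eclipse.bpel4chor.model.pbd.PbdPackage;

public class LoopIterationExample {
    public static void main(String[] args) {
        // Assumed generated factory; EMF model objects are normally created this way.
        LoopIteration iteration = PbdFactory.eINSTANCE.createLoopIteration();

        // Plain setter: fires a SET notification if any adapters are attached.
        iteration.setLoopIterationCounter(3);

        // The same feature read reflectively through eGet().
        Object counter = ((LoopIterationImpl) iteration)
                .eGet(PbdPackage.LOOP_ITERATION__LOOP_ITERATION_COUNTER, true, true);
        System.out.println(counter); // 3
    }
}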
package io.dropwizard.logging; import ch.qos.logback.classic.LoggerContext; import ch.qos.logback.classic.net.SyslogAppender; import ch.qos.logback.classic.spi.ILoggingEvent; import ch.qos.logback.core.Appender; import ch.qos.logback.core.Layout; import ch.qos.logback.core.net.SyslogConstants; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonTypeName; import javax.validation.constraints.Max; import javax.validation.constraints.Min; import javax.validation.constraints.NotNull; import java.lang.management.ManagementFactory; import java.util.Locale; import java.util.regex.Matcher; import java.util.regex.Pattern; /** * An {@link AppenderFactory} implementation which provides an appender that sends events to a * syslog server. * <p/> * <b>Configuration Parameters:</b> * <table> * <tr> * <td>Name</td> * <td>Default</td> * <td>Description</td> * </tr> * <tr> * <td>{@code host}</td> * <td>{@code localhost}</td> * <td>The hostname of the syslog server.</td> * </tr> * <tr> * <td>{@code port}</td> * <td>{@code 514}</td> * <td>The port on which the syslog server is listening.</td> * </tr> * <tr> * <td>{@code facility}</td> * <td>{@code local0}</td> * <td> * The syslog facility to use. Can be either {@code auth}, {@code authpriv}, * {@code daemon}, {@code cron}, {@code ftp}, {@code lpr}, {@code kern}, {@code mail}, * {@code news}, {@code syslog}, {@code user}, {@code uucp}, {@code local0}, * {@code local1}, {@code local2}, {@code local3}, {@code local4}, {@code local5}, * {@code local6}, or {@code local7}. * </td> * </tr> * <tr> * <td>{@code threshold}</td> * <td>{@code ALL}</td> * <td>The lowest level of events to write to the file.</td> * </tr> * <tr> * <td>{@code logFormat}</td> * <td>the default format</td> * <td> * The Logback pattern with which events will be formatted. See * <a href="http://logback.qos.ch/manual/layouts.html#conversionWord">the Logback documentation</a> * for details. * </td> * </tr> * </table> * * @see AbstractAppenderFactory */ @JsonTypeName("syslog") public class SyslogAppenderFactory extends AbstractAppenderFactory { public enum Facility { AUTH, AUTHPRIV, DAEMON, CRON, FTP, LPR, KERN, MAIL, NEWS, SYSLOG, USER, UUCP, LOCAL0, LOCAL1, LOCAL2, LOCAL3, LOCAL4, LOCAL5, LOCAL6, LOCAL7 } private static final String LOG_TOKEN_NAME = "%app"; private static final String LOG_TOKEN_PID = "%pid"; private static final Pattern PID_PATTERN = Pattern.compile("(\\d+)@"); private static String pid = ""; // make an attempt to get the PID of the process // this will only work on UNIX platforms; for others, the PID will be "unknown" static { final Matcher matcher = PID_PATTERN.matcher(ManagementFactory.getRuntimeMXBean().getName()); if (matcher.find()) { pid = "[" + matcher.group(1) + "]"; } } @NotNull private String host = "localhost"; @Min(1) @Max(65535) private int port = SyslogConstants.SYSLOG_PORT; @NotNull private Facility facility = Facility.LOCAL0; // PrefixedThrowableProxyConverter does not apply to syslog appenders, as stack traces are sent separately from // the main message. This means that the standard prefix of `!` is not used for syslog @NotNull private String stackTracePrefix = SyslogAppender.DEFAULT_STACKTRACE_PATTERN; // prefix the logFormat with the application name and PID (if available) private String logFormat = LOG_TOKEN_NAME + LOG_TOKEN_PID + ": " + SyslogAppender.DEFAULT_SUFFIX_PATTERN; private boolean includeStackTrace = true; /** * Returns the Logback pattern with which events will be formatted. 
*/ @Override @JsonProperty public String getLogFormat() { return logFormat; } /** * Sets the Logback pattern with which events will be formatted. */ @Override @JsonProperty public void setLogFormat(String logFormat) { this.logFormat = logFormat; } /** * Returns the hostname of the syslog server. */ @JsonProperty public String getHost() { return host; } @JsonProperty public void setHost(String host) { this.host = host; } @JsonProperty public Facility getFacility() { return facility; } @JsonProperty public void setFacility(Facility facility) { this.facility = facility; } @JsonProperty public int getPort() { return port; } @JsonProperty public void setPort(int port) { this.port = port; } @JsonProperty public boolean getIncludeStackTrace() { return includeStackTrace; } @JsonProperty public void setIncludeStackTrace(boolean includeStackTrace) { this.includeStackTrace = includeStackTrace; } @JsonProperty public String getStackTracePrefix() { return stackTracePrefix; } @JsonProperty public void setStackTracePrefix(String stackTracePrefix) { this.stackTracePrefix = stackTracePrefix; } @Override public Appender<ILoggingEvent> build(LoggerContext context, String applicationName, Layout<ILoggingEvent> layout) { final SyslogAppender appender = new SyslogAppender(); appender.setName("syslog-appender"); appender.setContext(context); appender.setSuffixPattern(logFormat.replaceAll(LOG_TOKEN_PID, pid).replaceAll(LOG_TOKEN_NAME, Matcher.quoteReplacement(applicationName))); appender.setSyslogHost(host); appender.setPort(port); appender.setFacility(facility.toString().toLowerCase(Locale.ENGLISH)); appender.setThrowableExcluded(!includeStackTrace); appender.setStackTracePattern(stackTracePrefix); addThresholdFilter(appender, threshold); appender.start(); return wrapAsync(appender); } }
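/*
 * Illustrative sketch (not part of the Dropwizard sources): configures the factory above
 * programmatically instead of via YAML and builds the appender. The host, port and
 * application name are example values; in a real service Dropwizard supplies the
 * LoggerContext and layout itself.
 */
package io.dropwizard.logging;

import ch.qos.logback.classic.LoggerContext;
import ch.qos.logback.classic.spi.ILoggingEvent;
import ch.qos.logback.core.Appender;

public class SyslogAppenderFactoryExample {
    public static void main(String[] args) {
        SyslogAppenderFactory factory = new SyslogAppenderFactory();
        factory.setHost("syslog.example.com");                      // default: localhost
        factory.setPort(10514);                                     // default: 514
        factory.setFacility(SyslogAppenderFactory.Facility.LOCAL1); // default: LOCAL0
        factory.setIncludeStackTrace(true);

        // build() stamps the application name and PID into the suffix pattern,
        // applies the threshold filter and wraps the appender asynchronously.
        Appender<ILoggingEvent> appender =
                factory.build(new LoggerContext(), "example-app", null); // layout is unused by this factory
        appender.stop();
    }
}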
/* * Copyright (C) 2011 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.wuman.twolevellrucache; import com.jakewharton.disklrucache.DiskLruCache; import com.jakewharton.disklrucache.DiskLruCache.Editor; import com.jakewharton.disklrucache.DiskLruCache.Snapshot; import org.apache.commons.io.IOUtils; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.util.Map; /** * A two-level LRU cache composed of a smaller, first level {@code LruCache} in * memory and a larger, second level {@code DiskLruCache}. * * The keys must be of {@code String} type. The values must be convertible to * and from a byte stream using a {@code Converter}. * * @author wuman * * @param <V> */ public class TwoLevelLruCache<V> { private static final int INDEX_VALUE = 0; // allow only one value per entry private final LruCache<String, V> mMemCache; private final DiskLruCache mDiskCache; private final Converter<V> mConverter; /** * Constructor for TwoLevelLruCache. Use this constructor if only the first * level memory cache is needed. * * @param maxSizeMem */ public TwoLevelLruCache(int maxSizeMem) { super(); mDiskCache = null; mConverter = null; mMemCache = new LruCache<String, V>(maxSizeMem) { @Override protected void entryRemoved(boolean evicted, String key, V oldValue, V newValue) { wrapEntryRemoved(evicted, key, oldValue, newValue); } @Override protected V create(String key) { return wrapCreate(key); } @Override protected int sizeOf(String key, V value) { return wrapSizeOf(key, value); } }; } /** * Constructor for TwoLevelLruCache. Use this constructor if the second * level disk cache is to be enabled. * * @param directory * a writable directory for the L2 disk cache. * @param appVersion * @param maxSizeMem * the maximum sum of the sizes of the entries in the L1 mem * cache. * @param maxSizeDisk * the maximum number of bytes the L2 disk cache should use to * store. * @param converter * a {@code Converter} that is able to convert a byte stream to * and from type {@code V}. * @throws IOException */ public TwoLevelLruCache(File directory, int appVersion, int maxSizeMem, long maxSizeDisk, Converter<V> converter) throws IOException { super(); if (maxSizeMem >= maxSizeDisk) { throw new IllegalArgumentException( "It makes more sense to have a larger second-level disk cache."); } if (converter == null) { throw new IllegalArgumentException("A converter must be submitted."); } mConverter = converter; mMemCache = new LruCache<String, V>(maxSizeMem) { @Override protected void entryRemoved(boolean evicted, String key, V oldValue, V newValue) { wrapEntryRemoved(evicted, key, oldValue, newValue); } @Override protected V create(String key) { return wrapCreate(key); } @Override protected int sizeOf(String key, V value) { return wrapSizeOf(key, value); } }; mDiskCache = DiskLruCache.open(directory, appVersion, 1, maxSizeDisk); } /** * Returns the value for {@code key} if it exists in the cache or can be * created by {@code #create(String)}. 
* * @param key * @return value */ @SuppressWarnings("unchecked") public final V get(String key) { V value = mMemCache.get(key); if (mDiskCache != null && value == null) { Snapshot snapshot = null; InputStream in = null; try { snapshot = mDiskCache.get(key); if (snapshot != null) { in = snapshot.getInputStream(INDEX_VALUE); byte[] bytes = IOUtils.toByteArray(in); value = mConverter.from(bytes); } } catch (IOException e) { System.out.println("Unable to get entry from disk cache. key: " + key); } catch (Exception e) { System.out.println("Unable to get entry from disk cache. key: " + key); } catch (OutOfMemoryError e) { System.out.println("Unable to get entry from disk cache. key: " + key); } finally { IOUtils.closeQuietly(in); IOUtils.closeQuietly(snapshot); } if (value != null) { // write back to mem cache mMemCache.put(key, value); } } return value; } /** * Caches {@code newValue} for {@code key}. * * @param key * @param newValue * @return oldValue */ public final V put(String key, V newValue) { V oldValue = mMemCache.put(key, newValue); putToDiskQuietly(key, newValue); return oldValue; } private void removeFromDiskQuietly(String key) { if (mDiskCache == null) { return; } try { mDiskCache.remove(key); } catch (IOException e) { System.out.println("Unable to remove entry from disk cache. key: " + key); } } private void putToDiskQuietly(String key, V newValue) { if (mDiskCache == null) { return; } Editor editor = null; OutputStream out = null; try { editor = mDiskCache.edit(key); if (editor != null) { out = editor.newOutputStream(INDEX_VALUE); mConverter.toStream(newValue, out); editor.commit(); } } catch (IOException e) { System.out .println("Unable to put entry to disk cache. key: " + key); } finally { IOUtils.closeQuietly(out); quietlyAbortUnlessCommitted(editor); } } private static void quietlyAbortUnlessCommitted(DiskLruCache.Editor editor) { // Give up because the cache cannot be written. try { if (editor != null) { editor.abortUnlessCommitted(); } } catch (Exception ignored) { } } /** * Removes the entry for {@code key} if it exists. * * @param key * @return oldValue */ public final V remove(String key) { V oldValue = mMemCache.remove(key); removeFromDiskQuietly(key); return oldValue; } private void wrapEntryRemoved(boolean evicted, String key, V oldValue, V newValue) { entryRemoved(evicted, key, oldValue, newValue); if (!evicted) { removeFromDiskQuietly(key); } } /** * Called for entries that have been evicted or removed. This method is * invoked when a value is evicted to make space, removed by a call to * {@link #remove}, or replaced by a call to {@link #put}. The default * implementation does nothing. * * <p> * The method is called without synchronization: other threads may access * the cache while this method is executing. * * @param evicted * true if the entry is being removed to make space, false if the * removal was caused by a {@link #put} or {@link #remove}. * @param key * @param oldValue * @param newValue * the new value for {@code key}, if it exists. If non-null,this * removal was caused by a {@link #put}. Otherwise it was caused * by an eviction or a {@link #remove}. */ protected void entryRemoved(boolean evicted, String key, V oldValue, V newValue) { } private V wrapCreate(String key) { V createdValue = create(key); if (createdValue == null) { return null; } putToDiskQuietly(key, createdValue); return createdValue; } /** * Called after a cache miss to compute a value for the corresponding key. * Returns the computed value or null if no value can be computed. 
The * default implementation returns null. * * <p> * The method is called without synchronization: other threads may access * the cache while this method is executing. * * <p> * If a value for {@code key} exists in the cache when this method returns, * the created value will be released with {@link #entryRemoved} and * discarded. This can occur when multiple threads request the same key at * the same time (causing multiple values to be created), or when one thread * calls {@link #put} while another is creating a value for the same key. * * @param key * @return createdValue */ protected V create(String key) { return null; } private int wrapSizeOf(String key, V value) { return sizeOf(key, value); } /** * Returns the size of the entry for {@code key} and {@code value} in * user-defined units. The default implementation returns 1 so that size is * the number of entries and max size is the maximum number of entries. * * <p> * An entry's size must not change while it is in the cache. * * @param key * @param value * @return sizeOfEntry */ protected int sizeOf(String key, V value) { return 1; } /** * Returns the sum of the sizes of the entries in the L1 mem cache. * * @return size */ public synchronized final int sizeMem() { return mMemCache.size(); } /** * Returns the number of bytes currently being used to store the values in * the L2 disk cache. This may be greater than the max size if a background * deletion is pending. * * @return size */ public synchronized final long sizeDisk() { return mDiskCache == null ? 0 : mDiskCache.size(); } /** * Returns the maximum sum of the sizes of the entries in the L1 mem cache. * * @return maxSize */ public synchronized final int maxSizeMem() { return mMemCache.maxSize(); } /** * Returns the maximum number of bytes that the L2 disk cache should use to * store its data. * * @return maxSize */ public synchronized final long maxSizeDisk() { return mDiskCache == null ? 0L : mDiskCache.getMaxSize(); } /** * Clear both mem and disk caches. Internally this method calls both * {@link #evictAllMem()} and {@link #evictAllDisk()}. * * @throws IOException */ public final void evictAll() throws IOException { evictAllMem(); evictAllDisk(); } /** * Clear the L1 mem cache, calling {@link #entryRemoved} on each removed * entry. */ public final void evictAllMem() { mMemCache.evictAll(); } /** * Closes the L2 disk cache and deletes all of its stored values. This will * delete all files in the cache directory including files that weren't * created by the cache. * * @throws IOException */ public final void evictAllDisk() throws IOException { if (mDiskCache != null) { mDiskCache.delete(); } } /** * Returns the number of times {@link #get} returned a value. * * @return count */ public synchronized final int hitCount() { return mMemCache.hitCount(); } /** * Returns the number of times {@link #get} returned null or required a new * value to be created. * * @return count */ public synchronized final int missCount() { return mMemCache.missCount(); } /** * Returns the number of times {@link #create(String)} returned a value. * * @return count */ public synchronized final int createCount() { return mMemCache.createCount(); } /** * Returns the number of times {@link #put} was called. * * @return count */ public synchronized final int putCount() { return mMemCache.putCount(); } /** * Returns the number of values that have been evicted. 
* * @return count */ public synchronized final int evictionCount() { return mMemCache.evictionCount(); } /** * Returns a copy of the current contents of the L1 mem cache, ordered from * least recently accessed to most recently accessed. * * @return snapshot */ public synchronized final Map<String, V> snapshot() { return mMemCache.snapshot(); } @Override public synchronized final String toString() { return mMemCache.toString(); } /** * Returns the directory where the disk cache stores its data. * * @return directory */ public final File getDirectory() { return mDiskCache == null ? null : mDiskCache.getDirectory(); } /** * Returns true if the disk cache has been closed. * * @return closed */ public final boolean isClosed() { return mDiskCache == null ? true : mDiskCache.isClosed(); } /** * Force buffered operations to the file system. * * @throws IOException */ public synchronized final void flush() throws IOException { if (mDiskCache != null) { mDiskCache.flush(); } } /** * Closes the disk cache. Stored values will remain on the file system. * * @throws IOException */ public synchronized final void close() throws IOException { if (mDiskCache != null) { mDiskCache.close(); } } /** * Convert a byte stream to and from a concrete type. * * @param <T> * Object type. */ public static interface Converter<T> { /** Converts bytes to an object. */ T from(byte[] bytes) throws IOException; /** Converts o to bytes written to the specified stream. */ void toStream(T o, OutputStream bytes) throws IOException; } }
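/*
 * Illustrative sketch (not part of the library sources): a String-valued TwoLevelLruCache
 * backed by a temporary directory. Sizes and the directory are example values; sizeOf()
 * keeps its default of one unit per entry, so maxSizeMem counts entries.
 */
package com.wuman.twolevellrucache;

import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;

public class TwoLevelLruCacheExample {

    /** Converts cached Strings to and from UTF-8 bytes for the second-level disk cache. */
    static class StringConverter implements TwoLevelLruCache.Converter<String> {
        @Override
        public String from(byte[] bytes) throws IOException {
            return new String(bytes, StandardCharsets.UTF_8);
        }

        @Override
        public void toStream(String o, OutputStream bytes) throws IOException {
            bytes.write(o.getBytes(StandardCharsets.UTF_8));
        }
    }

    public static void main(String[] args) throws IOException {
        File dir = new File(System.getProperty("java.io.tmpdir"), "two-level-cache");
        TwoLevelLruCache<String> cache = new TwoLevelLruCache<String>(
                dir, /* appVersion */ 1, /* maxSizeMem */ 100,
                /* maxSizeDisk */ 1024 * 1024, new StringConverter());

        cache.put("greeting", "hello");                 // stored in memory and on disk
        System.out.println(cache.get("greeting"));      // served from the memory level
        cache.flush();
        cache.close();
    }
}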
/* * Copyright 2013 APPNEXUS INC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.appnexus.opensdk; import com.appnexus.opensdk.mocks.MockDefaultExecutorSupplier; import com.appnexus.opensdk.shadows.ShadowAsyncTaskNoExecutor; import com.appnexus.opensdk.shadows.ShadowSettings; import com.appnexus.opensdk.shadows.ShadowWebSettings; import com.appnexus.opensdk.ut.UTAdRequest; import com.appnexus.opensdk.ut.UTAdRequester; import com.appnexus.opensdk.ut.UTAdResponse; import com.appnexus.opensdk.ut.UTRequestParameters; import com.appnexus.opensdk.ut.adresponse.BaseAdResponse; import com.appnexus.opensdk.utils.Settings; import com.squareup.okhttp.mockwebserver.MockResponse; import org.junit.Test; import org.junit.runner.RunWith; import org.robolectric.Robolectric; import org.robolectric.RobolectricTestRunner; import org.robolectric.annotation.Config; import org.robolectric.shadows.ShadowLog; import org.robolectric.shadows.ShadowWebView; import java.util.ArrayList; import java.util.LinkedList; import static junit.framework.Assert.assertEquals; import static junit.framework.Assert.assertNotSame; import static junit.framework.Assert.assertNull; import static junit.framework.Assert.assertTrue; @Config(sdk = 21, shadows = {ShadowAsyncTaskNoExecutor.class, ShadowWebView.class, ShadowWebSettings.class, ShadowSettings.class, ShadowLog.class}) @RunWith(RobolectricTestRunner.class) public class AdRequestToAdRequesterTest extends BaseRoboTest implements UTAdRequester { boolean requesterFailed, requesterReceivedServerResponse, requesterReceivedAd; UTAdRequest adRequest; UTAdResponse response; UTRequestParameters requestParameters; @Override public void setup() { super.setup(); requesterFailed = false; requesterReceivedServerResponse = false; requesterReceivedAd = false; requestParameters = new UTRequestParameters(activity); Settings.getSettings().ua = ""; } public void assertReceiveServerResponseSuccessful(boolean success) { assertTrue(requesterReceivedServerResponse || requesterFailed); assertEquals(success, requesterReceivedServerResponse); assertEquals(!success, requesterFailed); } public void assertServerResponseHasAds(boolean hasAds) { if (response != null && response.getAdList()!=null) { assertEquals(hasAds, response.getAdList().size()>0); } } public void setBannerRequestParams() { requestParameters.setPlacementID("0"); requestParameters.setPrimarySize(new AdSize(320,50)); requestParameters.setMediaType(MediaType.BANNER); } public void setInterstitialRequestParams() { requestParameters.setPlacementID("0"); ArrayList<AdSize> allowedSizes = new ArrayList<AdSize>(); allowedSizes.add(new AdSize(300, 250)); requestParameters.setSizes(allowedSizes); requestParameters.setPrimarySize(new AdSize(1,1)); requestParameters.setMediaType(MediaType.INTERSTITIAL); } public void setNativeRequestParams() { requestParameters.setPlacementID("0"); requestParameters.setPrimarySize(new AdSize(1,1)); requestParameters.setMediaType(MediaType.NATIVE); } //This verifies that the AsyncTask for Request is being executed on the 
Correct Executor. @Test public void testRequestExecutorForBackgroundTasks() { SDKSettings.setExternalExecutor(MockDefaultExecutorSupplier.getInstance().forBackgroundTasks()); assertNotSame(ShadowAsyncTaskNoExecutor.getExecutor(), MockDefaultExecutorSupplier.getInstance().forBackgroundTasks()); adRequest = new UTAdRequest(this); adRequest.execute(); waitForTasks(); Robolectric.flushBackgroundThreadScheduler(); Robolectric.flushForegroundThreadScheduler(); assertEquals(ShadowAsyncTaskNoExecutor.getExecutor(), MockDefaultExecutorSupplier.getInstance().forBackgroundTasks()); } @Test public void testRequestBannerSucceeded() { setBannerRequestParams(); // adRequest initialization goes here because getOwner is called in the constructor adRequest = new UTAdRequest(this); server.enqueue(new MockResponse().setResponseCode(200).setBody(TestResponsesUT.banner())); adRequest.execute(); Robolectric.flushBackgroundThreadScheduler(); Robolectric.flushForegroundThreadScheduler(); assertReceiveServerResponseSuccessful(true); assertServerResponseHasAds(true); assertEquals(MediaType.BANNER, response.getMediaType()); } @Test public void testRequestBannerNativeSucceeded() { setBannerRequestParams(); // adRequest initialization goes here because getOwner is called in the constructor adRequest = new UTAdRequest(this); server.enqueue(new MockResponse().setResponseCode(200).setBody(TestResponsesUT.anNative())); adRequest.execute(); Robolectric.flushBackgroundThreadScheduler(); Robolectric.flushForegroundThreadScheduler(); assertReceiveServerResponseSuccessful(true); assertServerResponseHasAds(true); assertEquals(MediaType.BANNER, response.getMediaType()); } @Test public void testRequestBlank() { setBannerRequestParams(); adRequest = new UTAdRequest(this); // blanks are handled by requester server.enqueue(new MockResponse().setResponseCode(200).setBody(TestResponsesUT.blank())); adRequest.execute(); Robolectric.flushBackgroundThreadScheduler(); Robolectric.flushForegroundThreadScheduler(); assertReceiveServerResponseSuccessful(false); assertNull(response.getAdList()); } @Test public void testRequestStatusError() { setBannerRequestParams(); adRequest = new UTAdRequest(this); server.enqueue(new MockResponse().setResponseCode(404).setBody(TestResponsesUT.banner())); adRequest.execute(); Robolectric.flushBackgroundThreadScheduler(); Robolectric.flushForegroundThreadScheduler(); assertReceiveServerResponseSuccessful(false); assertServerResponseHasAds(false); } @Test public void testRequestNativeSucceeded() { setNativeRequestParams(); adRequest = new UTAdRequest(this); server.enqueue(new MockResponse().setResponseCode(200).setBody(TestResponsesUT.anNative())); adRequest.execute(); Robolectric.flushBackgroundThreadScheduler(); Robolectric.flushForegroundThreadScheduler(); assertReceiveServerResponseSuccessful(true); assertServerResponseHasAds(true); assertEquals(MediaType.NATIVE, response.getMediaType()); } @Test public void testRequestInterstitialSucceeded() { setInterstitialRequestParams(); // adRequest initialization goes here because getOwner is called in the constructor adRequest = new UTAdRequest(this); // Server response for banner and interstitial is the same server.enqueue(new MockResponse().setResponseCode(200).setBody(TestResponsesUT.banner())); adRequest.execute(); Robolectric.flushBackgroundThreadScheduler(); Robolectric.flushForegroundThreadScheduler(); assertReceiveServerResponseSuccessful(true); assertServerResponseHasAds(true); assertEquals(MediaType.INTERSTITIAL, response.getMediaType()); } long time; 
@Override public void continueWaterfall(ResultCode code) { } @Override public void nativeRenderingFailed() { } @Override public void onReceiveUTResponse(UTAdResponse response){ if(response!=null && response.getAdList() != null && !response.getAdList().isEmpty()) { requesterReceivedServerResponse = true; }else{ failed(ResultCode.getNewInstance(ResultCode.UNABLE_TO_FILL), response.getAdResponseInfo()); } this.response = response; } @Override public void failed(ResultCode code, ANAdResponseInfo adResponseInfo) { requesterFailed = true; } @Override public void onReceiveAd(AdResponse ad) { requesterReceivedAd = true; } @Override public void cancel() { } @Override public void execute() { } @Override public LinkedList<BaseAdResponse> getAdList() { return null; } @Override public UTRequestParameters getRequestParams() { return requestParameters; } }
/** Copyright (c) 2015 Microsoft Corporation Module Name: Optimize.java Abstract: Z3 Java API: Optimizes Author: Nikolaj Bjorner (nbjorner) 2015-07-16 Notes: **/ package com.microsoft.z3; import com.microsoft.z3.enumerations.Z3_lbool; /** * Object for managing optimizization context **/ public class Optimize extends Z3Object { /** * A string that describes all available optimize solver parameters. **/ public String getHelp() { return Native.optimizeGetHelp(getContext().nCtx(), getNativeObject()); } /** * Sets the optimize solver parameters. * * @throws Z3Exception **/ public void setParameters(Params value) { Native.optimizeSetParams(getContext().nCtx(), getNativeObject(), value.getNativeObject()); } /** * Retrieves parameter descriptions for Optimize solver. **/ public ParamDescrs getParameterDescriptions() { return new ParamDescrs(getContext(), Native.optimizeGetParamDescrs(getContext().nCtx(), getNativeObject())); } /** * Assert a constraint (or multiple) into the optimize solver. **/ public void Assert(BoolExpr ... constraints) { getContext().checkContextMatch(constraints); for (BoolExpr a : constraints) { Native.optimizeAssert(getContext().nCtx(), getNativeObject(), a.getNativeObject()); } } /** * Alias for Assert. **/ public void Add(BoolExpr ... constraints) { Assert(constraints); } /** * Handle to objectives returned by objective functions. **/ public class Handle { Optimize opt; int handle; Handle(Optimize opt, int h) { this.opt = opt; this.handle = h; } /** * Retrieve a lower bound for the objective handle. **/ public ArithExpr getLower() { return opt.GetLower(handle); } /** * Retrieve an upper bound for the objective handle. **/ public ArithExpr getUpper() { return opt.GetUpper(handle); } /** * Retrieve the value of an objective. **/ public ArithExpr getValue() { return getLower(); } /** * Print a string representation of the handle. **/ @Override public String toString() { return getValue().toString(); } } /** * Assert soft constraint * * Return an objective which associates with the group of constraints. * **/ public Handle AssertSoft(BoolExpr constraint, int weight, String group) { getContext().checkContextMatch(constraint); Symbol s = getContext().mkSymbol(group); return new Handle(this, Native.optimizeAssertSoft(getContext().nCtx(), getNativeObject(), constraint.getNativeObject(), Integer.toString(weight), s.getNativeObject())); } /** * Check satisfiability of asserted constraints. * Produce a model that (when the objectives are bounded and * don't use strict inequalities) meets the objectives. **/ public Status Check() { Z3_lbool r = Z3_lbool.fromInt(Native.optimizeCheck(getContext().nCtx(), getNativeObject())); switch (r) { case Z3_L_TRUE: return Status.SATISFIABLE; case Z3_L_FALSE: return Status.UNSATISFIABLE; default: return Status.UNKNOWN; } } /** * Creates a backtracking point. **/ public void Push() { Native.optimizePush(getContext().nCtx(), getNativeObject()); } /** * Backtrack one backtracking point. * * Note that an exception is thrown if Pop is called without a corresponding Push. **/ public void Pop() { Native.optimizePop(getContext().nCtx(), getNativeObject()); } /** * The model of the last Check. * * The result is null if Check was not invoked before, * if its results was not SATISFIABLE, or if model production is not enabled. **/ public Model getModel() { long x = Native.optimizeGetModel(getContext().nCtx(), getNativeObject()); if (x == 0) { return null; } else { return new Model(getContext(), x); } } /** * Declare an arithmetical maximization objective. 
* Return a handle to the objective. The handle is used * to retrieve the values of objectives after calling Check. **/ public Handle MkMaximize(ArithExpr e) { return new Handle(this, Native.optimizeMaximize(getContext().nCtx(), getNativeObject(), e.getNativeObject())); } /** * Declare an arithmetical minimization objective. * Similar to MkMaximize. **/ public Handle MkMinimize(ArithExpr e) { return new Handle(this, Native.optimizeMinimize(getContext().nCtx(), getNativeObject(), e.getNativeObject())); } /** * Retrieve a lower bound for the objective handle. **/ private ArithExpr GetLower(int index) { return (ArithExpr)Expr.create(getContext(), Native.optimizeGetLower(getContext().nCtx(), getNativeObject(), index)); } /** * Retrieve an upper bound for the objective handle. **/ private ArithExpr GetUpper(int index) { return (ArithExpr)Expr.create(getContext(), Native.optimizeGetUpper(getContext().nCtx(), getNativeObject(), index)); } /** * Return a string that describes why the last call to check returned unknown. **/ public String getReasonUnknown() { return Native.optimizeGetReasonUnknown(getContext().nCtx(), getNativeObject()); } /** * Print the context to a String (SMT-LIB parseable benchmark). **/ @Override public String toString() { return Native.optimizeToString(getContext().nCtx(), getNativeObject()); } /** * Optimize statistics. **/ public Statistics getStatistics() { return new Statistics(getContext(), Native.optimizeGetStatistics(getContext().nCtx(), getNativeObject())); } Optimize(Context ctx, long obj) throws Z3Exception { super(ctx, obj); } Optimize(Context ctx) throws Z3Exception { super(ctx, Native.mkOptimize(ctx.nCtx())); } @Override void incRef() { Native.optimizeIncRef(getContext().nCtx(), getNativeObject()); } @Override void addToReferenceQueue() { getContext().getOptimizeDRQ().storeReference(getContext(), this); } }
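/*
 * Illustrative sketch (not part of the Z3 sources): a tiny maximization problem driven
 * through the Optimize wrapper above. The Context entry points used here (mkOptimize,
 * mkIntConst, mkInt, mkLe) are assumed from the same Java API and are not shown in this
 * file; context cleanup is omitted for brevity.
 */
package com.microsoft.z3;

public class OptimizeExample {
    public static void main(String[] args) {
        Context ctx = new Context();
        Optimize opt = ctx.mkOptimize();

        IntExpr x = ctx.mkIntConst("x");
        opt.Assert(ctx.mkLe(x, ctx.mkInt(10)));       // hard constraint: x <= 10

        Optimize.Handle best = opt.MkMaximize(x);     // objective: maximize x
        if (opt.Check() == Status.SATISFIABLE) {
            System.out.println("optimum for x: " + best.getValue()); // prints 10
            System.out.println(opt.getModel());                      // full model
        }
    }
}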
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.mapred; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.PrintWriter; import java.security.PrivilegedExceptionAction; import java.util.Properties; import javax.security.auth.login.LoginException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.mapred.QueueManager.QueueACL; import org.apache.hadoop.security.UserGroupInformation; public class TestQueueManagerForJobKillAndJobPriority extends TestQueueManager { public void testOwnerAllowedForJobKill() throws IOException, InterruptedException { try { final UserGroupInformation ugi = createNecessaryUsers(); ugi.doAs(new PrivilegedExceptionAction<Object>() { @Override public Object run() throws Exception { JobConf conf = setupConf(QueueManager.toFullPropertyName ("default", adminAcl), "junk-user"); verifyJobKill(ugi, conf, true); return null; } }); } finally { tearDownCluster(); } } public void testUserDisabledACLForJobKill() throws IOException, InterruptedException { try { UserGroupInformation ugi = createNecessaryUsers(); // create other user who will try to kill the job of ugi. final UserGroupInformation otherUGI = UserGroupInformation. createUserForTesting("user1", new String [] {"group1"}); ugi.doAs(new PrivilegedExceptionAction<Object>() { @Override public Object run() throws Exception { //setup a cluster allowing a user to submit JobConf conf = setupConf(QueueManager.toFullPropertyName ("default", adminAcl), "dummy-user"); // Run job as ugi and try to kill job as user1, who (obviously) // should not able to kill the job. verifyJobKill(otherUGI, conf, false); return null; } }); } finally { tearDownCluster(); } } public void testUserEnabledACLForJobKill() throws IOException, LoginException, InterruptedException { try { UserGroupInformation ugi = createNecessaryUsers(); // create other user who will try to kill the job of ugi. final UserGroupInformation otherUGI = UserGroupInformation. createUserForTesting("user1", new String [] {"group1"}); ugi.doAs(new PrivilegedExceptionAction<Object>() { @Override public Object run() throws Exception { UserGroupInformation ugi = UserGroupInformation.getCurrentUser(); JobConf conf = setupConf(QueueManager.toFullPropertyName ("default", adminAcl), "user1"); // user1 should be able to kill the job verifyJobKill(otherUGI, conf, true); return null; } }); } finally { tearDownCluster(); } } public void testUserDisabledForJobPriorityChange() throws IOException, InterruptedException { try { UserGroupInformation ugi = createNecessaryUsers(); // create other user who will try to change priority of the job of ugi. final UserGroupInformation otherUGI = UserGroupInformation. 
createUserForTesting("user1", new String [] {"group1"}); ugi.doAs(new PrivilegedExceptionAction<Object>() { @Override public Object run() throws Exception { JobConf conf = setupConf(QueueManager.toFullPropertyName ("default", adminAcl), "junk-user"); verifyJobPriorityChangeAsOtherUser(otherUGI, conf, false); return null; } }); } finally { tearDownCluster(); } } /** * Test to verify refreshing of queue properties by using MRAdmin tool. * * @throws Exception */ public void testACLRefresh() throws Exception { try { String queueConfigPath = System.getProperty("test.build.extraconf", "build/test/extraconf"); File queueConfigFile = new File(queueConfigPath, QueueManager.QUEUE_ACLS_FILE_NAME); File hadoopConfigFile = new File(queueConfigPath, "mapred-site.xml"); try { //Setting up default mapred-site.xml Properties hadoopConfProps = new Properties(); //these properties should be retained. hadoopConfProps.put("mapred.queue.names", "default,q1,q2"); hadoopConfProps.put(JobConf.MR_ACLS_ENABLED, "true"); //These property should always be overridden hadoopConfProps.put(QueueManager.toFullPropertyName ("default", submitAcl), "u1"); hadoopConfProps.put(QueueManager.toFullPropertyName ("q1", submitAcl), "u2"); hadoopConfProps.put(QueueManager.toFullPropertyName ("q2", submitAcl), "u1"); UtilsForTests.setUpConfigFile(hadoopConfProps, hadoopConfigFile); //Actual property which would be used. Properties queueConfProps = new Properties(); queueConfProps.put(QueueManager.toFullPropertyName ("default", submitAcl), " "); //Writing out the queue configuration file. UtilsForTests.setUpConfigFile(queueConfProps, queueConfigFile); //Create a new configuration to be used with QueueManager JobConf conf = new JobConf(); QueueManager queueManager = new QueueManager(conf); UserGroupInformation ugi = UserGroupInformation. createUserForTesting("user1", new String [] {"group1"}); //Job Submission should fail because ugi to be used is set to blank. assertFalse("User Job Submission Succeeded before refresh.", queueManager.hasAccess("default", QueueACL.SUBMIT_JOB, ugi)); assertFalse("User Job Submission Succeeded before refresh.", queueManager.hasAccess("q1", QueueACL.SUBMIT_JOB, ugi)); assertFalse("User Job Submission Succeeded before refresh.", queueManager.hasAccess("q2", QueueACL.SUBMIT_JOB, ugi)); //Test job submission as alternate user. UserGroupInformation alternateUgi = UserGroupInformation.createUserForTesting("u1", new String[]{"user"}); assertTrue("Alternate User Job Submission failed before refresh.", queueManager.hasAccess("q2", QueueACL.SUBMIT_JOB, alternateUgi)); //Set acl for user1. queueConfProps.put(QueueManager.toFullPropertyName ("default", submitAcl), ugi.getShortUserName()); queueConfProps.put(QueueManager.toFullPropertyName ("q1", submitAcl), ugi.getShortUserName()); queueConfProps.put(QueueManager.toFullPropertyName ("q2", submitAcl), ugi.getShortUserName()); //write out queue-acls.xml. 
UtilsForTests.setUpConfigFile(queueConfProps, queueConfigFile); //refresh configuration queueManager.refreshAcls(conf); //Submission should succeed assertTrue("User Job Submission failed after refresh.", queueManager.hasAccess("default", QueueACL.SUBMIT_JOB, ugi)); assertTrue("User Job Submission failed after refresh.", queueManager.hasAccess("q1", QueueACL.SUBMIT_JOB, ugi)); assertTrue("User Job Submission failed after refresh.", queueManager.hasAccess("q2", QueueACL.SUBMIT_JOB, ugi)); assertFalse("Alternate User Job Submission succeeded after refresh.", queueManager.hasAccess("q2", QueueACL.SUBMIT_JOB, alternateUgi)); //delete the ACL file. queueConfigFile.delete(); //rewrite the mapred-site.xml hadoopConfProps.put(JobConf.MR_ACLS_ENABLED, "true"); hadoopConfProps.put(QueueManager.toFullPropertyName ("q1", submitAcl), ugi.getShortUserName()); UtilsForTests.setUpConfigFile(hadoopConfProps, hadoopConfigFile); queueManager.refreshAcls(conf); assertTrue("User Job Submission allowed after refresh and no queue acls file.", queueManager.hasAccess("q1", QueueACL.SUBMIT_JOB, ugi)); } finally{ if(queueConfigFile.exists()) { queueConfigFile.delete(); } if(hadoopConfigFile.exists()) { hadoopConfigFile.delete(); } } } finally { tearDownCluster(); } } public void testQueueAclRefreshWithInvalidConfFile() throws IOException { try { String queueConfigPath = System.getProperty("test.build.extraconf", "build/test/extraconf"); File queueConfigFile = new File(queueConfigPath, QueueManager.QUEUE_ACLS_FILE_NAME); File hadoopConfigFile = new File(queueConfigPath, "hadoop-site.xml"); try { // queue properties with which the cluster is started. Properties hadoopConfProps = new Properties(); hadoopConfProps.put("mapred.queue.names", "default,q1,q2"); hadoopConfProps.put(JobConf.MR_ACLS_ENABLED, "true"); UtilsForTests.setUpConfigFile(hadoopConfProps, hadoopConfigFile); //properties for mapred-queue-acls.xml Properties queueConfProps = new Properties(); UserGroupInformation ugi = UserGroupInformation.getCurrentUser(); queueConfProps.put(QueueManager.toFullPropertyName ("default", submitAcl), ugi.getShortUserName()); queueConfProps.put(QueueManager.toFullPropertyName ("q1", submitAcl), ugi.getShortUserName()); queueConfProps.put(QueueManager.toFullPropertyName ("q2", submitAcl), ugi.getShortUserName()); UtilsForTests.setUpConfigFile(queueConfProps, queueConfigFile); Configuration conf = new JobConf(); QueueManager queueManager = new QueueManager(conf); //Testing access to queue. assertTrue("User Job Submission failed.", queueManager.hasAccess("default", QueueACL.SUBMIT_JOB, ugi)); assertTrue("User Job Submission failed.", queueManager.hasAccess("q1", QueueACL.SUBMIT_JOB, ugi)); assertTrue("User Job Submission failed.", queueManager.hasAccess("q2", QueueACL.SUBMIT_JOB, ugi)); //Write out a new incomplete invalid configuration file. PrintWriter writer = new PrintWriter(new FileOutputStream(queueConfigFile)); writer.println("<configuration>"); writer.println("<property>"); writer.flush(); writer.close(); try { //Exception to be thrown by queue manager because configuration passed //is invalid. 
queueManager.refreshAcls(conf); fail("Refresh of ACLs should have failed with invalid conf file."); } catch (Exception e) { } assertTrue("User Job Submission failed after invalid conf file refresh.", queueManager.hasAccess("default", QueueACL.SUBMIT_JOB, ugi)); assertTrue("User Job Submission failed after invalid conf file refresh.", queueManager.hasAccess("q1", QueueACL.SUBMIT_JOB, ugi)); assertTrue("User Job Submission failed after invalid conf file refresh.", queueManager.hasAccess("q2", QueueACL.SUBMIT_JOB, ugi)); } finally { //Cleanup the configuration files in all cases if(hadoopConfigFile.exists()) { hadoopConfigFile.delete(); } if(queueConfigFile.exists()) { queueConfigFile.delete(); } } } finally { tearDownCluster(); } } public void testGroupsEnabledACLForJobSubmission() throws IOException, LoginException, InterruptedException { try { // login as self, get one group, and add in allowed list. UserGroupInformation ugi = createNecessaryUsers(); String[] groups = ugi.getGroupNames(); JobConf conf = setupConf(QueueManager.toFullPropertyName ("default", submitAcl), "3698-junk-user1,3698-junk-user2 " + groups[groups.length-1] + ",3698-junk-group"); verifyJobSubmissionToDefaultQueue (conf, true, ugi.getShortUserName()+","+groups[groups.length-1]); } finally { tearDownCluster(); } } }
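/*
 * Editor's illustrative sketch (not one of the original source files): the queue-ACL pattern the
 * TestQueueManager code above exercises -- express a submit ACL as the configuration property
 * returned by QueueManager.toFullPropertyName, enable mapred ACLs, and ask QueueManager whether
 * a user may submit. Assumptions are marked below: the package is chosen so package-level helpers
 * resolve, the "acl-submit-job" suffix stands in for the test's submitAcl field, the QueueACL
 * import location is assumed, and the queue/user/group names are made up.
 */
package org.apache.hadoop.mapred; // assumption: same package as QueueManager and the test above

import org.apache.hadoop.mapreduce.QueueACL; // assumed import location for the SUBMIT_JOB constant
import org.apache.hadoop.security.UserGroupInformation;

public class QueueAclSketch {
    public static void main(String[] args) throws Exception {
        JobConf conf = new JobConf();
        conf.setBoolean(JobConf.MR_ACLS_ENABLED, true);
        // "acl-submit-job" is assumed to be the value of the test's submitAcl constant.
        String submitProp = QueueManager.toFullPropertyName("default", "acl-submit-job");
        conf.set(submitProp, "alice analysts"); // "<users> <groups>" ACL syntax
        QueueManager queueManager = new QueueManager(conf);
        UserGroupInformation alice =
                UserGroupInformation.createUserForTesting("alice", new String[] {"some-group"});
        System.out.println("alice may submit to 'default': "
                + queueManager.hasAccess("default", QueueACL.SUBMIT_JOB, alice));
    }
}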
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.common.collect.EvictingQueue; import org.elasticsearch.test.ESTestCase; import java.util.Arrays; import static org.hamcrest.Matchers.equalTo; public class MovFnWhitelistedFunctionTests extends ESTestCase { public void testWindowMax() { int numValues = randomIntBetween(1, 100); int windowSize = randomIntBetween(1, 50); EvictingQueue<Double> window = new EvictingQueue<>(windowSize); for (int i = 0; i < numValues; i++) { double randValue = randomDouble(); double expected = -Double.MAX_VALUE; if (i == 0) { window.offer(randValue); continue; } for (double value : window) { expected = Math.max(expected, value); } double actual = MovingFunctions.max(window.stream().mapToDouble(Double::doubleValue).toArray()); assertEquals(expected, actual, 0.01 * Math.abs(expected)); window.offer(randValue); } } public void testNullWindowMax() { int numValues = randomIntBetween(1, 100); int windowSize = randomIntBetween(1, 50); EvictingQueue<Double> window = new EvictingQueue<>(windowSize); for (int i = 0; i < numValues; i++) { Double randValue = randomBoolean() ? Double.NaN : null; if (i == 0) { if (randValue != null) { window.offer(randValue); } continue; } double actual = MovingFunctions.max(window.stream().mapToDouble(Double::doubleValue).toArray()); assertThat(actual, equalTo(Double.NaN)); if (randValue != null) { window.offer(randValue); } } } public void testEmptyWindowMax() { EvictingQueue<Double> window = new EvictingQueue<>(0); double actual = MovingFunctions.max(window.stream().mapToDouble(Double::doubleValue).toArray()); assertThat(actual, equalTo(Double.NaN)); } public void testWindowMin() { int numValues = randomIntBetween(1, 100); int windowSize = randomIntBetween(1, 50); EvictingQueue<Double> window = new EvictingQueue<>(windowSize); for (int i = 0; i < numValues; i++) { double randValue = randomDouble(); double expected = Double.MAX_VALUE; if (i == 0) { window.offer(randValue); continue; } for (double value : window) { expected = Math.min(expected, value); } double actual = MovingFunctions.min(window.stream().mapToDouble(Double::doubleValue).toArray()); assertEquals(expected, actual, 0.01 * Math.abs(expected)); window.offer(randValue); } } public void testNullWindowMin() { int numValues = randomIntBetween(1, 100); int windowSize = randomIntBetween(1, 50); EvictingQueue<Double> window = new EvictingQueue<>(windowSize); for (int i = 0; i < numValues; i++) { Double randValue = randomBoolean() ? 
Double.NaN : null; if (i == 0) { if (randValue != null) { window.offer(randValue); } continue; } double actual = MovingFunctions.min(window.stream().mapToDouble(Double::doubleValue).toArray()); assertThat(actual, equalTo(Double.NaN)); if (randValue != null) { window.offer(randValue); } } } public void testEmptyWindowMin() { EvictingQueue<Double> window = new EvictingQueue<>(0); double actual = MovingFunctions.min(window.stream().mapToDouble(Double::doubleValue).toArray()); assertThat(actual, equalTo(Double.NaN)); } public void testWindowSum() { int numValues = randomIntBetween(1, 100); int windowSize = randomIntBetween(1, 50); EvictingQueue<Double> window = new EvictingQueue<>(windowSize); for (int i = 0; i < numValues; i++) { double randValue = randomDouble(); double expected = 0; if (i == 0) { window.offer(randValue); continue; } for (double value : window) { expected += value; } double actual = MovingFunctions.sum(window.stream().mapToDouble(Double::doubleValue).toArray()); assertEquals(expected, actual, 0.01 * Math.abs(expected)); window.offer(randValue); } } public void testNullWindowSum() { int numValues = randomIntBetween(1, 100); int windowSize = randomIntBetween(1, 50); EvictingQueue<Double> window = new EvictingQueue<>(windowSize); for (int i = 0; i < numValues; i++) { Double randValue = randomBoolean() ? Double.NaN : null; if (i == 0) { if (randValue != null) { window.offer(randValue); } continue; } double actual = MovingFunctions.sum(window.stream().mapToDouble(Double::doubleValue).toArray()); assertThat(actual, equalTo(0.0)); if (randValue != null) { window.offer(randValue); } } } public void testEmptyWindowSum() { EvictingQueue<Double> window = new EvictingQueue<>(0); double actual = MovingFunctions.sum(window.stream().mapToDouble(Double::doubleValue).toArray()); assertThat(actual, equalTo(0.0)); } public void testSimpleMovAvg() { int numValues = randomIntBetween(1, 100); int windowSize = randomIntBetween(1, 50); EvictingQueue<Double> window = new EvictingQueue<>(windowSize); for (int i = 0; i < numValues; i++) { double randValue = randomDouble(); double expected = 0; if (i == 0) { window.offer(randValue); continue; } for (double value : window) { expected += value; } expected /= window.size(); double actual = MovingFunctions.unweightedAvg(window.stream().mapToDouble(Double::doubleValue).toArray()); assertEquals(expected, actual, 0.01 * Math.abs(expected)); window.offer(randValue); } } public void testNullSimpleMovAvg() { int numValues = randomIntBetween(1, 100); int windowSize = randomIntBetween(1, 50); EvictingQueue<Double> window = new EvictingQueue<>(windowSize); for (int i = 0; i < numValues; i++) { Double randValue = randomBoolean() ? 
Double.NaN : null; if (i == 0) { if (randValue != null) { window.offer(randValue); } continue; } double actual = MovingFunctions.unweightedAvg(window.stream().mapToDouble(Double::doubleValue).toArray()); assertThat(actual, equalTo(Double.NaN)); if (randValue != null) { window.offer(randValue); } } } public void testEmptySimpleMovAvg() { EvictingQueue<Double> window = new EvictingQueue<>(0); double actual = MovingFunctions.unweightedAvg(window.stream().mapToDouble(Double::doubleValue).toArray()); assertThat(actual, equalTo(Double.NaN)); } public void testSimpleMovStdDev() { int numValues = randomIntBetween(1, 100); int windowSize = randomIntBetween(1, 50); EvictingQueue<Double> window = new EvictingQueue<>(windowSize); for (int i = 0; i < numValues; i++) { double randValue = randomDouble(); double mean = 0; if (i == 0) { window.offer(randValue); continue; } for (double value : window) { mean += value; } mean /= window.size(); double expected = 0.0; for (double value : window) { expected += Math.pow(value - mean, 2); } expected = Math.sqrt(expected / window.size()); double actual = MovingFunctions.stdDev(window.stream().mapToDouble(Double::doubleValue).toArray(), mean); assertEquals(expected, actual, 0.01 * Math.abs(expected)); window.offer(randValue); } } public void testNullSimpleStdDev() { int numValues = randomIntBetween(1, 100); int windowSize = randomIntBetween(1, 50); EvictingQueue<Double> window = new EvictingQueue<>(windowSize); for (int i = 0; i < numValues; i++) { Double randValue = randomBoolean() ? Double.NaN : null; if (i == 0) { if (randValue != null) { window.offer(randValue); } continue; } double actual = MovingFunctions.stdDev(window.stream().mapToDouble(Double::doubleValue).toArray(), MovingFunctions.unweightedAvg(window.stream().mapToDouble(Double::doubleValue).toArray())); assertThat(actual, equalTo(Double.NaN)); if (randValue != null) { window.offer(randValue); } } } public void testEmptySimpleStdDev() { EvictingQueue<Double> window = new EvictingQueue<>(0); double actual = MovingFunctions.stdDev(window.stream().mapToDouble(Double::doubleValue).toArray(), MovingFunctions.unweightedAvg(window.stream().mapToDouble(Double::doubleValue).toArray())); assertThat(actual, equalTo(Double.NaN)); } public void testStdDevNaNAvg() { assertThat(MovingFunctions.stdDev(new double[] { 1.0, 2.0, 3.0 }, Double.NaN), equalTo(Double.NaN)); } public void testLinearMovAvg() { int numValues = randomIntBetween(1, 100); int windowSize = randomIntBetween(1, 50); EvictingQueue<Double> window = new EvictingQueue<>(windowSize); for (int i = 0; i < numValues; i++) { double randValue = randomDouble(); if (i == 0) { window.offer(randValue); continue; } double avg = 0; long totalWeight = 1; long current = 1; for (double value : window) { avg += value * current; totalWeight += current; current += 1; } double expected = avg / totalWeight; double actual = MovingFunctions.linearWeightedAvg(window.stream().mapToDouble(Double::doubleValue).toArray()); assertEquals(expected, actual, 0.01 * Math.abs(expected)); window.offer(randValue); } } public void testNullLinearMovAvg() { int numValues = randomIntBetween(1, 100); int windowSize = randomIntBetween(1, 50); EvictingQueue<Double> window = new EvictingQueue<>(windowSize); for (int i = 0; i < numValues; i++) { Double randValue = randomBoolean() ? 
Double.NaN : null; if (i == 0) { if (randValue != null) { window.offer(randValue); } continue; } double actual = MovingFunctions.linearWeightedAvg(window.stream().mapToDouble(Double::doubleValue).toArray()); assertThat(actual, equalTo(Double.NaN)); if (randValue != null) { window.offer(randValue); } } } public void testEmptyLinearMovAvg() { EvictingQueue<Double> window = new EvictingQueue<>(0); double actual = MovingFunctions.linearWeightedAvg(window.stream().mapToDouble(Double::doubleValue).toArray()); assertThat(actual, equalTo(Double.NaN)); } public void testEWMAMovAvg() { double alpha = randomDouble(); int numValues = randomIntBetween(1, 100); int windowSize = randomIntBetween(1, 50); EvictingQueue<Double> window = new EvictingQueue<>(windowSize); for (int i = 0; i < numValues; i++) { double randValue = randomDouble(); if (i == 0) { window.offer(randValue); continue; } double avg = 0; boolean first = true; for (double value : window) { if (first) { avg = value; first = false; } else { avg = (value * alpha) + (avg * (1 - alpha)); } } double expected = avg; double actual = MovingFunctions.ewma(window.stream().mapToDouble(Double::doubleValue).toArray(), alpha); assertEquals(expected, actual, 0.01 * Math.abs(expected)); window.offer(randValue); } } public void testNullEwmaMovAvg() { double alpha = randomDouble(); int numValues = randomIntBetween(1, 100); int windowSize = randomIntBetween(1, 50); EvictingQueue<Double> window = new EvictingQueue<>(windowSize); for (int i = 0; i < numValues; i++) { Double randValue = randomBoolean() ? Double.NaN : null; if (i == 0) { if (randValue != null) { window.offer(randValue); } continue; } double actual = MovingFunctions.ewma(window.stream().mapToDouble(Double::doubleValue).toArray(), alpha); assertThat(actual, equalTo(Double.NaN)); if (randValue != null) { window.offer(randValue); } } } public void testEmptyEwmaMovAvg() { double alpha = randomDouble(); EvictingQueue<Double> window = new EvictingQueue<>(0); double actual = MovingFunctions.ewma(window.stream().mapToDouble(Double::doubleValue).toArray(), alpha); assertThat(actual, equalTo(Double.NaN)); } public void testHoltLinearMovAvg() { double alpha = randomDouble(); double beta = randomDouble(); int numValues = randomIntBetween(1, 100); int windowSize = randomIntBetween(1, 50); EvictingQueue<Double> window = new EvictingQueue<>(windowSize); for (int i = 0; i < numValues; i++) { double randValue = randomDouble(); if (i == 0) { window.offer(randValue); continue; } double s = 0; double last_s = 0; // Trend value double b = 0; double last_b = 0; int counter = 0; double last; for (double value : window) { last = value; if (counter == 0) { s = value; b = value - last; } else { s = alpha * value + (1.0d - alpha) * (last_s + last_b); b = beta * (s - last_s) + (1 - beta) * last_b; } counter += 1; last_s = s; last_b = b; } double expected = s + (0 * b) ; double actual = MovingFunctions.holt(window.stream().mapToDouble(Double::doubleValue).toArray(), alpha, beta); assertEquals(expected, actual, 0.01 * Math.abs(expected)); window.offer(randValue); } } public void testNullHoltMovAvg() { double alpha = randomDouble(); double beta = randomDouble(); int numValues = randomIntBetween(1, 100); int windowSize = randomIntBetween(1, 50); EvictingQueue<Double> window = new EvictingQueue<>(windowSize); for (int i = 0; i < numValues; i++) { Double randValue = randomBoolean() ? 
Double.NaN : null; if (i == 0) { if (randValue != null) { window.offer(randValue); } continue; } double actual = MovingFunctions.holt(window.stream().mapToDouble(Double::doubleValue).toArray(), alpha, beta); assertThat(actual, equalTo(Double.NaN)); if (randValue != null) { window.offer(randValue); } } } public void testEmptyHoltMovAvg() { double alpha = randomDouble(); double beta = randomDouble(); EvictingQueue<Double> window = new EvictingQueue<>(0); double actual = MovingFunctions.holt(window.stream().mapToDouble(Double::doubleValue).toArray(), alpha, beta); assertThat(actual, equalTo(Double.NaN)); } public void testHoltWintersMultiplicative() { double alpha = randomDouble(); double beta = randomDouble(); double gamma = randomDouble(); int period = randomIntBetween(1,10); int windowSize = randomIntBetween(period * 2, 50); // HW requires at least two periods of data EvictingQueue<Double> window = new EvictingQueue<>(windowSize); for (int i = 0; i < windowSize; i++) { window.offer(randomDouble()); } // Smoothed value double s = 0; double last_s = 0; // Trend value double b = 0; double last_b = 0; // Seasonal value double[] seasonal = new double[windowSize]; int counter = 0; double[] vs = new double[windowSize]; for (double v : window) { vs[counter] = v + 0.0000000001; counter += 1; } // Initial level value is average of first season // Calculate the slopes between first and second season for each period for (int i = 0; i < period; i++) { s += vs[i]; b += (vs[i + period] - vs[i]) / period; } s /= period; b /= period; last_s = s; // Calculate first seasonal if (Double.compare(s, 0.0) == 0 || Double.compare(s, -0.0) == 0) { Arrays.fill(seasonal, 0.0); } else { for (int i = 0; i < period; i++) { seasonal[i] = vs[i] / s; } } for (int i = period; i < vs.length; i++) { s = alpha * (vs[i] / seasonal[i - period]) + (1.0d - alpha) * (last_s + last_b); b = beta * (s - last_s) + (1 - beta) * last_b; seasonal[i] = gamma * (vs[i] / (last_s + last_b )) + (1 - gamma) * seasonal[i - period]; last_s = s; last_b = b; } int idx = window.size() - period + (0 % period); double expected = (s + (1 * b)) * seasonal[idx]; double actual = MovingFunctions.holtWinters(window.stream().mapToDouble(Double::doubleValue).toArray(), alpha, beta, gamma, period, true); assertEquals(expected, actual, 0.01 * Math.abs(expected)); } public void testNullHoltWintersMovAvg() { double alpha = randomDouble(); double beta = randomDouble(); double gamma = randomDouble(); int period = randomIntBetween(1,10); int numValues = randomIntBetween(1, 100); int windowSize = randomIntBetween(period * 2, 50); // HW requires at least two periods of data EvictingQueue<Double> window = new EvictingQueue<>(windowSize); for (int i = 0; i < windowSize; i++) { window.offer(Double.NaN); } for (int i = 0; i < numValues; i++) { double actual = MovingFunctions.holtWinters(window.stream().mapToDouble(Double::doubleValue).toArray(), alpha, beta, gamma, period, false); assertThat(actual, equalTo(Double.NaN)); } } public void testEmptyHoltWintersMovAvg() { double alpha = randomDouble(); double beta = randomDouble(); double gamma = randomDouble(); int period = randomIntBetween(1,10); EvictingQueue<Double> window = new EvictingQueue<>(0); double actual = MovingFunctions.holtWinters(window.stream().mapToDouble(Double::doubleValue).toArray(), alpha, beta, gamma, period, false); assertThat(actual, equalTo(Double.NaN)); } public void testHoltWintersAdditive() { double alpha = randomDouble(); double beta = randomDouble(); double gamma = randomDouble(); int period = 
randomIntBetween(1,10); int windowSize = randomIntBetween(period * 2, 50); // HW requires at least two periods of data EvictingQueue<Double> window = new EvictingQueue<>(windowSize); for (int i = 0; i < windowSize; i++) { window.offer(randomDouble()); } // Smoothed value double s = 0; double last_s = 0; // Trend value double b = 0; double last_b = 0; // Seasonal value double[] seasonal = new double[windowSize]; int counter = 0; double[] vs = new double[windowSize]; for (double v : window) { vs[counter] = v; counter += 1; } // Initial level value is average of first season // Calculate the slopes between first and second season for each period for (int i = 0; i < period; i++) { s += vs[i]; b += (vs[i + period] - vs[i]) / period; } s /= period; b /= period; last_s = s; // Calculate first seasonal if (Double.compare(s, 0.0) == 0 || Double.compare(s, -0.0) == 0) { Arrays.fill(seasonal, 0.0); } else { for (int i = 0; i < period; i++) { seasonal[i] = vs[i] / s; } } for (int i = period; i < vs.length; i++) { s = alpha * (vs[i] - seasonal[i - period]) + (1.0d - alpha) * (last_s + last_b); b = beta * (s - last_s) + (1 - beta) * last_b; seasonal[i] = gamma * (vs[i] - (last_s - last_b )) + (1 - gamma) * seasonal[i - period]; last_s = s; last_b = b; } int idx = window.size() - period + (0 % period); double expected = s + (1 * b) + seasonal[idx]; double actual = MovingFunctions.holtWinters(window.stream().mapToDouble(Double::doubleValue).toArray(), alpha, beta, gamma, period, false); assertEquals(expected, actual, 0.01 * Math.abs(expected)); } }
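/*
 * Editor's illustrative sketch (plain Java, no Elasticsearch dependency): the expected-value
 * loops in the MovFnWhitelistedFunctionTests above implement these recurrences. The EWMA is
 * seeded with the first value and then updated as avg = alpha * x + (1 - alpha) * avg; the
 * standard deviation is the population form sqrt(sum((x - mean)^2) / n). This mirrors the
 * tests' reference computations and is not the MovingFunctions implementation itself.
 */
public class MovingFunctionSketch {
    static double ewma(double[] values, double alpha) {
        double avg = Double.NaN;
        boolean first = true;
        for (double v : values) {
            if (first) { avg = v; first = false; }   // seed with the first value, as in the test
            else { avg = alpha * v + (1 - alpha) * avg; }
        }
        return avg;
    }

    static double stdDev(double[] values, double mean) {
        double sumSq = 0;
        for (double v : values) sumSq += (v - mean) * (v - mean);
        return Math.sqrt(sumSq / values.length);     // population standard deviation, as in the test
    }

    public static void main(String[] args) {
        double[] window = {1.0, 2.0, 3.0, 4.0};
        double mean = (1.0 + 2.0 + 3.0 + 4.0) / 4;   // 2.5, the unweighted average
        System.out.println(ewma(window, 0.3));       // 0.3-weighted EWMA seeded at 1.0
        System.out.println(stdDev(window, mean));    // ~1.118 for this window
    }
}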
/* * Copyright (C) 2015, BMW Car IT GmbH * * Author: Sebastian Mattheis <[email protected]> * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in * writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific * language governing permissions and limitations under the License. */ package com.bmwcarit.barefoot.markov; import static org.junit.Assert.assertEquals; import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; import org.json.JSONException; import org.json.JSONObject; import org.junit.Test; public class StateTest { private static class MockElem extends StateCandidate<MockElem, StateTransition, Sample> { public MockElem(int id, double seqprob, double filtprob, MockElem pred) { super(Integer.toString(id)); this.seqprob(seqprob); this.filtprob(filtprob); this.predecessor(pred); } public MockElem(JSONObject json, MockFactory factory) throws JSONException { super(json, factory); } public int numid() { return Integer.parseInt(id()); } } private static class MockFactory extends Factory<MockElem, StateTransition, Sample> { @Override public MockElem candidate(JSONObject json) throws JSONException { return new MockElem(json, this); } @Override public StateTransition transition(JSONObject json) throws JSONException { return new StateTransition(json); } @Override public Sample sample(JSONObject json) throws JSONException { return new Sample(json); } } @Test public void TestState() { Map<Integer, MockElem> elements = new HashMap<>(); elements.put(0, new MockElem(0, Math.log10(0.3), 0.3, null)); elements.put(1, new MockElem(1, Math.log10(0.2), 0.2, null)); elements.put(2, new MockElem(2, Math.log10(0.5), 0.5, null)); StateMemory<MockElem, StateTransition, Sample> state = new StateMemory<>(); { Set<MockElem> vector = new HashSet<>(Arrays.asList(elements.get(0), elements.get(1), elements.get(2))); state.update(vector, new Sample(0)); assertEquals(3, state.size()); assertEquals(2, state.estimate().numid()); } elements.put(3, new MockElem(3, Math.log10(0.3), 0.3, elements.get(1))); elements.put(4, new MockElem(4, Math.log10(0.2), 0.2, elements.get(1))); elements.put(5, new MockElem(5, Math.log10(0.4), 0.4, elements.get(2))); elements.put(6, new MockElem(6, Math.log10(0.1), 0.1, elements.get(2))); { Set<MockElem> vector = new HashSet<>(Arrays.asList(elements.get(3), elements.get(4), elements.get(5), elements.get(6))); state.update(vector, new Sample(1)); assertEquals(4, state.size()); assertEquals(5, state.estimate().numid()); } elements.put(7, new MockElem(7, Math.log10(0.3), 0.3, elements.get(5))); elements.put(8, new MockElem(8, Math.log10(0.2), 0.2, elements.get(5))); elements.put(9, new MockElem(9, Math.log10(0.4), 0.4, elements.get(6))); elements.put(10, new MockElem(10, Math.log10(0.1), 0.1, elements.get(6))); { Set<MockElem> vector = new HashSet<>(Arrays.asList(elements.get(7), elements.get(8), elements.get(9), elements.get(10))); state.update(vector, new Sample(2)); assertEquals(4, state.size()); assertEquals(9, state.estimate().numid()); } elements.put(11, new MockElem(11, Math.log10(0.3), 0.3, null)); elements.put(12, new MockElem(12, Math.log10(0.2), 0.2, null)); elements.put(13, new 
MockElem(13, Math.log10(0.4), 0.4, null)); elements.put(14, new MockElem(14, Math.log10(0.1), 0.1, null)); { Set<MockElem> vector = new HashSet<>(Arrays.asList(elements.get(11), elements.get(12), elements.get(13), elements.get(14))); state.update(vector, new Sample(3)); assertEquals(4, state.size()); assertEquals(13, state.estimate().numid()); } { Set<MockElem> vector = new HashSet<>(); state.update(vector, new Sample(4)); assertEquals(4, state.size()); assertEquals(13, state.estimate().numid()); } } @Test public void TestStateJSON() throws JSONException { Map<Integer, MockElem> elements = new HashMap<>(); StateMemory<MockElem, StateTransition, Sample> state = new StateMemory<>(); { JSONObject json = state.toJSON(); state = new StateMemory<>(json, new MockFactory()); } elements.put(0, new MockElem(0, Math.log10(0.3), 0.3, null)); elements.put(1, new MockElem(1, Math.log10(0.2), 0.2, null)); elements.put(2, new MockElem(2, Math.log10(0.5), 0.5, null)); state.update( new HashSet<>(Arrays.asList(elements.get(0), elements.get(1), elements.get(2))), new Sample(0)); { JSONObject json = state.toJSON(); state = new StateMemory<>(json, new MockFactory()); elements.clear(); for (MockElem element : state.vector()) { elements.put(element.numid(), element); } } elements.put(3, new MockElem(3, Math.log10(0.3), 0.3, elements.get(1))); elements.put(4, new MockElem(4, Math.log10(0.2), 0.2, elements.get(1))); elements.put(5, new MockElem(5, Math.log10(0.4), 0.4, elements.get(2))); elements.put(6, new MockElem(6, Math.log10(0.1), 0.1, elements.get(2))); state.update(new HashSet<>( Arrays.asList(elements.get(3), elements.get(4), elements.get(5), elements.get(6))), new Sample(1)); { JSONObject json = state.toJSON(); state = new StateMemory<>(json, new MockFactory()); elements.clear(); for (MockElem element : state.vector()) { elements.put(element.numid(), element); } } elements.put(7, new MockElem(7, Math.log10(0.3), 0.3, elements.get(5))); elements.put(8, new MockElem(8, Math.log10(0.2), 0.2, elements.get(5))); elements.put(9, new MockElem(9, Math.log10(0.4), 0.4, elements.get(6))); elements.put(10, new MockElem(10, Math.log10(0.1), 0.1, elements.get(6))); state.update(new HashSet<>( Arrays.asList(elements.get(7), elements.get(8), elements.get(9), elements.get(10))), new Sample(2)); { JSONObject json = state.toJSON(); state = new StateMemory<>(json, new MockFactory()); elements.clear(); for (MockElem element : state.vector()) { elements.put(element.numid(), element); } } elements.put(11, new MockElem(11, Math.log10(0.3), 0.3, null)); elements.put(12, new MockElem(12, Math.log10(0.2), 0.2, null)); elements.put(13, new MockElem(13, Math.log10(0.4), 0.4, null)); elements.put(14, new MockElem(14, Math.log10(0.1), 0.1, null)); state.update(new HashSet<>(Arrays.asList(elements.get(11), elements.get(12), elements.get(13), elements.get(14))), new Sample(3)); state.update(new HashSet<MockElem>(), new Sample(4)); { JSONObject json = state.toJSON(); StateMemory<MockElem, StateTransition, Sample> state2 = new StateMemory<>(json, new MockFactory()); assertEquals(state.size(), state2.size()); assertEquals(4, state2.size()); assertEquals(state.estimate().numid(), state2.estimate().numid()); assertEquals(13, state2.estimate().numid()); } } }
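/*
 * Editor's illustrative sketch (plain Java, no Barefoot dependency): the StateTest assertions
 * above are consistent with estimate() returning the candidate of the most recent update that
 * carries the highest filter probability (filtprob 0.5 -> id 2, 0.4 -> id 5, 0.4 -> id 9,
 * 0.4 -> id 13). This helper only restates that arg-max selection rule as implied by the
 * fixtures; it is not the StateMemory implementation.
 */
import java.util.Arrays;
import java.util.List;

public class EstimateSketch {
    static final class Candidate {
        final int id;
        final double filtprob;
        Candidate(int id, double filtprob) { this.id = id; this.filtprob = filtprob; }
    }

    // Returns the candidate with the maximum filter probability, or null for an empty vector.
    static Candidate estimate(List<Candidate> vector) {
        Candidate best = null;
        for (Candidate c : vector) {
            if (best == null || c.filtprob > best.filtprob) best = c;
        }
        return best;
    }

    public static void main(String[] args) {
        List<Candidate> vector = Arrays.asList(
                new Candidate(0, 0.3), new Candidate(1, 0.2), new Candidate(2, 0.5));
        System.out.println(estimate(vector).id); // 2, matching the first assertion in TestState
    }
}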
// Copyright 2015 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package org.chromium.chrome.browser.enhancedbookmarks; import android.app.Activity; import android.content.Context; import android.content.Intent; import android.graphics.Bitmap; import android.net.Uri; import android.os.Bundle; import android.provider.Browser; import android.util.Pair; import org.chromium.base.ApiCompatibilityUtils; import org.chromium.chrome.R; import org.chromium.chrome.browser.BookmarksBridge; import org.chromium.chrome.browser.BookmarksBridge.BookmarkItem; import org.chromium.chrome.browser.ChromeBrowserProviderClient; import org.chromium.chrome.browser.IntentHandler; import org.chromium.chrome.browser.UrlConstants; import org.chromium.chrome.browser.document.ChromeLauncherActivity; import org.chromium.chrome.browser.enhancedbookmarks.EnhancedBookmarksModel.AddBookmarkCallback; import org.chromium.chrome.browser.favicon.FaviconHelper; import org.chromium.chrome.browser.offline_pages.OfflinePageBridge; import org.chromium.chrome.browser.snackbar.Snackbar; import org.chromium.chrome.browser.snackbar.SnackbarManager; import org.chromium.chrome.browser.snackbar.SnackbarManager.SnackbarController; import org.chromium.chrome.browser.tab.Tab; import org.chromium.chrome.browser.util.FeatureUtilities; import org.chromium.chrome.browser.util.MathUtils; import org.chromium.components.bookmarks.BookmarkId; import org.chromium.components.bookmarks.BookmarkType; import org.chromium.content_public.browser.WebContents; import org.chromium.ui.base.DeviceFormFactor; /** * A class holding static util functions for enhanced bookmark. */ public class EnhancedBookmarkUtils { private static final String BOOKMARK_SAVE_NAME = "SaveBookmark"; private static final int[] DEFAULT_BACKGROUND_COLORS = { 0xFFE64A19, 0xFFF09300, 0xFFAFB42B, 0xFF689F38, 0xFF0B8043, 0xFF0097A7, 0xFF7B1FA2, 0xFFC2185B }; /** * @return True if enhanced bookmark feature is enabled. */ public static boolean isEnhancedBookmarkEnabled() { return BookmarksBridge.isEnhancedBookmarksEnabled(); } /** * If the tab has already been bookmarked, start {@link EnhancedBookmarkEditActivity} for the * bookmark. If not, add the bookmark to bookmarkmodel, and show a snackbar notifying the user. */ public static void addOrEditBookmark(long idToAdd, final EnhancedBookmarksModel bookmarkModel, Tab tab, final SnackbarManager snackbarManager, final Activity activity) { if (idToAdd != ChromeBrowserProviderClient.INVALID_BOOKMARK_ID) { startEditActivity(activity, new BookmarkId(idToAdd, BookmarkType.NORMAL), tab.getWebContents()); return; } AddBookmarkCallback callback = new AddBookmarkCallback() { @Override public void onBookmarkAdded(final BookmarkId enhancedId) { Pair<EnhancedBookmarksModel, BookmarkId> pair = Pair.create(bookmarkModel, enhancedId); SnackbarController snackbarController = new SnackbarController() { @Override public void onDismissForEachType(boolean isTimeout) {} @Override public void onDismissNoAction(Object actionData) { // This method will be called only if the snackbar is dismissed by timeout. 
@SuppressWarnings("unchecked") Pair<EnhancedBookmarksModel, BookmarkId> pair = (Pair< EnhancedBookmarksModel, BookmarkId>) actionData; pair.first.destroy(); } @Override public void onAction(Object actionData) { @SuppressWarnings("unchecked") Pair<EnhancedBookmarksModel, BookmarkId> pair = (Pair< EnhancedBookmarksModel, BookmarkId>) actionData; // Show edit activity with the name of parent folder highlighted. startEditActivity(activity, enhancedId, null); pair.first.destroy(); } }; int messageId; int buttonId; OfflinePageBridge offlinePageBridge = bookmarkModel.getOfflinePageBridge(); if (offlinePageBridge == null) { messageId = R.string.enhanced_bookmark_page_saved; buttonId = R.string.enhanced_bookmark_item_edit; } else { boolean almostFull = offlinePageBridge.isStorageAlmostFull(); messageId = almostFull ? R.string.enhanced_bookmark_page_saved_offline_pages_storage_near_full : R.string.enhanced_bookmark_page_saved_offline_pages; // TODO(fgorski): show "FREE UP SPACE" button. buttonId = R.string.enhanced_bookmark_item_edit; } snackbarManager.showSnackbar(Snackbar.make( activity.getString(messageId), snackbarController) .setAction(activity.getString(buttonId), pair)); } }; bookmarkModel.addBookmarkAsync(bookmarkModel.getDefaultFolder(), 0, tab.getTitle(), tab.getUrl(), tab.getWebContents(), callback); } /** * Shows enhanced bookmark main UI, if it is turned on. Does nothing if it is turned off. * @return True if enhanced bookmark is on, false otherwise. */ public static boolean showEnhancedBookmarkIfEnabled(Activity activity) { if (!isEnhancedBookmarkEnabled()) { return false; } if (DeviceFormFactor.isTablet(activity)) { openBookmark(activity, UrlConstants.BOOKMARKS_URL); } else { activity.startActivity(new Intent(activity, EnhancedBookmarkActivity.class)); } return true; } /** * Starts an {@link EnhancedBookmarkEditActivity} for the given {@link BookmarkId}. */ public static void startEditActivity( Context context, BookmarkId bookmarkId, WebContents webContents) { Intent intent = new Intent(context, EnhancedBookmarkEditActivity.class); intent.putExtra(EnhancedBookmarkEditActivity.INTENT_BOOKMARK_ID, bookmarkId.toString()); if (webContents != null) { intent.putExtra(EnhancedBookmarkEditActivity.INTENT_WEB_CONTENTS, webContents); } if (context instanceof EnhancedBookmarkActivity) { ((EnhancedBookmarkActivity) context).startActivityForResult( intent, EnhancedBookmarkActivity.EDIT_BOOKMARK_REQUEST_CODE); } else { context.startActivity(intent); } } /** * Generate color based on bookmarked url's hash code. Same color will * always be returned given same bookmark item. * * @param item bookmark the color represents for * @return int for the generated color */ public static int generateBackgroundColor(BookmarkItem item) { int normalizedIndex = MathUtils.positiveModulo(item.getUrl().hashCode(), DEFAULT_BACKGROUND_COLORS.length); return DEFAULT_BACKGROUND_COLORS[normalizedIndex]; } /** * Save the bookmark in bundle to save state of a fragment/activity. * @param bundle Argument holder or savedInstanceState of the fragment/activity. * @param bookmark The bookmark to save. */ public static void saveBookmarkIdToBundle(Bundle bundle, BookmarkId bookmark) { bundle.putString(BOOKMARK_SAVE_NAME, bookmark.toString()); } /** * Retrieve the bookmark previously saved in the arguments bundle. * @param bundle Argument holder or savedInstanceState of the fragment/activity. * @return The ID of the bookmark to retrieve. 
*/ public static BookmarkId getBookmarkIdFromBundle(Bundle bundle) { return BookmarkId.getBookmarkIdFromString(bundle.getString(BOOKMARK_SAVE_NAME)); } public static void openBookmark(Activity activity, String url) { Intent intent = new Intent(Intent.ACTION_VIEW, Uri.parse(url)); intent.setClassName(activity.getApplicationContext().getPackageName(), ChromeLauncherActivity.class.getName()); intent.putExtra(Browser.EXTRA_APPLICATION_ID, activity.getApplicationContext().getPackageName()); intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK); IntentHandler.startActivityForTrustedIntent(intent, activity); } /** * Get dominant color from bitmap. This function uses favicon helper to fulfil its task. * @param bitmap The bitmap to extract color from. * @return The dominant color in ARGB format. */ public static int getDominantColorForBitmap(Bitmap bitmap) { int mDominantColor = FaviconHelper.getDominantColorForBitmap(bitmap); // FaviconHelper returns color in ABGR format, do a manual conversion here. int red = (mDominantColor & 0xff) << 16; int green = mDominantColor & 0xff00; int blue = (mDominantColor & 0xff0000) >> 16; int alpha = mDominantColor & 0xff000000; return alpha + red + green + blue; } /** * Updates the title of chrome shown in recent tasks. It only takes effect in document mode. */ public static void setTaskDescriptionInDocumentMode(Activity activity, String description) { if (FeatureUtilities.isDocumentMode(activity)) { // Setting the icon to null and the color to 0 means "take no effect". ApiCompatibilityUtils.setTaskDescription(activity, description, null, 0); } } /** * Closes the EnhancedBookmark Activity on Phone. Does nothing on tablet. */ public static void finishActivityOnPhone(Context context) { if (context instanceof EnhancedBookmarkActivity) { ((Activity) context).finish(); } } }
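/*
 * Editor's illustrative sketch (plain Java, no Android/Chromium dependency): the channel swap
 * performed by getDominantColorForBitmap above -- FaviconHelper reports the dominant color in
 * ABGR byte order, so red and blue are exchanged to obtain ARGB. Same masks and shifts as the
 * method above, shown on a fixed sample value.
 */
public class AbgrToArgbSketch {
    static int abgrToArgb(int abgr) {
        int red   = (abgr & 0xff) << 16;      // low byte of ABGR is red -> move to the ARGB red slot
        int green =  abgr & 0xff00;           // green stays in place
        int blue  = (abgr & 0xff0000) >> 16;  // ABGR blue byte -> ARGB blue slot
        int alpha =  abgr & 0xff000000;       // alpha stays in place
        return alpha + red + green + blue;
    }

    public static void main(String[] args) {
        int abgr = 0x11223344;                         // alpha=0x11, blue=0x22, green=0x33, red=0x44
        System.out.printf("%08x%n", abgrToArgb(abgr)); // prints 11443322
    }
}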
/* XMLEventAllocatorImpl.java -- Copyright (C) 2005,2006 Free Software Foundation, Inc. This file is part of GNU Classpath. GNU Classpath is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2, or (at your option) any later version. GNU Classpath is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with GNU Classpath; see the file COPYING. If not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. Linking this library statically or dynamically with other modules is making a combined work based on this library. Thus, the terms and conditions of the GNU General Public License cover the whole combination. As a special exception, the copyright holders of this library give you permission to link this library with independent modules to produce an executable, regardless of the license terms of these independent modules, and to copy and distribute the resulting executable under terms of your choice, provided that you also meet, for each linked independent module, the terms and conditions of the license of that module. An independent module is a module which is not derived from or based on this library. If you modify this library, you may extend this exception to your version of the library, but you are not obligated to do so. If you do not wish to do so, delete this exception statement from your version. */ package gnu.xml.stream; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import javax.xml.namespace.QName; import javax.xml.stream.Location; import javax.xml.stream.XMLStreamConstants; import javax.xml.stream.XMLStreamException; import javax.xml.stream.XMLStreamReader; import javax.xml.stream.events.EntityDeclaration; import javax.xml.stream.events.XMLEvent; import javax.xml.stream.util.XMLEventAllocator; import javax.xml.stream.util.XMLEventConsumer; /** * Allocator for creating XML events based on a reader state. 
* * @author <a href='mailto:[email protected]'>Chris Burdess</a> */ public class XMLEventAllocatorImpl implements XMLEventAllocator { protected Map entityDeclarations; protected XMLEventAllocatorImpl() { entityDeclarations = new HashMap(); } public XMLEvent allocate(XMLStreamReader reader) throws XMLStreamException { String text; boolean whitespace; boolean ignorableWhitespace; int len; List namespaces; int eventType = reader.getEventType(); Location location = reader.getLocation(); switch (eventType) { case XMLStreamConstants.CDATA: text = reader.getText(); whitespace = isWhitespace(text); // TODO ignorableWhitespace ignorableWhitespace = whitespace && false; return new CharactersImpl(location, text, whitespace, true, ignorableWhitespace); case XMLStreamConstants.CHARACTERS: text = reader.getText(); whitespace = false; // TODO ignorableWhitespace ignorableWhitespace = whitespace && false; return new CharactersImpl(location, text, whitespace, false, ignorableWhitespace); case XMLStreamConstants.COMMENT: text = reader.getText(); return new CommentImpl(location, text); case XMLStreamConstants.DTD: text = reader.getText(); List notations = new LinkedList(); List entities = new LinkedList(); // TODO readDTDBody(notations, entities); return new DTDImpl(location, text, null, notations, entities); case XMLStreamConstants.END_DOCUMENT: return new EndDocumentImpl(location); case XMLStreamConstants.END_ELEMENT: len = reader.getNamespaceCount(); namespaces = new LinkedList(); for (int i = 0; i < len; i++) namespaces.add(new NamespaceImpl(location, reader.getNamespacePrefix(i), reader.getNamespaceURI(i))); return new EndElementImpl(location, reader.getName(), namespaces); case XMLStreamConstants.ENTITY_REFERENCE: String name = reader.getLocalName(); EntityDeclaration decl = (EntityDeclaration) entityDeclarations.get(name); return new EntityReferenceImpl(location, decl, name); case XMLStreamConstants.PROCESSING_INSTRUCTION: return new ProcessingInstructionImpl(location, reader.getPITarget(), reader.getPIData()); case XMLStreamConstants.SPACE: text = reader.getText(); whitespace = true; // TODO ignorableWhitespace ignorableWhitespace = whitespace && false; return new CharactersImpl(location, text, whitespace, false, ignorableWhitespace); case XMLStreamConstants.START_DOCUMENT: String systemId = location.getSystemId(); String encoding = reader.getCharacterEncodingScheme(); boolean encodingDeclared = encoding != null; if (encoding == null) { encoding = reader.getEncoding(); if (encoding == null) encoding = "UTF-8"; } String xmlVersion = reader.getVersion(); if (xmlVersion == null) xmlVersion = "1.0"; boolean xmlStandalone = reader.isStandalone(); boolean standaloneDeclared = reader.standaloneSet(); return new StartDocumentImpl(location, systemId, encoding, xmlVersion, xmlStandalone, standaloneDeclared, encodingDeclared); case XMLStreamConstants.START_ELEMENT: len = reader.getNamespaceCount(); namespaces = new LinkedList(); for (int i = 0; i < len; i++) namespaces.add(new NamespaceImpl(location, reader.getNamespacePrefix(i), reader.getNamespaceURI(i))); len = reader.getAttributeCount(); List attributes = new LinkedList(); for (int i = 0; i < len; i++) attributes.add(new AttributeImpl(location, reader.getAttributeName(i), reader.getAttributeValue(i), QName.valueOf(reader.getAttributeType(i)), reader.isAttributeSpecified(i))); return new StartElementImpl(location, reader.getName(), attributes, namespaces, reader.getNamespaceContext()); default: throw new XMLStreamException("Unknown event type: " + 
eventType); } } public void allocate(XMLStreamReader reader, XMLEventConsumer consumer) throws XMLStreamException { consumer.add(allocate(reader)); } public XMLEventAllocator newInstance() { return new XMLEventAllocatorImpl(); } protected boolean isWhitespace(String text) { int len = text.length(); for (int i = 0; i < len; i++) { char c = text.charAt(i); if (c != 0x20 && c != 0x09 && c != 0x0a && c != 0x0d) return false; } return true; } }
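/*
 * Editor's illustrative sketch (not one of the original source files): how an XMLEventAllocator
 * such as the implementation above is typically driven -- allocate an event for the reader's
 * current cursor state, advance, and repeat until the stream is exhausted. Only standard
 * javax.xml.stream APIs are used plus allocate(reader) from the class above; the tiny subclass
 * exists solely to reach the protected constructor and assumes gnu.xml.stream is on the
 * classpath. The XML literal is sample input.
 */
import java.io.StringReader;

import javax.xml.stream.XMLInputFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;
import javax.xml.stream.events.XMLEvent;
import javax.xml.stream.util.XMLEventAllocator;

import gnu.xml.stream.XMLEventAllocatorImpl;

public class AllocatorLoopSketch {
    // Named subclass so the protected XMLEventAllocatorImpl constructor can be invoked via super().
    static class Allocator extends XMLEventAllocatorImpl {}

    static void dump(XMLEventAllocator allocator, String xml) throws XMLStreamException {
        XMLStreamReader reader =
                XMLInputFactory.newInstance().createXMLStreamReader(new StringReader(xml));
        // The reader starts positioned on START_DOCUMENT, so allocate before the first next().
        XMLEvent event = allocator.allocate(reader);
        System.out.println(event.getEventType());
        while (reader.hasNext()) {
            reader.next();
            event = allocator.allocate(reader);
            System.out.println(event.getEventType());
        }
    }

    public static void main(String[] args) throws XMLStreamException {
        dump(new Allocator(), "<root lang='en'>hello</root>");
    }
}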
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.fs; import java.io.IOException; import java.io.InvalidObjectException; import java.io.ObjectInputValidation; import java.io.Serializable; import java.net.URI; import java.net.URISyntaxException; import java.util.regex.Pattern; import org.apache.avro.reflect.Stringable; import org.apache.commons.lang3.StringUtils; import org.apache.hadoop.HadoopIllegalArgumentException; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; /** * Names a file or directory in a {@link FileSystem}. * Path strings use slash as the directory separator. */ @Stringable @InterfaceAudience.Public @InterfaceStability.Stable public class Path implements Comparable<Path>, Serializable, ObjectInputValidation { /** * The directory separator, a slash. */ public static final String SEPARATOR = "/"; /** * The directory separator, a slash, as a character. */ public static final char SEPARATOR_CHAR = '/'; /** * The current directory, ".". */ public static final String CUR_DIR = "."; /** * Whether the current host is a Windows machine. */ public static final boolean WINDOWS = System.getProperty("os.name").startsWith("Windows"); /** * Pre-compiled regular expressions to detect path formats. */ private static final Pattern HAS_DRIVE_LETTER_SPECIFIER = Pattern.compile("^/?[a-zA-Z]:"); /** Pre-compiled regular expressions to detect duplicated slashes. */ private static final Pattern SLASHES = Pattern.compile("/+"); private static final long serialVersionUID = 0xad00f; private URI uri; // a hierarchical uri /** * Test whether this Path uses a scheme and is relative. * Pathnames with scheme and relative path are illegal. */ void checkNotSchemeWithRelative() { if (toUri().isAbsolute() && !isUriPathAbsolute()) { throw new HadoopIllegalArgumentException( "Unsupported name: has scheme but relative path-part"); } } void checkNotRelative() { if (!isAbsolute() && toUri().getScheme() == null) { throw new HadoopIllegalArgumentException("Path is relative"); } } /** * Return a version of the given Path without the scheme information. * * @param path the source Path * @return a copy of this Path without the scheme information */ public static Path getPathWithoutSchemeAndAuthority(Path path) { // This code depends on Path.toString() to remove the leading slash before // the drive specification on Windows. Path newPath = path.isUriPathAbsolute() ? new Path(null, null, path.toUri().getPath()) : path; return newPath; } /** * Create a new Path based on the child path resolved against the parent path. 
* * @param parent the parent path * @param child the child path */ public Path(String parent, String child) { this(new Path(parent), new Path(child)); } /** * Create a new Path based on the child path resolved against the parent path. * * @param parent the parent path * @param child the child path */ public Path(Path parent, String child) { this(parent, new Path(child)); } /** * Create a new Path based on the child path resolved against the parent path. * * @param parent the parent path * @param child the child path */ public Path(String parent, Path child) { this(new Path(parent), child); } /** * Create a new Path based on the child path resolved against the parent path. * * @param parent the parent path * @param child the child path */ public Path(Path parent, Path child) { // Add a slash to parent's path so resolution is compatible with URI's URI parentUri = parent.uri; String parentPath = parentUri.getPath(); if (!(parentPath.equals("/") || parentPath.isEmpty())) { try { parentUri = new URI(parentUri.getScheme(), parentUri.getAuthority(), parentUri.getPath()+"/", null, parentUri.getFragment()); } catch (URISyntaxException e) { throw new IllegalArgumentException(e); } } URI resolved = parentUri.resolve(child.uri); initialize(resolved.getScheme(), resolved.getAuthority(), resolved.getPath(), resolved.getFragment()); } private void checkPathArg( String path ) throws IllegalArgumentException { // disallow construction of a Path from an empty string if ( path == null ) { throw new IllegalArgumentException( "Can not create a Path from a null string"); } if( path.length() == 0 ) { throw new IllegalArgumentException( "Can not create a Path from an empty string"); } } /** * Construct a path from a String. Path strings are URIs, but with * unescaped elements and some additional normalization. * * @param pathString the path string */ public Path(String pathString) throws IllegalArgumentException { checkPathArg( pathString ); // We can't use 'new URI(String)' directly, since it assumes things are // escaped, which we don't require of Paths. // add a slash in front of paths with Windows drive letters if (hasWindowsDrive(pathString) && pathString.charAt(0) != '/') { pathString = "/" + pathString; } // parse uri components String scheme = null; String authority = null; int start = 0; // parse uri scheme, if any int colon = pathString.indexOf(':'); int slash = pathString.indexOf('/'); if ((colon != -1) && ((slash == -1) || (colon < slash))) { // has a scheme scheme = pathString.substring(0, colon); start = colon+1; } // parse uri authority, if any if (pathString.startsWith("//", start) && (pathString.length()-start > 2)) { // has authority int nextSlash = pathString.indexOf('/', start+2); int authEnd = nextSlash > 0 ? nextSlash : pathString.length(); authority = pathString.substring(start+2, authEnd); start = authEnd; } // uri path is the rest of the string -- query & fragment not supported String path = pathString.substring(start, pathString.length()); initialize(scheme, authority, path, null); } /** * Construct a path from a URI * * @param aUri the source URI */ public Path(URI aUri) { uri = aUri.normalize(); } /** * Construct a Path from components. 
* * @param scheme the scheme * @param authority the authority * @param path the path */ public Path(String scheme, String authority, String path) { checkPathArg( path ); // add a slash in front of paths with Windows drive letters if (hasWindowsDrive(path) && path.charAt(0) != '/') { path = "/" + path; } // add "./" in front of Linux relative paths so that a path containing // a colon e.q. "a:b" will not be interpreted as scheme "a". if (!WINDOWS && path.charAt(0) != '/') { path = "./" + path; } initialize(scheme, authority, path, null); } private void initialize(String scheme, String authority, String path, String fragment) { try { this.uri = new URI(scheme, authority, normalizePath(scheme, path), null, fragment) .normalize(); } catch (URISyntaxException e) { throw new IllegalArgumentException(e); } } /** * Merge 2 paths such that the second path is appended relative to the first. * The returned path has the scheme and authority of the first path. On * Windows, the drive specification in the second path is discarded. * * @param path1 the first path * @param path2 the second path, to be appended relative to path1 * @return the merged path */ public static Path mergePaths(Path path1, Path path2) { String path2Str = path2.toUri().getPath(); path2Str = path2Str.substring(startPositionWithoutWindowsDrive(path2Str)); // Add path components explicitly, because simply concatenating two path // string is not safe, for example: // "/" + "/foo" yields "//foo", which will be parsed as authority in Path return new Path(path1.toUri().getScheme(), path1.toUri().getAuthority(), path1.toUri().getPath() + path2Str); } /** * Normalize a path string to use non-duplicated forward slashes as * the path separator and remove any trailing path separators. * * @param scheme the URI scheme. Used to deduce whether we * should replace backslashes or not * @param path the scheme-specific part * @return the normalized path string */ private static String normalizePath(String scheme, String path) { // Remove duplicated slashes. path = SLASHES.matcher(path).replaceAll("/"); // Remove backslashes if this looks like a Windows path. Avoid // the substitution if it looks like a non-local URI. if (WINDOWS && (hasWindowsDrive(path) || (scheme == null) || (scheme.isEmpty()) || (scheme.equals("file")))) { path = StringUtils.replace(path, "\\", "/"); } // trim trailing slash from non-root path (ignoring windows drive) int minLength = startPositionWithoutWindowsDrive(path) + 1; if (path.length() > minLength && path.endsWith(SEPARATOR)) { path = path.substring(0, path.length()-1); } return path; } private static boolean hasWindowsDrive(String path) { return (WINDOWS && HAS_DRIVE_LETTER_SPECIFIER.matcher(path).find()); } private static int startPositionWithoutWindowsDrive(String path) { if (hasWindowsDrive(path)) { return path.charAt(0) == SEPARATOR_CHAR ? 3 : 2; } else { return 0; } } /** * Determine whether a given path string represents an absolute path on * Windows. e.g. "C:/a/b" is an absolute path. "C:a/b" is not. 
* * @param pathString the path string to evaluate * @param slashed true if the given path is prefixed with "/" * @return true if the supplied path looks like an absolute path with a Windows * drive-specifier */ public static boolean isWindowsAbsolutePath(final String pathString, final boolean slashed) { int start = startPositionWithoutWindowsDrive(pathString); return start > 0 && pathString.length() > start && ((pathString.charAt(start) == SEPARATOR_CHAR) || (pathString.charAt(start) == '\\')); } /** * Convert this Path to a URI. * * @return this Path as a URI */ public URI toUri() { return uri; } /** * Return the FileSystem that owns this Path. * * @param conf the configuration to use when resolving the FileSystem * @return the FileSystem that owns this Path * @throws java.io.IOException thrown if there's an issue resolving the * FileSystem */ public FileSystem getFileSystem(Configuration conf) throws IOException { return FileSystem.get(this.toUri(), conf); } /** * Returns true if the path component (i.e. directory) of this URI is * absolute <strong>and</strong> the scheme is null, <b>and</b> the authority * is null. * * @return whether the path is absolute and the URI has no scheme nor * authority parts */ public boolean isAbsoluteAndSchemeAuthorityNull() { return (isUriPathAbsolute() && uri.getScheme() == null && uri.getAuthority() == null); } /** * Returns true if the path component (i.e. directory) of this URI is * absolute. * * @return whether this URI's path is absolute */ public boolean isUriPathAbsolute() { int start = startPositionWithoutWindowsDrive(uri.getPath()); return uri.getPath().startsWith(SEPARATOR, start); } /** * Returns true if the path component (i.e. directory) of this URI is * absolute. This method is a wrapper for {@link #isUriPathAbsolute()}. * * @return whether this URI's path is absolute */ public boolean isAbsolute() { return isUriPathAbsolute(); } /** * Returns true if and only if this path represents the root of a file system. * * @return true if and only if this path represents the root of a file system */ public boolean isRoot() { return getParent() == null; } /** * Returns the final component of this path. * * @return the final component of this path */ public String getName() { String path = uri.getPath(); int slash = path.lastIndexOf(SEPARATOR); return path.substring(slash+1); } /** * Returns the parent of a path or null if at root. * @return the parent of a path or null if at root */ public Path getParent() { String path = uri.getPath(); int lastSlash = path.lastIndexOf('/'); int start = startPositionWithoutWindowsDrive(path); if ((path.length() == start) || // empty path (lastSlash == start && path.length() == start+1)) { // at root return null; } String parent; if (lastSlash==-1) { parent = CUR_DIR; } else { parent = path.substring(0, lastSlash==start?start+1:lastSlash); } return new Path(uri.getScheme(), uri.getAuthority(), parent); } /** * Adds a suffix to the final name in the path. * * @param suffix the suffix to add * @return a new path with the suffix added */ public Path suffix(String suffix) { return new Path(getParent(), getName()+suffix); } @Override public String toString() { // we can't use uri.toString(), which escapes everything, because we want // illegal characters unescaped in the string, for glob processing, etc. 
StringBuilder buffer = new StringBuilder(); if (uri.getScheme() != null) { buffer.append(uri.getScheme()) .append(":"); } if (uri.getAuthority() != null) { buffer.append("//") .append(uri.getAuthority()); } if (uri.getPath() != null) { String path = uri.getPath(); if (path.indexOf('/')==0 && hasWindowsDrive(path) && // has windows drive uri.getScheme() == null && // but no scheme uri.getAuthority() == null) // or authority path = path.substring(1); // remove slash before drive buffer.append(path); } if (uri.getFragment() != null) { buffer.append("#") .append(uri.getFragment()); } return buffer.toString(); } @Override public boolean equals(Object o) { if (!(o instanceof Path)) { return false; } Path that = (Path)o; return this.uri.equals(that.uri); } @Override public int hashCode() { return uri.hashCode(); } @Override public int compareTo(Path o) { return this.uri.compareTo(o.uri); } /** * Returns the number of elements in this path. * @return the number of elements in this path */ public int depth() { String path = uri.getPath(); int depth = 0; int slash = path.length()==1 && path.charAt(0)=='/' ? -1 : 0; while (slash != -1) { depth++; slash = path.indexOf(SEPARATOR, slash+1); } return depth; } /** * Returns a qualified path object for the {@link FileSystem}'s working * directory. * * @param fs the target FileSystem * @return a qualified path object for the FileSystem's working directory * @deprecated use {@link #makeQualified(URI, Path)} */ @Deprecated public Path makeQualified(FileSystem fs) { return makeQualified(fs.getUri(), fs.getWorkingDirectory()); } /** * Returns a qualified path object. * * @param defaultUri if this path is missing the scheme or authority * components, borrow them from this URI * @param workingDir if this path isn't absolute, treat it as relative to this * working directory * @return this path if it contains a scheme and authority and is absolute, or * a new path that includes a path and authority and is fully qualified */ @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"}) public Path makeQualified(URI defaultUri, Path workingDir ) { Path path = this; if (!isAbsolute()) { path = new Path(workingDir, this); } URI pathUri = path.toUri(); String scheme = pathUri.getScheme(); String authority = pathUri.getAuthority(); String fragment = pathUri.getFragment(); if (scheme != null && (authority != null || defaultUri.getAuthority() == null)) return path; if (scheme == null) { scheme = defaultUri.getScheme(); } if (authority == null) { authority = defaultUri.getAuthority(); if (authority == null) { authority = ""; } } URI newUri = null; try { newUri = new URI(scheme, authority , normalizePath(scheme, pathUri.getPath()), null, fragment); } catch (URISyntaxException e) { throw new IllegalArgumentException(e); } return new Path(newUri); } /** * Validate the contents of a deserialized Path, so as * to defend against malicious object streams. * @throws InvalidObjectException if there's no URI */ @Override public void validateObject() throws InvalidObjectException { if (uri == null) { throw new InvalidObjectException("No URI in deserialized Path"); } } }
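/*
 * Editor's illustrative sketch: exercising the Path API defined above using only constructors
 * and accessors from that class. The values noted in comments follow from the parsing and
 * normalization rules above (duplicate slashes collapsed, trailing slash trimmed, child resolved
 * against parent, mergePaths keeping the first path's scheme/authority); the path strings
 * themselves are made up for illustration.
 */
import org.apache.hadoop.fs.Path;

public class PathSketch {
    public static void main(String[] args) {
        Path p = new Path("hdfs://namenode:8020//data//logs/");
        System.out.println(p);              // hdfs://namenode:8020/data/logs  (slashes normalized)
        System.out.println(p.getName());    // logs
        System.out.println(p.getParent());  // hdfs://namenode:8020/data
        System.out.println(p.depth());      // 2
        System.out.println(p.isAbsolute()); // true

        // Child resolved against parent; Path(Path, Path) adds a trailing slash to the parent's
        // URI path so URI resolution behaves as expected.
        Path child = new Path(p, "2024/01-01.txt");
        System.out.println(child);          // hdfs://namenode:8020/data/logs/2024/01-01.txt

        // mergePaths appends the second path and keeps the scheme/authority of the first.
        System.out.println(Path.mergePaths(new Path("/a/b"), new Path("/c/d"))); // /a/b/c/d
    }
}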
/* * Zed Attack Proxy (ZAP) and its related class files. * * ZAP is an HTTP/HTTPS proxy for assessing web application security. * * Copyright 2021 The ZAP Development Team * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.zaproxy.addon.automation; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; import static org.mockito.BDDMockito.given; import static org.mockito.Mockito.CALLS_REAL_METHODS; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.withSettings; import java.io.File; import java.lang.reflect.Field; import java.nio.file.Files; import java.nio.file.Path; import java.util.List; import java.util.Locale; import java.util.Map; import org.apache.commons.lang3.RandomStringUtils; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Nested; import org.junit.jupiter.api.Test; import org.mockito.MockedStatic; import org.mockito.Mockito; import org.parosproxy.paros.CommandLine; import org.parosproxy.paros.Constant; import org.parosproxy.paros.control.Control; import org.parosproxy.paros.extension.CommandLineArgument; import org.parosproxy.paros.extension.ExtensionLoader; import org.parosproxy.paros.model.Model; import org.zaproxy.addon.automation.jobs.ActiveScanJob; import org.zaproxy.addon.automation.jobs.AddOnJob; import org.zaproxy.addon.automation.jobs.DelayJob; import org.zaproxy.addon.automation.jobs.ParamsJob; import org.zaproxy.addon.automation.jobs.PassiveScanConfigJob; import org.zaproxy.addon.automation.jobs.PassiveScanWaitJob; import org.zaproxy.addon.automation.jobs.RequestorJob; import org.zaproxy.addon.automation.jobs.SpiderJob; import org.zaproxy.zap.extension.pscan.ExtensionPassiveScan; import org.zaproxy.zap.extension.spider.ExtensionSpider; import org.zaproxy.zap.extension.stats.InMemoryStats; import org.zaproxy.zap.testutils.TestUtils; import org.zaproxy.zap.utils.I18N; import org.zaproxy.zap.utils.Stats; import org.zaproxy.zap.utils.ZapXmlConfiguration; class ExtentionAutomationUnitTest extends TestUtils { private static MockedStatic<CommandLine> mockedCmdLine; @BeforeAll static void init() throws Exception { mockedCmdLine = Mockito.mockStatic(CommandLine.class); updateEnv("myEnvVar", "envVarValue"); } @AfterAll static void close() throws ReflectiveOperationException { mockedCmdLine.close(); updateEnv("myEnvVar", ""); } @BeforeEach void setUp() throws Exception { Constant.messages = new I18N(Locale.ENGLISH); Model model = mock(Model.class, withSettings().defaultAnswer(CALLS_REAL_METHODS)); Model.setSingletonForTesting(model); ExtensionLoader extensionLoader = mock(ExtensionLoader.class, withSettings().lenient()); Control.initSingletonForTesting(Model.getSingleton(), extensionLoader); } @Test void shouldReturnDefaultData() { // Given / When ExtensionAutomation extAuto = new ExtensionAutomation(); // Then 
assertThat(extAuto.canUnload(), is(equalTo(true))); assertThat(extAuto.getI18nPrefix(), is(equalTo("automation"))); assertThat(extAuto.getAuthor(), is(equalTo("ZAP Dev Team"))); } @Test void shouldRegisterBuiltInJobs() { // Given ExtensionAutomation extAuto = new ExtensionAutomation(); // When Map<String, AutomationJob> jobs = extAuto.getAutomationJobs(); // Then assertThat(jobs.size(), is(equalTo(8))); assertThat(jobs.containsKey(AddOnJob.JOB_NAME), is(equalTo(true))); assertThat(jobs.containsKey(PassiveScanConfigJob.JOB_NAME), is(equalTo(true))); assertThat(jobs.containsKey(PassiveScanWaitJob.JOB_NAME), is(equalTo(true))); assertThat(jobs.containsKey(SpiderJob.JOB_NAME), is(equalTo(true))); assertThat(jobs.containsKey(DelayJob.JOB_NAME), is(equalTo(true))); assertThat(jobs.containsKey(ActiveScanJob.JOB_NAME), is(equalTo(true))); assertThat(jobs.containsKey(ParamsJob.JOB_NAME), is(equalTo(true))); assertThat(jobs.containsKey(RequestorJob.JOB_NAME), is(equalTo(true))); } @Test void shouldRegisterNewJob() { // Given ExtensionAutomation extAuto = new ExtensionAutomation(); String jobName = "testjob"; AutomationJob job = new AutomationJobImpl() { @Override public String getType() { return jobName; } @Override public Order getOrder() { return Order.REPORT; } }; // When extAuto.registerAutomationJob(job); Map<String, AutomationJob> jobs = extAuto.getAutomationJobs(); // Then assertThat(jobs.size(), is(equalTo(9))); assertThat(jobs.containsKey(jobName), is(equalTo(true))); } @Test void shouldUnregisterExistingJob() { // Given ExtensionAutomation extAuto = new ExtensionAutomation(); // When Map<String, AutomationJob> jobs = extAuto.getAutomationJobs(); int origSize = jobs.size(); extAuto.unregisterAutomationJob(jobs.get(SpiderJob.JOB_NAME)); // Then assertThat(jobs.size(), is(equalTo(origSize - 1))); assertThat(jobs.containsKey(SpiderJob.JOB_NAME), is(equalTo(false))); } @Test void shouldCreateMinTemplateFile() throws Exception { // Given ExtensionAutomation extAuto = new ExtensionAutomation(); Path filePath = getResourcePath("resources/template-min.yaml"); String expectedTemplate = new String(Files.readAllBytes(filePath)); // When File f = File.createTempFile("ZAP-min-template-test", ".yaml"); extAuto.generateTemplateFile(f.getAbsolutePath(), false); String generatedTemplate = new String(Files.readAllBytes(f.toPath())); // Then // If this fails then the easiest option is to generate the file using the cmdline option, // manually check it and then replace it in the resources directory assertThat(generatedTemplate.length(), is(equalTo(expectedTemplate.length()))); assertThat(generatedTemplate, is(equalTo(expectedTemplate))); } @Test void shouldCreateMaxTemplateFile() throws Exception { // Given ExtensionAutomation extAuto = new ExtensionAutomation(); Path filePath = getResourcePath("resources/template-max.yaml"); String expectedTemplate = new String(Files.readAllBytes(filePath)); // When File f = File.createTempFile("ZAP-max-template-test", ".yaml"); extAuto.generateTemplateFile(f.getAbsolutePath(), true); String generatedTemplate = new String(Files.readAllBytes(f.toPath())); // Then // If this fails then the easiest option is to generate the file using the cmdline option, // manually check it and then replace it in the resources directory assertThat(generatedTemplate.length(), is(equalTo(expectedTemplate.length()))); assertThat(generatedTemplate, is(equalTo(expectedTemplate))); } @Test void shouldCreateConfigTemplateFile() throws Exception { // Given Model model = mock(Model.class, 
withSettings().defaultAnswer(CALLS_REAL_METHODS)); Model.setSingletonForTesting(model); ExtensionLoader extensionLoader = mock(ExtensionLoader.class, withSettings().lenient()); ExtensionPassiveScan extPscan = mock(ExtensionPassiveScan.class, withSettings().lenient()); given(extensionLoader.getExtension(ExtensionPassiveScan.class)).willReturn(extPscan); ExtensionSpider extSpider = mock(ExtensionSpider.class, withSettings().lenient()); given(extensionLoader.getExtension(ExtensionSpider.class)).willReturn(extSpider); Control.initSingletonForTesting(Model.getSingleton(), extensionLoader); Model.getSingleton().getOptionsParam().load(new ZapXmlConfiguration()); ExtensionAutomation extAuto = new ExtensionAutomation(); Path filePath = getResourcePath("resources/template-config.yaml"); String expectedTemplate = new String(Files.readAllBytes(filePath)); // When File f = File.createTempFile("ZAP-config-template-test", ".yaml"); extAuto.generateConfigFile(f.getAbsolutePath()); String generatedTemplate = new String(Files.readAllBytes(f.toPath())); // Then assertThat(generatedTemplate.length(), is(equalTo(expectedTemplate.length()))); assertThat(generatedTemplate, is(equalTo(expectedTemplate))); } @Test void shouldRunPlan() { // Given ExtensionAutomation extAuto = new ExtensionAutomation(); String job1Name = "job1"; String job2Name = "job2"; String job3Name = "job3"; AutomationJobImpl job1 = new AutomationJobImpl() { @Override public String getType() { return job1Name; } @Override public Order getOrder() { return Order.REPORT; } }; AutomationJobImpl job2 = new AutomationJobImpl() { @Override public String getType() { return job2Name; } @Override public Order getOrder() { return Order.REPORT; } }; AutomationJobImpl job3 = new AutomationJobImpl() { @Override public String getType() { return job3Name; } @Override public Order getOrder() { return Order.REPORT; } }; Path filePath = getResourcePath("resources/testplan-failonerror.yaml"); InMemoryStats stats = new InMemoryStats(); Stats.addListener(stats); // When extAuto.registerAutomationJob(job1); extAuto.registerAutomationJob(job2); extAuto.registerAutomationJob(job3); AutomationProgress progress = extAuto.runAutomationFile(filePath.toAbsolutePath().toString()); List<AutomationJob> runJobs = progress.getRunJobs(); // Then assertThat(progress.hasWarnings(), is(equalTo(false))); assertThat(progress.hasErrors(), is(equalTo(false))); assertThat(runJobs.size(), is(equalTo(3))); assertThat(runJobs.get(0).getName(), is(equalTo("job1"))); assertThat(((AutomationJobImpl) runJobs.get(0)).wasRun(), is(equalTo(true))); assertThat(runJobs.get(1).getName(), is(equalTo("job2"))); assertThat(((AutomationJobImpl) runJobs.get(1)).wasRun(), is(equalTo(true))); assertThat(runJobs.get(2).getName(), is(equalTo("job3"))); assertThat(((AutomationJobImpl) runJobs.get(2)).wasRun(), is(equalTo(true))); assertThat(stats.getStat(ExtensionAutomation.WARNING_COUNT_STATS), is(equalTo(0L))); assertThat(stats.getStat(ExtensionAutomation.ERROR_COUNT_STATS), is(equalTo(0L))); assertThat(stats.getStat(ExtensionAutomation.PLANS_RUN_STATS), is(equalTo(1L))); assertThat(stats.getStat(ExtensionAutomation.TOTAL_JOBS_RUN_STATS), is(equalTo(3L))); assertThat( stats.getStat( ExtensionAutomation.JOBS_RUN_STATS_PREFIX + "job1" + ExtensionAutomation.JOBS_RUN_STATS_POSTFIX), is(equalTo(1L))); assertThat( stats.getStat( ExtensionAutomation.JOBS_RUN_STATS_PREFIX + "job2" + ExtensionAutomation.JOBS_RUN_STATS_POSTFIX), is(equalTo(1L))); assertThat( stats.getStat( ExtensionAutomation.JOBS_RUN_STATS_PREFIX + 
"job3" + ExtensionAutomation.JOBS_RUN_STATS_POSTFIX), is(equalTo(1L))); } @Test void shouldRunWithResolvedParams() throws ReflectiveOperationException { // Given ExtensionAutomation extAuto = new ExtensionAutomation(); TestParamContainer tpc = new TestParamContainer(); AutomationJobImpl job = new AutomationJobImpl(tpc) { @Override public String getType() { return "job"; } @Override public Order getOrder() { return Order.EXPLORE; } }; Path filePath = getResourcePath("resources/testplan-applyResolvedParams.yaml"); // When extAuto.registerAutomationJob(job); AutomationProgress progress = extAuto.runAutomationFile(filePath.toAbsolutePath().toString()); List<AutomationJob> runJobs = progress.getRunJobs(); // Then assertThat(progress.hasWarnings(), is(equalTo(false))); assertThat(progress.hasErrors(), is(equalTo(false))); assertThat(runJobs.size(), is(equalTo(1))); assertThat(runJobs.get(0).getName(), is(equalTo("job"))); assertThat(((AutomationJobImpl) runJobs.get(0)).wasRun(), is(equalTo(true))); assertThat(tpc.getTestParam().getStringParam(), is(equalTo("true"))); } @Nested class PlanInOrderTests { private AutomationJobImpl job1; private AutomationJobImpl job2; private AutomationJobImpl job3; private ExtensionAutomation extAuto; @BeforeEach void setup() { extAuto = new ExtensionAutomation(); job1 = new AutomationJobImpl() { @Override public String getType() { return "job1"; } }; job2 = new AutomationJobImpl() { @Override public String getType() { return "job2"; } }; job3 = new AutomationJobImpl() { @Override public String getType() { return "job3"; } }; } @Test void shouldRunPlanInDefinedOrderWithSameRegOrder() { // Given Path filePath = getResourcePath("resources/testplan-failonerror.yaml"); // When extAuto.registerAutomationJob(job1); extAuto.registerAutomationJob(job2); extAuto.registerAutomationJob(job3); AutomationProgress progress = extAuto.runAutomationFile(filePath.toAbsolutePath().toString()); List<AutomationJob> runJobs = progress.getRunJobs(); // Then assertThat(runJobs.size(), is(equalTo(3))); assertThat(runJobs.get(0).getName(), is(equalTo("job1"))); assertThat(((AutomationJobImpl) runJobs.get(0)).wasRun(), is(equalTo(true))); assertThat(runJobs.get(1).getName(), is(equalTo("job2"))); assertThat(((AutomationJobImpl) runJobs.get(1)).wasRun(), is(equalTo(true))); assertThat(runJobs.get(2).getName(), is(equalTo("job3"))); assertThat(((AutomationJobImpl) runJobs.get(2)).wasRun(), is(equalTo(true))); } @Test void shouldRunPlanInDefinedOrderWithDifferentRegOrder() { // Given Path filePath = getResourcePath("resources/testplan-failonerror.yaml"); // When extAuto.registerAutomationJob(job3); extAuto.registerAutomationJob(job1); extAuto.registerAutomationJob(job2); AutomationProgress progress = extAuto.runAutomationFile(filePath.toAbsolutePath().toString()); List<AutomationJob> runJobs = progress.getRunJobs(); // Then assertThat(runJobs.size(), is(equalTo(3))); assertThat(runJobs.get(0).getName(), is(equalTo("job1"))); assertThat(((AutomationJobImpl) runJobs.get(0)).wasRun(), is(equalTo(true))); assertThat(runJobs.get(1).getName(), is(equalTo("job2"))); assertThat(((AutomationJobImpl) runJobs.get(1)).wasRun(), is(equalTo(true))); assertThat(runJobs.get(2).getName(), is(equalTo("job3"))); assertThat(((AutomationJobImpl) runJobs.get(2)).wasRun(), is(equalTo(true))); } } @Test void shouldFailPlanOnError() { // Given ExtensionAutomation extAuto = new ExtensionAutomation(); String job1Name = "job1"; String job3Name = "job3"; AutomationJobImpl job1 = new AutomationJobImpl() { @Override public 
String getType() { return job1Name; } @Override public Order getOrder() { return Order.REPORT; } }; AutomationJobImpl job3 = new AutomationJobImpl() { @Override public String getType() { return job3Name; } @Override public Order getOrder() { return Order.REPORT; } }; Path filePath = getResourcePath("resources/testplan-failonerror.yaml"); InMemoryStats stats = new InMemoryStats(); Stats.addListener(stats); // When extAuto.registerAutomationJob(job1); extAuto.registerAutomationJob(job3); AutomationProgress progress = extAuto.runAutomationFile(filePath.toAbsolutePath().toString()); // Then assertThat(progress.hasWarnings(), is(equalTo(false))); assertThat(progress.hasErrors(), is(equalTo(true))); assertThat(job1.wasRun(), is(equalTo(false))); assertThat(job3.wasRun(), is(equalTo(false))); assertThat(stats.getStat(ExtensionAutomation.WARNING_COUNT_STATS), is(equalTo(0L))); assertThat(stats.getStat(ExtensionAutomation.ERROR_COUNT_STATS), is(equalTo(1L))); assertThat(stats.getStat(ExtensionAutomation.PLANS_RUN_STATS), is(equalTo(1L))); assertThat(stats.getStat(ExtensionAutomation.TOTAL_JOBS_RUN_STATS), is(nullValue())); } @Test void shouldRunPlanWithJobsWithSameType() { // Given ExtensionAutomation extAuto = new ExtensionAutomation(); String job1Name = "job1"; AutomationJobImpl job = new AutomationJobImpl() { @Override public String getType() { return job1Name; } @Override public Order getOrder() { return Order.REPORT; } }; Path filePath = getResourcePath("resources/testplan-sametype.yaml"); InMemoryStats stats = new InMemoryStats(); Stats.addListener(stats); // When extAuto.registerAutomationJob(job); AutomationProgress progress = extAuto.runAutomationFile(filePath.toAbsolutePath().toString()); List<AutomationJob> runJobs = progress.getRunJobs(); // Then assertThat(runJobs.size(), is(equalTo(3))); assertThat(runJobs.get(0).getName(), is(equalTo("job1"))); assertThat(((AutomationJobImpl) runJobs.get(0)).getOptional(), is(equalTo("run 1"))); assertThat(runJobs.get(1).getName(), is(equalTo("job1"))); assertThat(((AutomationJobImpl) runJobs.get(1)).getOptional(), is(equalTo("run 2"))); assertThat(runJobs.get(2).getName(), is(equalTo("job1"))); assertThat(((AutomationJobImpl) runJobs.get(2)).getOptional(), is(nullValue())); assertThat(progress.hasWarnings(), is(equalTo(false))); assertThat(progress.hasErrors(), is(equalTo(false))); assertThat(stats.getStat(ExtensionAutomation.WARNING_COUNT_STATS), is(equalTo(0L))); assertThat(stats.getStat(ExtensionAutomation.ERROR_COUNT_STATS), is(equalTo(0L))); assertThat(stats.getStat(ExtensionAutomation.PLANS_RUN_STATS), is(equalTo(1L))); assertThat(stats.getStat(ExtensionAutomation.TOTAL_JOBS_RUN_STATS), is(equalTo(3L))); assertThat( stats.getStat( ExtensionAutomation.JOBS_RUN_STATS_PREFIX + "job1" + ExtensionAutomation.JOBS_RUN_STATS_POSTFIX), is(equalTo(3L))); } @Test void shouldReturnCmdLineArgs() { // Given ExtensionAutomation extAuto = new ExtensionAutomation(); // When CommandLineArgument[] args = extAuto.getCommandLineArguments(); // Then assertThat(args.length, is(equalTo(4))); assertThat(args[0].getName(), is(equalTo("-autorun"))); assertThat(args[0].getNumOfArguments(), is(equalTo(1))); assertThat(args[1].getName(), is(equalTo("-autogenmin"))); assertThat(args[1].getNumOfArguments(), is(equalTo(1))); assertThat(args[2].getName(), is(equalTo("-autogenmax"))); assertThat(args[2].getNumOfArguments(), is(equalTo(1))); assertThat(args[3].getName(), is(equalTo("-autogenconf"))); assertThat(args[3].getNumOfArguments(), is(equalTo(1))); } @Test void 
shouldRunPlanWithWarnings() { // Given ExtensionAutomation extAuto = new ExtensionAutomation(); String job1Name = "job1"; String job2Name = "job2"; String job3Name = "job3"; AutomationJobImpl job1 = new AutomationJobImpl() { @Override public String getType() { return job1Name; } @Override public Order getOrder() { return Order.REPORT; } }; AutomationJobImpl job2 = new AutomationJobImpl() { @Override public String getType() { return job2Name; } @Override public Order getOrder() { return Order.REPORT; } }; AutomationJobImpl job3 = new AutomationJobImpl() { @Override public String getType() { return job3Name; } @Override public Order getOrder() { return Order.REPORT; } }; Path filePath = getResourcePath("resources/testplan-withwarnings.yaml"); InMemoryStats stats = new InMemoryStats(); Stats.addListener(stats); // When extAuto.registerAutomationJob(job1); extAuto.registerAutomationJob(job2); extAuto.registerAutomationJob(job3); AutomationProgress progress = extAuto.runAutomationFile(filePath.toAbsolutePath().toString()); List<AutomationJob> runJobs = progress.getRunJobs(); // Then assertThat(progress.hasWarnings(), is(equalTo(true))); assertThat(progress.getWarnings().size(), is(equalTo(1))); assertThat(progress.getWarnings().get(0), is(equalTo("!automation.error.job.name!"))); assertThat(progress.hasErrors(), is(equalTo(false))); assertThat(runJobs.size(), is(equalTo(3))); assertThat(runJobs.get(0).getName(), is(equalTo("Job 1"))); assertThat(((AutomationJobImpl) runJobs.get(0)).wasRun(), is(equalTo(true))); assertThat(runJobs.get(1).getName(), is(equalTo("job2"))); assertThat(((AutomationJobImpl) runJobs.get(1)).wasRun(), is(equalTo(true))); assertThat(runJobs.get(2).getName(), is(equalTo("job3"))); assertThat(((AutomationJobImpl) runJobs.get(2)).wasRun(), is(equalTo(true))); assertThat(stats.getStat(ExtensionAutomation.WARNING_COUNT_STATS), is(equalTo(1L))); assertThat(stats.getStat(ExtensionAutomation.ERROR_COUNT_STATS), is(equalTo(0L))); assertThat(stats.getStat(ExtensionAutomation.PLANS_RUN_STATS), is(equalTo(1L))); assertThat(stats.getStat(ExtensionAutomation.TOTAL_JOBS_RUN_STATS), is(equalTo(3L))); assertThat( stats.getStat( ExtensionAutomation.JOBS_RUN_STATS_PREFIX + "job1" + ExtensionAutomation.JOBS_RUN_STATS_POSTFIX), is(equalTo(1L))); assertThat( stats.getStat( ExtensionAutomation.JOBS_RUN_STATS_PREFIX + "job2" + ExtensionAutomation.JOBS_RUN_STATS_POSTFIX), is(equalTo(1L))); assertThat( stats.getStat( ExtensionAutomation.JOBS_RUN_STATS_PREFIX + "job3" + ExtensionAutomation.JOBS_RUN_STATS_POSTFIX), is(equalTo(1L))); } @Test void shouldFailPlanOnErrorApplyingParameters() { // Given TestParamContainer tpc = new TestParamContainer(); AutomationJobImpl job = new AutomationJobImpl(tpc) { @Override public String getType() { return "job"; } @Override public Order getOrder() { return Order.EXPLORE; } @Override public String getParamMethodName() { return "getTestParam"; } }; ExtensionAutomation extAuto = new ExtensionAutomation(); Path filePath = getResourcePath("resources/testPlan-failOnErrorApplyingParameters.yaml"); // When extAuto.registerAutomationJob(job); AutomationProgress progress = extAuto.runAutomationFile(filePath.toAbsolutePath().toString()); // Then assertThat(progress.hasErrors(), is(equalTo(true))); assertThat(progress.hasWarnings(), is(equalTo(false))); assertThat(progress.getErrors().size(), is(equalTo(1))); assertThat(progress.getErrors().get(0), is(equalTo("!automation.error.options.badbool!"))); assertThat(job.wasRun(), is(equalTo(false))); } @Test void 
shouldFailPlanOnWarningApplyingParameters() { // Given TestParamContainer tpc = new TestParamContainer(); AutomationJobImpl job = new AutomationJobImpl(tpc) { @Override public String getType() { return "job"; } @Override public Order getOrder() { return Order.EXPLORE; } @Override public String getParamMethodName() { return "getTestParam"; } }; ExtensionAutomation extAuto = new ExtensionAutomation(); Path filePath = getResourcePath("resources/testPlan-failOnWarningApplyingParameters.yaml"); // When extAuto.registerAutomationJob(job); AutomationProgress progress = extAuto.runAutomationFile(filePath.toAbsolutePath().toString()); // Then assertThat(progress.hasErrors(), is(equalTo(false))); assertThat(progress.hasWarnings(), is(equalTo(true))); assertThat(progress.getWarnings().size(), is(equalTo(1))); assertThat( progress.getWarnings().get(0), is(equalTo("!automation.error.options.unknown!"))); assertThat(job.wasRun(), is(equalTo(false))); } @SuppressWarnings({"unchecked"}) public static void updateEnv(String name, String val) throws ReflectiveOperationException { Map<String, String> env = System.getenv(); Field field = env.getClass().getDeclaredField("m"); field.setAccessible(true); ((Map<String, String>) field.get(env)).put(name, val); } @Test void shouldExtractTests() { // Given AutomationJobImpl job = new AutomationJobImpl() { @Override public String getType() { return "job"; } }; ExtensionAutomation extAuto = new ExtensionAutomation(); Path filePath = getResourcePath("resources/testPlan-withTests.yaml"); // When extAuto.registerAutomationJob(job); AutomationProgress progress = extAuto.runAutomationFile(filePath.toAbsolutePath().toString()); // Then assertThat(progress.hasErrors(), is(false)); assertThat(progress.hasWarnings(), is(false)); assertThat(progress.getRunJobs().size(), is(1)); assertThat(((AutomationJobImpl) progress.getRunJobs().get(0)).testsAdded, is(true)); } @Test void shouldFailPlanOnLoggedTestError() { // Given AutomationJobImpl job1 = new AutomationJobImpl() { @Override public String getType() { return "job1"; } }; AutomationJobImpl job2 = new AutomationJobImpl() { @Override public String getType() { return "job2"; } }; ExtensionAutomation extAuto = new ExtensionAutomation(); Path filePath = getResourcePath("resources/testPlan-failOnLoggedTestError.yaml"); job1.testsLogError = true; // When extAuto.registerAutomationJob(job1); extAuto.registerAutomationJob(job2); AutomationProgress progress = extAuto.runAutomationFile(filePath.toAbsolutePath().toString()); // Then assertThat(progress.getRunJobs().size(), is(1)); assertThat(progress.getRunJobs().get(0).getType(), is(job1.getType())); assertThat(progress.hasWarnings(), is(false)); assertThat(progress.hasErrors(), is(true)); assertThat( progress.getErrors().get(0), is(((AutomationJobImpl) progress.getRunJobs().get(0)).testsLoggedString)); } // Methods are accessed via reflection @SuppressWarnings("unused") private static class TestParamContainer { private TestParam testParam = new TestParam(); public TestParam getTestParam() { return testParam; } } // Methods are accessed via reflection @SuppressWarnings("unused") private static class TestParam { private boolean boolParam; private String stringParam; public void setBoolParam(boolean boolParam) { this.boolParam = boolParam; } public boolean getBoolParam() { return boolParam; } public void setStringParam(String stringParam) { this.stringParam = stringParam; } public String getStringParam() { return stringParam; } } private static class AutomationJobImpl extends AutomationJob 
{ private boolean wasRun = false; private Object paramMethodObject; private String paramNameMethod = "getTestParam"; private String optional; private String type; private Order order = Order.REPORT; private boolean testsAdded = false; private String testsLoggedString; private boolean testsLogError = false; public AutomationJobImpl() {} public AutomationJobImpl(Object paramMethodObject) { this.paramMethodObject = paramMethodObject; } @Override public void runJob(AutomationEnvironment env, AutomationProgress progress) { wasRun = true; } @Override protected void addTests(Object testsObj, AutomationProgress progress) { testsAdded = true; } @Override public void logTestsToProgress(AutomationProgress progress) { if (testsAdded && testsLogError) { testsLoggedString = RandomStringUtils.randomAlphanumeric(20); progress.error(testsLoggedString); } } public boolean wasRun() { return wasRun; } @Override public String getType() { return type; } @Override public Order getOrder() { return order; } @Override public String getSummary() { return ""; } @Override public Object getParamMethodObject() { return paramMethodObject; } @Override public String getParamMethodName() { return paramNameMethod; } @Override public boolean verifyCustomParameter( String name, String value, AutomationProgress progress) { if (name.equals("optional")) { return true; } return false; } @Override public boolean applyCustomParameter(String name, String value) { if (name.equals("optional")) { optional = value; return true; } return false; } public String getOptional() { return this.optional; } @Override public AutomationJob newJob() { AutomationJobImpl job = new AutomationJobImpl(); job.paramMethodObject = this.paramMethodObject; job.type = this.getType(); job.order = this.getOrder(); job.testsLogError = testsLogError; return job; } } }
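/*
 * Illustrative sketch, not part of the original test class: the plan-running calls
 * exercised by the tests above, stripped of the test scaffolding. `myJob` stands for
 * any concrete AutomationJob implementation and `planYamlPath` for a plan file on
 * disk; both are hypothetical, only the ExtensionAutomation calls are taken from the
 * tests.
 */
class AutomationPlanSketch {
    static AutomationProgress runPlan(AutomationJob myJob, String planYamlPath) {
        ExtensionAutomation extAuto = new ExtensionAutomation();
        extAuto.registerAutomationJob(myJob);   // the job's type must match an entry in the YAML plan
        AutomationProgress progress = extAuto.runAutomationFile(planYamlPath);
        if (progress.hasErrors() || progress.hasWarnings()) {
            progress.getErrors().forEach(System.err::println);
            progress.getWarnings().forEach(System.err::println);
        }
        return progress;
    }
}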
/** * This code is free software; you can redistribute it and/or modify it under * the terms of the new BSD License. * * Copyright (c) 2008-2011, Sebastian Staudt */ package com.github.koraktor.steamcondenser.steam.community; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.Map; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import org.w3c.dom.Element; import org.w3c.dom.NodeList; import com.github.koraktor.steamcondenser.exceptions.SteamCondenserException; /** * The SteamGroup class represents a group in the Steam Community * * @author Sebastian Staudt */ public class SteamGroup { private static Map<Object, SteamGroup> steamGroups = new HashMap<Object, SteamGroup>(); private String customUrl; private long fetchTime; private long groupId64; private ArrayList<SteamId> members; /** * Creates a new <code>SteamGroup</code> instance or gets an existing one * from the cache for the group with the given ID * * @param id The 64bit Steam ID of the group * @return The <code>SteamGroup</code> instance of the requested group * @throws SteamCondenserException if an error occurs while parsing the * data */ public static SteamGroup create(long id) throws SteamCondenserException { return SteamGroup.create((Object) id, true, false); } /** * Creates a new <code>SteamGroup</code> instance or gets an existing one * from the cache for the group with the given ID * * @param id The custom URL of the group specified by the group admin * @return The <code>SteamGroup</code> instance of the requested group * @throws SteamCondenserException if an error occurs while parsing the * data */ public static SteamGroup create(String id) throws SteamCondenserException { return SteamGroup.create((Object) id, true, false); } /** * Creates a new <code>SteamGroup</code> instance or gets an existing one * from the cache for the group with the given ID * * @param id The 64bit Steam ID of the group * @param fetch if <code>true</code> the groups's data is loaded into the * object * @return The <code>SteamGroup</code> instance of the requested group * @throws SteamCondenserException if an error occurs while parsing the * data */ public static SteamGroup create(long id, boolean fetch) throws SteamCondenserException { return SteamGroup.create((Object) id, fetch, false); } /** * Creates a new <code>SteamGroup</code> instance or gets an existing one * from the cache for the group with the given ID * * @param id The custom URL of the group specified by the group admin * @param fetch if <code>true</code> the groups's data is loaded into the * object * @return The <code>SteamGroup</code> instance of the requested group * @throws SteamCondenserException if an error occurs while parsing the * data */ public static SteamGroup create(String id, boolean fetch) throws SteamCondenserException { return SteamGroup.create((Object) id, fetch, false); } /** * Creates a new <code>SteamGroup</code> instance or gets an existing one * from the cache for the group with the given ID * * @param id The 64bit Steam ID of the group * @param fetch if <code>true</code> the groups's data is loaded into the * object * @param bypassCache If <code>true</code> an already cached instance for * this group will be ignored and a new one will be created * @return The <code>SteamGroup</code> instance of the requested group * @throws SteamCondenserException if an error occurs while parsing the * data */ public static SteamGroup create(long id, boolean fetch, boolean bypassCache) throws 
SteamCondenserException { return SteamGroup.create((Object) id, fetch, bypassCache); } /** * Creates a new <code>SteamGroup</code> instance or gets an existing one * from the cache for the group with the given ID * * @param id The custom URL of the group specified by the group admin * @param fetch if <code>true</code> the groups's data is loaded into the * object * @param bypassCache If <code>true</code> an already cached instance for * this group will be ignored and a new one will be created * @return The <code>SteamGroup</code> instance of the requested group * @throws SteamCondenserException if an error occurs while parsing the * data */ public static SteamGroup create(String id, boolean fetch, boolean bypassCache) throws SteamCondenserException { return SteamGroup.create((Object) id, fetch, bypassCache); } /** * Creates a new <code>SteamGroup</code> instance or gets an existing one * from the cache for the group with the given ID * * @param id The custom URL of the group specified by the group admin or * the 64bit group ID * @param fetch if <code>true</code> the groups's data is loaded into the * object * @param bypassCache If <code>true</code> an already cached instance for * this group will be ignored and a new one will be created * @return The <code>SteamGroup</code> instance of the requested group * @throws SteamCondenserException if an error occurs while parsing the * data */ private static SteamGroup create(Object id, boolean fetch, boolean bypassCache) throws SteamCondenserException { if(SteamGroup.isCached(id) && !bypassCache) { SteamGroup group = SteamGroup.steamGroups.get(id); if(fetch && !group.isFetched()) { group.fetchMembers(); } return group; } else { return new SteamGroup(id, fetch); } } /** * Returns whether the requested group is already cached * * @param id The custom URL of the group specified by the group admin or * the 64bit group ID * @return <code>true</code> if this group is already cached */ public static boolean isCached(Object id) { return SteamGroup.steamGroups.containsKey(id); } /** * Creates a new <code>SteamGroup</code> instance for the group with the * given ID * * @param id The custom URL of the group specified by the group admin or * the 64bit group ID * @param fetch if <code>true</code> the groups's data is loaded into the * object * @throws SteamCondenserException if an error occurs while parsing the * data */ private SteamGroup(Object id, boolean fetch) throws SteamCondenserException { if(id instanceof String) { this.customUrl = (String) id; } else { this.groupId64 = (Long) id; } if(fetch) { this.fetchMembers(); } this.cache(); } /** * Saves this <code>SteamGroup</code> instance in the cache * * @return <code>false</code> if this group is already cached */ public boolean cache() { if(!SteamGroup.steamGroups.containsKey(this.groupId64)) { SteamGroup.steamGroups.put(this.groupId64, this); if(this.customUrl != null && !SteamGroup.steamGroups.containsKey(this.customUrl)) { SteamGroup.steamGroups.put(this.customUrl, this); } return true; } return false; } /** * Loads the members of this group * <p> * This might take several HTTP requests as the Steam Community splits this * data over several XML documents if the group has lots of members. 
* * @throws SteamCondenserException if an error occurs while parsing the * data */ public void fetchMembers() throws SteamCondenserException { int page = 0; int totalPages; String url; this.members = new ArrayList<SteamId>(); try { DocumentBuilder parser = DocumentBuilderFactory.newInstance().newDocumentBuilder(); do { page ++; url = this.getBaseUrl() + "/memberslistxml?p=" + page; Element memberData = parser.parse(url).getDocumentElement(); totalPages = Integer.parseInt(memberData.getElementsByTagName("totalPages").item(0).getTextContent()); NodeList membersList = ((Element) memberData.getElementsByTagName("members").item(0)).getElementsByTagName("steamID64"); for(int i = 0; i < membersList.getLength(); i++) { Element member = (Element) membersList.item(i); this.members.add(SteamId.create(Long.parseLong(member.getTextContent()))); } } while(page < totalPages); } catch(Exception e) { throw new SteamCondenserException("XML data could not be parsed.", e); } this.fetchTime = new Date().getTime(); } /** * Returns the custom URL of this group * <p> * The custom URL is a admin specified unique string that can be used * instead of the 64bit SteamID as an identifier for a group. * * @return The custom URL of this group */ public String getCustomUrl() { return this.customUrl; } /** * Returns this group's 64bit SteamID * * @return This group's 64bit SteamID */ public long getGroupId64() { return this.groupId64; } /** * Returns the base URL for this group's page * <p> * This URL is different for groups having a custom URL. * * @return The base URL for this group */ public String getBaseUrl() { if(this.customUrl == null) { return "http://steamcommunity.com/gid/" + this.groupId64; } else { return "http://steamcommunity.com/groups/" + this.customUrl; } } /** * Returns the time this group has been fetched * * @return The timestamp of the last fetch time */ public long getFetchTime() { return this.fetchTime; } /** * Returns this group's 64bit SteamID * * @return This group's 64bit SteamID */ public long getId() { return this.groupId64; } /** * Returns the number of members this group has * <p> * If the members have already been fetched the size of the member array is * returned. Otherwise the group size is separately fetched without needing * multiple requests for big groups. * * @return The number of this group's members * @throws SteamCondenserException if an error occurs while parsing the * data */ public int getMemberCount() throws SteamCondenserException { try { if(this.members == null) { DocumentBuilder parser = DocumentBuilderFactory.newInstance().newDocumentBuilder(); Element memberData = parser.parse(this.getBaseUrl() + "/memberslistxml").getDocumentElement(); return Integer.parseInt(memberData.getElementsByTagName("memberCount").item(0).getTextContent()); } else { return this.members.size(); } } catch(Exception e) { throw new SteamCondenserException(e.getMessage(), e); } } /** * Returns the members of this group * <p> * If the members haven't been fetched yet, this is done now. * * @return The Steam ID's of the members of this group * @see #fetchMembers * @throws SteamCondenserException if an error occurs while parsing the * data */ public ArrayList<SteamId> getMembers() throws SteamCondenserException { if(this.members == null) { this.fetchMembers(); } return this.members; } /** * Returns whether the data for this group has already been fetched * * @return <code>true</code> if the group's members have been * fetched */ public boolean isFetched() { return this.fetchTime != 0; } }
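/*
 * Illustrative usage sketch, not part of the original class. The custom URL "valve" is
 * only an example value; the calls mirror the public API defined above. create() caches
 * instances per ID, so a later create() with the same ID returns the cached object
 * unless bypassCache is requested.
 */
class SteamGroupUsageSketch {
    public static void main(String[] args) throws SteamCondenserException {
        SteamGroup group = SteamGroup.create("valve", false);   // lazy: member list not fetched yet
        System.out.println(group.getBaseUrl());                 // http://steamcommunity.com/groups/valve
        System.out.println(group.getMemberCount());             // single request, member list still unloaded
        System.out.println(group.getMembers().size());          // triggers the paged fetchMembers() requests
    }
}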
/* * Copyright (C) 2011 The Project Lombok Authors. * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package lombok.ast.app; import java.io.File; import java.io.IOException; import java.nio.charset.Charset; import java.util.ArrayList; import java.util.List; import java.util.Map; import javax.tools.SimpleJavaFileObject; import lombok.AccessLevel; import lombok.Data; import lombok.RequiredArgsConstructor; import lombok.val; import lombok.ast.Node; import lombok.ast.Version; import lombok.ast.ecj.EcjTreeBuilder; import lombok.ast.ecj.EcjTreeConverter; import lombok.ast.ecj.EcjTreeOperations; import lombok.ast.ecj.EcjTreePrinter; import lombok.ast.grammar.ParseProblem; import lombok.ast.grammar.Source; import lombok.ast.javac.JcTreeBuilder; import lombok.ast.javac.JcTreeConverter; import lombok.ast.javac.JcTreePrinter; import lombok.ast.printer.HtmlFormatter; import lombok.ast.printer.SourceFormatter; import lombok.ast.printer.SourcePrinter; import lombok.ast.printer.StructureFormatter; import lombok.ast.printer.TextFormatter; import org.eclipse.jdt.internal.compiler.CompilationResult; import org.eclipse.jdt.internal.compiler.DefaultErrorHandlingPolicies; import org.eclipse.jdt.internal.compiler.ast.ASTNode; import org.eclipse.jdt.internal.compiler.ast.CompilationUnitDeclaration; import org.eclipse.jdt.internal.compiler.batch.CompilationUnit; import org.eclipse.jdt.internal.compiler.classfmt.ClassFileConstants; import org.eclipse.jdt.internal.compiler.impl.CompilerOptions; import org.eclipse.jdt.internal.compiler.parser.Parser; import org.eclipse.jdt.internal.compiler.problem.DefaultProblemFactory; import org.eclipse.jdt.internal.compiler.problem.ProblemReporter; import org.parboiled.google.collect.Lists; import com.google.common.base.Joiner; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.io.Files; import com.sun.tools.javac.main.JavaCompiler; import com.sun.tools.javac.main.OptionName; import com.sun.tools.javac.tree.JCTree; import com.sun.tools.javac.tree.JCTree.JCCompilationUnit; import com.sun.tools.javac.util.Context; import com.sun.tools.javac.util.Options; import com.zwitserloot.cmdreader.CmdReader; import com.zwitserloot.cmdreader.Description; import com.zwitserloot.cmdreader.FullName; import com.zwitserloot.cmdreader.InvalidCommandLineException; import com.zwitserloot.cmdreader.Mandatory; import com.zwitserloot.cmdreader.Sequential; import com.zwitserloot.cmdreader.Shorthand; @RequiredArgsConstructor(access = 
AccessLevel.PRIVATE) public class Main { private static class CmdArgs { @Shorthand("v") @Description("Print the name of each file as it is being converted.") private boolean verbose; @Description("Show version number and exit.") private boolean version; @Shorthand("h") @Description("Show this help text and exit.") private boolean help; @Shorthand("e") @Description("Sets the encoding of your source files. Defaults to the system default charset. Example: \"UTF-8\"") private String encoding; @Shorthand("p") @Description("Print converted code to standard output instead of saving it in target directory") private boolean print; @Shorthand("d") @Description("Directory to save converted files to") @Mandatory(onlyIfNot={"print", "help", "version"}) private String target; @Shorthand("i") @Description("Save the result of each (intermediate) operation as 'text' representation. Do not use any text/source/html operations if you use this option.") @FullName("save-intermediate") private boolean saveIntermediate; @Shorthand("z") @Description("Normalize the way various different nodes are printed when using the structural printer ('text'), when these nodes are semantically identical") private boolean normalize; @Shorthand("n") @Description("Omit printing the start and end position of nodes for structural output") @FullName("no-positions") private boolean noPositions; @Mandatory(onlyIfNot={"help", "version"}) @Sequential @Description("Operations to apply to each source file. Comma-separated (no spaces). Valid options: ecj/javac/lombok first to decide how the file is parsed initially, " + "then any number of further ecj/javac/lombok keywords to convert ASTs, and finally text/source/html.") private String program; @Description("Files to convert. Provide either a file, or a directory. If you use a directory, all files in it (recursive) are converted") @Mandatory(onlyIfNot={"help", "version"}) @Sequential private List<String> input = new ArrayList<String>(); } public static void main(String[] rawArgs) throws Exception { CmdArgs args; CmdReader<CmdArgs> reader = CmdReader.of(CmdArgs.class); try { args = reader.make(rawArgs); } catch (InvalidCommandLineException e) { System.err.println(e.getMessage()); System.err.println(reader.generateCommandLineHelp("java -jar lombok.ast.jar")); System.exit(1); return; } if (args.help) { System.out.println("lombok.ast java AST tool " + Version.getVersion()); System.out.println(reader.generateCommandLineHelp("java -jar lombok.ast.jar")); System.exit(0); return; } if (args.version) { System.out.println(Version.getVersion()); System.exit(0); return; } try { Charset charset = args.encoding == null ? Charset.defaultCharset() : Charset.forName(args.encoding); Main main = new Main(charset, args.verbose, args.normalize, !args.noPositions, args.saveIntermediate); main.compile(args.program); if (!args.print) { File targetDir = new File(args.target); if (!targetDir.exists()) targetDir.mkdirs(); if (!targetDir.isDirectory()) { System.err.printf("%s is not a directory or cannot be created\n", targetDir.getCanonicalPath()); System.exit(1); return; } main.setOutputDir(targetDir); } for (String input : args.input) { main.addToQueue(input); } main.go(); } catch (IllegalArgumentException e) { System.err.println(e.getMessage()); System.exit(1); return; } } private void go() throws IOException { for (Plan p : files) { process(p.getFile(), outDir, p.getRelativeName()); } if (errors > 0) { System.err.printf("%d errors\n", errors); } System.exit(errors > 0 ? 
2 : 0); } private void setOutputDir(File f) { this.outDir = f; } private void addToQueue(String item) throws IOException { addToQueue0(new File(item), ""); } private void addToQueue0(File f, String pathSoFar) throws IOException { pathSoFar += (pathSoFar.isEmpty() ? "" : "/") + f.getName(); if (f.isFile()) { if (f.getName().endsWith(".java")) { files.add(new Plan(f, pathSoFar)); } } else if (f.isDirectory()) { for (File inner : f.listFiles()) { addToQueue0(inner, pathSoFar); } } else { throw new IllegalArgumentException("Unknown file: " + f.getCanonicalPath()); } } @Data private static class Plan { final File file; final String relativeName; } private void process(File in, File outDir, String relativeName) throws IOException { File out = outDir == null ? null : new File(outDir, relativeName); if (verbose && !saveIntermediate) { System.out.printf("Processing: %s to %s\n", in.getCanonicalPath(), out == null ? "sysout" : out.getCanonicalPath()); } Source source = new Source(Files.toString(in, charset), in.getCanonicalPath()); Object transfer = null; String chain = "/"; try { for (Operation<Object, Object> programElem : program) { transfer = programElem.process(source, transfer); if (saveIntermediate) { if (!"/".equals(chain)) { chain += "-"; } chain += getDestinationType(programElem); File intermediate = new File(outDir.getCanonicalPath() + chain + "/" + relativeName); intermediate.getParentFile().mkdirs(); if (verbose) { System.out.printf("Processing: %s to %s\n", in.getCanonicalPath(), intermediate.getCanonicalPath()); } if (TO_JAVAC.contains(programElem)) { Files.write(javacToText.process(source, (JCCompilationUnit) transfer).toString(), intermediate, charset); } else if (TO_ECJ.contains(programElem)) { Files.write(ecjToText.process(source, (CompilationUnitDeclaration) transfer).toString(), intermediate, charset); } else if (TO_LOMBOK.contains(programElem)) { Files.write(lombokToText.process(source, (Node) transfer).toString(), intermediate, charset); } } } if (out == null) { System.out.println(transfer); } else if (!saveIntermediate) { out.getParentFile().mkdirs(); Files.write(transfer.toString(), out, charset); } } catch (ConversionProblem cp) { System.err.printf("Can't convert: %s due to %s\n", in.getCanonicalPath(), cp.getMessage()); errors++; } catch (RuntimeException e) { System.err.printf("Error during convert: %s\n%s\n", in.getCanonicalPath(), printEx(e)); errors++; } } private String getDestinationType(Operation<Object, Object> operation) { if (TO_LOMBOK.contains(operation)) return "lombok"; else if (TO_ECJ.contains(operation)) return "ecj"; else if (TO_JAVAC.contains(operation)) return "javac"; else if (TO_TEXT.contains(operation)) return "text"; else return null; } private static String printEx(Throwable t) { val sb = new StringBuilder(); sb.append(t.toString()); sb.append("\n"); Joiner.on("\n").appendTo(sb, t.getStackTrace()); return sb.toString(); } private void compile(String program) { this.program = compile0(program); } @Data private static final class ChainElement { private final String type, subtype; @Override public String toString() { return subtype.length() == 0 ? 
type : String.format("%s:%s", type, subtype); } public boolean hasSubtype() { return subtype.length() > 0; } } private List<ChainElement> toChainElements(String program) { val out = new ArrayList<ChainElement>(); for (String part : program.split("\\s*,\\s*")) { int idx = part.indexOf(':'); if (idx == -1) out.add(new ChainElement(part.trim(), "")); else out.add(new ChainElement(part.substring(0, idx).trim(), part.substring(idx+1).trim())); } return out; } @SuppressWarnings("unchecked") private void addNormalization(List<Operation<Object, Object>> list, ChainElement element) { if (!element.hasSubtype()) return; Operation<?, ?> operation = NORMALIZATION.get(element.toString()); if (operation == null) { List<String> normalizations = Lists.newArrayList(); for (String n : NORMALIZATION.keySet()) if (n.startsWith(element.getType() + ":")) normalizations.add(n); throw new IllegalArgumentException(String.format( "Illegal normalization operation: %s. Valid normalizations: %s", element, Joiner.on(",").join(normalizations))); } list.add((Operation<Object, Object>) operation); } @SuppressWarnings("unchecked") private List<Operation<Object, Object>> compile0(String program) { List<ChainElement> parts = toChainElements(program); List<Operation<Object, Object>> out = Lists.newArrayList(); if (parts.isEmpty()) throw new IllegalArgumentException("No operations"); Operation<?, ?> initialOp = CONVERSIONS.get("_," + parts.get(0).getType()); if (initialOp == null) { List<String> initialOps = Lists.newArrayList(); for (String key : CONVERSIONS.keySet()) { if (key.startsWith("_,")) initialOps.add(key.substring(2)); } throw new IllegalArgumentException(String.format( "Illegal initial operation: %s\nLegal initial operations: %s", parts.get(0), Joiner.on(",").join(initialOps))); } out.add((Operation<Object, Object>) initialOp); addNormalization(out, parts.get(0)); for (int i = 0; i < parts.size() - 1; i++) { String convKey = String.format("%s,%s", parts.get(i).getType(), parts.get(i + 1).getType()); Operation<?, ?> convOp = CONVERSIONS.get(convKey); if (convOp == null) { List<String> convOps = Lists.newArrayList(); for (String key : CONVERSIONS.keySet()) { if (key.startsWith(parts.get(i).getType() + ",")) convOps.add(key.substring(parts.get(i).getType().length() + 1)); } throw new IllegalArgumentException(String.format( "Illegal conversion operation: %s\nLegal conversion operations from %s: %s", convKey, parts.get(i), Joiner.on(",").join(convOps))); } out.add((Operation<Object, Object>) convOp); addNormalization(out, parts.get(i + 1)); } String lastPart = parts.get(parts.size() - 1).getType(); if (!LEGAL_FINAL.contains(lastPart) && !saveIntermediate) { throw new IllegalArgumentException(String.format( "Illegal final operation: %s\nLegal final operations: %s", lastPart, Joiner.on(",").join(LEGAL_FINAL))); } return out; } private final Charset charset; private List<Operation<Object, Object>> program; private final boolean verbose; private final boolean normalize; private final boolean positions; private final boolean saveIntermediate; private int errors; private File outDir = null; private final List<Plan> files = Lists.newArrayList(); interface Operation<A, B> { B process(Source source, A in) throws ConversionProblem; } static class ConversionProblem extends Exception { ConversionProblem(String message) { super(message); } } protected CompilerOptions ecjCompilerOptions() { CompilerOptions options = new CompilerOptions(); options.complianceLevel = ClassFileConstants.JDK1_6; options.sourceLevel = 
ClassFileConstants.JDK1_6; options.targetJDK = ClassFileConstants.JDK1_6; options.parseLiteralExpressionsAsConstants = true; return options; } private final Operation<Void, Node> parseWithLombok = new Operation<Void, Node>() { @Override public Node process(Source in, Void irrelevant) throws ConversionProblem { List<Node> nodes = in.getNodes(); List<ParseProblem> problems = in.getProblems(); if (problems.size() > 0) throw new ConversionProblem(String.format("Can't read file %s due to parse error: %s", in.getName(), problems.get(0))); if (nodes.size() == 1) return nodes.get(0); if (nodes.size() == 0) throw new ConversionProblem("No nodes parsed by lombok.ast"); throw new ConversionProblem("More than 1 node parsed by lombok.ast"); } }; private final Operation<Void, ASTNode> parseWithEcj = new Operation<Void, ASTNode>() { @Override public ASTNode process(Source in, Void irrelevant) throws ConversionProblem { CompilerOptions compilerOptions = ecjCompilerOptions(); Parser parser = new Parser(new ProblemReporter( DefaultErrorHandlingPolicies.proceedWithAllProblems(), compilerOptions, new DefaultProblemFactory() ), compilerOptions.parseLiteralExpressionsAsConstants); parser.javadocParser.checkDocComment = true; CompilationUnit sourceUnit = new CompilationUnit(in.getRawInput().toCharArray(), in.getName(), charset.name()); CompilationResult compilationResult = new CompilationResult(sourceUnit, 0, 0, 0); CompilationUnitDeclaration cud = parser.parse(sourceUnit, compilationResult); if (cud.hasErrors()) { throw new ConversionProblem(String.format("Can't read file %s due to parse error: %s", in.getName(), compilationResult.getErrors()[0])); } return cud; } }; private final Operation<Void, JCCompilationUnit> parseWithJavac = new Operation<Void, JCCompilationUnit>() { @Override public JCCompilationUnit process(Source in, Void irrelevant) throws ConversionProblem { Context context = new Context(); Options.instance(context).put(OptionName.ENCODING, charset.name()); JavaCompiler compiler = new JavaCompiler(context); compiler.genEndPos = true; compiler.keepComments = true; JCCompilationUnit cu = compiler.parse(new ContentBasedJavaFileObject(in.getName(), in.getRawInput())); return cu; } }; private final Operation<JCCompilationUnit, Node> javacToLombok = new Operation<JCCompilationUnit, Node>() { @Override public Node process(Source source, JCCompilationUnit in) throws ConversionProblem { JcTreeConverter converter = new JcTreeConverter(); converter.visit(in); return converter.getResult(); } }; private final Operation<CompilationUnitDeclaration, Node> ecjToLombok = new Operation<CompilationUnitDeclaration, Node>() { @Override public Node process(Source source, CompilationUnitDeclaration in) throws ConversionProblem { EcjTreeConverter converter = new EcjTreeConverter(); converter.visit(source.getRawInput(), in); return converter.get(); } }; private final Operation<Node, JCCompilationUnit> lombokToJavac = new Operation<Node, JCCompilationUnit>() { @Override public JCCompilationUnit process(Source source, Node in) throws ConversionProblem { JcTreeBuilder builder = new JcTreeBuilder(); builder.visit(in); JCTree out = builder.get(); if (out instanceof JCCompilationUnit) return (JCCompilationUnit) out; throw new ConversionProblem("result from lombokToJavac is not JCCompilationUnit"); } }; private final Operation<Node, CompilationUnitDeclaration> lombokToEcj = new Operation<Node, CompilationUnitDeclaration>() { @Override public CompilationUnitDeclaration process(Source source, Node in) throws ConversionProblem { 
EcjTreeBuilder builder = new EcjTreeBuilder(source, ecjCompilerOptions()); builder.visit(in); ASTNode out = builder.get(); if (out instanceof CompilationUnitDeclaration) return (CompilationUnitDeclaration) out; throw new ConversionProblem("result from lombokToEcj is not CompilationUnitDeclaration"); } }; private final Operation<Node, String> lombokToHtml = new Operation<Node, String>() { @Override public String process(Source source, Node in) throws ConversionProblem { SourceFormatter formatter = new HtmlFormatter(source.getRawInput()); in.accept(new SourcePrinter(formatter)); for (ParseProblem x : source.getProblems()) { formatter.addError(x.getPosition().getStart(), x.getPosition().getEnd(), x.getMessage()); } return formatter.finish(); } }; private final Operation<Node, String> lombokToSource = new Operation<Node, String>() { @Override public String process(Source source, Node in) throws ConversionProblem { SourceFormatter formatter = new TextFormatter(); in.accept(new SourcePrinter(formatter)); for (ParseProblem x : source.getProblems()) { formatter.addError(x.getPosition().getStart(), x.getPosition().getEnd(), x.getMessage()); } return formatter.finish(); } }; private final Operation<Node, String> lombokToText = new Operation<Node, String>() { @Override public String process(Source source, Node in) throws ConversionProblem { SourceFormatter formatter = positions ? StructureFormatter.formatterWithPositions() : StructureFormatter.formatterWithoutPositions(); in.accept(new SourcePrinter(formatter)); for (ParseProblem x : source.getProblems()) { formatter.addError(x.getPosition().getStart(), x.getPosition().getEnd(), x.getMessage()); } return formatter.finish(); } }; private final Operation<JCCompilationUnit, String> javacToText = new Operation<JCCompilationUnit, String>() { @Override public String process(Source source, JCCompilationUnit in) throws ConversionProblem { JcTreePrinter printer = positions ? JcTreePrinter.printerWithPositions() : JcTreePrinter.printerWithoutPositions(); printer.visit(in); return printer.toString(); } }; private final Operation<CompilationUnitDeclaration, String> ecjToText = new Operation<CompilationUnitDeclaration, String>() { @Override public String process(Source source, CompilationUnitDeclaration in) throws ConversionProblem { if (normalize) { return positions ? EcjTreeOperations.convertToString(in) : EcjTreeOperations.convertToStringNoPositions(in); } else { EcjTreePrinter printer = positions ? 
EcjTreePrinter.printerWithPositions() : EcjTreePrinter.printerWithoutPositions(); printer.visit(in); return printer.getContent(); } } }; private final Map<String, Operation<?, ?>> CONVERSIONS = ImmutableMap.<String, Operation<?, ?>>builder() .put("_,ecj", parseWithEcj) .put("_,lombok", parseWithLombok) .put("_,javac", parseWithJavac) .put("javac,lombok", javacToLombok) .put("lombok,javac", lombokToJavac) .put("ecj,lombok", ecjToLombok) .put("lombok,ecj", lombokToEcj) .put("lombok,text", lombokToText) .put("lombok,source", lombokToSource) .put("lombok,html", lombokToHtml) .put("ecj,text", ecjToText) .put("javac,text", javacToText) .build(); private final Map<String, Operation<?, ?>> NORMALIZATION = ImmutableMap.<String, Operation<?, ?>>builder() .put("ecj:ecjbugs", EcjBugsNormalization.ecjToEcjBugsNormalizedEcj) .put("lombok:ecjbugs", EcjBugsNormalization.lombokToEcjBugsNormalizedLombok) .build(); private final List<String> LEGAL_FINAL = ImmutableList.of("source", "html", "text"); private final List<Operation<?, Node>> TO_LOMBOK = ImmutableList.of(ecjToLombok, javacToLombok, parseWithLombok); private final List<Operation<?, ? extends ASTNode>> TO_ECJ = ImmutableList.of(lombokToEcj, parseWithEcj); private final List<Operation<?, JCCompilationUnit>> TO_JAVAC = ImmutableList.of(lombokToJavac, parseWithJavac); private final List<Operation<?, String>> TO_TEXT = ImmutableList.of(ecjToText, javacToText, lombokToText); private static class ContentBasedJavaFileObject extends SimpleJavaFileObject { private final String content; public ContentBasedJavaFileObject(String name, String content) { super(new File(name).toURI(), Kind.SOURCE); this.content = content; } @Override public CharSequence getCharContent(boolean ignoreEncodingErrors) throws IOException { return content; } } }
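/*
 * Illustrative invocations, not part of the original source, based on the option and
 * operation-chain parsing above; all paths are placeholders.
 *
 *   Parse with javac, convert to a lombok.ast tree, print it back as source to stdout:
 *     java -jar lombok.ast.jar -p javac,lombok,source src/main/java/Example.java
 *
 *   Parse with ecj and write the structural 'text' form of every file under src into out:
 *     java -jar lombok.ast.jar -d out ecj,text src
 *
 *   Normalization subtypes attach with ':' and must match the NORMALIZATION map keys:
 *     java -jar lombok.ast.jar -p ecj:ecjbugs,text src/main/java/Example.java
 */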
/* * Copyright 2012 Anita Onnuvel * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.ahp.vinavidai.quiz.action; import static org.ahp.vinavidai.constants.HttpSessionAttributeConstants.QUIZ_UNDER_CREATION; import java.util.LinkedHashSet; import java.util.LinkedList; import java.util.List; import java.util.Set; import java.util.concurrent.TimeUnit; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.ahp.commons.action.AhpAbstractProcessAction; import org.ahp.core.actions.AhpActionHelper; import org.ahp.core.businessdelegate.AhpBusinessDelegate; import org.ahp.core.pojo.Audit; import org.ahp.core.pojo.User; import org.ahp.vinavidai.enums.DescriptionQuestionMaximumSizeType; import org.ahp.vinavidai.enums.NavigateActions; import org.ahp.vinavidai.enums.QuestionType; import org.ahp.vinavidai.enums.SubmitActions; import org.ahp.vinavidai.pojo.Category; import org.ahp.vinavidai.pojo.Option; import org.ahp.vinavidai.pojo.Question; import org.ahp.vinavidai.pojo.Quiz; import org.ahp.vinavidai.pojo.SkillLevel; import org.ahp.vinavidai.quiz.QuizService; import org.ahp.vinavidai.quiz.form.CreateQuestionForm; import org.apache.commons.lang3.StringUtils; import org.apache.struts.action.ActionForm; import org.apache.struts.action.ActionForward; import org.apache.struts.action.ActionMapping; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * @author Anita Onnuvel * * @struts.action * path="/ProcessCreateQuestion" * name="CreateQuestionForm" * input="/quiz/Question.jsp" * scope="session" * validate="true" * * @struts.action-forward * name="DisplayCreateQuestion" * path="/DisplayCreateQuestion.do" * redirect="false" * * @spring.bean * name="/ProcessCreateQuestion" * * @spring.property * name="quizService" * ref="quizService" */ public class ProcessCreateQuestion extends AhpAbstractProcessAction { final static Logger LOGGER = LoggerFactory.getLogger( ProcessCreateQuestion.class ); private QuizService mQuizService; public void setQuizService( QuizService pQuizService ) { this.mQuizService = pQuizService; } @Override public ActionForward process( ActionMapping pActionMapping, ActionForm pActionForm, HttpServletRequest pHttpServletRequest, HttpServletResponse pHttpServletResponse ) { User lLoggedInUser = AhpActionHelper.getLoggedInUser( pHttpServletRequest ); CreateQuestionForm lCreateQuestionForm = ( CreateQuestionForm ) pActionForm; Quiz lQuiz = ( Quiz ) pHttpServletRequest.getAttribute( QUIZ_UNDER_CREATION ); if ( !lCreateQuestionForm.isSubmitAction( SubmitActions.EMPTY ) ) { if ( lQuiz != null ) { lCreateQuestionForm.setQuiz( lQuiz ); lCreateQuestionForm.setNextPage( NavigateActions.DisplayCreateQuestionStart.toString() ); } if ( lCreateQuestionForm.isSubmitAction( SubmitActions.ADD_OPTION ) || lCreateQuestionForm.isSubmitAction( SubmitActions.DELETE_OPTION ) || lCreateQuestionForm.isSubmitAction( SubmitActions.RESET ) ) { lCreateQuestionForm.setNextPage( NavigateActions.DisplayCreateQuestion.toString() ); } if ( lCreateQuestionForm.isSubmitAction( 
SubmitActions.NEXT ) ) { lCreateQuestionForm.setNextPage( NavigateActions.DisplayCreateQuestionNext.toString() ); this.storeQuestion( lCreateQuestionForm, lLoggedInUser ); } if ( lCreateQuestionForm.isSubmitAction( SubmitActions.COMPLETE ) ) { lCreateQuestionForm.setNextPage( NavigateActions.DisplayCreateQuizConfirmation.toString() ); pHttpServletRequest.getSession().setAttribute( QUIZ_UNDER_CREATION, null ); this.storeQuestion( lCreateQuestionForm, lLoggedInUser ); } if ( lCreateQuestionForm.isSubmitAction( SubmitActions.CANCEL ) ) { lCreateQuestionForm.setNextPage( NavigateActions.DisplayCreateQuizCancelled.toString() ); } } else { lCreateQuestionForm.setNextPage( NavigateActions.DisplayCreateQuestion.toString() ); } return pActionMapping.findForward( NavigateActions.DisplayCreateQuestion.toString() ); } /** * * @param pCreateQuestionForm * @param pLoggedInUser */ private void storeQuestion( CreateQuestionForm pCreateQuestionForm, User pLoggedInUser ) { Quiz lQuizUnderCreation = pCreateQuestionForm.getQuiz(); Question lQuestion = new Question(); Audit lAudit = AhpBusinessDelegate.createAudit( pLoggedInUser ); lQuestion.setAudit( lAudit ); lQuestion.setQuiz( lQuizUnderCreation ); lQuestion.setQuestionType( QuestionType.valueOf( pCreateQuestionForm.getQuestionType() ) ); lQuestion.setQuestionDescription( pCreateQuestionForm.getQuestionDescription() ); lQuestion.setQuestionObjective( pCreateQuestionForm.getQuestionObjective() ); if ( lQuizUnderCreation.getQuestions() != null ) { lQuestion.setQuestionOrder( pCreateQuestionForm.getQuiz().getQuestions().size() + 1 ); } else { lQuestion.setQuestionOrder( 1 ); } Category lCategory = new Category(); lCategory.setCategoryId( pCreateQuestionForm.getSelectedQuestionCategory() ); lQuestion.setCategory( lCategory ); SkillLevel lSkillLevel = new SkillLevel(); lSkillLevel.setSkillLevelId( pCreateQuestionForm.getSelectedQuestionSkillLevel() ); lQuestion.setSkillLevel( lSkillLevel ); lQuestion.setQuestionPoints( pCreateQuestionForm.getQuestionPoints() ); long lQuestionDuration = -1; if ( StringUtils.isNotBlank( pCreateQuestionForm.getResponseDurationInHours() ) ) { lQuestionDuration += TimeUnit.MILLISECONDS.convert( Long.parseLong( pCreateQuestionForm.getResponseDurationInHours() ), TimeUnit.HOURS ); } if ( StringUtils.isNotBlank( pCreateQuestionForm.getResponseDurationInMinutes() ) ) { lQuestionDuration += TimeUnit.MILLISECONDS.convert( Long.parseLong( pCreateQuestionForm.getResponseDurationInMinutes() ), TimeUnit.MINUTES ); } if ( StringUtils.isNotBlank( pCreateQuestionForm.getResponseDurationInSeconds() ) ) { lQuestionDuration += TimeUnit.MILLISECONDS.convert( Long.parseLong( pCreateQuestionForm.getResponseDurationInSeconds() ), TimeUnit.SECONDS ); } lQuestion.setQuestionDuration( lQuestionDuration ); if ( lQuestion.getQuestionType().equals( QuestionType.MultipleChoice ) || lQuestion.getQuestionType().equals( QuestionType.WordList ) || lQuestion.getQuestionType().equals( QuestionType.Matching ) || lQuestion.getQuestionType().equals( QuestionType.Ordering ) ) { for ( Option lOption : pCreateQuestionForm.getOptions() ) { lOption.setQuestion( lQuestion ); lOption.setAudit( lAudit ); } } if ( lQuestion.getQuestionType().equals( QuestionType.TrueOrFalse ) || lQuestion.getQuestionType().equals( QuestionType.Descriptive ) ) { Option lOption = pCreateQuestionForm.getOptions().get( 0 ); lOption.setQuestion( lQuestion ); if ( !StringUtils.isEmpty( lOption.getDescriptionQuestionMaximumSizeTypeStr() ) ) { lOption.setDescriptionQuestionMaximumSizeType( 
DescriptionQuestionMaximumSizeType.valueOf( lOption .getDescriptionQuestionMaximumSizeTypeStr() ) ); } lOption.setAudit( lAudit ); List<Option> lOptions = new LinkedList<Option>(); lOptions.add( lOption ); pCreateQuestionForm.setOptions( lOptions ); } if ( lQuestion.getQuestionType().equals( QuestionType.FillInTheBlank ) ) { String[] lQuestionDescriptionArray = lQuestion.getQuestionDescription().split( "\\s+" ); List<Option> lFillInTheBlanksOptions = new LinkedList<Option>(); for ( String lWordToken : lQuestionDescriptionArray ) { if ( lWordToken.startsWith( "$" ) && lWordToken.endsWith( "$" ) ) { Option lOption = new Option(); lOption.setOptionDescription( lWordToken.substring( 1, lWordToken.length() - 1 ) ); lOption.setQuestion( lQuestion ); lOption.setAudit( lAudit ); lFillInTheBlanksOptions.add( lOption ); } } pCreateQuestionForm.setOptions( lFillInTheBlanksOptions ); } // Set Options in Question Set<Option> lOptions = new LinkedHashSet<Option>(); for ( Option lOption : pCreateQuestionForm.getOptions() ) { lOptions.add( lOption ); } lQuestion.setOptions( lOptions ); if ( lQuizUnderCreation.getQuestions() == null ) { Set<Question> lQuestions = new LinkedHashSet<Question>(); lQuestions.add( lQuestion ); lQuizUnderCreation.setQuestions( lQuestions ); } else { lQuizUnderCreation.getQuestions().add( lQuestion ); } lQuizUnderCreation = this.mQuizService.updateQuiz( lQuizUnderCreation ); pCreateQuestionForm.setQuiz( lQuizUnderCreation ); } }
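/*
 * Hedged illustration (not part of the project above): a self-contained sketch of the
 * hours/minutes/seconds-to-milliseconds conversion performed in storeQuestion() via TimeUnit.
 * The class and method names below are invented for this example only.
 */
import java.util.concurrent.TimeUnit;

public class QuestionDurationSketch {
    /** Converts optional hour/minute/second strings (null or blank means "not set") to milliseconds. */
    static long toMillis(String hours, String minutes, String seconds) {
        long millis = 0L;
        if (hours != null && !hours.trim().isEmpty()) {
            millis += TimeUnit.MILLISECONDS.convert(Long.parseLong(hours), TimeUnit.HOURS);
        }
        if (minutes != null && !minutes.trim().isEmpty()) {
            millis += TimeUnit.MILLISECONDS.convert(Long.parseLong(minutes), TimeUnit.MINUTES);
        }
        if (seconds != null && !seconds.trim().isEmpty()) {
            millis += TimeUnit.MILLISECONDS.convert(Long.parseLong(seconds), TimeUnit.SECONDS);
        }
        return millis;
    }

    public static void main(String[] args) {
        System.out.println(toMillis("1", "30", "")); // 1h + 30min -> 5400000 ms
    }
}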
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.table.runtime.operators.rank; import org.apache.flink.api.common.state.MapState; import org.apache.flink.api.common.state.MapStateDescriptor; import org.apache.flink.api.common.state.ValueState; import org.apache.flink.api.common.state.ValueStateDescriptor; import org.apache.flink.api.common.typeinfo.BasicTypeInfo; import org.apache.flink.api.java.typeutils.ListTypeInfo; import org.apache.flink.configuration.Configuration; import org.apache.flink.table.dataformat.BaseRow; import org.apache.flink.table.dataformat.util.BaseRowUtil; import org.apache.flink.table.runtime.generated.GeneratedRecordComparator; import org.apache.flink.table.runtime.generated.GeneratedRecordEqualiser; import org.apache.flink.table.runtime.generated.RecordEqualiser; import org.apache.flink.table.runtime.keyselector.BaseRowKeySelector; import org.apache.flink.table.runtime.typeutils.BaseRowTypeInfo; import org.apache.flink.table.runtime.typeutils.SortedMapTypeInfo; import org.apache.flink.util.Collector; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.Serializable; import java.util.ArrayList; import java.util.Arrays; import java.util.Comparator; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.SortedMap; import java.util.TreeMap; /** * The function could handle retract stream. Input stream could only contain acc, delete or retract record. */ public class RetractableTopNFunction extends AbstractTopNFunction { private static final long serialVersionUID = 1365312180599454479L; private static final Logger LOG = LoggerFactory.getLogger(RetractableTopNFunction.class); // Message to indicate the state is cleared because of ttl restriction. The message could be used to output to log. private static final String STATE_CLEARED_WARN_MSG = "The state is cleared because of state ttl. " + "This will result in incorrect result. You can increase the state ttl to avoid this."; private final BaseRowTypeInfo sortKeyType; // flag to skip records with non-exist error instead to fail, true by default. private final boolean lenient = true; // a map state stores mapping from sort key to records list private transient MapState<BaseRow, List<BaseRow>> dataState; // a sorted map stores mapping from sort key to records count private transient ValueState<SortedMap<BaseRow, Long>> treeMap; // The util to compare two BaseRow equals to each other. 
private GeneratedRecordEqualiser generatedEqualiser; private RecordEqualiser equaliser; private Comparator<BaseRow> serializableComparator; public RetractableTopNFunction( long minRetentionTime, long maxRetentionTime, BaseRowTypeInfo inputRowType, GeneratedRecordComparator generatedRecordComparator, BaseRowKeySelector sortKeySelector, RankType rankType, RankRange rankRange, GeneratedRecordEqualiser generatedEqualiser, boolean generateRetraction, boolean outputRankNumber) { super(minRetentionTime, maxRetentionTime, inputRowType, generatedRecordComparator, sortKeySelector, rankType, rankRange, generateRetraction, outputRankNumber); this.sortKeyType = sortKeySelector.getProducedType(); this.serializableComparator = new ComparatorWrapper(generatedRecordComparator); this.generatedEqualiser = generatedEqualiser; } @Override public void open(Configuration parameters) throws Exception { super.open(parameters); // compile equaliser equaliser = generatedEqualiser.newInstance(getRuntimeContext().getUserCodeClassLoader()); generatedEqualiser = null; ListTypeInfo<BaseRow> valueTypeInfo = new ListTypeInfo<>(inputRowType); MapStateDescriptor<BaseRow, List<BaseRow>> mapStateDescriptor = new MapStateDescriptor<>( "data-state", sortKeyType, valueTypeInfo); dataState = getRuntimeContext().getMapState(mapStateDescriptor); ValueStateDescriptor<SortedMap<BaseRow, Long>> valueStateDescriptor = new ValueStateDescriptor<>( "sorted-map", new SortedMapTypeInfo<>(sortKeyType, BasicTypeInfo.LONG_TYPE_INFO, serializableComparator)); treeMap = getRuntimeContext().getState(valueStateDescriptor); } @Override public void processElement(BaseRow input, Context ctx, Collector<BaseRow> out) throws Exception { long currentTime = ctx.timerService().currentProcessingTime(); // register state-cleanup timer registerProcessingCleanupTimer(ctx, currentTime); initRankEnd(input); SortedMap<BaseRow, Long> sortedMap = treeMap.value(); if (sortedMap == null) { sortedMap = new TreeMap<>(sortKeyComparator); } BaseRow sortKey = sortKeySelector.getKey(input); if (BaseRowUtil.isAccumulateMsg(input)) { // update sortedMap if (sortedMap.containsKey(sortKey)) { sortedMap.put(sortKey, sortedMap.get(sortKey) + 1); } else { sortedMap.put(sortKey, 1L); } // emit if (outputRankNumber || hasOffset()) { // the without-number-algorithm can't handle topN with offset, // so use the with-number-algorithm to handle offset emitRecordsWithRowNumber(sortedMap, sortKey, input, out); } else { emitRecordsWithoutRowNumber(sortedMap, sortKey, input, out); } // update data state List<BaseRow> inputs = dataState.get(sortKey); if (inputs == null) { // the sort key is never seen inputs = new ArrayList<>(); } inputs.add(input); dataState.put(sortKey, inputs); } else { // emit updates first if (outputRankNumber || hasOffset()) { // the without-number-algorithm can't handle topN with offset, // so use the with-number-algorithm to handle offset retractRecordWithRowNumber(sortedMap, sortKey, input, out); } else { retractRecordWithoutRowNumber(sortedMap, sortKey, input, out); } // and then update sortedMap if (sortedMap.containsKey(sortKey)) { long count = sortedMap.get(sortKey) - 1; if (count == 0) { sortedMap.remove(sortKey); } else { sortedMap.put(sortKey, count); } } else { if (sortedMap.isEmpty()) { if (lenient) { LOG.warn(STATE_CLEARED_WARN_MSG); } else { throw new RuntimeException(STATE_CLEARED_WARN_MSG); } } else { throw new RuntimeException("Can not retract a non-existent record: ${inputBaseRow.toString}. 
" + "This should never happen."); } } } treeMap.update(sortedMap); } @Override public void onTimer(long timestamp, OnTimerContext ctx, Collector<BaseRow> out) throws Exception { if (stateCleaningEnabled) { cleanupState(dataState, treeMap); } } // ------------- ROW_NUMBER------------------------------- private void emitRecordsWithRowNumber( SortedMap<BaseRow, Long> sortedMap, BaseRow sortKey, BaseRow inputRow, Collector<BaseRow> out) throws Exception { Iterator<Map.Entry<BaseRow, Long>> iterator = sortedMap.entrySet().iterator(); long curRank = 0L; boolean findsSortKey = false; while (iterator.hasNext() && isInRankEnd(curRank)) { Map.Entry<BaseRow, Long> entry = iterator.next(); BaseRow key = entry.getKey(); if (!findsSortKey && key.equals(sortKey)) { curRank += entry.getValue(); collect(out, inputRow, curRank); findsSortKey = true; } else if (findsSortKey) { List<BaseRow> inputs = dataState.get(key); if (inputs == null) { // Skip the data if it's state is cleared because of state ttl. if (lenient) { LOG.warn(STATE_CLEARED_WARN_MSG); } else { throw new RuntimeException(STATE_CLEARED_WARN_MSG); } } else { int i = 0; while (i < inputs.size() && isInRankEnd(curRank)) { curRank += 1; BaseRow prevRow = inputs.get(i); retract(out, prevRow, curRank - 1); collect(out, prevRow, curRank); i++; } } } else { curRank += entry.getValue(); } } } private void emitRecordsWithoutRowNumber( SortedMap<BaseRow, Long> sortedMap, BaseRow sortKey, BaseRow inputRow, Collector<BaseRow> out) throws Exception { Iterator<Map.Entry<BaseRow, Long>> iterator = sortedMap.entrySet().iterator(); long curRank = 0L; boolean findsSortKey = false; BaseRow toCollect = null; BaseRow toDelete = null; while (iterator.hasNext() && isInRankEnd(curRank)) { Map.Entry<BaseRow, Long> entry = iterator.next(); BaseRow key = entry.getKey(); if (!findsSortKey && key.equals(sortKey)) { curRank += entry.getValue(); if (isInRankRange(curRank)) { toCollect = inputRow; } findsSortKey = true; } else if (findsSortKey) { List<BaseRow> inputs = dataState.get(key); if (inputs == null) { // Skip the data if it's state is cleared because of state ttl. if (lenient) { LOG.warn(STATE_CLEARED_WARN_MSG); } else { throw new RuntimeException(STATE_CLEARED_WARN_MSG); } } else { long count = entry.getValue(); // gets the rank of last record with same sortKey long rankOfLastRecord = curRank + count; // deletes the record if there is a record recently downgrades to Top-(N+1) if (isInRankEnd(rankOfLastRecord)) { curRank = rankOfLastRecord; } else { int index = Long.valueOf(rankEnd - curRank).intValue(); toDelete = inputs.get(index); break; } } } else { curRank += entry.getValue(); } } if (toDelete != null) { delete(out, toDelete); } if (toCollect != null) { collect(out, inputRow); } } private void retractRecordWithRowNumber( SortedMap<BaseRow, Long> sortedMap, BaseRow sortKey, BaseRow inputRow, Collector<BaseRow> out) throws Exception { Iterator<Map.Entry<BaseRow, Long>> iterator = sortedMap.entrySet().iterator(); long curRank = 0L; boolean findsSortKey = false; while (iterator.hasNext() && isInRankEnd(curRank)) { Map.Entry<BaseRow, Long> entry = iterator.next(); BaseRow key = entry.getKey(); if (!findsSortKey && key.equals(sortKey)) { List<BaseRow> inputs = dataState.get(key); if (inputs == null) { // Skip the data if it's state is cleared because of state ttl. 
if (lenient) { LOG.warn(STATE_CLEARED_WARN_MSG); } else { throw new RuntimeException(STATE_CLEARED_WARN_MSG); } } else { Iterator<BaseRow> inputIter = inputs.iterator(); while (inputIter.hasNext() && isInRankEnd(curRank)) { curRank += 1; BaseRow prevRow = inputIter.next(); if (!findsSortKey && equaliser.equalsWithoutHeader(prevRow, inputRow)) { delete(out, prevRow, curRank); curRank -= 1; findsSortKey = true; inputIter.remove(); } else if (findsSortKey) { retract(out, prevRow, curRank + 1); collect(out, prevRow, curRank); } } if (inputs.isEmpty()) { dataState.remove(key); } else { dataState.put(key, inputs); } } } else if (findsSortKey) { List<BaseRow> inputs = dataState.get(key); int i = 0; while (i < inputs.size() && isInRankEnd(curRank)) { curRank += 1; BaseRow prevRow = inputs.get(i); retract(out, prevRow, curRank + 1); collect(out, prevRow, curRank); i++; } } else { curRank += entry.getValue(); } } } private void retractRecordWithoutRowNumber( SortedMap<BaseRow, Long> sortedMap, BaseRow sortKey, BaseRow inputRow, Collector<BaseRow> out) throws Exception { Iterator<Map.Entry<BaseRow, Long>> iterator = sortedMap.entrySet().iterator(); long curRank = 0L; boolean findsSortKey = false; while (iterator.hasNext() && isInRankEnd(curRank)) { Map.Entry<BaseRow, Long> entry = iterator.next(); BaseRow key = entry.getKey(); if (!findsSortKey && key.equals(sortKey)) { List<BaseRow> inputs = dataState.get(key); if (inputs == null) { // Skip the data if it's state is cleared because of state ttl. if (lenient) { LOG.warn(STATE_CLEARED_WARN_MSG); } else { throw new RuntimeException(STATE_CLEARED_WARN_MSG); } } else { Iterator<BaseRow> inputIter = inputs.iterator(); while (inputIter.hasNext() && isInRankEnd(curRank)) { curRank += 1; BaseRow prevRow = inputIter.next(); if (!findsSortKey && equaliser.equalsWithoutHeader(prevRow, inputRow)) { delete(out, prevRow, curRank); curRank -= 1; findsSortKey = true; inputIter.remove(); } else if (findsSortKey) { if (curRank == rankEnd) { collect(out, prevRow, curRank); break; } } } if (inputs.isEmpty()) { dataState.remove(key); } else { dataState.put(key, inputs); } } } else if (findsSortKey) { long count = entry.getValue(); // gets the rank of last record with same sortKey long rankOfLastRecord = curRank + count; // sends the record if there is a record recently upgrades to Top-N if (rankOfLastRecord < rankEnd) { curRank = rankOfLastRecord; } else { int index = Long.valueOf(rankEnd - curRank - 1).intValue(); List<BaseRow> inputs = dataState.get(key); BaseRow toAdd = inputs.get(index); collect(out, toAdd); break; } } else { curRank += entry.getValue(); } } } /** * Note: Because it's impossible to restore a RecordComparator instance generated by GeneratedRecordComparator from * snapshot, We introduce ComparatorWrapper class to wrap the GeneratedRecordComparator, a ComparatorWrapper * instance is serializable, and a RecordComparator instance could be restored based on the deserialized * ComparatorWrapper instance. 
*/ private static class ComparatorWrapper implements Comparator<BaseRow>, Serializable { private static final long serialVersionUID = 4386377835781068140L; private transient Comparator<BaseRow> comparator; private GeneratedRecordComparator generatedRecordComparator; private ComparatorWrapper(GeneratedRecordComparator generatedRecordComparator) { this.generatedRecordComparator = generatedRecordComparator; } @Override public int compare(BaseRow o1, BaseRow o2) { if (comparator == null) { comparator = generatedRecordComparator.newInstance(Thread.currentThread().getContextClassLoader()); } return comparator.compare(o1, o2); } @Override public boolean equals(Object obj) { if (obj instanceof ComparatorWrapper) { ComparatorWrapper o = (ComparatorWrapper) obj; GeneratedRecordComparator oGeneratedComparator = o.generatedRecordComparator; return generatedRecordComparator.getClassName().equals(oGeneratedComparator.getClassName()) && generatedRecordComparator.getCode().equals(oGeneratedComparator.getCode()) && Arrays.equals(generatedRecordComparator.getReferences(), oGeneratedComparator.getReferences()); } else { return false; } } } }
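/*
 * Hedged illustration (not Flink API): the rank bookkeeping behind emitRecordsWithRowNumber()
 * above. The sorted map stores how many buffered rows share each sort key; the rank of the first
 * row with a given key is 1 plus the sum of the counts of all smaller keys. Plain String keys
 * stand in for BaseRow sort keys purely for this sketch.
 */
import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;

public class RankBookkeepingSketch {
    static long rankOfFirstRowWithKey(SortedMap<String, Long> countsBySortKey, String sortKey) {
        long rank = 1L;
        for (Map.Entry<String, Long> entry : countsBySortKey.headMap(sortKey).entrySet()) {
            rank += entry.getValue(); // rows with strictly smaller sort keys come first
        }
        return rank;
    }

    public static void main(String[] args) {
        SortedMap<String, Long> counts = new TreeMap<>();
        counts.put("a", 2L); // two buffered rows share sort key "a"
        counts.put("b", 1L);
        counts.put("c", 3L);
        System.out.println(rankOfFirstRowWithKey(counts, "c")); // 4 (ranks 1-2 -> "a", 3 -> "b")
    }
}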
/* * Copyright (c) 2008-2020, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.ringbuffer.impl.operations; import com.hazelcast.cache.CacheNotExistsException; import com.hazelcast.cache.impl.CacheService; import com.hazelcast.cache.impl.journal.CacheEventJournal; import com.hazelcast.config.EventJournalConfig; import com.hazelcast.config.RingbufferConfig; import com.hazelcast.map.impl.MapService; import com.hazelcast.map.impl.journal.MapEventJournal; import com.hazelcast.nio.ObjectDataInput; import com.hazelcast.nio.ObjectDataOutput; import com.hazelcast.internal.serialization.Data; import com.hazelcast.nio.serialization.IdentifiedDataSerializable; import com.hazelcast.ringbuffer.impl.ArrayRingbuffer; import com.hazelcast.ringbuffer.impl.Ringbuffer; import com.hazelcast.ringbuffer.impl.RingbufferContainer; import com.hazelcast.ringbuffer.impl.RingbufferService; import com.hazelcast.spi.impl.operationservice.BackupAwareOperation; import com.hazelcast.internal.services.ObjectNamespace; import com.hazelcast.spi.impl.operationservice.Operation; import com.hazelcast.internal.services.ServiceNamespace; import com.hazelcast.internal.services.ServiceNamespaceAware; import com.hazelcast.spi.merge.RingbufferMergeData; import com.hazelcast.spi.merge.SplitBrainMergePolicy; import com.hazelcast.spi.merge.SplitBrainMergeTypes.RingbufferMergeTypes; import com.hazelcast.internal.serialization.SerializationService; import java.io.IOException; import static com.hazelcast.internal.nio.IOUtil.readObject; import static com.hazelcast.internal.nio.IOUtil.writeObject; import static com.hazelcast.ringbuffer.impl.RingbufferDataSerializerHook.F_ID; import static com.hazelcast.ringbuffer.impl.RingbufferDataSerializerHook.MERGE_OPERATION; import static com.hazelcast.ringbuffer.impl.RingbufferService.SERVICE_NAME; import static com.hazelcast.spi.impl.merge.MergingValueFactory.createMergingValue; /** * Contains an entire ringbuffer for split-brain healing with a * {@link SplitBrainMergePolicy}. 
* * @since 3.10 */ public class MergeOperation extends Operation implements IdentifiedDataSerializable, BackupAwareOperation, ServiceNamespaceAware { private ObjectNamespace namespace; private SplitBrainMergePolicy<RingbufferMergeData, RingbufferMergeTypes, RingbufferMergeData> mergePolicy; private Ringbuffer<Object> mergingRingbuffer; private transient Ringbuffer<Object> resultRingbuffer; private transient RingbufferConfig config; private transient RingbufferService ringbufferService; private transient SerializationService serializationService; public MergeOperation() { } public MergeOperation(ObjectNamespace namespace, SplitBrainMergePolicy<RingbufferMergeData, RingbufferMergeTypes, RingbufferMergeData> mergePolicy, Ringbuffer<Object> mergingRingbuffer) { this.namespace = namespace; this.mergePolicy = mergePolicy; this.mergingRingbuffer = mergingRingbuffer; } @Override public void beforeRun() throws Exception { this.ringbufferService = getService(); this.config = getRingbufferConfig(ringbufferService, namespace); this.serializationService = getNodeEngine().getSerializationService(); } @Override public void run() throws Exception { RingbufferContainer<Object, Object> existingContainer = ringbufferService.getContainerOrNull(getPartitionId(), namespace); RingbufferMergeTypes mergingValue = createMergingValue(serializationService, mergingRingbuffer); serializationService.getManagedContext().initialize(mergePolicy); resultRingbuffer = merge(existingContainer, mergingValue); } /** * Merges the provided {@code mergingValue} into the {@code existingContainer} * and returns the merged ringbuffer. * * @param existingContainer the container into which to merge the data * @param mergingValue the data to merge * @return the merged ringbuffer */ private Ringbuffer<Object> merge(RingbufferContainer<Object, Object> existingContainer, RingbufferMergeTypes mergingValue) { RingbufferMergeTypes existingValue = createMergingValueOrNull(existingContainer); RingbufferMergeData resultData = mergePolicy.merge(mergingValue, existingValue); if (resultData == null) { ringbufferService.destroyDistributedObject(namespace.getObjectName()); return null; } else { if (existingContainer == null) { RingbufferConfig config = getRingbufferConfig(ringbufferService, namespace); existingContainer = ringbufferService.getOrCreateContainer(getPartitionId(), namespace, config); } setRingbufferData(resultData, existingContainer); return existingContainer.getRingbuffer(); } } private RingbufferMergeTypes createMergingValueOrNull(RingbufferContainer<Object, Object> existingContainer) { return existingContainer == null || existingContainer.getRingbuffer().isEmpty() ? null : createMergingValue(serializationService, existingContainer.getRingbuffer()); } /** * Sets the ringbuffer data given by the {@code fromMergeData} to the * {@code toContainer}. * * @param fromMergeData the data which needs to be set into the containter * @param toContainer the target ringbuffer container */ private void setRingbufferData( RingbufferMergeData fromMergeData, RingbufferContainer<Object, Object> toContainer) { boolean storeEnabled = toContainer.getStore().isEnabled(); Data[] storeItems = storeEnabled ? 
new Data[fromMergeData.size()] : null; toContainer.setHeadSequence(fromMergeData.getHeadSequence()); toContainer.setTailSequence(fromMergeData.getTailSequence()); for (long seq = fromMergeData.getHeadSequence(); seq <= fromMergeData.getTailSequence(); seq++) { final Object resultValue = fromMergeData.read(seq); toContainer.set(seq, resultValue); if (storeEnabled) { storeItems[(int) (seq - fromMergeData.getHeadSequence())] = serializationService.toData(resultValue); } } if (storeEnabled) { toContainer.getStore() .storeAll(fromMergeData.getHeadSequence(), storeItems); } } @Override public boolean shouldBackup() { return true; } @Override public int getSyncBackupCount() { return config.getBackupCount(); } @Override public int getAsyncBackupCount() { return config.getAsyncBackupCount(); } @Override public Operation getBackupOperation() { return new MergeBackupOperation(namespace.getObjectName(), resultRingbuffer); } /** * Returns the ringbuffer config for the provided namespace. The namespace * provides information whether the requested ringbuffer is a ringbuffer * that the user is directly interacting with through a ringbuffer proxy * or if this is a backing ringbuffer for an event journal. * If a ringbuffer configuration for an event journal is requested, this * method will expect the configuration for the relevant map or cache * to be available. * * @param service the ringbuffer service * @param ns the object namespace for which we are creating a ringbuffer * @return the ringbuffer configuration * @throws CacheNotExistsException if a config for a cache event journal was requested * and the cache configuration was not found */ private RingbufferConfig getRingbufferConfig(RingbufferService service, ObjectNamespace ns) { final String serviceName = ns.getServiceName(); if (RingbufferService.SERVICE_NAME.equals(serviceName)) { return service.getRingbufferConfig(ns.getObjectName()); } else if (MapService.SERVICE_NAME.equals(serviceName)) { MapService mapService = getNodeEngine().getService(MapService.SERVICE_NAME); MapEventJournal journal = mapService.getMapServiceContext().getEventJournal(); EventJournalConfig journalConfig = journal.getEventJournalConfig(ns); return journal.toRingbufferConfig(journalConfig, namespace); } else if (CacheService.SERVICE_NAME.equals(serviceName)) { CacheService cacheService = getNodeEngine().getService(CacheService.SERVICE_NAME); CacheEventJournal journal = cacheService.getEventJournal(); EventJournalConfig journalConfig = journal.getEventJournalConfig(ns); return journal.toRingbufferConfig(journalConfig, namespace); } else { throw new IllegalArgumentException("Unsupported ringbuffer service name: " + serviceName); } } @Override public ServiceNamespace getServiceNamespace() { return namespace; } @Override public String getServiceName() { return SERVICE_NAME; } @Override public int getFactoryId() { return F_ID; } @Override public int getClassId() { return MERGE_OPERATION; } @Override protected void writeInternal(ObjectDataOutput out) throws IOException { super.writeInternal(out); out.writeObject(namespace); out.writeObject(mergePolicy); out.writeLong(mergingRingbuffer.tailSequence()); out.writeLong(mergingRingbuffer.headSequence()); out.writeInt((int) mergingRingbuffer.getCapacity()); for (Object mergingItem : mergingRingbuffer) { writeObject(out, mergingItem); } } @Override protected void readInternal(ObjectDataInput in) throws IOException { super.readInternal(in); namespace = in.readObject(); mergePolicy = in.readObject(); final long tailSequence = in.readLong(); 
final long headSequence = in.readLong(); final int capacity = in.readInt(); mergingRingbuffer = new ArrayRingbuffer<Object>(capacity); mergingRingbuffer.setTailSequence(tailSequence); mergingRingbuffer.setHeadSequence(headSequence); for (long seq = headSequence; seq <= tailSequence; seq++) { mergingRingbuffer.set(seq, readObject(in)); } } }
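/*
 * Hedged note on the wire format used by writeInternal()/readInternal() above: the payload is
 * tailSequence, headSequence, capacity, then one item per sequence in [headSequence, tailSequence],
 * so the serialized item count is tail - head + 1 (0 for an empty buffer, whose tail sits one
 * behind its head). The helper below is illustrative only and is not part of the Hazelcast API.
 */
public class RingbufferPayloadSketch {
    static long itemCount(long headSequence, long tailSequence) {
        return Math.max(0L, tailSequence - headSequence + 1);
    }

    public static void main(String[] args) {
        System.out.println(itemCount(0, 4));  // 5 items serialized for sequences 0..4
        System.out.println(itemCount(0, -1)); // 0 items for an empty ringbuffer
    }
}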
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package brooklyn.management.ha; import static com.google.common.base.Preconditions.checkNotNull; import java.io.File; import java.io.FileFilter; import java.io.IOException; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import brooklyn.entity.rebind.persister.BrooklynMementoPersisterToMultiFile; import brooklyn.entity.rebind.persister.FileBasedStoreObjectAccessor; import brooklyn.entity.rebind.persister.MementoFileWriterSync; import brooklyn.entity.rebind.persister.MementoSerializer; import brooklyn.entity.rebind.persister.RetryingMementoSerializer; import brooklyn.entity.rebind.persister.XmlMementoSerializer; import brooklyn.entity.rebind.plane.dto.ManagementPlaneSyncRecordImpl; import brooklyn.util.exceptions.Exceptions; import brooklyn.util.time.Duration; import brooklyn.util.time.Time; import com.google.common.annotations.Beta; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Charsets; import com.google.common.base.Stopwatch; import com.google.common.io.Files; /** * Structure of files is: * <ul> * <li>{@code plane/} - top-level directory * <ul> * <li>{@code master} - contains the id of the management-node that is currently master * <li>{@code change.log} - log of changes made * <li>{@code nodes/} - sub-directory, containing one file per management-node * <ul> * <li>{@code a9WiuVKp} - file named after the management-node's id, containing the management node's current state * <li>{@code E1eDXQF3} * </ul> * </ul> * </ul> * * All writes are done synchronously. * * @since 0.7.0 * * @author aled * @deprecated since 0.7.0 use {@link ManagementPlaneSyncRecordPersisterToObjectStore} e.g. with {@link FileBasedStoreObjectAccessor} */ @Beta @Deprecated public class ManagementPlaneSyncRecordPersisterToMultiFile implements ManagementPlaneSyncRecordPersister { // TODO Multiple node appending to change.log could cause strange interleaving, or perhaps even data loss? // But this file is not critical to functionality. // TODO Should ManagementPlaneSyncRecordPersister.Delta be different so can tell what is a significant event, // and thus log it in change.log - currently only subset of significant things being logged. private static final Logger LOG = LoggerFactory.getLogger(ManagementPlaneSyncRecordPersisterToMultiFile.class); private static final Duration SHUTDOWN_TIMEOUT = Duration.TEN_SECONDS; private final String tmpSuffix; private final File dir; private final File nodesDir; // TODO Leak if we go through lots of managers; but tiny! 
private final ConcurrentMap<String, MementoFileWriterSync<ManagementNodeSyncRecord>> nodeWriters = new ConcurrentHashMap<String, MementoFileWriterSync<ManagementNodeSyncRecord>>(); private final MementoFileWriterSync<String> masterWriter; private final MementoFileWriterSync<String> changeLogWriter; private final MementoSerializer<Object> serializer; private static final int MAX_SERIALIZATION_ATTEMPTS = 5; private volatile boolean running = true; /** * @param dir Directory to write management-plane data * @param classLoader ClassLoader to use when deserializing data * @param id Unique identifier, e.g. used for temp file suffix in case multpile concurrent writers */ public ManagementPlaneSyncRecordPersisterToMultiFile(File dir, ClassLoader classLoader, String id) { this.dir = checkNotNull(dir, "dir"); MementoSerializer<Object> rawSerializer = new XmlMementoSerializer<Object>(checkNotNull(classLoader, "classLoader")); this.serializer = new RetryingMementoSerializer<Object>(rawSerializer, MAX_SERIALIZATION_ATTEMPTS); this.tmpSuffix = checkNotNull(id, "id")+".tmp"; // important to end in .tmp for loadMemento's file filter BrooklynMementoPersisterToMultiFile.checkDirIsAccessible(dir); nodesDir = new File(dir, "nodes"); nodesDir.mkdir(); BrooklynMementoPersisterToMultiFile.checkDirIsAccessible(nodesDir); masterWriter = new MementoFileWriterSync<String>(getFileForMaster(), serializer, tmpSuffix); changeLogWriter = new MementoFileWriterSync<String>(getFileForChangeLog(), MementoSerializer.NOOP, tmpSuffix); LOG.info("ManagementPlaneMemento-persister will use directory {}", dir); } @Override public void stop() { running = false; try { for (MementoFileWriterSync<?> writer : nodeWriters.values()) { try { writer.waitForWriteCompleted(SHUTDOWN_TIMEOUT); } catch (TimeoutException e) { LOG.warn("Timeout during shutdown, waiting for write of "+writer+"; continuing"); } } try { masterWriter.waitForWriteCompleted(SHUTDOWN_TIMEOUT); } catch (TimeoutException e) { LOG.warn("Timeout during shutdown, waiting for write of "+masterWriter+"; continuing"); } } catch (InterruptedException e) { throw Exceptions.propagate(e); } } @VisibleForTesting public File getDir() { return dir; } @Override public ManagementPlaneSyncRecord loadSyncRecord() throws IOException { if (!running) { throw new IllegalStateException("Persister not running; cannot load memento from "+dir); } // Note this is called a lot - every time we check the heartbeats if (LOG.isTraceEnabled()) LOG.trace("Loading management-plane memento from {}", dir); Stopwatch stopwatch = Stopwatch.createStarted(); ManagementPlaneSyncRecordImpl.Builder builder = ManagementPlaneSyncRecordImpl.builder(); // Be careful about order: if the master-file says nodeX then nodeX's file must have an up-to-date timestamp. // Therefore read master file first, followed by the other node-files. File masterFile = getFileForMaster(); String masterNodeId = (String) (masterFile.exists() ? 
serializer.fromString(readFile(masterFile)) : null); if (masterNodeId == null) { LOG.warn("No entity-memento deserialized from file "+masterFile+"; ignoring and continuing"); } else { builder.masterNodeId(masterNodeId); } // Load node-files FileFilter fileFilter = new FileFilter() { @Override public boolean accept(File file) { return !file.getName().endsWith(".tmp"); } }; File[] nodeFiles = nodesDir.listFiles(fileFilter); for (File file : nodeFiles) { ManagementNodeSyncRecord memento = (ManagementNodeSyncRecord) serializer.fromString(readFile(file)); if (memento == null) { LOG.warn("No manager-memento deserialized from file "+file+" (possibly just stopped?); ignoring and continuing"); } else { builder.node(memento); } } if (LOG.isTraceEnabled()) LOG.trace("Loaded management-plane memento; took {}", Time.makeTimeStringRounded(stopwatch.elapsed(TimeUnit.MILLISECONDS))); return builder.build(); } @Override public void delta(Delta delta) { if (!running) { if (LOG.isDebugEnabled()) LOG.debug("Persister not running; ignoring checkpointed delta of manager-memento"); return; } if (LOG.isTraceEnabled()) LOG.trace("Checkpointed delta of manager-memento; updating {}", delta); for (ManagementNodeSyncRecord m : delta.getNodes()) { persist(m); } for (String id : delta.getRemovedNodeIds()) { deleteNode(id); } switch (delta.getMasterChange()) { case NO_CHANGE: break; // no-op case SET_MASTER: persistMaster(checkNotNull(delta.getNewMasterOrNull())); break; case CLEAR_MASTER: persistMaster(null); break; // no-op default: throw new IllegalStateException("Unknown state for master-change: "+delta.getMasterChange()); } } private void persistMaster(String nodeId) { masterWriter.write(nodeId); changeLogWriter.append(Time.makeDateString()+": set master to "+nodeId+"\n"); } @Override @VisibleForTesting public void waitForWritesCompleted(Duration timeout) throws InterruptedException, TimeoutException { for (MementoFileWriterSync<?> writer : nodeWriters.values()) { writer.waitForWriteCompleted(timeout); } masterWriter.waitForWriteCompleted(timeout); } private String readFile(File file) throws IOException { return Files.asCharSource(file, Charsets.UTF_8).read(); } private void persist(ManagementNodeSyncRecord node) { MementoFileWriterSync<ManagementNodeSyncRecord> writer = getOrCreateNodeWriter(node.getNodeId()); boolean fileExists = writer.exists(); writer.write(node); if (!fileExists) { changeLogWriter.append(Time.makeDateString()+": created node "+node.getNodeId()+"\n"); } if (node.getStatus() == ManagementNodeState.TERMINATED || node.getStatus() == ManagementNodeState.FAILED) { changeLogWriter.append(Time.makeDateString()+": set node "+node.getNodeId()+" status to "+node.getStatus()+"\n"); } } private void deleteNode(String nodeId) { getOrCreateNodeWriter(nodeId).delete(); changeLogWriter.append(Time.makeDateString()+": deleted node "+nodeId+"\n"); } private MementoFileWriterSync<ManagementNodeSyncRecord> getOrCreateNodeWriter(String nodeId) { MementoFileWriterSync<ManagementNodeSyncRecord> writer = nodeWriters.get(nodeId); if (writer == null) { nodeWriters.putIfAbsent(nodeId, new MementoFileWriterSync<ManagementNodeSyncRecord>(getFileForNode(nodeId), serializer, tmpSuffix)); writer = nodeWriters.get(nodeId); } return writer; } private File getFileForNode(String nodeId) { return new File(nodesDir, nodeId); } private File getFileForMaster() { return new File(dir, "master"); } private File getFileForPlaneId() { return new File(dir, "plane.id"); } private File getFileForChangeLog() { return new File(dir, 
"change.log"); } }
/* * Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ package com.amazonaws.services.autoscaling.model; import java.io.Serializable; import com.amazonaws.AmazonWebServiceRequest; /** * Container for the parameters to the {@link com.amazonaws.services.autoscaling.AmazonAutoScaling#describeScalingActivities(DescribeScalingActivitiesRequest) DescribeScalingActivities operation}. * <p> * Describes one or more scaling activities for the specified Auto * Scaling group. If you omit the <code>ActivityIds</code> , the call * returns all activities from the past six weeks. Activities are sorted * by the start time. Activities still in progress appear first on the * list. * </p> * * @see com.amazonaws.services.autoscaling.AmazonAutoScaling#describeScalingActivities(DescribeScalingActivitiesRequest) */ public class DescribeScalingActivitiesRequest extends AmazonWebServiceRequest implements Serializable, Cloneable { /** * The activity IDs of the desired scaling activities. If this list is * omitted, all activities are described. If the * <code>AutoScalingGroupName</code> parameter is provided, the results * are limited to that group. The list of requested activities cannot * contain more than 50 items. If unknown activities are requested, they * are ignored with no error. */ private com.amazonaws.internal.ListWithAutoConstructFlag<String> activityIds; /** * The name of the group. * <p> * <b>Constraints:</b><br/> * <b>Length: </b>1 - 1600<br/> * <b>Pattern: </b>[&#92;u0020-&#92;uD7FF&#92;uE000-&#92;uFFFD&#92;uD800&#92;uDC00-&#92;uDBFF&#92;uDFFF\r\n\t]*<br/> */ private String autoScalingGroupName; /** * The maximum number of items to return with this call. */ private Integer maxRecords; /** * The token for the next set of items to return. (You received this * token from a previous call.) * <p> * <b>Constraints:</b><br/> * <b>Pattern: </b>[&#92;u0020-&#92;uD7FF&#92;uE000-&#92;uFFFD&#92;uD800&#92;uDC00-&#92;uDBFF&#92;uDFFF\r\n\t]*<br/> */ private String nextToken; /** * The activity IDs of the desired scaling activities. If this list is * omitted, all activities are described. If the * <code>AutoScalingGroupName</code> parameter is provided, the results * are limited to that group. The list of requested activities cannot * contain more than 50 items. If unknown activities are requested, they * are ignored with no error. * * @return The activity IDs of the desired scaling activities. If this list is * omitted, all activities are described. If the * <code>AutoScalingGroupName</code> parameter is provided, the results * are limited to that group. The list of requested activities cannot * contain more than 50 items. If unknown activities are requested, they * are ignored with no error. */ public java.util.List<String> getActivityIds() { if (activityIds == null) { activityIds = new com.amazonaws.internal.ListWithAutoConstructFlag<String>(); activityIds.setAutoConstruct(true); } return activityIds; } /** * The activity IDs of the desired scaling activities. 
If this list is * omitted, all activities are described. If the * <code>AutoScalingGroupName</code> parameter is provided, the results * are limited to that group. The list of requested activities cannot * contain more than 50 items. If unknown activities are requested, they * are ignored with no error. * * @param activityIds The activity IDs of the desired scaling activities. If this list is * omitted, all activities are described. If the * <code>AutoScalingGroupName</code> parameter is provided, the results * are limited to that group. The list of requested activities cannot * contain more than 50 items. If unknown activities are requested, they * are ignored with no error. */ public void setActivityIds(java.util.Collection<String> activityIds) { if (activityIds == null) { this.activityIds = null; return; } com.amazonaws.internal.ListWithAutoConstructFlag<String> activityIdsCopy = new com.amazonaws.internal.ListWithAutoConstructFlag<String>(activityIds.size()); activityIdsCopy.addAll(activityIds); this.activityIds = activityIdsCopy; } /** * The activity IDs of the desired scaling activities. If this list is * omitted, all activities are described. If the * <code>AutoScalingGroupName</code> parameter is provided, the results * are limited to that group. The list of requested activities cannot * contain more than 50 items. If unknown activities are requested, they * are ignored with no error. * <p> * <b>NOTE:</b> This method appends the values to the existing list (if * any). Use {@link #setActivityIds(java.util.Collection)} or {@link * #withActivityIds(java.util.Collection)} if you want to override the * existing values. * <p> * Returns a reference to this object so that method calls can be chained together. * * @param activityIds The activity IDs of the desired scaling activities. If this list is * omitted, all activities are described. If the * <code>AutoScalingGroupName</code> parameter is provided, the results * are limited to that group. The list of requested activities cannot * contain more than 50 items. If unknown activities are requested, they * are ignored with no error. * * @return A reference to this updated object so that method calls can be chained * together. */ public DescribeScalingActivitiesRequest withActivityIds(String... activityIds) { if (getActivityIds() == null) setActivityIds(new java.util.ArrayList<String>(activityIds.length)); for (String value : activityIds) { getActivityIds().add(value); } return this; } /** * The activity IDs of the desired scaling activities. If this list is * omitted, all activities are described. If the * <code>AutoScalingGroupName</code> parameter is provided, the results * are limited to that group. The list of requested activities cannot * contain more than 50 items. If unknown activities are requested, they * are ignored with no error. * <p> * Returns a reference to this object so that method calls can be chained together. * * @param activityIds The activity IDs of the desired scaling activities. If this list is * omitted, all activities are described. If the * <code>AutoScalingGroupName</code> parameter is provided, the results * are limited to that group. The list of requested activities cannot * contain more than 50 items. If unknown activities are requested, they * are ignored with no error. * * @return A reference to this updated object so that method calls can be chained * together. 
*/ public DescribeScalingActivitiesRequest withActivityIds(java.util.Collection<String> activityIds) { if (activityIds == null) { this.activityIds = null; } else { com.amazonaws.internal.ListWithAutoConstructFlag<String> activityIdsCopy = new com.amazonaws.internal.ListWithAutoConstructFlag<String>(activityIds.size()); activityIdsCopy.addAll(activityIds); this.activityIds = activityIdsCopy; } return this; } /** * The name of the group. * <p> * <b>Constraints:</b><br/> * <b>Length: </b>1 - 1600<br/> * <b>Pattern: </b>[&#92;u0020-&#92;uD7FF&#92;uE000-&#92;uFFFD&#92;uD800&#92;uDC00-&#92;uDBFF&#92;uDFFF\r\n\t]*<br/> * * @return The name of the group. */ public String getAutoScalingGroupName() { return autoScalingGroupName; } /** * The name of the group. * <p> * <b>Constraints:</b><br/> * <b>Length: </b>1 - 1600<br/> * <b>Pattern: </b>[&#92;u0020-&#92;uD7FF&#92;uE000-&#92;uFFFD&#92;uD800&#92;uDC00-&#92;uDBFF&#92;uDFFF\r\n\t]*<br/> * * @param autoScalingGroupName The name of the group. */ public void setAutoScalingGroupName(String autoScalingGroupName) { this.autoScalingGroupName = autoScalingGroupName; } /** * The name of the group. * <p> * Returns a reference to this object so that method calls can be chained together. * <p> * <b>Constraints:</b><br/> * <b>Length: </b>1 - 1600<br/> * <b>Pattern: </b>[&#92;u0020-&#92;uD7FF&#92;uE000-&#92;uFFFD&#92;uD800&#92;uDC00-&#92;uDBFF&#92;uDFFF\r\n\t]*<br/> * * @param autoScalingGroupName The name of the group. * * @return A reference to this updated object so that method calls can be chained * together. */ public DescribeScalingActivitiesRequest withAutoScalingGroupName(String autoScalingGroupName) { this.autoScalingGroupName = autoScalingGroupName; return this; } /** * The maximum number of items to return with this call. * * @return The maximum number of items to return with this call. */ public Integer getMaxRecords() { return maxRecords; } /** * The maximum number of items to return with this call. * * @param maxRecords The maximum number of items to return with this call. */ public void setMaxRecords(Integer maxRecords) { this.maxRecords = maxRecords; } /** * The maximum number of items to return with this call. * <p> * Returns a reference to this object so that method calls can be chained together. * * @param maxRecords The maximum number of items to return with this call. * * @return A reference to this updated object so that method calls can be chained * together. */ public DescribeScalingActivitiesRequest withMaxRecords(Integer maxRecords) { this.maxRecords = maxRecords; return this; } /** * The token for the next set of items to return. (You received this * token from a previous call.) * <p> * <b>Constraints:</b><br/> * <b>Pattern: </b>[&#92;u0020-&#92;uD7FF&#92;uE000-&#92;uFFFD&#92;uD800&#92;uDC00-&#92;uDBFF&#92;uDFFF\r\n\t]*<br/> * * @return The token for the next set of items to return. (You received this * token from a previous call.) */ public String getNextToken() { return nextToken; } /** * The token for the next set of items to return. (You received this * token from a previous call.) * <p> * <b>Constraints:</b><br/> * <b>Pattern: </b>[&#92;u0020-&#92;uD7FF&#92;uE000-&#92;uFFFD&#92;uD800&#92;uDC00-&#92;uDBFF&#92;uDFFF\r\n\t]*<br/> * * @param nextToken The token for the next set of items to return. (You received this * token from a previous call.) */ public void setNextToken(String nextToken) { this.nextToken = nextToken; } /** * The token for the next set of items to return. (You received this * token from a previous call.) 
* <p> * Returns a reference to this object so that method calls can be chained together. * <p> * <b>Constraints:</b><br/> * <b>Pattern: </b>[&#92;u0020-&#92;uD7FF&#92;uE000-&#92;uFFFD&#92;uD800&#92;uDC00-&#92;uDBFF&#92;uDFFF\r\n\t]*<br/> * * @param nextToken The token for the next set of items to return. (You received this * token from a previous call.) * * @return A reference to this updated object so that method calls can be chained * together. */ public DescribeScalingActivitiesRequest withNextToken(String nextToken) { this.nextToken = nextToken; return this; } /** * Returns a string representation of this object; useful for testing and * debugging. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getActivityIds() != null) sb.append("ActivityIds: " + getActivityIds() + ","); if (getAutoScalingGroupName() != null) sb.append("AutoScalingGroupName: " + getAutoScalingGroupName() + ","); if (getMaxRecords() != null) sb.append("MaxRecords: " + getMaxRecords() + ","); if (getNextToken() != null) sb.append("NextToken: " + getNextToken() ); sb.append("}"); return sb.toString(); } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getActivityIds() == null) ? 0 : getActivityIds().hashCode()); hashCode = prime * hashCode + ((getAutoScalingGroupName() == null) ? 0 : getAutoScalingGroupName().hashCode()); hashCode = prime * hashCode + ((getMaxRecords() == null) ? 0 : getMaxRecords().hashCode()); hashCode = prime * hashCode + ((getNextToken() == null) ? 0 : getNextToken().hashCode()); return hashCode; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof DescribeScalingActivitiesRequest == false) return false; DescribeScalingActivitiesRequest other = (DescribeScalingActivitiesRequest)obj; if (other.getActivityIds() == null ^ this.getActivityIds() == null) return false; if (other.getActivityIds() != null && other.getActivityIds().equals(this.getActivityIds()) == false) return false; if (other.getAutoScalingGroupName() == null ^ this.getAutoScalingGroupName() == null) return false; if (other.getAutoScalingGroupName() != null && other.getAutoScalingGroupName().equals(this.getAutoScalingGroupName()) == false) return false; if (other.getMaxRecords() == null ^ this.getMaxRecords() == null) return false; if (other.getMaxRecords() != null && other.getMaxRecords().equals(this.getMaxRecords()) == false) return false; if (other.getNextToken() == null ^ this.getNextToken() == null) return false; if (other.getNextToken() != null && other.getNextToken().equals(this.getNextToken()) == false) return false; return true; } @Override public DescribeScalingActivitiesRequest clone() { return (DescribeScalingActivitiesRequest) super.clone(); } }
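/*
 * Hedged usage sketch (not part of this generated model class): paging through scaling activities
 * with the v1 AmazonAutoScaling client, assuming the usual result accessors getActivities() and
 * getNextToken(). The group name "my-asg" is a placeholder.
 */
import com.amazonaws.services.autoscaling.AmazonAutoScaling;
import com.amazonaws.services.autoscaling.model.Activity;
import com.amazonaws.services.autoscaling.model.DescribeScalingActivitiesRequest;
import com.amazonaws.services.autoscaling.model.DescribeScalingActivitiesResult;

public class DescribeScalingActivitiesExample {
    static void printAllActivities(AmazonAutoScaling autoScaling) {
        String nextToken = null;
        do {
            DescribeScalingActivitiesRequest request = new DescribeScalingActivitiesRequest()
                    .withAutoScalingGroupName("my-asg") // placeholder group name
                    .withMaxRecords(50)
                    .withNextToken(nextToken);
            DescribeScalingActivitiesResult result = autoScaling.describeScalingActivities(request);
            for (Activity activity : result.getActivities()) {
                System.out.println(activity.getActivityId() + " " + activity.getStatusCode());
            }
            nextToken = result.getNextToken();
        } while (nextToken != null);
    }
}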
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.chimesdkidentity.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.protocol.StructuredPojo; import com.amazonaws.protocol.ProtocolMarshaller; /** * <p> * Summary of the data for an <code>AppInstance</code>. * </p> * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/chime-sdk-identity-2021-04-20/AppInstanceSummary" * target="_top">AWS API Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class AppInstanceSummary implements Serializable, Cloneable, StructuredPojo { /** * <p> * The <code>AppInstance</code> ARN. * </p> */ private String appInstanceArn; /** * <p> * The name of the <code>AppInstance</code>. * </p> */ private String name; /** * <p> * The metadata of the <code>AppInstance</code>. * </p> */ private String metadata; /** * <p> * The <code>AppInstance</code> ARN. * </p> * * @param appInstanceArn * The <code>AppInstance</code> ARN. */ public void setAppInstanceArn(String appInstanceArn) { this.appInstanceArn = appInstanceArn; } /** * <p> * The <code>AppInstance</code> ARN. * </p> * * @return The <code>AppInstance</code> ARN. */ public String getAppInstanceArn() { return this.appInstanceArn; } /** * <p> * The <code>AppInstance</code> ARN. * </p> * * @param appInstanceArn * The <code>AppInstance</code> ARN. * @return Returns a reference to this object so that method calls can be chained together. */ public AppInstanceSummary withAppInstanceArn(String appInstanceArn) { setAppInstanceArn(appInstanceArn); return this; } /** * <p> * The name of the <code>AppInstance</code>. * </p> * * @param name * The name of the <code>AppInstance</code>. */ public void setName(String name) { this.name = name; } /** * <p> * The name of the <code>AppInstance</code>. * </p> * * @return The name of the <code>AppInstance</code>. */ public String getName() { return this.name; } /** * <p> * The name of the <code>AppInstance</code>. * </p> * * @param name * The name of the <code>AppInstance</code>. * @return Returns a reference to this object so that method calls can be chained together. */ public AppInstanceSummary withName(String name) { setName(name); return this; } /** * <p> * The metadata of the <code>AppInstance</code>. * </p> * * @param metadata * The metadata of the <code>AppInstance</code>. */ public void setMetadata(String metadata) { this.metadata = metadata; } /** * <p> * The metadata of the <code>AppInstance</code>. * </p> * * @return The metadata of the <code>AppInstance</code>. */ public String getMetadata() { return this.metadata; } /** * <p> * The metadata of the <code>AppInstance</code>. * </p> * * @param metadata * The metadata of the <code>AppInstance</code>. * @return Returns a reference to this object so that method calls can be chained together. */ public AppInstanceSummary withMetadata(String metadata) { setMetadata(metadata); return this; } /** * Returns a string representation of this object. 
This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getAppInstanceArn() != null) sb.append("AppInstanceArn: ").append(getAppInstanceArn()).append(","); if (getName() != null) sb.append("Name: ").append("***Sensitive Data Redacted***").append(","); if (getMetadata() != null) sb.append("Metadata: ").append("***Sensitive Data Redacted***"); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof AppInstanceSummary == false) return false; AppInstanceSummary other = (AppInstanceSummary) obj; if (other.getAppInstanceArn() == null ^ this.getAppInstanceArn() == null) return false; if (other.getAppInstanceArn() != null && other.getAppInstanceArn().equals(this.getAppInstanceArn()) == false) return false; if (other.getName() == null ^ this.getName() == null) return false; if (other.getName() != null && other.getName().equals(this.getName()) == false) return false; if (other.getMetadata() == null ^ this.getMetadata() == null) return false; if (other.getMetadata() != null && other.getMetadata().equals(this.getMetadata()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getAppInstanceArn() == null) ? 0 : getAppInstanceArn().hashCode()); hashCode = prime * hashCode + ((getName() == null) ? 0 : getName().hashCode()); hashCode = prime * hashCode + ((getMetadata() == null) ? 0 : getMetadata().hashCode()); return hashCode; } @Override public AppInstanceSummary clone() { try { return (AppInstanceSummary) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } @com.amazonaws.annotation.SdkInternalApi @Override public void marshall(ProtocolMarshaller protocolMarshaller) { com.amazonaws.services.chimesdkidentity.model.transform.AppInstanceSummaryMarshaller.getInstance().marshall(this, protocolMarshaller); } }
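/*
 * Hedged usage sketch: the with* mutators chain, and, per the toString() implementation above,
 * Name and Metadata are redacted while the ARN is printed in clear text. The ARN value below is
 * a placeholder.
 */
import com.amazonaws.services.chimesdkidentity.model.AppInstanceSummary;

public class AppInstanceSummaryExample {
    public static void main(String[] args) {
        AppInstanceSummary summary = new AppInstanceSummary()
                .withAppInstanceArn("arn:aws:chime:us-east-1:111122223333:app-instance/example")
                .withName("demo-instance")
                .withMetadata("internal");
        // Prints roughly: {AppInstanceArn: arn:...,Name: ***Sensitive Data Redacted***,Metadata: ***Sensitive Data Redacted***}
        System.out.println(summary);
    }
}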
package com.siondream.libgdxjam.screens; import box2dLight.RayHandler; import com.badlogic.ashley.core.Engine; import com.badlogic.ashley.core.EntitySystem; import com.badlogic.ashley.core.Family; import com.badlogic.gdx.Gdx; import com.badlogic.gdx.Input.Keys; import com.badlogic.gdx.InputMultiplexer; import com.badlogic.gdx.InputProcessor; import com.badlogic.gdx.Screen; import com.badlogic.gdx.assets.AssetManager; import com.badlogic.gdx.audio.Music; import com.badlogic.gdx.audio.Sound; import com.badlogic.gdx.graphics.Color; import com.badlogic.gdx.graphics.OrthographicCamera; import com.badlogic.gdx.physics.box2d.World; import com.badlogic.gdx.scenes.scene2d.InputEvent; import com.badlogic.gdx.scenes.scene2d.Stage; import com.badlogic.gdx.scenes.scene2d.ui.Button; import com.badlogic.gdx.scenes.scene2d.ui.ImageButton; import com.badlogic.gdx.scenes.scene2d.ui.Label; import com.badlogic.gdx.scenes.scene2d.ui.Skin; import com.badlogic.gdx.scenes.scene2d.ui.Table; import com.badlogic.gdx.scenes.scene2d.ui.TextButton; import com.badlogic.gdx.scenes.scene2d.ui.Window; import com.badlogic.gdx.scenes.scene2d.utils.ClickListener; import com.badlogic.gdx.utils.Disposable; import com.badlogic.gdx.utils.Logger; import com.badlogic.gdx.utils.TimeUtils; import com.badlogic.gdx.utils.viewport.ExtendViewport; import com.badlogic.gdx.utils.viewport.Viewport; import com.siondream.libgdxjam.Env; import com.siondream.libgdxjam.ecs.components.NodeComponent; import com.siondream.libgdxjam.ecs.systems.AnimationControlSystem; import com.siondream.libgdxjam.ecs.systems.CameraSystem; import com.siondream.libgdxjam.ecs.systems.DoorSystem; import com.siondream.libgdxjam.ecs.systems.LayerSystem; import com.siondream.libgdxjam.ecs.systems.LightSystem; import com.siondream.libgdxjam.ecs.systems.NodeSystem; import com.siondream.libgdxjam.ecs.systems.ParticleSystem; import com.siondream.libgdxjam.ecs.systems.PhysicsSystem; import com.siondream.libgdxjam.ecs.systems.RenderingSystem; import com.siondream.libgdxjam.ecs.systems.SensorSystem; import com.siondream.libgdxjam.ecs.systems.SpineSystem; import com.siondream.libgdxjam.ecs.systems.VisionSystem; import com.siondream.libgdxjam.ecs.systems.agents.CCTvSystem; import com.siondream.libgdxjam.ecs.systems.agents.GruntSystem; import com.siondream.libgdxjam.ecs.systems.agents.PlayerSystem; import com.siondream.libgdxjam.ecs.systems.ai.AttackSystem; import com.siondream.libgdxjam.ecs.systems.ai.IdleSystem; import com.siondream.libgdxjam.ecs.systems.ai.PatrolSystem; import com.siondream.libgdxjam.ecs.systems.ai.SleepSystem; import com.siondream.libgdxjam.ecs.systems.ai.StateMachineSystem; import com.siondream.libgdxjam.overlap.OverlapScene; import com.siondream.libgdxjam.physics.Categories; import com.siondream.libgdxjam.progression.EventManager; import com.siondream.libgdxjam.progression.SceneManager; public class GameScreen implements Screen, InputProcessor { private OrthographicCamera camera; private Viewport viewport; private Engine engine; private double accumulator; private double currentTime; private OverlapScene scene; private Logger logger = new Logger(GameScreen.class.getSimpleName(), Env.LOG_LEVEL); private Button optionsButton; private Window optionsWindow; private Window victoryWindow; private Window defeatWindow; private Music music; private Sound click; public GameScreen() { logger.info("initialize"); camera = new OrthographicCamera(); viewport = new ExtendViewport( Env.MIN_WORLD_WIDTH, Env.MIN_WORLD_HEIGHT, Env.MAX_WORLD_WIDTH, Env.MAX_WORLD_HEIGHT, camera 
); setupEngine(); SceneManager.init(engine); EventManager.init(engine); AssetManager manager = Env.getGame().getAssetManager(); music = manager.get(Env.MUSIC_FOLDER + "/danger-storm.ogg", Music.class); music.setLooping(true); click = manager.get(Env.SFX_FOLDER + "/click3.ogg", Sound.class); } @Override public void show() { logger.info("show"); PhysicsSystem physics = engine.getSystem(PhysicsSystem.class); World world = physics.getWorld(); Categories categories = physics.getCategories(); RayHandler rayHandler = engine.getSystem(LightSystem.class).getRayHandler(); scene = SceneManager.loadScene("Level1", world, categories, rayHandler); setupUI(); addInputProcessors(); camera.position.set(0f,0f,0f); engine.getSystem(PlayerSystem.class).setBlockInput(false); music.play(); } @Override public void hide() { logger.info("hide"); scene.removeFromEngine(engine); engine.removeAllEntities(); removeInputProcessors(); music.stop(); } private void setupUI() { AssetManager assetMgr = Env.getGame().getAssetManager(); Stage stage = Env.getGame().getStage(); Table mainTable = new Table(); mainTable.setFillParent(true); Skin skin = assetMgr.get(Env.UI_FOLDER + "/ui.skin", Skin.class); createOptionsDialog(skin, mainTable); createVictoryDialog(skin, mainTable); createDefeatDialog(skin, mainTable); mainTable.row().padTop(30f).colspan(2).expand(); createOptionsButton(skin, mainTable); mainTable.row().colspan(1); stage.addActor(mainTable); } public void showVictory() { victoryWindow.setVisible(true); } public void showDefeat() { defeatWindow.setVisible(true); } //TODO: TO BE IMPROVED private void createVictoryDialog(Skin skin, Table mainTable) { victoryWindow = new Window("", skin, "options"); victoryWindow.setSize(700f, 500f); victoryWindow.setPosition((Env.MAX_UI_WIDTH - 700f) * .5f, (Env.MAX_UI_HEIGHT - 500f) * .5f); Label title = new Label("VICTORY", skin, "dialogtitle_green"); title.setColor(Color.GREEN); TextButton againBtn = new TextButton("PLAY AGAIN", skin, "optionsmenu"); TextButton exitBtn = new TextButton("EXIT", skin, "optionsmenu"); victoryWindow.row().pad(30f).center().uniformX(); victoryWindow.add(title).center(); Table buttonsGroup = new Table(); buttonsGroup.row().center().fillX(); buttonsGroup.add(againBtn); buttonsGroup.row().center().fillX().spaceTop(30f); buttonsGroup.add(exitBtn); victoryWindow.row().pad(50).center(); victoryWindow.add(buttonsGroup); mainTable.addActor(victoryWindow); againBtn.addListener(new ClickListener() { @Override public void clicked(InputEvent event, float x, float y) { click.play(); optionsButton.setVisible(true); victoryWindow.setVisible(false); SceneManager.resetCurrentScene(); }; }); exitBtn.addListener(new ClickListener() { @Override public void clicked(InputEvent event, float x, float y) { click.play(); Gdx.app.exit(); }; }); victoryWindow.setVisible(false); } //TODO: TO BE IMPROVED private void createDefeatDialog(Skin skin, Table mainTable) { defeatWindow = new Window("", skin, "options"); defeatWindow.setSize(700f, 500f); defeatWindow.setPosition((Env.MAX_UI_WIDTH - 700f) * .5f, (Env.MAX_UI_HEIGHT - 500f) * .5f); Label title = new Label("DEFEAT", skin, "dialogtitle_red"); TextButton againBtn = new TextButton("TRY AGAIN", skin, "optionsmenu"); TextButton exitBtn = new TextButton("EXIT", skin, "optionsmenu"); defeatWindow.row().pad(30f).center().uniformX(); defeatWindow.add(title).center(); Table buttonsGroup = new Table(); buttonsGroup.row().center().fillX(); buttonsGroup.add(againBtn); buttonsGroup.row().center().fillX().spaceTop(30f); 
buttonsGroup.add(exitBtn); defeatWindow.row().pad(50).center(); defeatWindow.add(buttonsGroup); mainTable.addActor(defeatWindow); againBtn.addListener(new ClickListener() { @Override public void clicked(InputEvent event, float x, float y) { click.play(); optionsButton.setVisible(true); defeatWindow.setVisible(false); SceneManager.resetCurrentScene(); }; }); exitBtn.addListener(new ClickListener() { @Override public void clicked(InputEvent event, float x, float y) { click.play(); Gdx.app.exit(); }; }); defeatWindow.setVisible(false); } //TODO: TO BE IMPROVED private void createOptionsDialog(Skin skin, Table mainTable) { optionsWindow = new Window("", skin, "options"); optionsWindow.setSize(700f, 500f); optionsWindow.setPosition((Env.MAX_UI_WIDTH - 700f) * .5f, (Env.MAX_UI_HEIGHT - 500f) * .5f); Label title = new Label("SETTINGS", skin, "dialogtitle_black"); TextButton resetBtn = new TextButton("RESET", skin, "optionsmenu"); TextButton continueBtn = new TextButton("CONTINUE", skin, "optionsmenu"); optionsWindow.row().pad(30f).center().uniformX(); optionsWindow.add(title).center(); Table buttonsGroup = new Table(); buttonsGroup.row().center().fillX(); buttonsGroup.add(resetBtn); buttonsGroup.row().center().fillX().spaceTop(30f); buttonsGroup.add(continueBtn); optionsWindow.row().pad(50).center(); optionsWindow.add(buttonsGroup); mainTable.addActor(optionsWindow); resetBtn.addListener(new ClickListener() { @Override public void clicked(InputEvent event, float x, float y) { click.play(); optionsButton.setVisible(true); SceneManager.resetCurrentScene(); }; }); continueBtn.addListener(new ClickListener() { @Override public void clicked(InputEvent event, float x, float y) { click.play(); optionsButton.setVisible(true); optionsWindow.setVisible(false); }; }); optionsWindow.setVisible(false); } private void createOptionsButton(Skin skin, final Table mainTable) { optionsButton = new ImageButton(skin, "options"); optionsButton.addListener(new ClickListener() { @Override public void clicked(InputEvent event, float x, float y) { click.play(); optionsButton.setVisible(false); optionsWindow.setVisible(true); }; }); mainTable.add(optionsButton).size(75f).top().right(); } @Override public void render(float delta) { double newTime = TimeUtils.millis() / 1000.0; double frameTime = Math.min(newTime - currentTime, Env.MAX_STEP); float deltaTime = (float)frameTime; currentTime = newTime; accumulator += frameTime; while (accumulator >= Env.STEP) { engine.getSystem(PhysicsSystem.class).setAlpha(Env.STEP / (float)accumulator); engine.update(deltaTime); accumulator -= Env.STEP; } engine.getSystem(RenderingSystem.class).update(Env.STEP); } @Override public void resize(int width, int height) { logger.info("resize"); viewport.update(width, height); } @Override public void pause() { } @Override public void resume() { } @Override public void dispose() { logger.info("dispose"); for (EntitySystem system : engine.getSystems()) { if (system instanceof Disposable) { ((Disposable)system).dispose(); } } } @Override public boolean keyDown(int keycode) { if (keycode == Keys.D) { //engine.getSystem(RenderingSystem.class).toggleDebug(); return true; } return false; } @Override public boolean keyUp(int keycode) { return false; } @Override public boolean keyTyped(char character) { return false; } @Override public boolean touchDown(int screenX, int screenY, int pointer, int button) { return false; } @Override public boolean touchUp(int screenX, int screenY, int pointer, int button) { return false; } @Override public boolean 
touchDragged(int screenX, int screenY, int pointer) { return false; } @Override public boolean mouseMoved(int screenX, int screenY) { return false; } @Override public boolean scrolled(int amount) { return false; } private void setupEngine() { logger.info("initializing engine"); engine = new Engine(); PhysicsSystem physicsSystem = new PhysicsSystem( Env.getGame().getCategories() ); CameraSystem cameraSystem = new CameraSystem(camera); LightSystem lightSystem = new LightSystem(physicsSystem.getWorld()); ParticleSystem particleSystem = new ParticleSystem(Env.UI_TO_WORLD); LayerSystem layerSystem = new LayerSystem(); SpineSystem spineSystem = new SpineSystem(); VisionSystem visionSystem = new VisionSystem(physicsSystem.getWorld()); StateMachineSystem stateMachineSystem = new StateMachineSystem(); PatrolSystem patrolSystem = new PatrolSystem(); IdleSystem idleSystem = new IdleSystem(); AttackSystem attackSystem = new AttackSystem( physicsSystem.getWorld(), physicsSystem.getCategories(), physicsSystem.getHandler() ); SleepSystem sleepSystem = new SleepSystem(Env.getGame().getTags()); GruntSystem gruntSystem = new GruntSystem( visionSystem, Env.getGame().getTags() ); CCTvSystem cctvSystem = new CCTvSystem( visionSystem, Env.getGame().getTags() ); PlayerSystem playerSystem = new PlayerSystem( physicsSystem, Env.getGame().getTags() ); SensorSystem sensorSystem = new SensorSystem(physicsSystem); DoorSystem doorSystem = new DoorSystem(); AnimationControlSystem animationControlSystem = new AnimationControlSystem(); RenderingSystem renderingSystem = new RenderingSystem( viewport, cameraSystem.getFocusRectangle(), cameraSystem.getTargetPosition(), Env.getGame().getStage(), physicsSystem.getWorld(), lightSystem.getRayHandler() ); physicsSystem.priority = 1; stateMachineSystem.priority = 1; patrolSystem.priority = 2; idleSystem.priority = 3; attackSystem.priority = 4; sleepSystem.priority = 5; sensorSystem.priority = 6; lightSystem.priority = 7; particleSystem.priority = 8; layerSystem.priority = 9; spineSystem.priority = 10; visionSystem.priority = 11; cctvSystem.priority = 12; gruntSystem.priority = 13; playerSystem.priority = 14; cameraSystem.priority = 15; animationControlSystem.priority = 16; doorSystem.priority = 16; renderingSystem.priority = 18; engine.addSystem(physicsSystem); engine.addSystem(stateMachineSystem); engine.addSystem(patrolSystem); engine.addSystem(idleSystem); engine.addSystem(attackSystem); engine.addSystem(sleepSystem); engine.addSystem(sensorSystem); engine.addSystem(cameraSystem); engine.addSystem(lightSystem); engine.addSystem(particleSystem); engine.addSystem(layerSystem); engine.addSystem(spineSystem); engine.addSystem(renderingSystem); engine.addSystem(cctvSystem); engine.addSystem(gruntSystem); engine.addSystem(animationControlSystem); engine.addSystem(doorSystem); engine.addSystem(playerSystem); engine.addSystem(visionSystem); engine.addEntityListener( Family.all(NodeComponent.class).get(), new NodeSystem(engine) ); //renderingSystem.setDebug(true); renderingSystem.setProcessing(false); } private void addInputProcessors() { logger.info("enabling engine input processors"); InputMultiplexer inputMultiplexer = Env.getGame().getMultiplexer(); inputMultiplexer.addProcessor(this); for (EntitySystem system : engine.getSystems()) { if (system instanceof InputProcessor) { inputMultiplexer.addProcessor((InputProcessor)system); } } } private void removeInputProcessors() { logger.info("disabling engine input processors"); InputMultiplexer inputMultiplexer = 
Env.getGame().getMultiplexer(); inputMultiplexer.removeProcessor(this); for (EntitySystem system : engine.getSystems()) { if (system instanceof InputProcessor) { inputMultiplexer.removeProcessor((InputProcessor)system); } } } }
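/*
 * A framework-free sketch of the fixed-timestep loop that GameScreen.render() above
 * implements: the frame time is clamped, accumulated, and consumed in fixed STEP-sized
 * simulation updates so physics behaves the same regardless of frame rate. STEP and
 * MAX_STEP here are placeholder constants standing in for Env.STEP and Env.MAX_STEP;
 * no libGDX types are used.
 */
class FixedTimestepSketch {
    static final double STEP = 1.0 / 60.0; // fixed simulation step (placeholder for Env.STEP)
    static final double MAX_STEP = 0.25;   // clamp large frame times to avoid a "spiral of death" (placeholder for Env.MAX_STEP)

    private double accumulator;
    private double currentTime = System.currentTimeMillis() / 1000.0;

    void frame() {
        double newTime = System.currentTimeMillis() / 1000.0;
        double frameTime = Math.min(newTime - currentTime, MAX_STEP);
        currentTime = newTime;
        accumulator += frameTime;
        while (accumulator >= STEP) {
            update(STEP);       // advance the simulation by one fixed step
            accumulator -= STEP;
        }
        render();               // draw once per frame, after the simulation has caught up
    }

    void update(double step) { /* simulation update goes here */ }

    void render() { /* rendering goes here */ }
}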
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package org.jetbrains.java.decompiler.modules.decompiler.stats; import org.jetbrains.java.decompiler.code.CodeConstants; import org.jetbrains.java.decompiler.main.DecompilerContext; import org.jetbrains.java.decompiler.main.collectors.BytecodeMappingTracer; import org.jetbrains.java.decompiler.main.collectors.CounterContainer; import org.jetbrains.java.decompiler.modules.decompiler.DecHelper; import org.jetbrains.java.decompiler.modules.decompiler.ExprProcessor; import org.jetbrains.java.decompiler.modules.decompiler.StatEdge; import org.jetbrains.java.decompiler.modules.decompiler.StatEdge.EdgeType; import org.jetbrains.java.decompiler.modules.decompiler.exps.VarExprent; import org.jetbrains.java.decompiler.struct.gen.VarType; import org.jetbrains.java.decompiler.util.TextBuffer; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Set; public final class CatchAllStatement extends Statement { private Statement handler; private boolean isFinally; private VarExprent monitor; private final List<VarExprent> vars = new ArrayList<>(); // ***************************************************************************** // constructors // ***************************************************************************** private CatchAllStatement() { super(StatementType.CATCH_ALL); } private CatchAllStatement(Statement head, Statement handler) { this(); first = head; stats.addWithKey(head, head.id); this.handler = handler; stats.addWithKey(handler, handler.id); List<StatEdge> lstSuccs = head.getSuccessorEdges(EdgeType.DIRECT_ALL); if (!lstSuccs.isEmpty()) { StatEdge edge = lstSuccs.get(0); if (edge.getType() == EdgeType.REGULAR) { post = edge.getDestination(); } } vars.add(new VarExprent(DecompilerContext.getCounterContainer().getCounterAndIncrement(CounterContainer.VAR_COUNTER), new VarType(CodeConstants.TYPE_OBJECT, 0, "java/lang/Throwable"), DecompilerContext.getVarProcessor())); } // ***************************************************************************** // public methods // ***************************************************************************** public static Statement isHead(Statement head) { if (head.getLastBasicType() != StatementType.GENERAL) { return null; } Set<Statement> setHandlers = DecHelper.getUniquePredExceptions(head); if (setHandlers.size() != 1) { return null; } for (StatEdge edge : head.getSuccessorEdges(EdgeType.EXCEPTION)) { Statement exc = edge.getDestination(); if (edge.getExceptions() == null && exc.getLastBasicType() == StatementType.GENERAL && setHandlers.contains(exc)) { List<StatEdge> lstSuccs = exc.getSuccessorEdges(EdgeType.DIRECT_ALL); if (lstSuccs.isEmpty() || lstSuccs.get(0).getType() != EdgeType.REGULAR) { if (head.isMonitorEnter() || exc.isMonitorEnter()) { return null; } if (DecHelper.checkStatementExceptions(Arrays.asList(head, exc))) { return new CatchAllStatement(head, exc); } } } } return null; } @Override public TextBuffer toJava(int indent, BytecodeMappingTracer tracer) { String new_line_separator = DecompilerContext.getNewLineSeparator(); TextBuffer buf = new TextBuffer(); buf.append(ExprProcessor.listToJava(varDefinitions, indent, tracer)); boolean labeled = isLabeled(); if (labeled) { buf.appendIndent(indent).append("label").append(Integer.toString(id)).append(":").appendLineSeparator(); tracer.incrementCurrentSourceLine(); } List<StatEdge> lstSuccs = 
first.getSuccessorEdges(EdgeType.DIRECT_ALL); if (first.type == StatementType.TRY_CATCH && first.varDefinitions.isEmpty() && isFinally && !labeled && !first.isLabeled() && (lstSuccs.isEmpty() || !lstSuccs.get(0).explicit)) { TextBuffer content = ExprProcessor.jmpWrapper(first, indent, true, tracer); content.setLength(content.length() - new_line_separator.length()); tracer.incrementCurrentSourceLine(-1); buf.append(content); } else { buf.appendIndent(indent).append("try {").appendLineSeparator(); tracer.incrementCurrentSourceLine(); buf.append(ExprProcessor.jmpWrapper(first, indent + 1, true, tracer)); buf.appendIndent(indent).append("}"); } buf.append(isFinally ? " finally" : " catch (" + vars.get(0).toJava(indent, tracer) + ")").append(" {").appendLineSeparator(); tracer.incrementCurrentSourceLine(); if (monitor != null) { buf.appendIndent(indent+1).append("if (").append(monitor.toJava(indent, tracer)).append(") {").appendLineSeparator(); tracer.incrementCurrentSourceLine(); } buf.append(ExprProcessor.jmpWrapper(handler, indent + 1 + (monitor != null ? 1 : 0), true, tracer)); if (monitor != null) { buf.appendIndent(indent + 1).append("}").appendLineSeparator(); tracer.incrementCurrentSourceLine(); } buf.appendIndent(indent).append("}").appendLineSeparator(); tracer.incrementCurrentSourceLine(); return buf; } @Override public void replaceStatement(Statement oldstat, Statement newstat) { if (handler == oldstat) { handler = newstat; } super.replaceStatement(oldstat, newstat); } @Override public Statement getSimpleCopy() { CatchAllStatement cas = new CatchAllStatement(); cas.isFinally = this.isFinally; if (this.monitor != null) { cas.monitor = new VarExprent(DecompilerContext.getCounterContainer().getCounterAndIncrement(CounterContainer.VAR_COUNTER), VarType.VARTYPE_INT, DecompilerContext.getVarProcessor()); } if (!this.vars.isEmpty()) { cas.vars.add(new VarExprent(DecompilerContext.getCounterContainer().getCounterAndIncrement(CounterContainer.VAR_COUNTER), new VarType(CodeConstants.TYPE_OBJECT, 0, "java/lang/Throwable"), DecompilerContext.getVarProcessor())); } return cas; } @Override public void initSimpleCopy() { first = stats.get(0); handler = stats.get(1); } // ***************************************************************************** // getter and setter methods // ***************************************************************************** public Statement getHandler() { return handler; } public boolean isFinally() { return isFinally; } public void setFinally(boolean isFinally) { this.isFinally = isFinally; } public VarExprent getMonitor() { return monitor; } public void setMonitor(VarExprent monitor) { this.monitor = monitor; } public List<VarExprent> getVars() { return vars; } }
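/*
 * An illustrative sketch, not decompiler output, of the two source shapes that
 * CatchAllStatement.toJava() above reconstructs: a plain try/finally when isFinally is set,
 * and otherwise a catch-all handler on java.lang.Throwable. In either shape the handler
 * body may additionally be wrapped in an "if (monitorVar)" guard when a monitor variable
 * is present; it is shown here only on the catch-all shape. Method names are placeholders.
 */
class CatchAllShapesSketch {
    void finallyShape() {
        try {
            doWork();
        } finally {
            cleanup();              // the handler statement emitted after " finally"
        }
    }

    void catchAllShape(boolean monitorVar) {
        try {
            doWork();
        } catch (Throwable t) {     // vars.get(0): a synthetic java/lang/Throwable variable
            if (monitorVar) {       // emitted only when a monitor expression exists
                cleanup();
            }
        }
    }

    void doWork() { }

    void cleanup() { }
}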
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.alexaforbusiness.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.protocol.StructuredPojo; import com.amazonaws.protocol.ProtocolMarshaller; /** * <p> * The attributes of a skill group. * </p> * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/alexaforbusiness-2017-11-09/SkillGroupData" target="_top">AWS * API Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class SkillGroupData implements Serializable, Cloneable, StructuredPojo { /** * <p> * The skill group ARN of a skill group. * </p> */ private String skillGroupArn; /** * <p> * The skill group name of a skill group. * </p> */ private String skillGroupName; /** * <p> * The description of a skill group. * </p> */ private String description; /** * <p> * The skill group ARN of a skill group. * </p> * * @param skillGroupArn * The skill group ARN of a skill group. */ public void setSkillGroupArn(String skillGroupArn) { this.skillGroupArn = skillGroupArn; } /** * <p> * The skill group ARN of a skill group. * </p> * * @return The skill group ARN of a skill group. */ public String getSkillGroupArn() { return this.skillGroupArn; } /** * <p> * The skill group ARN of a skill group. * </p> * * @param skillGroupArn * The skill group ARN of a skill group. * @return Returns a reference to this object so that method calls can be chained together. */ public SkillGroupData withSkillGroupArn(String skillGroupArn) { setSkillGroupArn(skillGroupArn); return this; } /** * <p> * The skill group name of a skill group. * </p> * * @param skillGroupName * The skill group name of a skill group. */ public void setSkillGroupName(String skillGroupName) { this.skillGroupName = skillGroupName; } /** * <p> * The skill group name of a skill group. * </p> * * @return The skill group name of a skill group. */ public String getSkillGroupName() { return this.skillGroupName; } /** * <p> * The skill group name of a skill group. * </p> * * @param skillGroupName * The skill group name of a skill group. * @return Returns a reference to this object so that method calls can be chained together. */ public SkillGroupData withSkillGroupName(String skillGroupName) { setSkillGroupName(skillGroupName); return this; } /** * <p> * The description of a skill group. * </p> * * @param description * The description of a skill group. */ public void setDescription(String description) { this.description = description; } /** * <p> * The description of a skill group. * </p> * * @return The description of a skill group. */ public String getDescription() { return this.description; } /** * <p> * The description of a skill group. * </p> * * @param description * The description of a skill group. * @return Returns a reference to this object so that method calls can be chained together. 
*/ public SkillGroupData withDescription(String description) { setDescription(description); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getSkillGroupArn() != null) sb.append("SkillGroupArn: ").append(getSkillGroupArn()).append(","); if (getSkillGroupName() != null) sb.append("SkillGroupName: ").append(getSkillGroupName()).append(","); if (getDescription() != null) sb.append("Description: ").append(getDescription()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof SkillGroupData == false) return false; SkillGroupData other = (SkillGroupData) obj; if (other.getSkillGroupArn() == null ^ this.getSkillGroupArn() == null) return false; if (other.getSkillGroupArn() != null && other.getSkillGroupArn().equals(this.getSkillGroupArn()) == false) return false; if (other.getSkillGroupName() == null ^ this.getSkillGroupName() == null) return false; if (other.getSkillGroupName() != null && other.getSkillGroupName().equals(this.getSkillGroupName()) == false) return false; if (other.getDescription() == null ^ this.getDescription() == null) return false; if (other.getDescription() != null && other.getDescription().equals(this.getDescription()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getSkillGroupArn() == null) ? 0 : getSkillGroupArn().hashCode()); hashCode = prime * hashCode + ((getSkillGroupName() == null) ? 0 : getSkillGroupName().hashCode()); hashCode = prime * hashCode + ((getDescription() == null) ? 0 : getDescription().hashCode()); return hashCode; } @Override public SkillGroupData clone() { try { return (SkillGroupData) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } @com.amazonaws.annotation.SdkInternalApi @Override public void marshall(ProtocolMarshaller protocolMarshaller) { com.amazonaws.services.alexaforbusiness.model.transform.SkillGroupDataMarshaller.getInstance().marshall(this, protocolMarshaller); } }
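/*
 * A small sketch of the null-safe field comparison idiom used by SkillGroupData.equals()
 * above (and by the other generated model classes in this collection): the XOR check
 * rejects the case where exactly one side is null, and equals() only runs when both sides
 * are non-null. The helper below is illustrative and not part of the SDK.
 */
class NullSafeEqualsSketch {
    static boolean fieldEquals(String a, String b) {
        if (a == null ^ b == null) {
            return false;                     // exactly one of the two is null
        }
        return a == null || a.equals(b);      // both null, or both non-null and equal
    }

    public static void main(String[] args) {
        System.out.println(fieldEquals(null, null)); // true
        System.out.println(fieldEquals("x", null));  // false
        System.out.println(fieldEquals("x", "x"));   // true
    }
}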
package org.jenkinsci.plugins.p4.changes; import com.perforce.p4java.core.ChangelistStatus; import com.perforce.p4java.core.IChangelistSummary; import com.perforce.p4java.core.IFix; import com.perforce.p4java.core.file.FileAction; import com.perforce.p4java.core.file.IFileSpec; import com.perforce.p4java.graph.ICommit; import com.perforce.p4java.impl.generic.core.Label; import hudson.model.Descriptor; import hudson.model.User; import hudson.scm.ChangeLogSet; import hudson.tasks.Mailer.UserProperty; import jenkins.model.Jenkins; import org.jenkinsci.plugins.p4.PerforceScm; import org.jenkinsci.plugins.p4.client.ConnectionHelper; import org.jenkinsci.plugins.p4.email.P4UserProperty; import org.kohsuke.stapler.export.Exported; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Collection; import java.util.Date; import java.util.List; import java.util.logging.Logger; public class P4ChangeEntry extends ChangeLogSet.Entry { private static Logger logger = Logger.getLogger(P4ChangeEntry.class.getName()); private int fileCountLimit = PerforceScm.DEFAULT_FILE_LIMIT; private P4Ref id; private User author; private Date date = new Date(); private String clientId = ""; private String msg = ""; private List<P4AffectedFile> affectedFiles; private boolean shelved; private boolean fileLimit = false; private List<IFix> jobs; public P4ChangeEntry(P4ChangeSet parent) { super(); setParent(parent); jobs = new ArrayList<IFix>(); affectedFiles = new ArrayList<P4AffectedFile>(); getFileCountLimit(); } public P4ChangeEntry() { getFileCountLimit(); } public void setChange(ConnectionHelper p4, IChangelistSummary changelist) throws Exception { // set id int changeId = changelist.getId(); id = new P4ChangeRef(changeId); // set author String user = changelist.getUsername(); author = User.get(user); // set email property on user String email = p4.getEmail(user); if (email != null && !email.isEmpty()) { P4UserProperty p4prop = new P4UserProperty(email); author.addProperty(p4prop); logger.fine("Setting email for user: " + user + ":" + email); // Set default email for Jenkins user if not defined UserProperty prop = author.getProperty(UserProperty.class); if (prop == null || prop.getAddress() == null || prop.getAddress().isEmpty()) { prop = new UserProperty(email); author.addProperty(prop); logger.fine("Setting default user: " + user + ":" + email); } } // set date of change date = changelist.getDate(); // set client id clientId = changelist.getClientId(); // set display message msg = changelist.getDescription(); // set list of file revisions in change List<IFileSpec> files; if (changelist.getStatus() == ChangelistStatus.PENDING) { files = p4.getShelvedFiles(changeId); shelved = true; } else { files = p4.getChangeFiles(changeId, fileCountLimit + 1); shelved = false; } if (files != null && files.size() > fileCountLimit) { fileLimit = true; files = files.subList(0, fileCountLimit); } // set list of affected paths/files affectedFiles = new ArrayList<P4AffectedFile>(); if (files != null) { for (IFileSpec item : files) { affectedFiles.add(new P4AffectedFile(item)); } } // set list of jobs in change this.jobs = p4.getJobs(changeId); } public void setLabel(ConnectionHelper p4, String labelId) throws Exception { Label label = (Label) p4.getLabel(labelId); // set id id = new P4LabelRef(labelId); // set author String user = label.getOwnerName(); user = (user != null && !user.isEmpty()) ? 
user : "unknown"; author = User.get(user); // set date of change date = label.getLastAccess(); // set client id clientId = labelId; // set display message msg = label.getDescription(); // set list of file revisions in change List<IFileSpec> files = p4.getLabelFiles(labelId, fileCountLimit + 1); if (files.size() > fileCountLimit) { fileLimit = true; files = files.subList(0, fileCountLimit); } // set list of affected files affectedFiles = new ArrayList<P4AffectedFile>(); for (IFileSpec item : files) { affectedFiles.add(new P4AffectedFile(item)); } } public void setGraphCommit(ConnectionHelper p4, String id) throws Exception { if (id == null || id.isEmpty() || !id.contains("@")) { return; } String[] parts = id.split("@"); if (parts.length != 2) { return; } String repo = parts[0]; String sha = parts[1]; setGraphCommit(p4, repo, sha); } public void setGraphCommit(ConnectionHelper p4, String repo, String sha) throws Exception { ICommit commit = p4.getGraphCommit(sha, repo); id = new P4GraphRef(repo, commit); // set author String user = commit.getAuthor(); user = (user != null && !user.isEmpty()) ? user : "unknown"; author = User.get(user); // set date of change date = commit.getDate(); // set client id clientId = commit.getAuthorEmail(); // set display message msg = commit.getDescription(); // set list of affected paths affectedFiles = new ArrayList<>(); List<IFileSpec> graphFiles = p4.getCommitFiles(repo, sha); for (IFileSpec item : graphFiles) { String path = item.getDepotPathString(); FileAction action = item.getAction(); affectedFiles.add(new P4AffectedFile(path, sha, action)); } if (affectedFiles.size() > fileCountLimit) { fileLimit = true; affectedFiles = affectedFiles.subList(0, fileCountLimit); } } @Exported public String getChangeNumber() { return id.toString(); } @Exported public String getChangeTime() { SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); return sdf.format(date); } public P4Ref getId() { return id; } public void setId(P4Ref value) { id = value; } @Override public User getAuthor() { // JENKINS-31169 if (author == null) { return User.getUnknown(); } return author; } public void setAuthor(String value) { author = User.get(value); } public Date getDate() { return (Date) date.clone(); } public void setDate(String value) { SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); try { date = sdf.parse(value); } catch (ParseException e) { // TODO Auto-generated catch block e.printStackTrace(); } } public void setClientId(String value) { clientId = value; } public String getClientId() { return clientId; } @Override public String getMsg() { return msg; } public int getRows() { String[] lines = msg.split("\r\n|\r|\n"); int rows = lines.length; rows = (rows > 10) ? 
10 : rows; return rows; } public void setMsg(String value) { msg = value; } // JENKINS-31306 @Override public Collection<String> getAffectedPaths() { Collection<String> affectedPaths = new ArrayList<String>(); for (P4AffectedFile item : getAffectedFiles()) { affectedPaths.add(item.getPath()); } return affectedPaths; } @Override public Collection<P4AffectedFile> getAffectedFiles() { return affectedFiles; } public void addAffectedFiles(P4AffectedFile file) { affectedFiles.add(file); } public boolean isFileLimit() { return fileLimit; } public String getAction(IFileSpec file) { FileAction action = file.getAction(); String s = action.name(); return s.replace("/", "_"); } public void setShelved(boolean value) { shelved = value; } public boolean isShelved() { return shelved; } public boolean isLabel() { return id.isLabel(); } public List<IFix> getJobs() { return jobs; } public void addJob(IFix job) { jobs.add(job); } public String getJobStatus(IFix job) { String status = job.getStatus(); return status; } public int getMaxLimit() { return fileCountLimit; } // For email-ext @Exported public long getTimestamp() { return getDate().getTime(); } // For email-ext @Exported public String getCommitId() { return getChangeNumber(); } private int getFileCountLimit() { int max = 0; Jenkins j = Jenkins.getInstance(); if (j != null) { Descriptor dsc = j.getDescriptor(PerforceScm.class); if (dsc instanceof PerforceScm.DescriptorImpl) { PerforceScm.DescriptorImpl p4scm = (PerforceScm.DescriptorImpl) dsc; max = p4scm.getMaxFiles(); } } fileCountLimit = (max > 0) ? max : PerforceScm.DEFAULT_FILE_LIMIT; return fileCountLimit; } }
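/*
 * A minimal sketch of the truncation idiom P4ChangeEntry uses above: the caller requests
 * limit + 1 items, and if more than 'limit' come back, the entry is flagged as truncated
 * and only the first 'limit' items are kept. Plain java.util.List stands in for the
 * Perforce file specs; no p4java APIs are used here.
 */
class FileLimitSketch {
    private boolean fileLimit = false;

    <T> java.util.List<T> cap(java.util.List<T> fetched, int limit) {
        if (fetched.size() > limit) {          // limit + 1 were requested, so this means truncation
            fileLimit = true;                  // corresponds to fileLimit = true in P4ChangeEntry
            return fetched.subList(0, limit);
        }
        return fetched;
    }

    boolean isFileLimit() {
        return fileLimit;
    }
}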
/* * Copyright 2000-2015 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jetbrains.plugins.groovy.util.dynamicMembers; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.Key; import com.intellij.openapi.util.UserDataHolderEx; import com.intellij.psi.*; import com.intellij.psi.scope.ElementClassHint; import com.intellij.psi.scope.PsiScopeProcessor; import com.intellij.util.containers.MultiMap; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jetbrains.plugins.groovy.extensions.NamedArgumentDescriptor; import org.jetbrains.plugins.groovy.lang.groovydoc.psi.api.GrDocComment; import org.jetbrains.plugins.groovy.lang.groovydoc.psi.api.GrDocTag; import org.jetbrains.plugins.groovy.lang.psi.GroovyPsiElementFactory; import org.jetbrains.plugins.groovy.lang.psi.api.statements.GrField; import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.GrReferenceExpression; import org.jetbrains.plugins.groovy.lang.psi.api.statements.params.GrParameterList; import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.GrTypeDefinition; import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.members.GrAccessorMethod; import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.members.GrMethod; import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.members.GrReflectedMethod; import org.jetbrains.plugins.groovy.lang.psi.impl.synthetic.GrLightMethodBuilder; import org.jetbrains.plugins.groovy.lang.psi.util.GrStaticChecker; import org.jetbrains.plugins.groovy.lang.resolve.ResolveUtil; import javax.swing.*; import java.util.*; import java.util.concurrent.ConcurrentHashMap; public class DynamicMemberUtils { public static final Key<Map<String, String>> COMMENT_KEY = Key.create("DynamicMemberUtils:COMMENT_KEY"); private static final Key<ConcurrentHashMap<String, ClassMemberHolder>> KEY = Key.create("DynamicMemberUtils"); private DynamicMemberUtils() { } public static ClassMemberHolder getMembers(@NotNull Project project, @NotNull String source) { ConcurrentHashMap<String, ClassMemberHolder> map = project.getUserData(KEY); if (map == null) { map = new ConcurrentHashMap<String, ClassMemberHolder>(); map = ((UserDataHolderEx)project).putUserDataIfAbsent(KEY, map); } ClassMemberHolder res = map.get(source); if (res == null) { res = new ClassMemberHolder(project, source); ClassMemberHolder oldValue = map.putIfAbsent(source, res); if (oldValue != null) { res = oldValue; } } assert source == res.myClassSource : "Store class sources in static constant, do not generate it in each call."; return res; } public static boolean process(PsiScopeProcessor processor, PsiClass psiClass, GrReferenceExpression ref, String classSource) { return process(processor, GrStaticChecker.isInStaticContext(ref, psiClass), ref, classSource); } public static boolean process(PsiScopeProcessor processor, boolean isInStaticContext, PsiElement place, String classSource) { ElementClassHint classHint = 
processor.getHint(ElementClassHint.KEY); String name = ResolveUtil.getNameHint(processor); ClassMemberHolder memberHolder = getMembers(place.getProject(), classSource); if (ResolveUtil.shouldProcessMethods(classHint)) { PsiMethod[] methods = isInStaticContext ? memberHolder.getStaticMethods(name) : memberHolder.getMethods(name); for (PsiMethod method : methods) { if (!processor.execute(method, ResolveState.initial())) return false; } } if (ResolveUtil.shouldProcessProperties(classHint)) { PsiField[] fields = isInStaticContext ? memberHolder.getStaticFields(name) : memberHolder.getFields(name); for (PsiField field : fields) { if (!processor.execute(field, ResolveState.initial())) return false; } } return true; } public static boolean checkVersion(PsiMethod method, String version) { String since = getCommentValue(method, "since"); if (since == null) return true; return version.compareTo(since) >= 0; } @Nullable public static String getCommentValue(PsiMethod method, String commentTagName) { Map<String, String> commentMap = method.getUserData(COMMENT_KEY); if (commentMap == null) return null; return commentMap.get(commentTagName); } public static class ClassMemberHolder { private final String myClassSource; private final GrTypeDefinition myClass; private final Map<String, PsiMethod[]> myMethodMap; private final Map<String, PsiField[]> myFieldMap; private final Map<String, PsiMethod[]> myStaticMethodMap; private final Map<String, PsiField[]> myStaticFieldMap; private final Map<String, PsiMethod[]> myNonStaticMethodMap; private final Map<String, PsiField[]> myNonStaticFieldMap; private ClassMemberHolder(Project project, String classSource) { myClassSource = classSource; final GroovyPsiElementFactory elementFactory = GroovyPsiElementFactory.getInstance(project); myClass = (GrTypeDefinition)elementFactory.createGroovyFile(classSource, false, null).getClasses()[0]; Map<String, String> classCommentMap = parseComment(myClass.getDocComment()); // Collect fields. myFieldMap = new HashMap<String, PsiField[]>(); myStaticFieldMap = new HashMap<String, PsiField[]>(); myNonStaticFieldMap = new HashMap<String, PsiField[]>(); GrField[] fields = myClass.getFields(); PsiField[] allFields = new PsiField[fields.length]; int i = 0; for (PsiField field : fields) { MyGrDynamicPropertyImpl dynamicField = new MyGrDynamicPropertyImpl(myClass, (GrField)field, null, classSource); Map<String, String> commentMap = parseComment(((GrField)field).getDocComment()); String originalInfo = commentMap.get("originalInfo"); if (originalInfo == null) { originalInfo = classCommentMap.get("originalInfo"); } dynamicField.setOriginalInfo(originalInfo); PsiField[] dynamicFieldArray = new PsiField[]{dynamicField}; if (field.hasModifierProperty(PsiModifier.STATIC)) { myStaticFieldMap.put(field.getName(), dynamicFieldArray); } else { myNonStaticFieldMap.put(field.getName(), dynamicFieldArray); } Object oldValue = myFieldMap.put(field.getName(), dynamicFieldArray); assert oldValue == null : "Duplicated field in dynamic class: " + myClass.getName() + ":" + field.getName(); allFields[i++] = dynamicField; } myFieldMap.put(null, allFields); // Collect methods.. 
checkDuplicatedMethods(myClass); MultiMap<String, PsiMethod> multiMap = new MultiMap<String, PsiMethod>(); MultiMap<String, PsiMethod> staticMultiMap = new MultiMap<String, PsiMethod>(); MultiMap<String, PsiMethod> nonStaticMultiMap = new MultiMap<String, PsiMethod>(); for (GrMethod method : myClass.getCodeMethods()) { GrDynamicMethodWithCache dynamicMethod = new GrDynamicMethodWithCache(method, classSource); Map<String, String> commentMap = parseComment(method.getDocComment()); if (!commentMap.isEmpty()) { dynamicMethod.putUserData(COMMENT_KEY, commentMap); } String originalInfo = commentMap.get("originalInfo"); if (originalInfo == null) { originalInfo = classCommentMap.get("originalInfo"); } dynamicMethod.setOriginalInfo(originalInfo); String kind = commentMap.get("kind"); if (kind == null) { kind = classCommentMap.get("kind"); } if (kind != null) { dynamicMethod.putUserData(GrLightMethodBuilder.KIND_KEY, kind); } multiMap.putValue(null, dynamicMethod); multiMap.putValue(method.getName(), dynamicMethod); if (method.hasModifierProperty(PsiModifier.STATIC)) { staticMultiMap.putValue(null, dynamicMethod); staticMultiMap.putValue(method.getName(), dynamicMethod); } else { nonStaticMultiMap.putValue(null, dynamicMethod); nonStaticMultiMap.putValue(method.getName(), dynamicMethod); } } myMethodMap = convertMap(multiMap); myStaticMethodMap = convertMap(staticMultiMap); myNonStaticMethodMap = convertMap(nonStaticMultiMap); } private static Map<String, String> parseComment(@Nullable GrDocComment comment) { if (comment == null) return Collections.emptyMap(); GrDocTag[] docTags = comment.getTags(); if (docTags.length == 0) return Collections.emptyMap(); Map<String, String> res = new HashMap<String, String>(); for (GrDocTag tag : docTags) { String tagText = tag.getText().trim(); int idx = tagText.indexOf(' '); if (idx != -1) { res.put(tag.getName(), tagText.substring(idx + 1).trim()); } } return res; } public GrTypeDefinition getParsedClass() { return myClass; } private static void checkDuplicatedMethods(PsiClass psiClass) { Set<String> existingMethods = new HashSet<String>(); for (PsiMethod psiMethod : psiClass.getMethods()) { if (!(psiMethod instanceof GrAccessorMethod) && !(psiMethod instanceof GrReflectedMethod) && !existingMethods.add(psiMethod.getText())) { throw new RuntimeException("Duplicated field in dynamic class: " + psiClass.getName() + ":" + psiMethod.getText()); } } } private static Map<String, PsiMethod[]> convertMap(MultiMap<String, PsiMethod> multiMap) { Map<String, PsiMethod[]> res = new HashMap<String, PsiMethod[]>(); for (String methodName : multiMap.keySet()) { Collection<PsiMethod> m = multiMap.get(methodName); res.put(methodName, m.toArray(new PsiMethod[m.size()])); } return res; } public PsiMethod[] getMethods() { return getMethods(null); } public PsiMethod[] getDynamicMethods(@Nullable String nameHint) { PsiMethod[] res = myNonStaticMethodMap.get(nameHint); if (res == null) { res = PsiMethod.EMPTY_ARRAY; } return res; } public PsiMethod[] getStaticMethods(@Nullable String nameHint) { PsiMethod[] res = myStaticMethodMap.get(nameHint); if (res == null) { res = PsiMethod.EMPTY_ARRAY; } return res; } public PsiMethod[] getMethods(@Nullable String nameHint) { PsiMethod[] res = myMethodMap.get(nameHint); if (res == null) { res = PsiMethod.EMPTY_ARRAY; } return res; } public PsiField[] getFields() { return getFields(null); } public PsiField[] getFields(@Nullable String nameHint) { PsiField[] res = myFieldMap.get(nameHint); if (res == null) { res = PsiField.EMPTY_ARRAY; } return 
res; } public PsiField[] getStaticFields(@Nullable String nameHint) { PsiField[] res = myStaticFieldMap.get(nameHint); if (res == null) { res = PsiField.EMPTY_ARRAY; } return res; } } public static boolean isDynamicElement(@Nullable PsiElement element) { return element instanceof DynamicElement; } public static boolean isDynamicElement(@Nullable PsiElement element, @NotNull String classSource) { return element instanceof DynamicElement && classSource.equals(((DynamicElement)element).getSource()); } public interface DynamicElement { String getSource(); PsiClass getSourceClass(); } private static class GrDynamicMethodWithCache extends GrDynamicMethodImpl implements DynamicElement, OriginInfoAwareElement { private final PsiTypeParameter[] myTypeParameters; private final GrParameterList myParameterList; private final Map<String, NamedArgumentDescriptor> namedParameters; private String myOriginalInfo; public final String mySource; public GrDynamicMethodWithCache(GrMethod method, String source) { super(method); myTypeParameters = super.getTypeParameters(); myParameterList = super.getParameterList(); namedParameters = super.getNamedParameters(); mySource = source; } @Override public String getText() { return myMethod.getText(); } @NotNull @Override public PsiTypeParameter[] getTypeParameters() { return myTypeParameters; } @NotNull @Override public GrParameterList getParameterList() { return myParameterList; } @NotNull @Override public Map<String, NamedArgumentDescriptor> getNamedParameters() { return namedParameters; } @Override public Icon getIcon(int flags) { return myMethod.getIcon(flags); } @Override public String getSource() { return mySource; } @Override public PsiClass getSourceClass() { return myMethod.getContainingClass(); } @Nullable @Override public String getOriginInfo() { return myOriginalInfo; } public void setOriginalInfo(String originalInfo) { myOriginalInfo = originalInfo; } } private static class MyGrDynamicPropertyImpl extends GrDynamicPropertyImpl implements DynamicElement, OriginInfoAwareElement { private final String mySource; private final PsiClass myClass; private String myOriginalInfo; private MyGrDynamicPropertyImpl(PsiClass containingClass, GrField field, PsiElement navigationalElement, String source) { super(null, field, navigationalElement); myClass = containingClass; mySource = source; } @Override public String getSource() { return mySource; } @Override public PsiClass getSourceClass() { return myClass; } @Nullable @Override public String getOriginInfo() { return myOriginalInfo; } public void setOriginalInfo(String originalInfo) { myOriginalInfo = originalInfo; } } }
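/*
 * A standalone sketch of the caching idiom DynamicMemberUtils.getMembers() above applies:
 * look the value up first, build it only on a miss, and let ConcurrentHashMap.putIfAbsent()
 * decide the winner when two threads race, so every caller ends up with the same instance.
 * The Project/UserDataHolder indirection is omitted; a plain map stands in for it.
 */
class PutIfAbsentCacheSketch<K, V> {
    private final java.util.concurrent.ConcurrentHashMap<K, V> cache =
            new java.util.concurrent.ConcurrentHashMap<>();

    V get(K key, java.util.function.Function<K, V> factory) {
        V value = cache.get(key);
        if (value == null) {
            V created = factory.apply(key);
            V previous = cache.putIfAbsent(key, created); // non-null means another thread won the race
            value = (previous != null) ? previous : created;
        }
        return value;
    }
}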
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.search.searchafter; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.text.Text; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.test.ESTestCase; import org.hamcrest.Matchers; import java.io.IOException; import java.util.Collections; import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode; public class SearchAfterBuilderTests extends ESTestCase { private static final int NUMBER_OF_TESTBUILDERS = 20; private static SearchAfterBuilder randomSearchAfterBuilder() throws IOException { int numSearchFrom = randomIntBetween(1, 10); SearchAfterBuilder searchAfterBuilder = new SearchAfterBuilder(); Object[] values = new Object[numSearchFrom]; for (int i = 0; i < numSearchFrom; i++) { int branch = randomInt(9); switch (branch) { case 0: values[i] = randomInt(); break; case 1: values[i] = randomFloat(); break; case 2: values[i] = randomLong(); break; case 3: values[i] = randomDouble(); break; case 4: values[i] = randomAsciiOfLengthBetween(5, 20); break; case 5: values[i] = randomBoolean(); break; case 6: values[i] = randomByte(); break; case 7: values[i] = randomShort(); break; case 8: values[i] = new Text(randomAsciiOfLengthBetween(5, 20)); break; case 9: values[i] = null; break; } } searchAfterBuilder.setSortValues(values); return searchAfterBuilder; } // We build a json version of the search_after first in order to // ensure that every number type remain the same before/after xcontent (de)serialization. // This is not a problem because the final type of each field value is extracted from associated sort field. // This little trick ensure that equals and hashcode are the same when using the xcontent serialization. 
private SearchAfterBuilder randomJsonSearchFromBuilder() throws IOException { int numSearchAfter = randomIntBetween(1, 10); XContentBuilder jsonBuilder = XContentFactory.jsonBuilder(); jsonBuilder.startObject(); jsonBuilder.startArray("search_after"); for (int i = 0; i < numSearchAfter; i++) { int branch = randomInt(9); switch (branch) { case 0: jsonBuilder.value(randomInt()); break; case 1: jsonBuilder.value(randomFloat()); break; case 2: jsonBuilder.value(randomLong()); break; case 3: jsonBuilder.value(randomDouble()); break; case 4: jsonBuilder.value(randomAsciiOfLengthBetween(5, 20)); break; case 5: jsonBuilder.value(randomBoolean()); break; case 6: jsonBuilder.value(randomByte()); break; case 7: jsonBuilder.value(randomShort()); break; case 8: jsonBuilder.value(new Text(randomAsciiOfLengthBetween(5, 20))); break; case 9: jsonBuilder.nullValue(); break; } } jsonBuilder.endArray(); jsonBuilder.endObject(); XContentParser parser = createParser(JsonXContent.jsonXContent, jsonBuilder.bytes()); parser.nextToken(); parser.nextToken(); parser.nextToken(); return SearchAfterBuilder.fromXContent(parser, null); } private static SearchAfterBuilder serializedCopy(SearchAfterBuilder original) throws IOException { return copyWriteable(original, new NamedWriteableRegistry(Collections.emptyList()), SearchAfterBuilder::new); } public void testSerialization() throws Exception { for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) { SearchAfterBuilder original = randomSearchAfterBuilder(); SearchAfterBuilder deserialized = serializedCopy(original); assertEquals(deserialized, original); assertEquals(deserialized.hashCode(), original.hashCode()); assertNotSame(deserialized, original); } } public void testEqualsAndHashcode() throws Exception { for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) { // TODO add equals tests with mutating the original object checkEqualsAndHashCode(randomSearchAfterBuilder(), SearchAfterBuilderTests::serializedCopy); } } public void testFromXContent() throws Exception { for (int runs = 0; runs < 20; runs++) { SearchAfterBuilder searchAfterBuilder = randomJsonSearchFromBuilder(); XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); if (randomBoolean()) { builder.prettyPrint(); } builder.startObject(); searchAfterBuilder.innerToXContent(builder); builder.endObject(); XContentParser parser = createParser(shuffleXContent(builder)); new QueryParseContext(parser, ParseFieldMatcher.STRICT); parser.nextToken(); parser.nextToken(); parser.nextToken(); SearchAfterBuilder secondSearchAfterBuilder = SearchAfterBuilder.fromXContent(parser, null); assertNotSame(searchAfterBuilder, secondSearchAfterBuilder); assertEquals(searchAfterBuilder, secondSearchAfterBuilder); assertEquals(searchAfterBuilder.hashCode(), secondSearchAfterBuilder.hashCode()); } } public void testWithNullArray() throws Exception { SearchAfterBuilder builder = new SearchAfterBuilder(); try { builder.setSortValues(null); fail("Should fail on null array."); } catch (NullPointerException e) { assertThat(e.getMessage(), Matchers.equalTo("Values cannot be null.")); } } public void testWithEmptyArray() throws Exception { SearchAfterBuilder builder = new SearchAfterBuilder(); try { builder.setSortValues(new Object[0]); fail("Should fail on empty array."); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), Matchers.equalTo("Values must contains at least one value.")); } } /** * Explicitly tests what you can't list as a sortValue. 
What you can list is tested by {@link #randomSearchAfterBuilder()}. */ public void testBadTypes() throws IOException { randomSearchFromBuilderWithSortValueThrows(new Object()); randomSearchFromBuilderWithSortValueThrows(new GeoPoint(0, 0)); randomSearchFromBuilderWithSortValueThrows(randomSearchAfterBuilder()); randomSearchFromBuilderWithSortValueThrows(this); } private static void randomSearchFromBuilderWithSortValueThrows(Object containing) throws IOException { // Get a valid one SearchAfterBuilder builder = randomSearchAfterBuilder(); // Now replace its values with one containing the passed in object Object[] values = builder.getSortValues(); values[between(0, values.length - 1)] = containing; Exception e = expectThrows(IllegalArgumentException.class, () -> builder.setSortValues(values)); assertEquals(e.getMessage(), "Can't handle search_after field value of type [" + containing.getClass() + "]"); } }
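/*
 * A framework-free sketch of the "expect an exception and inspect it" pattern the tests
 * above rely on, both in the try/fail/catch form and via expectThrows. This helper is
 * illustrative only and is not the ESTestCase implementation.
 */
class ExpectThrowsSketch {
    static <T extends Throwable> T expectThrows(Class<T> expected, Runnable code) {
        try {
            code.run();
        } catch (Throwable t) {
            if (expected.isInstance(t)) {
                return expected.cast(t);       // the caller can now assert on the message
            }
            throw new AssertionError("Expected " + expected.getName() + " but got " + t.getClass().getName(), t);
        }
        throw new AssertionError("Expected " + expected.getName() + " but nothing was thrown");
    }

    public static void main(String[] args) {
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
                () -> { throw new IllegalArgumentException("boom"); });
        System.out.println(e.getMessage()); // boom
    }
}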
package org.wiztools.restclient.ui.reqbody;

import java.awt.BorderLayout;
import java.awt.Component;
import java.awt.FlowLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.text.MessageFormat;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import javax.annotation.PostConstruct;
import javax.inject.Inject;
import javax.swing.*;
import javax.swing.table.AbstractTableModel;
import org.wiztools.restclient.bean.*;
import org.wiztools.restclient.ui.RESTView;
import org.wiztools.restclient.ui.UIUtil;

/**
 *
 * @author subwiz
 */
public class ReqBodyPanelMultipart extends JPanel implements ReqBodyPanel {

    @Inject private RESTView view;

    @Inject private AddMultipartFileDialog jd_addFileDialog;
    @Inject private AddMultipartStringDialog jd_addStringDialog;
    @Inject private MultipartOptionsDialog jd_options;

    private final JButton jb_string = new JButton("String");
    private final JButton jb_file = new JButton("File");
    private final JButton jb_config = new JButton(
            UIUtil.getIconFromClasspath("org/wiztools/restclient/cog.png"));

    private final MultipartTableModel model = new MultipartTableModel();
    private final JTable jt = new JTable(model);

    private class MultipartTableModel extends AbstractTableModel {
        private final String[] columnNames = new String[]{"Type", "Content-type", "Name", "Part"};
        private final LinkedList<ReqEntityPart> list = new LinkedList<>();

        @Override
        public int getRowCount() {
            return list.size();
        }

        @Override
        public int getColumnCount() {
            return 4;
        }

        @Override
        public String getColumnName(int columnIndex) {
            return columnNames[columnIndex];
        }

        @Override
        public boolean isCellEditable(int rowIndex, int columnIndex) {
            return false;
        }

        @Override
        public Object getValueAt(int rowIndex, int columnIndex) {
            ReqEntityPart part = list.get(rowIndex);
            if(columnIndex == 0) {
                if(part instanceof ReqEntityStringPart) {
                    return "String";
                } else if(part instanceof ReqEntityFilePart) {
                    return "File";
                }
            } else if(columnIndex == 1) {
                return part.getContentType();
            } else if(columnIndex == 2) {
                return part.getName();
            } else {
                if(part instanceof ReqEntityStringPart) {
                    return ((ReqEntityStringPart)part).getPart();
                } else if(part instanceof ReqEntityFilePart) {
                    return ((ReqEntityFilePart)part).getPart();
                }
            }
            throw new IllegalArgumentException("Should never come here!");
        }

        public void addPartFirst(ReqEntityPart part) {
            list.addFirst(part);
            fireTableDataChanged();
        }

        public void addPartLast(ReqEntityPart part) {
            list.addLast(part);
            fireTableDataChanged();
        }

        public ReqEntityPart getEntityInRow(int row) {
            return list.get(row);
        }

        public void removeRow(int row) {
            list.remove(row);
            fireTableDataChanged();
        }

        public void clear() {
            list.clear();
            fireTableDataChanged();
        }
    }

    @PostConstruct
    protected void init() {
        // Listeners:
        final AddMultipartPartListener listener = new AddMultipartPartListener() {
            @Override
            public void addPart(ReqEntityPart part) {
                model.addPartFirst(part);
            }
        };
        jd_addStringDialog.addMultipartPartListener(listener);
        jd_addFileDialog.addMultipartPartListener(listener);

        // Table popup:
        JPopupMenu menu = new JPopupMenu();
        JMenuItem jmi_rm = new JMenuItem("Delete selected");
        jmi_rm.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) {
                final int[] rows = jt.getSelectedRows();
                Arrays.sort(rows);
                if(rows != null && rows.length > 0) {
                    int i = 0;
                    for(int row: rows) {
                        row = row - i; // the number of rows previously deleted should be accounted for!
                        model.removeRow(row);
                        i++;
                    }
                    view.setStatusMessage(MessageFormat.format("Deleted {0} row(s)", i));
                } else {
                    view.setStatusMessage("No row(s) selected!");
                }
            }
        });
        menu.add(jmi_rm);

        JMenuItem jmi_view = new JMenuItem("Quick view");
        jmi_view.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) {
                quickView();
            }
        });
        menu.add(jmi_view);
        jt.setComponentPopupMenu(menu);

        // Layouts:
        setLayout(new BorderLayout());
        { // North:
            JPanel jp_border = new JPanel(new BorderLayout(0, 0));
            JPanel jp_center = new JPanel(new FlowLayout(FlowLayout.LEFT));
            jp_center.add(new JLabel("Add Part: "));
            { // String button:
                jb_string.addActionListener(new ActionListener() {
                    @Override
                    public void actionPerformed(ActionEvent e) {
                        jd_addStringDialog.setVisible(true);
                    }
                });
                jp_center.add(jb_string);
            }
            { // file button:
                jb_file.addActionListener(new ActionListener() {
                    @Override
                    public void actionPerformed(ActionEvent e) {
                        jd_addFileDialog.setVisible(true);
                    }
                });
                jp_center.add(jb_file);
            }
            jp_border.add(jp_center, BorderLayout.CENTER);

            JPanel jp_east = new JPanel(new FlowLayout(FlowLayout.RIGHT));
            { // config button:
                jb_config.setToolTipText("Set multipart mode");
                jb_config.addActionListener(new ActionListener() {
                    @Override
                    public void actionPerformed(ActionEvent e) {
                        jd_options.setVisible(true);
                    }
                });
                jp_east.add(jb_config);
            }
            jp_border.add(jp_east, BorderLayout.EAST);
            add(jp_border, BorderLayout.NORTH);
        }

        // Center:
        JScrollPane jsp = new JScrollPane(jt);
        add(jsp, BorderLayout.CENTER);
    }

    private void quickView() {
        final int row = jt.getSelectedRow();
        if(row != -1) {
            ReqEntityPart entity = model.getEntityInRow(row);
            view.showMessage("Quick View", entity.toString());
        }
    }

    @Override
    public void enableBody() {
        jb_string.setEnabled(true);
        jb_file.setEnabled(true);
        jb_config.setEnabled(true);
        jt.setEnabled(true);
        jd_options.setEnabled(true);
    }

    @Override
    public void disableBody() {
        jb_string.setEnabled(false);
        jb_file.setEnabled(false);
        jb_config.setEnabled(false);
        jt.setEnabled(false);
        jd_options.setEnabled(false);
    }

    @Override
    public void clear() {
        jd_options.clear();
        model.clear();
    }

    @Override
    public void setEntity(ReqEntity entity) {
        if(entity instanceof ReqEntityMultipart) {
            ReqEntityMultipart e = (ReqEntityMultipart) entity;
            MultipartSubtype type = e.getSubtype();
            jd_options.setSelectedSubtype(type);
            MultipartMode mode = e.getMode();
            jd_options.setSelectedMode(mode);
            List<ReqEntityPart> parts = e.getBody();
            for(ReqEntityPart part: parts) {
                model.addPartLast(part);
            }
        }
    }

    @Override
    public ReqEntity getEntity() {
        MultipartSubtype type = jd_options.getSelectedSubtype();
        MultipartMode mode = jd_options.getSelectedMode();
        ReqEntity entity = new ReqEntityMultipartBean(
                (LinkedList<ReqEntityPart>) model.list.clone(), mode, type);
        return entity;
    }

    @Override
    public Component getComponent() {
        return this;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.kafka.streams.state.internals;

import org.apache.kafka.common.MetricName;
import org.apache.kafka.common.metrics.JmxReporter;
import org.apache.kafka.common.metrics.KafkaMetric;
import org.apache.kafka.common.metrics.Metrics;
import org.apache.kafka.common.metrics.Sensor;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.utils.Bytes;
import org.apache.kafka.common.utils.MockTime;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.processor.TaskId;
import org.apache.kafka.streams.processor.internals.InternalProcessorContext;
import org.apache.kafka.streams.processor.internals.metrics.StreamsMetricsImpl;
import org.apache.kafka.streams.state.KeyValueIterator;
import org.apache.kafka.streams.state.KeyValueStore;
import org.apache.kafka.test.KeyValueIteratorStub;
import org.easymock.EasyMockRule;
import org.easymock.Mock;
import org.easymock.MockType;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameter;
import org.junit.runners.Parameterized.Parameters;

import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

import static org.apache.kafka.common.utils.Utils.mkEntry;
import static org.apache.kafka.common.utils.Utils.mkMap;
import static org.apache.kafka.streams.processor.internals.metrics.StreamsMetricsImpl.ROLLUP_VALUE;
import static org.easymock.EasyMock.anyObject;
import static org.easymock.EasyMock.aryEq;
import static org.easymock.EasyMock.eq;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.expectLastCall;
import static org.easymock.EasyMock.mock;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.verify;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.not;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertThrows;
import static org.junit.Assert.assertTrue;

@RunWith(Parameterized.class)
public class MeteredKeyValueStoreTest {

    @Rule
    public EasyMockRule rule = new EasyMockRule(this);

    private static final String STORE_TYPE = "scope";
    private static final String STORE_LEVEL_GROUP_FROM_0100_TO_24 = "stream-" + STORE_TYPE + "-state-metrics";
    private static final String STORE_LEVEL_GROUP = "stream-state-metrics";
    private static final String THREAD_ID_TAG_KEY_FROM_0100_TO_24 = "client-id";
    private static final String THREAD_ID_TAG_KEY = "thread-id";

    private final String threadId = Thread.currentThread().getName();
    private final TaskId taskId = new TaskId(0, 0);
    @Mock(type = MockType.NICE)
    private KeyValueStore<Bytes, byte[]> inner;
    @Mock(type = MockType.NICE)
    private InternalProcessorContext context;

    private MeteredKeyValueStore<String, String> metered;
    private final String key = "key";
    private final Bytes keyBytes = Bytes.wrap(key.getBytes());
    private final String value = "value";
    private final byte[] valueBytes = value.getBytes();
    private final KeyValue<Bytes, byte[]> byteKeyValuePair = KeyValue.pair(keyBytes, valueBytes);
    private final Metrics metrics = new Metrics();
    private String storeLevelGroup;
    private String threadIdTagKey;
    private Map<String, String> tags;

    @Parameters(name = "{0}")
    public static Collection<Object[]> data() {
        return Arrays.asList(new Object[][] {
            {StreamsConfig.METRICS_LATEST},
            {StreamsConfig.METRICS_0100_TO_24}
        });
    }

    @Parameter
    public String builtInMetricsVersion;

    @Before
    public void before() {
        metered = new MeteredKeyValueStore<>(
            inner,
            STORE_TYPE,
            new MockTime(),
            Serdes.String(),
            Serdes.String()
        );
        metrics.config().recordLevel(Sensor.RecordingLevel.DEBUG);
        expect(context.metrics())
            .andReturn(new StreamsMetricsImpl(metrics, "test", builtInMetricsVersion)).anyTimes();
        expect(context.taskId()).andReturn(taskId).anyTimes();
        expect(inner.name()).andReturn("metered").anyTimes();
        storeLevelGroup = StreamsConfig.METRICS_0100_TO_24.equals(builtInMetricsVersion)
            ? STORE_LEVEL_GROUP_FROM_0100_TO_24 : STORE_LEVEL_GROUP;
        threadIdTagKey = StreamsConfig.METRICS_0100_TO_24.equals(builtInMetricsVersion)
            ? THREAD_ID_TAG_KEY_FROM_0100_TO_24 : THREAD_ID_TAG_KEY;
        tags = mkMap(
            mkEntry(threadIdTagKey, threadId),
            mkEntry("task-id", taskId.toString()),
            mkEntry(STORE_TYPE + "-state-id", "metered")
        );
    }

    private void init() {
        replay(inner, context);
        metered.init(context, metered);
    }

    @Test
    public void testMetrics() {
        init();
        final JmxReporter reporter = new JmxReporter("kafka.streams");
        metrics.addReporter(reporter);
        assertTrue(reporter.containsMbean(String.format(
            "kafka.streams:type=%s,%s=%s,task-id=%s,%s-state-id=%s",
            storeLevelGroup,
            threadIdTagKey,
            threadId,
            taskId.toString(),
            STORE_TYPE,
            "metered"
        )));
        if (StreamsConfig.METRICS_0100_TO_24.equals(builtInMetricsVersion)) {
            assertTrue(reporter.containsMbean(String.format(
                "kafka.streams:type=%s,%s=%s,task-id=%s,%s-state-id=%s",
                storeLevelGroup,
                threadIdTagKey,
                threadId,
                taskId.toString(),
                STORE_TYPE,
                ROLLUP_VALUE
            )));
        }
    }

    @Test
    public void shouldWriteBytesToInnerStoreAndRecordPutMetric() {
        inner.put(eq(keyBytes), aryEq(valueBytes));
        expectLastCall();
        init();

        metered.put(key, value);

        final KafkaMetric metric = metric("put-rate");
        assertTrue((Double) metric.metricValue() > 0);
        verify(inner);
    }

    @Test
    public void shouldGetBytesFromInnerStoreAndReturnGetMetric() {
        expect(inner.get(keyBytes)).andReturn(valueBytes);
        init();

        assertThat(metered.get(key), equalTo(value));

        final KafkaMetric metric = metric("get-rate");
        assertTrue((Double) metric.metricValue() > 0);
        verify(inner);
    }

    @Test
    public void shouldPutIfAbsentAndRecordPutIfAbsentMetric() {
        expect(inner.putIfAbsent(eq(keyBytes), aryEq(valueBytes))).andReturn(null);
        init();

        metered.putIfAbsent(key, value);

        final KafkaMetric metric = metric("put-if-absent-rate");
        assertTrue((Double) metric.metricValue() > 0);
        verify(inner);
    }

    @SuppressWarnings("unchecked")
    @Test
    public void shouldPutAllToInnerStoreAndRecordPutAllMetric() {
        inner.putAll(anyObject(List.class));
        expectLastCall();
        init();

        metered.putAll(Collections.singletonList(KeyValue.pair(key, value)));

        final KafkaMetric metric = metric("put-all-rate");
        assertTrue((Double) metric.metricValue() > 0);
        verify(inner);
    }

    @Test
    public void shouldDeleteFromInnerStoreAndRecordDeleteMetric() {
        expect(inner.delete(keyBytes)).andReturn(valueBytes);
        init();

        metered.delete(key);

        final KafkaMetric metric = metric("delete-rate");
        assertTrue((Double) metric.metricValue() > 0);
        verify(inner);
    }

    @Test
    public void shouldGetRangeFromInnerStoreAndRecordRangeMetric() {
        expect(inner.range(keyBytes, keyBytes))
            .andReturn(new KeyValueIteratorStub<>(Collections.singletonList(byteKeyValuePair).iterator()));
        init();

        final KeyValueIterator<String, String> iterator = metered.range(key, key);
        assertThat(iterator.next().value, equalTo(value));
        assertFalse(iterator.hasNext());
        iterator.close();

        final KafkaMetric metric = metric("range-rate");
        assertTrue((Double) metric.metricValue() > 0);
        verify(inner);
    }

    @Test
    public void shouldGetAllFromInnerStoreAndRecordAllMetric() {
        expect(inner.all())
            .andReturn(new KeyValueIteratorStub<>(Collections.singletonList(byteKeyValuePair).iterator()));
        init();

        final KeyValueIterator<String, String> iterator = metered.all();
        assertThat(iterator.next().value, equalTo(value));
        assertFalse(iterator.hasNext());
        iterator.close();

        final KafkaMetric metric = metric(new MetricName("all-rate", storeLevelGroup, "", tags));
        assertTrue((Double) metric.metricValue() > 0);
        verify(inner);
    }

    @Test
    public void shouldFlushInnerWhenFlushTimeRecords() {
        inner.flush();
        expectLastCall().once();
        init();

        metered.flush();

        final KafkaMetric metric = metric("flush-rate");
        assertTrue((Double) metric.metricValue() > 0);
        verify(inner);
    }

    private interface CachedKeyValueStore extends KeyValueStore<Bytes, byte[]>, CachedStateStore<byte[], byte[]> { }

    @SuppressWarnings("unchecked")
    @Test
    public void shouldSetFlushListenerOnWrappedCachingStore() {
        final CachedKeyValueStore cachedKeyValueStore = mock(CachedKeyValueStore.class);

        expect(cachedKeyValueStore.setFlushListener(anyObject(CacheFlushListener.class), eq(false))).andReturn(true);
        replay(cachedKeyValueStore);

        metered = new MeteredKeyValueStore<>(
            cachedKeyValueStore,
            STORE_TYPE,
            new MockTime(),
            Serdes.String(),
            Serdes.String()
        );
        assertTrue(metered.setFlushListener(null, false));

        verify(cachedKeyValueStore);
    }

    @Test
    public void shouldNotThrowNullPointerExceptionIfGetReturnsNull() {
        expect(inner.get(Bytes.wrap("a".getBytes()))).andReturn(null);

        init();
        assertNull(metered.get("a"));
    }

    @Test
    public void shouldNotSetFlushListenerOnWrappedNoneCachingStore() {
        assertFalse(metered.setFlushListener(null, false));
    }

    @Test
    public void shouldRemoveMetricsOnClose() {
        inner.close();
        expectLastCall();
        init(); // replays "inner"

        // There's always a "count" metric registered
        assertThat(storeMetrics(), not(empty()));
        metered.close();
        assertThat(storeMetrics(), empty());
        verify(inner);
    }

    @Test
    public void shouldRemoveMetricsEvenIfWrappedStoreThrowsOnClose() {
        inner.close();
        expectLastCall().andThrow(new RuntimeException("Oops!"));
        init(); // replays "inner"

        assertThat(storeMetrics(), not(empty()));
        assertThrows(RuntimeException.class, metered::close);
        assertThat(storeMetrics(), empty());
        verify(inner);
    }

    private KafkaMetric metric(final MetricName metricName) {
        return this.metrics.metric(metricName);
    }

    private KafkaMetric metric(final String name) {
        return metrics.metric(new MetricName(name, storeLevelGroup, "", tags));
    }

    private List<MetricName> storeMetrics() {
        return metrics.metrics()
                      .keySet()
                      .stream()
                      .filter(name -> name.group().equals(storeLevelGroup) && name.tags().equals(tags))
                      .collect(Collectors.toList());
    }
}