// [Twin] Copyright eBay Inc., Twin authors, and other contributors.
// This file is provided to you under the terms of the Apache License, Version 2.0.
// See LICENSE.txt and NOTICE.txt for license and copyright information.
package org.ebayopensource.twin.json;
import java.io.*;
import java.lang.reflect.*;
import java.util.*;
/**
* Utility methods for converting Java objects to and from JSON strings.
* <p>
* Objects implementing the JSONable interface are converted via their toJSON() method before serialization.
* <p>
* JSONRecognizers can be used to turn Maps into custom objects on deserialization.
* <p>
* Wraps the json.org library. There were some methods to convert between Java objects
* and json.org objects, but these are now deprecated.
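* <p>
* A minimal usage sketch (illustrative only; output shown in comments):
* <pre>{@code
* // Serialization: maps, lists, arrays, numbers, strings, booleans and nulls are supported directly.
* String json = JSON.encode(Collections.singletonMap("answer", 42));   // {"answer":42}
*
* // Deserialization: without recognizers, JSON objects come back as Map<String,Object>.
* Object value = JSON.decode("[1, 2.5, \"three\", null]");             // a List<Object>
* }</pre>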
*/
public class JSON {
/**
* Convert a Java object to a string using JSON serialization
* @throws IllegalArgumentException on serialization error
*/
public static String toString(Object data) {
return encode(data);
}
/**
* Convert a Java object to a string using JSON serialization
* @throws IllegalArgumentException on serialization error
*/
public static String encode(Object data) {
StringWriter sw = new StringWriter();
try {
encode(data, sw);
} catch (IOException e) {
throw new RuntimeException(e);
}
return sw.toString();
}
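/**
 * Convert a Java object to JSON and write it to the given stream, encoded as UTF-8.
 * @throws IOException if writing to the stream fails
 */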
public static void encode(Object data, OutputStream stream) throws IOException {
Writer writer = new OutputStreamWriter(stream, "UTF-8");
encode(data, writer);
}
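/**
 * Convert a Java object to JSON and write it to the given writer.
 * Handles JSONable objects (via toJSON()), numbers, booleans, characters, enums (by name),
 * arrays, Maps, Lists and Strings; any other type causes an IllegalArgumentException.
 */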
public static void encode(Object data, Writer writer) throws IOException {
while(data instanceof JSONable)
data = ((JSONable)data).toJSON();
if(data instanceof Double || data instanceof Float) {
double value = ((Number)data).doubleValue();
if(Double.isInfinite(value) || Double.isNaN(value)) // JSON cannot represent these
data = null;
else {
writer.write(String.valueOf(value));
return;
}
}
if(data == null) {
writer.write("null");
return;
}
// double and float already handled.
if(data instanceof Number || data instanceof Boolean) {
writer.write(data.toString());
return;
}
if(data instanceof Character)
data = data.toString();
if(data instanceof Enum<?>)
data = ((Enum<?>)data).name();
if(data.getClass().isArray()) {
ArrayList<Object> list = new ArrayList<Object>();
int length = Array.getLength(data);
for(int i=0; i<length; i++)
list.add(Array.get(data, i));
data = list; // continue
}
if(data instanceof Map<?,?>) {
writer.write('{');
boolean any=false;
for(Map.Entry<?, ?> entry : ((Map<?,?>)data).entrySet()) {
if(any)
writer.write(',');
encode(String.valueOf(entry.getKey()), writer);
writer.write(':');
encode(entry.getValue(), writer);
any = true;
}
writer.write('}');
return;
}
if(data instanceof List<?>) {
writer.write('[');
boolean any=false;
for(Object item : (List<?>)data) {
if(any)
writer.write(',');
encode(item, writer);
any = true;
}
writer.write(']');
return;
}
if(data instanceof String) {
String string = (String)data;
char[] hex = new char[]{'\\', 'u', '0', '0', '0', '0'};
writer.write('"');
for(int i=0; i<string.length(); i++) {
char c = string.charAt(i);
switch(c) {
case '\r':
writer.write("\\r");
break;
case '\n':
writer.write("\\n");
break;
case '\b':
writer.write("\\b");
break;
case '\f':
writer.write("\\f");
break;
case '\t':
writer.write("\\t");
break;
case '"':
case '\\':
writer.write('\\'); // fall thru
writer.write(c);
break;
default:
if(c < 0x20 || c >= 0x80) {
hex[2] = HEX[(c >> 12)&0xf];
hex[3] = HEX[(c >> 8)&0xf];
hex[4] = HEX[(c >> 4)&0xf];
hex[5] = HEX[(c >> 0)&0xf];
writer.write(hex);
} else {
writer.write(c);
}
break;
}
}
writer.write('"');
return;
}
throw new IllegalArgumentException("Cannot encode "+data.getClass().getName()+": "+data);
}
private static final char[] HEX = "0123456789abcdef".toCharArray();
/**
* Convert a String to a Java object using JSON deserialization
* @throws IllegalArgumentException on deserialization error
*/
public static Object decode(String text, JSONRecognizer... recognizers) {
PushbackReader reader = new PushbackReader(new StringReader(text));
try {
return decode(reader, recognizers);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
public static Object decode(InputStream in, JSONRecognizer... recognizers) throws IOException {
PushbackReader reader = new PushbackReader(new InputStreamReader(in, "UTF-8"));
return decode(reader, recognizers);
}
public static Object decode(PushbackReader reader, JSONRecognizer... recognizers) throws IOException {
int start = skipWhitespace(reader);
switch(start) {
case -1:
throw new IllegalArgumentException("EOF at start of decode");
case '"':
return recognize(readString(reader, recognizers), recognizers);
case '{':
return recognize(readMap(reader, recognizers), recognizers);
case '[':
return recognize(readList(reader, recognizers), recognizers);
case 't': case 'f': case 'n':
return recognize(readTrueFalseNull(reader, recognizers), recognizers);
case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case '-':
return recognize(readNumber(reader, recognizers), recognizers);
default:
throw new IllegalArgumentException("Unexpected character "+(char)start+" at start of decode");
}
}
private static Boolean readTrueFalseNull(PushbackReader in, JSONRecognizer[] recognizers) throws IOException {
int[] data = new int[5];
data[0] = in.read();
data[1] = in.read();
data[2] = in.read();
data[3] = in.read();
if(data[3] < 0)
throw new IllegalArgumentException("End of file while reading bareword");
switch(data[0]) {
case 't':
if(!(data[1] == 'r' && data[2] == 'u' && data[3] == 'e'))
throw new IllegalArgumentException("Expected 'true' after reading 't'");
return true;
case 'n':
if(!(data[1] == 'u' && data[2] == 'l' && data[3] == 'l'))
throw new IllegalArgumentException("Expected 'null' after reading 'n'");
return null;
case 'f':
data[4] = in.read();
if(data[4] < 0)
throw new IllegalArgumentException("End of file while reading bareword");
if(!(data[1] == 'a' && data[2] == 'l' && data[3] == 's' && data[4] == 'e'))
throw new IllegalArgumentException("Expected 'false' after reading 'f'");
return false;
}
throw new IllegalStateException();
}
private static Number readNumber(PushbackReader in, JSONRecognizer[] recognizers) throws IOException {
StringBuffer sb = new StringBuffer();
boolean isDecimal=false;
out: while(true) {
int c = in.read();
switch(c) {
case -1:
break out;
case '.':
case 'e': case 'E':
isDecimal=true; // fall through
case '-': case '+':
case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9':
sb.append((char)c);
break;
default:
in.unread((char)c);
break out;
}
}
if(isDecimal)
return Double.parseDouble(sb.toString());
long l = Long.parseLong(sb.toString());
if(l <= Integer.MAX_VALUE && l >= Integer.MIN_VALUE)
return (int)l;
return l;
}
private static String readString(PushbackReader in, JSONRecognizer[] recognizers) throws IOException {
int firstQuote = in.read();
if(firstQuote != '"')
throw new IllegalStateException();
StringBuffer data = new StringBuffer();
out: while(true) {
int c = in.read();
switch(c) {
case -1:
throw new IllegalArgumentException("String meets end of file");
case '"':
break out;
case '\\':
int d = in.read();
switch(d) {
case -1:
throw new IllegalArgumentException("Escape sequence meets end of file");
case '\\':
case '"':
data.append((char)d);
break;
case 'r':
data.append('\r');
break;
case 'n':
data.append('\n');
break;
case 'b':
data.append('\b');
break;
case 'f':
data.append('\f');
break;
case 't':
data.append('\t');
break;
case 'u':
int hex1 = in.read();
int hex2 = in.read();
int hex3 = in.read();
int hex4 = in.read();
if(hex4 < 0)
throw new IllegalArgumentException("Unicode escape meets end of file");
data.append((char)Integer.parseInt(""+(char)hex1+(char)hex2+(char)hex3+(char)hex4, 16));
break;
default:
throw new IllegalArgumentException("Unknown escape sequence \\"+(char)d);
}
break;
default:
data.append((char)c);
break;
}
}
return data.toString();
}
private static Map<String, Object> readMap(PushbackReader in, JSONRecognizer... recognizers) throws IOException {
int open = in.read();
if(open != '{')
throw new IllegalStateException();
HashMap<String,Object> result = new HashMap<String, Object>();
out: while(true) {
int next = skipWhitespace(in);
if(next < 0)
throw new IllegalArgumentException("EOF inside map");
else if(next == '}') {
in.read();
break out;
}
if(!result.isEmpty()) {
if(next == ',') {
in.read();
next = skipWhitespace(in);
} else {
throw new IllegalArgumentException("Expected , or } in map, got "+(char)next);
}
}
if (next != '"')
throw new IllegalArgumentException("Expected \" to begin key in map, got "+(char)next);
String key = readString(in, recognizers);
int colon = skipWhitespace(in);
if(colon != ':')
throw new IllegalArgumentException("Expected : after key name in map, got "+(char)colon);
in.read();
skipWhitespace(in);
result.put(key, decode(in, recognizers));
}
return result;
}
private static List<Object> readList(PushbackReader in, JSONRecognizer... recognizers) throws IOException {
int open = in.read();
if(open != '[')
throw new IllegalStateException();
ArrayList<Object> result = new ArrayList<Object>();
out: while(true) {
int next = skipWhitespace(in);
if(next < 0)
throw new IllegalArgumentException("EOF inside list");
if(next == ']') {
in.read();
break out;
}
if(!result.isEmpty()) {
if(next == ',') {
in.read();
skipWhitespace(in);
} else {
throw new IllegalArgumentException("Unexpected character in list "+(char)next+", expected , or ]");
}
}
result.add(decode(in, recognizers));
}
return result;
}
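/**
 * Runs the given recognizers over a freshly decoded value. Non-Map values are returned unchanged;
 * for Maps, the first non-null recognizer result is returned, otherwise the Map itself.
 */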
@SuppressWarnings("unchecked")
private static Object recognize(Object o, JSONRecognizer... recognizers) {
if(!(o instanceof Map<?,?>))
return o;
Map<String,Object> map = (Map<String,Object>)o;
for(JSONRecognizer recognizer : recognizers) {
Object result = recognizer.recognize(map);
if(result != null)
return result;
}
return map;
}
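/**
 * Skips whitespace and returns the next non-whitespace character without consuming it
 * (it is pushed back onto the reader), or -1 at end of input.
 */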
private static int skipWhitespace(PushbackReader reader) throws IOException {
while(true) {
int i = reader.read();
if(i < 0)
return i;
else if(!Character.isWhitespace((char)i)) {
reader.unread(i);
return i;
}
}
}
/*
public static Object decode(String text, JSONRecognizer... recognizers) {
text = text.trim();
try {
if(text.startsWith("{"))
return _wrappedToPOJO(new JSONObject(text), recognizers);
if(text.startsWith("["))
return _wrappedToPOJO(new JSONArray(text), recognizers);
if(text.startsWith("\""))
return parseString(text);
// bug in JSON lib
if(text.length() == 0)
throw new IllegalArgumentException("Blank JSON text");
return _wrappedToPOJO(JSONObject.stringToValue(text), recognizers);
} catch (JSONException e) {
throw new IllegalArgumentException(e);
}
}
*/
/**
* Convert a String to a Java object using JSON deserialization
* @throws IllegalArgumentException on deserialization error
* The signature takes one explicit recognizer class plus varargs so that decode(String) with no
* recognizers is not ambiguous with the JSONRecognizer-based overload.
* The classes passed must implement the JSONStaticRecognizer interface and declare a
* {@code public static Object recognize(Map<String,Object>)} method; this can't be enforced
* statically because varargs of generic types aren't possible.
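* <p>
* Illustrative sketch (the {@code Point} class is hypothetical, and JSONStaticRecognizer is
* assumed to be a marker interface):
* <pre>{@code
* public class Point implements JSONStaticRecognizer {
*     public int x, y;
*     // must be public static and take a Map, as checked reflectively below
*     public static Object recognize(Map<String, Object> value) {
*         if (!value.containsKey("x") || !value.containsKey("y"))
*             return null; // not recognized; caller falls back to the plain Map
*         Point p = new Point();
*         p.x = ((Number) value.get("x")).intValue();
*         p.y = ((Number) value.get("y")).intValue();
*         return p;
*     }
* }
* Object result = JSON.decode("{\"x\":1,\"y\":2}", Point.class); // a Point
* }</pre>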
*/
public static Object decode(String text, Class<?> firstStaticRecognizer, Class<?>... restStaticRecognizers) {
Class<?>[] staticRecognizers = new Class<?>[restStaticRecognizers.length+1];
staticRecognizers[0] = firstStaticRecognizer;
System.arraycopy(restStaticRecognizers, 0, staticRecognizers, 1, restStaticRecognizers.length);
JSONRecognizer[] recognizers = new JSONRecognizer[staticRecognizers.length];
for(int i=0; i<recognizers.length; i++) {
final Class<?> type = staticRecognizers[i];
if(!JSONStaticRecognizer.class.isAssignableFrom(type))
throw new IllegalArgumentException(type.getName()+" does not implement "+JSONStaticRecognizer.class.getSimpleName());
recognizers[i] = new JSONRecognizer() {
private Method method;
{
try {
method = type.getMethod("recognize", Map.class);
if(!Modifier.isStatic(method.getModifiers()))
throw new NoSuchMethodException("recognize method is not static");
} catch (NoSuchMethodException e) {
throw new IllegalStateException(type.getName()+" implements "+JSONStaticRecognizer.class.getSimpleName()+" but doesn't have public static Object recognize(Map<String,Object> value)");
}
}
@Override
public Object recognize(Map<String, Object> jsonObject) {
try {
return method.invoke(null, jsonObject);
} catch (InvocationTargetException e) {
throw new RuntimeException(e);
} catch (IllegalAccessException e) {
throw new RuntimeException(e);
}
}
};
}
return decode(text, recognizers);
}
}
package org.opentosca.planbuilder.core.bpel.handlers;
import java.util.ArrayList;
import java.util.List;
import javax.xml.namespace.QName;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import org.eclipse.winery.model.tosca.TBoundaryDefinitions;
import org.eclipse.winery.model.tosca.TDefinitions;
import org.eclipse.winery.model.tosca.TEntityTemplate;
import org.eclipse.winery.model.tosca.TNodeTemplate;
import org.eclipse.winery.model.tosca.TPropertyMapping;
import org.eclipse.winery.model.tosca.TRelationshipTemplate;
import org.eclipse.winery.model.tosca.TServiceTemplate;
import org.opentosca.container.core.model.ModelUtils;
import org.opentosca.planbuilder.core.plugins.context.Property2VariableMapping;
import org.opentosca.planbuilder.core.plugins.context.PropertyVariable;
import org.opentosca.planbuilder.model.plan.bpel.BPELPlan;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
/**
* <p>
* This class is responsible for fetching BoundaryDefinitions mappings and initializing the BuildPlan with appropriate
* assigns to return property values to the BuildPlan caller
* </p>
* <p>
* Copyright 2013 IAAS University of Stuttgart <br>
* <br>
*
* @author Kalman Kepes - [email protected]
*/
public class ServiceTemplateBoundaryPropertyMappingsToOutputHandler {
private final static Logger LOG =
LoggerFactory.getLogger(ServiceTemplateBoundaryPropertyMappingsToOutputHandler.class);
/**
* <p>
* Initializes the response message of the given BuildPlan according to the given BoundaryDefinitions inside the
* given Definitions document
* </p>
*
* @param definitions the Definitions document to look for BoundaryDefinitions in; it contains the ServiceTemplate
* the BuildPlan belongs to
* @param buildPlan an initialized BuildPlan
* @param propMap a PropMap which contains the names of the different template property variables inside the
* plan
*/
public void initializeBuildPlanOutput(final TDefinitions definitions, final BPELPlan buildPlan,
final Property2VariableMapping propMap,
TServiceTemplate serviceTemplate) {
final ServiceTemplatePropertyToPropertyMapping mapping = getMappings(serviceTemplate, propMap);
if (mapping == null) {
ServiceTemplateBoundaryPropertyMappingsToOutputHandler.LOG.warn("Couldn't generate mapping, BuildPlan Output may be empty");
return;
}
initializeAssignOutput(buildPlan, propMap, mapping, serviceTemplate);
}
/**
* Generates a copy with a literal value to the output message of the given BuildPlan. The literal consists of the
* mappings given, where the propertyMap is used to identify the propertyVariables inside the buildPlan
*
* @param buildPlan the BuildPlan to add the copy to
* @param propMap a PropertyMap containing the variable names of the properties
* @param mapping the mappings from serviceTemplate Properties to template properties
*/
private void initializeAssignOutput(final BPELPlan buildPlan, final Property2VariableMapping propMap,
final ServiceTemplatePropertyToPropertyMapping mapping,
TServiceTemplate serviceTemplate) {
try {
final BPELPlanHandler buildPlanHandler = new BPELPlanHandler();
final BPELPlanHandler processHandler = new BPELPlanHandler();
final List<String> failedServiceTemplateProperties = new ArrayList<>();
for (final String serviceTemplatePropertyName : mapping.getServiceTemplatePropertyNames()) {
// add copy to assign
final String templatePropertyName = mapping.getTemplatePropertyName(serviceTemplatePropertyName);
if (templatePropertyName == null) {
ServiceTemplateBoundaryPropertyMappingsToOutputHandler.LOG.warn("TemplatePropertyName is null");
failedServiceTemplateProperties.add(serviceTemplatePropertyName);
continue;
}
// add to outputmessage
buildPlanHandler.addStringElementToPlanResponse(serviceTemplatePropertyName, buildPlan);
if (isConcatQuery(templatePropertyName)) {
processHandler.addCopyStringToOutputAssign(generateCopyFromQueryToOutputAsString(templatePropertyName,
"//*[local-name()='"
+ serviceTemplatePropertyName
+ "']"),
buildPlan);
} else {
final String templateId = mapping.getTemplateId(serviceTemplatePropertyName);
if (templateId == null) {
ServiceTemplateBoundaryPropertyMappingsToOutputHandler.LOG.warn("TemplateId of mapping is null!");
failedServiceTemplateProperties.add(serviceTemplatePropertyName);
continue;
}
boolean assigned = false;
for (PropertyVariable var : propMap.getPropertyVariables(serviceTemplate, templateId)) {
if (var.getPropertyName().equals(templatePropertyName)) {
assigned = true;
processHandler.assginOutputWithVariableValue(var.getVariableName(),
serviceTemplatePropertyName, buildPlan);
}
}
if (!assigned) {
ServiceTemplateBoundaryPropertyMappingsToOutputHandler.LOG.warn("PropertyVarName is null");
failedServiceTemplateProperties.add(serviceTemplatePropertyName);
continue;
}
}
}
for (final String failedServiceTempProp : failedServiceTemplateProperties) {
mapping.removeServiceTemplatePropertyMapping(failedServiceTempProp);
}
} catch (final ParserConfigurationException e) {
ServiceTemplateBoundaryPropertyMappingsToOutputHandler.LOG.error("Couldn't initialize a Handler, BuildPlan OutputMessage may be empty",
e);
return;
}
}
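/**
 * Generates a BPEL copy element (as an XML string) that copies the result of the given XPath
 * from-query into the part of the plan's output variable selected by the given to-query.
 *
 * @param fromQuery an XPath 1.0 expression used as the from-expression of the copy
 * @param toQuery an XPath 1.0 query into the "payload" part of the "output" variable
 * @return the BPEL copy element as a String
 */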
private String generateCopyFromQueryToOutputAsString(final String fromQuery, final String toQuery) {
String copyString = "<bpel:copy xmlns:bpel=\"" + BPELPlan.bpelNamespace
+ "\"><bpel:from expressionLanguage=\"urn:oasis:names:tc:wsbpel:2.0:sublang:xpath1.0\"><![CDATA[";
copyString += fromQuery + "]]></bpel:from>";
copyString +=
"<bpel:to variable=\"output\" part=\"payload\"><bpel:query queryLanguage=\"urn:oasis:names:tc:wsbpel:2.0:sublang:xpath1.0\"><![CDATA["
+ toQuery + "]]></bpel:query></bpel:to></bpel:copy>";
return copyString;
}
/**
* Calculates the ServiceTemplate Property to Template Property mappings for the given ServiceTemplate
*
* @param buildPlanServiceTemplate a ServiceTemplate
* @param propMap the property-to-variable mapping used to resolve template property variables
* @return a Mapping from ServiceTemplate properties to Template properties
*/
private ServiceTemplatePropertyToPropertyMapping getMappings(final TServiceTemplate buildPlanServiceTemplate,
final Property2VariableMapping propMap) {
QName serviceTemplateQName = new QName(buildPlanServiceTemplate.getTargetNamespace(), buildPlanServiceTemplate.getId());
final ServiceTemplatePropertyToPropertyMapping mappingWrapper = new ServiceTemplatePropertyToPropertyMapping();
final TBoundaryDefinitions boundaryDefinitions = buildPlanServiceTemplate.getBoundaryDefinitions();
if (boundaryDefinitions == null) {
ServiceTemplateBoundaryPropertyMappingsToOutputHandler.LOG.warn("No BoundaryDefinitions in ServiceTemplate {} found. Output of BuildPlan maybe empty.",
new QName(buildPlanServiceTemplate.getTargetNamespace(), buildPlanServiceTemplate.getId()));
return null;
}
// get Properties
final TBoundaryDefinitions.Properties serviceTemplateProps = boundaryDefinitions.getProperties();
if (serviceTemplateProps == null) {
ServiceTemplateBoundaryPropertyMappingsToOutputHandler.LOG.warn("ServiceTemplate has no Properties defined");
return null;
}
// get the propertyElement and propertyMappings
final List<TPropertyMapping> propertyMappings = serviceTemplateProps.getPropertyMappings();
final Element propElement = (Element) serviceTemplateProps.getAny();
if (propElement == null) {
ServiceTemplateBoundaryPropertyMappingsToOutputHandler.LOG.warn("ServiceTemplate has no Properties defined");
return null;
}
// example:
// <BoundaryDefinitions>
// <Properties>
// <ex:Property>someDefaultValue</ex:Property>
// <PropertyMappings>
// <PropertyMapping serviceTemplatePropertyRef="/ex:Property"
// targetObjectRef="nodeTemplateID"
// targetPropertyRef="/nodeTemplateIdLocalName"/> +
// </PropertyMappings/> ?
// </Properties>
for (final TPropertyMapping propertyMapping : propertyMappings) {
// these two will be used to create a property reference for the
// internal property variable of the plan
final String templateId = propertyMapping.getTargetObjectRef().getId();
final String targetPropertyRef = propertyMapping.getTargetPropertyRef();
// this will be a localName in the output
final String serviceTemplatePropLocalName = getTemplatePropertyLocalName(propElement, propertyMapping.getServiceTemplatePropertyRef());
String templatePropLocalName = this.determinePropLocalName(targetPropertyRef,propertyMapping,buildPlanServiceTemplate, templateId,serviceTemplateQName,propMap,serviceTemplatePropLocalName);
if (templatePropLocalName == null) {
ServiceTemplateBoundaryPropertyMappingsToOutputHandler.LOG.warn("Referenced Template {} in ServiceTemplate {} has no Properties defined, continueing with other PropertyMapping",
templateId,
serviceTemplateQName);
continue;
}
mappingWrapper.addMapping(serviceTemplatePropLocalName, templateId, templatePropLocalName);
}
return mappingWrapper;
}
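/**
 * Determines the localName of the template property referenced by the given PropertyMapping.
 * For concat(..) queries the query itself is returned with BPEL variable references injected;
 * otherwise the referenced template's properties are looked up by targetPropertyRef.
 *
 * @return the template property localName (or the rewritten concat query), or null if the
 *         mapping cannot be resolved
 */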
private String determinePropLocalName(String targetPropertyRef, TPropertyMapping propertyMapping, TServiceTemplate buildPlanServiceTemplate,
String templateId, QName serviceTemplateQName, Property2VariableMapping propMap, String serviceTemplatePropLocalName){
String templatePropLocalName = null;
boolean isConcatQuery = false;
if (isConcatQuery(targetPropertyRef)) {
isConcatQuery = true;
templatePropLocalName =
injectBPELVariables(propertyMapping.getTargetPropertyRef(), propMap, buildPlanServiceTemplate);
} else {
TEntityTemplate.Properties props = null;
if (getNodeTemplate(buildPlanServiceTemplate, templateId) != null) {
props =
getNodeTemplate(buildPlanServiceTemplate, templateId).getProperties();
} else if (getRelationshipTemplate(buildPlanServiceTemplate, templateId) != null) {
props =
getRelationshipTemplate(buildPlanServiceTemplate, templateId).getProperties();
}
if (props != null && ModelUtils.asMap(props).isEmpty()) {
ServiceTemplateBoundaryPropertyMappingsToOutputHandler.LOG.warn("Referenced Template {} in ServiceTemplate {} has no Properties defined, continueing with other PropertyMapping",
templateId,
serviceTemplateQName
.toString());
return null;
}
ServiceTemplateBoundaryPropertyMappingsToOutputHandler.LOG.debug("Adding Mapping for ServiceTemplateProperty {}, TemplateId {}",
serviceTemplatePropLocalName,
templateId);
templatePropLocalName = ModelUtils.asMap(props).get(propertyMapping.getTargetPropertyRef());
}
if (templatePropLocalName == null) {
ServiceTemplateBoundaryPropertyMappingsToOutputHandler.LOG.warn("Referenced Template {} in ServiceTemplate {} has no Properties defined, continueing with other PropertyMapping",
templateId,
serviceTemplateQName
.toString());
return null;
}
if (serviceTemplatePropLocalName == null) {
ServiceTemplateBoundaryPropertyMappingsToOutputHandler.LOG.warn("Couldn't find Property Element for ServiceTemplate {} , continueing with other PropertyMapping",
serviceTemplateQName
.toString());
return null;
}
if (!isConcatQuery && templateId == null) {
ServiceTemplateBoundaryPropertyMappingsToOutputHandler.LOG.warn("targetObjectRef for ServiceTemplate {} not set, continueing with other PropertyMapping",
serviceTemplateQName
.toString());
return null;
}
return templatePropLocalName;
}
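/**
 * Rewrites a concat(..) property query so that "NodeTemplate.Properties.PropertyName" parts are
 * replaced by references to the matching BPEL property variables ("$variableName"), while quoted
 * string literals are kept as-is.
 *
 * @return the rewritten concat expression, or null if the query is not a concat(..) call or a
 *         referenced property variable cannot be found
 */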
private String injectBPELVariables(final String targetPropertyRef, final Property2VariableMapping propMap,
TServiceTemplate serviceTemplate) {
final String testQuery = targetPropertyRef.trim();
if (!testQuery.endsWith(")")) {
return null;
}
final int functionOpeningBracket = testQuery.indexOf("(");
final String functionString = testQuery.substring(0, functionOpeningBracket);
// simple validity check as we only want to be able to concat strings,
// but maybe more later
if (!functionString.equals("concat")) {
return null;
}
final String functionContent =
testQuery.substring(functionOpeningBracket + 1, testQuery.lastIndexOf(")")).trim();
final String[] functionParts = functionContent.split(",");
final List<String> augmentedFunctionParts = new ArrayList<>();
for (final String functionPart : functionParts) {
if (functionPart.trim().startsWith("'")) {
// string function part, just add to list
augmentedFunctionParts.add(functionPart);
} else if (functionPart.trim().split("\\.Properties\\.").length == 2) {
// "DSL" Query
final String[] queryParts = functionPart.trim().split("\\.Properties\\.");
final String nodeTemplateName = queryParts[0];
final String propertyName = queryParts[1];
boolean addedVar = false;
for (PropertyVariable var : propMap.getPropertyVariables(serviceTemplate, nodeTemplateName)) {
if (var.getPropertyName().equals(propertyName)) {
addedVar = true;
augmentedFunctionParts.add("$" + var.getVariableName());
}
}
if (!addedVar) {
return null;
}
}
}
String resultString = functionString + "(";
for (final String functionPart : augmentedFunctionParts) {
resultString += functionPart + ",";
}
resultString = resultString.substring(0, resultString.length() - 1) + ")";
return resultString;
}
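/**
 * Checks whether the given XPath query is a concat(..) expression whose quoted string arguments
 * are well-formed.
 */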
private boolean isConcatQuery(final String xPathQuery) {
final String testString = xPathQuery.trim();
if (!testString.startsWith("concat(")) {
return false;
}
String functionContent = testString.substring("concat(".length());
functionContent = functionContent.substring(0, functionContent.length() - 1);
final String[] functionParts = functionContent.split(",");
for (final String functionPart : functionParts) {
if (functionPart.startsWith("'") && !functionPart.endsWith("'")) {
return false;
}
}
return true;
}
/**
* Returns the localName of an element which is referenced by the XPath expression inside the PropertyMappings
*
* @param serviceTemplatePropElement the first element inside the Properties Element
* @param xpathExpr an XPath Expression
* @return a localName when the XPath expression returned exactly one Node, else null
*/
private String getTemplatePropertyLocalName(final Element serviceTemplatePropElement, final String xpathExpr) {
try {
final XPath xPath = XPathFactory.newInstance().newXPath();
ServiceTemplateBoundaryPropertyMappingsToOutputHandler.LOG.debug("Executing XPath Expression {} on Node {}",
xpathExpr, serviceTemplatePropElement);
final NodeList nodes =
(NodeList) xPath.evaluate(xpathExpr, serviceTemplatePropElement, XPathConstants.NODESET);
// we assume that the expression gives us a single node
if (nodes.getLength() == 1) {
final Node node = nodes.item(0);
return node.getLocalName();
} else {
ServiceTemplateBoundaryPropertyMappingsToOutputHandler.LOG.error("XPath expression {} on Element {} returned multiple Nodes",
xpathExpr, serviceTemplatePropElement);
return null;
}
} catch (final XPathExpressionException e1) {
ServiceTemplateBoundaryPropertyMappingsToOutputHandler.LOG.error("XPath Expression for serviceTemplatePropetyRef isn't valid",
e1);
}
return null;
}
/**
* Returns a TNodeTemplate of the given serviceTemplate and TemplateId
*
* @param serviceTemplate the ServiceTemplate to search in
* @param templateId the Id of the Template
* @return a TNodeTemplate with the specified Id, else null
*/
private TNodeTemplate getNodeTemplate(final TServiceTemplate serviceTemplate,
final String templateId) {
for (final TNodeTemplate nodeTemplate : serviceTemplate.getTopologyTemplate().getNodeTemplates()) {
if (nodeTemplate.getId().equals(templateId)) {
return nodeTemplate;
}
}
return null;
}
/**
* Returns a TRelationshipTemplate of the given serviceTemplate and TemplateId
*
* @param serviceTemplate the ServiceTemplate to search in
* @param templateId the Id of the template to search for
* @return a TRelationshipTemplate with the specified Id, else null
*/
private TRelationshipTemplate getRelationshipTemplate(final TServiceTemplate serviceTemplate,
final String templateId) {
for (final TRelationshipTemplate relationshipTemplate : serviceTemplate.getTopologyTemplate()
.getRelationshipTemplates()) {
if (relationshipTemplate.getId().equals(templateId)) {
return relationshipTemplate;
}
}
return null;
}
/**
* <p>
* This class is a wrapper which holds mappings between ServiceTemplate properties, templates and template properties
* </p>
* Copyright 2013 IAAS University of Stuttgart <br>
* <br>
*
* @author Kalman Kepes - [email protected]
*/
private class ServiceTemplatePropertyToPropertyMapping {
// internal array, basically n rows 3 columns
private String[][] internalArray = new String[1][3];
/**
* Adds a mapping between a ServiceTemplate property, a template and a template property
*
* @param serviceTemplatePropertyLocalName the localName of a serviceTemplate property
* @param templateId the template Id
* @param templatePropertyLocalName the localName of a template property
*/
protected void addMapping(final String serviceTemplatePropertyLocalName, final String templateId,
final String templatePropertyLocalName) {
ServiceTemplateBoundaryPropertyMappingsToOutputHandler.LOG.debug("Adding ServiceTemplate Property Mapping, serviceTemplate property localName {}, templateId {} and template property localName {}",
serviceTemplatePropertyLocalName,
templateId, templatePropertyLocalName);
if (this.internalArray.length == 1) {
// nothing stored inside array yet
this.internalArray[0][0] = serviceTemplatePropertyLocalName;
this.internalArray[0][1] = templateId;
this.internalArray[0][2] = templatePropertyLocalName;
increaseArraySize();
} else {
this.internalArray[this.internalArray.length - 1][0] = serviceTemplatePropertyLocalName;
this.internalArray[this.internalArray.length - 1][1] = templateId;
this.internalArray[this.internalArray.length - 1][2] = templatePropertyLocalName;
increaseArraySize();
}
printInternalArray();
}
private void printInternalArray() {
for (int index_1 = 0; index_1 < this.internalArray.length; index_1++) {
for (int index_2 = 0; index_2 < this.internalArray[index_1].length; index_2++) {
ServiceTemplateBoundaryPropertyMappingsToOutputHandler.LOG.debug("index1: " + index_1 + " index2: "
+ index_2 + " value: " + this.internalArray[index_1][index_2]);
}
}
}
/**
* Removes a ServiceTemplate Property Mapping
*
* @param serviceTemplatePropertyName a localName of serviceTemplate property
*/
protected void removeServiceTemplatePropertyMapping(final String serviceTemplatePropertyName) {
ServiceTemplateBoundaryPropertyMappingsToOutputHandler.LOG.debug("Removin ServiceTemplate Property Mapping for serviceTemplate Property {}",
serviceTemplatePropertyName);
for (int index = 0; index < this.internalArray.length; index++) {
if (this.internalArray[index][0] != null
&& this.internalArray[index][0].equals(serviceTemplatePropertyName)) {
// TODO pretty ugly, but should work
this.internalArray[index][0] = null;
this.internalArray[index][1] = null;
this.internalArray[index][2] = null;
}
}
}
/**
* Returns all ServiceTemplate property localNames inside this wrapper
*
* @return a List of Strings which are ServiceTemplate property localNames
*/
protected List<String> getServiceTemplatePropertyNames() {
final List<String> names = new ArrayList<>();
for (final String[] element : this.internalArray) {
if (element[0] != null) {
names.add(element[0]);
}
}
return names;
}
/**
* Returns the templateId of the ServiceTemplate Property Mapping
*
* @param serviceTemplateLocalName a localName of a ServiceTemplate property
* @return a String which is a templateId else null
*/
protected String getTemplateId(final String serviceTemplateLocalName) {
for (final String[] element : this.internalArray) {
if (element[0] != null && element[0].equals(serviceTemplateLocalName)) {
return element[1];
}
}
return null;
}
/**
* Returns a Template property localName for the given serviceTemplate property localName
*
* @param serviceTemplateLocalName a localName of a serviceTemplate Property
* @return a String which is a localName of a Template property, else null
*/
protected String getTemplatePropertyName(final String serviceTemplateLocalName) {
for (final String[] element : this.internalArray) {
if (element[0] != null && element[0].equals(serviceTemplateLocalName)) {
return element[2];
}
}
return null;
}
/**
* Increases the size of the internal array by 1 row
*/
private void increaseArraySize() {
final int arrayLength = this.internalArray.length;
final String[][] newArray = new String[arrayLength + 1][3];
for (int index = 0; index < arrayLength; index++) {
// copy serviceTemplatePropertyName
newArray[index][0] = this.internalArray[index][0];
// copy templateId
newArray[index][1] = this.internalArray[index][1];
// copy templatePropLocalName
newArray[index][2] = this.internalArray[index][2];
}
this.internalArray = newArray;
}
}
}
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.skyframe;
import static com.google.common.collect.ImmutableList.toImmutableList;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.devtools.build.lib.actions.MutableActionGraph.ActionConflictException;
import com.google.devtools.build.lib.analysis.AliasProvider;
import com.google.devtools.build.lib.analysis.AspectResolver;
import com.google.devtools.build.lib.analysis.AspectValue;
import com.google.devtools.build.lib.analysis.CachingAnalysisEnvironment;
import com.google.devtools.build.lib.analysis.CachingAnalysisEnvironment.MissingDepException;
import com.google.devtools.build.lib.analysis.ConfiguredAspect;
import com.google.devtools.build.lib.analysis.ConfiguredAspectFactory;
import com.google.devtools.build.lib.analysis.ConfiguredRuleClassProvider;
import com.google.devtools.build.lib.analysis.ConfiguredTarget;
import com.google.devtools.build.lib.analysis.ConfiguredTargetValue;
import com.google.devtools.build.lib.analysis.Dependency;
import com.google.devtools.build.lib.analysis.DependencyKey;
import com.google.devtools.build.lib.analysis.DependencyKind;
import com.google.devtools.build.lib.analysis.DuplicateException;
import com.google.devtools.build.lib.analysis.ExecGroupCollection.InvalidExecGroupException;
import com.google.devtools.build.lib.analysis.InconsistentAspectOrderException;
import com.google.devtools.build.lib.analysis.PlatformOptions;
import com.google.devtools.build.lib.analysis.ResolvedToolchainContext;
import com.google.devtools.build.lib.analysis.TargetAndConfiguration;
import com.google.devtools.build.lib.analysis.ToolchainCollection;
import com.google.devtools.build.lib.analysis.config.BuildConfigurationValue;
import com.google.devtools.build.lib.analysis.config.ConfigConditions;
import com.google.devtools.build.lib.analysis.config.ConfigurationResolver;
import com.google.devtools.build.lib.analysis.config.DependencyEvaluationException;
import com.google.devtools.build.lib.analysis.config.InvalidConfigurationException;
import com.google.devtools.build.lib.analysis.config.TransitionResolver;
import com.google.devtools.build.lib.analysis.config.transitions.ConfigurationTransition;
import com.google.devtools.build.lib.analysis.config.transitions.NoTransition;
import com.google.devtools.build.lib.analysis.configuredtargets.MergedConfiguredTarget;
import com.google.devtools.build.lib.analysis.starlark.StarlarkTransition.TransitionException;
import com.google.devtools.build.lib.bugreport.BugReport;
import com.google.devtools.build.lib.causes.Cause;
import com.google.devtools.build.lib.causes.LabelCause;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.collect.nestedset.NestedSet;
import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder;
import com.google.devtools.build.lib.collect.nestedset.Order;
import com.google.devtools.build.lib.events.Event;
import com.google.devtools.build.lib.events.StoredEventHandler;
import com.google.devtools.build.lib.packages.Aspect;
import com.google.devtools.build.lib.packages.AspectDefinition;
import com.google.devtools.build.lib.packages.Attribute;
import com.google.devtools.build.lib.packages.BuildFileContainsErrorsException;
import com.google.devtools.build.lib.packages.NativeAspectClass;
import com.google.devtools.build.lib.packages.NoSuchPackageException;
import com.google.devtools.build.lib.packages.NoSuchTargetException;
import com.google.devtools.build.lib.packages.NoSuchThingException;
import com.google.devtools.build.lib.packages.OutputFile;
import com.google.devtools.build.lib.packages.Package;
import com.google.devtools.build.lib.packages.Rule;
import com.google.devtools.build.lib.packages.RuleClassProvider;
import com.google.devtools.build.lib.packages.StarlarkAspectClass;
import com.google.devtools.build.lib.packages.StarlarkDefinedAspect;
import com.google.devtools.build.lib.packages.Target;
import com.google.devtools.build.lib.packages.semantics.BuildLanguageOptions;
import com.google.devtools.build.lib.profiler.memory.CurrentRuleTracker;
import com.google.devtools.build.lib.skyframe.AspectKeyCreator.AspectKey;
import com.google.devtools.build.lib.skyframe.BzlLoadFunction.BzlLoadFailedException;
import com.google.devtools.build.lib.skyframe.ConfiguredTargetFunction.ComputeDependenciesState;
import com.google.devtools.build.lib.skyframe.SkyframeExecutor.BuildViewProvider;
import com.google.devtools.build.lib.util.OrderedSetMultimap;
import com.google.devtools.build.skyframe.SkyFunction;
import com.google.devtools.build.skyframe.SkyFunction.Environment.SkyKeyComputeState;
import com.google.devtools.build.skyframe.SkyFunctionException;
import com.google.devtools.build.skyframe.SkyKey;
import com.google.devtools.build.skyframe.SkyValue;
import com.google.devtools.build.skyframe.SkyframeLookupResult;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import javax.annotation.Nullable;
import net.starlark.java.eval.StarlarkSemantics;
/**
* The Skyframe function that generates aspects.
*
* <p>This class, together with {@link ConfiguredTargetFunction} drives the analysis phase. For more
* information, see {@link com.google.devtools.build.lib.analysis.RuleConfiguredTargetFactory}.
*
* <p>{@link AspectFunction} takes a SkyKey containing an {@link AspectKey} [a tuple of (target
* label, configurations, aspect class and aspect parameters)], loads an {@link Aspect} from aspect
* class and aspect parameters, gets a {@link ConfiguredTarget} for label and configurations, and
* then creates a {@link ConfiguredAspect} for a given {@link AspectKey}.
*
* <p>See {@link com.google.devtools.build.lib.packages.AspectClass} documentation for an overview
* of aspect-related classes.
*
* @see com.google.devtools.build.lib.analysis.RuleConfiguredTargetFactory
* @see com.google.devtools.build.lib.packages.AspectClass
*/
final class AspectFunction implements SkyFunction {
private final BuildViewProvider buildViewProvider;
private final RuleClassProvider ruleClassProvider;
/**
* Indicates whether the set of packages transitively loaded for a given {@link AspectValue} will
* be needed for package root resolution later in the build. If not, they are not collected and
* stored.
*/
private final boolean storeTransitivePackagesForPackageRootResolution;
AspectFunction(
BuildViewProvider buildViewProvider,
RuleClassProvider ruleClassProvider,
boolean storeTransitivePackagesForPackageRootResolution) {
this.buildViewProvider = buildViewProvider;
this.ruleClassProvider = ruleClassProvider;
this.storeTransitivePackagesForPackageRootResolution =
storeTransitivePackagesForPackageRootResolution;
}
static class State implements SkyKeyComputeState {
/** Null if AspectFunction is not storing this information. */
@Nullable NestedSetBuilder<Package> transitivePackagesForPackageRootResolution;
NestedSetBuilder<Cause> transitiveRootCauses = NestedSetBuilder.stableOrder();
@Nullable InitialValues initialValues;
ComputeDependenciesState computeDependenciesState = new ComputeDependenciesState();
State(boolean storeTransitivePackagesForPackageRootResolution) {
this.transitivePackagesForPackageRootResolution =
storeTransitivePackagesForPackageRootResolution ? NestedSetBuilder.stableOrder() : null;
}
}
private static class InitialValues {
@Nullable private final Aspect aspect;
@Nullable private final ConfiguredAspectFactory aspectFactory;
@Nullable private final BuildConfigurationValue configuration;
private final ConfiguredTarget associatedTarget;
private final Target target;
private InitialValues(
@Nullable Aspect aspect,
@Nullable ConfiguredAspectFactory aspectFactory,
@Nullable BuildConfigurationValue configuration,
ConfiguredTarget associatedTarget,
Target target) {
this.aspect = aspect;
this.aspectFactory = aspectFactory;
this.configuration = configuration;
this.associatedTarget = associatedTarget;
this.target = target;
}
}
@Nullable
@Override
public SkyValue compute(SkyKey skyKey, Environment env)
throws AspectFunctionException, InterruptedException {
AspectKey key = (AspectKey) skyKey.argument();
State state = env.getState(() -> new State(storeTransitivePackagesForPackageRootResolution));
if (state.initialValues == null) {
InitialValues initialValues = getInitialValues(key, env);
if (initialValues == null) {
return null;
}
state.initialValues = initialValues;
}
Aspect aspect = state.initialValues.aspect;
ConfiguredAspectFactory aspectFactory = state.initialValues.aspectFactory;
BuildConfigurationValue configuration = state.initialValues.configuration;
ConfiguredTarget associatedTarget = state.initialValues.associatedTarget;
Target target = state.initialValues.target;
if (AliasProvider.isAlias(associatedTarget)) {
return createAliasAspect(
env,
buildViewProvider.getSkyframeBuildView().getHostConfiguration(),
new TargetAndConfiguration(target, configuration),
aspect,
key,
configuration,
associatedTarget);
}
// If we get here, label should match original label, and therefore the target we looked up
// above indeed corresponds to associatedTarget.getLabel().
Preconditions.checkState(
associatedTarget.getOriginalLabel().equals(associatedTarget.getLabel()),
"Non-alias %s should have matching label but found %s",
associatedTarget.getOriginalLabel(),
associatedTarget.getLabel());
// If the incompatible flag is set, the top-level aspect should not be applied on top-level
// targets whose rules do not advertise the aspect's required providers. The aspect should also
// not propagate to these targets' dependencies.
StarlarkSemantics starlarkSemantics = PrecomputedValue.STARLARK_SEMANTICS.get(env);
if (starlarkSemantics == null) {
return null;
}
boolean checkRuleAdvertisedProviders =
starlarkSemantics.getBool(
BuildLanguageOptions.INCOMPATIBLE_TOP_LEVEL_ASPECTS_REQUIRE_PROVIDERS);
if (checkRuleAdvertisedProviders) {
if (target instanceof Rule) {
if (!aspect
.getDefinition()
.getRequiredProviders()
.isSatisfiedBy(((Rule) target).getRuleClassObject().getAdvertisedProviders())) {
return new AspectValue(
key,
aspect,
target.getLocation(),
ConfiguredAspect.forNonapplicableTarget(),
/*transitivePackagesForPackageRootResolution=*/ NestedSetBuilder.emptySet(
Order.STABLE_ORDER));
}
}
}
ImmutableList<Aspect> topologicalAspectPath;
if (key.getBaseKeys().isEmpty()) {
topologicalAspectPath = ImmutableList.of(aspect);
} else {
LinkedHashSet<AspectKey> orderedKeys = new LinkedHashSet<>();
collectAspectKeysInTopologicalOrder(key.getBaseKeys(), orderedKeys);
Map<SkyKey, SkyValue> aspectValues = env.getValues(orderedKeys);
if (env.valuesMissing()) {
return null;
}
ImmutableList.Builder<Aspect> topologicalAspectPathBuilder =
ImmutableList.builderWithExpectedSize(orderedKeys.size() + 1);
for (AspectKey aspectKey : orderedKeys) {
AspectValue aspectValue = (AspectValue) aspectValues.get(aspectKey);
topologicalAspectPathBuilder.add(aspectValue.getAspect());
}
topologicalAspectPath = topologicalAspectPathBuilder.add(aspect).build();
List<ConfiguredAspect> directlyRequiredAspects =
Lists.transform(
key.getBaseKeys(), k -> ((AspectValue) aspectValues.get(k)).getConfiguredAspect());
try {
associatedTarget = MergedConfiguredTarget.of(associatedTarget, directlyRequiredAspects);
} catch (DuplicateException e) {
env.getListener().handle(Event.error(target.getLocation(), e.getMessage()));
throw new AspectFunctionException(
new AspectCreationException(e.getMessage(), target.getLabel(), configuration));
}
}
SkyframeDependencyResolver resolver = new SkyframeDependencyResolver(env);
TargetAndConfiguration originalTargetAndConfiguration =
new TargetAndConfiguration(target, configuration);
try {
UnloadedToolchainContext unloadedToolchainContext =
getUnloadedToolchainContext(env, key, aspect, configuration);
if (env.valuesMissing()) {
return null;
}
// Get the configuration targets that trigger this rule's configurable attributes.
ConfigConditions configConditions =
ConfiguredTargetFunction.getConfigConditions(
env,
originalTargetAndConfiguration,
state.transitivePackagesForPackageRootResolution,
unloadedToolchainContext == null ? null : unloadedToolchainContext.targetPlatform(),
state.transitiveRootCauses);
if (configConditions == null) {
// Those targets haven't yet been resolved.
return null;
}
OrderedSetMultimap<DependencyKind, ConfiguredTargetAndData> depValueMap;
try {
depValueMap =
ConfiguredTargetFunction.computeDependencies(
state.computeDependenciesState,
state.transitivePackagesForPackageRootResolution,
state.transitiveRootCauses,
env,
resolver,
originalTargetAndConfiguration,
topologicalAspectPath,
configConditions.asProviders(),
unloadedToolchainContext == null
? null
: ToolchainCollection.builder()
.addDefaultContext(unloadedToolchainContext)
.build(),
shouldUseToolchainTransition(configuration, aspect.getDefinition()),
ruleClassProvider,
buildViewProvider.getSkyframeBuildView().getHostConfiguration());
} catch (ConfiguredValueCreationException e) {
throw new AspectCreationException(
e.getMessage(), key.getLabel(), configuration, e.getDetailedExitCode());
}
if (depValueMap == null) {
return null;
}
if (!state.transitiveRootCauses.isEmpty()) {
NestedSet<Cause> causes = state.transitiveRootCauses.build();
throw new AspectFunctionException(
new AspectCreationException(
"Loading failed",
causes,
ConfiguredTargetFunction.getPrioritizedDetailedExitCode(causes)));
}
// Load the requested toolchains into the ToolchainContext, now that we have dependencies.
ResolvedToolchainContext toolchainContext = null;
if (unloadedToolchainContext != null) {
String targetDescription =
String.format(
"aspect %s applied to %s", aspect.getDescriptor().getDescription(), target);
toolchainContext =
ResolvedToolchainContext.load(
unloadedToolchainContext,
targetDescription,
// TODO(161222568): Support exec groups on aspects.
depValueMap.get(DependencyKind.defaultExecGroupToolchain()));
}
return createAspect(
env,
key,
topologicalAspectPath,
aspect,
aspectFactory,
new ConfiguredTargetAndData(
associatedTarget, target, configuration, /*transitionKeys=*/ null),
configuration,
configConditions,
toolchainContext,
depValueMap,
state.transitivePackagesForPackageRootResolution);
} catch (DependencyEvaluationException e) {
// TODO(bazel-team): consolidate all env.getListener().handle() calls in this method, like in
// ConfiguredTargetFunction. This encourages clear, consistent user messages (ideally without
// the programmer having to think about it).
if (!e.depReportedOwnError()) {
env.getListener().handle(Event.error(e.getLocation(), e.getMessage()));
}
if (e.getCause() instanceof ConfiguredValueCreationException) {
ConfiguredValueCreationException cause = (ConfiguredValueCreationException) e.getCause();
throw new AspectFunctionException(
new AspectCreationException(
cause.getMessage(), cause.getRootCauses(), cause.getDetailedExitCode()));
} else if (e.getCause() instanceof InconsistentAspectOrderException) {
InconsistentAspectOrderException cause = (InconsistentAspectOrderException) e.getCause();
env.getListener().handle(Event.error(cause.getLocation(), cause.getMessage()));
throw new AspectFunctionException(
new AspectCreationException(cause.getMessage(), key.getLabel(), configuration));
} else if (e.getCause() instanceof TransitionException) {
TransitionException cause = (TransitionException) e.getCause();
throw new AspectFunctionException(
new AspectCreationException(cause.getMessage(), key.getLabel(), configuration));
} else {
// Cast to InvalidConfigurationException as a consistency check. If you add any
// DependencyEvaluationException constructors, you may need to change this code, too.
InvalidConfigurationException cause = (InvalidConfigurationException) e.getCause();
throw new AspectFunctionException(
new AspectCreationException(
cause.getMessage(), key.getLabel(), configuration, cause.getDetailedExitCode()));
}
} catch (AspectCreationException e) {
throw new AspectFunctionException(e);
} catch (ConfiguredValueCreationException e) {
throw new AspectFunctionException(e);
} catch (ToolchainException e) {
throw new AspectFunctionException(
new AspectCreationException(
e.getMessage(), new LabelCause(key.getLabel(), e.getDetailedExitCode())));
}
}
static SkyKey bzlLoadKeyForStarlarkAspect(StarlarkAspectClass starlarkAspectClass) {
Label extensionLabel = starlarkAspectClass.getExtensionLabel();
return StarlarkBuiltinsValue.isBuiltinsRepo(extensionLabel.getRepository())
? BzlLoadValue.keyForBuiltins(extensionLabel)
: BzlLoadValue.keyForBuild(extensionLabel);
}
@Nullable
private InitialValues getInitialValues(AspectKey key, Environment env)
throws AspectFunctionException, InterruptedException {
StarlarkAspectClass starlarkAspectClass;
ConfiguredAspectFactory aspectFactory = null;
Aspect aspect = null;
SkyKey aspectPackageKey = PackageValue.key(key.getLabel().getPackageIdentifier());
SkyKey baseConfiguredTargetKey = key.getBaseConfiguredTargetKey();
SkyKey basePackageKey =
PackageValue.key(key.getBaseConfiguredTargetKey().getLabel().getPackageIdentifier());
SkyKey configurationKey = key.getConfigurationKey();
SkyKey bzlLoadKey;
if (key.getAspectClass() instanceof NativeAspectClass) {
NativeAspectClass nativeAspectClass = (NativeAspectClass) key.getAspectClass();
starlarkAspectClass = null;
aspectFactory = (ConfiguredAspectFactory) nativeAspectClass;
aspect = Aspect.forNative(nativeAspectClass, key.getParameters());
bzlLoadKey = null;
} else {
Preconditions.checkState(
key.getAspectClass() instanceof StarlarkAspectClass, "Unknown aspect class: %s", key);
starlarkAspectClass = (StarlarkAspectClass) key.getAspectClass();
bzlLoadKey = bzlLoadKeyForStarlarkAspect(starlarkAspectClass);
}
ImmutableSet.Builder<SkyKey> initialKeys = ImmutableSet.builder();
initialKeys.add(aspectPackageKey).add(baseConfiguredTargetKey).add(basePackageKey);
if (configurationKey != null) {
initialKeys.add(configurationKey);
}
if (bzlLoadKey != null) {
initialKeys.add(bzlLoadKey);
}
SkyframeLookupResult initialValues = env.getValuesAndExceptions(initialKeys.build());
if (env.valuesMissing()) {
return null;
}
if (starlarkAspectClass != null) {
StarlarkDefinedAspect starlarkAspect;
try {
BzlLoadValue bzlLoadvalue;
try {
bzlLoadvalue =
(BzlLoadValue) initialValues.getOrThrow(bzlLoadKey, BzlLoadFailedException.class);
if (bzlLoadvalue == null) {
BugReport.sendBugReport(
new IllegalStateException(
"bzlLoadValue " + bzlLoadKey + " was missing, this should never happen"));
return null;
}
} catch (BzlLoadFailedException e) {
throw new AspectCreationException(
e.getMessage(), starlarkAspectClass.getExtensionLabel(), e.getDetailedExitCode());
}
starlarkAspect = loadAspectFromBzl(starlarkAspectClass, bzlLoadvalue);
} catch (AspectCreationException e) {
env.getListener().handle(Event.error(e.getMessage()));
throw new AspectFunctionException(e);
}
aspectFactory = new StarlarkAspectFactory(starlarkAspect);
aspect =
Aspect.forStarlark(
starlarkAspect.getAspectClass(),
starlarkAspect.getDefinition(key.getParameters()),
key.getParameters());
}
// Keep this in sync with the same code in ConfiguredTargetFunction.
PackageValue aspectPackage = (PackageValue) initialValues.get(aspectPackageKey);
if (aspectPackage.getPackage().containsErrors()) {
throw new AspectFunctionException(
new BuildFileContainsErrorsException(key.getLabel().getPackageIdentifier()));
}
ConfiguredTargetValue baseConfiguredTargetValue;
try {
baseConfiguredTargetValue =
(ConfiguredTargetValue)
initialValues.getOrThrow(
baseConfiguredTargetKey, ConfiguredValueCreationException.class);
if (baseConfiguredTargetValue == null) {
BugReport.sendBugReport(
new IllegalStateException(
"BzlLoadFailedException should have been processed by ConfiguredTargetFunction for "
+ baseConfiguredTargetKey
+ " and "
+ key));
return null;
}
} catch (ConfiguredValueCreationException e) {
throw new AspectFunctionException(
new AspectCreationException(e.getMessage(), e.getRootCauses(), e.getDetailedExitCode()));
}
ConfiguredTarget associatedTarget = baseConfiguredTargetValue.getConfiguredTarget();
Preconditions.checkState(
Objects.equals(key.getConfigurationKey(), associatedTarget.getConfigurationKey()),
"Aspect not in same configuration as associated target: %s, %s",
key,
associatedTarget);
BuildConfigurationValue configuration =
configurationKey == null
? null
: (BuildConfigurationValue) initialValues.get(configurationKey);
PackageValue basePackage = (PackageValue) initialValues.get(basePackageKey);
Target target;
try {
target = basePackage.getPackage().getTarget(associatedTarget.getOriginalLabel().getName());
} catch (NoSuchTargetException e) {
throw new IllegalStateException("Name already verified", e);
}
return new InitialValues(aspect, aspectFactory, configuration, associatedTarget, target);
}
/**
* Loads a Starlark-defined aspect from an extension file.
*
* @throws AspectCreationException if the value loaded is not a {@link StarlarkDefinedAspect}
*/
static StarlarkDefinedAspect loadAspectFromBzl(
StarlarkAspectClass starlarkAspectClass, BzlLoadValue bzlLoadValue)
throws AspectCreationException {
Label extensionLabel = starlarkAspectClass.getExtensionLabel();
String starlarkValueName = starlarkAspectClass.getExportedName();
Object starlarkValue = bzlLoadValue.getModule().getGlobal(starlarkValueName);
if (!(starlarkValue instanceof StarlarkDefinedAspect)) {
throw new AspectCreationException(
String.format(
starlarkValue == null ? "%s is not exported from %s" : "%s from %s is not an aspect",
starlarkValueName,
extensionLabel),
extensionLabel);
}
return (StarlarkDefinedAspect) starlarkValue;
}
@Nullable
private static UnloadedToolchainContext getUnloadedToolchainContext(
Environment env,
AspectKey key,
Aspect aspect,
@Nullable BuildConfigurationValue configuration)
throws InterruptedException, AspectCreationException {
// Determine what toolchains are needed by this target.
UnloadedToolchainContext unloadedToolchainContext = null;
if (configuration != null) {
// Configuration can be null in the case of aspects applied to input files. In this case,
// there are no chances of toolchains being used, so skip it.
try {
unloadedToolchainContext =
(UnloadedToolchainContext)
env.getValueOrThrow(
ToolchainContextKey.key()
.configurationKey(configuration.getKey())
.toolchainTypes(aspect.getDefinition().getToolchainTypes())
.build(),
ToolchainException.class);
} catch (ToolchainException e) {
// TODO(katre): better error handling
throw new AspectCreationException(
e.getMessage(), new LabelCause(key.getLabel(), e.getDetailedExitCode()));
}
}
if (env.valuesMissing()) {
return null;
}
return unloadedToolchainContext;
}
/**
* Returns whether or not to use the new toolchain transition. Checks the global incompatible
* change flag and the aspect's toolchain transition readiness attribute.
*/
// TODO(#10523): Remove this when the migration period for toolchain transitions has ended.
private static boolean shouldUseToolchainTransition(
@Nullable BuildConfigurationValue configuration, AspectDefinition definition) {
// Check whether the global incompatible change flag is set.
if (configuration != null) {
PlatformOptions platformOptions = configuration.getOptions().get(PlatformOptions.class);
if (platformOptions != null && platformOptions.overrideToolchainTransition) {
return true;
}
}
// Check the aspect definition to see if it is ready.
return definition.useToolchainTransition();
}
/**
* Collects {@link AspectKey} dependencies by performing a postorder traversal over {@link
* AspectKey#getBaseKeys}.
*
* <p>The resulting set of {@code orderedKeys} is topologically ordered: each aspect key appears
* after all of its dependencies.
*/
private static void collectAspectKeysInTopologicalOrder(
List<AspectKey> baseKeys, LinkedHashSet<AspectKey> orderedKeys) {
for (AspectKey key : baseKeys) {
if (!orderedKeys.contains(key)) {
collectAspectKeysInTopologicalOrder(key.getBaseKeys(), orderedKeys);
orderedKeys.add(key);
}
}
}
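// Illustrative sketch, not part of the original class: the same postorder pattern written over a
// plain adjacency map, to make the ordering guarantee concrete. With deps = {c: [a, b], b: [a]}
// and baseKeys = [c], the visit order is a, b, c, so every key appears after its dependencies.
// The method and its names are hypothetical and exist only as an example.
private static <K> void collectInTopologicalOrderForIllustration(
    List<K> baseKeys, java.util.Map<K, List<K>> deps, LinkedHashSet<K> orderedKeys) {
  for (K key : baseKeys) {
    if (!orderedKeys.contains(key)) {
      // Visit dependencies first (postorder), then emit the key itself.
      collectInTopologicalOrderForIllustration(
          deps.getOrDefault(key, ImmutableList.of()), deps, orderedKeys);
      orderedKeys.add(key);
    }
  }
}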
/**
* Computes the given aspectKey of an alias-like target, by depending on the corresponding key of
* the next target in the alias chain (if there are more), or the "real" configured target.
*/
@Nullable
private AspectValue createAliasAspect(
Environment env,
BuildConfigurationValue hostConfiguration,
TargetAndConfiguration originalTarget,
Aspect aspect,
AspectKey originalKey,
BuildConfigurationValue aspectConfiguration,
ConfiguredTarget configuredTarget)
throws AspectFunctionException, InterruptedException {
ImmutableList<Label> aliasChain =
configuredTarget.getProvider(AliasProvider.class).getAliasChain();
// Find the next alias in the chain: either the next alias (if there are two) or the name of
// the real configured target.
Label aliasedLabel = aliasChain.size() > 1 ? aliasChain.get(1) : configuredTarget.getLabel();
NestedSetBuilder<Package> transitivePackagesForPackageRootResolution =
storeTransitivePackagesForPackageRootResolution ? NestedSetBuilder.stableOrder() : null;
NestedSetBuilder<Cause> transitiveRootCauses = NestedSetBuilder.stableOrder();
// Compute the Dependency from originalTarget to aliasedLabel
Dependency dep;
try {
UnloadedToolchainContext unloadedToolchainContext =
getUnloadedToolchainContext(env, originalKey, aspect, originalTarget.getConfiguration());
if (env.valuesMissing()) {
return null;
}
// See comment in compute() above for why we pair target with aspectConfiguration here
TargetAndConfiguration originalTargetAndAspectConfiguration =
new TargetAndConfiguration(originalTarget.getTarget(), aspectConfiguration);
// Get the configuration targets that trigger this rule's configurable attributes.
ConfigConditions configConditions =
ConfiguredTargetFunction.getConfigConditions(
env,
originalTargetAndAspectConfiguration,
transitivePackagesForPackageRootResolution,
unloadedToolchainContext == null ? null : unloadedToolchainContext.targetPlatform(),
transitiveRootCauses);
if (configConditions == null) {
// Those targets haven't yet been resolved.
return null;
}
Target aliasedTarget = getTargetFromLabel(env, aliasedLabel);
if (aliasedTarget == null) {
return null;
}
ConfigurationTransition transition =
TransitionResolver.evaluateTransition(
aspectConfiguration,
NoTransition.INSTANCE,
aliasedTarget,
((ConfiguredRuleClassProvider) ruleClassProvider).getTrimmingTransitionFactory());
// Use ConfigurationResolver to apply any configuration transitions on the alias edge.
// This is a shortened/simplified variant of ConfiguredTargetFunction.computeDependencies
// for just the one special attribute we care about here.
DependencyKey depKey =
DependencyKey.builder().setLabel(aliasedLabel).setTransition(transition).build();
DependencyKind depKind =
DependencyKind.AttributeDependencyKind.forRule(
getAttributeContainingAlias(originalTarget.getTarget()));
ConfigurationResolver resolver =
new ConfigurationResolver(
env,
originalTargetAndAspectConfiguration,
hostConfiguration,
configConditions.asProviders());
ImmutableList<Dependency> deps =
resolver.resolveConfiguration(depKind, depKey, env.getListener());
if (deps == null) {
return null;
}
// Actual should resolve to exactly one dependency
Preconditions.checkState(
deps.size() == 1, "Unexpected split in alias %s: %s", originalTarget.getLabel(), deps);
dep = deps.get(0);
} catch (NoSuchPackageException | NoSuchTargetException e) {
throw new AspectFunctionException(e);
} catch (ConfiguredValueCreationException e) {
throw new AspectFunctionException(e);
} catch (AspectCreationException e) {
throw new AspectFunctionException(e);
}
if (!transitiveRootCauses.isEmpty()) {
NestedSet<Cause> causes = transitiveRootCauses.build();
throw new AspectFunctionException(
new AspectCreationException(
"Loading failed",
causes,
ConfiguredTargetFunction.getPrioritizedDetailedExitCode(causes)));
}
// Now that we have a Dependency, we can compute the aliased key and depend on it
AspectKey actualKey = buildAliasAspectKey(originalKey, aliasedLabel, dep);
return createAliasAspect(
env,
originalTarget.getTarget(),
originalKey,
aspect,
actualKey,
transitivePackagesForPackageRootResolution);
}
private static AspectValue createAliasAspect(
Environment env,
Target originalTarget,
AspectKey originalKey,
Aspect aspect,
AspectKey depKey,
@Nullable NestedSetBuilder<Package> transitivePackagesForPackageRootResolution)
throws InterruptedException {
// Compute the AspectValue of the target the alias refers to (which can itself be either an
// alias or a real target)
AspectValue real = (AspectValue) env.getValue(depKey);
if (env.valuesMissing()) {
return null;
}
NestedSet<Package> finalTransitivePackagesForPackageRootResolution = null;
if (transitivePackagesForPackageRootResolution != null) {
finalTransitivePackagesForPackageRootResolution =
transitivePackagesForPackageRootResolution
.addTransitive(real.getTransitivePackagesForPackageRootResolution())
.add(originalTarget.getPackage())
.build();
}
return new AspectValue(
originalKey,
aspect,
originalTarget.getLocation(),
ConfiguredAspect.forAlias(real.getConfiguredAspect()),
finalTransitivePackagesForPackageRootResolution);
}
@Nullable
private static Target getTargetFromLabel(Environment env, Label aliasLabel)
throws InterruptedException, NoSuchPackageException, NoSuchTargetException {
SkyValue val =
env.getValueOrThrow(
PackageValue.key(aliasLabel.getPackageIdentifier()), NoSuchPackageException.class);
if (val == null) {
return null;
}
Package pkg = ((PackageValue) val).getPackage();
return pkg.getTarget(aliasLabel.getName());
}
private static AspectKey buildAliasAspectKey(
AspectKey originalKey, Label aliasLabel, Dependency dep) {
ImmutableList<AspectKey> aliasedBaseKeys =
originalKey.getBaseKeys().stream()
.map(baseKey -> buildAliasAspectKey(baseKey, aliasLabel, dep))
.collect(toImmutableList());
return AspectKeyCreator.createAspectKey(
originalKey.getAspectDescriptor(),
aliasedBaseKeys,
ConfiguredTargetKey.builder()
.setLabel(aliasLabel)
.setConfiguration(dep.getConfiguration())
.build());
}
/**
* Given an alias-like target, returns the attribute containing the "actual", by looking for
* attribute names used in known alias rules (Alias, Bind, LateBoundAlias, XcodeConfigAlias).
*
* <p>Alias and Bind rules use "actual", which will be by far the most common match here. It'll
* likely be rare that aspects need to traverse across other alias-like rules.
*/
// TODO(lberki,kmb): try to avoid this, maybe by recording the attribute name in AliasProvider
private static Attribute getAttributeContainingAlias(Target originalTarget) {
Attribute aliasAttr = null;
for (Attribute attr : originalTarget.getAssociatedRule().getAttributes()) {
switch (attr.getName()) {
case "actual": // alias and bind rules
case ":alias": // LateBoundAlias-derived rules
case ":xcode_config": // xcode_config_alias rule
Preconditions.checkState(
aliasAttr == null,
"Found multiple candidate attributes %s and %s in %s",
aliasAttr,
attr,
originalTarget);
aliasAttr = attr;
break;
default:
break;
}
}
Preconditions.checkState(
aliasAttr != null, "Attribute containing alias not found in %s", originalTarget);
return aliasAttr;
}
@Nullable
private AspectValue createAspect(
Environment env,
AspectKey key,
ImmutableList<Aspect> topologicalAspectPath,
Aspect aspect,
ConfiguredAspectFactory aspectFactory,
ConfiguredTargetAndData associatedTarget,
BuildConfigurationValue configuration,
ConfigConditions configConditions,
ResolvedToolchainContext toolchainContext,
OrderedSetMultimap<DependencyKind, ConfiguredTargetAndData> directDeps,
@Nullable NestedSetBuilder<Package> transitivePackagesForPackageRootResolution)
throws AspectFunctionException, InterruptedException {
// Should be successfully evaluated and cached from the loading phase.
StarlarkBuiltinsValue starlarkBuiltinsValue =
(StarlarkBuiltinsValue) env.getValue(StarlarkBuiltinsValue.key());
if (env.valuesMissing()) {
return null;
}
SkyframeBuildView view = buildViewProvider.getSkyframeBuildView();
StoredEventHandler events = new StoredEventHandler();
CachingAnalysisEnvironment analysisEnvironment =
view.createAnalysisEnvironment(key, events, env, configuration, starlarkBuiltinsValue);
ConfiguredAspect configuredAspect;
if (aspect.getDefinition().applyToGeneratingRules()
&& associatedTarget.getTarget() instanceof OutputFile) {
OutputFile outputFile = (OutputFile) associatedTarget.getTarget();
Label label = outputFile.getGeneratingRule().getLabel();
return createAliasAspect(
env,
associatedTarget.getTarget(),
key,
aspect,
key.withLabel(label),
transitivePackagesForPackageRootResolution);
} else if (AspectResolver.aspectMatchesConfiguredTarget(associatedTarget, aspect)) {
try {
CurrentRuleTracker.beginConfiguredAspect(aspect.getAspectClass());
configuredAspect =
view.getConfiguredTargetFactory()
.createAspect(
analysisEnvironment,
associatedTarget,
topologicalAspectPath,
aspectFactory,
aspect,
directDeps,
configConditions,
toolchainContext,
configuration,
view.getHostConfiguration(),
key);
} catch (MissingDepException e) {
Preconditions.checkState(env.valuesMissing());
return null;
} catch (ActionConflictException e) {
throw new AspectFunctionException(e);
} catch (InvalidExecGroupException e) {
throw new AspectFunctionException(e);
} finally {
CurrentRuleTracker.endConfiguredAspect();
}
} else {
configuredAspect = ConfiguredAspect.forNonapplicableTarget();
}
events.replayOn(env.getListener());
if (events.hasErrors()) {
analysisEnvironment.disable(associatedTarget.getTarget());
String msg = "Analysis of target '" + associatedTarget.getTarget().getLabel() + "' failed";
throw new AspectFunctionException(
new AspectCreationException(msg, key.getLabel(), configuration));
}
Preconditions.checkState(!analysisEnvironment.hasErrors(),
"Analysis environment hasError() but no errors reported");
if (env.valuesMissing()) {
return null;
}
analysisEnvironment.disable(associatedTarget.getTarget());
Preconditions.checkNotNull(configuredAspect);
return new AspectValue(
key,
aspect,
associatedTarget.getTarget().getLocation(),
configuredAspect,
transitivePackagesForPackageRootResolution == null
? null
: transitivePackagesForPackageRootResolution.build());
}
@Override
public String extractTag(SkyKey skyKey) {
AspectKey aspectKey = (AspectKey) skyKey.argument();
return Label.print(aspectKey.getLabel());
}
/** Used to indicate errors during the computation of an {@link AspectValue}. */
public static final class AspectFunctionException extends SkyFunctionException {
public AspectFunctionException(NoSuchThingException e) {
super(e, Transience.PERSISTENT);
}
public AspectFunctionException(AspectCreationException e) {
super(e, Transience.PERSISTENT);
}
public AspectFunctionException(ConfiguredValueCreationException e) {
super(e, Transience.PERSISTENT);
}
public AspectFunctionException(InvalidExecGroupException e) {
super(e, Transience.PERSISTENT);
}
public AspectFunctionException(ActionConflictException cause) {
super(cause, Transience.PERSISTENT);
}
}
}
|
|
/*
* Copyright 2010 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.verifier.missingEquality;
import org.drools.core.base.RuleNameMatchesAgendaFilter;
import org.drools.verifier.TestBaseOld;
import org.drools.verifier.components.LiteralRestriction;
import org.drools.verifier.components.VariableRestriction;
import org.drools.verifier.data.VerifierReport;
import org.drools.verifier.data.VerifierReportFactory;
import org.drools.verifier.report.components.Cause;
import org.drools.verifier.report.components.Severity;
import org.drools.verifier.report.components.VerifierMessage;
import org.drools.verifier.report.components.VerifierMessageBase;
import org.junit.Test;
import org.kie.api.runtime.KieSession;
import java.util.*;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
public class MissingEqualityTest extends TestBaseOld {
@Test
public void testMissingEqualityInLiteralRestrictions() throws Exception {
KieSession session = getStatelessKieSession(this.getClass().getResourceAsStream("MissingEquality.drl"));
VerifierReport result = VerifierReportFactory.newVerifierReport();
Collection<? extends Object> testData = getTestData(this.getClass()
.getResourceAsStream("MissingEqualityTest.drl"), result
.getVerifierData());
session.setGlobal("result", result);
for (Object o : testData) {
session.insert(o);
}
session.fireAllRules(new RuleNameMatchesAgendaFilter("Missing restriction in LiteralRestrictions"));
Iterator<VerifierMessageBase> iter = result.getBySeverity(
Severity.WARNING).iterator();
Collection<String> ruleNames = new ArrayList<String>();
while (iter.hasNext()) {
Object o = iter.next();
if (o instanceof VerifierMessage) {
Cause cause = ((VerifierMessage) o).getFaulty();
String name = ((LiteralRestriction) cause).getRuleName();
ruleNames.add(name);
}
}
assertTrue(ruleNames.remove("Missing equality 1"));
assertTrue(ruleNames.remove("Missing equality 2"));
if (!ruleNames.isEmpty()) {
for (String string : ruleNames) {
fail("Rule " + string + " caused an error.");
}
}
}
@Test
public void testMissingEqualityInVariableRestrictions() throws Exception {
KieSession session = getStatelessKieSession(this.getClass().getResourceAsStream("MissingEquality.drl"));
VerifierReport result = VerifierReportFactory.newVerifierReport();
Collection<? extends Object> testData = getTestData(this.getClass()
.getResourceAsStream("MissingEqualityTest.drl"), result
.getVerifierData());
session.setGlobal("result", result);
// for (Object o : testData) {
// if (o instanceof VariableRestriction) {
// System.out.println(o);
// VariableRestriction variableRestriction = (VariableRestriction) o;
// System.out.println(variableRestriction.getOperator());
// }
// }
for (Object o : testData) {
session.insert(o);
}
session.fireAllRules(new RuleNameMatchesAgendaFilter("Missing restriction in VariableRestrictions, equal operator"));
Iterator<VerifierMessageBase> iter = result.getBySeverity(
Severity.WARNING).iterator();
Set<String> ruleNames = new HashSet<String>();
while (iter.hasNext()) {
Object o = iter.next();
if (o instanceof VerifierMessage) {
Cause cause = ((VerifierMessage) o).getFaulty();
String name = ((VariableRestriction) cause).getRuleName();
ruleNames.add(name);
}
}
assertTrue(ruleNames.remove("Missing equality 5"));
if (!ruleNames.isEmpty()) {
for (String string : ruleNames) {
fail("Rule " + string + " caused an error.");
}
}
}
@Test
public void testMissingEqualityInVariableRestrictions2() throws Exception {
KieSession session = getStatelessKieSession(this.getClass().getResourceAsStream("MissingEquality.drl"));
VerifierReport result = VerifierReportFactory.newVerifierReport();
Collection<? extends Object> testData = getTestData(this.getClass()
.getResourceAsStream("MissingEqualityTest.drl"), result
.getVerifierData());
session.setGlobal("result", result);
// for (Object o : testData) {
// if (o instanceof VariableRestriction) {
// System.out.println(o);
// VariableRestriction variableRestriction = (VariableRestriction) o;
// System.out.println(variableRestriction.getOperator());
// }
// }
for (Object o : testData) {
session.insert(o);
}
session.fireAllRules(new RuleNameMatchesAgendaFilter("Missing restriction in VariableRestrictions, unequal operator"));
Iterator<VerifierMessageBase> iter = result.getBySeverity(
Severity.WARNING).iterator();
Set<String> ruleNames = new HashSet<String>();
while (iter.hasNext()) {
Object o = iter.next();
if (o instanceof VerifierMessage) {
Cause cause = ((VerifierMessage) o).getFaulty();
String name = ((VariableRestriction) cause).getRuleName();
ruleNames.add(name);
}
}
assertTrue(ruleNames.remove("Missing equality 7"));
if (!ruleNames.isEmpty()) {
for (String string : ruleNames) {
fail("Rule " + string + " caused an error.");
}
}
}
@Test
public void testMissingEqualityInVariableRestrictions3() throws Exception {
KieSession session = getStatelessKieSession(this.getClass().getResourceAsStream("MissingEquality.drl"));
VerifierReport result = VerifierReportFactory.newVerifierReport();
Collection<? extends Object> testData = getTestData(this.getClass()
.getResourceAsStream("MissingEqualityTest.drl"), result
.getVerifierData());
session.setGlobal("result", result);
// for (Object o : testData) {
// if (o instanceof VariableRestriction) {
// System.out.println(o);
// VariableRestriction variableRestriction = (VariableRestriction) o;
// System.out.println(variableRestriction.getOperator());
// }
// }
for (Object o : testData) {
session.insert(o);
}
session.fireAllRules(new RuleNameMatchesAgendaFilter("Missing restriction in VariableRestrictions, custom operator"));
Iterator<VerifierMessageBase> iter = result.getBySeverity(
Severity.WARNING).iterator();
Set<String> ruleNames = new HashSet<String>();
while (iter.hasNext()) {
Object o = iter.next();
if (o instanceof VerifierMessage) {
Cause cause = ((VerifierMessage) o).getFaulty();
String name = ((VariableRestriction) cause).getRuleName();
ruleNames.add(name);
}
}
assertTrue(ruleNames.remove("Missing equality 3"));
assertTrue(ruleNames.remove("Missing equality 4"));
assertTrue(ruleNames.remove("Missing equality 6"));
if (!ruleNames.isEmpty()) {
for (String string : ruleNames) {
fail("Rule " + string + " caused an error.");
}
}
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tomcat.websocket.server;
import java.io.IOException;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import javax.servlet.ServletContextEvent;
import javax.websocket.CloseReason;
import javax.websocket.CloseReason.CloseCode;
import javax.websocket.CloseReason.CloseCodes;
import javax.websocket.DeploymentException;
import javax.websocket.OnClose;
import javax.websocket.OnError;
import javax.websocket.OnMessage;
import javax.websocket.OnOpen;
import javax.websocket.Session;
import javax.websocket.server.ServerContainer;
import javax.websocket.server.ServerEndpointConfig;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.apache.catalina.Context;
import org.apache.catalina.LifecycleException;
import org.apache.catalina.servlets.DefaultServlet;
import org.apache.catalina.startup.Tomcat;
import org.apache.juli.logging.Log;
import org.apache.juli.logging.LogFactory;
import org.apache.tomcat.websocket.WebSocketBaseTest;
/**
* Test the behavior of closing websockets under various conditions.
*/
public class TestClose extends WebSocketBaseTest {
private static Log log = LogFactory.getLog(TestClose.class);
// These are static because it is simpler than trying to inject them into
// the endpoint
private static volatile Events events;
public static class Events {
// Used to block in the @OnMessage
public final CountDownLatch onMessageWait = new CountDownLatch(1);
// Used to check which methods of a server endpoint were called
public final CountDownLatch onErrorCalled = new CountDownLatch(1);
public final CountDownLatch onMessageCalled = new CountDownLatch(1);
public final CountDownLatch onCloseCalled = new CountDownLatch(1);
// Parameter of an @OnClose call
public volatile CloseReason closeReason = null;
// Parameter of an @OnError call
public volatile Throwable onErrorThrowable = null;
// This is set to true for tests where the @OnMessage should send a message
public volatile boolean onMessageSends = false;
}
private static void awaitLatch(CountDownLatch latch, String failMessage) {
try {
if (!latch.await(5000, TimeUnit.MILLISECONDS)) {
Assert.fail(failMessage);
}
} catch (InterruptedException e) {
// Won't happen
throw new RuntimeException(e);
}
}
public static void awaitOnClose(CloseCode... codes) {
Set<CloseCode> set = new HashSet<CloseCode>();
for (CloseCode code : codes) {
set.add(code);
}
awaitOnClose(set);
}
public static void awaitOnClose(Set<CloseCode> codes) {
awaitLatch(events.onCloseCalled, "onClose not called");
CloseCode received = events.closeReason.getCloseCode();
Assert.assertTrue("Rx: " + received, codes.contains(received));
}
public static void awaitOnError(Class<? extends Throwable> exceptionClazz) {
awaitLatch(events.onErrorCalled, "onError not called");
Assert.assertTrue(events.onErrorThrowable.getClass().getName(),
exceptionClazz.isAssignableFrom(events.onErrorThrowable.getClass()));
}
@Override
@Before
public void setUp() throws Exception {
super.setUp();
events = new Events();
}
@Test
public void testTcpClose() throws Exception {
startServer(TestEndpointConfig.class);
TesterWsCloseClient client = new TesterWsCloseClient("localhost", getPort());
client.httpUpgrade(BaseEndpointConfig.PATH);
client.closeSocket();
awaitOnClose(CloseCodes.CLOSED_ABNORMALLY);
}
@Test
public void testTcpReset() throws Exception {
startServer(TestEndpointConfig.class);
TesterWsCloseClient client = new TesterWsCloseClient("localhost", getPort());
client.httpUpgrade(BaseEndpointConfig.PATH);
client.forceCloseSocket();
// TODO: I'm not entirely sure when onError should be called
awaitOnError(IOException.class);
awaitOnClose(CloseCodes.CLOSED_ABNORMALLY);
}
@Test
public void testWsCloseThenTcpClose() throws Exception {
startServer(TestEndpointConfig.class);
TesterWsCloseClient client = new TesterWsCloseClient("localhost", getPort());
client.httpUpgrade(BaseEndpointConfig.PATH);
client.sendCloseFrame(CloseCodes.GOING_AWAY);
client.closeSocket();
awaitOnClose(CloseCodes.GOING_AWAY);
}
@Test
public void testWsCloseThenTcpReset() throws Exception {
startServer(TestEndpointConfig.class);
TesterWsCloseClient client = new TesterWsCloseClient("localhost", getPort());
client.httpUpgrade(BaseEndpointConfig.PATH);
client.sendCloseFrame(CloseCodes.GOING_AWAY);
client.forceCloseSocket();
// WebSocket 1.1, section 2.1.5 requires this to be CLOSED_ABNORMALLY if
// the container initiates the close and the close code from the client
// if the client initiates it. When the client resets the TCP connection
// after sending the close, different operating systems react different
// ways. Some present the close message then drop the connection, some
// just drop the connection. Therefore, this test has to handle both
// close codes.
awaitOnClose(CloseCodes.CLOSED_ABNORMALLY, CloseCodes.GOING_AWAY);
}
@Test
public void testTcpCloseInOnMessage() throws Exception {
startServer(TestEndpointConfig.class);
TesterWsCloseClient client = new TesterWsCloseClient("localhost", getPort());
client.httpUpgrade(BaseEndpointConfig.PATH);
client.sendMessage("Test");
awaitLatch(events.onMessageCalled, "onMessage not called");
client.closeSocket();
events.onMessageWait.countDown();
awaitOnClose(CloseCodes.CLOSED_ABNORMALLY);
}
@Test
public void testTcpResetInOnMessage() throws Exception {
startServer(TestEndpointConfig.class);
TesterWsCloseClient client = new TesterWsCloseClient("localhost", getPort());
client.httpUpgrade(BaseEndpointConfig.PATH);
client.sendMessage("Test");
awaitLatch(events.onMessageCalled, "onMessage not called");
client.forceCloseSocket();
events.onMessageWait.countDown();
awaitOnError(IOException.class);
awaitOnClose(CloseCodes.CLOSED_ABNORMALLY);
}
@Test
public void testTcpCloseWhenOnMessageSends() throws Exception {
events.onMessageSends = true;
testTcpCloseInOnMessage();
}
@Test
public void testTcpResetWhenOnMessageSends() throws Exception {
events.onMessageSends = true;
testTcpResetInOnMessage();
}
@Test
public void testWsCloseThenTcpCloseWhenOnMessageSends() throws Exception {
events.onMessageSends = true;
startServer(TestEndpointConfig.class);
TesterWsCloseClient client = new TesterWsCloseClient("localhost", getPort());
client.httpUpgrade(BaseEndpointConfig.PATH);
client.sendMessage("Test");
awaitLatch(events.onMessageCalled, "onMessage not called");
client.sendCloseFrame(CloseCodes.NORMAL_CLOSURE);
client.closeSocket();
events.onMessageWait.countDown();
// BIO will see close from client before it sees the TCP close
awaitOnClose(CloseCodes.CLOSED_ABNORMALLY, CloseCodes.NORMAL_CLOSURE);
}
@Test
public void testWsCloseThenTcpResetWhenOnMessageSends() throws Exception {
events.onMessageSends = true;
startServer(TestEndpointConfig.class);
TesterWsCloseClient client = new TesterWsCloseClient("localhost", getPort());
client.httpUpgrade(BaseEndpointConfig.PATH);
client.sendMessage("Test");
awaitLatch(events.onMessageCalled, "onMessage not called");
client.sendCloseFrame(CloseCodes.NORMAL_CLOSURE);
client.forceCloseSocket();
events.onMessageWait.countDown();
// APR will see close from client before it sees the TCP reset
awaitOnClose(CloseCodes.CLOSED_ABNORMALLY, CloseCodes.NORMAL_CLOSURE);
}
public static class TestEndpoint {
@OnOpen
public void onOpen() {
log.info("Session opened");
}
@OnMessage
public void onMessage(Session session, String message) {
log.info("Message received: " + message);
events.onMessageCalled.countDown();
awaitLatch(events.onMessageWait, "onMessageWait not triggered");
if (events.onMessageSends) {
try {
int count = 0;
// The latches above are meant to ensure the correct
// sequence of events but in some cases, particularly with
// APR, there is a short delay between the client closing /
// resetting the connection and the server recognising that
// fact. This loop tries to ensure that it lasts much longer
// than that delay so any close / reset from the client
// triggers an error here.
while (count < 10) {
count++;
session.getBasicRemote().sendText("Test reply");
Thread.sleep(500);
}
} catch (IOException e) {
// Expected to fail
} catch (InterruptedException e) {
// Expected to fail
}
}
}
@OnError
public void onError(Throwable t) {
log.info("onError", t);
events.onErrorThrowable = t;
events.onErrorCalled.countDown();
}
@OnClose
public void onClose(CloseReason cr) {
log.info("onClose: " + cr);
events.closeReason = cr;
events.onCloseCalled.countDown();
}
}
public static class TestEndpointConfig extends BaseEndpointConfig {
@Override
protected Class<?> getEndpointClass() {
return TestEndpoint.class;
}
}
private Tomcat startServer(
final Class<? extends WsContextListener> configClass)
throws LifecycleException {
Tomcat tomcat = getTomcatInstance();
// No file system docBase required
Context ctx = tomcat.addContext("", null);
ctx.addApplicationListener(configClass.getName());
Tomcat.addServlet(ctx, "default", new DefaultServlet());
ctx.addServletMapping("/", "default");
tomcat.start();
return tomcat;
}
public abstract static class BaseEndpointConfig extends WsContextListener {
public static final String PATH = "/test";
protected abstract Class<?> getEndpointClass();
@Override
public void contextInitialized(ServletContextEvent sce) {
super.contextInitialized(sce);
ServerContainer sc = (ServerContainer) sce
.getServletContext()
.getAttribute(
Constants.SERVER_CONTAINER_SERVLET_CONTEXT_ATTRIBUTE);
ServerEndpointConfig sec = ServerEndpointConfig.Builder.create(
getEndpointClass(), PATH).build();
try {
sc.addEndpoint(sec);
} catch (DeploymentException e) {
throw new RuntimeException(e);
}
}
}
}
|
|
package org.apache.lucene.codecs.memory;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.codecs.DocValuesProducer;
import org.apache.lucene.index.BinaryDocValues;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.DocsAndPositionsEnum;
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.FieldInfos;
import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.index.SegmentReadState;
import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.store.ByteArrayDataInput;
import org.apache.lucene.store.ChecksumIndexInput;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.FixedBitSet;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.IntsRef;
import org.apache.lucene.util.IntsRefBuilder;
import org.apache.lucene.util.LongValues;
import org.apache.lucene.util.PagedBytes;
import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.fst.BytesRefFSTEnum;
import org.apache.lucene.util.fst.BytesRefFSTEnum.InputOutput;
import org.apache.lucene.util.fst.FST;
import org.apache.lucene.util.fst.FST.Arc;
import org.apache.lucene.util.fst.FST.BytesReader;
import org.apache.lucene.util.fst.PositiveIntOutputs;
import org.apache.lucene.util.fst.Util;
import org.apache.lucene.util.packed.BlockPackedReader;
import org.apache.lucene.util.packed.MonotonicBlockPackedReader;
import org.apache.lucene.util.packed.PackedInts;
/**
* Reader for {@link MemoryDocValuesFormat}
*/
class MemoryDocValuesProducer extends DocValuesProducer {
// metadata maps (just file pointers and minimal stuff)
private final Map<Integer,NumericEntry> numerics = new HashMap<>();
private final Map<Integer,BinaryEntry> binaries = new HashMap<>();
private final Map<Integer,FSTEntry> fsts = new HashMap<>();
private final Map<Integer,SortedSetEntry> sortedSets = new HashMap<>();
private final Map<Integer,SortedNumericEntry> sortedNumerics = new HashMap<>();
private final IndexInput data;
// ram instances we have already loaded
private final Map<Integer,NumericDocValues> numericInstances =
new HashMap<>();
private final Map<Integer,BytesAndAddresses> pagedBytesInstances =
new HashMap<>();
private final Map<Integer,FST<Long>> fstInstances =
new HashMap<>();
private final Map<Integer,Bits> docsWithFieldInstances = new HashMap<>();
private final Map<Integer,MonotonicBlockPackedReader> addresses = new HashMap<>();
private final int maxDoc;
private final AtomicLong ramBytesUsed;
private final int version;
static final byte NUMBER = 0;
static final byte BYTES = 1;
static final byte FST = 2;
static final byte SORTED_SET = 4;
static final byte SORTED_SET_SINGLETON = 5;
static final byte SORTED_NUMERIC = 6;
static final byte SORTED_NUMERIC_SINGLETON = 7;
static final int BLOCK_SIZE = 4096;
static final byte DELTA_COMPRESSED = 0;
static final byte TABLE_COMPRESSED = 1;
static final byte BLOCK_COMPRESSED = 2;
static final byte GCD_COMPRESSED = 3;
static final int VERSION_START = 3;
static final int VERSION_CURRENT = VERSION_START;
MemoryDocValuesProducer(SegmentReadState state, String dataCodec, String dataExtension, String metaCodec, String metaExtension) throws IOException {
maxDoc = state.segmentInfo.getDocCount();
String metaName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, metaExtension);
// read in the entries from the metadata file.
ChecksumIndexInput in = state.directory.openChecksumInput(metaName, state.context);
boolean success = false;
try {
version = CodecUtil.checkHeader(in, metaCodec,
VERSION_START,
VERSION_CURRENT);
readFields(in, state.fieldInfos);
CodecUtil.checkFooter(in);
ramBytesUsed = new AtomicLong(RamUsageEstimator.shallowSizeOfInstance(getClass()));
success = true;
} finally {
if (success) {
IOUtils.close(in);
} else {
IOUtils.closeWhileHandlingException(in);
}
}
String dataName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, dataExtension);
this.data = state.directory.openInput(dataName, state.context);
success = false;
try {
final int version2 = CodecUtil.checkHeader(data, dataCodec,
VERSION_START,
VERSION_CURRENT);
if (version != version2) {
throw new CorruptIndexException("Format versions mismatch");
}
// NOTE: data file is too costly to verify checksum against all the bytes on open,
// but for now we at least verify proper structure of the checksum footer: which looks
// for FOOTER_MAGIC + algorithmID. This is cheap and can detect some forms of corruption
// such as file truncation.
CodecUtil.retrieveChecksum(data);
success = true;
} finally {
if (!success) {
IOUtils.closeWhileHandlingException(this.data);
}
}
}
private NumericEntry readNumericEntry(IndexInput meta) throws IOException {
NumericEntry entry = new NumericEntry();
entry.offset = meta.readLong();
entry.missingOffset = meta.readLong();
if (entry.missingOffset != -1) {
entry.missingBytes = meta.readLong();
} else {
entry.missingBytes = 0;
}
entry.format = meta.readByte();
switch(entry.format) {
case DELTA_COMPRESSED:
case TABLE_COMPRESSED:
case BLOCK_COMPRESSED:
case GCD_COMPRESSED:
break;
default:
throw new CorruptIndexException("Unknown format: " + entry.format + ", input=" + meta);
}
entry.packedIntsVersion = meta.readVInt();
entry.count = meta.readLong();
return entry;
}
private BinaryEntry readBinaryEntry(IndexInput meta) throws IOException {
BinaryEntry entry = new BinaryEntry();
entry.offset = meta.readLong();
entry.numBytes = meta.readLong();
entry.missingOffset = meta.readLong();
if (entry.missingOffset != -1) {
entry.missingBytes = meta.readLong();
} else {
entry.missingBytes = 0;
}
entry.minLength = meta.readVInt();
entry.maxLength = meta.readVInt();
if (entry.minLength != entry.maxLength) {
entry.packedIntsVersion = meta.readVInt();
entry.blockSize = meta.readVInt();
}
return entry;
}
private FSTEntry readFSTEntry(IndexInput meta) throws IOException {
FSTEntry entry = new FSTEntry();
entry.offset = meta.readLong();
entry.numOrds = meta.readVLong();
return entry;
}
private void readFields(IndexInput meta, FieldInfos infos) throws IOException {
int fieldNumber = meta.readVInt();
while (fieldNumber != -1) {
int fieldType = meta.readByte();
if (fieldType == NUMBER) {
numerics.put(fieldNumber, readNumericEntry(meta));
} else if (fieldType == BYTES) {
binaries.put(fieldNumber, readBinaryEntry(meta));
} else if (fieldType == FST) {
fsts.put(fieldNumber,readFSTEntry(meta));
} else if (fieldType == SORTED_SET) {
SortedSetEntry entry = new SortedSetEntry();
entry.singleton = false;
sortedSets.put(fieldNumber, entry);
} else if (fieldType == SORTED_SET_SINGLETON) {
SortedSetEntry entry = new SortedSetEntry();
entry.singleton = true;
sortedSets.put(fieldNumber, entry);
} else if (fieldType == SORTED_NUMERIC) {
SortedNumericEntry entry = new SortedNumericEntry();
entry.singleton = false;
entry.packedIntsVersion = meta.readVInt();
entry.blockSize = meta.readVInt();
entry.addressOffset = meta.readLong();
entry.valueCount = meta.readLong();
sortedNumerics.put(fieldNumber, entry);
} else if (fieldType == SORTED_NUMERIC_SINGLETON) {
SortedNumericEntry entry = new SortedNumericEntry();
entry.singleton = true;
sortedNumerics.put(fieldNumber, entry);
} else {
throw new CorruptIndexException("invalid entry type: " + fieldType + ", input=" + meta);
}
fieldNumber = meta.readVInt();
}
}
@Override
public synchronized NumericDocValues getNumeric(FieldInfo field) throws IOException {
NumericDocValues instance = numericInstances.get(field.number);
if (instance == null) {
instance = loadNumeric(field);
numericInstances.put(field.number, instance);
}
return instance;
}
@Override
public long ramBytesUsed() {
return ramBytesUsed.get();
}
@Override
public void checkIntegrity() throws IOException {
CodecUtil.checksumEntireFile(data);
}
private NumericDocValues loadNumeric(FieldInfo field) throws IOException {
NumericEntry entry = numerics.get(field.number);
data.seek(entry.offset + entry.missingBytes);
switch (entry.format) {
case TABLE_COMPRESSED:
int size = data.readVInt();
if (size > 256) {
throw new CorruptIndexException("TABLE_COMPRESSED cannot have more than 256 distinct values, input=" + data);
}
final long[] decode = new long[size];
for (int i = 0; i < decode.length; i++) {
decode[i] = data.readLong();
}
final int formatID = data.readVInt();
final int bitsPerValue = data.readVInt();
final PackedInts.Reader ordsReader = PackedInts.getReaderNoHeader(data, PackedInts.Format.byId(formatID), entry.packedIntsVersion, (int)entry.count, bitsPerValue);
ramBytesUsed.addAndGet(RamUsageEstimator.sizeOf(decode) + ordsReader.ramBytesUsed());
return new NumericDocValues() {
@Override
public long get(int docID) {
return decode[(int)ordsReader.get(docID)];
}
};
case DELTA_COMPRESSED:
final long minDelta = data.readLong();
final int formatIDDelta = data.readVInt();
final int bitsPerValueDelta = data.readVInt();
final PackedInts.Reader deltaReader = PackedInts.getReaderNoHeader(data, PackedInts.Format.byId(formatIDDelta), entry.packedIntsVersion, (int)entry.count, bitsPerValueDelta);
ramBytesUsed.addAndGet(deltaReader.ramBytesUsed());
return new NumericDocValues() {
@Override
public long get(int docID) {
return minDelta + deltaReader.get(docID);
}
};
case BLOCK_COMPRESSED:
final int blockSize = data.readVInt();
final BlockPackedReader reader = new BlockPackedReader(data, entry.packedIntsVersion, blockSize, entry.count, false);
ramBytesUsed.addAndGet(reader.ramBytesUsed());
return reader;
case GCD_COMPRESSED:
final long min = data.readLong();
final long mult = data.readLong();
final int formatIDGCD = data.readVInt();
final int bitsPerValueGCD = data.readVInt();
final PackedInts.Reader quotientReader = PackedInts.getReaderNoHeader(data, PackedInts.Format.byId(formatIDGCD), entry.packedIntsVersion, (int)entry.count, bitsPerValueGCD);
ramBytesUsed.addAndGet(quotientReader.ramBytesUsed());
return new NumericDocValues() {
@Override
public long get(int docID) {
return min + mult * quotientReader.get(docID);
}
};
default:
throw new AssertionError();
}
}
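// Illustrative sketch, not part of the original codec: the reconstruction formulas used above,
// written out on their own. For GCD_COMPRESSED, stored values {100, 250, 400} become min = 100,
// mult = gcd(150, 300) = 150 and packed quotients {0, 1, 2}; DELTA_COMPRESSED stores minDelta plus
// one packed delta per document. The helper and its names are hypothetical, for this example only.
private static long decodePackedValueForIllustration(byte format, long base, long mult, long packed) {
  switch (format) {
    case DELTA_COMPRESSED:
      return base + packed;        // minDelta + delta
    case GCD_COMPRESSED:
      return base + mult * packed; // min + mult * quotient
    default:
      throw new AssertionError("only DELTA_COMPRESSED and GCD_COMPRESSED are illustrated here");
  }
}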
@Override
public BinaryDocValues getBinary(FieldInfo field) throws IOException {
BinaryEntry entry = binaries.get(field.number);
BytesAndAddresses instance;
synchronized (this) {
instance = pagedBytesInstances.get(field.number);
if (instance == null) {
instance = loadBinary(field);
pagedBytesInstances.put(field.number, instance);
}
}
final PagedBytes.Reader bytesReader = instance.reader;
final MonotonicBlockPackedReader addresses = instance.addresses;
if (addresses == null) {
assert entry.minLength == entry.maxLength;
final int fixedLength = entry.minLength;
return new BinaryDocValues() {
final BytesRef term = new BytesRef();
@Override
public BytesRef get(int docID) {
bytesReader.fillSlice(term, fixedLength * (long)docID, fixedLength);
return term;
}
};
} else {
return new BinaryDocValues() {
final BytesRef term = new BytesRef();
@Override
public BytesRef get(int docID) {
long startAddress = docID == 0 ? 0 : addresses.get(docID-1);
long endAddress = addresses.get(docID);
bytesReader.fillSlice(term, startAddress, (int) (endAddress - startAddress));
return term;
}
};
}
}
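// Illustrative sketch, not part of the original codec: how the monotonic addresses above delimit
// variable-length values. With concatenated bytes "foobarbaz" and addresses {3, 6, 9}, doc 0 owns
// [0, 3) = "foo", doc 1 owns [3, 6) = "bar" and doc 2 owns [6, 9) = "baz". The helper is
// hypothetical and exists only for this example.
private static long[] binarySliceBoundsForIllustration(MonotonicBlockPackedReader addresses, int docID) {
  long start = docID == 0 ? 0 : addresses.get(docID - 1);
  long end = addresses.get(docID);
  return new long[] {start, end};
}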
private BytesAndAddresses loadBinary(FieldInfo field) throws IOException {
BytesAndAddresses bytesAndAddresses = new BytesAndAddresses();
BinaryEntry entry = binaries.get(field.number);
data.seek(entry.offset);
PagedBytes bytes = new PagedBytes(16);
bytes.copy(data, entry.numBytes);
bytesAndAddresses.reader = bytes.freeze(true);
ramBytesUsed.addAndGet(bytesAndAddresses.reader.ramBytesUsed());
if (entry.minLength != entry.maxLength) {
data.seek(data.getFilePointer() + entry.missingBytes);
bytesAndAddresses.addresses = MonotonicBlockPackedReader.of(data, entry.packedIntsVersion, entry.blockSize, maxDoc, false);
ramBytesUsed.addAndGet(bytesAndAddresses.addresses.ramBytesUsed());
}
return bytesAndAddresses;
}
@Override
public SortedDocValues getSorted(FieldInfo field) throws IOException {
final FSTEntry entry = fsts.get(field.number);
if (entry.numOrds == 0) {
return DocValues.emptySorted();
}
FST<Long> instance;
synchronized(this) {
instance = fstInstances.get(field.number);
if (instance == null) {
data.seek(entry.offset);
instance = new FST<>(data, PositiveIntOutputs.getSingleton());
ramBytesUsed.addAndGet(instance.ramBytesUsed());
fstInstances.put(field.number, instance);
}
}
final NumericDocValues docToOrd = getNumeric(field);
final FST<Long> fst = instance;
// per-thread resources
final BytesReader in = fst.getBytesReader();
final Arc<Long> firstArc = new Arc<>();
final Arc<Long> scratchArc = new Arc<>();
final IntsRefBuilder scratchInts = new IntsRefBuilder();
final BytesRefFSTEnum<Long> fstEnum = new BytesRefFSTEnum<>(fst);
return new SortedDocValues() {
final BytesRefBuilder term = new BytesRefBuilder();
@Override
public int getOrd(int docID) {
return (int) docToOrd.get(docID);
}
@Override
public BytesRef lookupOrd(int ord) {
try {
in.setPosition(0);
fst.getFirstArc(firstArc);
IntsRef output = Util.getByOutput(fst, ord, in, firstArc, scratchArc, scratchInts);
return Util.toBytesRef(output, term);
} catch (IOException bogus) {
throw new RuntimeException(bogus);
}
}
@Override
public int lookupTerm(BytesRef key) {
try {
InputOutput<Long> o = fstEnum.seekCeil(key);
if (o == null) {
return -getValueCount()-1;
} else if (o.input.equals(key)) {
return o.output.intValue();
} else {
return (int) -o.output-1;
}
} catch (IOException bogus) {
throw new RuntimeException(bogus);
}
}
@Override
public int getValueCount() {
return (int)entry.numOrds;
}
@Override
public TermsEnum termsEnum() {
return new FSTTermsEnum(fst);
}
};
}
@Override
public SortedNumericDocValues getSortedNumeric(FieldInfo field) throws IOException {
SortedNumericEntry entry = sortedNumerics.get(field.number);
if (entry.singleton) {
NumericDocValues values = getNumeric(field);
NumericEntry ne = numerics.get(field.number);
Bits docsWithField = getMissingBits(field.number, ne.missingOffset, ne.missingBytes);
return DocValues.singleton(values, docsWithField);
} else {
final NumericDocValues values = getNumeric(field);
final MonotonicBlockPackedReader addr;
synchronized (this) {
MonotonicBlockPackedReader res = addresses.get(field.number);
if (res == null) {
data.seek(entry.addressOffset);
res = MonotonicBlockPackedReader.of(data, entry.packedIntsVersion, entry.blockSize, entry.valueCount, false);
addresses.put(field.number, res);
}
addr = res;
}
if (values instanceof LongValues) {
// probably not the greatest codec choice for this situation, but we support it
final LongValues longValues = (LongValues) values;
return new SortedNumericDocValues() {
long startOffset;
long endOffset;
@Override
public void setDocument(int doc) {
startOffset = addr.get(doc);
endOffset = addr.get(doc + 1);
}
@Override
public long valueAt(int index) {
return longValues.get(startOffset + index);
}
@Override
public int count() {
return (int) (endOffset - startOffset);
}
};
} else {
return new SortedNumericDocValues() {
int startOffset;
int endOffset;
@Override
public void setDocument(int doc) {
startOffset = (int) addr.get(doc);
endOffset = (int) addr.get(doc+1);
}
@Override
public long valueAt(int index) {
return values.get(startOffset + index);
}
@Override
public int count() {
return (endOffset - startOffset);
}
};
}
}
}
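// Illustrative sketch, not part of the original codec: the per-document ranges used by
// getSortedNumeric above, where doc N owns the values in [addr.get(N), addr.get(N + 1)). With
// addresses {0, 2, 2, 5}, doc 0 owns two values, doc 1 owns none and doc 2 owns three; count()
// is simply the difference of the two bounds. The helper is hypothetical, for this example only.
private static int sortedNumericCountForIllustration(MonotonicBlockPackedReader addr, int doc) {
  return (int) (addr.get(doc + 1) - addr.get(doc));
}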
@Override
public SortedSetDocValues getSortedSet(FieldInfo field) throws IOException {
SortedSetEntry sortedSetEntry = sortedSets.get(field.number);
if (sortedSetEntry.singleton) {
return DocValues.singleton(getSorted(field));
}
final FSTEntry entry = fsts.get(field.number);
if (entry.numOrds == 0) {
return DocValues.emptySortedSet(); // empty FST!
}
FST<Long> instance;
synchronized(this) {
instance = fstInstances.get(field.number);
if (instance == null) {
data.seek(entry.offset);
instance = new FST<>(data, PositiveIntOutputs.getSingleton());
ramBytesUsed.addAndGet(instance.ramBytesUsed());
fstInstances.put(field.number, instance);
}
}
final BinaryDocValues docToOrds = getBinary(field);
final FST<Long> fst = instance;
// per-thread resources
final BytesReader in = fst.getBytesReader();
final Arc<Long> firstArc = new Arc<>();
final Arc<Long> scratchArc = new Arc<>();
final IntsRefBuilder scratchInts = new IntsRefBuilder();
final BytesRefFSTEnum<Long> fstEnum = new BytesRefFSTEnum<>(fst);
final ByteArrayDataInput input = new ByteArrayDataInput();
return new SortedSetDocValues() {
final BytesRefBuilder term = new BytesRefBuilder();
BytesRef ref;
long currentOrd;
@Override
public long nextOrd() {
if (input.eof()) {
return NO_MORE_ORDS;
} else {
currentOrd += input.readVLong();
return currentOrd;
}
}
@Override
public void setDocument(int docID) {
ref = docToOrds.get(docID);
input.reset(ref.bytes, ref.offset, ref.length);
currentOrd = 0;
}
@Override
public BytesRef lookupOrd(long ord) {
try {
in.setPosition(0);
fst.getFirstArc(firstArc);
IntsRef output = Util.getByOutput(fst, ord, in, firstArc, scratchArc, scratchInts);
return Util.toBytesRef(output, term);
} catch (IOException bogus) {
throw new RuntimeException(bogus);
}
}
@Override
public long lookupTerm(BytesRef key) {
try {
InputOutput<Long> o = fstEnum.seekCeil(key);
if (o == null) {
return -getValueCount()-1;
} else if (o.input.equals(key)) {
return o.output.intValue();
} else {
return -o.output-1;
}
} catch (IOException bogus) {
throw new RuntimeException(bogus);
}
}
@Override
public long getValueCount() {
return entry.numOrds;
}
@Override
public TermsEnum termsEnum() {
return new FSTTermsEnum(fst);
}
};
}
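// Illustrative sketch, not part of the original codec: the ord stream consumed by nextOrd() above
// is delta-encoded. For per-document ords {3, 7, 42} the binary blob holds the vLongs 3, 4, 35,
// and the running sum below reproduces 3, 7, 42. The helper is hypothetical, for this example only.
private static long[] decodeOrdDeltasForIllustration(long[] storedDeltas) {
  long[] ords = new long[storedDeltas.length];
  long currentOrd = 0;
  for (int i = 0; i < storedDeltas.length; i++) {
    currentOrd += storedDeltas[i];
    ords[i] = currentOrd;
  }
  return ords;
}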
private Bits getMissingBits(int fieldNumber, final long offset, final long length) throws IOException {
if (offset == -1) {
return new Bits.MatchAllBits(maxDoc);
} else {
Bits instance;
synchronized(this) {
instance = docsWithFieldInstances.get(fieldNumber);
if (instance == null) {
IndexInput data = this.data.clone();
data.seek(offset);
assert length % 8 == 0;
long[] bits = new long[(int) length >> 3];
for (int i = 0; i < bits.length; i++) {
bits[i] = data.readLong();
}
instance = new FixedBitSet(bits, maxDoc);
docsWithFieldInstances.put(fieldNumber, instance);
}
}
return instance;
}
}
@Override
public Bits getDocsWithField(FieldInfo field) throws IOException {
switch(field.getDocValuesType()) {
case SORTED_SET:
return DocValues.docsWithValue(getSortedSet(field), maxDoc);
case SORTED_NUMERIC:
return DocValues.docsWithValue(getSortedNumeric(field), maxDoc);
case SORTED:
return DocValues.docsWithValue(getSorted(field), maxDoc);
case BINARY:
BinaryEntry be = binaries.get(field.number);
return getMissingBits(field.number, be.missingOffset, be.missingBytes);
case NUMERIC:
NumericEntry ne = numerics.get(field.number);
return getMissingBits(field.number, ne.missingOffset, ne.missingBytes);
default:
throw new AssertionError();
}
}
@Override
public void close() throws IOException {
data.close();
}
static class NumericEntry {
long offset;
long count;
long missingOffset;
long missingBytes;
byte format;
int packedIntsVersion;
}
static class BinaryEntry {
long offset;
long missingOffset;
long missingBytes;
long numBytes;
int minLength;
int maxLength;
int packedIntsVersion;
int blockSize;
}
static class FSTEntry {
long offset;
long numOrds;
}
static class SortedSetEntry {
boolean singleton;
}
static class SortedNumericEntry {
boolean singleton;
long addressOffset;
int packedIntsVersion;
int blockSize;
long valueCount;
}
static class BytesAndAddresses {
PagedBytes.Reader reader;
MonotonicBlockPackedReader addresses;
}
// exposes FSTEnum directly as a TermsEnum: avoids binary-search next()
static class FSTTermsEnum extends TermsEnum {
final BytesRefFSTEnum<Long> in;
// this is all for the complicated seek(ord)...
// maybe we should add a FSTEnum that supports this operation?
final FST<Long> fst;
final FST.BytesReader bytesReader;
final Arc<Long> firstArc = new Arc<>();
final Arc<Long> scratchArc = new Arc<>();
final IntsRefBuilder scratchInts = new IntsRefBuilder();
final BytesRefBuilder scratchBytes = new BytesRefBuilder();
FSTTermsEnum(FST<Long> fst) {
this.fst = fst;
in = new BytesRefFSTEnum<>(fst);
bytesReader = fst.getBytesReader();
}
@Override
public BytesRef next() throws IOException {
InputOutput<Long> io = in.next();
if (io == null) {
return null;
} else {
return io.input;
}
}
@Override
public Comparator<BytesRef> getComparator() {
return BytesRef.getUTF8SortedAsUnicodeComparator();
}
@Override
public SeekStatus seekCeil(BytesRef text) throws IOException {
if (in.seekCeil(text) == null) {
return SeekStatus.END;
} else if (term().equals(text)) {
// TODO: add SeekStatus to FSTEnum like in https://issues.apache.org/jira/browse/LUCENE-3729
// to remove this comparison?
return SeekStatus.FOUND;
} else {
return SeekStatus.NOT_FOUND;
}
}
@Override
public boolean seekExact(BytesRef text) throws IOException {
if (in.seekExact(text) == null) {
return false;
} else {
return true;
}
}
@Override
public void seekExact(long ord) throws IOException {
// TODO: would be better to make this simpler and faster.
// but we don't want to introduce a bug that corrupts our enum state!
bytesReader.setPosition(0);
fst.getFirstArc(firstArc);
IntsRef output = Util.getByOutput(fst, ord, bytesReader, firstArc, scratchArc, scratchInts);
// TODO: we could do this lazily, better to try to push into FSTEnum though?
in.seekExact(Util.toBytesRef(output, new BytesRefBuilder()));
}
@Override
public BytesRef term() throws IOException {
return in.current().input;
}
@Override
public long ord() throws IOException {
return in.current().output;
}
@Override
public int docFreq() throws IOException {
throw new UnsupportedOperationException();
}
@Override
public long totalTermFreq() throws IOException {
throw new UnsupportedOperationException();
}
@Override
public DocsEnum docs(Bits liveDocs, DocsEnum reuse, int flags) throws IOException {
throw new UnsupportedOperationException();
}
@Override
public DocsAndPositionsEnum docsAndPositions(Bits liveDocs, DocsAndPositionsEnum reuse, int flags) throws IOException {
throw new UnsupportedOperationException();
}
}
}
|
|
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.editor.highlighter;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.colors.EditorColorsScheme;
import com.intellij.openapi.editor.markup.TextAttributes;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.tree.IElementType;
import org.jetbrains.annotations.NotNull;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
public class FragmentedEditorHighlighter implements EditorHighlighter {
private final List<Element> myPieces;
private final Document myDocument;
private final int myAdditionalOffset;
private final boolean myMergeByTextAttributes;
public FragmentedEditorHighlighter(HighlighterIterator sourceIterator, List<TextRange> ranges) {
this(sourceIterator, ranges, 0, false);
}
public FragmentedEditorHighlighter(HighlighterIterator sourceIterator,
List<TextRange> ranges,
final int additionalOffset,
boolean mergeByTextAttributes) {
myMergeByTextAttributes = mergeByTextAttributes;
myDocument = sourceIterator.getDocument();
myPieces = new ArrayList<>();
myAdditionalOffset = additionalOffset;
translate(sourceIterator, ranges);
}
private void translate(HighlighterIterator iterator, List<TextRange> ranges) {
int offset = 0;
int index = 0;
while (!iterator.atEnd() && index < ranges.size()) {
TextRange range = ranges.get(index);
if (range.getStartOffset() >= iterator.getEnd()) {
iterator.advance();
continue;
}
if (range.getEndOffset() >= iterator.getStart()) {
int relativeStart = Math.max(iterator.getStart() - range.getStartOffset(), 0);
int relativeEnd = Math.min(iterator.getEnd() - range.getStartOffset(), range.getLength() + 1);
addElement(new Element(offset + relativeStart,
offset + relativeEnd,
iterator.getTokenType(),
iterator.getTextAttributes()));
}
if (range.getEndOffset() < iterator.getEnd()) {
offset += range.getLength() + 1 + myAdditionalOffset; // myAdditionalOffset accounts for the extra line used by the separators between fragments
int lastEnd = myPieces.isEmpty() ? -1 : myPieces.get(myPieces.size() - 1).getEnd();
addElement(new Element(Math.max(offset - 1 - myAdditionalOffset, lastEnd), offset, null, TextAttributes.ERASE_MARKER));
index++;
continue;
}
iterator.advance();
}
}
private void addElement(@NotNull Element element) {
boolean merged = false;
if (myMergeByTextAttributes && !myPieces.isEmpty()) {
Element oldElement = myPieces.get(myPieces.size() - 1);
if (oldElement.getEnd() >= element.getStart() &&
Comparing.equal(oldElement.getAttributes(), element.getAttributes()) &&
Comparing.equal(oldElement.getElementType(), element.getElementType())) {
merged = true;
myPieces.remove(myPieces.size() - 1);
myPieces.add(new Element(oldElement.getStart(),
element.getEnd(),
element.getElementType(),
element.getAttributes()));
}
}
if (!merged) {
myPieces.add(element);
}
}
@NotNull
@Override
public HighlighterIterator createIterator(int startOffset) {
int index = Collections.binarySearch(myPieces, new Element(startOffset, 0, null, null), Comparator.comparingInt(Element::getStart));
// binarySearch on a miss returns (-insertionPoint - 1), where insertionPoint is the index of the
// first element greater than the key; we need the index of the last element whose start is less
// than or equal to startOffset (the floor element)
if (index < 0) index = Math.max(-index - 2, 0);
return new ProxyIterator(myDocument, index);
}
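// Illustrative sketch, not part of the original class: the floor-index math used above. For piece
// starts {0, 10, 20} and startOffset = 15, binarySearch returns -3 (insertion point 2), and
// Math.max(-(-3) - 2, 0) = 1 selects the piece starting at offset 10, i.e. the last piece whose
// start is <= startOffset. The helper is hypothetical and exists only for this example.
private static int floorIndexForIllustration(int binarySearchResult) {
  return binarySearchResult >= 0 ? binarySearchResult : Math.max(-binarySearchResult - 2, 0);
}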
@Override
public void setEditor(@NotNull HighlighterClient editor) {
}
@Override
public void setColorScheme(@NotNull EditorColorsScheme scheme) {
}
private class ProxyIterator implements HighlighterIterator {
private final Document myDocument;
private int myIdx;
private ProxyIterator(Document document, int idx) {
myDocument = document;
myIdx = idx;
}
@Override
public TextAttributes getTextAttributes() {
return myPieces.get(myIdx).getAttributes();
}
@Override
public int getStart() {
return myPieces.get(myIdx).getStart();
}
@Override
public int getEnd() {
return myPieces.get(myIdx).getEnd();
}
@Override
public IElementType getTokenType() {
return myPieces.get(myIdx).myElementType;
}
@Override
public void advance() {
if (myIdx < myPieces.size()) {
myIdx++;
}
}
@Override
public void retreat() {
if (myIdx > -1) {
myIdx--;
}
}
@Override
public boolean atEnd() {
return myIdx < 0 || myIdx >= myPieces.size();
}
@Override
public Document getDocument() {
return myDocument;
}
}
private static class Element {
private final int myStart;
private final int myEnd;
private final IElementType myElementType;
private final TextAttributes myAttributes;
private Element(int start, int end, IElementType elementType, TextAttributes attributes) {
myStart = start;
myEnd = end;
myElementType = elementType;
myAttributes = attributes;
}
public int getStart() {
return myStart;
}
public int getEnd() {
return myEnd;
}
public IElementType getElementType() {
return myElementType;
}
public TextAttributes getAttributes() {
return myAttributes;
}
}
}
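/*
 * Usage sketch (added for illustration, not part of the original file): a fragmented
 * highlighter can be built from an existing editor highlighter so that only the text
 * ranges shown in a diff-style fragment view keep their original syntax coloring.
 * The parameter names `sourceHighlighter` and `visibleRanges` are hypothetical and
 * would be supplied by the surrounding editor/diff infrastructure.
 */
class FragmentedHighlighterUsageSketch {
    static EditorHighlighter restrictTo(EditorHighlighter sourceHighlighter, List<TextRange> visibleRanges) {
        // Walk the full document from offset 0 and keep only the pieces inside the ranges;
        // additionalOffset = 1 accounts for the synthetic separator line between fragments, and
        // mergeByTextAttributes = true collapses adjacent pieces with identical attributes.
        return new FragmentedEditorHighlighter(sourceHighlighter.createIterator(0), visibleRanges, 1, true);
    }
}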
|
|
/*
* The MIT License
*
* Copyright 2015 enlo.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package info.naiv.lab.java.jmt.range;
import info.naiv.lab.java.jmt.range.Bound.NoBound;
import java.io.Serializable;
import java.util.Comparator;
import lombok.AllArgsConstructor;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.ToString;
import lombok.Value;
/**
*
* @author enlo
* @param <T>
*/
@AllArgsConstructor
@ToString
@EqualsAndHashCode
public abstract class Bound<T extends Comparable<T>> implements Cloneable, Serializable {
private static final long serialVersionUID = 1L;
@Getter
protected final BoundType type;
@Getter
protected final T value;
public Bound(T value, BoundType type) {
this.value = value;
this.type = type;
}
@Override
@SuppressWarnings("CloneDeclaresCloneNotSupported")
public Bound<T> clone() {
try {
return (Bound<T>) super.clone();
}
catch (CloneNotSupportedException ex) {
throw new InternalError(ex.getMessage());
}
}
public abstract Bound<T> construct(T newValue);
public boolean isClosed() {
return BoundType.CLOSED.equals(type);
}
public boolean isOpen() {
return BoundType.OPEN.equals(type);
}
public abstract boolean on(T value, Comparator<? super T> c);
@EqualsAndHashCode(callSuper = true)
@Value
@ToString(callSuper = true)
public static class ClosedLowerBound<T extends Comparable<T>> extends Bound<T> {
private static final long serialVersionUID = 1L;
public ClosedLowerBound(T value) {
super(value, BoundType.CLOSED);
}
@Override
public Bound<T> clone() {
return super.clone(); //To change body of generated methods, choose Tools | Templates.
}
@Override
public Bound<T> construct(T newValue) {
return new ClosedLowerBound<>(newValue);
}
@Override
public boolean on(T value, Comparator<? super T> c) {
return c.compare(this.value, value) <= 0;
}
}
@EqualsAndHashCode(callSuper = true)
@Value
@ToString(callSuper = true)
public static class ClosedUpperBound<T extends Comparable<T>> extends Bound<T> {
private static final long serialVersionUID = 1L;
public ClosedUpperBound(T value) {
super(value, BoundType.CLOSED);
}
@Override
public Bound<T> clone() {
return super.clone(); //To change body of generated methods, choose Tools | Templates.
}
@Override
public Bound<T> construct(T newValue) {
return new ClosedUpperBound<>(newValue);
}
@Override
public boolean on(T value, Comparator<? super T> c) {
return c.compare(value, this.value) <= 0;
}
}
@EqualsAndHashCode(callSuper = true)
@Value
@ToString(callSuper = true)
public static class NoBound<T extends Comparable<T>> extends Bound<T> {
private static final long serialVersionUID = 1L;
public NoBound() {
super(null, BoundType.CLOSED);
}
@Override
public Bound<T> clone() {
return super.clone(); //To change body of generated methods, choose Tools | Templates.
}
@Override
public Bound<T> construct(T newValue) {
return new NoBound<>();
}
@Override
public boolean on(T value, Comparator<? super T> c) {
return true;
}
}
@EqualsAndHashCode(callSuper = true)
@Value
@ToString(callSuper = true)
public static class OpenLowerBound<T extends Comparable<T>> extends Bound<T> {
private static final long serialVersionUID = 1L;
public OpenLowerBound(T value) {
super(value, BoundType.OPEN);
}
@Override
public Bound<T> clone() {
return super.clone(); //To change body of generated methods, choose Tools | Templates.
}
@Override
public Bound<T> construct(T newValue) {
return new OpenLowerBound<>(newValue);
}
@Override
public boolean on(T value, Comparator<? super T> c) {
return c.compare(this.value, value) < 0;
}
}
@EqualsAndHashCode(callSuper = true)
@Value
@ToString(callSuper = true)
public static class OpenUpperBound<T extends Comparable<T>> extends Bound<T> {
private static final long serialVersionUID = 1L;
public OpenUpperBound(T value) {
super(value, BoundType.OPEN);
}
@Override
public Bound<T> clone() {
return super.clone(); //To change body of generated methods, choose Tools | Templates.
}
@Override
public Bound<T> construct(T newValue) {
return new OpenUpperBound<>(newValue);
}
@Override
public boolean on(T value, Comparator<? super T> c) {
return c.compare(value, this.value) < 0;
}
}
}
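/*
 * Usage sketch (added for illustration, not part of the original file): exercises the
 * concrete bound types with natural Integer ordering. The demo class name is hypothetical.
 */
class BoundUsageSketch {
    public static void main(String[] args) {
        Comparator<Integer> natural = Comparator.naturalOrder();
        Bound<Integer> lower = new Bound.ClosedLowerBound<>(1);   // value must be >= 1
        Bound<Integer> upper = new Bound.OpenUpperBound<>(10);    // value must be < 10
        System.out.println(lower.on(5, natural) && upper.on(5, natural)); // true: 1 <= 5 < 10
        System.out.println(upper.on(10, natural));                        // false: open upper bound excludes 10
        System.out.println(lower.construct(3).isClosed());                // true: same bound kind, new value
    }
}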
|
|
/*
* Copyright 2014 NAVER Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.navercorp.pinpoint.profiler.instrument;
import java.lang.reflect.Method;
import com.navercorp.pinpoint.bootstrap.instrument.*;
import com.navercorp.pinpoint.bootstrap.interceptor.annotation.Scope;
import com.navercorp.pinpoint.bootstrap.interceptor.scope.InterceptorScope;
import com.navercorp.pinpoint.profiler.instrument.interceptor.*;
import javassist.CannotCompileException;
import javassist.CtBehavior;
import javassist.CtClass;
import javassist.CtConstructor;
import javassist.CtMethod;
import javassist.Modifier;
import javassist.NotFoundException;
import javassist.bytecode.BadBytecode;
import javassist.bytecode.Bytecode;
import javassist.bytecode.CodeAttribute;
import javassist.bytecode.CodeIterator;
import javassist.bytecode.ConstPool;
import javassist.bytecode.Descriptor;
import javassist.compiler.CompileError;
import javassist.compiler.Javac;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.navercorp.pinpoint.bootstrap.context.MethodDescriptor;
import com.navercorp.pinpoint.bootstrap.interceptor.Interceptor;
import com.navercorp.pinpoint.bootstrap.interceptor.scope.ExecutionPolicy;
import com.navercorp.pinpoint.bootstrap.interceptor.registry.InterceptorRegistry;
import com.navercorp.pinpoint.common.util.Asserts;
import com.navercorp.pinpoint.profiler.context.DefaultMethodDescriptor;
import com.navercorp.pinpoint.profiler.interceptor.factory.AnnotatedInterceptorFactory;
import com.navercorp.pinpoint.profiler.interceptor.registry.InterceptorRegistryBinder;
import com.navercorp.pinpoint.profiler.util.JavaAssistUtils;
public class JavassistMethod implements InstrumentMethod {
private final Logger logger = LoggerFactory.getLogger(this.getClass());
private final boolean isDebug = logger.isDebugEnabled();
private final InstrumentContext pluginContext;
private final InterceptorRegistryBinder interceptorRegistryBinder;
private final CtBehavior behavior;
private final InstrumentClass declaringClass;
private final MethodDescriptor descriptor;
// TODO fix inject InterceptorDefinitionFactory
private static final InterceptorDefinitionFactory interceptorDefinitionFactory = new InterceptorDefinitionFactory();
public JavassistMethod(InstrumentContext pluginContext, InterceptorRegistryBinder interceptorRegistryBinder, InstrumentClass declaringClass, CtBehavior behavior) {
this.pluginContext = pluginContext;
this.interceptorRegistryBinder = interceptorRegistryBinder;
this.behavior = behavior;
this.declaringClass = declaringClass;
String[] parameterVariableNames = JavaAssistUtils.getParameterVariableName(behavior);
int lineNumber = JavaAssistUtils.getLineNumber(behavior);
DefaultMethodDescriptor descriptor = new DefaultMethodDescriptor(behavior.getDeclaringClass().getName(), behavior.getName(), getParameterTypes(), parameterVariableNames);
descriptor.setLineNumber(lineNumber);
this.descriptor = descriptor;
}
@Override
public String getName() {
return behavior.getName();
}
@Override
public String[] getParameterTypes() {
return JavaAssistUtils.parseParameterSignature(behavior.getSignature());
}
@Override
public String getReturnType() {
if (behavior instanceof CtMethod) {
try {
return ((CtMethod) behavior).getReturnType().getName();
} catch (NotFoundException e) {
return null;
}
}
return null;
}
@Override
public int getModifiers() {
return behavior.getModifiers();
}
@Override
public boolean isConstructor() {
return behavior instanceof CtConstructor;
}
@Override
public MethodDescriptor getDescriptor() {
return descriptor;
}
@Override
public int addInterceptor(String interceptorClassName) throws InstrumentException {
Asserts.notNull(interceptorClassName, "interceptorClassName");
return addInterceptor0(interceptorClassName, null, null, null);
}
@Override
public int addInterceptor(String interceptorClassName, Object[] constructorArgs) throws InstrumentException {
Asserts.notNull(interceptorClassName, "interceptorClassName");
Asserts.notNull(constructorArgs, "constructorArgs");
return addInterceptor0(interceptorClassName, constructorArgs, null, null);
}
@Override
public int addScopedInterceptor(String interceptorClassName, String scopeName) throws InstrumentException {
Asserts.notNull(interceptorClassName, "interceptorClassName");
Asserts.notNull(scopeName, "scopeName");
final InterceptorScope interceptorScope = this.pluginContext.getInterceptorScope(scopeName);
return addInterceptor0(interceptorClassName, null, interceptorScope, null);
}
@Override
public int addScopedInterceptor(String interceptorClassName, InterceptorScope scope) throws InstrumentException {
Asserts.notNull(interceptorClassName, "interceptorClassName");
Asserts.notNull(scope, "scope");
return addInterceptor0(interceptorClassName, null, scope, null);
}
@Override
public int addScopedInterceptor(String interceptorClassName, String scopeName, ExecutionPolicy executionPolicy) throws InstrumentException {
Asserts.notNull(interceptorClassName, "interceptorClassName");
Asserts.notNull(scopeName, "scopeName");
Asserts.notNull(executionPolicy, "executionPolicy");
final InterceptorScope interceptorScope = this.pluginContext.getInterceptorScope(scopeName);
return addInterceptor0(interceptorClassName, null, interceptorScope, executionPolicy);
}
@Override
public int addScopedInterceptor(String interceptorClassName, InterceptorScope scope, ExecutionPolicy executionPolicy) throws InstrumentException {
Asserts.notNull(interceptorClassName, "interceptorClassName");
Asserts.notNull(scope, "scope");
Asserts.notNull(executionPolicy, "executionPolicy");
return addInterceptor0(interceptorClassName, null, scope, executionPolicy);
}
@Override
public int addScopedInterceptor(String interceptorClassName, Object[] constructorArgs, String scopeName) throws InstrumentException {
Asserts.notNull(interceptorClassName, "interceptorClassName");
Asserts.notNull(constructorArgs, "constructorArgs");
Asserts.notNull(scopeName, "scopeName");
final InterceptorScope interceptorScope = this.pluginContext.getInterceptorScope(scopeName);
return addInterceptor0(interceptorClassName, constructorArgs, interceptorScope, null);
}
@Override
public int addScopedInterceptor(String interceptorClassName, Object[] constructorArgs, InterceptorScope scope) throws InstrumentException {
Asserts.notNull(interceptorClassName, "interceptorClassName");
Asserts.notNull(constructorArgs, "constructorArgs");
Asserts.notNull(scope, "scope");
return addInterceptor0(interceptorClassName, constructorArgs, scope, null);
}
@Override
public int addScopedInterceptor(String interceptorClassName, Object[] constructorArgs, String scopeName, ExecutionPolicy executionPolicy) throws InstrumentException {
Asserts.notNull(interceptorClassName, "interceptorClassName");
Asserts.notNull(constructorArgs, "constructorArgs");
Asserts.notNull(scopeName, "scopeName");
Asserts.notNull(executionPolicy, "executionPolicy");
final InterceptorScope interceptorScope = this.pluginContext.getInterceptorScope(scopeName);
return addInterceptor0(interceptorClassName, constructorArgs, interceptorScope, executionPolicy);
}
@Override
public int addScopedInterceptor(String interceptorClassName, Object[] constructorArgs, InterceptorScope scope, ExecutionPolicy executionPolicy) throws InstrumentException {
Asserts.notNull(interceptorClassName, "interceptorClassName");
Asserts.notNull(constructorArgs, "constructorArgs");
Asserts.notNull(scope, "scope");
Asserts.notNull(executionPolicy, "executionPolicy");
return addInterceptor0(interceptorClassName, constructorArgs, scope, executionPolicy);
}
@Override
public void addInterceptor(int interceptorId) throws InstrumentException {
Interceptor interceptor = InterceptorRegistry.getInterceptor(interceptorId);
try {
addInterceptor0(interceptor, interceptorId);
} catch (Exception e) {
throw new InstrumentException("Failed to add interceptor " + interceptor.getClass().getName() + " to " + behavior.getLongName(), e);
}
}
private ScopeInfo resolveScopeInfo(String interceptorClassName, InterceptorScope scope, ExecutionPolicy policy) {
final Class<? extends Interceptor> interceptorType = pluginContext.injectClass(declaringClass.getClassLoader(), interceptorClassName);
if (scope == null) {
Scope interceptorScope = interceptorType.getAnnotation(Scope.class);
if (interceptorScope != null) {
String scopeName = interceptorScope.value();
scope = pluginContext.getInterceptorScope(scopeName);
policy = interceptorScope.executionPolicy();
}
}
if (scope == null) {
policy = null;
} else if (policy == null) {
policy = ExecutionPolicy.BOUNDARY;
}
return new ScopeInfo(scope, policy);
}
private static class ScopeInfo {
private final InterceptorScope scope;
private final ExecutionPolicy policy;
public ScopeInfo(InterceptorScope scope, ExecutionPolicy policy) {
this.scope = scope;
this.policy = policy;
}
public InterceptorScope getScope() {
return scope;
}
public ExecutionPolicy getPolicy() {
return policy;
}
}
// for internal api
int addInterceptorInternal(String interceptorClassName, Object[] constructorArgs, InterceptorScope scope, ExecutionPolicy executionPolicy) throws InstrumentException {
if (interceptorClassName == null) {
throw new NullPointerException("interceptorClassName must not be null");
}
return addInterceptor0(interceptorClassName, constructorArgs, scope, executionPolicy);
}
private int addInterceptor0(String interceptorClassName, Object[] constructorArgs, InterceptorScope scope, ExecutionPolicy executionPolicy) throws InstrumentException {
try {
ScopeInfo scopeInfo = resolveScopeInfo(interceptorClassName, scope, executionPolicy);
Interceptor interceptor = createInterceptor(interceptorClassName, scopeInfo, constructorArgs);
int interceptorId = interceptorRegistryBinder.getInterceptorRegistryAdaptor().addInterceptor(interceptor);
addInterceptor0(interceptor, interceptorId);
return interceptorId;
} catch (CannotCompileException ccex) {
throw new InstrumentException("Failed to add interceptor " + interceptorClassName + " to " + behavior.getLongName(), ccex);
} catch (NotFoundException nex) {
throw new InstrumentException("Failed to add interceptor " + interceptorClassName + " to " + behavior.getLongName(), nex);
}
}
private Interceptor createInterceptor(String interceptorClassName, ScopeInfo scopeInfo, Object[] constructorArgs) {
ClassLoader classLoader = declaringClass.getClassLoader();
AnnotatedInterceptorFactory factory = new AnnotatedInterceptorFactory(pluginContext);
Interceptor interceptor = factory.getInterceptor(classLoader, interceptorClassName, constructorArgs, scopeInfo.getScope(), scopeInfo.getPolicy(), declaringClass, this);
return interceptor;
}
private void addInterceptor0(Interceptor interceptor, int interceptorId) throws CannotCompileException, NotFoundException {
if (interceptor == null) {
throw new NullPointerException("interceptor must not be null");
}
final InterceptorDefinition interceptorDefinition = interceptorDefinitionFactory.createInterceptorDefinition(interceptor.getClass());
final String localVariableName = initializeLocalVariable(interceptorId);
int originalCodeOffset = insertBefore(-1, localVariableName);
boolean localVarsInitialized = false;
final int offset = addBeforeInterceptor(interceptorDefinition, interceptorId, originalCodeOffset);
if (offset != -1) {
localVarsInitialized = true;
originalCodeOffset = offset;
}
addAfterInterceptor(interceptorDefinition, interceptorId, localVarsInitialized, originalCodeOffset);
}
private String initializeLocalVariable(int interceptorId) throws CannotCompileException, NotFoundException {
final String interceptorInstanceVar = InvokeCodeGenerator.getInterceptorVar(interceptorId);
addLocalVariable(interceptorInstanceVar, Interceptor.class);
final StringBuilder initVars = new StringBuilder();
initVars.append(interceptorInstanceVar);
initVars.append(" = null;");
return initVars.toString();
}
private void addAfterInterceptor(InterceptorDefinition interceptorDefinition, int interceptorId, boolean localVarsInitialized, int originalCodeOffset) throws NotFoundException, CannotCompileException {
final Class<?> interceptorClass = interceptorDefinition.getInterceptorClass();
final CaptureType captureType = interceptorDefinition.getCaptureType();
if (!isAfterInterceptor(captureType)) {
return;
}
final Method interceptorMethod = interceptorDefinition.getAfterMethod();
if (interceptorMethod == null) {
if (isDebug) {
logger.debug("Skip adding after interceptor because the interceptor doesn't have after method: {}", interceptorClass.getName());
}
return;
}
InvokeAfterCodeGenerator catchGenerator = new InvokeAfterCodeGenerator(interceptorId, interceptorDefinition, declaringClass, this, pluginContext.getTraceContext(), localVarsInitialized, true);
String catchCode = catchGenerator.generate();
if (isDebug) {
logger.debug("addAfterInterceptor catch behavior:{} code:{}", behavior.getLongName(), catchCode);
}
CtClass throwable = behavior.getDeclaringClass().getClassPool().get("java.lang.Throwable");
insertCatch(originalCodeOffset, catchCode, throwable, "$e");
InvokeAfterCodeGenerator afterGenerator = new InvokeAfterCodeGenerator(interceptorId, interceptorDefinition, declaringClass, this, pluginContext.getTraceContext(), localVarsInitialized, false);
final String afterCode = afterGenerator.generate();
if (isDebug) {
logger.debug("addAfterInterceptor after behavior:{} code:{}", behavior.getLongName(), afterCode);
}
behavior.insertAfter(afterCode);
}
private boolean isAfterInterceptor(CaptureType captureType) {
return CaptureType.AFTER == captureType || CaptureType.AROUND == captureType;
}
private int addBeforeInterceptor(InterceptorDefinition interceptorDefinition, int interceptorId, int pos) throws CannotCompileException, NotFoundException {
final Class<?> interceptorClass = interceptorDefinition.getInterceptorClass();
final CaptureType captureType = interceptorDefinition.getCaptureType();
if (!isBeforeInterceptor(captureType)) {
return -1;
}
final Method interceptorMethod = interceptorDefinition.getBeforeMethod();
if (interceptorMethod == null) {
if (isDebug) {
logger.debug("Skip adding before interceptorDefinition because the interceptorDefinition doesn't have before method: {}", interceptorClass.getName());
}
return -1;
}
final InvokeBeforeCodeGenerator generator = new InvokeBeforeCodeGenerator(interceptorId, interceptorDefinition, declaringClass, this, pluginContext.getTraceContext());
final String beforeCode = generator.generate();
if (isDebug) {
logger.debug("addBeforeInterceptor before behavior:{} code:{}", behavior.getLongName(), beforeCode);
}
return insertBefore(pos, beforeCode);
}
private boolean isBeforeInterceptor(CaptureType captureType) {
return CaptureType.BEFORE == captureType || CaptureType.AROUND == captureType;
}
private void addLocalVariable(String name, Class<?> type) throws CannotCompileException, NotFoundException {
final String interceptorClassName = type.getName();
final CtClass interceptorCtClass = behavior.getDeclaringClass().getClassPool().get(interceptorClassName);
behavior.addLocalVariable(name, interceptorCtClass);
}
private int insertBefore(int pos, String src) throws CannotCompileException {
if (isConstructor()) {
return insertBeforeConstructor(pos, src);
} else {
return insertBeforeMethod(pos, src);
}
}
private int insertBeforeMethod(int pos, String src) throws CannotCompileException {
CtClass cc = behavior.getDeclaringClass();
CodeAttribute ca = behavior.getMethodInfo().getCodeAttribute();
if (ca == null)
throw new CannotCompileException("no method body");
CodeIterator iterator = ca.iterator();
Javac jv = new Javac(cc);
try {
int nvars = jv.recordParams(behavior.getParameterTypes(), Modifier.isStatic(getModifiers()));
jv.recordParamNames(ca, nvars);
jv.recordLocalVariables(ca, 0);
jv.recordType(getReturnType0());
jv.compileStmnt(src);
Bytecode b = jv.getBytecode();
int stack = b.getMaxStack();
int locals = b.getMaxLocals();
if (stack > ca.getMaxStack())
ca.setMaxStack(stack);
if (locals > ca.getMaxLocals())
ca.setMaxLocals(locals);
if (pos != -1) {
iterator.insertEx(pos, b.get());
} else {
pos = iterator.insertEx(b.get());
}
iterator.insert(b.getExceptionTable(), pos);
behavior.getMethodInfo().rebuildStackMapIf6(cc.getClassPool(), cc.getClassFile2());
return pos + b.length();
} catch (NotFoundException e) {
throw new CannotCompileException(e);
} catch (CompileError e) {
throw new CannotCompileException(e);
} catch (BadBytecode e) {
throw new CannotCompileException(e);
}
}
private int insertBeforeConstructor(int pos, String src) throws CannotCompileException {
CtClass cc = behavior.getDeclaringClass();
CodeAttribute ca = behavior.getMethodInfo().getCodeAttribute();
CodeIterator iterator = ca.iterator();
Bytecode b = new Bytecode(behavior.getMethodInfo().getConstPool(),
ca.getMaxStack(), ca.getMaxLocals());
b.setStackDepth(ca.getMaxStack());
Javac jv = new Javac(b, cc);
try {
jv.recordParams(behavior.getParameterTypes(), false);
jv.recordLocalVariables(ca, 0);
jv.compileStmnt(src);
ca.setMaxStack(b.getMaxStack());
ca.setMaxLocals(b.getMaxLocals());
iterator.skipConstructor();
if (pos != -1) {
iterator.insertEx(pos, b.get());
} else {
pos = iterator.insertEx(b.get());
}
iterator.insert(b.getExceptionTable(), pos);
behavior.getMethodInfo().rebuildStackMapIf6(cc.getClassPool(), cc.getClassFile2());
return pos + b.length();
}
catch (NotFoundException e) {
throw new CannotCompileException(e);
}
catch (CompileError e) {
throw new CannotCompileException(e);
}
catch (BadBytecode e) {
throw new CannotCompileException(e);
}
}
private void insertCatch(int from, String src, CtClass exceptionType, String exceptionName) throws CannotCompileException {
CtClass cc = behavior.getDeclaringClass();
ConstPool cp = behavior.getMethodInfo().getConstPool();
CodeAttribute ca = behavior.getMethodInfo().getCodeAttribute();
CodeIterator iterator = ca.iterator();
Bytecode b = new Bytecode(cp, ca.getMaxStack(), ca.getMaxLocals());
b.setStackDepth(1);
Javac jv = new Javac(b, cc);
try {
jv.recordParams(behavior.getParameterTypes(), Modifier.isStatic(getModifiers()));
jv.recordLocalVariables(ca, from);
int var = jv.recordVariable(exceptionType, exceptionName);
b.addAstore(var);
jv.compileStmnt(src);
int stack = b.getMaxStack();
int locals = b.getMaxLocals();
if (stack > ca.getMaxStack())
ca.setMaxStack(stack);
if (locals > ca.getMaxLocals())
ca.setMaxLocals(locals);
int len = iterator.getCodeLength();
int pos = iterator.append(b.get());
ca.getExceptionTable().add(from, len, len, cp.addClassInfo(exceptionType));
iterator.append(b.getExceptionTable(), pos);
behavior.getMethodInfo().rebuildStackMapIf6(cc.getClassPool(), cc.getClassFile2());
} catch (NotFoundException e) {
throw new CannotCompileException(e);
} catch (CompileError e) {
throw new CannotCompileException(e);
} catch (BadBytecode e) {
throw new CannotCompileException(e);
}
}
private CtClass getReturnType0() throws NotFoundException {
return Descriptor.getReturnType(behavior.getMethodInfo().getDescriptor(),
behavior.getDeclaringClass().getClassPool());
}
}
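/*
 * Caller-side sketch (added for illustration): inside a Pinpoint plugin transform callback,
 * an InstrumentMethod such as the JavassistMethod above is typically looked up on an
 * InstrumentClass and decorated with an interceptor. The method name, interceptor class
 * and scope name below are hypothetical examples.
 */
class JavassistMethodUsageSketch {
    static void instrument(InstrumentClass target) throws InstrumentException {
        InstrumentMethod execute = target.getDeclaredMethod("execute", "java.lang.String");
        if (execute != null) {
            // Registers the interceptor and weaves the before/after/catch blocks into the method,
            // which is what addInterceptor0(...) above ultimately performs via Javassist.
            execute.addScopedInterceptor("com.example.plugin.ExecuteInterceptor", "ExampleScope");
        }
    }
}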
|
|
package org.hisp.dhis.api.mobile.model.LWUITmodel;
/*
* Copyright (c) 2004-2017, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import org.hisp.dhis.api.mobile.model.Model;
/**
* @author Nguyen Kim Lai
*/
public class Relationship extends Model
{
private String clientVersion;
private String personAName;
private String personBName;
private int personAId;
private int personBId;
private String chosenRelationship;
private String aIsToB;
private String bIsToA;
@Override
public String getClientVersion()
{
return clientVersion;
}
@Override
public void setClientVersion( String clientVersion )
{
this.clientVersion = clientVersion;
}
public String getPersonAName()
{
return personAName;
}
public void setPersonAName( String personAName )
{
this.personAName = personAName;
}
public String getPersonBName()
{
return personBName;
}
public void setPersonBName( String personBName )
{
this.personBName = personBName;
}
public int getPersonAId()
{
return personAId;
}
public void setPersonAId( int personAId )
{
this.personAId = personAId;
}
public int getPersonBId()
{
return personBId;
}
public void setPersonBId( int personBId )
{
this.personBId = personBId;
}
public String getChosenRelationship()
{
return chosenRelationship;
}
public void setChosenRelationship( String chosenRelationship )
{
this.chosenRelationship = chosenRelationship;
}
public String getaIsToB()
{
return aIsToB;
}
public void setaIsToB( String aIsToB )
{
this.aIsToB = aIsToB;
}
public String getbIsToA()
{
return bIsToA;
}
public void setbIsToA( String bIsToA )
{
this.bIsToA = bIsToA;
}
@Override
public void serialize( DataOutputStream dout )
throws IOException
{
dout.writeUTF( this.getName() );
if( this.getPersonAName() != null )
{
dout.writeBoolean( true );
dout.writeUTF( this.getPersonAName() );
}
else
{
dout.writeBoolean( false );
}
if( this.getPersonBName() != null )
{
dout.writeBoolean( true );
dout.writeUTF( this.getPersonBName() );
}
else
{
dout.writeBoolean( false );
}
// for enrollment relationship, attributes below belong to relationship type
if( this.getId() != 0 )
{
dout.writeBoolean( true );
dout.writeInt( this.getId() );
}
else
{
dout.writeBoolean( false );
}
// relationship between A and B
if( this.getaIsToB() != null )
{
dout.writeBoolean( true );
dout.writeUTF( this.getaIsToB() );
}
else
{
dout.writeBoolean( false );
}
if( this.getbIsToA() != null )
{
dout.writeBoolean( true );
dout.writeUTF( this.getbIsToA() );
}
else
{
dout.writeBoolean( false );
}
// A and B id
if( this.getPersonAId() != 0 )
{
dout.writeBoolean( true );
dout.writeInt( this.getPersonAId() );
}
else
{
dout.writeBoolean( false );
}
if( this.getPersonBId() != 0 )
{
dout.writeBoolean( true );
dout.writeInt( this.getPersonBId() );
}
else
{
dout.writeBoolean( false );
}
if( this.getChosenRelationship() != null )
{
dout.writeBoolean( true );
dout.writeUTF( this.getChosenRelationship() );
}
else
{
dout.writeBoolean( false );
}
}
@Override
public void deSerialize( DataInputStream dint )
throws IOException
{
this.setName( dint.readUTF() );
if ( dint.readBoolean() )
{
this.setPersonAName( dint.readUTF() );
}
else
{
this.setPersonAName( null );
}
if ( dint.readBoolean() )
{
this.setPersonBName( dint.readUTF() );
}
else
{
this.setPersonBName( null );
}
if ( dint.readBoolean() )
{
this.setId( dint.readInt() );
}
else
{
this.setId( 0 );
}
if ( dint.readBoolean() )
{
this.setaIsToB( dint.readUTF() );
}
else
{
this.setaIsToB( null );
}
if ( dint.readBoolean() )
{
this.setbIsToA( dint.readUTF() );
}
else
{
this.setbIsToA( null );
}
if ( dint.readBoolean() )
{
this.setPersonAId( dint.readInt() );
}
else
{
this.setPersonAId( 0 );
}
if ( dint.readBoolean() )
{
this.setPersonBId( dint.readInt() );
}
else
{
this.setPersonBId( 0 );
}
if ( dint.readBoolean() )
{
this.setChosenRelationship( dint.readUTF() );
}
else
{
this.setChosenRelationship( null );
}
}
}
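/*
 * Round-trip sketch (added for illustration, not part of the original model): serializes a
 * Relationship to a byte array and reads it back with deSerialize(). It assumes the Model
 * superclass exposes setName/getName and setId/getId, which serialize()/deSerialize() above
 * already rely on; the sample values are hypothetical.
 */
class RelationshipSerializationSketch {
    public static void main(String[] args) throws IOException {
        Relationship rel = new Relationship();
        rel.setName("mother-child");   // written first by serialize()
        rel.setId(7);                  // relationship type id
        rel.setPersonAName("Person A");
        rel.setPersonBName("Person B");
        rel.setPersonAId(100);
        rel.setPersonBId(200);
        rel.setaIsToB("mother");
        rel.setbIsToA("child");
        java.io.ByteArrayOutputStream buffer = new java.io.ByteArrayOutputStream();
        rel.serialize(new DataOutputStream(buffer));
        Relationship copy = new Relationship();
        copy.deSerialize(new DataInputStream(new java.io.ByteArrayInputStream(buffer.toByteArray())));
        System.out.println(copy.getPersonAName() + " is " + copy.getaIsToB() + " of " + copy.getPersonBName());
    }
}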
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.builder;
import org.apache.camel.Expression;
/**
* A helper class for including portions of the <a
* href="http://camel.apache.org/expression.html">expression</a> and
* <a href="http://camel.apache.org/predicate.html">predicate</a> <a
* href="http://camel.apache.org/dsl.html">Java DSL</a>
*
* @version $Revision$
*/
public final class Builder {
/**
* Utility classes should not have a public constructor.
*/
private Builder() {
}
/**
* Returns a <a href="http://camel.apache.org/bean-language.html">bean expression</a>
* value builder.
* <p/>
* This method accepts either a bean instance or a reference to a bean (a String).
*
* @param beanOrBeanRef either a bean instance or a reference to a bean to look up in the Registry
* @return the builder
*/
public static ValueBuilder bean(final Object beanOrBeanRef) {
return bean(beanOrBeanRef, null);
}
/**
* Returns a <a href="http://camel.apache.org/bean-language.html">bean expression</a>
* value builder.
* <p/>
* This method accepts either a bean instance or a reference to a bean (a String).
*
* @param beanOrBeanRef either a bean instance or a reference to a bean to look up in the Registry
* @param method the method name
* @return the builder
*/
public static ValueBuilder bean(Object beanOrBeanRef, String method) {
Expression expression;
if (beanOrBeanRef instanceof String) {
expression = ExpressionBuilder.beanExpression((String) beanOrBeanRef, method);
} else {
expression = ExpressionBuilder.beanExpression(beanOrBeanRef, method);
}
return new ValueBuilder(expression);
}
/**
* Returns a <a href="http://camel.apache.org/bean-language.html">bean expression</a>
* value builder
*
* @param beanType the bean class which will be invoked
* @param method name of method to invoke
* @return the builder
*/
public static ValueBuilder bean(Class<?> beanType, String method) {
Expression expression = ExpressionBuilder.beanExpression(beanType, method);
return new ValueBuilder(expression);
}
/**
* Returns a constant expression
*/
public static ValueBuilder constant(Object value) {
Expression expression = ExpressionBuilder.constantExpression(value);
return new ValueBuilder(expression);
}
/**
* Returns a simple expression
*/
public static ValueBuilder simple(String value) {
Expression expression = ExpressionBuilder.simpleExpression(value);
return new ValueBuilder(expression);
}
/**
* Returns a predicate and value builder for headers on an exchange
*/
public static ValueBuilder header(String name) {
Expression expression = ExpressionBuilder.headerExpression(name);
return new ValueBuilder(expression);
}
/**
* Returns a predicate and value builder for properties on an exchange
*/
public static ValueBuilder property(String name) {
Expression expression = ExpressionBuilder.propertyExpression(name);
return new ValueBuilder(expression);
}
/**
* Returns a predicate and value builder for the inbound body on an exchange
*/
public static ValueBuilder body() {
Expression expression = ExpressionBuilder.bodyExpression();
return new ValueBuilder(expression);
}
/**
* Returns a predicate and value builder for the inbound message body as a
* specific type
*/
public static <T> ValueBuilder bodyAs(Class<T> type) {
Expression expression = ExpressionBuilder.bodyExpression(type);
return new ValueBuilder(expression);
}
/**
* Returns a predicate and value builder for the outbound body on an
* exchange
*/
public static ValueBuilder outBody() {
Expression expression = ExpressionBuilder.outBodyExpression();
return new ValueBuilder(expression);
}
/**
* Returns a predicate and value builder for the outbound message body as a
* specific type
*/
public static <T> ValueBuilder outBodyAs(Class<T> type) {
Expression expression = ExpressionBuilder.outBodyExpression(type);
return new ValueBuilder(expression);
}
/**
* Returns a predicate and value builder for the fault body on an
* exchange
*/
public static ValueBuilder faultBody() {
Expression expression = ExpressionBuilder.faultBodyExpression();
return new ValueBuilder(expression);
}
/**
* Returns a predicate and value builder for the fault message body as a
* specific type
*/
public static <T> ValueBuilder faultBodyAs(Class<T> type) {
Expression expression = ExpressionBuilder.faultBodyExpression(type);
return new ValueBuilder(expression);
}
/**
* Returns an expression for the given system property
*/
public static ValueBuilder systemProperty(final String name) {
return systemProperty(name, null);
}
/**
* Returns an expression for the given system property
*/
public static ValueBuilder systemProperty(final String name, final String defaultValue) {
return new ValueBuilder(ExpressionBuilder.systemPropertyExpression(name, defaultValue));
}
/**
* Returns a predicate and value builder for the exception message on an exchange
*/
public static ValueBuilder exceptionMessage() {
Expression expression = ExpressionBuilder.exchangeExceptionMessageExpression();
return new ValueBuilder(expression);
}
/**
* Returns a predicate and value builder for the exception stacktrace on an exchange
*/
public static ValueBuilder exceptionStackTrace() {
Expression expression = ExpressionBuilder.exchangeExceptionStackTraceExpression();
return new ValueBuilder(expression);
}
/**
* Returns an expression that replaces all occurrences of the regular
* expression with the given replacement
*/
public static ValueBuilder regexReplaceAll(Expression content, String regex, String replacement) {
Expression newExp = ExpressionBuilder.regexReplaceAll(content, regex, replacement);
return new ValueBuilder(newExp);
}
/**
* Returns an expression that replaces all occurrences of the regular
* expression with the given replacement
*/
public static ValueBuilder regexReplaceAll(Expression content, String regex, Expression replacement) {
Expression newExp = ExpressionBuilder.regexReplaceAll(content, regex, replacement);
return new ValueBuilder(newExp);
}
/**
* Returns an expression processing the exchange to the given endpoint uri.
*
* @param uri endpoint uri
* @return the builder
*/
public static ValueBuilder sendTo(String uri) {
Expression expression = ExpressionBuilder.toExpression(uri);
return new ValueBuilder(expression);
}
}
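/*
 * Usage sketch (added for illustration): Builder's static factories return ValueBuilder
 * instances that are normally evaluated against an Exchange inside a route, e.g. in
 * filter(...) or setBody(...). Building them, as below, requires no CamelContext.
 */
class CamelBuilderUsageSketch {
    static void buildExpressions() {
        ValueBuilder greeting = Builder.constant("Hello Camel");
        ValueBuilder typeHeader = Builder.header("type");
        ValueBuilder normalizedBody = Builder.regexReplaceAll(Builder.body(), "\\s+", " ");
        ValueBuilder javaVersion = Builder.systemProperty("java.version", "unknown");
        // Each ValueBuilder wraps an org.apache.camel.Expression; at runtime the route engine
        // evaluates it against the current Exchange.
    }
}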
|
|
/*
* Copyright 2000-2003 by Model N, Inc. All Rights Reserved.
*
* This software is the confidential and proprietary information
* of Model N, Inc ("Confidential Information"). You shall not
* disclose such Confidential Information and shall use it only
* in accordance with the terms of the license agreement you
* entered into with Model N, Inc.
*/
package com.modeln.build.common.data.product;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
/**
* This class encapsulates information specific to a single product version,
* such as the version name and product components for the version.
*
* @hibernate.class table="product_version"
*
* @version 1.0
* @author Shawn Stafford ([email protected])
*/
public class CMnProductVersion {
/** Auto-generated ID used to identify the version */
private Integer id;
/** Unique code name for the version */
private String name;
/** Version number */
private String number;
/** List of product components within this version */
private Set components;
/** List of builds released in this version */
private Set releases;
/**
* Create a copy of the version object.
*
* @return Version information
*/
public Object clone() {
CMnProductVersion clone = new CMnProductVersion();
clone.setId(getId());
clone.setName(getName());
clone.setNumber(getNumber());
if (components != null) {
Iterator componentList = components.iterator();
while (componentList.hasNext()) {
CMnProductComponent currentComponent = (CMnProductComponent) componentList.next();
CMnProductComponent componentClone = (CMnProductComponent) currentComponent.clone();
clone.addComponent(componentClone);
}
}
if (releases != null) {
Iterator releaseList = releases.iterator();
while (releaseList.hasNext()) {
CMnProductRelease currentRelease = (CMnProductRelease) releaseList.next();
CMnProductRelease releaseClone = (CMnProductRelease) currentRelease.clone();
clone.addRelease(releaseClone);
}
}
return clone;
}
/**
* Set the ID used to look up the product version.
*
* @param id Unique ID of the product version
*/
public void setId(Integer id) {
this.id = id;
}
/**
* Return the ID used to look up the product version.
*
* @hibernate.id generator-class="native"
*
* @return ID of the product version
*/
public Integer getId() {
return id;
}
/**
* Set the version code name.
*
* @param text Version title
*/
public void setName(String text) {
name = text;
}
/**
* Return the version code name.
*
* @hibernate.property
*
* @return Version title
*/
public String getName() {
return name;
}
/**
* Set the version number.
*
* @param text Version number
*/
public void setNumber(String text) {
number = text;
}
/**
* Return the version number.
*
* @hibernate.property
*
* @return Version number
*/
public String getNumber() {
return number;
}
/**
* Set the list of product components within this version.
*
* @param list Product component list
*/
public void setComponents(Set list) {
components = list;
}
/**
* Return the list of product components.
*
* @hibernate.set table="version_component_map" cascade="all"
* @hibernate.collection-key column="version_id"
* @hibernate.collection-one-to-many class="com.modeln.build.common.data.product.CMnProductComponent"
*
* @return Product component list
*/
public Set getComponents() {
return components;
}
/**
* Add a list of components to the existing list. This method merges
* the components by ensuring that all components from the two
* lists are merged into a single list. The component ID is used to compare
* the identity of components in the two lists.
*
* @param list List of additional product components
*/
public void addComponents(Set list) {
Iterator componentList = list.iterator();
while (componentList.hasNext()) {
CMnProductComponent currentComponent = (CMnProductComponent) componentList.next();
addComponent(currentComponent);
}
}
/**
* Returns the component with the specified ID value or NULL if the component
* doesn't exist.
*
* @param id Component ID
* @return Component object with the specified ID
*/
public CMnProductComponent getComponent(Integer id) {
Iterator componentList = components.iterator();
while (componentList.hasNext()) {
CMnProductComponent currentComponent = (CMnProductComponent) componentList.next();
if (id.equals(currentComponent.getId())) {
return currentComponent;
}
}
return null;
}
/**
* Returns TRUE if the version contains a component object with the specified
* ID value.
*
* @param id Component ID
* @return TRUE if the specified component exists
*/
public boolean containsComponent(Integer id) {
return (getComponent(id) != null);
}
/**
* Updates the specified component object by locating the matching component ID
* in the list and merging any missing information into the existing component
* object. If a matching component does not already exist, the supplied
* component object is simply added to the list.
*
* @param component Component data
*/
public void addComponent(CMnProductComponent component) {
boolean componentFound = false;
if (component != null) {
if (components != null) {
Iterator componentList = components.iterator();
while (componentList.hasNext()) {
CMnProductComponent currentComponent = (CMnProductComponent) componentList.next();
// Match by ID
boolean matchFound = false;
if ((component.getId() != null) &&
(currentComponent.getId() != null) &&
component.getId().equals(currentComponent.getId()))
{
matchFound = true;
}
// Stub out a section for copying data from the current component
if (matchFound) {
componentFound = true;
}
}
} else {
components = new HashSet();
}
// If the component didn't already exist, add it to the list
if (!componentFound) {
components.add(component);
}
}
}
/**
* Set the list of product releases within this version.
*
* @param list Product release list
*/
public void setReleases(Set list) {
releases = list;
}
/**
* Return the list of product releases.
*
* @hibernate.set table="version_release_map" cascade="all"
* @hibernate.collection-key column="version_id"
* @hibernate.collection-one-to-many class="com.modeln.build.common.data.product.CMnProductRelease"
*
* @return Product release list
*/
public Set getReleases() {
return releases;
}
/**
* Add a list of releases to the existing list. This method merges
* the releases by ensuring that all releases from the two
* lists are merged into a single list. The release ID is used to compare
* the identity of releases in the two lists.
*
* @param list List of additional product releases
*/
public void addReleases(Set list) {
Iterator releaseList = list.iterator();
while (releaseList.hasNext()) {
CMnProductRelease currentRelease = (CMnProductRelease) releaseList.next();
addRelease(currentRelease);
}
}
/**
* Returns the release with the specified ID value or NULL if the release
* doesn't exist.
*
* @param id Release ID
* @return Release object with the specified ID
*/
public CMnProductRelease getRelease(Integer id) {
Iterator releaseList = releases.iterator();
while (releaseList.hasNext()) {
CMnProductRelease currentRelease = (CMnProductRelease) releaseList.next();
if (id.equals(currentRelease.getId())) {
return currentRelease;
}
}
return null;
}
/**
* Updates the specified release object by locating the matching release ID
* in the list and merging any missing information into the existing release
* object. If a matching release does not already exist, the supplied
* release object is simply added to the list.
*
* @param release Release data
*/
public void addRelease(CMnProductRelease release) {
boolean releaseFound = false;
if (release != null) {
if (releases != null) {
Iterator releaseList = releases.iterator();
while (releaseList.hasNext()) {
CMnProductRelease currentRelease = (CMnProductRelease) releaseList.next();
// Match by ID
boolean matchFound = false;
if ((release.getId() != null) &&
(currentRelease.getId() != null) &&
release.getId().equals(currentRelease.getId()))
{
matchFound = true;
}
// Stub out a section for copying data from the current release
if (matchFound) {
releaseFound = true;
}
}
} else {
releases = new HashSet();
}
// If the release didn't already exist, add it to the list
if (!releaseFound) {
releases.add(release);
}
}
}
}
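/*
 * Usage sketch (added for illustration): only touches methods defined in this class, so no
 * component or release data is required. The demo class name and values are hypothetical.
 */
class CMnProductVersionUsageSketch {
    public static void main(String[] args) {
        CMnProductVersion version = new CMnProductVersion();
        version.setId(Integer.valueOf(42));
        version.setName("Aurora");
        version.setNumber("10.2.0");
        // clone() copies the scalar fields; the component/release sets are copied only when present.
        CMnProductVersion copy = (CMnProductVersion) version.clone();
        System.out.println(copy.getName() + " " + copy.getNumber());   // prints "Aurora 10.2.0"
    }
}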
|
|
package com.intellij.platform.templates.github;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.util.NullableFunction;
import com.intellij.util.Producer;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Enumeration;
import java.util.concurrent.Callable;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
import java.util.zip.ZipInputStream;
/**
* @author Sergey Simonchik
*/
public class ZipUtil {
private static final Logger LOG = Logger.getInstance(ZipUtil.class);
public interface ContentProcessor {
/** Return null to skip the file */
@Nullable
byte[] processContent(byte[] content, File file) throws IOException;
}
public static void unzipWithProgressSynchronously(
@Nullable Project project,
@NotNull String progressTitle,
@NotNull final File zipArchive,
@NotNull final File extractToDir,
final boolean unwrapSingleTopLevelFolder) throws GeneratorException
{
unzipWithProgressSynchronously(project, progressTitle, zipArchive, extractToDir, null, unwrapSingleTopLevelFolder);
}
public static void unzipWithProgressSynchronously(
@Nullable Project project,
@NotNull String progressTitle,
@NotNull final File zipArchive,
@NotNull final File extractToDir,
@Nullable final NullableFunction<String, String> pathConvertor,
final boolean unwrapSingleTopLevelFolder) throws GeneratorException
{
final Outcome<Boolean> outcome = DownloadUtil.provideDataWithProgressSynchronously(
project, progressTitle, "Unpacking ...",
new Callable<Boolean>() {
@Override
public Boolean call() throws IOException {
ProgressIndicator progress = ProgressManager.getInstance().getProgressIndicator();
unzip(progress, extractToDir, zipArchive, pathConvertor, null, unwrapSingleTopLevelFolder);
return true;
}
},
new Producer<Boolean>() {
@Override
public Boolean produce() {
return false;
}
}
);
Boolean result = outcome.get();
if (result == null) {
@SuppressWarnings("ThrowableResultOfMethodCallIgnored")
Exception e = outcome.getException();
if (e != null) {
throw new GeneratorException("Unpacking failed, downloaded archive is broken");
}
throw new GeneratorException("Unpacking was cancelled");
}
}
private static File getUnzipToDir(@Nullable ProgressIndicator progress,
@NotNull File targetDir,
boolean unwrapSingleTopLevelFolder) throws IOException {
if (progress != null) {
progress.setText("Extracting...");
}
if (unwrapSingleTopLevelFolder) {
return FileUtil.createTempDirectory("unzip-dir-", null);
}
return targetDir;
}
// This method throws an IOException if the zipArchive file isn't a valid zip archive.
public static void unzip(@Nullable ProgressIndicator progress,
@NotNull File targetDir,
@NotNull File zipArchive,
@Nullable NullableFunction<String, String> pathConvertor,
@Nullable ContentProcessor contentProcessor,
boolean unwrapSingleTopLevelFolder) throws IOException {
File unzipToDir = getUnzipToDir(progress, targetDir, unwrapSingleTopLevelFolder);
ZipFile zipFile = new ZipFile(zipArchive, ZipFile.OPEN_READ);
try {
Enumeration<? extends ZipEntry> entries = zipFile.entries();
while (entries.hasMoreElements()) {
ZipEntry entry = entries.nextElement();
InputStream entryContentStream = zipFile.getInputStream(entry);
unzipEntryToDir(progress, entry, entryContentStream, unzipToDir, pathConvertor, contentProcessor);
entryContentStream.close();
}
}
finally {
zipFile.close();
}
doUnwrapSingleTopLevelFolder(unwrapSingleTopLevelFolder, unzipToDir, targetDir);
}
public static void unzip(@Nullable ProgressIndicator progress,
@NotNull File targetDir,
@NotNull ZipInputStream stream,
@Nullable NullableFunction<String, String> pathConvertor,
@Nullable ContentProcessor contentProcessor,
boolean unwrapSingleTopLevelFolder) throws IOException {
File unzipToDir = getUnzipToDir(progress, targetDir, unwrapSingleTopLevelFolder);
ZipEntry entry;
while ((entry = stream.getNextEntry()) != null) {
unzipEntryToDir(progress, entry, stream, unzipToDir, pathConvertor, contentProcessor);
}
doUnwrapSingleTopLevelFolder(unwrapSingleTopLevelFolder, unzipToDir, targetDir);
}
private static void doUnwrapSingleTopLevelFolder(boolean unwrapSingleTopLevelFolder,
@NotNull File unzipToDir,
@NotNull File targetDir) throws IOException {
if (unwrapSingleTopLevelFolder) {
File[] topLevelFiles = unzipToDir.listFiles();
File dirToMove;
if (topLevelFiles != null && topLevelFiles.length == 1 && topLevelFiles[0].isDirectory()) {
dirToMove = topLevelFiles[0];
}
else {
dirToMove = unzipToDir;
}
// Don't "FileUtil.moveDirWithContent(dirToMove, targetDir)"
// because a file moved with "java.io.File.renameTo" won't inherit its new parent's permissions
FileUtil.copyDirContent(dirToMove, targetDir);
FileUtil.delete(unzipToDir);
}
}
private static void unzipEntryToDir(@Nullable ProgressIndicator progress,
@NotNull final ZipEntry zipEntry,
@NotNull final InputStream entryContentStream,
@NotNull final File extractToDir,
@Nullable NullableFunction<String, String> pathConvertor,
@Nullable ContentProcessor contentProcessor) throws IOException {
String relativeExtractPath = createRelativeExtractPath(zipEntry);
if (pathConvertor != null) {
relativeExtractPath = pathConvertor.fun(relativeExtractPath);
if (relativeExtractPath == null) {
// should be skipped
return;
}
}
File child = new File(extractToDir, relativeExtractPath);
File dir = zipEntry.isDirectory() ? child : child.getParentFile();
if (!dir.exists() && !dir.mkdirs()) {
throw new IOException("Unable to create dir: '" + dir + "'!");
}
if (zipEntry.isDirectory()) {
return;
}
if (progress != null) {
progress.setText("Extracting " + relativeExtractPath + " ...");
}
if (contentProcessor == null) {
FileOutputStream fileOutputStream = new FileOutputStream(child);
try {
FileUtil.copy(entryContentStream, fileOutputStream);
}
finally {
fileOutputStream.close();
}
}
else {
byte[] content = contentProcessor.processContent(FileUtil.loadBytes(entryContentStream), child);
if (content != null) {
FileOutputStream fileOutputStream = new FileOutputStream(child);
try {
fileOutputStream.write(content);
}
finally {
fileOutputStream.close();
}
}
}
LOG.info("Extract: " + relativeExtractPath);
}
@NotNull
private static String createRelativeExtractPath(@NotNull ZipEntry zipEntry) {
String name = StringUtil.trimStart(zipEntry.getName(), "/");
return StringUtil.trimEnd(name, "/");
}
}
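/*
 * Caller sketch (added for illustration): extracting a downloaded template archive without
 * progress reporting or content rewriting. The file locations are hypothetical;
 * unwrapSingleTopLevelFolder = true strips a single wrapping directory such as the
 * "<repo>-master/" folder found in GitHub archives.
 */
class ZipUtilUsageSketch {
    static void extractTemplate() throws IOException {
        File archive = new File(System.getProperty("java.io.tmpdir"), "template.zip");
        File targetDir = new File(System.getProperty("java.io.tmpdir"), "template-unpacked");
        // progress, pathConvertor and contentProcessor are all optional (nullable) parameters.
        ZipUtil.unzip(null, targetDir, archive, null, null, true);
    }
}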
|
|
/*
* Copyright 2014 Open Networking Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.routing.impl;
import com.google.common.collect.Sets;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.onlab.packet.Ip4Address;
import org.onlab.packet.Ip4Prefix;
import org.onlab.packet.IpAddress;
import org.onlab.packet.IpPrefix;
import org.onlab.packet.MacAddress;
import org.onlab.packet.VlanId;
import org.onosproject.net.ConnectPoint;
import org.onosproject.net.DefaultHost;
import org.onosproject.net.DeviceId;
import org.onosproject.net.Host;
import org.onosproject.net.HostId;
import org.onosproject.net.HostLocation;
import org.onosproject.net.PortNumber;
import org.onosproject.net.host.HostEvent;
import org.onosproject.net.host.HostListener;
import org.onosproject.net.host.HostService;
import org.onosproject.net.provider.ProviderId;
import org.onosproject.routing.config.RoutingConfigurationService;
import org.onosproject.routing.impl.Router.InternalHostListener;
import org.onosproject.routing.BgpService;
import org.onosproject.routing.FibEntry;
import org.onosproject.routing.FibListener;
import org.onosproject.routing.FibUpdate;
import org.onosproject.routing.RouteEntry;
import org.onosproject.routing.RouteListener;
import org.onosproject.routing.RouteUpdate;
import java.util.Collections;
import static org.easymock.EasyMock.*;
/**
* This class tests adding a route and updating a route.
* The HostService module supplies the MAC address asynchronously.
*/
public class RouterAsyncArpTest {
private HostService hostService;
private FibListener fibListener;
private RoutingConfigurationService routingConfigurationService;
private static final ConnectPoint SW1_ETH1 = new ConnectPoint(
DeviceId.deviceId("of:0000000000000001"),
PortNumber.portNumber(1));
private static final ConnectPoint SW2_ETH1 = new ConnectPoint(
DeviceId.deviceId("of:0000000000000002"),
PortNumber.portNumber(1));
private static final ConnectPoint SW3_ETH1 = new ConnectPoint(
DeviceId.deviceId("of:0000000000000003"),
PortNumber.portNumber(1));
private Router router;
private InternalHostListener internalHostListener;
@Before
public void setUp() throws Exception {
hostService = createMock(HostService.class);
routingConfigurationService =
createMock(RoutingConfigurationService.class);
BgpService bgpService = createMock(BgpService.class);
bgpService.start(anyObject(RouteListener.class));
bgpService.stop();
replay(bgpService);
fibListener = createMock(FibListener.class);
router = new Router();
router.hostService = hostService;
router.routingConfigurationService = routingConfigurationService;
router.bgpService = bgpService;
router.activate();
router.addFibListener(fibListener);
router.start();
internalHostListener = router.new InternalHostListener();
}
@After
public void tearDown() {
// Called during shutdown
reset(hostService);
hostService.removeListener(anyObject(HostListener.class));
router.stop();
}
/**
* Tests adding a route entry with asynchronous HostService replies.
*/
@Test
public void testRouteAdd() {
// Construct a route entry
IpPrefix prefix = Ip4Prefix.valueOf("1.1.1.0/24");
IpAddress nextHopIp = Ip4Address.valueOf("192.168.10.1");
RouteEntry routeEntry = new RouteEntry(prefix, nextHopIp);
// Host service will reply with no hosts when asked
reset(hostService);
expect(hostService.getHostsByIp(anyObject(IpAddress.class))).andReturn(
Collections.emptySet()).anyTimes();
hostService.startMonitoringIp(IpAddress.valueOf("192.168.10.1"));
replay(hostService);
reset(routingConfigurationService);
expect(routingConfigurationService.isIpPrefixLocal(
anyObject(IpPrefix.class))).andReturn(false);
replay(routingConfigurationService);
// Initially when we add the route, no FIB update will be sent
replay(fibListener);
router.processRouteUpdates(Collections.singletonList(
new RouteUpdate(RouteUpdate.Type.UPDATE, routeEntry)));
verify(fibListener);
// Now when we send the event, we expect the FIB update to be sent
reset(fibListener);
FibEntry fibEntry = new FibEntry(prefix, nextHopIp,
MacAddress.valueOf("00:00:00:00:00:01"));
fibListener.update(Collections.singletonList(new FibUpdate(
FibUpdate.Type.UPDATE, fibEntry)), Collections.emptyList());
replay(fibListener);
Host host = new DefaultHost(ProviderId.NONE, HostId.NONE,
MacAddress.valueOf("00:00:00:00:00:01"), VlanId.NONE,
new HostLocation(
SW1_ETH1.deviceId(),
SW1_ETH1.port(), 1),
Sets.newHashSet(IpAddress.valueOf("192.168.10.1")));
// Send in the host event
internalHostListener.event(
new HostEvent(HostEvent.Type.HOST_ADDED, host));
verify(fibListener);
}
/**
* Tests updating a route entry with asynchronous HostService replies.
*/
@Test
public void testRouteUpdate() {
// Add a route
testRouteAdd();
// Construct a route entry
IpPrefix prefix = Ip4Prefix.valueOf("1.1.1.0/24");
IpAddress nextHopIp = Ip4Address.valueOf("192.168.20.1");
RouteEntry routeEntry = new RouteEntry(prefix, nextHopIp);
// Host service will reply with no hosts when asked
reset(hostService);
expect(hostService.getHostsByIp(anyObject(IpAddress.class))).andReturn(
Collections.emptySet()).anyTimes();
hostService.startMonitoringIp(IpAddress.valueOf("192.168.20.1"));
replay(hostService);
reset(routingConfigurationService);
expect(routingConfigurationService.isIpPrefixLocal(
anyObject(IpPrefix.class))).andReturn(false);
replay(routingConfigurationService);
// Initially when we add the route, the DELETE FIB update will be sent
// but the UPDATE FIB update will come later when the MAC is resolved
reset(fibListener);
fibListener.update(Collections.emptyList(), Collections.singletonList(new FibUpdate(
FibUpdate.Type.DELETE, new FibEntry(prefix, null, null))));
replay(fibListener);
router.processRouteUpdates(Collections.singletonList(
new RouteUpdate(RouteUpdate.Type.UPDATE, routeEntry)));
verify(fibListener);
// Now when we send the event, we expect the FIB update to be sent
reset(fibListener);
FibEntry fibEntry = new FibEntry(prefix, nextHopIp,
MacAddress.valueOf("00:00:00:00:00:02"));
fibListener.update(Collections.singletonList(new FibUpdate(
FibUpdate.Type.UPDATE, fibEntry)), Collections.emptyList());
replay(fibListener);
Host host = new DefaultHost(ProviderId.NONE, HostId.NONE,
MacAddress.valueOf("00:00:00:00:00:02"), VlanId.NONE,
new HostLocation(
SW1_ETH1.deviceId(),
SW1_ETH1.port(), 1),
Sets.newHashSet(IpAddress.valueOf("192.168.20.1")));
// Send in the host event
internalHostListener.event(
new HostEvent(HostEvent.Type.HOST_ADDED, host));
verify(fibListener);
}
}
|
|
/*
* Copyright 2010 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.groovy.scripts;
import groovy.lang.Closure;
import org.gradle.api.Action;
import org.gradle.api.PathValidation;
import org.gradle.api.Script;
import org.gradle.api.file.ConfigurableFileCollection;
import org.gradle.api.file.ConfigurableFileTree;
import org.gradle.api.file.CopySpec;
import org.gradle.api.file.DeleteSpec;
import org.gradle.api.file.FileTree;
import org.gradle.api.initialization.dsl.ScriptHandler;
import org.gradle.api.internal.ProcessOperations;
import org.gradle.api.internal.file.DefaultFileOperations;
import org.gradle.api.internal.file.FileLookup;
import org.gradle.api.internal.file.FileOperations;
import org.gradle.api.internal.file.FileResolver;
import org.gradle.api.internal.file.collections.DirectoryFileTreeFactory;
import org.gradle.api.internal.initialization.ClassLoaderScope;
import org.gradle.api.internal.initialization.ScriptHandlerFactory;
import org.gradle.api.internal.plugins.DefaultObjectConfigurationAction;
import org.gradle.api.logging.Logger;
import org.gradle.api.logging.Logging;
import org.gradle.api.logging.LoggingManager;
import org.gradle.api.provider.PropertyState;
import org.gradle.api.provider.Provider;
import org.gradle.api.provider.ProviderFactory;
import org.gradle.api.resources.ResourceHandler;
import org.gradle.api.tasks.WorkResult;
import org.gradle.configuration.ScriptPluginFactory;
import org.gradle.internal.Actions;
import org.gradle.internal.hash.FileHasher;
import org.gradle.internal.hash.StreamHasher;
import org.gradle.internal.reflect.Instantiator;
import org.gradle.internal.resource.TextResourceLoader;
import org.gradle.internal.service.ServiceRegistry;
import org.gradle.process.ExecResult;
import org.gradle.process.ExecSpec;
import org.gradle.process.JavaExecSpec;
import org.gradle.util.ConfigureUtil;
import java.io.File;
import java.net.URI;
import java.util.Map;
import java.util.concurrent.Callable;
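/**
* Base class for compiled Gradle scripts. On init it captures the script's service registry
* and then delegates file, copy, delete, exec/javaexec, provider and logging calls to the
* corresponding services ({@link FileOperations}, {@link ProcessOperations},
* {@link ProviderFactory}, {@link LoggingManager}).
*/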
public abstract class DefaultScript extends BasicScript {
private static final Logger LOGGER = Logging.getLogger(Script.class);
private FileOperations fileOperations;
private ProcessOperations processOperations;
private ProviderFactory providerFactory;
private LoggingManager loggingManager;
public ServiceRegistry __scriptServices;
@Override
public void init(final Object target, ServiceRegistry services) {
super.init(target, services);
this.__scriptServices = services;
loggingManager = services.get(LoggingManager.class);
Instantiator instantiator = services.get(Instantiator.class);
FileLookup fileLookup = services.get(FileLookup.class);
DirectoryFileTreeFactory directoryFileTreeFactory = services.get(DirectoryFileTreeFactory.class);
StreamHasher streamHasher = services.get(StreamHasher.class);
FileHasher fileHasher = services.get(FileHasher.class);
if (target instanceof FileOperations) {
fileOperations = (FileOperations) target;
} else {
File sourceFile = getScriptSource().getResource().getLocation().getFile();
if (sourceFile != null) {
fileOperations = new DefaultFileOperations(fileLookup.getFileResolver(sourceFile.getParentFile()), null, null, instantiator, fileLookup, directoryFileTreeFactory, streamHasher, fileHasher);
} else {
fileOperations = new DefaultFileOperations(fileLookup.getFileResolver(), null, null, instantiator, fileLookup, directoryFileTreeFactory, streamHasher, fileHasher);
}
}
processOperations = (ProcessOperations) fileOperations;
providerFactory = services.get(ProviderFactory.class);
}
@Override
public FileResolver getFileResolver() {
return fileOperations.getFileResolver();
}
private DefaultObjectConfigurationAction createObjectConfigurationAction() {
ClassLoaderScope classLoaderScope = __scriptServices.get(ClassLoaderScope.class);
return new DefaultObjectConfigurationAction(
getFileResolver(),
__scriptServices.get(ScriptPluginFactory.class),
__scriptServices.get(ScriptHandlerFactory.class),
classLoaderScope,
__scriptServices.get(TextResourceLoader.class),
getScriptTarget()
);
}
@Override
public void apply(Closure closure) {
DefaultObjectConfigurationAction action = createObjectConfigurationAction();
ConfigureUtil.configure(closure, action);
action.execute();
}
@Override
public void apply(Map options) {
DefaultObjectConfigurationAction action = createObjectConfigurationAction();
ConfigureUtil.configureByMap(options, action);
action.execute();
}
@Override
public ScriptHandler getBuildscript() {
return __scriptServices.get(ScriptHandler.class);
}
@Override
public void buildscript(Closure configureClosure) {
ConfigureUtil.configure(configureClosure, getBuildscript());
}
@Override
public File file(Object path) {
return fileOperations.file(path);
}
@Override
public File file(Object path, PathValidation validation) {
return fileOperations.file(path, validation);
}
@Override
public URI uri(Object path) {
return fileOperations.uri(path);
}
@Override
public ConfigurableFileCollection files(Object... paths) {
return fileOperations.files(paths);
}
@Override
public ConfigurableFileCollection files(Object paths, Closure configureClosure) {
return ConfigureUtil.configure(configureClosure, fileOperations.files(paths));
}
@Override
public String relativePath(Object path) {
return fileOperations.relativePath(path);
}
@Override
public ConfigurableFileTree fileTree(Object baseDir) {
return fileOperations.fileTree(baseDir);
}
@Override
public ConfigurableFileTree fileTree(Map<String, ?> args) {
return fileOperations.fileTree(args);
}
@Override
public ConfigurableFileTree fileTree(Object baseDir, Closure configureClosure) {
return ConfigureUtil.configure(configureClosure, fileOperations.fileTree(baseDir));
}
@Override
public FileTree zipTree(Object zipPath) {
return fileOperations.zipTree(zipPath);
}
@Override
public FileTree tarTree(Object tarPath) {
return fileOperations.tarTree(tarPath);
}
@Override
public ResourceHandler getResources() {
return fileOperations.getResources();
}
@Override
public WorkResult copy(Closure closure) {
return copy(ConfigureUtil.configureUsing(closure));
}
@Override
public WorkResult copy(Action<? super CopySpec> action) {
return fileOperations.copy(action);
}
@Override
public WorkResult sync(Action<? super CopySpec> action) {
return fileOperations.sync(action);
}
@Override
public CopySpec copySpec(Closure closure) {
return Actions.with(copySpec(), ConfigureUtil.configureUsing(closure));
}
@Override
public CopySpec copySpec() {
return fileOperations.copySpec();
}
@Override
public File mkdir(Object path) {
return fileOperations.mkdir(path);
}
@Override
public boolean delete(Object... paths) {
return fileOperations.delete(paths);
}
@Override
public WorkResult delete(Action<? super DeleteSpec> action) {
return fileOperations.delete(action);
}
@Override
public ExecResult javaexec(Closure closure) {
return processOperations.javaexec(ConfigureUtil.configureUsing(closure));
}
@Override
public ExecResult javaexec(Action<? super JavaExecSpec> action) {
return processOperations.javaexec(action);
}
@Override
public ExecResult exec(Closure closure) {
return processOperations.exec(ConfigureUtil.configureUsing(closure));
}
@Override
public ExecResult exec(Action<? super ExecSpec> action) {
return processOperations.exec(action);
}
@Override
public <T> Provider<T> provider(Callable<T> value) {
return providerFactory.provider(value);
}
@Override
public <T> PropertyState<T> property(Class<T> clazz) {
return providerFactory.property(clazz);
}
@Override
public LoggingManager getLogging() {
return loggingManager;
}
@Override
public Logger getLogger() {
return LOGGER;
}
public String toString() {
return "script";
}
}
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.usecases;
import junit.framework.TestCase;
import org.apache.activemq.ActiveMQConnectionFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.jms.*;
import javax.jms.Queue;
import java.util.*;
import java.util.concurrent.CountDownLatch;
/*
* Test plan:
* Producer: publish messages into a queue, with 10 message groups, closing the group with seq=-1 on message 5 and message 10
* Consumers: 2 consumers created after all messages are sent
*
* Expected: for each group, messages 1-5 are handled by one consumer and messages 6-10 are handled by the other consumer. Messages
* 1 and 6 have the JMSXGroupFirstForConsumer property set to true.
*/
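// The group-close convention exercised below: a message whose JMSXGroupSeq property is -1
// asks the broker to close the group for whichever consumer currently owns it, e.g.
//   message.setStringProperty("JMSXGroupID", "3");
//   message.setIntProperty("JMSXGroupSeq", -1);
// (generateMessage() below builds exactly these two properties.)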
public class MessageGroupCloseTest extends TestCase {
private static final Logger LOG = LoggerFactory.getLogger(MessageGroupCloseTest.class);
private Connection connection;
// Released after all messages are created
private CountDownLatch latchMessagesCreated = new CountDownLatch(1);
private int messagesSent, messagesRecvd1, messagesRecvd2, messageGroupCount, errorCountFirstForConsumer, errorCountWrongConsumerClose, errorCountDuplicateClose;
// groupID, count
private HashMap<String, Integer> messageGroups1 = new HashMap<>();
private HashMap<String, Integer> messageGroups2 = new HashMap<>();
private HashSet<String> closedGroups1 = new HashSet<>();
private HashSet<String> closedGroups2 = new HashSet<>();
// with the prefetch too high, this bug is not realized
private static final String connStr = "vm://localhost?broker.persistent=false&broker.useJmx=false&jms.prefetchPolicy.all=1";
public void testNewConsumer() throws JMSException, InterruptedException {
ActiveMQConnectionFactory factory = new ActiveMQConnectionFactory(connStr);
connection = factory.createConnection();
connection.start();
final String queueName = this.getClass().getSimpleName();
final Thread producerThread = new Thread() {
@Override
public void run() {
try {
Session session = connection.createSession(true, Session.SESSION_TRANSACTED);
Queue queue = session.createQueue(queueName);
MessageProducer prod = session.createProducer(queue);
for (int i = 0; i < 10; i++) {
for (int j = 0; j < 10; j++) {
int seq = j + 1;
if ((j + 1) % 5 == 0) {
seq = -1;
}
Message message = generateMessage(session, Integer.toString(i), seq);
prod.send(message);
session.commit();
messagesSent++;
LOG.info("Sent message: group=" + i + ", seq=" + seq);
//Thread.sleep(20);
}
if (i % 100 == 0) {
LOG.info("Sent messages: group=" + i);
}
setMessageGroupCount(getMessageGroupCount() + 1);
}
LOG.info(messagesSent + " messages sent");
latchMessagesCreated.countDown();
prod.close();
session.close();
}
catch (Exception e) {
LOG.error("Producer failed", e);
}
}
};
final Thread consumerThread1 = new Thread() {
@Override
public void run() {
try {
latchMessagesCreated.await();
LOG.info("starting consumer1");
Session session = connection.createSession(true, Session.SESSION_TRANSACTED);
Queue queue = session.createQueue(queueName);
MessageConsumer con1 = session.createConsumer(queue);
while (true) {
Message message = con1.receive(5000);
if (message == null)
break;
LOG.info("Con1: got message " + formatMessage(message));
checkMessage(message, "Con1", messageGroups1, closedGroups1);
session.commit();
messagesRecvd1++;
if (messagesRecvd1 % 100 == 0) {
LOG.info("Con1: got messages count=" + messagesRecvd1);
}
//Thread.sleep(50);
}
LOG.info("Con1: total messages=" + messagesRecvd1);
LOG.info("Con1: total message groups=" + messageGroups1.size());
con1.close();
session.close();
}
catch (Exception e) {
LOG.error("Consumer 1 failed", e);
}
}
};
final Thread consumerThread2 = new Thread() {
@Override
public void run() {
try {
latchMessagesCreated.await();
LOG.info("starting consumer2");
Session session = connection.createSession(true, Session.SESSION_TRANSACTED);
Queue queue = session.createQueue(queueName);
MessageConsumer con2 = session.createConsumer(queue);
while (true) {
Message message = con2.receive(5000);
if (message == null) {
break;
}
LOG.info("Con2: got message " + formatMessage(message));
checkMessage(message, "Con2", messageGroups2, closedGroups2);
session.commit();
messagesRecvd2++;
if (messagesRecvd2 % 100 == 0) {
LOG.info("Con2: got messages count=" + messagesRecvd2);
}
//Thread.sleep(50);
}
con2.close();
session.close();
LOG.info("Con2: total messages=" + messagesRecvd2);
LOG.info("Con2: total message groups=" + messageGroups2.size());
}
catch (Exception e) {
LOG.error("Consumer 2 failed", e);
}
}
};
consumerThread2.start();
consumerThread1.start();
producerThread.start();
// wait for threads to finish
producerThread.join();
consumerThread1.join();
consumerThread2.join();
connection.close();
// check results
assertEquals("consumers should get all the messages", messagesSent, messagesRecvd1 + messagesRecvd2);
assertEquals("not all message groups closed for consumer 1", messageGroups1.size(), closedGroups1.size());
assertEquals("not all message groups closed for consumer 2", messageGroups2.size(), closedGroups2.size());
assertTrue("producer failed to send any messages", messagesSent > 0);
assertEquals("JMSXGroupFirstForConsumer not set", 0, errorCountFirstForConsumer);
assertEquals("wrong consumer got close message", 0, errorCountWrongConsumerClose);
assertEquals("consumer got duplicate close message", 0, errorCountDuplicateClose);
}
public Message generateMessage(Session session, String groupId, int seq) throws JMSException {
TextMessage m = session.createTextMessage();
m.setJMSType("TEST_MESSAGE");
m.setStringProperty("JMSXGroupID", groupId);
m.setIntProperty("JMSXGroupSeq", seq);
m.setText("<?xml?><testMessage/>");
return m;
}
public String formatMessage(Message m) {
try {
return "group=" + m.getStringProperty("JMSXGroupID") + ", seq=" + m.getIntProperty("JMSXGroupSeq");
}
catch (Exception e) {
return e.getClass().getSimpleName() + ": " + e.getMessage();
}
}
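/**
* Validates the group-handling invariants for one received message: the first message a
* consumer sees for a group must carry JMSXGroupFirstForConsumer, a close message
* (seq == -1) should never be the first message a consumer sees for a group, and a group
* must not be closed twice in a row for the same consumer. Violations increment the error
* counters asserted at the end of the test.
*/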
public void checkMessage(Message m,
String consumerId,
Map<String, Integer> messageGroups,
Set<String> closedGroups) throws JMSException {
String groupId = m.getStringProperty("JMSXGroupID");
int seq = m.getIntProperty("JMSXGroupSeq");
Integer count = messageGroups.get(groupId);
if (count == null) {
// first time seeing this group
if (!m.propertyExists("JMSXGroupFirstForConsumer") || !m.getBooleanProperty("JMSXGroupFirstForConsumer")) {
LOG.info(consumerId + ": JMSXGroupFirstForConsumer not set for group=" + groupId + ", seq=" + seq);
errorCountFirstForConsumer++;
}
if (seq == -1) {
closedGroups.add(groupId);
LOG.info(consumerId + ": wrong consumer got close message for group=" + groupId);
errorCountWrongConsumerClose++;
}
messageGroups.put(groupId, 1);
}
else {
// existing group
if (closedGroups.contains(groupId)) {
// group reassigned to same consumer
closedGroups.remove(groupId);
if (!m.propertyExists("JMSXGroupFirstForConsumer") || !m.getBooleanProperty("JMSXGroupFirstForConsumer")) {
LOG.info(consumerId + ": JMSXGroupFirstForConsumer not set for group=" + groupId + ", seq=" + seq);
errorCountFirstForConsumer++;
}
if (seq == -1) {
LOG.info(consumerId + ": consumer got duplicate close message for group=" + groupId);
errorCountDuplicateClose++;
}
}
if (seq == -1) {
closedGroups.add(groupId);
}
messageGroups.put(groupId, count + 1);
}
}
/**
* @return the messageGroupCount
*/
public int getMessageGroupCount() {
return messageGroupCount;
}
/**
* @param messageGroupCount the messageGroupCount to set
*/
public void setMessageGroupCount(int messageGroupCount) {
this.messageGroupCount = messageGroupCount;
}
}
|
|
/*
* Copyright 1999-2019 Seata.io Group.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.seata.server.storage.db.store;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import javax.sql.DataSource;
import io.seata.common.exception.DataAccessException;
import io.seata.common.exception.StoreException;
import io.seata.common.util.IOUtil;
import io.seata.common.util.StringUtils;
import io.seata.config.Configuration;
import io.seata.config.ConfigurationFactory;
import io.seata.core.constants.ConfigurationKeys;
import io.seata.core.constants.ServerTableColumnsName;
import io.seata.core.store.BranchTransactionDO;
import io.seata.core.store.GlobalTransactionDO;
import io.seata.core.store.LogStore;
import io.seata.core.store.db.sql.log.LogStoreSqlsFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static io.seata.common.DefaultValues.DEFAULT_STORE_DB_BRANCH_TABLE;
import static io.seata.common.DefaultValues.DEFAULT_STORE_DB_GLOBAL_TABLE;
/**
* The type Log store data base dao.
*
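* <p>A minimal usage sketch, assuming the Seata store DB configuration (db type, table
* names) is already in place; {@code dataSource} stands in for whatever pooled DataSource
* the server is wired with, and the xid value is purely illustrative:
* <pre>{@code
* LogStoreDataBaseDAO dao = new LogStoreDataBaseDAO(dataSource);
* GlobalTransactionDO global = dao.queryGlobalTransactionDO("example-xid");
* List<BranchTransactionDO> branches = dao.queryBranchTransactionDO("example-xid");
* }</pre>
*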
* @author zhangsen
*/
public class LogStoreDataBaseDAO implements LogStore {
private static final Logger LOGGER = LoggerFactory.getLogger(LogStoreDataBaseDAO.class);
/**
* The transaction name key
*/
private static final String TRANSACTION_NAME_KEY = "TRANSACTION_NAME";
/**
* The transaction name default size is 128
*/
private static final int TRANSACTION_NAME_DEFAULT_SIZE = 128;
/**
* The constant CONFIG.
*/
protected static final Configuration CONFIG = ConfigurationFactory.getInstance();
/**
* The Log store data source.
*/
protected DataSource logStoreDataSource = null;
/**
* The Global table.
*/
protected String globalTable;
/**
* The Branch table.
*/
protected String branchTable;
private String dbType;
private int transactionNameColumnSize = TRANSACTION_NAME_DEFAULT_SIZE;
/**
* Instantiates a new Log store data base dao.
*
* @param logStoreDataSource the log store data source
*/
public LogStoreDataBaseDAO(DataSource logStoreDataSource) {
this.logStoreDataSource = logStoreDataSource;
globalTable = CONFIG.getConfig(ConfigurationKeys.STORE_DB_GLOBAL_TABLE,
DEFAULT_STORE_DB_GLOBAL_TABLE);
branchTable = CONFIG.getConfig(ConfigurationKeys.STORE_DB_BRANCH_TABLE,
DEFAULT_STORE_DB_BRANCH_TABLE);
dbType = CONFIG.getConfig(ConfigurationKeys.STORE_DB_TYPE);
if (StringUtils.isBlank(dbType)) {
throw new StoreException("the db type must be configured.");
}
if (logStoreDataSource == null) {
throw new StoreException("logStoreDataSource must not be null.");
}
// init transaction_name size
initTransactionNameSize();
}
@Override
public GlobalTransactionDO queryGlobalTransactionDO(String xid) {
String sql = LogStoreSqlsFactory.getLogStoreSqls(dbType).getQueryGlobalTransactionSQL(globalTable);
Connection conn = null;
PreparedStatement ps = null;
ResultSet rs = null;
try {
conn = logStoreDataSource.getConnection();
conn.setAutoCommit(true);
ps = conn.prepareStatement(sql);
ps.setString(1, xid);
rs = ps.executeQuery();
if (rs.next()) {
return convertGlobalTransactionDO(rs);
} else {
return null;
}
} catch (SQLException e) {
throw new DataAccessException(e);
} finally {
IOUtil.close(rs, ps, conn);
}
}
@Override
public GlobalTransactionDO queryGlobalTransactionDO(long transactionId) {
String sql = LogStoreSqlsFactory.getLogStoreSqls(dbType).getQueryGlobalTransactionSQLByTransactionId(globalTable);
Connection conn = null;
PreparedStatement ps = null;
ResultSet rs = null;
try {
conn = logStoreDataSource.getConnection();
conn.setAutoCommit(true);
ps = conn.prepareStatement(sql);
ps.setLong(1, transactionId);
rs = ps.executeQuery();
if (rs.next()) {
return convertGlobalTransactionDO(rs);
} else {
return null;
}
} catch (SQLException e) {
throw new DataAccessException(e);
} finally {
IOUtil.close(rs, ps, conn);
}
}
@Override
public List<GlobalTransactionDO> queryGlobalTransactionDO(int[] statuses, int limit) {
List<GlobalTransactionDO> ret = new ArrayList<>();
Connection conn = null;
PreparedStatement ps = null;
ResultSet rs = null;
try {
conn = logStoreDataSource.getConnection();
conn.setAutoCommit(true);
String paramsPlaceHolder = org.apache.commons.lang.StringUtils.repeat("?", ",", statuses.length);
String sql = LogStoreSqlsFactory.getLogStoreSqls(dbType).getQueryGlobalTransactionSQLByStatus(globalTable, paramsPlaceHolder);
ps = conn.prepareStatement(sql);
for (int i = 0; i < statuses.length; i++) {
int status = statuses[i];
ps.setInt(i + 1, status);
}
ps.setInt(statuses.length + 1, limit);
rs = ps.executeQuery();
while (rs.next()) {
ret.add(convertGlobalTransactionDO(rs));
}
return ret;
} catch (SQLException e) {
throw new DataAccessException(e);
} finally {
IOUtil.close(rs, ps, conn);
}
}
@Override
public boolean insertGlobalTransactionDO(GlobalTransactionDO globalTransactionDO) {
String sql = LogStoreSqlsFactory.getLogStoreSqls(dbType).getInsertGlobalTransactionSQL(globalTable);
Connection conn = null;
PreparedStatement ps = null;
try {
int index = 1;
conn = logStoreDataSource.getConnection();
conn.setAutoCommit(true);
ps = conn.prepareStatement(sql);
ps.setString(index++, globalTransactionDO.getXid());
ps.setLong(index++, globalTransactionDO.getTransactionId());
ps.setInt(index++, globalTransactionDO.getStatus());
ps.setString(index++, globalTransactionDO.getApplicationId());
ps.setString(index++, globalTransactionDO.getTransactionServiceGroup());
String transactionName = globalTransactionDO.getTransactionName();
transactionName = transactionName.length() > transactionNameColumnSize ?
transactionName.substring(0, transactionNameColumnSize) :
transactionName;
ps.setString(index++, transactionName);
ps.setInt(index++, globalTransactionDO.getTimeout());
ps.setLong(index++, globalTransactionDO.getBeginTime());
ps.setString(index++, globalTransactionDO.getApplicationData());
return ps.executeUpdate() > 0;
} catch (SQLException e) {
throw new StoreException(e);
} finally {
IOUtil.close(ps, conn);
}
}
@Override
public boolean updateGlobalTransactionDO(GlobalTransactionDO globalTransactionDO) {
String sql = LogStoreSqlsFactory.getLogStoreSqls(dbType).getUpdateGlobalTransactionStatusSQL(globalTable);
Connection conn = null;
PreparedStatement ps = null;
try {
int index = 1;
conn = logStoreDataSource.getConnection();
conn.setAutoCommit(true);
ps = conn.prepareStatement(sql);
ps.setInt(index++, globalTransactionDO.getStatus());
ps.setString(index++, globalTransactionDO.getXid());
return ps.executeUpdate() > 0;
} catch (SQLException e) {
throw new StoreException(e);
} finally {
IOUtil.close(ps, conn);
}
}
@Override
public boolean deleteGlobalTransactionDO(GlobalTransactionDO globalTransactionDO) {
String sql = LogStoreSqlsFactory.getLogStoreSqls(dbType).getDeleteGlobalTransactionSQL(globalTable);
Connection conn = null;
PreparedStatement ps = null;
try {
conn = logStoreDataSource.getConnection();
conn.setAutoCommit(true);
ps = conn.prepareStatement(sql);
ps.setString(1, globalTransactionDO.getXid());
ps.executeUpdate();
} catch (SQLException e) {
throw new StoreException(e);
} finally {
IOUtil.close(ps, conn);
}
return true;
}
@Override
public List<BranchTransactionDO> queryBranchTransactionDO(String xid) {
List<BranchTransactionDO> rets = new ArrayList<>();
String sql = LogStoreSqlsFactory.getLogStoreSqls(dbType).getQueryBranchTransaction(branchTable);
Connection conn = null;
PreparedStatement ps = null;
ResultSet rs = null;
try {
conn = logStoreDataSource.getConnection();
conn.setAutoCommit(true);
ps = conn.prepareStatement(sql);
ps.setString(1, xid);
rs = ps.executeQuery();
while (rs.next()) {
rets.add(convertBranchTransactionDO(rs));
}
return rets;
} catch (SQLException e) {
throw new DataAccessException(e);
} finally {
IOUtil.close(rs, ps, conn);
}
}
@Override
public List<BranchTransactionDO> queryBranchTransactionDO(List<String> xids) {
int length = xids.size();
List<BranchTransactionDO> rets = new ArrayList<>(length * 3);
String paramsPlaceHolder = org.apache.commons.lang.StringUtils.repeat("?", ",", length);
String sql = LogStoreSqlsFactory.getLogStoreSqls(dbType).getQueryBranchTransaction(branchTable, paramsPlaceHolder);
Connection conn = null;
PreparedStatement ps = null;
ResultSet rs = null;
try {
conn = logStoreDataSource.getConnection();
conn.setAutoCommit(true);
ps = conn.prepareStatement(sql);
for (int i = 0; i < length; i++) {
ps.setString(i + 1, xids.get(i));
}
rs = ps.executeQuery();
while (rs.next()) {
rets.add(convertBranchTransactionDO(rs));
}
return rets;
} catch (SQLException e) {
throw new DataAccessException(e);
} finally {
IOUtil.close(rs, ps, conn);
}
}
@Override
public boolean insertBranchTransactionDO(BranchTransactionDO branchTransactionDO) {
String sql = LogStoreSqlsFactory.getLogStoreSqls(dbType).getInsertBranchTransactionSQL(branchTable);
Connection conn = null;
PreparedStatement ps = null;
try {
int index = 1;
conn = logStoreDataSource.getConnection();
conn.setAutoCommit(true);
ps = conn.prepareStatement(sql);
ps.setString(index++, branchTransactionDO.getXid());
ps.setLong(index++, branchTransactionDO.getTransactionId());
ps.setLong(index++, branchTransactionDO.getBranchId());
ps.setString(index++, branchTransactionDO.getResourceGroupId());
ps.setString(index++, branchTransactionDO.getResourceId());
ps.setString(index++, branchTransactionDO.getBranchType());
ps.setInt(index++, branchTransactionDO.getStatus());
ps.setString(index++, branchTransactionDO.getClientId());
ps.setString(index++, branchTransactionDO.getApplicationData());
return ps.executeUpdate() > 0;
} catch (SQLException e) {
throw new StoreException(e);
} finally {
IOUtil.close(ps, conn);
}
}
@Override
public boolean updateBranchTransactionDO(BranchTransactionDO branchTransactionDO) {
boolean shouldUpdateAppData = StringUtils.isNotBlank(branchTransactionDO.getApplicationData());
String sql = shouldUpdateAppData ?
LogStoreSqlsFactory.getLogStoreSqls(dbType).getUpdateBranchTransactionStatusAppDataSQL(branchTable) :
LogStoreSqlsFactory.getLogStoreSqls(dbType).getUpdateBranchTransactionStatusSQL(branchTable);
Connection conn = null;
PreparedStatement ps = null;
try {
int index = 1;
conn = logStoreDataSource.getConnection();
conn.setAutoCommit(true);
ps = conn.prepareStatement(sql);
ps.setInt(index++, branchTransactionDO.getStatus());
if (shouldUpdateAppData) {
ps.setString(index++, branchTransactionDO.getApplicationData());
}
ps.setString(index++, branchTransactionDO.getXid());
ps.setLong(index++, branchTransactionDO.getBranchId());
return ps.executeUpdate() > 0;
} catch (SQLException e) {
throw new StoreException(e);
} finally {
IOUtil.close(ps, conn);
}
}
@Override
public boolean deleteBranchTransactionDO(BranchTransactionDO branchTransactionDO) {
String sql = LogStoreSqlsFactory.getLogStoreSqls(dbType).getDeleteBranchTransactionByBranchIdSQL(branchTable);
Connection conn = null;
PreparedStatement ps = null;
try {
conn = logStoreDataSource.getConnection();
conn.setAutoCommit(true);
ps = conn.prepareStatement(sql);
ps.setString(1, branchTransactionDO.getXid());
ps.setLong(2, branchTransactionDO.getBranchId());
ps.executeUpdate();
} catch (SQLException e) {
throw new StoreException(e);
} finally {
IOUtil.close(ps, conn);
}
return true;
}
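/**
* Returns the larger of the maximum global-session id and the maximum branch-session id
* currently stored; the high/low arguments are forwarded unchanged as the bind parameters
* of the store's max-id queries.
*/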
@Override
public long getCurrentMaxSessionId(long high, long low) {
String transMaxSql = LogStoreSqlsFactory.getLogStoreSqls(dbType).getQueryGlobalMax(globalTable);
String branchMaxSql = LogStoreSqlsFactory.getLogStoreSqls(dbType).getQueryBranchMax(branchTable);
long maxTransId = getCurrentMaxSessionId(transMaxSql, high, low);
long maxBranchId = getCurrentMaxSessionId(branchMaxSql, high, low);
return Math.max(maxBranchId, maxTransId);
}
private long getCurrentMaxSessionId(String sql, long high, long low) {
long max = 0;
Connection conn = null;
PreparedStatement ps = null;
ResultSet rs = null;
try {
int index = 1;
conn = logStoreDataSource.getConnection();
conn.setAutoCommit(true);
ps = conn.prepareStatement(sql);
ps.setLong(index++, high);
ps.setLong(index++, low);
rs = ps.executeQuery();
while (rs.next()) {
max = rs.getLong(1);
}
} catch (SQLException e) {
throw new DataAccessException(e);
} finally {
IOUtil.close(rs, ps, conn);
}
return max;
}
private GlobalTransactionDO convertGlobalTransactionDO(ResultSet rs) throws SQLException {
GlobalTransactionDO globalTransactionDO = new GlobalTransactionDO();
globalTransactionDO.setXid(rs.getString(ServerTableColumnsName.GLOBAL_TABLE_XID));
globalTransactionDO.setStatus(rs.getInt(ServerTableColumnsName.GLOBAL_TABLE_STATUS));
globalTransactionDO.setApplicationId(rs.getString(ServerTableColumnsName.GLOBAL_TABLE_APPLICATION_ID));
globalTransactionDO.setBeginTime(rs.getLong(ServerTableColumnsName.GLOBAL_TABLE_BEGIN_TIME));
globalTransactionDO.setTimeout(rs.getInt(ServerTableColumnsName.GLOBAL_TABLE_TIMEOUT));
globalTransactionDO.setTransactionId(rs.getLong(ServerTableColumnsName.GLOBAL_TABLE_TRANSACTION_ID));
globalTransactionDO.setTransactionName(rs.getString(ServerTableColumnsName.GLOBAL_TABLE_TRANSACTION_NAME));
globalTransactionDO.setTransactionServiceGroup(
rs.getString(ServerTableColumnsName.GLOBAL_TABLE_TRANSACTION_SERVICE_GROUP));
globalTransactionDO.setApplicationData(rs.getString(ServerTableColumnsName.GLOBAL_TABLE_APPLICATION_DATA));
globalTransactionDO.setGmtCreate(rs.getTimestamp(ServerTableColumnsName.GLOBAL_TABLE_GMT_CREATE));
globalTransactionDO.setGmtModified(rs.getTimestamp(ServerTableColumnsName.GLOBAL_TABLE_GMT_MODIFIED));
return globalTransactionDO;
}
private BranchTransactionDO convertBranchTransactionDO(ResultSet rs) throws SQLException {
BranchTransactionDO branchTransactionDO = new BranchTransactionDO();
branchTransactionDO.setResourceGroupId(rs.getString(ServerTableColumnsName.BRANCH_TABLE_RESOURCE_GROUP_ID));
branchTransactionDO.setStatus(rs.getInt(ServerTableColumnsName.BRANCH_TABLE_STATUS));
branchTransactionDO.setApplicationData(rs.getString(ServerTableColumnsName.BRANCH_TABLE_APPLICATION_DATA));
branchTransactionDO.setClientId(rs.getString(ServerTableColumnsName.BRANCH_TABLE_CLIENT_ID));
branchTransactionDO.setXid(rs.getString(ServerTableColumnsName.BRANCH_TABLE_XID));
branchTransactionDO.setResourceId(rs.getString(ServerTableColumnsName.BRANCH_TABLE_RESOURCE_ID));
branchTransactionDO.setBranchId(rs.getLong(ServerTableColumnsName.BRANCH_TABLE_BRANCH_ID));
branchTransactionDO.setBranchType(rs.getString(ServerTableColumnsName.BRANCH_TABLE_BRANCH_TYPE));
branchTransactionDO.setTransactionId(rs.getLong(ServerTableColumnsName.BRANCH_TABLE_TRANSACTION_ID));
branchTransactionDO.setGmtCreate(rs.getTimestamp(ServerTableColumnsName.BRANCH_TABLE_GMT_CREATE));
branchTransactionDO.setGmtModified(rs.getTimestamp(ServerTableColumnsName.BRANCH_TABLE_GMT_MODIFIED));
return branchTransactionDO;
}
/**
* the public modifier only for test
*/
public void initTransactionNameSize() {
ColumnInfo columnInfo = queryTableStructure(globalTable, TRANSACTION_NAME_KEY);
if (columnInfo == null) {
LOGGER.warn("{} table or {} column not found", globalTable, TRANSACTION_NAME_KEY);
return;
}
this.transactionNameColumnSize = columnInfo.getColumnSize();
}
/**
* query column info from table
*
* @param tableName the table name
* @param colName the column name
* @return the column info
*/
private ColumnInfo queryTableStructure(final String tableName, String colName) {
try (Connection conn = logStoreDataSource.getConnection()) {
DatabaseMetaData dbmd = conn.getMetaData();
String schema = getSchema(conn);
ResultSet tableRs = dbmd.getTables(null, schema, "%", new String[]{"TABLE"});
while (tableRs.next()) {
String table = tableRs.getString("TABLE_NAME");
if (StringUtils.equalsIgnoreCase(table, tableName)) {
ResultSet columnRs = conn.getMetaData().getColumns(null, schema, table, null);
while (columnRs.next()) {
ColumnInfo info = new ColumnInfo();
String columnName = columnRs.getString("COLUMN_NAME");
info.setColumnName(columnName);
String typeName = columnRs.getString("TYPE_NAME");
info.setTypeName(typeName);
int columnSize = columnRs.getInt("COLUMN_SIZE");
info.setColumnSize(columnSize);
String remarks = columnRs.getString("REMARKS");
info.setRemarks(remarks);
if (StringUtils.equalsIgnoreCase(columnName, colName)) {
return info;
}
}
break;
}
}
} catch (SQLException e) {
LOGGER.error("query transaction_name size fail, {}", e.getMessage(), e);
}
return null;
}
private String getSchema(Connection conn) throws SQLException {
if ("h2".equalsIgnoreCase(dbType)) {
return null;
} else if ("postgresql".equalsIgnoreCase(dbType)) {
String sql = "select current_schema";
try (PreparedStatement ps = conn.prepareStatement(sql);
ResultSet rs = ps.executeQuery()) {
String schema = null;
if (rs.next()) {
schema = rs.getString(1);
}
return schema;
} catch (SQLException e) {
throw new StoreException(e);
}
} else {
return conn.getMetaData().getUserName();
}
}
/**
* Sets log store data source.
*
* @param logStoreDataSource the log store data source
*/
public void setLogStoreDataSource(DataSource logStoreDataSource) {
this.logStoreDataSource = logStoreDataSource;
}
/**
* Sets global table.
*
* @param globalTable the global table
*/
public void setGlobalTable(String globalTable) {
this.globalTable = globalTable;
}
/**
* Sets branch table.
*
* @param branchTable the branch table
*/
public void setBranchTable(String branchTable) {
this.branchTable = branchTable;
}
/**
* Sets db type.
*
* @param dbType the db type
*/
public void setDbType(String dbType) {
this.dbType = dbType;
}
public int getTransactionNameColumnSize() {
return transactionNameColumnSize;
}
/**
* column info
*/
private static class ColumnInfo {
private String columnName;
private String typeName;
private int columnSize;
private String remarks;
public String getColumnName() {
return columnName;
}
public void setColumnName(String columnName) {
this.columnName = columnName;
}
public String getTypeName() {
return typeName;
}
public void setTypeName(String typeName) {
this.typeName = typeName;
}
public int getColumnSize() {
return columnSize;
}
public void setColumnSize(int columnSize) {
this.columnSize = columnSize;
}
public String getRemarks() {
return remarks;
}
public void setRemarks(String remarks) {
this.remarks = remarks;
}
}
}
|
|
package de.ravenguard.ausbildungsnachweis.model;
import de.ravenguard.ausbildungsnachweis.utils.DateUtils;
import de.ravenguard.ausbildungsnachweis.utils.Utils;
import java.time.LocalDate;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlElementWrapper;
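/**
* One training period of the report book: a labelled span between two working-day dates,
* together with its school class, class teacher, recorded months and school subjects.
*/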
@XmlAccessorType(XmlAccessType.FIELD)
public class TrainingPeriod implements TreeElement {
private String label;
private LocalDate begin;
private LocalDate end;
private String schoolClass;
private String classTeacher;
@XmlElementWrapper(name = "months")
@XmlElement(name = "month")
private final List<DataMonth> months = new ArrayList<>();
@XmlElementWrapper(name = "schoolSubjects")
@XmlElement(name = "schoolSubject")
private final List<SchoolSubject> schoolSubjects = new ArrayList<>();
/**
* No-argument constructor, required for JAXB unmarshalling.
*/
public TrainingPeriod() {}
/**
* Field Constructor.
*
* @param label label, may not be null or empty
* @param begin begin of period, may not be null
* @param end end of period, may not be null
* @param schoolClass schoolClass, may not be null or empty
* @param classTeacher class teacher, may not be null or empty
* @param months months of period, may not be null
* @param schoolSubjects schoolSubjects, may not be null
* @throws IllegalDateException if begin or end is not a working day or end is before begin
*/
public TrainingPeriod(String label, LocalDate begin, LocalDate end, String schoolClass,
String classTeacher, List<DataMonth> months, List<SchoolSubject> schoolSubjects)
throws IllegalDateException {
super();
setLabel(label);
setBegin(begin);
setEnd(end);
setSchoolClass(schoolClass);
setClassTeacher(classTeacher);
setMonths(months);
setSchoolSubjects(schoolSubjects);
}
/**
* Adds a month to the training period.
*
* @param month month to add; may not be null, must not already be in the list, and must lie
* within the bounds of the period
*/
public void addMonth(DataMonth month) {
if (month == null) {
throw new IllegalArgumentException("month cannot be null.");
}
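// A month conflicts if one with the same calendar month is already present (the year is not
// compared), or if it begins outside the period's begin/end bounds.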
final boolean conflict = months.stream().anyMatch(currentMonth -> {
return currentMonth.getBegin().getMonth() == month.getBegin().getMonth()
|| month.getBegin().isBefore(begin) || month.getBegin().isAfter(end);
});
if (conflict) {
throw new IllegalArgumentException(
"month allready added or before begin or after end of period.");
}
months.add(month);
}
/**
* Adds a school subject to the training period.
*
* @param schoolSubject school subject to add, may not be null and not already in the list
*/
public void addSchoolSubject(SchoolSubject schoolSubject) {
if (schoolSubject == null) {
throw new IllegalArgumentException("schoolSubject cannot be null.");
}
final boolean conflict = schoolSubjects.stream().anyMatch(currentSchoolSubject -> {
return currentSchoolSubject.getLabel().equalsIgnoreCase(schoolSubject.getLabel());
});
if (conflict) {
throw new IllegalArgumentException(
"schoolSubject allready added or before begin or after end of period.");
}
schoolSubjects.add(schoolSubject);
schoolSubjects.sort(null);
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
final TrainingPeriod other = (TrainingPeriod) obj;
if (begin == null) {
if (other.begin != null) {
return false;
}
} else if (!begin.equals(other.begin)) {
return false;
}
if (classTeacher == null) {
if (other.classTeacher != null) {
return false;
}
} else if (!classTeacher.equals(other.classTeacher)) {
return false;
}
if (end == null) {
if (other.end != null) {
return false;
}
} else if (!end.equals(other.end)) {
return false;
}
if (label == null) {
if (other.label != null) {
return false;
}
} else if (!label.equals(other.label)) {
return false;
}
if (months == null) {
if (other.months != null) {
return false;
}
} else if (!months.equals(other.months)) {
return false;
}
if (schoolClass == null) {
if (other.schoolClass != null) {
return false;
}
} else if (!schoolClass.equals(other.schoolClass)) {
return false;
}
if (schoolSubjects == null) {
if (other.schoolSubjects != null) {
return false;
}
} else if (!schoolSubjects.equals(other.schoolSubjects)) {
return false;
}
return true;
}
public LocalDate getBegin() {
return begin;
}
@Override
public List<DataMonth> getChildren() {
return getMonths();
}
public String getClassTeacher() {
return classTeacher;
}
public LocalDate getEnd() {
return end;
}
public String getLabel() {
return label;
}
public List<DataMonth> getMonths() {
return months;
}
public String getSchoolClass() {
return schoolClass;
}
public List<SchoolSubject> getSchoolSubjects() {
return schoolSubjects;
}
@Override
public String getTreeLabel() {
return getLabel() + System.lineSeparator() + "Beginn: " + Utils.formatDate(begin)
+ System.lineSeparator() + "Ende: " + Utils.formatDate(end);
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + (begin == null ? 0 : begin.hashCode());
result = prime * result + (classTeacher == null ? 0 : classTeacher.hashCode());
result = prime * result + (end == null ? 0 : end.hashCode());
result = prime * result + (label == null ? 0 : label.hashCode());
result = prime * result + (months == null ? 0 : months.hashCode());
result = prime * result + (schoolClass == null ? 0 : schoolClass.hashCode());
result = prime * result + (schoolSubjects == null ? 0 : schoolSubjects.hashCode());
return result;
}
/**
* Removes a month from the training period.
*
* @param month the month to remove, may not be null.
*/
public void removeMonth(DataMonth month) {
if (month == null) {
throw new IllegalArgumentException("month cannot be null.");
}
months.remove(month);
}
/**
* Removes a SchoolSubject from the training period.
*
* @param subject the subject to remove, may not be null.
*/
public void removeSchoolSubject(SchoolSubject subject) {
if (subject == null) {
throw new IllegalArgumentException("subject cannot be null.");
}
schoolSubjects.remove(subject);
}
/**
* Sets the begin date.
*
* @param begin LocalDate of the begin, cannot be null.
* @throws IllegalDateException if begin is not a working day or is after end
*/
public void setBegin(LocalDate begin) throws IllegalDateException {
if (begin == null) {
throw new NullPointerException("begin may not be null.");
}
if (!DateUtils.checkWorkday(begin)) {
throw new IllegalDateException("begin must be a working day.");
}
if (end != null && begin.isAfter(end)) {
throw new IllegalDateException("begin may not be after end.");
}
this.begin = begin;
}
/**
* Sets the class teacher of the period.
*
* @param classTeacher class teacher to set, may not be null or empty
*/
public void setClassTeacher(String classTeacher) {
if (classTeacher == null || classTeacher.trim().length() == 0) {
throw new IllegalArgumentException("classTeacher cannot be null or empty.");
}
this.classTeacher = classTeacher.trim();
}
/**
* Sets the end date.
*
* @param end LocalDate of the end, cannot be null.
* @throws IllegalDateException if end is not a working day or end is before begin
*/
public void setEnd(LocalDate end) throws IllegalDateException {
if (end == null) {
throw new NullPointerException("end may not be null.");
}
if (!DateUtils.checkWorkday(end)) {
throw new IllegalDateException("end must be a working day.");
}
if (begin != null && end.isBefore(begin)) {
throw new IllegalDateException("end may not be before begin.");
}
this.end = end;
}
/**
* Sets the label of the period.
*
* @param label label of the period, may not be empty or null
*/
public void setLabel(String label) {
if (label == null || label.trim().length() == 0) {
throw new IllegalArgumentException("label cannot be null or empty.");
}
this.label = label.trim();
}
/**
* Sets the months.
*
* @param months List of months to add, may not be null.
*/
public void setMonths(List<DataMonth> months) {
if (months == null) {
throw new IllegalArgumentException("months cannot be null.");
}
this.months.clear();
months.forEach(month -> {
addMonth(month);
});
}
/**
* Sets the schoolClass of the period.
*
* @param schoolClass schoolClass of the period, may not be empty or null
*/
public void setSchoolClass(String schoolClass) {
if (schoolClass == null || schoolClass.trim().length() == 0) {
throw new IllegalArgumentException("schoolClass cannot be null or empty.");
}
this.schoolClass = schoolClass.trim();
}
/**
* Sets the schoolSubjects.
*
* @param schoolSubjects List of schoolSubjects to add, may not be null.
*/
public void setSchoolSubjects(List<SchoolSubject> schoolSubjects) {
if (schoolSubjects == null) {
throw new IllegalArgumentException("schoolSubjects cannot be null.");
}
this.schoolSubjects.clear();
schoolSubjects.forEach(subject -> {
addSchoolSubject(subject);
});
}
@Override
public String toString() {
return getTreeLabel();
}
}
|
|
package tagit2.service.importer;
import java.awt.image.BufferedImage;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.net.URL;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.Set;
import java.util.UUID;
import javax.imageio.ImageIO;
import kiwi.api.config.ConfigurationService;
import kiwi.api.content.ContentItemService;
import kiwi.api.entity.KiWiEntityManager;
import kiwi.api.event.KiWiEvents;
import kiwi.api.geo.Location;
import kiwi.api.importexport.ImportService;
import kiwi.api.importexport.importer.ImporterLocal;
import kiwi.api.importexport.importer.ImporterRemote;
import kiwi.context.CurrentContentItemFactory;
import kiwi.model.content.ContentItem;
import kiwi.model.kbase.KiWiUriResource;
import kiwi.model.user.User;
import org.jboss.seam.Component;
import org.jboss.seam.ScopeType;
import org.jboss.seam.annotations.In;
import org.jboss.seam.annotations.Logger;
import org.jboss.seam.annotations.Name;
import org.jboss.seam.annotations.Observer;
import org.jboss.seam.annotations.Scope;
import org.jboss.seam.annotations.intercept.BypassInterceptors;
import org.jboss.seam.log.Log;
import tagit2.api.content.route.RouteFacade;
import tagit2.api.content.route.TrackPointFacade;
import tagit2.api.importer.TrackCleaningService;
import tagit2.util.parser.GPXParser;
import tagit2.util.parser.gpx.Track;
import tagit2.util.parser.gpx.Trackpoint;
import tagit2.util.route.AltitudeService;
import tagit2.util.route.DistanceService;
import tagit2.util.route.ProfileService;
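/**
* GPX importer: parses a GPX stream into a Track, trims it to the configured maximum number
* of trackpoints, creates a RouteFacade plus one TrackPointFacade per point, fills in
* altitudes and total distance where the altitude service is available, attaches a rendered
* elevation profile image, and finally makes the new route the current content item.
*/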
@Name("kiwi.service.importer.gpx")
@Scope(ScopeType.STATELESS)
public class GPXImporterImpl implements ImporterLocal, ImporterRemote {
@Logger
private Log log;
@In
private KiWiEntityManager kiwiEntityManager;
@In(create = true)
private ContentItemService contentItemService;
@In(create = true)
private User currentUser;
@In(create=true)
private ConfigurationService configurationService;
@In(create=true)
private CurrentContentItemFactory currentContentItemFactory;
@In(create=true,value="routeit.trackPointCleaner")
private TrackCleaningService trackCleaner;
private static String[] mime_types = {
"application/gpx+xml"
};
private final String NO_NAME = "Route";
@Observer(KiWiEvents.SEAM_POSTINIT)
@BypassInterceptors
public void initialise() {
log.info("registering GPX importer ...");
ImportService ies = (ImportService) Component.getInstance("kiwi.core.importService");
ies.registerImporter(this.getName(),"kiwi.service.importer.gpx",this);
}
@Override
public Set<String> getAcceptTypes() {
return new HashSet<String>(Arrays.asList(mime_types));
}
@Override
public String getDescription() {
return "Importer for parsing the GPX format";
}
@Override
public String getName() {
return "GPX";
}
@Override
public int importData(URL url, String format, Set<KiWiUriResource> types,
Set<ContentItem> tags, User user, Collection<ContentItem> output) {
try {
log.info("importing gpx from URL #0", url);
trackname = url.getPath().substring(url.getPath().lastIndexOf('/')+1,url.getPath().length()-4);
trackname = trackname.replaceAll("%20", " ");
return importData(url.openStream(), format, types, tags, user, output);
} catch(IOException e) {
log.error("I/O error while importing data from URL #0: #1",url, e.getMessage());
return 0;
}
}
@Override
public int importData(InputStream is, String format,
Set<KiWiUriResource> types, Set<ContentItem> tags, User user, Collection<ContentItem> output) {
GPXParser parser = new GPXParser();
try {
return importData(parser.parse(is), types, tags, user, output);
} catch (IOException e) {
e.printStackTrace();
}
return 0;
}
@Override
public int importData(Reader reader, String format,
Set<KiWiUriResource> types, Set<ContentItem> tags, User user, Collection<ContentItem> output) {
//TODO implement
log.error("not yet implemented");
return 0;
}
private String trackname;
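// Fallback route name derived from the imported file's URL; consumed (and reset) by
// importData(Track, ...) when the GPX track itself carries no name.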
private int importData( Track t,
Set<KiWiUriResource> types, Set<ContentItem> tags, User user, Collection<ContentItem> output ) {
try {
log.info("track was parsed");
//set trackname
if( t.getName() == null ) {
if( trackname != null ) {
t.setName(trackname);
trackname = null;
} else {
t.setName(this.NO_NAME);
}
}
//set max trackpoints
int maxTrackpoints = configurationService.getIntConfiguration("tagit.route.maxTrackpoints",100);
//TODO should be more configurable
if( t.getTracklist().size() > maxTrackpoints ) {
trackCleaner.reduceTracklistByNumber(t, maxTrackpoints);
}
log.info("trackpoints reduced, track now contains #0 points", t.getTracklist().size() );
//create Route
RouteFacade currentRoute = kiwiEntityManager.createFacade(contentItemService.createContentItem("/tagit/route/" + UUID.randomUUID().toString()), RouteFacade.class);
currentRoute.setAuthor(currentUser);
//set startPoint
Location startTP = t.getTracklist().get(0).getLocation();
currentRoute.setLatitude(startTP.getLatitude());
currentRoute.setLongitude(startTP.getLongitude());
//persist route
kiwiEntityManager.persist(currentRoute);
//create TrackpointList
LinkedList<TrackPointFacade> trackpoints = new LinkedList<TrackPointFacade>();
for( Trackpoint tp : t.getTracklist() ) {
TrackPointFacade tpf = kiwiEntityManager.createFacade( contentItemService.createContentItem("/tagit/trackPoint/" + UUID.randomUUID().toString()), TrackPointFacade.class);
tpf.setAuthor(currentUser);
tpf.setLatitude(tp.getLocation().getLatitude());
tpf.setLongitude(tp.getLocation().getLongitude());
if( tp.getAltitude() != 0 ) {
tpf.setAltitude(tp.getAltitude());
} else {
tpf.setAltitude(0);
}
trackpoints.add(tpf);
}
//set altitude and distance
//test if altService is active
boolean altitudeServiceIsActive = true;
if( AltitudeService.getAltitude(0, 0) == 0 ) {
log.info("AltitudeSerivce is not active");
altitudeServiceIsActive = false;
}
if( altitudeServiceIsActive ) {
//get elevations
//TODO if first value is not defined!!!
double lastValidElevation = 0;
for( TrackPointFacade tp : trackpoints ) {
if( tp.getAltitude() == 0 ) {
tp.setAltitude( AltitudeService.getAltitude(tp.getLatitude(),tp.getLongitude()));
if( tp.getAltitude() == -32768 ) {
tp.setAltitude(lastValidElevation);
}
}
lastValidElevation = tp.getAltitude();
}
log.info("Altitudes were setted");
}
if( !trackpoints.isEmpty() ) {
double dist = DistanceService.getTotalDistance( trackpoints);
log.info("orig dist: #0", dist);
dist = Math.round( dist * 100.0 ) / 100.0;
log.info(dist);
currentRoute.setDistance( dist );
currentRoute.setVerticalClimb( AltitudeService.getVerticalClimb( trackpoints ) );
}
//get profile
byte [] profile = getImageBytes( ProfileService.getProfile(trackpoints,currentRoute).createBufferedImage(540, 180) );
//create multimedia (profile)
//some string ops for fileName
String name = t.getName();
contentItemService.updateMediaContentItem(currentRoute.getDelegate(), profile, "image/png", name);
//persist trackpoints
int ordinal = 0;
for( TrackPointFacade tp : trackpoints ) {
//set ordinal
tp.setOrdinal(ordinal);
contentItemService.updateTitle(tp, "R:"+currentRoute.getId()+"_TP:"+ordinal);
++ordinal;
//persist
kiwiEntityManager.persist(tp);
}
//set Title and TODO description
contentItemService.updateTitle(currentRoute, t.getName());
currentRoute.setTrackPoints(trackpoints);
// kiwiEntityManager.flush();
// save again to process updates
//kiwiEntityManager.refresh(currentRoute);
//set route as currentContentItem
currentContentItemFactory.setCurrentItemId(currentRoute.getId());
currentContentItemFactory.refresh();
} catch (Exception e) {
log.error("persist route #0 failed", t.getName());
e.printStackTrace();
}
return 1;
}
private byte[] getImageBytes(BufferedImage image) {
byte[] resultImageAsRawBytes = new byte[0];
//write
try {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
ImageIO.write( image, "png", baos );
//close
baos.flush();
resultImageAsRawBytes = baos.toByteArray();
baos.flush();
baos.close();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return resultImageAsRawBytes;
}
}
|
|
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* AlternativeUnitTypeForecast.java
*
* This file was auto-generated from WSDL
* by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter.
*/
package com.google.api.ads.admanager.axis.v202111;
/**
* A view of the forecast in terms of an alternative unit type.
*
* <p>For example, a forecast for an impressions goal may
* include this to
* express the matched, available, and possible viewable
* impressions.
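*
* <p>A minimal construction sketch (values are illustrative, and VIEWABLE_IMPRESSIONS is
* assumed to be one of the generated {@code UnitType} constants):
* <pre>{@code
* AlternativeUnitTypeForecast viewable = new AlternativeUnitTypeForecast(
*     UnitType.VIEWABLE_IMPRESSIONS, 1000L, 800L, 1200L);
* }</pre>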
*/
public class AlternativeUnitTypeForecast implements java.io.Serializable {
/* The alternative unit type being presented. */
private com.google.api.ads.admanager.axis.v202111.UnitType unitType;
/* The number of units, defined by {@link #unitType}, that match
* the
* specified targeting and delivery settings. */
private java.lang.Long matchedUnits;
/* The number of units, defined by {@link #unitType}, that can
* be
* booked without affecting the delivery of any reserved
* line items. Exceeding
* this value will not cause an overbook, but lower-priority
* line items may
* not run. */
private java.lang.Long availableUnits;
/* The maximum number of units, defined by {@link #unitType},
* that
* could be booked by taking inventory away from lower-priority
* line items. */
private java.lang.Long possibleUnits;
public AlternativeUnitTypeForecast() {
}
public AlternativeUnitTypeForecast(
com.google.api.ads.admanager.axis.v202111.UnitType unitType,
java.lang.Long matchedUnits,
java.lang.Long availableUnits,
java.lang.Long possibleUnits) {
this.unitType = unitType;
this.matchedUnits = matchedUnits;
this.availableUnits = availableUnits;
this.possibleUnits = possibleUnits;
}
@Override
public String toString() {
return com.google.common.base.MoreObjects.toStringHelper(this.getClass())
.omitNullValues()
.add("availableUnits", getAvailableUnits())
.add("matchedUnits", getMatchedUnits())
.add("possibleUnits", getPossibleUnits())
.add("unitType", getUnitType())
.toString();
}
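// Illustrative usage (a minimal sketch, not part of the generated service code; the
// IMPRESSIONS constant is assumed to exist on the generated UnitType class):
//
//   AlternativeUnitTypeForecast view = new AlternativeUnitTypeForecast(
//       com.google.api.ads.admanager.axis.v202111.UnitType.IMPRESSIONS,
//       1000L,  // matchedUnits
//       800L,   // availableUnits
//       null);  // possibleUnits unknown; omitted by the null-omitting toString() above
//   System.out.println(view);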
/**
* Gets the unitType value for this AlternativeUnitTypeForecast.
*
* @return unitType The alternative unit type being presented.
*/
public com.google.api.ads.admanager.axis.v202111.UnitType getUnitType() {
return unitType;
}
/**
* Sets the unitType value for this AlternativeUnitTypeForecast.
*
* @param unitType The alternative unit type being presented.
*/
public void setUnitType(com.google.api.ads.admanager.axis.v202111.UnitType unitType) {
this.unitType = unitType;
}
/**
* Gets the matchedUnits value for this AlternativeUnitTypeForecast.
*
* @return matchedUnits The number of units, defined by {@link #unitType}, that match
* the specified targeting and delivery settings.
*/
public java.lang.Long getMatchedUnits() {
return matchedUnits;
}
/**
* Sets the matchedUnits value for this AlternativeUnitTypeForecast.
*
* @param matchedUnits The number of units, defined by {@link #unitType}, that match
* the specified targeting and delivery settings.
*/
public void setMatchedUnits(java.lang.Long matchedUnits) {
this.matchedUnits = matchedUnits;
}
/**
* Gets the availableUnits value for this AlternativeUnitTypeForecast.
*
* @return availableUnits The number of units, defined by {@link #unitType}, that can be
* booked without affecting the delivery of any reserved line items. Exceeding this value
* will not cause an overbook, but lower-priority line items may not run.
*/
public java.lang.Long getAvailableUnits() {
return availableUnits;
}
/**
* Sets the availableUnits value for this AlternativeUnitTypeForecast.
*
* @param availableUnits The number of units, defined by {@link #unitType}, that can be
* booked without affecting the delivery of any reserved line items. Exceeding this value
* will not cause an overbook, but lower-priority line items may not run.
*/
public void setAvailableUnits(java.lang.Long availableUnits) {
this.availableUnits = availableUnits;
}
/**
* Gets the possibleUnits value for this AlternativeUnitTypeForecast.
*
* @return possibleUnits The maximum number of units, defined by {@link #unitType}, that
* could be booked by taking inventory away from lower-priority line items.
*/
public java.lang.Long getPossibleUnits() {
return possibleUnits;
}
/**
* Sets the possibleUnits value for this AlternativeUnitTypeForecast.
*
* @param possibleUnits The maximum number of units, defined by {@link #unitType}, that
* could be booked by taking inventory away from lower-priority line items.
*/
public void setPossibleUnits(java.lang.Long possibleUnits) {
this.possibleUnits = possibleUnits;
}
private java.lang.Object __equalsCalc = null;
public synchronized boolean equals(java.lang.Object obj) {
if (obj == null) return false;
if (this == obj) return true;
if (!(obj instanceof AlternativeUnitTypeForecast)) return false;
AlternativeUnitTypeForecast other = (AlternativeUnitTypeForecast) obj;
if (__equalsCalc != null) {
return (__equalsCalc == obj);
}
__equalsCalc = obj;
boolean _equals;
_equals = true &&
((this.unitType==null && other.getUnitType()==null) ||
(this.unitType!=null &&
this.unitType.equals(other.getUnitType()))) &&
((this.matchedUnits==null && other.getMatchedUnits()==null) ||
(this.matchedUnits!=null &&
this.matchedUnits.equals(other.getMatchedUnits()))) &&
((this.availableUnits==null && other.getAvailableUnits()==null) ||
(this.availableUnits!=null &&
this.availableUnits.equals(other.getAvailableUnits()))) &&
((this.possibleUnits==null && other.getPossibleUnits()==null) ||
(this.possibleUnits!=null &&
this.possibleUnits.equals(other.getPossibleUnits())));
__equalsCalc = null;
return _equals;
}
private boolean __hashCodeCalc = false;
public synchronized int hashCode() {
if (__hashCodeCalc) {
return 0;
}
__hashCodeCalc = true;
int _hashCode = 1;
if (getUnitType() != null) {
_hashCode += getUnitType().hashCode();
}
if (getMatchedUnits() != null) {
_hashCode += getMatchedUnits().hashCode();
}
if (getAvailableUnits() != null) {
_hashCode += getAvailableUnits().hashCode();
}
if (getPossibleUnits() != null) {
_hashCode += getPossibleUnits().hashCode();
}
__hashCodeCalc = false;
return _hashCode;
}
// Type metadata
private static org.apache.axis.description.TypeDesc typeDesc =
new org.apache.axis.description.TypeDesc(AlternativeUnitTypeForecast.class, true);
static {
typeDesc.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "AlternativeUnitTypeForecast"));
org.apache.axis.description.ElementDesc elemField = new org.apache.axis.description.ElementDesc();
elemField.setFieldName("unitType");
elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "unitType"));
elemField.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "UnitType"));
elemField.setMinOccurs(0);
elemField.setNillable(false);
typeDesc.addFieldDesc(elemField);
elemField = new org.apache.axis.description.ElementDesc();
elemField.setFieldName("matchedUnits");
elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "matchedUnits"));
elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "long"));
elemField.setMinOccurs(0);
elemField.setNillable(false);
typeDesc.addFieldDesc(elemField);
elemField = new org.apache.axis.description.ElementDesc();
elemField.setFieldName("availableUnits");
elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "availableUnits"));
elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "long"));
elemField.setMinOccurs(0);
elemField.setNillable(false);
typeDesc.addFieldDesc(elemField);
elemField = new org.apache.axis.description.ElementDesc();
elemField.setFieldName("possibleUnits");
elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "possibleUnits"));
elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "long"));
elemField.setMinOccurs(0);
elemField.setNillable(false);
typeDesc.addFieldDesc(elemField);
}
/**
* Return type metadata object
*/
public static org.apache.axis.description.TypeDesc getTypeDesc() {
return typeDesc;
}
/**
* Get Custom Serializer
*/
public static org.apache.axis.encoding.Serializer getSerializer(
java.lang.String mechType,
java.lang.Class _javaType,
javax.xml.namespace.QName _xmlType) {
return
new org.apache.axis.encoding.ser.BeanSerializer(
_javaType, _xmlType, typeDesc);
}
/**
* Get Custom Deserializer
*/
public static org.apache.axis.encoding.Deserializer getDeserializer(
java.lang.String mechType,
java.lang.Class _javaType,
javax.xml.namespace.QName _xmlType) {
return
new org.apache.axis.encoding.ser.BeanDeserializer(
_javaType, _xmlType, typeDesc);
}
}
|
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package poo;
import java.util.ArrayList;
/**
*
* @author aluno
*/
public class JFPrincipal extends javax.swing.JFrame {
public ArrayList<Contato> contatos;
/**
* Creates new form JFPrincipal
*/
public JFPrincipal() {
initComponents();
this.contatos = new ArrayList<Contato>();
}
/**
* This method is called from within the constructor to initialize the form.
* WARNING: Do NOT modify this code. The content of this method is always
* regenerated by the Form Editor.
*/
@SuppressWarnings("unchecked")
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
private void initComponents() {
JLNome = new javax.swing.JLabel();
JTFNome = new javax.swing.JTextField();
JLTelefone = new javax.swing.JLabel();
JTFTelefone = new javax.swing.JTextField();
JLEmail = new javax.swing.JLabel();
JTFEmail = new javax.swing.JTextField();
JBAnterior = new javax.swing.JButton();
JBProximo = new javax.swing.JButton();
JBPesquisar = new javax.swing.JButton();
JBSalvar = new javax.swing.JButton();
JBExcluir = new javax.swing.JButton();
JBLimpar = new javax.swing.JButton();
jLabel1 = new javax.swing.JLabel();
setDefaultCloseOperation(javax.swing.WindowConstants.EXIT_ON_CLOSE);
JLNome.setText("Nome:");
JTFNome.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
JTFNomeActionPerformed(evt);
}
});
JTFNome.addKeyListener(new java.awt.event.KeyAdapter() {
public void keyReleased(java.awt.event.KeyEvent evt) {
JTFNomeKeyReleased(evt);
}
});
JLTelefone.setText("Telefone:");
JLEmail.setText("e-mail:");
JBAnterior.setText("Anterior");
JBAnterior.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
JBAnteriorActionPerformed(evt);
}
});
JBProximo.setText("Proximo");
JBProximo.setMaximumSize(new java.awt.Dimension(92, 25));
JBProximo.setMinimumSize(new java.awt.Dimension(92, 25));
JBProximo.setPreferredSize(new java.awt.Dimension(92, 25));
JBProximo.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
JBProximoActionPerformed(evt);
}
});
JBPesquisar.setText("Pesquisar");
JBPesquisar.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
JBPesquisarActionPerformed(evt);
}
});
JBSalvar.setText("Salvar");
JBSalvar.setEnabled(false);
JBSalvar.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
JBSalvarActionPerformed(evt);
}
});
JBExcluir.setText("Excluir");
JBExcluir.setEnabled(false);
JBExcluir.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
JBExcluirActionPerformed(evt);
}
});
JBLimpar.setText("Limpar");
JBLimpar.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
JBLimparActionPerformed(evt);
}
});
javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane());
getContentPane().setLayout(layout);
layout.setHorizontalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addContainerGap()
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addComponent(JBAnterior)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(JBProximo, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
.addGroup(layout.createSequentialGroup()
.addComponent(JBPesquisar)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(JBSalvar)))
.addContainerGap(198, Short.MAX_VALUE))
.addGroup(layout.createSequentialGroup()
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false)
.addComponent(JLNome)
.addComponent(JTFNome)
.addComponent(JLTelefone)
.addComponent(JTFTelefone)
.addComponent(JLEmail)
.addComponent(JTFEmail, javax.swing.GroupLayout.DEFAULT_SIZE, 120, Short.MAX_VALUE))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup()
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(JBExcluir)
.addComponent(JBLimpar))
.addGap(25, 25, 25))
.addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup()
.addComponent(jLabel1)
.addGap(75, 75, 75))))))
);
layout.setVerticalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addContainerGap()
.addComponent(JLNome)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(JTFNome, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addGap(18, 18, 18)
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(JLTelefone)
.addComponent(JBExcluir))
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(JTFTelefone, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
.addComponent(JLEmail))
.addGroup(layout.createSequentialGroup()
.addGap(16, 16, 16)
.addComponent(JBLimpar)))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(JTFEmail, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(JBPesquisar)
.addComponent(JBSalvar))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(jLabel1)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, 40, Short.MAX_VALUE)
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(JBAnterior)
.addComponent(JBProximo, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
.addGap(20, 20, 20))
);
pack();
}// </editor-fold>//GEN-END:initComponents
private void JBLimparActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_JBLimparActionPerformed
JTFNome.setText("");
JTFTelefone.setText("");
JTFEmail.setText("");
}//GEN-LAST:event_JBLimparActionPerformed
private void JBSalvarActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_JBSalvarActionPerformed
Contato d ;
if (index == -1) {
Contato c = new Contato(JTFNome.getText(), JTFEmail.getText(),
JTFTelefone.getText());
contatos.add(c);
} else {
d = contatos.get(index);
d.setNome(JTFNome.getText());
d.setTelefone(JTFTelefone.getText());
d.setEmail(JTFEmail.getText());
//contatos.set(index, d);
}
}//GEN-LAST:event_JBSalvarActionPerformed
private void JBProximoActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_JBProximoActionPerformed
if (contatos.isEmpty()) {
return; // nothing to navigate
}
index++;
if (index == contatos.size()) {
index=0;
}
Contato d;
d = contatos.get(index);
JTFNome.setText(d.getNome());
JTFTelefone.setText(d.getTelefone());
JTFEmail.setText(d.getEmail());
}//GEN-LAST:event_JBProximoActionPerformed
private void JBAnteriorActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_JBAnteriorActionPerformed
if (contatos.isEmpty()) {
return; // nothing to navigate
}
if (index == 0 || index == -1) {
index=contatos.size();
}
index--;
Contato d;
d = contatos.get(index);
JTFNome.setText(d.getNome());
JTFTelefone.setText(d.getTelefone());
JTFEmail.setText(d.getEmail());
}//GEN-LAST:event_JBAnteriorActionPerformed
private void JBExcluirActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_JBExcluirActionPerformed
if((index>=0) && (index < contatos.size())){
contatos.remove(index);
index--; // step back so the next "Proximo" does not skip past the end of the list
JTFNome.setText("");
JTFEmail.setText("");
JTFTelefone.setText("");
}
}//GEN-LAST:event_JBExcluirActionPerformed
private void JBPesquisarActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_JBPesquisarActionPerformed
JDPesquisar dialog = new JDPesquisar(this, true);
dialog.setVisible(true);
}//GEN-LAST:event_JBPesquisarActionPerformed
private void JTFNomeActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_JTFNomeActionPerformed
// TODO add your handling code here:
}//GEN-LAST:event_JTFNomeActionPerformed
private void JTFNomeKeyReleased(java.awt.event.KeyEvent evt) {//GEN-FIRST:event_JTFNomeKeyReleased
if(JTFNome.getText().length() > 0){
JBSalvar.setEnabled(true);
} else{
JBSalvar.setEnabled(false);
}
}//GEN-LAST:event_JTFNomeKeyReleased
/**
* @param args the command line arguments
*/
public static void main(String args[]) {
/* Set the Nimbus look and feel */
//<editor-fold defaultstate="collapsed" desc=" Look and feel setting code (optional) ">
/* If Nimbus (introduced in Java SE 6) is not available, stay with the default look and feel.
* For details see http://download.oracle.com/javase/tutorial/uiswing/lookandfeel/plaf.html
*/
try {
for (javax.swing.UIManager.LookAndFeelInfo info : javax.swing.UIManager.getInstalledLookAndFeels()) {
if ("Nimbus".equals(info.getName())) {
javax.swing.UIManager.setLookAndFeel(info.getClassName());
break;
}
}
} catch (ClassNotFoundException ex) {
java.util.logging.Logger.getLogger(JFPrincipal.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
} catch (InstantiationException ex) {
java.util.logging.Logger.getLogger(JFPrincipal.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
} catch (IllegalAccessException ex) {
java.util.logging.Logger.getLogger(JFPrincipal.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
} catch (javax.swing.UnsupportedLookAndFeelException ex) {
java.util.logging.Logger.getLogger(JFPrincipal.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
}
//</editor-fold>
/* Create and display the form */
java.awt.EventQueue.invokeLater(new Runnable() {
public void run() {
new JFPrincipal().setVisible(true);
}
});
}
private int index = -1;
// Variables declaration - do not modify//GEN-BEGIN:variables
private javax.swing.JButton JBAnterior;
private javax.swing.JButton JBExcluir;
private javax.swing.JButton JBLimpar;
private javax.swing.JButton JBPesquisar;
private javax.swing.JButton JBProximo;
private javax.swing.JButton JBSalvar;
private javax.swing.JLabel JLEmail;
private javax.swing.JLabel JLNome;
private javax.swing.JLabel JLTelefone;
private javax.swing.JTextField JTFEmail;
private javax.swing.JTextField JTFNome;
private javax.swing.JTextField JTFTelefone;
private javax.swing.JLabel jLabel1;
// End of variables declaration//GEN-END:variables
}
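// The Contato bean referenced above is defined elsewhere in this package; a minimal sketch of
// the shape this form relies on (constructor order and accessors inferred from the calls above):
//
//   public class Contato {
//       private String nome, email, telefone;
//       public Contato(String nome, String email, String telefone) {
//           this.nome = nome; this.email = email; this.telefone = telefone;
//       }
//       public String getNome() { return nome; }
//       public void setNome(String nome) { this.nome = nome; }
//       public String getEmail() { return email; }
//       public void setEmail(String email) { this.email = email; }
//       public String getTelefone() { return telefone; }
//       public void setTelefone(String telefone) { this.telefone = telefone; }
//   }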
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.config;
import org.apache.dubbo.common.utils.StringUtils;
import org.apache.dubbo.config.support.Parameter;
import org.apache.dubbo.rpc.support.ProtocolUtils;
import static org.apache.dubbo.common.constants.CommonConstants.INVOKER_LISTENER_KEY;
import static org.apache.dubbo.common.constants.CommonConstants.REFERENCE_FILTER_KEY;
import static org.apache.dubbo.common.constants.CommonConstants.STUB_EVENT_KEY;
/**
* AbstractReferenceConfig
*
* @export
* @see ReferenceConfigBase
*/
public abstract class AbstractReferenceConfig extends AbstractInterfaceConfig {
private static final long serialVersionUID = -2786526984373031126L;
// ======== Reference config default values, will take effect if reference's attribute is not set ========
/**
* Check if the service provider exists; if it does not, fail fast
*/
protected Boolean check;
/**
* Whether to eagerly initialize the reference
*/
protected Boolean init;
/**
* Whether to use generic interface
*/
protected String generic;
/**
* Whether to look up the referenced service instance in the current JVM
*/
protected Boolean injvm;
/**
* Whether to create the connection lazily
*/
protected Boolean lazy = false;
protected String reconnect;
protected Boolean sticky = false;
/**
* Whether to support event in stub.
*/
//TODO solve merge problem
protected Boolean stubevent;//= Constants.DEFAULT_STUB_EVENT;
/**
* The remote service version the consumer side will reference
*/
protected String version;
/**
* The remote service group the consumer side will reference
*/
protected String group;
/**
* Declares which application or service this interface belongs to
*/
protected String providedBy;
public Boolean isCheck() {
return check;
}
public void setCheck(Boolean check) {
this.check = check;
}
public Boolean isInit() {
return init;
}
public void setInit(Boolean init) {
this.init = init;
}
@Deprecated
@Parameter(excluded = true)
public Boolean isGeneric() {
return this.generic != null ? ProtocolUtils.isGeneric(generic) : null;
}
@Deprecated
public void setGeneric(Boolean generic) {
if (generic != null) {
this.generic = generic.toString();
}
}
public String getGeneric() {
return generic;
}
public void setGeneric(String generic) {
if (StringUtils.isEmpty(generic)) {
return;
}
if (ProtocolUtils.isValidGenericValue(generic)) {
this.generic = generic;
} else {
throw new IllegalArgumentException("Unsupported generic type " + generic);
}
}
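// Illustrative configuration (a minimal sketch; "com.example.DemoService" is a hypothetical
// interface name, and the accepted generic values are assumed to be the ones ProtocolUtils
// validates, such as "true", "nativejava" and "bean"):
//
//   ReferenceConfig<org.apache.dubbo.rpc.service.GenericService> reference = new ReferenceConfig<>();
//   reference.setInterface("com.example.DemoService");
//   reference.setGeneric("true"); // invalid values make this setter throw IllegalArgumentException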
/**
* @return
* @deprecated instead, use the parameter <b>scope</b> to judge if it's in jvm, scope=local
*/
@Deprecated
public Boolean isInjvm() {
return injvm;
}
/**
* @param injvm
* @deprecated instead, use the parameter <b>scope</b> to judge if it's in jvm, scope=local
*/
@Deprecated
public void setInjvm(Boolean injvm) {
this.injvm = injvm;
}
@Override
@Parameter(key = REFERENCE_FILTER_KEY, append = true)
public String getFilter() {
return super.getFilter();
}
@Override
@Parameter(key = INVOKER_LISTENER_KEY, append = true)
public String getListener() {
return super.getListener();
}
@Override
public void setListener(String listener) {
super.setListener(listener);
}
public Boolean getLazy() {
return lazy;
}
public void setLazy(Boolean lazy) {
this.lazy = lazy;
}
@Override
public void setOnconnect(String onconnect) {
if (onconnect != null && onconnect.length() > 0) {
this.stubevent = true;
}
super.setOnconnect(onconnect);
}
@Override
public void setOndisconnect(String ondisconnect) {
if (ondisconnect != null && ondisconnect.length() > 0) {
this.stubevent = true;
}
super.setOndisconnect(ondisconnect);
}
@Parameter(key = STUB_EVENT_KEY)
public Boolean getStubevent() {
return stubevent;
}
public String getReconnect() {
return reconnect;
}
public void setReconnect(String reconnect) {
this.reconnect = reconnect;
}
public Boolean getSticky() {
return sticky;
}
public void setSticky(Boolean sticky) {
this.sticky = sticky;
}
public String getVersion() {
return version;
}
public void setVersion(String version) {
this.version = version;
}
public String getGroup() {
return group;
}
public void setGroup(String group) {
this.group = group;
}
@Parameter(key = "provided-by")
public String getProvidedBy() {
return providedBy;
}
public void setProvidedBy(String providedBy) {
this.providedBy = providedBy;
}
}
|
|
package org.apache.commons.jcs3.auxiliary.disk.block;
import java.io.File;
import java.io.IOException;
import java.util.Random;
import org.apache.commons.jcs3.utils.serialization.StandardSerializer;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import junit.framework.TestCase;
/**
* Test for the disk access layer of the Block Disk Cache.
* <p>
* @author Aaron Smuts
*/
public class BlockDiskUnitTest
extends TestCase
{
/** data file. */
private File rafDir;
private BlockDisk disk;
/**
* @see junit.framework.TestCase#setUp()
* Creates the base directory
*/
@Override
protected void setUp() throws Exception
{
super.setUp();
final String rootDirName = "target/test-sandbox/block";
this.rafDir = new File( rootDirName );
this.rafDir.mkdirs();
}
private void setUpBlockDisk(final String fileName) throws IOException
{
final File file = new File(rafDir, fileName + ".data");
file.delete();
this.disk = new BlockDisk(file, new StandardSerializer());
}
private void setUpBlockDisk(final String fileName, final int blockSize) throws IOException
{
final File file = new File(rafDir, fileName + ".data");
file.delete();
this.disk = new BlockDisk(file, blockSize, new StandardSerializer());
}
/**
* @see junit.framework.TestCase#tearDown()
*/
@Override
protected void tearDown() throws Exception
{
disk.close();
super.tearDown();
}
/**
* Test writing a null object within a single block size.
* <p>
* @throws Exception
*/
public void testWrite_NullBlockElement()
throws Exception
{
// SETUP
setUpBlockDisk("testWrite_NullBlockElement");
// DO WORK
final int[] blocks = disk.write( null );
// VERIFY
assertEquals( "Wrong number of blocks recorded.", 1, disk.getNumberOfBlocks() );
assertEquals( "Wrong number of blocks returned.", 1, blocks.length );
assertEquals( "Wrong block returned.", 0, blocks[0] );
}
/**
* Test writing an element within a single block size.
* <p>
* @throws Exception
*/
public void testWrite_SingleBlockElement()
throws Exception
{
// SETUP
setUpBlockDisk("testWrite_SingleBlockElement");
// DO WORK
final int bytes = 1 * 1024;
final int[] blocks = disk.write( new byte[bytes] );
// VERIFY
assertEquals( "Wrong number of blocks recorded.", 1, disk.getNumberOfBlocks() );
assertEquals( "Wrong number of blocks returned.", 1, blocks.length );
assertEquals( "Wrong block returned.", 0, blocks[0] );
}
/**
* Test writing and reading an element within a single block size.
* <p>
* @throws Exception
*/
public void testWriteAndRead_SingleBlockElement()
throws Exception
{
// SETUP
setUpBlockDisk("testWriteAndRead_SingleBlockElement");
// DO WORK
final int bytes = 1 * 1024;
final int[] blocks = disk.write( new byte[bytes] );
final byte[] result = (byte[]) disk.read( blocks );
// VERIFY
assertEquals( "Wrong item retured.", new byte[bytes].length, result.length );
}
/**
* Test writing two elements that each fit within a single block size.
* <p>
* @throws Exception
*/
public void testWrite_TwoSingleBlockElements()
throws Exception
{
// SETUP
setUpBlockDisk("testWrite_TwoSingleBlockElements");
// DO WORK
final int bytes = 1 * 1024;
final int[] blocks1 = disk.write( new byte[bytes] );
final int[] blocks2 = disk.write( new byte[bytes] );
// VERIFY
assertEquals( "Wrong number of blocks recorded.", 2, disk.getNumberOfBlocks() );
assertEquals( "Wrong number of blocks returned.", 1, blocks1.length );
assertEquals( "Wrong block returned.", 0, blocks1[0] );
assertEquals( "Wrong number of blocks returned.", 1, blocks2.length );
assertEquals( "Wrong block returned.", 1, blocks2[0] );
}
/**
* Verify that it says we need two blocks if the total size will fit.
* <p>
* @throws Exception
*/
public void testCalculateBlocksNeededDouble()
throws Exception
{
// SETUP
setUpBlockDisk("testCalculateBlocksNeededDouble");
// DO WORK
final int result = disk.calculateTheNumberOfBlocksNeeded( new byte[disk.getBlockSizeBytes() * 2
- ( 2 * BlockDisk.HEADER_SIZE_BYTES )] );
// Verify
assertEquals( "Wrong number of blocks", 2, result );
}
/**
* Test writing an element that takes two blocks.
* <p>
* @throws Exception
*/
public void testWrite_DoubleBlockElement()
throws Exception
{
// SETUP
setUpBlockDisk("testWriteDoubleBlockElement");
// DO WORK
// byte arrays incur 27 bytes of serialization overhead.
final int bytes = getBytesForBlocksOfByteArrays( disk.getBlockSizeBytes(), 2 );
final int[] blocks = disk.write( new byte[bytes] );
// VERIFY
assertEquals( "Wrong number of blocks recorded.", 2, disk.getNumberOfBlocks() );
assertEquals( "Wrong number of blocks returned.", 2, blocks.length );
assertEquals( "Wrong block returned.", 0, blocks[0] );
}
/**
* Test writing an element that takes 128 blocks. There was a byte in a for loop that limited the number to 127. I fixed this.
* <p>
* @throws Exception
*/
public void testWrite_128BlockElement()
throws Exception
{
// SETUP
final int numBlocks = 128;
setUpBlockDisk("testWrite_128BlockElement");
// DO WORK
// byte arrays incur 27 bytes of serialization overhead.
final int bytes = getBytesForBlocksOfByteArrays( disk.getBlockSizeBytes(), numBlocks );
final int[] blocks = disk.write( new byte[bytes] );
// VERIFY
assertEquals( "Wrong number of blocks recorded.", numBlocks, disk.getNumberOfBlocks() );
assertEquals( "Wrong number of blocks returned.", numBlocks, blocks.length );
assertEquals( "Wrong block returned.", 0, blocks[0] );
}
/**
* Test writing and reading elements that do not fit within a single block.
* <p>
* @throws Exception
*/
public void testWriteAndReadMultipleMultiBlockElement()
throws Exception
{
// SETUP
setUpBlockDisk("testWriteAndReadSingleBlockElement");
// DO WORK
final int numBlocksPerElement = 4;
final int bytes = getBytesForBlocksOfByteArrays( disk.getBlockSizeBytes(), numBlocksPerElement );
final int numElements = 100;
for ( int i = 0; i < numElements; i++ )
{
final int[] blocks = disk.write( new byte[bytes] );
final byte[] result = (byte[]) disk.read( blocks );
// VERIFY
assertEquals( "Wrong item retured.", new byte[bytes].length, result.length );
assertEquals( "Wrong number of blocks returned.", numBlocksPerElement, blocks.length );
}
}
/**
* Test writing and reading elements that do not fit within a single block.
* <p>
* @throws Exception
*/
public void testWriteAndReadMultipleMultiBlockElement_setSize()
throws Exception
{
// SETUP
setUpBlockDisk("testWriteAndReadSingleBlockElement", 1024);
// DO WORK
final int numBlocksPerElement = 4;
final int bytes = getBytesForBlocksOfByteArrays( disk.getBlockSizeBytes(), numBlocksPerElement );
final int numElements = 100;
final Random r = new Random(System.currentTimeMillis());
final byte[] src = new byte[bytes];
for ( int i = 0; i < numElements; i++ )
{
r.nextBytes(src); // Ensure we don't just write zeros out
final int[] blocks = disk.write( src );
final byte[] result = (byte[]) disk.read( blocks );
// VERIFY
assertEquals( "Wrong item length retured.", src.length, result.length );
assertEquals( "Wrong number of blocks returned.", numBlocksPerElement, blocks.length );
// We check the array contents, too, to ensure we read back what we wrote out
for (int j = 0 ; j < src.length ; j++) {
assertEquals( "Mismatch at offset " + j + " in attempt # " + (i + 1), src[j], result[j] );
}
}
assertEquals( "Wrong number of elements. "+disk, numBlocksPerElement * numElements, disk.getNumberOfBlocks() );
}
/**
* Used to get the size for byte arrays that will take up the number of blocks specified.
* <p>
* @param blockSize
* @param numBlocks
* @return num bytes.
*/
private int getBytesForBlocksOfByteArrays( final int blockSize, final int numBlocks )
{
// byte arrays incur some bytes of serialization overhead.
return blockSize * numBlocks - ( numBlocks * BlockDisk.HEADER_SIZE_BYTES ) - ( numBlocks * 14 );
}
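// Worked example of the formula above, assuming BlockDisk.HEADER_SIZE_BYTES is 4:
// for blockSize = 4096 and numBlocks = 2 this returns 2 * 4096 - 2 * 4 - 2 * 14 = 8156 bytes,
// leaving room for the per-element serialization overhead mentioned in the tests above.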
/**
* Verify that the block disk can handle a big string.
* <p>
* @throws Exception
*/
public void testWriteAndRead_BigString()
throws Exception
{
// SETUP
setUpBlockDisk("testWriteAndRead_BigString", 4096); //1024
String string = "This is my big string ABCDEFGH";
final StringBuilder sb = new StringBuilder();
sb.append( string );
for ( int i = 0; i < 8; i++ )
{
sb.append( " " + i + sb.toString() ); // big string
}
string = sb.toString();
// DO WORK
final int[] blocks = disk.write( string );
final String result = (String) disk.read( blocks );
// VERIFY
// System.out.println( string );
// System.out.println( result );
// System.out.println( disk );
assertEquals( "Wrong item retured.", string, result );
}
/**
* Verify that the block disk can handle a big string.
* <p>
* @throws Exception
*/
public void testWriteAndRead_BigString2()
throws Exception
{
// SETUP
setUpBlockDisk("testWriteAndRead_BigString", 47); //4096;//1024
String string = "abcdefghijklmnopqrstuvwxyz1234567890";
string += string;
string += string;
// DO WORK
final int[] blocks = disk.write( string );
final String result = (String) disk.read( blocks );
// VERIFY
assertEquals( "Wrong item retured.", string, result );
}
public void testJCS156() throws Exception
{
// SETUP
setUpBlockDisk("testJCS156", 4096);
final long offset = disk.calculateByteOffsetForBlockAsLong(Integer.MAX_VALUE);
assertTrue("Must not wrap round", offset > 0);
assertEquals(Integer.MAX_VALUE*4096L,offset);
}
}
|
|
package sonar.systems.frameworks.Facebook;
import java.util.Arrays;
import com.facebook.FacebookException;
import com.facebook.FacebookOperationCanceledException;
import com.facebook.Session;
import com.facebook.SessionState;
import com.facebook.UiLifecycleHelper;
import com.facebook.widget.FacebookDialog;
import com.facebook.widget.WebDialog;
import com.facebook.widget.WebDialog.OnCompleteListener;
import sonar.systems.frameworks.BaseClass.Framework;
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.util.Log;
import android.widget.Toast;
public class Facebook extends Framework
{
private Activity activity;
private UiLifecycleHelper uiHelper;
private Session.StatusCallback statusCallback = new SessionStatusCallback();
public Facebook()
{
}
@Override
public void SetActivity(Activity activity)
{
this.activity = activity;
}
@Override
public void onCreate(Bundle b)
{
uiHelper = new UiLifecycleHelper(activity, null);
uiHelper.onCreate(b);
}
@Override
public void onStart()
{
}
@Override
public void onStop()
{
}
@Override
public void onActivityResult(int request, int response, Intent data)
{
uiHelper.onActivityResult(request, response, data, new FacebookDialog.Callback()
{
@Override
public void onError(FacebookDialog.PendingCall pendingCall, Exception error, Bundle data)
{
Log.e("Activity", String.format("Error: %s", error.toString()));
}
@Override
public void onComplete(FacebookDialog.PendingCall pendingCall, Bundle data)
{
Log.i("Activity", "Success!");
}
});
}
@Override
public void onResume()
{
uiHelper.onResume();
}
@Override
public void onSaveInstanceState(Bundle outState)
{
uiHelper.onSaveInstanceState(outState);
}
@Override
public void onPause()
{
uiHelper.onPause();
}
@Override
public void onDestroy()
{
uiHelper.onDestroy();
}
@Override
public void FacebookSignIn()
{
Session session = Session.getActiveSession();
if (session != null)
{
if(!session.isOpened() && !session.isClosed())
{
session.openForRead(new Session.OpenRequest(activity)
.setPermissions(Arrays.asList("public_profile"))
.setCallback(statusCallback));
}
else
{
Session.openActiveSession(activity, true, Arrays.asList("public_profile"), statusCallback);
}
}
else
{
Session.openActiveSession(activity, true, Arrays.asList("public_profile"), statusCallback);
}
//to sign out would be
//session.closeAndClearTokenInformation();
}
@Override
public void Share(String name, String link, String description, String caption, String imagePath)
{
if (FacebookDialog.canPresentShareDialog(activity.getApplicationContext(), FacebookDialog.ShareDialogFeature.SHARE_DIALOG))
{
// Publish the post using the Share Dialog
FacebookDialog shareDialog = new FacebookDialog.ShareDialogBuilder(activity).setName(name).
setDescription(description).
setLink(link).build();
uiHelper.trackPendingDialogCall(shareDialog.present());
}
else
{
// Fallback. For example, publish the post using the Feed Dialog
Bundle params = new Bundle();
params.putString("name", name);
// params.putString("caption",caption);
params.putString("description", description);
params.putString("link", link);
//params.putString("picture", imagePath);
WebDialog feedDialog = (new WebDialog.FeedDialogBuilder(activity,
Session.getActiveSession(),
params))
.setOnCompleteListener(new OnCompleteListener()
{
@Override
public void onComplete(Bundle values, FacebookException error)
{
if (error == null)
{
// When the story is posted, echo the success
// and the post Id.
final String postId = values.getString("post_id");
if (postId != null)
{
Toast.makeText(activity,
"Posted story, id: "+postId,
Toast.LENGTH_SHORT).show();
}
else
{
// User clicked the Cancel button
Toast.makeText(activity.getApplicationContext(), "Publish cancelled", Toast.LENGTH_SHORT).show();
}
}
else if (error instanceof FacebookOperationCanceledException)
{
// User clicked the "x" button
Toast.makeText(activity.getApplicationContext(), "Publish cancelled", Toast.LENGTH_SHORT).show();
}
else
{
// Generic, ex: network error
Toast.makeText(activity.getApplicationContext(), "Error posting story", Toast.LENGTH_SHORT).show();
}
}
}).build();
feedDialog.show();
}
}
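// Illustrative call site (a sketch; the argument values are placeholders, and the caption
// and image path are currently ignored by both branches above):
//
//   facebookFramework.Share("My Game", "https://example.com/game",
//       "Beat my high score!", "caption", null);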
private class SessionStatusCallback implements Session.StatusCallback
{
@Override
public void call(Session session, SessionState state,
Exception exception)
{
if (state.isOpened())
{
}
else if (state.isClosed())
{
}
}
}
}
|
|
/**
* Copyright 2011-2013 FoundationDB, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* The original from which this derives bore the following: */
/*
Derby - Class org.apache.derby.iapi.types.JSQLType
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to you under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.foundationdb.sql.types;
import com.foundationdb.sql.StandardException;
/**
* Type descriptor which wraps all 3 kinds of types supported in Derby's
* JSQL language: SQL types, Java primitives, Java classes.
*
 * This class was originally added to support the serialization of WorkUnit
 * signatures.
*/
public final class JSQLType
{
public static final byte SQLTYPE = 0;
public static final byte JAVA_CLASS = 1;
public static final byte JAVA_PRIMITIVE = 2;
public static final byte NOT_PRIMITIVE = -1;
public static final byte BOOLEAN = 0;
public static final byte CHAR = 1;
public static final byte BYTE = 2;
public static final byte SHORT = 3;
public static final byte INT = 4;
public static final byte LONG = 5;
public static final byte FLOAT = 6;
public static final byte DOUBLE = 7;
// these two arrays are in the order of the primitive constants
static private final String[] wrapperClassNames = {
"java.lang.Boolean",
"java.lang.Integer", // we can't serialize char, so we convert it to int
"java.lang.Integer",
"java.lang.Integer",
"java.lang.Integer",
"java.lang.Long",
"java.lang.Float",
"java.lang.Double"
};
static private final String[] primitiveNames = {
"boolean",
"char",
"byte",
"short",
"int",
"long",
"float",
"double"
};
private byte category = JAVA_PRIMITIVE;
private DataTypeDescriptor sqlType;
private String javaClassName;
private byte primitiveKind;
/**
* Create a JSQLType from a SQL type.
*
* @param sqlType the SQL type to wrap
*/
public JSQLType(DataTypeDescriptor sqlType) {
initialize(sqlType);
}
/**
* Create a JSQLType given the name of a Java primitive or java class.
*
* @param javaName name of java primitive or class to wrap
*/
public JSQLType(String javaName) {
byte primitiveID = getPrimitiveID(javaName);
if (primitiveID != NOT_PRIMITIVE) {
initialize(primitiveID);
}
else {
initialize(javaName);
}
}
/**
* Create a JSQLType for a Java primitive.
*
* @param primitiveKind primitive to wrap
*/
public JSQLType(byte primitiveKind) {
initialize(primitiveKind);
}
/**
* What kind of type is this:
*
* @return one of the following: SQLTYPE, JAVA_PRIMITIVE, JAVA_CLASS
*/
public byte getCategory() {
return category;
}
/**
* If this is a JAVA_PRIMITIVE, what is its name?
*
* @return BOOLEAN, INT, ... if this is a JAVA_PRIMITIVE.
* NOT_PRIMITIVE if this is SQLTYPE or JAVA_CLASS.
*/
public byte getPrimitiveKind() {
return primitiveKind;
}
/**
* If this is a JAVA_CLASS, what is its name?
*
* @return java class name if this is a JAVA_CLASS
* null if this is SQLTYPE or JAVA_PRIMITIVE
*/
public String getJavaClassName() {
return javaClassName;
}
public String getPrimitiveTypeName() {
if (primitiveKind == NOT_PRIMITIVE)
return null;
else
return primitiveNames[primitiveKind];
}
/**
* What's our SQLTYPE?
*
* @return the DataTypeDescriptor corresponding to this type
*
*/
public DataTypeDescriptor getSQLType() throws StandardException {
// Might not be filled in if this is a JAVA_CLASS or JAVA_PRIMITIVE.
if (sqlType == null) {
String className;
if (category == JAVA_CLASS) {
className = javaClassName;
}
else {
className = getWrapperClassName(primitiveKind);
}
sqlType = DataTypeDescriptor.getSQLDataTypeDescriptor(className);
}
return sqlType;
}
// Give read-only access to array of strings
public static String getPrimitiveName(byte index) {
return primitiveNames[index];
}
private void initialize(byte primitiveKind) {
initialize(JAVA_PRIMITIVE, null, null, primitiveKind);
}
private void initialize(DataTypeDescriptor sqlType) {
initialize(SQLTYPE, sqlType, null, NOT_PRIMITIVE);
}
private void initialize(String javaClassName) {
initialize(JAVA_CLASS, null, javaClassName, NOT_PRIMITIVE);
}
/**
* Initialize this JSQL type. Minion of all constructors.
*
* @param category SQLTYPE, JAVA_CLASS, JAVA_PRIMITIVE
* @param sqlType corresponding SQL type if category=SQLTYPE
* @param javaClassName corresponding java class if category=JAVA_CLASS
* @param primitiveKind kind of primitive if category=JAVA_PRIMITIVE
*/
private void initialize (byte category, DataTypeDescriptor sqlType,
String javaClassName, byte primitiveKind) {
this.category = category;
this.sqlType = sqlType;
this.javaClassName = javaClassName;
this.primitiveKind = primitiveKind;
}
/**
* Gets the name of the java wrapper class corresponding to a primitive.
*
* @param primitive BOOLEAN, INT, ... etc.
*
* @return name of the java wrapper class corresponding to the primitive
*/
private static String getWrapperClassName(byte primitive) {
if (primitive == NOT_PRIMITIVE) {
return "";
}
return wrapperClassNames[primitive];
}
/**
* Translate the name of a java primitive to an id
*
* @param name name of primitive
*
* @return BOOLEAN, INT, ... etc if the name is that of a primitive.
* NOT_PRIMITIVE otherwise
*/
private static byte getPrimitiveID (String name) {
for (byte ictr = BOOLEAN; ictr <= DOUBLE; ictr++) {
if (primitiveNames[ictr].equals(name)) {
return ictr;
}
}
return NOT_PRIMITIVE;
}
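// Illustrative mappings implied by the constructors and tables above (a sketch):
//
//   JSQLType intType = new JSQLType("int");
//   // intType.getCategory()      == JAVA_PRIMITIVE
//   // intType.getPrimitiveKind() == INT
//   // intType.getPrimitiveTypeName() returns "int"
//
//   JSQLType listType = new JSQLType("java.util.List");
//   // listType.getCategory()     == JAVA_CLASS
//   // listType.getJavaClassName() returns "java.util.List"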
}
|
|
/*
* Copyright 2000-2012 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.idea.svn.status;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.Getter;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.util.containers.Convertor;
import com.intellij.util.containers.MultiMap;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.idea.svn.SvnUtil;
import org.jetbrains.idea.svn.api.NodeKind;
import org.jetbrains.idea.svn.info.Info;
import org.jetbrains.idea.svn.lock.Lock;
import org.tmatesoft.svn.core.internal.util.SVNDate;
import org.tmatesoft.svn.core.wc.SVNRevision;
import org.xml.sax.Attributes;
import org.xml.sax.SAXException;
import org.xml.sax.helpers.DefaultHandler;
import java.io.File;
import java.util.*;
/**
* Created with IntelliJ IDEA.
* User: Irina.Chernushina
* Date: 1/25/12
* Time: 7:59 PM
*/
public class SvnStatusHandler extends DefaultHandler {
private static final Logger LOG = Logger.getInstance(SvnStatusHandler.class);
@Nullable
public static StatusType getStatus(@NotNull String code) {
StatusType result = StatusType.forStatusOperation(code);
if (result == null) {
LOG.info("Unknown status type " + code);
}
return result;
}
private String myChangelistName;
private List<PortableStatus> myDefaultListStatuses;
private MultiMap<String, PortableStatus> myCurrentListChanges;
private PortableStatus myPending;
private boolean myInRemoteStatus;
private Lock.Builder myLockBuilder;
private final List<ElementHandlerBase> myParseStack;
private final Map<String, Getter<ElementHandlerBase>> myElementsMap;
private final DataCallback myDataCallback;
private final File myBase;
private final StringBuilder mySb;
private boolean myAnythingReported;
public SvnStatusHandler(final ExternalDataCallback dataCallback, File base, final Convertor<File, Info> infoGetter) {
myBase = base;
myParseStack = new ArrayList<>();
myParseStack.add(new Fake());
myElementsMap = new HashMap<>();
fillElements();
if (dataCallback != null) {
myDataCallback = new DataCallback() {
@Override
public void startLock() {
myLockBuilder = new Lock.Builder();
}
@Override
public void endLock() {
if (myInRemoteStatus) {
myPending.setRemoteLock(myLockBuilder.build());
} else {
myPending.setLocalLock(myLockBuilder.build());
}
myLockBuilder = null;
}
@Override
public void startRemoteStatus() {
myInRemoteStatus = true;
}
@Override
public void endRemoteStatus() {
myInRemoteStatus = false;
}
@Override
public void switchPath() {
myAnythingReported = true;
dataCallback.switchPath();
newPending(infoGetter);
}
@Override
public void switchChangeList(String newList) {
dataCallback.switchChangeList(newList);
}
};
} else {
myDataCallback = new DataCallback() {
@Override
public void startLock() {
myLockBuilder = new Lock.Builder();
}
@Override
public void endLock() {
if (myInRemoteStatus) {
myPending.setRemoteLock(myLockBuilder.build());
} else {
myPending.setLocalLock(myLockBuilder.build());
}
myLockBuilder = null;
}
@Override
public void startRemoteStatus() {
myInRemoteStatus = true;
}
@Override
public void endRemoteStatus() {
myInRemoteStatus = false;
}
@Override
public void switchPath() {
myAnythingReported = true;
if (myChangelistName == null) {
myDefaultListStatuses.add(myPending);
} else {
myCurrentListChanges.putValue(myChangelistName, myPending);
}
newPending(infoGetter);
}
@Override
public void switchChangeList(String newList) {
myChangelistName = newList;
}
};
}
newPending(infoGetter);
mySb = new StringBuilder();
}
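// Illustrative wiring (a minimal sketch; assumes the handler is fed the XML produced by
// "svn status --xml", and "callback", "workingCopyRoot", "infoGetter" and "statusXmlStream"
// are placeholders supplied by the caller):
//
//   SvnStatusHandler handler = new SvnStatusHandler(callback, workingCopyRoot, infoGetter);
//   javax.xml.parsers.SAXParserFactory.newInstance().newSAXParser()
//       .parse(statusXmlStream, handler);
//   boolean sawEntries = handler.isAnythingReported();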
public boolean isAnythingReported() {
return myAnythingReported;
}
private void newPending(final Convertor<File, Info> infoGetter) {
final PortableStatus status = new PortableStatus();
myPending = status;
status.setInfoGetter(new Getter<Info>() {
@Override
public Info get() {
return infoGetter.convert(status.getFile());
}
});
}
public PortableStatus getPending() {
return myPending;
}
public List<PortableStatus> getDefaultListStatuses() {
return myDefaultListStatuses;
}
public MultiMap<String, PortableStatus> getCurrentListChanges() {
return myCurrentListChanges;
}
private void fillElements() {
myElementsMap.put("repos-status", new Getter<ElementHandlerBase>() {
@Override
public ElementHandlerBase get() {
return new ReposStatus();
}
});
myElementsMap.put("lock", new Getter<ElementHandlerBase>() {
@Override
public ElementHandlerBase get() {
return new LockElement();
}
});
myElementsMap.put("token", new Getter<ElementHandlerBase>() {
@Override
public ElementHandlerBase get() {
return new LockToken();
}
});
myElementsMap.put("owner", new Getter<ElementHandlerBase>() {
@Override
public ElementHandlerBase get() {
return new LockOwner();
}
});
myElementsMap.put("comment", new Getter<ElementHandlerBase>() {
@Override
public ElementHandlerBase get() {
return new LockComment();
}
});
myElementsMap.put("created", new Getter<ElementHandlerBase>() {
@Override
public ElementHandlerBase get() {
return new LockCreatedDate();
}
});
// --
myElementsMap.put("status", new Getter<ElementHandlerBase>() {
@Override
public ElementHandlerBase get() {
return new Status();
}
});
myElementsMap.put("author", new Getter<ElementHandlerBase>() {
@Override
public ElementHandlerBase get() {
return new Author();
}
});
myElementsMap.put("changelist", new Getter<ElementHandlerBase>() {
@Override
public ElementHandlerBase get() {
return new Changelist();
}
});
myElementsMap.put("commit", new Getter<ElementHandlerBase>() {
@Override
public ElementHandlerBase get() {
return new Commit();
}
});
myElementsMap.put("date", new Getter<ElementHandlerBase>() {
@Override
public ElementHandlerBase get() {
return new Date();
}
});
myElementsMap.put("entry", new Getter<ElementHandlerBase>() {
@Override
public ElementHandlerBase get() {
return new Entry(myBase);
}
});
myElementsMap.put("target", new Getter<ElementHandlerBase>() {
@Override
public ElementHandlerBase get() {
return new Target();
}
});
myElementsMap.put("against", new Getter<ElementHandlerBase>() {
@Override
public ElementHandlerBase get() {
return new Against();
}
});
myElementsMap.put("wc-status", new Getter<ElementHandlerBase>() {
@Override
public ElementHandlerBase get() {
return new WcStatus();
}
});
}
@Override
public void endElement(String uri, String localName, String qName) throws SAXException {
//
}
@Override
public void startElement(String uri, String localName, String qName, Attributes attributes) throws SAXException {
assertSAX(! myParseStack.isEmpty());
ElementHandlerBase current = myParseStack.get(myParseStack.size() - 1);
if (mySb.length() > 0) {
current.characters(mySb.toString().trim(), myPending, myLockBuilder);
mySb.setLength(0);
}
while (true) {
final boolean createNewChild = current.startElement(uri, localName, qName, attributes);
if (createNewChild) {
assertSAX(myElementsMap.containsKey(qName));
final ElementHandlerBase newChild = myElementsMap.get(qName).get();
newChild.preAttributesEffect(myDataCallback);
newChild.updateStatus(attributes, myPending, myLockBuilder);
newChild.preEffect(myDataCallback);
myParseStack.add(newChild);
return;
} else {
// go up
current.postEffect(myDataCallback);
myParseStack.remove(myParseStack.size() - 1);
assertSAX(! myParseStack.isEmpty());
current = myParseStack.get(myParseStack.size() - 1);
}
}
}
@Override
public void characters(char[] ch, int start, int length) throws SAXException {
assertSAX(! myParseStack.isEmpty());
mySb.append(ch, start, length);
}
@Override
public void endDocument() throws SAXException {
assertSAX(! myParseStack.isEmpty());
for (int i = myParseStack.size() - 1; i >= 0; -- i) {
ElementHandlerBase current = myParseStack.get(i);
current.postEffect(myDataCallback);
}
myParseStack.clear();
}
private static void assertSAX(final boolean shouldBeTrue) throws SAXException {
if (! shouldBeTrue) {
throw new SAXException("can not parse output");
}
}
private static StatusType parseContentsStatus(Attributes attributes) throws SAXException {
final String item = attributes.getValue("item");
assertSAX(item != null);
return getStatus(item);
}
private static StatusType parsePropertiesStatus(Attributes attributes) throws SAXException {
final String props = attributes.getValue("props");
assertSAX(props != null);
return getStatus(props);
}
private static class Fake extends ElementHandlerBase {
private Fake() {
super(new String[]{"status"}, new String[]{});
}
@Override
protected void updateStatus(Attributes attributes, PortableStatus status, Lock.Builder lock) throws SAXException {
}
@Override
public void postEffect(DataCallback callback) {
}
@Override
public void preEffect(DataCallback callback) {
}
@Override
public void characters(String s, PortableStatus pending, Lock.Builder lock) {
}
}
private static class Date extends ElementHandlerBase {
private Date() {
super(new String[]{}, new String[]{});
}
@Override
protected void updateStatus(Attributes attributes, PortableStatus status, Lock.Builder lock) throws SAXException {
}
@Override
public void postEffect(DataCallback callback) {
}
@Override
public void preEffect(DataCallback callback) {
}
@Override
public void characters(String s, PortableStatus pending, Lock.Builder lock) {
}
}
private static class Author extends ElementHandlerBase {
private Author() {
super(new String[]{}, new String[]{});
}
@Override
protected void updateStatus(Attributes attributes, PortableStatus status, Lock.Builder lock) throws SAXException {
}
@Override
public void postEffect(DataCallback callback) {
}
@Override
public void preEffect(DataCallback callback) {
}
@Override
public void characters(String s, PortableStatus pending, Lock.Builder lock) {
}
}
/* <commit
revision="25">
<author>admin</author>
<date>2011-11-09T12:21:02.401530Z</date>
*/
private static class Commit extends ElementHandlerBase {
private Commit() {
super(new String[]{"author", "date"}, new String[]{});
}
@Override
protected void updateStatus(Attributes attributes, PortableStatus status, Lock.Builder lock) throws SAXException {
final String revision = attributes.getValue("revision");
if (!StringUtil.isEmpty(revision)) {
status.setCommittedRevision(SVNRevision.create(Long.valueOf(revision)));
}
}
@Override
public void postEffect(DataCallback callback) {
}
@Override
public void preEffect(DataCallback callback) {
}
@Override
public void characters(String s, PortableStatus pending, Lock.Builder lock) {
}
}
/*<lock>
<token>opaquelocktoken:27ee743a-5376-fc4a-a209-b7834e1a3f39</token>
<owner>admin</owner>
<comment>LLL</comment>
<created>2012-02-21T09:59:39.771077Z</created>
</lock>*/
/*<lock>
<token>opaquelocktoken:e21e93d2-0623-b347-bb39-900b01387555</token>
<owner>admin</owner>
<comment>787878</comment>
<created>2012-02-21T10:17:29.160005Z</created>
</lock>
</wc-status>
<repos-status
props="none"
item="none">
<lock>
<token>opaquelocktoken:e21e93d2-0623-b347-bb39-900b01387555</token>
<owner>admin</owner>
<comment>787878</comment>
<created>2012-02-21T10:17:29.160005Z</created>
</lock>
</repos-status>
</entry>*/
private static class LockCreatedDate extends ElementHandlerBase {
private LockCreatedDate() {
super(new String[]{}, new String[]{});
}
@Override
protected void updateStatus(Attributes attributes, PortableStatus status, Lock.Builder lock) throws SAXException {
}
@Override
public void postEffect(DataCallback callback) {
}
@Override
public void preEffect(DataCallback callback) {
}
@Override
public void characters(String s, PortableStatus pending, Lock.Builder lock) {
final SVNDate date = SVNDate.parseDate(s);
lock.setCreationDate(date);
}
}
private static class LockComment extends ElementHandlerBase {
private LockComment() {
super(new String[]{}, new String[]{});
}
@Override
protected void updateStatus(Attributes attributes, PortableStatus status, Lock.Builder lock) throws SAXException {
}
@Override
public void postEffect(DataCallback callback) {
}
@Override
public void preEffect(DataCallback callback) {
}
@Override
public void characters(String s, PortableStatus pending, Lock.Builder lock) {
lock.setComment(s);
}
}
private static class LockOwner extends ElementHandlerBase {
private LockOwner() {
super(new String[]{}, new String[]{});
}
@Override
protected void updateStatus(Attributes attributes, PortableStatus status, Lock.Builder lock) throws SAXException {
}
@Override
public void postEffect(DataCallback callback) {
}
@Override
public void preEffect(DataCallback callback) {
}
@Override
public void characters(String s, PortableStatus pending, Lock.Builder lock) {
lock.setOwner(s);
}
}
private static class LockToken extends ElementHandlerBase {
private LockToken() {
super(new String[]{}, new String[]{});
}
@Override
protected void updateStatus(Attributes attributes, PortableStatus status, Lock.Builder lock) throws SAXException {
}
@Override
public void postEffect(DataCallback callback) {
}
@Override
public void preEffect(DataCallback callback) {
}
@Override
public void characters(String s, PortableStatus pending, Lock.Builder lock) {
lock.setToken(s);
}
}
private static class LockElement extends ElementHandlerBase {
private LockElement() {
super(new String[]{"token", "owner", "comment", "created"}, new String[]{});
}
@Override
protected void updateStatus(Attributes attributes, PortableStatus status, Lock.Builder lock) throws SAXException {
}
@Override
public void postEffect(DataCallback callback) {
callback.endLock();
}
@Override
public void preAttributesEffect(DataCallback callback) {
callback.startLock();
}
@Override
public void preEffect(DataCallback callback) {
}
@Override
public void characters(String s, PortableStatus pending, Lock.Builder lock) {
}
}
private static class ReposStatus extends ElementHandlerBase {
private ReposStatus() {
super(new String[]{"lock"}, new String[]{});
}
@Override
protected void updateStatus(Attributes attributes, PortableStatus status, Lock.Builder lock) throws SAXException {
final StatusType propertiesStatus = parsePropertiesStatus(attributes);
status.setRemotePropertiesStatus(propertiesStatus);
final StatusType contentsStatus = parseContentsStatus(attributes);
status.setRemoteContentsStatus(contentsStatus);
}
@Override
public void postEffect(DataCallback callback) {
callback.endRemoteStatus();
}
@Override
public void preAttributesEffect(DataCallback callback) {
callback.startLock();
}
@Override
public void preEffect(DataCallback callback) {
}
@Override
public void characters(String s, PortableStatus pending, Lock.Builder lock) {
}
}
/* <wc-status props="none" copied="true" tree-conflicted="true" item="added">
<wc-status props="none" item="unversioned">
<wc-status props="none" item="added" revision="-1">
<wc-status props="none" item="modified" revision="112">
<wc-status props="conflicted" item="normal" revision="112">
*/
private static class WcStatus extends ElementHandlerBase {
private WcStatus() {
super(new String[]{"commit", "lock"}, new String[]{});
}
/*<wc-status
props="none"
wc-locked="true"
item="normal"
revision="120">*/
@Override
protected void updateStatus(Attributes attributes, PortableStatus status, Lock.Builder lock) throws SAXException {
final StatusType propertiesStatus = parsePropertiesStatus(attributes);
status.setPropertiesStatus(propertiesStatus);
final StatusType contentsStatus = parseContentsStatus(attributes);
status.setContentsStatus(contentsStatus);
if (StatusType.STATUS_CONFLICTED.equals(propertiesStatus) || StatusType.STATUS_CONFLICTED.equals(contentsStatus)) {
status.setIsConflicted(true);
}
// optional
final String locked = attributes.getValue("wc-locked");
if (locked != null && Boolean.parseBoolean(locked)) {
status.setIsLocked(true);
}
final String copied = attributes.getValue("copied");
if (copied != null && Boolean.parseBoolean(copied)) {
status.setIsCopied(true);
}
final String treeConflicted = attributes.getValue("tree-conflicted");
if (treeConflicted != null && Boolean.parseBoolean(treeConflicted)) {
status.setIsConflicted(true);
}
final String switched = attributes.getValue("switched");
if (switched != null && Boolean.parseBoolean(switched)) {
status.setIsSwitched(true);
}
final String revision = attributes.getValue("revision");
if (! StringUtil.isEmptyOrSpaces(revision)) {
try {
final long number = Long.parseLong(revision);
status.setRevision(SVNRevision.create(number));
} catch (NumberFormatException e) {
throw new SAXException(e);
}
}
}
@Override
public void postEffect(DataCallback callback) {
}
@Override
public void preEffect(DataCallback callback) {
}
@Override
public void characters(String s, PortableStatus pending, Lock.Builder lock) {
}
}
private static class Entry extends ElementHandlerBase {
private final File myBase;
private Entry(final File base) {
super(new String[]{"wc-status", "repos-status"}, new String[]{});
myBase = base;
}
@Override
protected void updateStatus(Attributes attributes, PortableStatus status, Lock.Builder lock) throws SAXException {
final String path = attributes.getValue("path");
assertSAX(path != null);
final File file = SvnUtil.resolvePath(myBase, path);
status.setFile(file);
final boolean exists = file.exists();
if (exists) {
status.setKind(exists, NodeKind.from(file.isDirectory()));
} else {
// This is a hack, needed because of the strange output of the native svn client:
/*
c:\TestProjects\sortedProjects\Subversion\local\withExt82420\mod4>svn st --xml
<?xml version="1.0" encoding="UTF-8"?>
<status>
<target
path=".">
<entry
path="mod4">
<wc-status
props="none"
item="unversioned">
</wc-status>
</entry>
</target>
</status>
while
c:\TestProjects\sortedProjects\Subversion\local\withExt82420\mod4>dir
Volume in drive C has no label.
Volume Serial Number is B4EA-B379
Directory of c:\TestProjects\sortedProjects\Subversion\local\withExt82420\mod4
03/09/2012 05:30 PM <DIR> .
03/09/2012 05:30 PM <DIR> ..
03/09/2012 05:30 PM 437 mod4.iml
03/09/2012 05:30 PM <DIR> src
and no "mod4" under
*/
final StatusType ns = status.getNodeStatus();
if (myBase.getName().equals(path) && ! StatusType.MISSING.equals(ns) &&
! StatusType.STATUS_DELETED.equals(ns) ) {
status.setKind(true, NodeKind.DIR);
status.setFile(myBase);
status.setPath("");
return;
}
status.setKind(exists, NodeKind.UNKNOWN);
}
status.setPath(path);
}
@Override
public void postEffect(DataCallback callback) {
callback.switchPath();
}
@Override
public void preEffect(DataCallback callback) {
}
@Override
public void characters(String s, PortableStatus pending, Lock.Builder lock) {
}
}
private static class Changelist extends ElementHandlerBase {
private String myName;
private Changelist() {
super(new String[]{}, new String[]{"entry"});
}
@Override
protected void updateStatus(Attributes attributes, PortableStatus status, Lock.Builder lock) throws SAXException {
final String name = attributes.getValue("name");
assertSAX(! StringUtil.isEmptyOrSpaces(name));
myName = name;
}
@Override
public void postEffect(DataCallback callback) {
}
@Override
public void preEffect(DataCallback callback) {
callback.switchChangeList(myName);
}
@Override
public void characters(String s, PortableStatus pending, Lock.Builder lock) {
}
}
private static class Target extends ElementHandlerBase {
private Target() {
super(new String[]{"against"}, new String[]{"entry"});
}
@Override
protected void updateStatus(Attributes attributes, PortableStatus status, Lock.Builder lock) {
}
@Override
public void postEffect(DataCallback callback) {
}
@Override
public void preEffect(DataCallback callback) {
}
@Override
public void characters(String s, PortableStatus pending, Lock.Builder lock) {
}
}
private static class Against extends ElementHandlerBase {
private Against() {
super(new String[0], new String[0]);
}
@Override
protected void updateStatus(Attributes attributes, PortableStatus status, Lock.Builder lock) throws SAXException {
}
@Override
public void postEffect(DataCallback callback) {
}
@Override
public void preEffect(DataCallback callback) {
}
@Override
public void characters(String s, PortableStatus pending, Lock.Builder lock) {
}
}
private static class Status extends ElementHandlerBase {
private Status() {
super(new String[]{"target"}, new String[]{"changelist"});
}
@Override
protected void updateStatus(Attributes attributes, PortableStatus status, Lock.Builder lock) {
}
@Override
public void postEffect(final DataCallback callback) {
}
@Override
public void preEffect(DataCallback callback) {
}
@Override
public void characters(String s, PortableStatus pending, Lock.Builder lock) {
}
}
public abstract static class ElementHandlerBase {
private final Set<String> myAwaitedChildren;
private final Set<String> myAwaitedChildrenMultiple;
ElementHandlerBase(String[] awaitedChildren, String[] awaitedChildrenMultiple) {
myAwaitedChildren = new HashSet<>(Arrays.asList(awaitedChildren));
myAwaitedChildrenMultiple = new HashSet<>(Arrays.asList(awaitedChildrenMultiple));
}
protected abstract void updateStatus(Attributes attributes, PortableStatus status, Lock.Builder lock) throws SAXException;
public abstract void postEffect(final DataCallback callback);
public abstract void preEffect(final DataCallback callback);
public boolean startElement(String uri, String localName, String qName, Attributes attributes) throws SAXException {
if (myAwaitedChildrenMultiple.contains(qName)) {
return true;
}
return myAwaitedChildren.remove(qName);
}
public abstract void characters(String s, PortableStatus pending, Lock.Builder lock);
public void preAttributesEffect(DataCallback callback) {}
}
public interface ExternalDataCallback {
void switchPath();
void switchChangeList(final String newList);
}
private interface DataCallback extends ExternalDataCallback {
void startRemoteStatus();
void endRemoteStatus();
void startLock();
void endLock();
void switchPath();
void switchChangeList(final String newList);
}
}
|
|
package org.jbehave.core.configuration.needle;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasItems;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.lang.reflect.Type;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.jbehave.core.annotations.Configure;
import org.jbehave.core.annotations.UsingSteps;
import org.jbehave.core.annotations.needle.UsingNeedle;
import org.jbehave.core.configuration.AnnotationBuilder;
import org.jbehave.core.configuration.Configuration;
import org.jbehave.core.configuration.MostUsefulConfiguration;
import org.jbehave.core.model.ExamplesTable;
import org.jbehave.core.model.ExamplesTableFactory;
import org.jbehave.core.model.TableTransformers;
import org.jbehave.core.steps.CandidateSteps;
import org.jbehave.core.steps.ParameterConverters;
import org.jbehave.core.steps.ParameterConverters.ParameterConverter;
import org.jbehave.core.steps.Steps;
import org.jbehave.core.steps.needle.NeedleStepsFactoryBehaviour.FooSteps;
import org.jbehave.core.steps.needle.NeedleStepsFactoryBehaviour.FooStepsWithDependency;
import org.jbehave.core.steps.needle.ValueGetter;
import org.junit.Assert;
import org.junit.Test;
public class NeedleAnnotationBuilderBehaviour {
@Test
public void shouldBuildConfigurationFromAnnotationsUsingConfigureAndNeedleConverters() {
final AnnotationBuilder builderAnnotated = new NeedleAnnotationBuilder(
AnnotatedUsingConfigureAndNeedleConverters.class);
final Configuration configuration = builderAnnotated
.buildConfiguration();
assertThatCustomObjectIsConverted(configuration.parameterConverters());
assertThatDateIsConvertedWithFormat(
configuration.parameterConverters(), new SimpleDateFormat(
"yyyy-MM-dd"));
assertThatExamplesTableIsConverted(configuration.parameterConverters());
}
private void assertThatCustomObjectIsConverted(
final ParameterConverters parameterConverters) {
assertThat(((CustomObject) parameterConverters.convert("value",
CustomObject.class)).toString(), equalTo(new CustomObject(
"value").toString()));
}
private void assertThatDateIsConvertedWithFormat(
final ParameterConverters parameterConverters,
final DateFormat dateFormat) {
final String date = "2010-10-10";
try {
assertThat((Date) parameterConverters.convert(date, Date.class),
equalTo(dateFormat.parse(date)));
} catch (final ParseException e) {
Assert.fail();
}
}
private void assertThatExamplesTableIsConverted(
final ParameterConverters parameterConverters) {
final String tableAsString = "||one||two||\n" + "|1|2|";
final ExamplesTable table = new ExamplesTable(tableAsString);
assertThat(table.getHeaders(), hasItems("one", "two"));
}
@Test
public void shouldBuildDefaultConfigurationIfAnnotationOrAnnotatedValuesNotPresent() {
final AnnotationBuilder builderNotAnnotated = new NeedleAnnotationBuilder(
NotAnnotated.class);
assertThatConfigurationIs(builderNotAnnotated.buildConfiguration(),
new MostUsefulConfiguration());
final AnnotationBuilder builderAnnotatedWithoutModules = new NeedleAnnotationBuilder(
AnnotatedWithoutInjectors.class);
assertThatConfigurationIs(
builderAnnotatedWithoutModules.buildConfiguration(),
new MostUsefulConfiguration());
}
private void assertThatConfigurationIs(
final Configuration builtConfiguration,
final Configuration defaultConfiguration) {
assertThat(builtConfiguration.failureStrategy(),
instanceOf(defaultConfiguration.failureStrategy().getClass()));
assertThat(builtConfiguration.storyLoader(),
instanceOf(defaultConfiguration.storyLoader().getClass()));
assertThat(builtConfiguration.stepPatternParser(),
instanceOf(defaultConfiguration.stepPatternParser().getClass()));
assertThat(builtConfiguration.storyReporterBuilder().formats(),
equalTo(defaultConfiguration.storyReporterBuilder().formats()));
assertThat(builtConfiguration.storyReporterBuilder().outputDirectory(),
equalTo(defaultConfiguration.storyReporterBuilder()
.outputDirectory()));
assertThat(builtConfiguration.storyReporterBuilder().viewResources(),
equalTo(defaultConfiguration.storyReporterBuilder()
.viewResources()));
assertThat(builtConfiguration.storyReporterBuilder()
.reportFailureTrace(), equalTo(defaultConfiguration
.storyReporterBuilder().reportFailureTrace()));
}
@Test
public void shouldBuildCandidateStepsFromAnnotationsUsingNeedle() {
final AnnotationBuilder builderAnnotated = new NeedleAnnotationBuilder(
AnnotatedUsingNeedle.class);
final Configuration configuration = builderAnnotated
.buildConfiguration();
assertTrue(builderAnnotated.buildCandidateSteps(configuration)
.isEmpty());
}
@Test
public void shouldBuildCandidateStepsFromAnnotationsUsingStepsAndNeedle() {
final AnnotationBuilder builderAnnotated = new NeedleAnnotationBuilder(
AnnotatedUsingStepsAndNeedle.class);
final Configuration configuration = builderAnnotated
.buildConfiguration();
assertThatStepsInstancesAre(
builderAnnotated.buildCandidateSteps(configuration),
FooSteps.class);
}
@Test
public void shouldBuildCandidateStepsFromAnnotationsUsingStepsAndNeedleAndConverters() {
final AnnotationBuilder builderAnnotated = new NeedleAnnotationBuilder(
AnnotatedUsingConfigureAndNeedleConverters.class);
final Configuration configuration = builderAnnotated
.buildConfiguration();
assertThatStepsInstancesAre(
builderAnnotated.buildCandidateSteps(configuration),
FooSteps.class);
}
@Test
public void shouldBuildEmptyStepsListIfAnnotationOrAnnotatedValuesNotPresent() {
final AnnotationBuilder builderNotAnnotated = new NeedleAnnotationBuilder(
NotAnnotated.class);
assertThatStepsInstancesAre(builderNotAnnotated.buildCandidateSteps());
final AnnotationBuilder builderAnnotatedWithoutInjectors = new NeedleAnnotationBuilder(
AnnotatedWithoutInjectors.class);
assertThatStepsInstancesAre(builderAnnotatedWithoutInjectors
.buildCandidateSteps());
}
@Test
public void shouldBuildStepsList() {
final AnnotationBuilder builderAnnotated = new NeedleAnnotationBuilder(
AnnotatedMultipleSteps.class);
final List<CandidateSteps> actual = builderAnnotated
.buildCandidateSteps();
assertThatStepsInstancesAre(actual, FooStepsWithDependency.class,
FooSteps.class);
}
@Test
public void shouldCreateOnlyOneContainerForMultipleBuildInvocations() {
final NeedleAnnotationBuilder builderAnnotated = new NeedleAnnotationBuilder(
AnnotatedUsingStepsAndNeedle.class);
builderAnnotated.buildConfiguration();
assertTrue(!builderAnnotated.getProvider().isEmpty());
}
@Test
public void shouldSupplyInjectors() {
final NeedleAnnotationBuilder builderAnnotated = new NeedleAnnotationBuilder(
AnnotatedWithStepsWithDependency.class);
final List<CandidateSteps> buildCandidateSteps = builderAnnotated
.buildCandidateSteps();
assertThatStepsInstancesAre(buildCandidateSteps,
FooStepsWithDependency.class);
final ValueGetter getter = ((FooStepsWithDependency) ((Steps) buildCandidateSteps
.get(0)).instance()).getGetter();
assertNotNull(getter);
assertThat((String) getter.getValue(), is(ValueGetter.VALUE));
}
private void assertThatStepsInstancesAre(
final List<CandidateSteps> candidateSteps,
final Class<?>... stepsClasses) {
assertThat(candidateSteps.size(), equalTo(stepsClasses.length));
// transform candidateSteps to Set of classes
final Set<Class<?>> candidateStepClasses = new HashSet<Class<?>>();
for (final CandidateSteps step : candidateSteps) {
candidateStepClasses.add(((Steps) step).instance().getClass());
}
assertThat(candidateStepClasses, hasItems(stepsClasses));
}
@Configure()
@UsingNeedle(provider = { ValueGetterProvider.class })
private static class AnnotatedUsingNeedle {
}
@Configure()
@UsingSteps(instances = { FooSteps.class })
@UsingNeedle(provider = { ValueGetterProvider.class })
private static class AnnotatedUsingStepsAndNeedle {
}
@Configure(parameterConverters = { MyExampleTableConverter.class,
MyDateConverter.class, CustomConverter.class })
@UsingSteps(instances = { FooSteps.class })
@UsingNeedle(provider = { ValueGetterProvider.class })
private static class AnnotatedUsingConfigureAndNeedleConverters {
}
@Configure()
@UsingNeedle()
private static class AnnotatedWithoutInjectors {
}
@Configure()
@UsingSteps(instances = { FooStepsWithDependency.class })
@UsingNeedle()
private static class AnnotatedWithStepsWithDependency {
}
@Configure()
@UsingSteps(instances = { FooStepsWithDependency.class, FooSteps.class })
@UsingNeedle()
private static class AnnotatedMultipleSteps {
}
private static class NotAnnotated {
}
public static class CustomConverter implements ParameterConverter {
public boolean accept(final Type type) {
return ((Class<?>) type).isAssignableFrom(CustomObject.class);
}
public Object convertValue(final String value, final Type type) {
return new CustomObject(value);
}
}
public static class MyExampleTableConverter extends ParameterConverters.ExamplesTableConverter {
public MyExampleTableConverter() {
super(new ExamplesTableFactory(new TableTransformers()));
}
}
public static class MyDateConverter extends
ParameterConverters.DateConverter {
public MyDateConverter() {
super(new SimpleDateFormat("yyyy-MM-dd"));
}
}
public static class CustomObject {
private final String value;
public CustomObject(final String value) {
this.value = value;
}
@Override
public String toString() {
return value;
}
}
}
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tez.runtime.library.input;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.DefaultCodec;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.tez.common.TezUtils;
import org.apache.tez.common.TezRuntimeFrameworkConfigs;
import org.apache.tez.common.counters.TaskCounter;
import org.apache.tez.common.counters.TezCounter;
import org.apache.tez.dag.api.TezConfiguration;
import org.apache.tez.runtime.api.AbstractLogicalInput;
import org.apache.tez.runtime.api.Event;
import org.apache.tez.runtime.api.InputContext;
import org.apache.tez.runtime.library.api.KeyValueReader;
import org.apache.tez.runtime.library.api.TezRuntimeConfiguration;
import org.apache.tez.runtime.library.common.ConfigUtils;
import org.apache.tez.runtime.library.common.MemoryUpdateCallbackHandler;
import org.apache.tez.runtime.library.common.readers.UnorderedKVReader;
import org.apache.tez.runtime.library.common.shuffle.ShuffleEventHandler;
import org.apache.tez.runtime.library.common.shuffle.impl.ShuffleInputEventHandlerImpl;
import org.apache.tez.runtime.library.common.shuffle.impl.ShuffleManager;
import org.apache.tez.runtime.library.common.shuffle.impl.SimpleFetchedInputAllocator;
import com.google.common.base.Preconditions;
/**
* {@link UnorderedKVInput} provides unordered key value input by
* bringing together (shuffling) a set of distributed data and providing a
* unified view to that data. There are no ordering constraints applied by
* this input.
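* <p>
* A minimal read-loop sketch, included only for illustration: in practice the framework
* constructs this input and the owning processor drives its lifecycle, so the {@code input}
* variable below is assumed to be supplied by the runtime rather than created by hand.
* <pre>{@code
* // exception handling omitted for brevity in this sketch
* input.start();
* KeyValueReader reader = input.getReader();
* while (reader.next()) {
*   Object key = reader.getCurrentKey();
*   Object value = reader.getCurrentValue();
*   // process the pair; no ordering is guaranteed across inputs
* }
* }</pre>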
*/
@Public
public class UnorderedKVInput extends AbstractLogicalInput {
private static final Log LOG = LogFactory.getLog(UnorderedKVInput.class);
private Configuration conf;
private ShuffleManager shuffleManager;
private final BlockingQueue<Event> pendingEvents = new LinkedBlockingQueue<Event>();
private long firstEventReceivedTime = -1;
private MemoryUpdateCallbackHandler memoryUpdateCallbackHandler;
@SuppressWarnings("rawtypes")
private UnorderedKVReader kvReader;
private final AtomicBoolean isStarted = new AtomicBoolean(false);
private TezCounter inputRecordCounter;
private SimpleFetchedInputAllocator inputManager;
private ShuffleEventHandler inputEventHandler;
public UnorderedKVInput(InputContext inputContext, int numPhysicalInputs) {
super(inputContext, numPhysicalInputs);
}
@Override
public synchronized List<Event> initialize() throws Exception {
Preconditions.checkArgument(getNumPhysicalInputs() != -1, "Number of Inputs has not been set");
this.conf = TezUtils.createConfFromUserPayload(getContext().getUserPayload());
if (getNumPhysicalInputs() == 0) {
getContext().requestInitialMemory(0L, null);
isStarted.set(true);
getContext().inputIsReady();
LOG.info("input fetch not required since there are 0 physical inputs for input vertex: "
+ getContext().getSourceVertexName());
return Collections.emptyList();
} else {
long initialMemReq = getInitialMemoryReq();
memoryUpdateCallbackHandler = new MemoryUpdateCallbackHandler();
this.getContext().requestInitialMemory(initialMemReq, memoryUpdateCallbackHandler);
}
this.conf.setStrings(TezRuntimeFrameworkConfigs.LOCAL_DIRS, getContext().getWorkDirs());
this.inputRecordCounter = getContext().getCounters().findCounter(
TaskCounter.INPUT_RECORDS_PROCESSED);
return Collections.emptyList();
}
@Override
public synchronized void start() throws IOException {
if (!isStarted.get()) {
////// Initial configuration
memoryUpdateCallbackHandler.validateUpdateReceived();
CompressionCodec codec;
if (ConfigUtils.isIntermediateInputCompressed(conf)) {
Class<? extends CompressionCodec> codecClass = ConfigUtils
.getIntermediateInputCompressorClass(conf, DefaultCodec.class);
codec = ReflectionUtils.newInstance(codecClass, conf);
} else {
codec = null;
}
boolean ifileReadAhead = conf.getBoolean(TezRuntimeConfiguration.TEZ_RUNTIME_IFILE_READAHEAD,
TezRuntimeConfiguration.TEZ_RUNTIME_IFILE_READAHEAD_DEFAULT);
int ifileReadAheadLength = 0;
int ifileBufferSize = 0;
if (ifileReadAhead) {
ifileReadAheadLength = conf.getInt(TezRuntimeConfiguration.TEZ_RUNTIME_IFILE_READAHEAD_BYTES,
TezRuntimeConfiguration.TEZ_RUNTIME_IFILE_READAHEAD_BYTES_DEFAULT);
}
ifileBufferSize = conf.getInt("io.file.buffer.size",
TezRuntimeConfiguration.TEZ_RUNTIME_IFILE_BUFFER_SIZE_DEFAULT);
this.inputManager = new SimpleFetchedInputAllocator(getContext().getUniqueIdentifier(), conf,
getContext().getTotalMemoryAvailableToTask(),
memoryUpdateCallbackHandler.getMemoryAssigned());
this.shuffleManager = new ShuffleManager(getContext(), conf, getNumPhysicalInputs(), ifileBufferSize,
ifileReadAhead, ifileReadAheadLength, codec, inputManager);
this.inputEventHandler = new ShuffleInputEventHandlerImpl(getContext(), shuffleManager,
inputManager, codec, ifileReadAhead, ifileReadAheadLength);
////// End of Initial configuration
this.shuffleManager.run();
this.kvReader = createReader(inputRecordCounter, codec,
ifileBufferSize, ifileReadAhead, ifileReadAheadLength);
List<Event> pending = new LinkedList<Event>();
pendingEvents.drainTo(pending);
if (pending.size() > 0) {
LOG.info("NoAutoStart delay in processing first event: "
+ (System.currentTimeMillis() - firstEventReceivedTime));
inputEventHandler.handleEvents(pending);
}
isStarted.set(true);
}
}
@Override
public synchronized KeyValueReader getReader() throws Exception {
Preconditions.checkState(isStarted.get(), "Must start input before invoking this method");
if (getNumPhysicalInputs() == 0) {
return new KeyValueReader() {
@Override
public boolean next() throws IOException {
return false;
}
@Override
public Object getCurrentKey() throws IOException {
throw new RuntimeException("No data available in Input");
}
@Override
public Object getCurrentValue() throws IOException {
throw new RuntimeException("No data available in Input");
}
};
}
return this.kvReader;
}
@Override
public void handleEvents(List<Event> inputEvents) throws IOException {
synchronized (this) {
if (getNumPhysicalInputs() == 0) {
throw new RuntimeException("No input events expected as numInputs is 0");
}
if (!isStarted.get()) {
if (firstEventReceivedTime == -1) {
firstEventReceivedTime = System.currentTimeMillis();
}
// This queue will keep growing if the Processor never starts this Input.
// The Input, however, has no way of knowing whether start() will ever be
// invoked.
pendingEvents.addAll(inputEvents);
return;
}
}
inputEventHandler.handleEvents(inputEvents);
}
@Override
public synchronized List<Event> close() throws Exception {
if (this.shuffleManager != null) {
this.shuffleManager.shutdown();
}
return null;
}
private long getInitialMemoryReq() {
return SimpleFetchedInputAllocator.getInitialMemoryReq(conf,
getContext().getTotalMemoryAvailableToTask());
}
@SuppressWarnings("rawtypes")
private UnorderedKVReader createReader(TezCounter inputRecordCounter, CompressionCodec codec,
int ifileBufferSize, boolean ifileReadAheadEnabled, int ifileReadAheadLength)
throws IOException {
return new UnorderedKVReader(shuffleManager, conf, codec, ifileReadAheadEnabled,
ifileReadAheadLength, ifileBufferSize, inputRecordCounter);
}
private static final Set<String> confKeys = new HashSet<String>();
static {
confKeys.add(TezRuntimeConfiguration.TEZ_RUNTIME_IFILE_READAHEAD);
confKeys.add(TezRuntimeConfiguration.TEZ_RUNTIME_IFILE_READAHEAD_BYTES);
confKeys.add(TezRuntimeConfiguration.TEZ_RUNTIME_IO_FILE_BUFFER_SIZE);
confKeys.add(TezRuntimeConfiguration.TEZ_RUNTIME_IO_SORT_FACTOR);
confKeys.add(TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_PARALLEL_COPIES);
confKeys.add(TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_FETCH_FAILURES_LIMIT);
confKeys.add(TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_FETCH_MAX_TASK_OUTPUT_AT_ONCE);
confKeys.add(TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_NOTIFY_READERROR);
confKeys.add(TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_CONNECT_TIMEOUT);
confKeys.add(TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_KEEP_ALIVE_ENABLED);
confKeys.add(TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_KEEP_ALIVE_MAX_CONNECTIONS);
confKeys.add(TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_READ_TIMEOUT);
confKeys.add(TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_BUFFER_SIZE);
confKeys.add(TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_ENABLE_SSL);
confKeys.add(TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_FETCH_BUFFER_PERCENT);
confKeys.add(TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_MEMORY_LIMIT_PERCENT);
confKeys.add(TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_MERGE_PERCENT);
confKeys.add(TezRuntimeConfiguration.TEZ_RUNTIME_INPUT_POST_MERGE_BUFFER_PERCENT);
confKeys.add(TezRuntimeConfiguration.TEZ_RUNTIME_KEY_CLASS);
confKeys.add(TezRuntimeConfiguration.TEZ_RUNTIME_VALUE_CLASS);
confKeys.add(TezRuntimeConfiguration.TEZ_RUNTIME_COMPRESS);
confKeys.add(TezRuntimeConfiguration.TEZ_RUNTIME_COMPRESS_CODEC);
confKeys.add(TezRuntimeConfiguration.TEZ_RUNTIME_OPTIMIZE_LOCAL_FETCH);
confKeys.add(TezConfiguration.TEZ_COUNTERS_MAX);
confKeys.add(TezConfiguration.TEZ_COUNTERS_GROUP_NAME_MAX_LENGTH);
confKeys.add(TezConfiguration.TEZ_COUNTERS_COUNTER_NAME_MAX_LENGTH);
confKeys.add(TezConfiguration.TEZ_COUNTERS_MAX_GROUPS);
}
// TODO Maybe add helper methods to extract keys
// TODO Maybe add constants or an Enum to access the keys
@InterfaceAudience.Private
public static Set<String> getConfigurationKeySet() {
return Collections.unmodifiableSet(confKeys);
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.lang3.exception;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertSame;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.lang.reflect.Constructor;
import java.lang.reflect.Modifier;
import java.util.List;
import org.apache.commons.lang3.test.NotVisibleExceptionFactory;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
/**
* Tests {@link org.apache.commons.lang3.exception.ExceptionUtils}.
*
* @since 1.0
*/
public class ExceptionUtilsTest {
/**
* Provides a method with a well-known chained/nested exception
* name which matches the full signature (e.g. has a return value
* of {@code Throwable}).
*/
private static class ExceptionWithCause extends Exception {
private static final long serialVersionUID = 1L;
private Throwable cause;
ExceptionWithCause(final String str, final Throwable cause) {
super(str);
setCause(cause);
}
ExceptionWithCause(final Throwable cause) {
setCause(cause);
}
@Override
public Throwable getCause() {
return cause;
}
public void setCause(final Throwable cause) {
this.cause = cause;
}
}
/**
* Provides a method with a well-known chained/nested exception
* name which does not match the full signature (e.g. lacks a
* return value of {@code Throwable}).
*/
private static class ExceptionWithoutCause extends Exception {
private static final long serialVersionUID = 1L;
@SuppressWarnings("unused")
public void getTargetException() {
// noop
}
}
// Temporary classes to allow the nested exception code to be removed
// prior to a rewrite of this test class.
private static class NestableException extends Exception {
private static final long serialVersionUID = 1L;
@SuppressWarnings("unused")
NestableException() {
}
NestableException(final Throwable t) {
super(t);
}
}
public static class TestThrowable extends Throwable {
private static final long serialVersionUID = 1L;
}
private static int redeclareCheckedException() {
return throwsCheckedException();
}
private static int throwsCheckedException() {
try {
throw new IOException();
} catch (final Exception e) {
return ExceptionUtils.<Integer>rethrow(e);
}
}
private NestableException nested;
private Throwable withCause;
private Throwable withoutCause;
private Throwable jdkNoCause;
private ExceptionWithCause cyclicCause;
private Throwable notVisibleException;
private Throwable createExceptionWithCause() {
try {
try {
throw new ExceptionWithCause(createExceptionWithoutCause());
} catch (final Throwable t) {
throw new ExceptionWithCause(t);
}
} catch (final Throwable t) {
return t;
}
}
private Throwable createExceptionWithoutCause() {
try {
throw new ExceptionWithoutCause();
} catch (final Throwable t) {
return t;
}
}
@BeforeEach
public void setUp() {
withoutCause = createExceptionWithoutCause();
nested = new NestableException(withoutCause);
withCause = new ExceptionWithCause(nested);
jdkNoCause = new NullPointerException();
final ExceptionWithCause a = new ExceptionWithCause(null);
final ExceptionWithCause b = new ExceptionWithCause(a);
a.setCause(b);
cyclicCause = new ExceptionWithCause(a);
notVisibleException = NotVisibleExceptionFactory.createException(withoutCause);
}
@AfterEach
public void tearDown() {
withoutCause = null;
nested = null;
withCause = null;
jdkNoCause = null;
cyclicCause = null;
notVisibleException = null;
}
@Test
public void test_getMessage_Throwable() {
Throwable th = null;
assertEquals("", ExceptionUtils.getMessage(th));
th = new IllegalArgumentException("Base");
assertEquals("IllegalArgumentException: Base", ExceptionUtils.getMessage(th));
th = new ExceptionWithCause("Wrapper", th);
assertEquals("ExceptionUtilsTest.ExceptionWithCause: Wrapper", ExceptionUtils.getMessage(th));
}
@Test
public void test_getRootCauseMessage_Throwable() {
Throwable th = null;
assertEquals("", ExceptionUtils.getRootCauseMessage(th));
th = new IllegalArgumentException("Base");
assertEquals("IllegalArgumentException: Base", ExceptionUtils.getRootCauseMessage(th));
th = new ExceptionWithCause("Wrapper", th);
assertEquals("IllegalArgumentException: Base", ExceptionUtils.getRootCauseMessage(th));
}
@Test
public void testCatchTechniques() {
IOException ioe = assertThrows(IOException.class, ExceptionUtilsTest::throwsCheckedException);
assertEquals(1, ExceptionUtils.getThrowableCount(ioe));
ioe = assertThrows(IOException.class, ExceptionUtilsTest::redeclareCheckedException);
assertEquals(1, ExceptionUtils.getThrowableCount(ioe));
}
@Test
public void testConstructor() {
assertNotNull(new ExceptionUtils());
final Constructor<?>[] cons = ExceptionUtils.class.getDeclaredConstructors();
assertEquals(1, cons.length);
assertTrue(Modifier.isPublic(cons[0].getModifiers()));
assertTrue(Modifier.isPublic(ExceptionUtils.class.getModifiers()));
assertFalse(Modifier.isFinal(ExceptionUtils.class.getModifiers()));
}
@SuppressWarnings("deprecation") // Specifically tests the deprecated methods
@Test
public void testGetCause_Throwable() {
assertSame(null, ExceptionUtils.getCause(null));
assertSame(null, ExceptionUtils.getCause(withoutCause));
assertSame(withoutCause, ExceptionUtils.getCause(nested));
assertSame(nested, ExceptionUtils.getCause(withCause));
assertSame(null, ExceptionUtils.getCause(jdkNoCause));
assertSame(cyclicCause.getCause(), ExceptionUtils.getCause(cyclicCause));
assertSame(cyclicCause.getCause().getCause(), ExceptionUtils.getCause(cyclicCause.getCause()));
assertSame(cyclicCause.getCause(), ExceptionUtils.getCause(cyclicCause.getCause().getCause()));
assertSame(withoutCause, ExceptionUtils.getCause(notVisibleException));
}
@SuppressWarnings("deprecation") // Specifically tests the deprecated methods
@Test
public void testGetCause_ThrowableArray() {
assertSame(null, ExceptionUtils.getCause(null, null));
assertSame(null, ExceptionUtils.getCause(null, new String[0]));
// not known type, so match on supplied method names
assertSame(nested, ExceptionUtils.getCause(withCause, null)); // default names
assertSame(null, ExceptionUtils.getCause(withCause, new String[0]));
assertSame(null, ExceptionUtils.getCause(withCause, new String[]{null}));
assertSame(nested, ExceptionUtils.getCause(withCause, new String[]{"getCause"}));
// not known type, so match on supplied method names
assertSame(null, ExceptionUtils.getCause(withoutCause, null));
assertSame(null, ExceptionUtils.getCause(withoutCause, new String[0]));
assertSame(null, ExceptionUtils.getCause(withoutCause, new String[]{null}));
assertSame(null, ExceptionUtils.getCause(withoutCause, new String[]{"getCause"}));
assertSame(null, ExceptionUtils.getCause(withoutCause, new String[]{"getTargetException"}));
}
@Test
public void testGetRootCause_Throwable() {
assertSame(null, ExceptionUtils.getRootCause(null));
assertSame(withoutCause, ExceptionUtils.getRootCause(withoutCause));
assertSame(withoutCause, ExceptionUtils.getRootCause(nested));
assertSame(withoutCause, ExceptionUtils.getRootCause(withCause));
assertSame(jdkNoCause, ExceptionUtils.getRootCause(jdkNoCause));
assertSame(cyclicCause.getCause().getCause(), ExceptionUtils.getRootCause(cyclicCause));
}
@Test
public void testGetRootCauseStackTrace_Throwable() {
assertEquals(0, ExceptionUtils.getRootCauseStackTrace(null).length);
final Throwable cause = createExceptionWithCause();
String[] stackTrace = ExceptionUtils.getRootCauseStackTrace(cause);
boolean match = false;
for (final String element : stackTrace) {
if (element.startsWith(ExceptionUtils.WRAPPED_MARKER)) {
match = true;
break;
}
}
assertTrue(match);
stackTrace = ExceptionUtils.getRootCauseStackTrace(withoutCause);
match = false;
for (final String element : stackTrace) {
if (element.startsWith(ExceptionUtils.WRAPPED_MARKER)) {
match = true;
break;
}
}
assertFalse(match);
}
@Test
public void testGetThrowableCount_Throwable() {
assertEquals(0, ExceptionUtils.getThrowableCount(null));
assertEquals(1, ExceptionUtils.getThrowableCount(withoutCause));
assertEquals(2, ExceptionUtils.getThrowableCount(nested));
assertEquals(3, ExceptionUtils.getThrowableCount(withCause));
assertEquals(1, ExceptionUtils.getThrowableCount(jdkNoCause));
assertEquals(3, ExceptionUtils.getThrowableCount(cyclicCause));
}
@Test
public void testGetThrowableList_Throwable_jdkNoCause() {
final List<?> throwables = ExceptionUtils.getThrowableList(jdkNoCause);
assertEquals(1, throwables.size());
assertSame(jdkNoCause, throwables.get(0));
}
@Test
public void testGetThrowableList_Throwable_nested() {
final List<?> throwables = ExceptionUtils.getThrowableList(nested);
assertEquals(2, throwables.size());
assertSame(nested, throwables.get(0));
assertSame(withoutCause, throwables.get(1));
}
@Test
public void testGetThrowableList_Throwable_null() {
final List<?> throwables = ExceptionUtils.getThrowableList(null);
assertEquals(0, throwables.size());
}
@Test
public void testGetThrowableList_Throwable_recursiveCause() {
final List<?> throwables = ExceptionUtils.getThrowableList(cyclicCause);
assertEquals(3, throwables.size());
assertSame(cyclicCause, throwables.get(0));
assertSame(cyclicCause.getCause(), throwables.get(1));
assertSame(cyclicCause.getCause().getCause(), throwables.get(2));
}
@Test
public void testGetThrowableList_Throwable_withCause() {
final List<?> throwables = ExceptionUtils.getThrowableList(withCause);
assertEquals(3, throwables.size());
assertSame(withCause, throwables.get(0));
assertSame(nested, throwables.get(1));
assertSame(withoutCause, throwables.get(2));
}
@Test
public void testGetThrowableList_Throwable_withoutCause() {
final List<?> throwables = ExceptionUtils.getThrowableList(withoutCause);
assertEquals(1, throwables.size());
assertSame(withoutCause, throwables.get(0));
}
@Test
public void testGetThrowables_Throwable_jdkNoCause() {
final Throwable[] throwables = ExceptionUtils.getThrowables(jdkNoCause);
assertEquals(1, throwables.length);
assertSame(jdkNoCause, throwables[0]);
}
@Test
public void testGetThrowables_Throwable_nested() {
final Throwable[] throwables = ExceptionUtils.getThrowables(nested);
assertEquals(2, throwables.length);
assertSame(nested, throwables[0]);
assertSame(withoutCause, throwables[1]);
}
@Test
public void testGetThrowables_Throwable_null() {
assertEquals(0, ExceptionUtils.getThrowables(null).length);
}
@Test
public void testGetThrowables_Throwable_recursiveCause() {
final Throwable[] throwables = ExceptionUtils.getThrowables(cyclicCause);
assertEquals(3, throwables.length);
assertSame(cyclicCause, throwables[0]);
assertSame(cyclicCause.getCause(), throwables[1]);
assertSame(cyclicCause.getCause().getCause(), throwables[2]);
}
@Test
public void testGetThrowables_Throwable_withCause() {
final Throwable[] throwables = ExceptionUtils.getThrowables(withCause);
assertEquals(3, throwables.length);
assertSame(withCause, throwables[0]);
assertSame(nested, throwables[1]);
assertSame(withoutCause, throwables[2]);
}
@Test
public void testGetThrowables_Throwable_withoutCause() {
final Throwable[] throwables = ExceptionUtils.getThrowables(withoutCause);
assertEquals(1, throwables.length);
assertSame(withoutCause, throwables[0]);
}
@Test
public void testIndexOf_ThrowableClass() {
assertEquals(-1, ExceptionUtils.indexOfThrowable(null, null));
assertEquals(-1, ExceptionUtils.indexOfThrowable(null, NestableException.class));
assertEquals(-1, ExceptionUtils.indexOfThrowable(withoutCause, null));
assertEquals(-1, ExceptionUtils.indexOfThrowable(withoutCause, ExceptionWithCause.class));
assertEquals(-1, ExceptionUtils.indexOfThrowable(withoutCause, NestableException.class));
assertEquals(0, ExceptionUtils.indexOfThrowable(withoutCause, ExceptionWithoutCause.class));
assertEquals(-1, ExceptionUtils.indexOfThrowable(nested, null));
assertEquals(-1, ExceptionUtils.indexOfThrowable(nested, ExceptionWithCause.class));
assertEquals(0, ExceptionUtils.indexOfThrowable(nested, NestableException.class));
assertEquals(1, ExceptionUtils.indexOfThrowable(nested, ExceptionWithoutCause.class));
assertEquals(-1, ExceptionUtils.indexOfThrowable(withCause, null));
assertEquals(0, ExceptionUtils.indexOfThrowable(withCause, ExceptionWithCause.class));
assertEquals(1, ExceptionUtils.indexOfThrowable(withCause, NestableException.class));
assertEquals(2, ExceptionUtils.indexOfThrowable(withCause, ExceptionWithoutCause.class));
assertEquals(-1, ExceptionUtils.indexOfThrowable(withCause, Exception.class));
assertEquals(-1, ExceptionUtils.indexOfThrowable(withCause, Throwable.class));
}
@Test
public void testIndexOf_ThrowableClassInt() {
assertEquals(-1, ExceptionUtils.indexOfThrowable(null, null, 0));
assertEquals(-1, ExceptionUtils.indexOfThrowable(null, NestableException.class, 0));
assertEquals(-1, ExceptionUtils.indexOfThrowable(withoutCause, null));
assertEquals(-1, ExceptionUtils.indexOfThrowable(withoutCause, ExceptionWithCause.class, 0));
assertEquals(-1, ExceptionUtils.indexOfThrowable(withoutCause, NestableException.class, 0));
assertEquals(0, ExceptionUtils.indexOfThrowable(withoutCause, ExceptionWithoutCause.class, 0));
assertEquals(-1, ExceptionUtils.indexOfThrowable(nested, null, 0));
assertEquals(-1, ExceptionUtils.indexOfThrowable(nested, ExceptionWithCause.class, 0));
assertEquals(0, ExceptionUtils.indexOfThrowable(nested, NestableException.class, 0));
assertEquals(1, ExceptionUtils.indexOfThrowable(nested, ExceptionWithoutCause.class, 0));
assertEquals(-1, ExceptionUtils.indexOfThrowable(withCause, null));
assertEquals(0, ExceptionUtils.indexOfThrowable(withCause, ExceptionWithCause.class, 0));
assertEquals(1, ExceptionUtils.indexOfThrowable(withCause, NestableException.class, 0));
assertEquals(2, ExceptionUtils.indexOfThrowable(withCause, ExceptionWithoutCause.class, 0));
assertEquals(0, ExceptionUtils.indexOfThrowable(withCause, ExceptionWithCause.class, -1));
assertEquals(0, ExceptionUtils.indexOfThrowable(withCause, ExceptionWithCause.class, 0));
assertEquals(-1, ExceptionUtils.indexOfThrowable(withCause, ExceptionWithCause.class, 1));
assertEquals(-1, ExceptionUtils.indexOfThrowable(withCause, ExceptionWithCause.class, 9));
assertEquals(-1, ExceptionUtils.indexOfThrowable(withCause, Exception.class, 0));
assertEquals(-1, ExceptionUtils.indexOfThrowable(withCause, Throwable.class, 0));
}
@Test
public void testIndexOfType_ThrowableClass() {
assertEquals(-1, ExceptionUtils.indexOfType(null, null));
assertEquals(-1, ExceptionUtils.indexOfType(null, NestableException.class));
assertEquals(-1, ExceptionUtils.indexOfType(withoutCause, null));
assertEquals(-1, ExceptionUtils.indexOfType(withoutCause, ExceptionWithCause.class));
assertEquals(-1, ExceptionUtils.indexOfType(withoutCause, NestableException.class));
assertEquals(0, ExceptionUtils.indexOfType(withoutCause, ExceptionWithoutCause.class));
assertEquals(-1, ExceptionUtils.indexOfType(nested, null));
assertEquals(-1, ExceptionUtils.indexOfType(nested, ExceptionWithCause.class));
assertEquals(0, ExceptionUtils.indexOfType(nested, NestableException.class));
assertEquals(1, ExceptionUtils.indexOfType(nested, ExceptionWithoutCause.class));
assertEquals(-1, ExceptionUtils.indexOfType(withCause, null));
assertEquals(0, ExceptionUtils.indexOfType(withCause, ExceptionWithCause.class));
assertEquals(1, ExceptionUtils.indexOfType(withCause, NestableException.class));
assertEquals(2, ExceptionUtils.indexOfType(withCause, ExceptionWithoutCause.class));
assertEquals(0, ExceptionUtils.indexOfType(withCause, Exception.class));
assertEquals(0, ExceptionUtils.indexOfType(withCause, Throwable.class));
}
@Test
public void testIndexOfType_ThrowableClassInt() {
assertEquals(-1, ExceptionUtils.indexOfType(null, null, 0));
assertEquals(-1, ExceptionUtils.indexOfType(null, NestableException.class, 0));
assertEquals(-1, ExceptionUtils.indexOfType(withoutCause, null));
assertEquals(-1, ExceptionUtils.indexOfType(withoutCause, ExceptionWithCause.class, 0));
assertEquals(-1, ExceptionUtils.indexOfType(withoutCause, NestableException.class, 0));
assertEquals(0, ExceptionUtils.indexOfType(withoutCause, ExceptionWithoutCause.class, 0));
assertEquals(-1, ExceptionUtils.indexOfType(nested, null, 0));
assertEquals(-1, ExceptionUtils.indexOfType(nested, ExceptionWithCause.class, 0));
assertEquals(0, ExceptionUtils.indexOfType(nested, NestableException.class, 0));
assertEquals(1, ExceptionUtils.indexOfType(nested, ExceptionWithoutCause.class, 0));
assertEquals(-1, ExceptionUtils.indexOfType(withCause, null));
assertEquals(0, ExceptionUtils.indexOfType(withCause, ExceptionWithCause.class, 0));
assertEquals(1, ExceptionUtils.indexOfType(withCause, NestableException.class, 0));
assertEquals(2, ExceptionUtils.indexOfType(withCause, ExceptionWithoutCause.class, 0));
assertEquals(0, ExceptionUtils.indexOfType(withCause, ExceptionWithCause.class, -1));
assertEquals(0, ExceptionUtils.indexOfType(withCause, ExceptionWithCause.class, 0));
assertEquals(-1, ExceptionUtils.indexOfType(withCause, ExceptionWithCause.class, 1));
assertEquals(-1, ExceptionUtils.indexOfType(withCause, ExceptionWithCause.class, 9));
assertEquals(0, ExceptionUtils.indexOfType(withCause, Exception.class, 0));
assertEquals(0, ExceptionUtils.indexOfType(withCause, Throwable.class, 0));
}
@Test
public void testPrintRootCauseStackTrace_Throwable() {
ExceptionUtils.printRootCauseStackTrace(null);
// could pipe System.err to a known stream, but there is not much point, as
// internally this method delegates to the stream-based overload anyway
}
@Test
public void testPrintRootCauseStackTrace_ThrowableStream() {
ByteArrayOutputStream out = new ByteArrayOutputStream(1024);
ExceptionUtils.printRootCauseStackTrace(null, (PrintStream) null);
ExceptionUtils.printRootCauseStackTrace(null, new PrintStream(out));
assertEquals(0, out.toString().length());
out = new ByteArrayOutputStream(1024);
assertThrows(
NullPointerException.class,
() -> ExceptionUtils.printRootCauseStackTrace(withCause, (PrintStream) null));
out = new ByteArrayOutputStream(1024);
final Throwable cause = createExceptionWithCause();
ExceptionUtils.printRootCauseStackTrace(cause, new PrintStream(out));
String stackTrace = out.toString();
assertTrue(stackTrace.contains(ExceptionUtils.WRAPPED_MARKER));
out = new ByteArrayOutputStream(1024);
ExceptionUtils.printRootCauseStackTrace(withoutCause, new PrintStream(out));
stackTrace = out.toString();
assertFalse(stackTrace.contains(ExceptionUtils.WRAPPED_MARKER));
}
@Test
public void testPrintRootCauseStackTrace_ThrowableWriter() {
StringWriter writer = new StringWriter(1024);
ExceptionUtils.printRootCauseStackTrace(null, (PrintWriter) null);
ExceptionUtils.printRootCauseStackTrace(null, new PrintWriter(writer));
assertEquals(0, writer.getBuffer().length());
writer = new StringWriter(1024);
assertThrows(
NullPointerException.class,
() -> ExceptionUtils.printRootCauseStackTrace(withCause, (PrintWriter) null));
writer = new StringWriter(1024);
final Throwable cause = createExceptionWithCause();
ExceptionUtils.printRootCauseStackTrace(cause, new PrintWriter(writer));
String stackTrace = writer.toString();
assertTrue(stackTrace.contains(ExceptionUtils.WRAPPED_MARKER));
writer = new StringWriter(1024);
ExceptionUtils.printRootCauseStackTrace(withoutCause, new PrintWriter(writer));
stackTrace = writer.toString();
assertFalse(stackTrace.contains(ExceptionUtils.WRAPPED_MARKER));
}
@Test
public void testRemoveCommonFrames_ListList() {
assertThrows(IllegalArgumentException.class, () -> ExceptionUtils.removeCommonFrames(null, null));
}
@Test
public void testThrow() {
final Exception expected = new InterruptedException();
final Exception actual = assertThrows(Exception.class, () -> ExceptionUtils.rethrow(expected));
assertSame(expected, actual);
}
@Test
public void testThrowableOf_ThrowableClass() {
assertNull(ExceptionUtils.throwableOfThrowable(null, null));
assertNull(ExceptionUtils.throwableOfThrowable(null, NestableException.class));
assertNull(ExceptionUtils.throwableOfThrowable(withoutCause, null));
assertNull(ExceptionUtils.throwableOfThrowable(withoutCause, ExceptionWithCause.class));
assertNull(ExceptionUtils.throwableOfThrowable(withoutCause, NestableException.class));
assertEquals(withoutCause, ExceptionUtils.throwableOfThrowable(withoutCause, ExceptionWithoutCause.class));
assertNull(ExceptionUtils.throwableOfThrowable(nested, null));
assertNull(ExceptionUtils.throwableOfThrowable(nested, ExceptionWithCause.class));
assertEquals(nested, ExceptionUtils.throwableOfThrowable(nested, NestableException.class));
assertEquals(nested.getCause(), ExceptionUtils.throwableOfThrowable(nested, ExceptionWithoutCause.class));
assertNull(ExceptionUtils.throwableOfThrowable(withCause, null));
assertEquals(withCause, ExceptionUtils.throwableOfThrowable(withCause, ExceptionWithCause.class));
assertEquals(withCause.getCause(), ExceptionUtils.throwableOfThrowable(withCause, NestableException.class));
assertEquals(withCause.getCause().getCause(), ExceptionUtils.throwableOfThrowable(withCause, ExceptionWithoutCause.class));
assertNull(ExceptionUtils.throwableOfThrowable(withCause, Exception.class));
assertNull(ExceptionUtils.throwableOfThrowable(withCause, Throwable.class));
}
@Test
public void testThrowableOf_ThrowableClassInt() {
assertNull(ExceptionUtils.throwableOfThrowable(null, null, 0));
assertNull(ExceptionUtils.throwableOfThrowable(null, NestableException.class, 0));
assertNull(ExceptionUtils.throwableOfThrowable(withoutCause, null));
assertNull(ExceptionUtils.throwableOfThrowable(withoutCause, ExceptionWithCause.class, 0));
assertNull(ExceptionUtils.throwableOfThrowable(withoutCause, NestableException.class, 0));
assertEquals(withoutCause, ExceptionUtils.throwableOfThrowable(withoutCause, ExceptionWithoutCause.class, 0));
assertNull(ExceptionUtils.throwableOfThrowable(nested, null, 0));
assertNull(ExceptionUtils.throwableOfThrowable(nested, ExceptionWithCause.class, 0));
assertEquals(nested, ExceptionUtils.throwableOfThrowable(nested, NestableException.class, 0));
assertEquals(nested.getCause(), ExceptionUtils.throwableOfThrowable(nested, ExceptionWithoutCause.class, 0));
assertNull(ExceptionUtils.throwableOfThrowable(withCause, null));
assertEquals(withCause, ExceptionUtils.throwableOfThrowable(withCause, ExceptionWithCause.class, 0));
assertEquals(withCause.getCause(), ExceptionUtils.throwableOfThrowable(withCause, NestableException.class, 0));
assertEquals(withCause.getCause().getCause(), ExceptionUtils.throwableOfThrowable(withCause, ExceptionWithoutCause.class, 0));
assertEquals(withCause, ExceptionUtils.throwableOfThrowable(withCause, ExceptionWithCause.class, -1));
assertEquals(withCause, ExceptionUtils.throwableOfThrowable(withCause, ExceptionWithCause.class, 0));
assertNull(ExceptionUtils.throwableOfThrowable(withCause, ExceptionWithCause.class, 1));
assertNull(ExceptionUtils.throwableOfThrowable(withCause, ExceptionWithCause.class, 9));
assertNull(ExceptionUtils.throwableOfThrowable(withCause, Exception.class, 0));
assertNull(ExceptionUtils.throwableOfThrowable(withCause, Throwable.class, 0));
}
@Test
public void testThrowableOfType_ThrowableClass() {
assertNull(ExceptionUtils.throwableOfType(null, null));
assertNull(ExceptionUtils.throwableOfType(null, NestableException.class));
assertNull(ExceptionUtils.throwableOfType(withoutCause, null));
assertNull(ExceptionUtils.throwableOfType(withoutCause, ExceptionWithCause.class));
assertNull(ExceptionUtils.throwableOfType(withoutCause, NestableException.class));
assertEquals(withoutCause, ExceptionUtils.throwableOfType(withoutCause, ExceptionWithoutCause.class));
assertNull(ExceptionUtils.throwableOfType(nested, null));
assertNull(ExceptionUtils.throwableOfType(nested, ExceptionWithCause.class));
assertEquals(nested, ExceptionUtils.throwableOfType(nested, NestableException.class));
assertEquals(nested.getCause(), ExceptionUtils.throwableOfType(nested, ExceptionWithoutCause.class));
assertNull(ExceptionUtils.throwableOfType(withCause, null));
assertEquals(withCause, ExceptionUtils.throwableOfType(withCause, ExceptionWithCause.class));
assertEquals(withCause.getCause(), ExceptionUtils.throwableOfType(withCause, NestableException.class));
assertEquals(withCause.getCause().getCause(), ExceptionUtils.throwableOfType(withCause, ExceptionWithoutCause.class));
assertEquals(withCause, ExceptionUtils.throwableOfType(withCause, Exception.class));
assertEquals(withCause, ExceptionUtils.throwableOfType(withCause, Throwable.class));
}
@Test
public void testThrowableOfType_ThrowableClassInt() {
assertNull(ExceptionUtils.throwableOfType(null, null, 0));
assertNull(ExceptionUtils.throwableOfType(null, NestableException.class, 0));
assertNull(ExceptionUtils.throwableOfType(withoutCause, null));
assertNull(ExceptionUtils.throwableOfType(withoutCause, ExceptionWithCause.class, 0));
assertNull(ExceptionUtils.throwableOfType(withoutCause, NestableException.class, 0));
assertEquals(withoutCause, ExceptionUtils.throwableOfType(withoutCause, ExceptionWithoutCause.class, 0));
assertNull(ExceptionUtils.throwableOfType(nested, null, 0));
assertNull(ExceptionUtils.throwableOfType(nested, ExceptionWithCause.class, 0));
assertEquals(nested, ExceptionUtils.throwableOfType(nested, NestableException.class, 0));
assertEquals(nested.getCause(), ExceptionUtils.throwableOfType(nested, ExceptionWithoutCause.class, 0));
assertNull(ExceptionUtils.throwableOfType(withCause, null));
assertEquals(withCause, ExceptionUtils.throwableOfType(withCause, ExceptionWithCause.class, 0));
assertEquals(withCause.getCause(), ExceptionUtils.throwableOfType(withCause, NestableException.class, 0));
assertEquals(withCause.getCause().getCause(), ExceptionUtils.throwableOfType(withCause, ExceptionWithoutCause.class, 0));
assertEquals(withCause, ExceptionUtils.throwableOfType(withCause, ExceptionWithCause.class, -1));
assertEquals(withCause, ExceptionUtils.throwableOfType(withCause, ExceptionWithCause.class, 0));
assertNull(ExceptionUtils.throwableOfType(withCause, ExceptionWithCause.class, 1));
assertNull(ExceptionUtils.throwableOfType(withCause, ExceptionWithCause.class, 9));
assertEquals(withCause, ExceptionUtils.throwableOfType(withCause, Exception.class, 0));
assertEquals(withCause, ExceptionUtils.throwableOfType(withCause, Throwable.class, 0));
}
@Test
public void testWrapAndUnwrapCheckedException() {
final Throwable t = assertThrows(Throwable.class, () -> ExceptionUtils.wrapAndThrow(new IOException()));
assertTrue(ExceptionUtils.hasCause(t, IOException.class));
}
@Test
public void testWrapAndUnwrapError() {
final Throwable t = assertThrows(Throwable.class, () -> ExceptionUtils.wrapAndThrow(new OutOfMemoryError()));
assertTrue(ExceptionUtils.hasCause(t, Error.class));
}
@Test
public void testWrapAndUnwrapRuntimeException() {
final Throwable t = assertThrows(Throwable.class, () -> ExceptionUtils.wrapAndThrow(new IllegalArgumentException()));
assertTrue(ExceptionUtils.hasCause(t, RuntimeException.class));
}
@Test
public void testWrapAndUnwrapThrowable() {
final Throwable t = assertThrows(Throwable.class, () -> ExceptionUtils.wrapAndThrow(new TestThrowable()));
assertTrue(ExceptionUtils.hasCause(t, TestThrowable.class));
}
@Test
@DisplayName("getStackFrames returns the string array of the stack frames when there is a real exception")
public void testgetStackFramesNullArg() {
final String[] actual = ExceptionUtils.getStackFrames((Throwable) null);
assertEquals(0, actual.length);
}
@Test
@DisplayName("getStackFrames returns empty string array when the argument is null")
public void testgetStackFramesHappyPath() {
final String[] actual = ExceptionUtils.getStackFrames(new Throwable() {
// provide static stack trace to make test stable
@Override
public void printStackTrace(final PrintWriter s) {
s.write("org.apache.commons.lang3.exception.ExceptionUtilsTest$1\n" +
"\tat org.apache.commons.lang3.exception.ExceptionUtilsTest.testgetStackFramesGappyPath(ExceptionUtilsTest.java:706)\n" +
"\tat java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)\n" +
"\tat com.intellij.rt.junit.JUnitStarter.prepareStreamsAndStart(JUnitStarter.java:230)\n" +
"\tat com.intellij.rt.junit.JUnitStarter.main(JUnitStarter.java:58)\n");
}
});
assertArrayEquals(new String[]{
"org.apache.commons.lang3.exception.ExceptionUtilsTest$1",
"\tat org.apache.commons.lang3.exception.ExceptionUtilsTest.testgetStackFramesGappyPath(ExceptionUtilsTest.java:706)",
"\tat java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)",
"\tat com.intellij.rt.junit.JUnitStarter.prepareStreamsAndStart(JUnitStarter.java:230)",
"\tat com.intellij.rt.junit.JUnitStarter.main(JUnitStarter.java:58)"
}, actual);
}
}
|
|
/*
* Copyright (C) 2007 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.collect;
import com.google.common.annotations.GwtCompatible;
import com.google.common.base.Objects;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import java.util.Collection;
import java.util.Iterator;
import javax.annotation.CheckForNull;
import org.checkerframework.checker.nullness.qual.Nullable;
/**
* A collection which forwards all its method calls to another collection. Subclasses should
* override one or more methods to modify the behavior of the backing collection as desired per the
* <a href="http://en.wikipedia.org/wiki/Decorator_pattern">decorator pattern</a>.
*
* <p><b>Warning:</b> The methods of {@code ForwardingCollection} forward <b>indiscriminately</b> to
* the methods of the delegate. For example, overriding {@link #add} alone <b>will not</b> change
* the behavior of {@link #addAll}, which can lead to unexpected behavior. In this case, you should
* override {@code addAll} as well, either providing your own implementation, or delegating to the
* provided {@code standardAddAll} method.
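 *
 * <p>For illustration only (a hypothetical subclass, not part of this class's API), a wrapper that
 * counts insertions must override both {@code add} and {@code addAll} to keep its count accurate:
 *
 * <pre>{@code
 * class CountingCollection<E> extends ForwardingCollection<E> {
 *   private final Collection<E> delegate;
 *   private int added;
 *
 *   CountingCollection(Collection<E> delegate) {
 *     this.delegate = delegate;
 *   }
 *
 *   protected Collection<E> delegate() {
 *     return delegate;
 *   }
 *
 *   public boolean add(E element) {
 *     added++;                           // count the single insertion
 *     return super.add(element);
 *   }
 *
 *   public boolean addAll(Collection<? extends E> collection) {
 *     return standardAddAll(collection); // routes through add(), so the count stays correct
 *   }
 * }
 * }</pre>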
*
* <p><b>{@code default} method warning:</b> This class does <i>not</i> forward calls to {@code
* default} methods. Instead, it inherits their default implementations. When those implementations
* invoke methods, they invoke methods on the {@code ForwardingCollection}.
*
* <p>The {@code standard} methods are not guaranteed to be thread-safe, even when all of the
* methods that they depend on are thread-safe.
*
* @author Kevin Bourrillion
* @author Louis Wasserman
* @since 2.0
*/
@GwtCompatible
@ElementTypesAreNonnullByDefault
public abstract class ForwardingCollection<E extends @Nullable Object> extends ForwardingObject
implements Collection<E> {
// TODO(lowasser): identify places where thread safety is actually lost
/** Constructor for use by subclasses. */
protected ForwardingCollection() {}
@Override
protected abstract Collection<E> delegate();
@Override
public Iterator<E> iterator() {
return delegate().iterator();
}
@Override
public int size() {
return delegate().size();
}
@CanIgnoreReturnValue
@Override
public boolean removeAll(Collection<?> collection) {
return delegate().removeAll(collection);
}
@Override
public boolean isEmpty() {
return delegate().isEmpty();
}
@Override
public boolean contains(@CheckForNull Object object) {
return delegate().contains(object);
}
@CanIgnoreReturnValue
@Override
public boolean add(@ParametricNullness E element) {
return delegate().add(element);
}
@CanIgnoreReturnValue
@Override
public boolean remove(@CheckForNull Object object) {
return delegate().remove(object);
}
@Override
public boolean containsAll(Collection<?> collection) {
return delegate().containsAll(collection);
}
@CanIgnoreReturnValue
@Override
public boolean addAll(Collection<? extends E> collection) {
return delegate().addAll(collection);
}
@CanIgnoreReturnValue
@Override
public boolean retainAll(Collection<?> collection) {
return delegate().retainAll(collection);
}
@Override
public void clear() {
delegate().clear();
}
@Override
public @Nullable Object[] toArray() {
return delegate().toArray();
}
@CanIgnoreReturnValue
@Override
@SuppressWarnings("nullness") // b/192354773 in our checker affects toArray declarations
public <T extends @Nullable Object> T[] toArray(T[] array) {
return delegate().toArray(array);
}
/**
* A sensible definition of {@link #contains} in terms of {@link #iterator}. If you override
* {@link #iterator}, you may wish to override {@link #contains} to forward to this
* implementation.
*
* @since 7.0
*/
protected boolean standardContains(@CheckForNull Object object) {
return Iterators.contains(iterator(), object);
}
/**
   * A sensible definition of {@link #containsAll} in terms of {@link #contains}. If you override
* {@link #contains}, you may wish to override {@link #containsAll} to forward to this
* implementation.
*
* @since 7.0
*/
protected boolean standardContainsAll(Collection<?> collection) {
return Collections2.containsAllImpl(this, collection);
}
/**
* A sensible definition of {@link #addAll} in terms of {@link #add}. If you override {@link
* #add}, you may wish to override {@link #addAll} to forward to this implementation.
*
* @since 7.0
*/
protected boolean standardAddAll(Collection<? extends E> collection) {
return Iterators.addAll(this, collection.iterator());
}
/**
* A sensible definition of {@link #remove} in terms of {@link #iterator}, using the iterator's
* {@code remove} method. If you override {@link #iterator}, you may wish to override {@link
* #remove} to forward to this implementation.
*
* @since 7.0
*/
protected boolean standardRemove(@CheckForNull Object object) {
Iterator<E> iterator = iterator();
while (iterator.hasNext()) {
if (Objects.equal(iterator.next(), object)) {
iterator.remove();
return true;
}
}
return false;
}
/**
* A sensible definition of {@link #removeAll} in terms of {@link #iterator}, using the iterator's
* {@code remove} method. If you override {@link #iterator}, you may wish to override {@link
* #removeAll} to forward to this implementation.
*
* @since 7.0
*/
protected boolean standardRemoveAll(Collection<?> collection) {
return Iterators.removeAll(iterator(), collection);
}
/**
* A sensible definition of {@link #retainAll} in terms of {@link #iterator}, using the iterator's
* {@code remove} method. If you override {@link #iterator}, you may wish to override {@link
* #retainAll} to forward to this implementation.
*
* @since 7.0
*/
protected boolean standardRetainAll(Collection<?> collection) {
return Iterators.retainAll(iterator(), collection);
}
/**
* A sensible definition of {@link #clear} in terms of {@link #iterator}, using the iterator's
* {@code remove} method. If you override {@link #iterator}, you may wish to override {@link
* #clear} to forward to this implementation.
*
* @since 7.0
*/
protected void standardClear() {
Iterators.clear(iterator());
}
/**
   * A sensible definition of {@link #isEmpty} as {@code !iterator().hasNext}. If you override
   * {@link #iterator}, you may wish to override {@link #isEmpty} to forward to this implementation.
* Alternately, it may be more efficient to implement {@code isEmpty} as {@code size() == 0}.
*
* @since 7.0
*/
protected boolean standardIsEmpty() {
return !iterator().hasNext();
}
/**
* A sensible definition of {@link #toString} in terms of {@link #iterator}. If you override
* {@link #iterator}, you may wish to override {@link #toString} to forward to this
* implementation.
*
* @since 7.0
*/
protected String standardToString() {
return Collections2.toStringImpl(this);
}
/**
* A sensible definition of {@link #toArray()} in terms of {@link #toArray(Object[])}. If you
* override {@link #toArray(Object[])}, you may wish to override {@link #toArray} to forward to
* this implementation.
*
* @since 7.0
*/
protected @Nullable Object[] standardToArray() {
@Nullable Object[] newArray = new @Nullable Object[size()];
return toArray(newArray);
}
/**
* A sensible definition of {@link #toArray(Object[])} in terms of {@link #size} and {@link
* #iterator}. If you override either of these methods, you may wish to override {@link #toArray}
* to forward to this implementation.
*
* @since 7.0
*/
protected <T extends @Nullable Object> T[] standardToArray(T[] array) {
return ObjectArrays.toArrayImpl(this, array);
}
}
|
|
/*
* Copyright 2013, Google Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the
* distribution.
* * Neither the name of Google Inc. nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.jf.dexlib2.builder;
import java.util.AbstractSet;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import org.jf.dexlib2.builder.debug.BuilderEndLocal;
import org.jf.dexlib2.builder.debug.BuilderEpilogueBegin;
import org.jf.dexlib2.builder.debug.BuilderLineNumber;
import org.jf.dexlib2.builder.debug.BuilderPrologueEnd;
import org.jf.dexlib2.builder.debug.BuilderRestartLocal;
import org.jf.dexlib2.builder.debug.BuilderSetSourceFile;
import org.jf.dexlib2.builder.debug.BuilderStartLocal;
import org.jf.dexlib2.iface.instruction.Instruction;
import org.jf.dexlib2.iface.reference.StringReference;
import org.jf.dexlib2.iface.reference.TypeReference;
import com.google.common.collect.ImmutableList;
public class MethodLocation {
@Nullable BuilderInstruction instruction;
int codeAddress;
int index;
// We end up creating and keeping around a *lot* of MethodLocation objects
// when building a new dex file, so it's worth the trouble of lazily creating
// the labels and debugItems lists only when they are needed
@Nullable
private List<Label> labels = null;
@Nullable
private List<BuilderDebugItem> debugItems = null;
MethodLocation(@Nullable BuilderInstruction instruction, int codeAddress, int index) {
this.instruction = instruction;
this.codeAddress = codeAddress;
this.index = index;
}
@Nullable
public Instruction getInstruction() {
return instruction;
}
public int getCodeAddress() {
return codeAddress;
}
public int getIndex() {
return index;
}
@Nonnull
private List<Label> getLabels(boolean mutable) {
if (labels == null) {
if (mutable) {
labels = new ArrayList<Label>(1);
return labels;
}
return ImmutableList.of();
}
return labels;
}
@Nonnull
private List<BuilderDebugItem> getDebugItems(boolean mutable) {
if (debugItems == null) {
if (mutable) {
debugItems = new ArrayList<BuilderDebugItem>(1);
return debugItems;
}
return ImmutableList.of();
}
return debugItems;
}
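    // Moves this location's labels and debug items onto "other", re-pointing their location
    // references and clearing them from this location.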
void mergeInto(@Nonnull MethodLocation other) {
if (this.labels != null || other.labels != null) {
List<Label> otherLabels = other.getLabels(true);
for (Label label: this.getLabels(false)) {
label.location = other;
otherLabels.add(label);
}
this.labels = null;
}
        if (this.debugItems != null || other.debugItems != null) {
// We need to keep the debug items in the same order. We add the other debug items to this list, then reassign
// the list.
List<BuilderDebugItem> debugItems = getDebugItems(true);
for (BuilderDebugItem debugItem: debugItems) {
debugItem.location = other;
}
debugItems.addAll(other.getDebugItems(false));
other.debugItems = debugItems;
this.debugItems = null;
}
}
@Nonnull
public Set<Label> getLabels() {
return new AbstractSet<Label>() {
@Nonnull
@Override public Iterator<Label> iterator() {
final Iterator<Label> it = getLabels(false).iterator();
return new Iterator<Label>() {
private @Nullable Label currentLabel = null;
@Override public boolean hasNext() {
return it.hasNext();
}
@Override public Label next() {
currentLabel = it.next();
return currentLabel;
}
@Override public void remove() {
if (currentLabel != null) {
currentLabel.location = null;
}
it.remove();
}
};
}
@Override public int size() {
return getLabels(false).size();
}
@Override public boolean add(@Nonnull Label label) {
if (label.isPlaced()) {
throw new IllegalArgumentException("Cannot add a label that is already placed. You must remove " +
"it from its current location first.");
}
label.location = MethodLocation.this;
getLabels(true).add(label);
return true;
}
};
}
@Nonnull
public Label addNewLabel() {
Label label = new Label(this);
getLabels(true).add(label);
return label;
}
@Nonnull
public Set<BuilderDebugItem> getDebugItems() {
return new AbstractSet<BuilderDebugItem>() {
@Nonnull
@Override public Iterator<BuilderDebugItem> iterator() {
final Iterator<BuilderDebugItem> it = getDebugItems(false).iterator();
return new Iterator<BuilderDebugItem>() {
private @Nullable BuilderDebugItem currentDebugItem = null;
@Override public boolean hasNext() {
return it.hasNext();
}
@Override public BuilderDebugItem next() {
currentDebugItem = it.next();
return currentDebugItem;
}
@Override public void remove() {
if (currentDebugItem != null) {
currentDebugItem.location = null;
}
it.remove();
}
};
}
@Override public int size() {
return getDebugItems(false).size();
}
@Override public boolean add(@Nonnull BuilderDebugItem debugItem) {
if (debugItem.location != null) {
throw new IllegalArgumentException("Cannot add a debug item that has already been added to a " +
"method. You must remove it from its current location first.");
}
debugItem.location = MethodLocation.this;
getDebugItems(true).add(debugItem);
return true;
}
};
}
public void addLineNumber(int lineNumber) {
getDebugItems().add(new BuilderLineNumber(lineNumber));
}
public void addStartLocal(int registerNumber, @Nullable StringReference name, @Nullable TypeReference type,
@Nullable StringReference signature) {
getDebugItems().add(new BuilderStartLocal(registerNumber, name, type, signature));
}
public void addEndLocal(int registerNumber) {
getDebugItems().add(new BuilderEndLocal(registerNumber));
}
public void addRestartLocal(int registerNumber) {
getDebugItems().add(new BuilderRestartLocal(registerNumber));
}
public void addPrologue() {
getDebugItems().add(new BuilderPrologueEnd());
}
public void addEpilogue() {
getDebugItems().add(new BuilderEpilogueBegin());
}
public void addSetSourceFile(@Nullable StringReference sourceFile) {
getDebugItems().add(new BuilderSetSourceFile(sourceFile));
}
}
|
|
/**
* Copyright 2012 The PlayN Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package playn.java;
import java.awt.Graphics2D;
import java.awt.RenderingHints;
import java.awt.font.FontRenderContext;
import java.awt.image.BufferedImage;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import org.lwjgl.LWJGLException;
import org.lwjgl.opengl.Display;
import org.lwjgl.opengl.DisplayMode;
import pythagoras.f.Point;
import playn.core.CanvasImage;
import playn.core.Font;
import playn.core.Gradient;
import playn.core.TextFormat;
import playn.core.TextLayout;
import playn.core.TextWrap;
import playn.core.gl.GL20;
import playn.core.gl.GL20Context;
import playn.core.gl.GraphicsGL;
import playn.core.gl.GroupLayerGL;
import playn.core.gl.Scale;
import static playn.core.PlayN.*;
public class JavaGraphics extends GraphicsGL {
protected final JavaPlatform platform;
protected final GL20Context ctx;
protected final GroupLayerGL rootLayer;
// antialiased font context and aliased font context
final FontRenderContext aaFontContext, aFontContext;
public JavaGraphics(JavaPlatform platform, JavaPlatform.Config config) {
this.platform = platform;
// if we're being run in headless mode, create a stub GL context which does not trigger the
// initialization of LWJGL; this allows tests to run against non-graphics services without
// needing to configure LWJGL native libraries
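    // (illustrative sketch, based only on the fields read here: a headless test setup would set
    //  config.headless = true on its JavaPlatform.Config before creating the platform)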
this.ctx = config.headless ? new GL20Context(platform, null, config.scaleFactor, false) :
new JavaGLContext(platform, config.scaleFactor);
this.rootLayer = new GroupLayerGL(ctx);
// set up the dummy font contexts
Graphics2D aaGfx = new BufferedImage(1, 1, BufferedImage.TYPE_INT_ARGB).createGraphics();
aaGfx.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
aaFontContext = aaGfx.getFontRenderContext();
Graphics2D aGfx = new BufferedImage(1, 1, BufferedImage.TYPE_INT_ARGB).createGraphics();
aGfx.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_OFF);
aFontContext = aGfx.getFontRenderContext();
if (!config.headless) {
setDisplayMode(ctx.scale.scaledCeil(config.width), ctx.scale.scaledCeil(config.height),
config.fullscreen);
}
}
/**
* Registers a font with the graphics system.
*
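   * <p>Illustrative usage (the font name and path below are made up, not taken from the PlayN docs):
   *
   * <pre>{@code
   * ((JavaGraphics) PlayN.graphics()).registerFont("Custom Font", "fonts/CustomFont.ttf");
   * }</pre>
   *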
* @param name the name under which to register the font.
* @param path the path to the font resource (relative to the asset manager's path prefix).
* Currently only TrueType ({@code .ttf}) fonts are supported.
*/
public void registerFont(String name, String path) {
try {
_fonts.put(name, ((JavaAssets) assets()).requireResource(path).createFont());
} catch (Exception e) {
platform.reportError("Failed to load font [name=" + name + ", path=" + path + "]", e);
}
}
/**
* Changes the size of the PlayN window.
*/
public void setSize(int pixelWidth, int pixelHeight, boolean fullscreen) {
setDisplayMode(pixelWidth, pixelHeight, fullscreen);
ctx.setSize(pixelWidth, pixelHeight);
}
protected void setDisplayMode(int width, int height, boolean fullscreen) {
try {
// check if current mode is suitable
DisplayMode mode = Display.getDisplayMode();
if (fullscreen == Display.isFullscreen() &&
mode.getWidth() == width && mode.getHeight() == height)
return;
if (fullscreen) {
// try and find a mode matching width and height
DisplayMode matching = null;
for (DisplayMode test : Display.getAvailableDisplayModes()) {
if (test.getWidth() == width && test.getHeight() == height && test.isFullscreenCapable()) {
matching = test;
}
}
if (matching == null) {
platform.log().info("Could not find a matching fullscreen mode, available: " +
Arrays.asList(Display.getAvailableDisplayModes()));
} else {
mode = matching;
}
} else {
mode = new DisplayMode(width, height);
}
platform.log().debug("Updating display mode: " + mode + ", fullscreen: " + fullscreen);
// TODO: fix crashes when fullscreen is toggled repeatedly
if (fullscreen) {
Display.setDisplayModeAndFullscreen(mode);
// TODO: fix alt-tab, maybe add a key listener or something?
} else {
Display.setDisplayMode(mode);
}
} catch (LWJGLException ex) {
throw new RuntimeException(ex);
}
}
@Override
public GroupLayerGL rootLayer() {
return rootLayer;
}
@Override
public CanvasImage createImage(float width, float height) {
return new JavaCanvasImage(ctx, width, height);
}
@Override
public Gradient createLinearGradient(float x0, float y0, float x1, float y1,
int[] colors, float[] positions) {
return JavaGradient.createLinear(x0, y0, x1, y1, positions, colors);
}
@Override
public Gradient createRadialGradient(float x, float y, float r, int[] colors, float[] positions) {
return JavaGradient.createRadial(x, y, r, positions, colors);
}
@Override
public Font createFont(String name, Font.Style style, float size) {
java.awt.Font jfont = _fonts.get(name);
// if we don't have a custom font registered for this name, assume it's a platform font
if (jfont == null) {
jfont = new java.awt.Font(name, java.awt.Font.PLAIN, 12);
}
return new JavaFont(this, name, style, size, jfont);
}
@Override
public TextLayout layoutText(String text, TextFormat format) {
return JavaTextLayout.layoutText(this, text, format);
}
@Override
public TextLayout[] layoutText(String text, TextFormat format, TextWrap wrap) {
return JavaTextLayout.layoutText(this, text, format, wrap);
}
@Override
public int screenWidth() {
return ctx.scale.invScaledFloor(Display.getDesktopDisplayMode().getWidth());
}
@Override
public int screenHeight() {
return ctx.scale.invScaledFloor(Display.getDesktopDisplayMode().getHeight());
}
@Override
public GL20 gl20() {
return ctx.gl;
}
@Override
public GL20Context ctx() {
return ctx;
}
protected JavaImage createStaticImage(BufferedImage source, Scale scale) {
return new JavaStaticImage(ctx, source, scale);
}
protected JavaAsyncImage createAsyncImage(float width, float height) {
return new JavaAsyncImage(ctx, width, height);
}
protected void init() {
DisplayMode mode = Display.getDisplayMode();
ctx.setSize(mode.getWidth(), mode.getHeight());
ctx.init();
}
protected void paint() {
ctx.paint(rootLayer);
}
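  // Converts a mouse position from window (pixel) coordinates into scaled (virtual) coordinates.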
Point transformMouse(Point point) {
point.x /= ctx.scale.factor;
point.y /= ctx.scale.factor;
return point;
}
protected Map<String,java.awt.Font> _fonts = new HashMap<String,java.awt.Font>();
}
|
|
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution
// Automatically generated by LOXI from template of_class.java
// Do not modify
package org.projectfloodlight.openflow.protocol.ver14;
import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.stat.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.oxs.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Set;
import io.netty.buffer.ByteBuf;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;
class OFOxmUdpSrcVer14 implements OFOxmUdpSrc {
private static final Logger logger = LoggerFactory.getLogger(OFOxmUdpSrcVer14.class);
// version: 1.4
final static byte WIRE_VERSION = 5;
final static int LENGTH = 6;
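    // 6 bytes on the wire: 4-byte OXM header (typeLen) followed by the 2-byte UDP source port value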
private final static TransportPort DEFAULT_VALUE = TransportPort.NONE;
// OF message fields
private final TransportPort value;
//
// Immutable default instance
final static OFOxmUdpSrcVer14 DEFAULT = new OFOxmUdpSrcVer14(
DEFAULT_VALUE
);
// package private constructor - used by readers, builders, and factory
OFOxmUdpSrcVer14(TransportPort value) {
if(value == null) {
throw new NullPointerException("OFOxmUdpSrcVer14: property value cannot be null");
}
this.value = value;
}
// Accessors for OF message fields
@Override
public long getTypeLen() {
return 0x80001e02L;
}
@Override
public TransportPort getValue() {
return value;
}
@Override
public MatchField<TransportPort> getMatchField() {
return MatchField.UDP_SRC;
}
@Override
public boolean isMasked() {
return false;
}
public OFOxm<TransportPort> getCanonical() {
// exact match OXM is always canonical
return this;
}
@Override
    public TransportPort getMask() throws UnsupportedOperationException {
throw new UnsupportedOperationException("Property mask not supported in version 1.4");
}
@Override
public OFVersion getVersion() {
return OFVersion.OF_14;
}
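    // Creates a builder pre-populated from this OXM. Illustrative sketch (not from the generated docs):
    //   OFOxmUdpSrc copy = oxm.createBuilder().setValue(TransportPort.of(53)).build();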
public OFOxmUdpSrc.Builder createBuilder() {
return new BuilderWithParent(this);
}
static class BuilderWithParent implements OFOxmUdpSrc.Builder {
final OFOxmUdpSrcVer14 parentMessage;
// OF message fields
private boolean valueSet;
private TransportPort value;
BuilderWithParent(OFOxmUdpSrcVer14 parentMessage) {
this.parentMessage = parentMessage;
}
@Override
public long getTypeLen() {
return 0x80001e02L;
}
@Override
public TransportPort getValue() {
return value;
}
@Override
public OFOxmUdpSrc.Builder setValue(TransportPort value) {
this.value = value;
this.valueSet = true;
return this;
}
@Override
public MatchField<TransportPort> getMatchField() {
return MatchField.UDP_SRC;
}
@Override
public boolean isMasked() {
return false;
}
@Override
        public OFOxm<TransportPort> getCanonical() throws UnsupportedOperationException {
throw new UnsupportedOperationException("Property canonical not supported in version 1.4");
}
@Override
        public TransportPort getMask() throws UnsupportedOperationException {
throw new UnsupportedOperationException("Property mask not supported in version 1.4");
}
@Override
public OFVersion getVersion() {
return OFVersion.OF_14;
}
@Override
public OFOxmUdpSrc build() {
TransportPort value = this.valueSet ? this.value : parentMessage.value;
if(value == null)
throw new NullPointerException("Property value must not be null");
//
return new OFOxmUdpSrcVer14(
value
);
}
}
static class Builder implements OFOxmUdpSrc.Builder {
// OF message fields
private boolean valueSet;
private TransportPort value;
@Override
public long getTypeLen() {
return 0x80001e02L;
}
@Override
public TransportPort getValue() {
return value;
}
@Override
public OFOxmUdpSrc.Builder setValue(TransportPort value) {
this.value = value;
this.valueSet = true;
return this;
}
@Override
public MatchField<TransportPort> getMatchField() {
return MatchField.UDP_SRC;
}
@Override
public boolean isMasked() {
return false;
}
@Override
        public OFOxm<TransportPort> getCanonical() throws UnsupportedOperationException {
throw new UnsupportedOperationException("Property canonical not supported in version 1.4");
}
@Override
        public TransportPort getMask() throws UnsupportedOperationException {
throw new UnsupportedOperationException("Property mask not supported in version 1.4");
}
@Override
public OFVersion getVersion() {
return OFVersion.OF_14;
}
//
@Override
public OFOxmUdpSrc build() {
TransportPort value = this.valueSet ? this.value : DEFAULT_VALUE;
if(value == null)
throw new NullPointerException("Property value must not be null");
return new OFOxmUdpSrcVer14(
value
);
}
}
final static Reader READER = new Reader();
static class Reader implements OFMessageReader<OFOxmUdpSrc> {
@Override
public OFOxmUdpSrc readFrom(ByteBuf bb) throws OFParseError {
// fixed value property typeLen == 0x80001e02L
int typeLen = bb.readInt();
if(typeLen != (int) 0x80001e02)
throw new OFParseError("Wrong typeLen: Expected=0x80001e02L(0x80001e02L), got="+typeLen);
TransportPort value = TransportPort.read2Bytes(bb);
OFOxmUdpSrcVer14 oxmUdpSrcVer14 = new OFOxmUdpSrcVer14(
value
);
if(logger.isTraceEnabled())
logger.trace("readFrom - read={}", oxmUdpSrcVer14);
return oxmUdpSrcVer14;
}
}
public void putTo(PrimitiveSink sink) {
FUNNEL.funnel(this, sink);
}
final static OFOxmUdpSrcVer14Funnel FUNNEL = new OFOxmUdpSrcVer14Funnel();
static class OFOxmUdpSrcVer14Funnel implements Funnel<OFOxmUdpSrcVer14> {
private static final long serialVersionUID = 1L;
@Override
public void funnel(OFOxmUdpSrcVer14 message, PrimitiveSink sink) {
// fixed value property typeLen = 0x80001e02L
sink.putInt((int) 0x80001e02);
message.value.putTo(sink);
}
}
public void writeTo(ByteBuf bb) {
WRITER.write(bb, this);
}
final static Writer WRITER = new Writer();
static class Writer implements OFMessageWriter<OFOxmUdpSrcVer14> {
@Override
public void write(ByteBuf bb, OFOxmUdpSrcVer14 message) {
// fixed value property typeLen = 0x80001e02L
bb.writeInt((int) 0x80001e02);
message.value.write2Bytes(bb);
}
}
@Override
public String toString() {
StringBuilder b = new StringBuilder("OFOxmUdpSrcVer14(");
b.append("value=").append(value);
b.append(")");
return b.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
OFOxmUdpSrcVer14 other = (OFOxmUdpSrcVer14) obj;
if (value == null) {
if (other.value != null)
return false;
} else if (!value.equals(other.value))
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((value == null) ? 0 : value.hashCode());
return result;
}
}
|
|
package io.joynr.capabilities;
/*
* #%L
* %%
* Copyright (C) 2011 - 2017 BMW Car IT GmbH
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyLong;
import static org.mockito.Matchers.anyString;
import static org.mockito.Matchers.argThat;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.reset;
import static org.mockito.Mockito.timeout;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import io.joynr.arbitration.ArbitrationStrategy;
import io.joynr.arbitration.DiscoveryQos;
import io.joynr.arbitration.DiscoveryScope;
import io.joynr.dispatching.Dispatcher;
import io.joynr.exceptions.JoynrException;
import io.joynr.exceptions.JoynrRuntimeException;
import io.joynr.messaging.routing.MessageRouter;
import io.joynr.provider.DeferredVoid;
import io.joynr.provider.Promise;
import io.joynr.provider.PromiseListener;
import io.joynr.proxy.Callback;
import io.joynr.proxy.Future;
import io.joynr.proxy.ProxyBuilderFactory;
import io.joynr.runtime.GlobalAddressProvider;
import io.joynr.runtime.JoynrRuntime;
import joynr.infrastructure.GlobalCapabilitiesDirectory;
import joynr.infrastructure.GlobalDomainAccessController;
import joynr.system.RoutingTypes.ChannelAddress;
import joynr.types.CustomParameter;
import joynr.types.DiscoveryEntry;
import joynr.types.DiscoveryEntryWithMetaInfo;
import joynr.types.GlobalDiscoveryEntry;
import joynr.types.ProviderQos;
import joynr.types.ProviderScope;
import joynr.types.Version;
import org.hamcrest.Description;
import org.hamcrest.Matcher;
import org.hamcrest.Matchers;
import org.hamcrest.TypeSafeMatcher;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.ArgumentMatcher;
import org.mockito.Captor;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.internal.matchers.VarargMatcher;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.runners.MockitoJUnitRunner;
import org.mockito.stubbing.Answer;
@RunWith(MockitoJUnitRunner.class)
public class LocalCapabilitiesDirectoryTest {
private static final String TEST_URL = "http://testUrl";
private static final long ONE_DAY_IN_MS = 1 * 24 * 60 * 60 * 1000;
private static final long defaultDiscoveryRetryIntervalMs = 2000L;
private Long expiryDateMs = System.currentTimeMillis() + ONE_DAY_IN_MS;
private String publicKeyId = "publicKeyId";
@Mock
JoynrRuntime runtime;
@Mock
private GlobalCapabilitiesDirectoryClient globalCapabilitiesClient;
@Mock
private ExpiredDiscoveryEntryCacheCleaner expiredDiscoveryEntryCacheCleaner;
@Mock
private MessageRouter messageRouter;
@Mock
private Dispatcher dispatcher;
@Mock
private ProxyBuilderFactory proxyBuilderFactoryMock;
@Mock
private DiscoveryEntryStore localDiscoveryEntryStoreMock;
@Mock
private DiscoveryEntryStore globalDiscoveryEntryCacheMock;
@Mock
private GlobalAddressProvider globalAddressProvider;
@Mock
private CapabilitiesProvisioning capabilitiesProvisioning;
@Mock
private ScheduledExecutorService capabilitiesFreshnessUpdateExecutor;
@Captor
private ArgumentCaptor<Collection<DiscoveryEntryWithMetaInfo>> capabilitiesCaptor;
@Captor
private ArgumentCaptor<Runnable> runnableCaptor;
private LocalCapabilitiesDirectory localCapabilitiesDirectory;
private ChannelAddress channelAddress;
private String channelAddressSerialized;
private DiscoveryEntry discoveryEntry;
private GlobalDiscoveryEntry globalDiscoveryEntry;
public interface TestInterface {
public static final String INTERFACE_NAME = "interfaceName";
}
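    // Matches a DiscoveryEntryStore... varargs argument by asserting array equality against the
    // expected stores.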
private static class DiscoveryEntryStoreVarargMatcher extends ArgumentMatcher<DiscoveryEntryStore[]> implements
VarargMatcher {
private final DiscoveryEntryStore[] matchAgainst;
private DiscoveryEntryStoreVarargMatcher(DiscoveryEntryStore... matchAgainst) {
this.matchAgainst = matchAgainst;
}
@Override
public boolean matches(Object argument) {
assertNotNull(argument);
assertArrayEquals(matchAgainst, (DiscoveryEntryStore[]) argument);
return true;
}
}
@SuppressWarnings("unchecked")
@Before
public void setUp() throws Exception {
channelAddress = new ChannelAddress(TEST_URL, "testChannelId");
ObjectMapper objectMapper = new ObjectMapper();
channelAddressSerialized = objectMapper.writeValueAsString(channelAddress);
Field objectMapperField = CapabilityUtils.class.getDeclaredField("objectMapper");
objectMapperField.setAccessible(true);
objectMapperField.set(CapabilityUtils.class, objectMapper);
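        // Stub for globalCapabilitiesClient.add(...): immediately invoke the supplied callback's
        // onSuccess, simulating a successful registration at the global capabilities directory.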
Answer<Void> answer = new Answer<Void>() {
@Override
public Void answer(InvocationOnMock invocation) throws Throwable {
Object[] args = invocation.getArguments();
((Callback<Void>) args[0]).onSuccess(null);
return null;
}
};
doAnswer(answer).when(globalCapabilitiesClient).add(any(Callback.class), any(GlobalDiscoveryEntry.class));
String discoveryDirectoriesDomain = "io.joynr";
String capabilitiesDirectoryParticipantId = "capDir_participantId";
        String capabilitiesDirectoryChannelId = "dirchannelId";
String domainAccessControllerParticipantId = "domainAccessControllerParticipantId";
String domainAccessControllerChannelId = "domainAccessControllerChannelId";
DiscoveryEntry globalCapabilitiesDirectoryDiscoveryEntry = CapabilityUtils.newGlobalDiscoveryEntry(new Version(0,
1),
discoveryDirectoriesDomain,
GlobalCapabilitiesDirectory.INTERFACE_NAME,
capabilitiesDirectoryParticipantId,
new ProviderQos(),
System.currentTimeMillis(),
expiryDateMs,
domainAccessControllerChannelId,
new ChannelAddress(TEST_URL,
                                                                                               capabilitiesDirectoryChannelId));
DiscoveryEntry domainAccessControllerDiscoveryEntry = CapabilityUtils.newGlobalDiscoveryEntry(new Version(0, 1),
discoveryDirectoriesDomain,
GlobalDomainAccessController.INTERFACE_NAME,
domainAccessControllerParticipantId,
new ProviderQos(),
System.currentTimeMillis(),
expiryDateMs,
domainAccessControllerChannelId,
new ChannelAddress(TEST_URL,
domainAccessControllerChannelId));
when(capabilitiesProvisioning.getDiscoveryEntries()).thenReturn(Sets.newHashSet(globalCapabilitiesDirectoryDiscoveryEntry,
domainAccessControllerDiscoveryEntry));
// use default freshnessUpdateIntervalMs: 3600000ms (1h)
localCapabilitiesDirectory = new LocalCapabilitiesDirectoryImpl(capabilitiesProvisioning,
globalAddressProvider,
localDiscoveryEntryStoreMock,
globalDiscoveryEntryCacheMock,
messageRouter,
globalCapabilitiesClient,
expiredDiscoveryEntryCacheCleaner,
3600000,
capabilitiesFreshnessUpdateExecutor,
defaultDiscoveryRetryIntervalMs);
verify(expiredDiscoveryEntryCacheCleaner).scheduleCleanUpForCaches(Mockito.<ExpiredDiscoveryEntryCacheCleaner.CleanupAction> any(),
argThat(new DiscoveryEntryStoreVarargMatcher(globalDiscoveryEntryCacheMock,
localDiscoveryEntryStoreMock)));
verify(capabilitiesFreshnessUpdateExecutor).scheduleAtFixedRate(runnableCaptor.capture(),
anyLong(),
anyLong(),
eq(TimeUnit.MILLISECONDS));
ProviderQos providerQos = new ProviderQos();
CustomParameter[] parameterList = { new CustomParameter("key1", "value1"),
new CustomParameter("key2", "value2") };
providerQos.setCustomParameters(parameterList);
String participantId = "testParticipantId";
String domain = "domain";
discoveryEntry = new DiscoveryEntry(new Version(47, 11),
domain,
TestInterface.INTERFACE_NAME,
participantId,
providerQos,
System.currentTimeMillis(),
expiryDateMs,
publicKeyId);
globalDiscoveryEntry = new GlobalDiscoveryEntry(new Version(47, 11),
domain,
TestInterface.INTERFACE_NAME,
participantId,
providerQos,
System.currentTimeMillis(),
expiryDateMs,
publicKeyId,
channelAddressSerialized);
}
@SuppressWarnings("unchecked")
@Test(timeout = 1000)
public void addCapability() throws InterruptedException {
when(globalAddressProvider.get()).thenReturn(channelAddress);
localCapabilitiesDirectory.add(discoveryEntry);
ArgumentCaptor<GlobalDiscoveryEntry> argumentCaptor = ArgumentCaptor.forClass(GlobalDiscoveryEntry.class);
verify(globalCapabilitiesClient, timeout(200)).add(any(Callback.class), argumentCaptor.capture());
GlobalDiscoveryEntry capturedGlobalDiscoveryEntry = argumentCaptor.getValue();
assertNotNull(capturedGlobalDiscoveryEntry);
assertEquals(discoveryEntry.getDomain(), capturedGlobalDiscoveryEntry.getDomain());
assertEquals(discoveryEntry.getInterfaceName(), capturedGlobalDiscoveryEntry.getInterfaceName());
}
@SuppressWarnings("unchecked")
@Test(timeout = 2000)
public void addLocalOnlyCapability() throws InterruptedException {
ProviderQos providerQos = new ProviderQos();
providerQos.setScope(ProviderScope.LOCAL);
globalDiscoveryEntry = new GlobalDiscoveryEntry(new Version(47, 11),
"test",
TestInterface.INTERFACE_NAME,
"participantId",
providerQos,
System.currentTimeMillis(),
expiryDateMs,
publicKeyId,
channelAddressSerialized);
localCapabilitiesDirectory.add(discoveryEntry);
Thread.sleep(1000);
verify(globalCapabilitiesClient, never()).add(any(Callback.class), any(GlobalDiscoveryEntry.class));
}
@SuppressWarnings("unchecked")
@Test(timeout = 1000)
public void addGlobalCapSucceeds_NextAddShallNotAddGlobalAgain() throws InterruptedException {
ProviderQos providerQos = new ProviderQos();
providerQos.setScope(ProviderScope.GLOBAL);
String participantId = LocalCapabilitiesDirectoryTest.class.getName()
+ ".addGlobalCapSucceeds_NextAddShallNotAddGlobalAgain";
String domain = "testDomain";
final DiscoveryEntry discoveryEntry = new DiscoveryEntry(new Version(47, 11),
domain,
TestInterface.INTERFACE_NAME,
participantId,
providerQos,
System.currentTimeMillis(),
expiryDateMs,
publicKeyId);
globalDiscoveryEntry = new GlobalDiscoveryEntry(new Version(47, 11),
domain,
TestInterface.INTERFACE_NAME,
participantId,
providerQos,
System.currentTimeMillis(),
expiryDateMs,
publicKeyId,
channelAddressSerialized);
Promise<DeferredVoid> promise = localCapabilitiesDirectory.add(discoveryEntry);
promise.then(new PromiseListener() {
@Override
public void onFulfillment(Object... values) {
Mockito.doAnswer(createAddAnswerWithSuccess())
.when(globalCapabilitiesClient)
.add(any(Callback.class), eq(globalDiscoveryEntry));
verify(globalDiscoveryEntryCacheMock).add(eq(globalDiscoveryEntry));
verify(globalCapabilitiesClient).add(any(Callback.class), eq(globalDiscoveryEntry));
reset(globalCapabilitiesClient);
localCapabilitiesDirectory.add(discoveryEntry);
verify(globalCapabilitiesClient, timeout(200).never()).add(any(Callback.class),
eq(globalDiscoveryEntry));
}
@Override
public void onRejection(JoynrException error) {
Assert.fail("adding capability failed: " + error);
}
});
}
@SuppressWarnings("unchecked")
@Test(timeout = 1000)
public void addGlobalCapFails_NextAddShallAddGlobalAgain() throws InterruptedException {
ProviderQos providerQos = new ProviderQos();
providerQos.setScope(ProviderScope.GLOBAL);
        String participantId = LocalCapabilitiesDirectoryTest.class.getName() + ".addLocalAndThenGlobalShallWork";
String domain = "testDomain";
final DiscoveryEntry discoveryEntry = new DiscoveryEntry(new Version(47, 11),
domain,
TestInterface.INTERFACE_NAME,
participantId,
providerQos,
System.currentTimeMillis(),
expiryDateMs,
publicKeyId);
globalDiscoveryEntry = new GlobalDiscoveryEntry(new Version(47, 11),
domain,
TestInterface.INTERFACE_NAME,
participantId,
providerQos,
System.currentTimeMillis(),
expiryDateMs,
publicKeyId,
channelAddressSerialized);
Mockito.doAnswer(createAddAnswerWithError())
.when(globalCapabilitiesClient)
.add(any(Callback.class), eq(globalDiscoveryEntry));
Promise<DeferredVoid> promise = localCapabilitiesDirectory.add(discoveryEntry);
promise.then(new PromiseListener() {
@Override
public void onFulfillment(Object... values) {
verify(globalDiscoveryEntryCacheMock, never()).add(eq(globalDiscoveryEntry));
verify(globalCapabilitiesClient).add(any(Callback.class), eq(globalDiscoveryEntry));
reset(globalCapabilitiesClient);
localCapabilitiesDirectory.add(discoveryEntry);
verify(globalCapabilitiesClient, timeout(200)).add(any(Callback.class), eq(globalDiscoveryEntry));
}
@Override
public void onRejection(JoynrException error) {
}
});
}
private Answer<Future<List<GlobalDiscoveryEntry>>> createAnswer(final List<GlobalDiscoveryEntry> caps) {
return new Answer<Future<List<GlobalDiscoveryEntry>>>() {
@SuppressWarnings("unchecked")
@Override
public Future<List<GlobalDiscoveryEntry>> answer(InvocationOnMock invocation) throws Throwable {
Future<List<GlobalDiscoveryEntry>> result = new Future<List<GlobalDiscoveryEntry>>();
Object[] args = invocation.getArguments();
((Callback<List<GlobalDiscoveryEntry>>) args[0]).onSuccess(caps);
result.onSuccess(caps);
return result;
}
};
}
private Answer<Future<Void>> createAddAnswerWithSuccess() {
return new Answer<Future<Void>>() {
@SuppressWarnings("unchecked")
@Override
public Future<Void> answer(InvocationOnMock invocation) throws Throwable {
Future<Void> result = new Future<Void>();
Object[] args = invocation.getArguments();
((Callback<Void>) args[0]).onSuccess(null);
result.onSuccess(null);
return result;
}
};
}
private Answer<Future<Void>> createAddAnswerWithError() {
return new Answer<Future<Void>>() {
@SuppressWarnings("unchecked")
@Override
public Future<Void> answer(InvocationOnMock invocation) throws Throwable {
Future<Void> result = new Future<Void>();
Object[] args = invocation.getArguments();
((Callback<Void>) args[0]).onFailure(new JoynrRuntimeException("Simulating a JoynrRuntimeException on callback"));
result.onSuccess(null);
return result;
}
};
}
@SuppressWarnings("unchecked")
@Test(timeout = 1000)
public void lookupWithScopeGlobalOnly() throws InterruptedException {
List<GlobalDiscoveryEntry> caps = new ArrayList<GlobalDiscoveryEntry>();
String domain1 = "domain1";
String interfaceName1 = "interfaceName1";
DiscoveryQos discoveryQos = new DiscoveryQos(30000,
ArbitrationStrategy.HighestPriority,
1000,
DiscoveryScope.GLOBAL_ONLY);
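        // (argument order above assumed from joynr's DiscoveryQos constructor: discoveryTimeoutMs=30000,
        //  arbitration strategy, cacheMaxAgeMs=1000, discovery scope)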
CapabilitiesCallback capabilitiesCallback = mock(CapabilitiesCallback.class);
when(globalDiscoveryEntryCacheMock.lookup(eq(new String[]{ domain1 }),
eq(interfaceName1),
eq(discoveryQos.getCacheMaxAgeMs()))).thenReturn(new ArrayList<DiscoveryEntry>());
doAnswer(createAnswer(caps)).when(globalCapabilitiesClient).lookup(any(Callback.class),
eq(new String[]{ domain1 }),
eq(interfaceName1),
eq(discoveryQos.getDiscoveryTimeoutMs()));
localCapabilitiesDirectory.lookup(new String[]{ domain1 }, interfaceName1, discoveryQos, capabilitiesCallback);
verify(globalCapabilitiesClient, times(1)).lookup(any(Callback.class),
any(String[].class),
any(String.class),
eq(discoveryQos.getDiscoveryTimeoutMs()));
verify(capabilitiesCallback, times(1)).processCapabilitiesReceived(argThat(hasNEntries(0)));
verify(capabilitiesCallback, times(0)).processCapabilitiesReceived(argThat(hasNEntries(1)));
reset(capabilitiesCallback);
// add local entry
ProviderQos providerQos = new ProviderQos();
providerQos.setScope(ProviderScope.LOCAL);
DiscoveryEntry discoveryEntry = new DiscoveryEntry(new Version(47, 11),
domain1,
interfaceName1,
"localParticipant",
providerQos,
System.currentTimeMillis(),
expiryDateMs,
publicKeyId);
localCapabilitiesDirectory.add(discoveryEntry);
localCapabilitiesDirectory.lookup(new String[]{ domain1 }, interfaceName1, discoveryQos, capabilitiesCallback);
verify(globalCapabilitiesClient, times(2)).lookup(any(Callback.class),
eq(new String[]{ domain1 }),
eq(interfaceName1),
eq(discoveryQos.getDiscoveryTimeoutMs()));
verify(capabilitiesCallback, times(1)).processCapabilitiesReceived(argThat(hasNEntries(0)));
verify(capabilitiesCallback, times(0)).processCapabilitiesReceived(argThat(hasNEntries(1)));
reset(capabilitiesCallback);
// even deleting local cap entries shall have no effect, the global cap dir shall be invoked
localCapabilitiesDirectory.remove(discoveryEntry);
localCapabilitiesDirectory.lookup(new String[]{ domain1 }, interfaceName1, discoveryQos, capabilitiesCallback);
verify(globalCapabilitiesClient, times(3)).lookup(any(Callback.class),
eq(new String[]{ domain1 }),
eq(interfaceName1),
eq(discoveryQos.getDiscoveryTimeoutMs()));
verify(capabilitiesCallback, times(1)).processCapabilitiesReceived(argThat(hasNEntries(0)));
verify(capabilitiesCallback, times(0)).processCapabilitiesReceived(argThat(hasNEntries(1)));
reset(capabilitiesCallback);
// add global entry
GlobalDiscoveryEntry capInfo = new GlobalDiscoveryEntry(new Version(47, 11),
domain1,
interfaceName1,
"globalParticipant",
new ProviderQos(),
System.currentTimeMillis(),
expiryDateMs,
publicKeyId,
channelAddressSerialized);
caps.add(capInfo);
doAnswer(createAnswer(caps)).when(globalCapabilitiesClient).lookup(any(Callback.class),
eq(new String[]{ domain1 }),
eq(interfaceName1),
eq(discoveryQos.getDiscoveryTimeoutMs()));
localCapabilitiesDirectory.lookup(new String[]{ domain1 }, interfaceName1, discoveryQos, capabilitiesCallback);
verify(globalCapabilitiesClient, times(4)).lookup(any(Callback.class),
eq(new String[]{ domain1 }),
eq(interfaceName1),
eq(discoveryQos.getDiscoveryTimeoutMs()));
verify(capabilitiesCallback, times(0)).processCapabilitiesReceived(argThat(hasNEntries(0)));
verify(capabilitiesCallback, times(1)).processCapabilitiesReceived(argThat(hasNEntries(1)));
reset(capabilitiesCallback);
        // now, another lookup call shall use the cached result of the global lookup and no longer
        // call the global cap dir (as long as the cache is not expired)
reset(globalDiscoveryEntryCacheMock);
when(globalDiscoveryEntryCacheMock.lookup(eq(new String[]{ domain1 }),
eq(interfaceName1),
eq(discoveryQos.getCacheMaxAgeMs()))).thenReturn(Lists.newArrayList((DiscoveryEntry) capInfo));
localCapabilitiesDirectory.lookup(new String[]{ domain1 }, interfaceName1, discoveryQos, capabilitiesCallback);
verify(globalCapabilitiesClient, times(4)).lookup(any(Callback.class),
eq(new String[]{ domain1 }),
eq(interfaceName1),
eq(discoveryQos.getDiscoveryTimeoutMs()));
verify(capabilitiesCallback, times(0)).processCapabilitiesReceived(argThat(hasNEntries(0)));
verify(capabilitiesCallback, times(1)).processCapabilitiesReceived(argThat(hasNEntries(1)));
reset(capabilitiesCallback);
        // and now, invalidate the existing cached global values, resulting in another call to globalCapabilitiesClient
discoveryQos.setCacheMaxAgeMs(0);
Thread.sleep(1);
        // now, another lookup call shall call the globalCapabilitiesClient, as the cached global entries are expired
localCapabilitiesDirectory.lookup(new String[]{ domain1 }, interfaceName1, discoveryQos, capabilitiesCallback);
verify(globalCapabilitiesClient, times(5)).lookup(any(Callback.class),
eq(new String[]{ domain1 }),
eq(interfaceName1),
eq(discoveryQos.getDiscoveryTimeoutMs()));
verify(capabilitiesCallback, times(0)).processCapabilitiesReceived(argThat(hasNEntries(0)));
verify(capabilitiesCallback, times(1)).processCapabilitiesReceived(argThat(hasNEntries(1)));
reset(capabilitiesCallback);
reset(globalCapabilitiesClient);
}
@SuppressWarnings("unchecked")
@Test(timeout = 1000)
public void lookupWithScopeLocalThenGlobal() throws InterruptedException {
List<GlobalDiscoveryEntry> caps = new ArrayList<GlobalDiscoveryEntry>();
String domain1 = "domain1";
String interfaceName1 = "interfaceName1";
DiscoveryQos discoveryQos = new DiscoveryQos(30000,
ArbitrationStrategy.HighestPriority,
1000,
DiscoveryScope.LOCAL_THEN_GLOBAL);
CapabilitiesCallback capabilitiesCallback = Mockito.mock(CapabilitiesCallback.class);
Mockito.doAnswer(createAnswer(caps))
.when(globalCapabilitiesClient)
.lookup(any(Callback.class),
eq(new String[]{ domain1 }),
eq(interfaceName1),
eq(discoveryQos.getDiscoveryTimeoutMs()));
localCapabilitiesDirectory.lookup(new String[]{ domain1 }, interfaceName1, discoveryQos, capabilitiesCallback);
verify(globalCapabilitiesClient, times(1)).lookup(any(Callback.class),
eq(new String[]{ domain1 }),
eq(interfaceName1),
eq(discoveryQos.getDiscoveryTimeoutMs()));
verify(capabilitiesCallback, times(1)).processCapabilitiesReceived(argThat(hasNEntries(0)));
verify(capabilitiesCallback, times(0)).processCapabilitiesReceived(argThat(hasNEntries(1)));
// add local entry
ProviderQos providerQos = new ProviderQos();
providerQos.setScope(ProviderScope.LOCAL);
DiscoveryEntry discoveryEntry = new DiscoveryEntry(new Version(47, 11),
domain1,
interfaceName1,
"localParticipant",
providerQos,
System.currentTimeMillis(),
expiryDateMs,
publicKeyId);
reset(localDiscoveryEntryStoreMock);
when(localDiscoveryEntryStoreMock.lookup(eq(new String[]{ domain1 }), eq(interfaceName1))).thenReturn(Lists.newArrayList(discoveryEntry));
localCapabilitiesDirectory.lookup(new String[]{ domain1 }, interfaceName1, discoveryQos, capabilitiesCallback);
verify(globalCapabilitiesClient, times(1)).lookup(any(Callback.class),
eq(new String[]{ domain1 }),
eq(interfaceName1),
eq(discoveryQos.getDiscoveryTimeoutMs()));
verify(capabilitiesCallback, times(1)).processCapabilitiesReceived(argThat(hasNEntries(0)));
verify(capabilitiesCallback, times(1)).processCapabilitiesReceived(argThat(hasNEntries(1)));
// add global entry
GlobalDiscoveryEntry capInfo = new GlobalDiscoveryEntry(new Version(47, 11),
domain1,
interfaceName1,
"globalParticipant",
new ProviderQos(),
System.currentTimeMillis(),
expiryDateMs,
publicKeyId,
channelAddressSerialized);
caps.add(capInfo);
Mockito.doAnswer(createAnswer(caps))
.when(globalCapabilitiesClient)
.lookup(any(Callback.class),
eq(new String[]{ domain1 }),
eq(interfaceName1),
eq(discoveryQos.getDiscoveryTimeoutMs()));
localCapabilitiesDirectory.lookup(new String[]{ domain1 }, interfaceName1, discoveryQos, capabilitiesCallback);
verify(globalCapabilitiesClient, times(1)).lookup(any(Callback.class),
eq(new String[]{ domain1 }),
eq(interfaceName1),
eq(discoveryQos.getDiscoveryTimeoutMs()));
        // the local store mock is reset next, so the local entry is gone and the lookup falls
        // through to the global cap dir again (the global cache mock is not stubbed yet)
reset(localDiscoveryEntryStoreMock);
localCapabilitiesDirectory.lookup(new String[]{ domain1 }, interfaceName1, discoveryQos, capabilitiesCallback);
verify(globalCapabilitiesClient, times(2)).lookup(any(Callback.class),
eq(new String[]{ domain1 }),
eq(interfaceName1),
eq(discoveryQos.getDiscoveryTimeoutMs()));
        // now, another lookup call shall use the cached result of the global lookup and no longer
        // call the global cap dir (as long as the cache is not expired)
when(globalDiscoveryEntryCacheMock.lookup(eq(new String[]{ domain1 }),
eq(interfaceName1),
eq(discoveryQos.getCacheMaxAgeMs()))).thenReturn(Lists.newArrayList((DiscoveryEntry) capInfo));
localCapabilitiesDirectory.lookup(new String[]{ domain1 }, interfaceName1, discoveryQos, capabilitiesCallback);
verify(globalCapabilitiesClient, times(2)).lookup(any(Callback.class),
eq(new String[]{ domain1 }),
eq(interfaceName1),
eq(discoveryQos.getDiscoveryTimeoutMs()));
        // and now, invalidate the existing cached global values, resulting in another call to globalCapabilitiesClient
discoveryQos.setCacheMaxAgeMs(0);
Thread.sleep(1);
        // now, another lookup call shall call the globalCapabilitiesClient, as the cached global
        // entries are expired
localCapabilitiesDirectory.lookup(new String[]{ domain1 }, interfaceName1, discoveryQos, capabilitiesCallback);
verify(globalCapabilitiesClient, times(3)).lookup(any(Callback.class),
eq(new String[]{ domain1 }),
eq(interfaceName1),
eq(discoveryQos.getDiscoveryTimeoutMs()));
reset(globalCapabilitiesClient);
reset(capabilitiesCallback);
}
@Test(timeout = 1000)
public void lookupByParticipantIdWithScopeLocalSync() throws InterruptedException {
String domain1 = "domain1";
String interfaceName1 = "interfaceName1";
String participantId1 = "participantId1";
DiscoveryQos discoveryQos = new DiscoveryQos(30000,
ArbitrationStrategy.HighestPriority,
10000,
DiscoveryScope.LOCAL_ONLY);
// add local entry
ProviderQos providerQos = new ProviderQos();
providerQos.setScope(ProviderScope.LOCAL);
DiscoveryEntry discoveryEntry = new DiscoveryEntry(new Version(47, 11),
domain1,
interfaceName1,
participantId1,
providerQos,
System.currentTimeMillis(),
expiryDateMs,
publicKeyId);
DiscoveryEntryWithMetaInfo expectedDiscoveryEntry = CapabilityUtils.convertToDiscoveryEntryWithMetaInfo(true,
discoveryEntry);
when(localDiscoveryEntryStoreMock.lookup(eq(participantId1), eq(discoveryQos.getCacheMaxAgeMs()))).thenReturn(discoveryEntry);
DiscoveryEntry retrievedCapabilityEntry = localCapabilitiesDirectory.lookup(participantId1, discoveryQos);
assertEquals(expectedDiscoveryEntry, retrievedCapabilityEntry);
}
@SuppressWarnings("unchecked")
@Test(timeout = 1000)
public void lookupWithScopeLocalAndGlobal() throws InterruptedException {
List<GlobalDiscoveryEntry> caps = new ArrayList<GlobalDiscoveryEntry>();
String domain1 = "domain1";
String interfaceName1 = "interfaceName1";
DiscoveryQos discoveryQos = new DiscoveryQos(30000,
ArbitrationStrategy.HighestPriority,
500,
DiscoveryScope.LOCAL_AND_GLOBAL);
CapabilitiesCallback capabilitiesCallback = Mockito.mock(CapabilitiesCallback.class);
Mockito.doAnswer(createAnswer(caps))
.when(globalCapabilitiesClient)
.lookup(any(Callback.class),
eq(new String[]{ domain1 }),
eq(interfaceName1),
eq(discoveryQos.getDiscoveryTimeoutMs()));
localCapabilitiesDirectory.lookup(new String[]{ domain1 }, interfaceName1, discoveryQos, capabilitiesCallback);
verify(globalCapabilitiesClient, times(1)).lookup(any(Callback.class),
eq(new String[]{ domain1 }),
eq(interfaceName1),
eq(discoveryQos.getDiscoveryTimeoutMs()));
verify(capabilitiesCallback, times(1)).processCapabilitiesReceived(argThat(hasNEntries(0)));
verify(capabilitiesCallback, times(0)).processCapabilitiesReceived(argThat(hasNEntries(1)));
// add local entry
ProviderQos providerQos = new ProviderQos();
providerQos.setScope(ProviderScope.LOCAL);
DiscoveryEntry discoveryEntry = new DiscoveryEntry(new Version(47, 11),
domain1,
interfaceName1,
"localParticipant",
providerQos,
System.currentTimeMillis(),
expiryDateMs,
publicKeyId);
when(localDiscoveryEntryStoreMock.lookup(eq(new String[]{ domain1 }), eq(interfaceName1))).thenReturn(Lists.newArrayList(discoveryEntry));
localCapabilitiesDirectory.lookup(new String[]{ domain1 }, interfaceName1, discoveryQos, capabilitiesCallback);
verify(globalCapabilitiesClient, times(2)).lookup(any(Callback.class),
eq(new String[]{ domain1 }),
eq(interfaceName1),
eq(discoveryQos.getDiscoveryTimeoutMs()));
verify(capabilitiesCallback, times(1)).processCapabilitiesReceived(argThat(hasNEntries(0)));
verify(capabilitiesCallback, times(1)).processCapabilitiesReceived(argThat(hasNEntries(1)));
// add global entry
GlobalDiscoveryEntry capInfo = new GlobalDiscoveryEntry(new Version(47, 11),
domain1,
interfaceName1,
"globalParticipant",
new ProviderQos(),
System.currentTimeMillis(),
expiryDateMs,
publicKeyId,
channelAddressSerialized);
caps.add(capInfo);
Mockito.doAnswer(createAnswer(caps))
.when(globalCapabilitiesClient)
.lookup(any(Callback.class),
eq(new String[]{ domain1 }),
eq(interfaceName1),
eq(discoveryQos.getDiscoveryTimeoutMs()));
localCapabilitiesDirectory.lookup(new String[]{ domain1 }, interfaceName1, discoveryQos, capabilitiesCallback);
verify(globalCapabilitiesClient, times(3)).lookup(any(Callback.class),
eq(new String[]{ domain1 }),
eq(interfaceName1),
eq(discoveryQos.getDiscoveryTimeoutMs()));
// now, another lookup call shall use the cached result of the global lookup and no longer
// call the global capabilities directory (as long as the cache is not expired)
when(globalDiscoveryEntryCacheMock.lookup(eq(new String[]{ domain1 }),
eq(interfaceName1),
eq(discoveryQos.getCacheMaxAgeMs()))).thenReturn(Lists.newArrayList((DiscoveryEntry) capInfo));
localCapabilitiesDirectory.lookup(new String[]{ domain1 }, interfaceName1, discoveryQos, capabilitiesCallback);
verify(globalCapabilitiesClient, times(3)).lookup(any(Callback.class),
eq(new String[]{ domain1 }),
eq(interfaceName1),
eq(discoveryQos.getDiscoveryTimeoutMs()));
// and now, invalidate the existing cached global values, resulting in another call to the globalCapabilitiesClient
discoveryQos.setCacheMaxAgeMs(0);
Thread.sleep(1);
// with the cache invalidated, the next lookup call must contact the global capabilities
// directory again
localCapabilitiesDirectory.lookup(new String[]{ domain1 }, interfaceName1, discoveryQos, capabilitiesCallback);
verify(globalCapabilitiesClient, times(4)).lookup(any(Callback.class),
eq(new String[]{ domain1 }),
eq(interfaceName1),
eq(discoveryQos.getDiscoveryTimeoutMs()));
reset(globalCapabilitiesClient);
reset(capabilitiesCallback);
}
@Test
public void testLookupMultipleDomainsLocalOnly() {
String[] domains = new String[]{ "domain1", "domain2" };
String interfaceName = "interface1";
DiscoveryQos discoveryQos = new DiscoveryQos();
discoveryQos.setDiscoveryScope(DiscoveryScope.LOCAL_ONLY);
CapabilitiesCallback capabilitiesCallback = mock(CapabilitiesCallback.class);
Collection<DiscoveryEntry> entries = Lists.newArrayList(new DiscoveryEntry(new Version(0, 0),
"domain1",
interfaceName,
"participantId1",
new ProviderQos(),
System.currentTimeMillis(),
expiryDateMs,
interfaceName),
new DiscoveryEntry(new Version(0, 0),
"domain2",
interfaceName,
"participantId2",
new ProviderQos(),
System.currentTimeMillis(),
expiryDateMs,
interfaceName));
when(localDiscoveryEntryStoreMock.lookup(eq(domains), eq(interfaceName))).thenReturn(entries);
localCapabilitiesDirectory.lookup(domains, interfaceName, discoveryQos, capabilitiesCallback);
verify(capabilitiesCallback).processCapabilitiesReceived(capabilitiesCaptor.capture());
Collection<DiscoveryEntry> discoveredEntries = CapabilityUtils.convertToDiscoveryEntryList(capabilitiesCaptor.getValue());
assertNotNull(discoveredEntries);
assertEquals(2, discoveredEntries.size());
}
@SuppressWarnings("unchecked")
@Test
public void testLookupMultipleDomainsGlobalOnly() {
String[] domains = new String[]{ "domain1", "domain2" };
String interfaceName = "interface1";
DiscoveryQos discoveryQos = new DiscoveryQos();
discoveryQos.setDiscoveryScope(DiscoveryScope.GLOBAL_ONLY);
CapabilitiesCallback capabilitiesCallback = mock(CapabilitiesCallback.class);
when(globalDiscoveryEntryCacheMock.lookup(eq(domains), eq(interfaceName), eq(discoveryQos.getCacheMaxAgeMs()))).thenReturn(Lists.<DiscoveryEntry> newArrayList());
localCapabilitiesDirectory.lookup(domains, interfaceName, discoveryQos, capabilitiesCallback);
verify(globalCapabilitiesClient).lookup(any(Callback.class),
argThat(Matchers.arrayContainingInAnyOrder(domains)),
eq(interfaceName),
eq(discoveryQos.getDiscoveryTimeoutMs()));
}
@SuppressWarnings("unchecked")
@Test
public void testLookupMultipleDomainsGlobalOnlyAllCached() {
String[] domains = new String[]{ "domain1", "domain2" };
String interfaceName = "interface1";
DiscoveryQos discoveryQos = new DiscoveryQos();
discoveryQos.setDiscoveryScope(DiscoveryScope.GLOBAL_ONLY);
CapabilitiesCallback capabilitiesCallback = mock(CapabilitiesCallback.class);
List<DiscoveryEntry> entries = new ArrayList<>();
for (String domain : domains) {
GlobalDiscoveryEntry entry = new GlobalDiscoveryEntry();
entry.setDomain(domain);
entries.add(entry);
}
when(globalDiscoveryEntryCacheMock.lookup(eq(domains), eq(interfaceName), eq(discoveryQos.getCacheMaxAgeMs()))).thenReturn(entries);
localCapabilitiesDirectory.lookup(domains, interfaceName, discoveryQos, capabilitiesCallback);
verify(globalCapabilitiesClient, times(0)).lookup(any(Callback.class),
argThat(Matchers.arrayContainingInAnyOrder(domains)),
eq(interfaceName),
eq(discoveryQos.getDiscoveryTimeoutMs()));
}
@SuppressWarnings("unchecked")
@Test
public void testLookupMultipleDomainsGlobalOnlyOneCached() {
String[] domains = new String[]{ "domain1", "domain2" };
String interfaceName = "interface1";
DiscoveryQos discoveryQos = new DiscoveryQos();
discoveryQos.setDiscoveryScope(DiscoveryScope.GLOBAL_ONLY);
discoveryQos.setCacheMaxAgeMs(ONE_DAY_IN_MS);
CapabilitiesCallback capabilitiesCallback = mock(CapabilitiesCallback.class);
GlobalDiscoveryEntry entry = new GlobalDiscoveryEntry();
entry.setDomain("domain1");
Collection<DiscoveryEntry> entries = Lists.newArrayList((DiscoveryEntry) entry);
when(globalDiscoveryEntryCacheMock.lookup(eq(domains), eq(interfaceName), eq(discoveryQos.getCacheMaxAgeMs()))).thenReturn(entries);
localCapabilitiesDirectory.lookup(domains, interfaceName, discoveryQos, capabilitiesCallback);
verify(globalCapabilitiesClient).lookup(any(Callback.class),
argThat(Matchers.arrayContainingInAnyOrder(new String[]{ "domain2" })),
eq(interfaceName),
eq(discoveryQos.getDiscoveryTimeoutMs()));
}
@SuppressWarnings("unchecked")
@Test
public void testLookupMultipleDomainsLocalThenGlobal() {
String[] domains = new String[]{ "domain1", "domain2", "domain3" };
String interfaceName = "interface1";
DiscoveryQos discoveryQos = new DiscoveryQos();
discoveryQos.setDiscoveryScope(DiscoveryScope.LOCAL_THEN_GLOBAL);
discoveryQos.setCacheMaxAgeMs(ONE_DAY_IN_MS);
CapabilitiesCallback capabilitiesCallback = mock(CapabilitiesCallback.class);
DiscoveryEntry localEntry = new DiscoveryEntry();
localEntry.setDomain("domain1");
when(localDiscoveryEntryStoreMock.lookup(eq(domains), eq(interfaceName))).thenReturn(Lists.newArrayList(localEntry));
GlobalDiscoveryEntry globalEntry = new GlobalDiscoveryEntry();
globalEntry.setDomain("domain2");
Collection<DiscoveryEntry> entries = Lists.newArrayList((DiscoveryEntry) globalEntry);
when(globalDiscoveryEntryCacheMock.lookup(eq(domains), eq(interfaceName), eq(discoveryQos.getCacheMaxAgeMs()))).thenReturn(entries);
final GlobalDiscoveryEntry remoteGlobalEntry = new GlobalDiscoveryEntry(new Version(0, 0),
"domain3",
interfaceName,
"participantIdRemote",
new ProviderQos(),
System.currentTimeMillis(),
System.currentTimeMillis() + 10000L,
"publicKeyId",
channelAddressSerialized);
doAnswer(new Answer<Void>() {
@Override
public Void answer(InvocationOnMock invocation) throws Throwable {
Callback<List<GlobalDiscoveryEntry>> callback = (Callback<List<GlobalDiscoveryEntry>>) invocation.getArguments()[0];
callback.onSuccess(Lists.newArrayList(remoteGlobalEntry));
return null;
}
}).when(globalCapabilitiesClient).lookup(any(Callback.class), Mockito.<String[]> any(), anyString(), anyLong());
localCapabilitiesDirectory.lookup(domains, interfaceName, discoveryQos, capabilitiesCallback);
verify(globalCapabilitiesClient).lookup(any(Callback.class),
argThat(Matchers.arrayContainingInAnyOrder(new String[]{ "domain3" })),
eq(interfaceName),
eq(discoveryQos.getDiscoveryTimeoutMs()));
verify(capabilitiesCallback).processCapabilitiesReceived(capabilitiesCaptor.capture());
Collection<DiscoveryEntry> captured = CapabilityUtils.convertToDiscoveryEntrySet(capabilitiesCaptor.getValue());
assertNotNull(captured);
assertEquals(3, captured.size());
assertTrue(captured.contains(localEntry));
assertTrue(captured.contains(new DiscoveryEntry(globalEntry)));
assertTrue(captured.contains(new DiscoveryEntry(remoteGlobalEntry)));
}
@Test
public void testLookupByParticipantId_localEntry_DiscoveryEntryWithMetaInfoContainsExpectedIsLocalValue() {
String participantId = "participantId";
String interfaceName = "interfaceName";
DiscoveryQos discoveryQos = new DiscoveryQos();
discoveryQos.setDiscoveryScope(DiscoveryScope.LOCAL_THEN_GLOBAL);
// local DiscoveryEntry
String localDomain = "localDomain";
DiscoveryEntry localEntry = new DiscoveryEntry();
localEntry.setDomain(localDomain);
localEntry.setInterfaceName(interfaceName);
localEntry.setParticipantId(participantId);
when(localDiscoveryEntryStoreMock.lookup(eq(participantId), eq(discoveryQos.getCacheMaxAgeMs()))).thenReturn(localEntry);
DiscoveryEntryWithMetaInfo capturedLocalEntry = localCapabilitiesDirectory.lookup(participantId, discoveryQos);
DiscoveryEntryWithMetaInfo localEntryWithMetaInfo = CapabilityUtils.convertToDiscoveryEntryWithMetaInfo(true,
localEntry);
assertEquals(localEntryWithMetaInfo, capturedLocalEntry);
}
@Test
public void testLookupByParticipantId_cachedEntry_DiscoveryEntryWithMetaInfoContainsExpectedIsLocalValue() {
String participantId = "participantId";
String interfaceName = "interfaceName";
DiscoveryQos discoveryQos = new DiscoveryQos();
discoveryQos.setDiscoveryScope(DiscoveryScope.LOCAL_THEN_GLOBAL);
// cached global DiscoveryEntry
String globalDomain = "globalDomain";
GlobalDiscoveryEntry cachedGlobalEntry = new GlobalDiscoveryEntry();
cachedGlobalEntry.setDomain(globalDomain);
cachedGlobalEntry.setInterfaceName(interfaceName);
cachedGlobalEntry.setParticipantId(participantId);
when(globalDiscoveryEntryCacheMock.lookup(eq(participantId), eq(discoveryQos.getCacheMaxAgeMs()))).thenReturn(cachedGlobalEntry);
DiscoveryEntryWithMetaInfo capturedCachedGlobalEntry = localCapabilitiesDirectory.lookup(participantId,
discoveryQos);
DiscoveryEntryWithMetaInfo cachedGlobalEntryWithMetaInfo = CapabilityUtils.convertToDiscoveryEntryWithMetaInfo(false,
cachedGlobalEntry);
assertEquals(cachedGlobalEntryWithMetaInfo, capturedCachedGlobalEntry);
}
@Test
public void testLookupByParticipantId_globalEntry_DiscoveryEntryWithMetaInfoContainsExpectedIsLocalValue() {
String participantId = "participantId";
String interfaceName = "interfaceName";
DiscoveryQos discoveryQos = new DiscoveryQos();
discoveryQos.setDiscoveryScope(DiscoveryScope.LOCAL_THEN_GLOBAL);
// remote global DiscoveryEntry
String remoteGlobalDomain = "remoteglobaldomain";
final GlobalDiscoveryEntry remoteGlobalEntry = new GlobalDiscoveryEntry(new Version(0, 0),
remoteGlobalDomain,
interfaceName,
participantId,
new ProviderQos(),
System.currentTimeMillis(),
System.currentTimeMillis() + 10000L,
"publicKeyId",
channelAddressSerialized);
doAnswer(new Answer<Void>() {
@Override
public Void answer(InvocationOnMock invocation) throws Throwable {
Callback<GlobalDiscoveryEntry> callback = (Callback<GlobalDiscoveryEntry>) invocation.getArguments()[0];
callback.onSuccess(remoteGlobalEntry);
return null;
}
}).when(globalCapabilitiesClient).lookup(any(Callback.class), eq(participantId), anyLong());
DiscoveryEntryWithMetaInfo capturedRemoteGlobalEntry = localCapabilitiesDirectory.lookup(participantId,
discoveryQos);
DiscoveryEntryWithMetaInfo remoteGlobalEntryWithMetaInfo = CapabilityUtils.convertToDiscoveryEntryWithMetaInfo(false,
remoteGlobalEntry);
assertEquals(remoteGlobalEntryWithMetaInfo, capturedRemoteGlobalEntry);
}
@Test
public void testLookup_DiscoveryEntriesWithMetaInfoContainExpectedIsLocalValue() {
String globalDomain = "globaldomain";
String remoteGlobalDomain = "remoteglobaldomain";
String[] domains = new String[]{ "localdomain", globalDomain, remoteGlobalDomain };
String interfaceName = "interfaceName";
DiscoveryQos discoveryQos = new DiscoveryQos();
discoveryQos.setDiscoveryScope(DiscoveryScope.LOCAL_THEN_GLOBAL);
CapabilitiesCallback capabilitiesCallback = mock(CapabilitiesCallback.class);
// local DiscoveryEntry
DiscoveryEntry localEntry = new DiscoveryEntry();
localEntry.setDomain(domains[0]);
when(localDiscoveryEntryStoreMock.lookup(eq(domains), eq(interfaceName))).thenReturn(Lists.newArrayList(localEntry));
// cached global DiscoveryEntry
GlobalDiscoveryEntry cachedGlobalEntry = new GlobalDiscoveryEntry();
cachedGlobalEntry.setDomain(globalDomain);
Collection<DiscoveryEntry> cachedEntries = Lists.newArrayList((DiscoveryEntry) cachedGlobalEntry);
when(globalDiscoveryEntryCacheMock.lookup(eq(domains), eq(interfaceName), eq(discoveryQos.getCacheMaxAgeMs()))).thenReturn(cachedEntries);
// remote global DiscoveryEntry
final GlobalDiscoveryEntry remoteGlobalEntry = new GlobalDiscoveryEntry(new Version(0, 0),
remoteGlobalDomain,
interfaceName,
"participantIdRemote",
new ProviderQos(),
System.currentTimeMillis(),
System.currentTimeMillis() + 10000L,
"publicKeyId",
channelAddressSerialized);
doAnswer(new Answer<Void>() {
@Override
public Void answer(InvocationOnMock invocation) throws Throwable {
Callback<List<GlobalDiscoveryEntry>> callback = (Callback<List<GlobalDiscoveryEntry>>) invocation.getArguments()[0];
callback.onSuccess(Lists.newArrayList(remoteGlobalEntry));
return null;
}
}).when(globalCapabilitiesClient).lookup(any(Callback.class),
eq(new String[]{ remoteGlobalDomain }),
eq(interfaceName),
anyLong());
localCapabilitiesDirectory.lookup(domains, interfaceName, discoveryQos, capabilitiesCallback);
verify(capabilitiesCallback).processCapabilitiesReceived(capabilitiesCaptor.capture());
Collection<DiscoveryEntryWithMetaInfo> captured = capabilitiesCaptor.getValue();
assertNotNull(captured);
assertEquals(3, captured.size());
DiscoveryEntryWithMetaInfo localEntryWithMetaInfo = CapabilityUtils.convertToDiscoveryEntryWithMetaInfo(true,
localEntry);
assertTrue(captured.contains(localEntryWithMetaInfo));
DiscoveryEntryWithMetaInfo cachedGlobalEntryWithMetaInfo = CapabilityUtils.convertToDiscoveryEntryWithMetaInfo(false,
cachedGlobalEntry);
assertTrue(captured.contains(cachedGlobalEntryWithMetaInfo));
DiscoveryEntryWithMetaInfo remoteGlobalEntryWithMetaInfo = CapabilityUtils.convertToDiscoveryEntryWithMetaInfo(false,
remoteGlobalEntry);
assertTrue(captured.contains(remoteGlobalEntryWithMetaInfo));
}
private class MyCollectionMatcher extends TypeSafeMatcher<Collection<DiscoveryEntryWithMetaInfo>> {
private int n;
public MyCollectionMatcher(int n) {
this.n = n;
}
@Override
public void describeTo(Description description) {
description.appendText("list has " + n + " entries");
}
@Override
protected boolean matchesSafely(Collection<DiscoveryEntryWithMetaInfo> item) {
return item.size() == n;
}
}
Matcher<Collection<DiscoveryEntryWithMetaInfo>> hasNEntries(int n) {
return new MyCollectionMatcher(n);
}
@SuppressWarnings("unchecked")
@Test(timeout = 1000)
public void removeCapabilities() throws InterruptedException {
localCapabilitiesDirectory.remove(discoveryEntry);
verify(globalCapabilitiesClient, timeout(1000)).remove(any(Callback.class),
eq(Arrays.asList(globalDiscoveryEntry.getParticipantId())));
}
@Test
public void callTouchPeriodically() throws InterruptedException {
Runnable runnable = runnableCaptor.getValue();
runnable.run();
verify(globalCapabilitiesClient).touch(anyLong());
}
}
|
|
package de.homelab.madgaksha.lotsofbs.cutscenesystem.textbox;
import static de.homelab.madgaksha.lotsofbs.GlobalBag.batchPixel;
import static de.homelab.madgaksha.lotsofbs.GlobalBag.viewportGame;
import org.apache.commons.lang3.StringUtils;
import com.badlogic.gdx.graphics.Color;
import com.badlogic.gdx.graphics.g2d.BitmapFontCache;
import com.badlogic.gdx.graphics.g2d.NinePatch;
import com.badlogic.gdx.graphics.g2d.TextureAtlas.AtlasRegion;
import com.badlogic.gdx.math.Rectangle;
import com.badlogic.gdx.utils.Align;
import de.homelab.madgaksha.lotsofbs.enums.ESpeaker;
import de.homelab.madgaksha.lotsofbs.logging.Logger;
import de.homelab.madgaksha.lotsofbs.resourcecache.ETextbox;
import de.homelab.madgaksha.lotsofbs.resourcecache.ResourceCache;
/**
* A textbox supporting character portraits and character names.
*
* @author madgaksha
*
*/
public class FancyTextbox extends PlainTextbox {
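// Typical usage (illustrative sketch only; how the textbox instance is obtained and the
// concrete ESpeaker/EFaceVariation constants are assumptions, not defined in this class):
//   FancyTextbox box = ...;                       // e.g. provided by the cutscene system
//   box.setSpeaker(ESpeaker.SOME_SPEAKER);        // hypothetical enum constant
//   box.setFaceVariation(EFaceVariation.SMILING); // hypothetical enum constant
//   box.setFaceHeightRatio(0.2f);
//   box.setBoxColor(Color.WHITE);
//   // per frame: box.mainRender();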
@SuppressWarnings("unused")
private final static Logger LOG = Logger.getLogger(FancyTextbox.class);
private final static float DEFAULT_FACE_HEIGHT_RATIO = 0.18f;
/** Speaker with an optional name and/or face. */
private ESpeaker speaker = null;
/** Variation of the speaker face to use. */
private EFaceVariation faceVariation = null;
/** Ratio of face height to game screen height. */
private float faceHeightRatio = DEFAULT_FACE_HEIGHT_RATIO;
private boolean requestedFullHeight = false;
private boolean hasSpeakerName = false;
private boolean hasFaceVariation = false;
/** Nine patch for the text area when there is only text. */
private final NinePatch ninePatchAllBox;
/** Nine patch for the text area when there is only a speaker. */
private final NinePatch ninePatchBottomBox;
/** Nine patch for the speaker area. */
private final NinePatch ninePatchTopBox;
/**
* Nine patch for the speaker area when there is both a speaker and face.
*/
private final NinePatch ninePatchBottomLeftBox;
/** Nine patch for the face when there is a speaker as well. */
private final NinePatch ninePatchBottomRightBox;
/** Nine patch for the text area when there is no speaker. */
private final NinePatch ninePatchLeftBox;
/** Nine patch for the face when there is no speaker. */
private final NinePatch ninePatchRightBox;
/** A separate bitmap font cache for the speaker's name. */
private BitmapFontCache bitmapFontCacheSpeaker;
/** Texture with the face of the character speaking. */
private AtlasRegion faceTexture;
/** Offset for slide animation. */
private float currentOffsetY = 0.0f;
private float animationFactor = 1.0f;
/**
* When {@link PlainTextbox#USE_INTEGER_POSITIONS} is <code>true</code>,
* float translations cannot be applied exactly. We therefore store the
* difference between the requested translation and the one actually applied.
*/
private float leftoverVerticalTranslation = 0.0f;
private NinePatch ninePatchText;
private NinePatch ninePatchSpeaker;
private NinePatch ninePatchFace;
private final Rectangle speakerBoxFrame = new Rectangle();
private final Rectangle speakerBoxContent = new Rectangle();
private final Rectangle faceBoxFrame = new Rectangle();
private final Rectangle faceBoxContent = new Rectangle();
public FancyTextbox(NinePatch ninePatchAllBox, NinePatch ninePatchBottomBox, NinePatch ninePatchTopBox,
NinePatch ninePatchLeftBox, NinePatch ninePatchRightBox, NinePatch ninePatchBottomLeftBox,
NinePatch ninePatchBottomRightBox, ETextbox type) {
super(type);
this.ninePatchAllBox = ninePatchAllBox;
this.ninePatchBottomBox = ninePatchBottomBox;
this.ninePatchTopBox = ninePatchTopBox;
this.ninePatchLeftBox = ninePatchLeftBox;
this.ninePatchRightBox = ninePatchRightBox;
this.ninePatchBottomLeftBox = ninePatchBottomLeftBox;
this.ninePatchBottomRightBox = ninePatchBottomRightBox;
initialize();
}
private final void initialize() {
ninePatchAllBox.setColor(Color.WHITE);
ninePatchBottomBox.setColor(Color.WHITE);
ninePatchTopBox.setColor(Color.WHITE);
ninePatchBottomLeftBox.setColor(Color.WHITE);
ninePatchBottomRightBox.setColor(Color.WHITE);
ninePatchLeftBox.setColor(Color.WHITE);
ninePatchRightBox.setColor(Color.WHITE);
}
public void setSpeaker(ESpeaker speaker) {
this.speaker = speaker;
hasSpeakerName = speaker != null && speaker.hasName();
setRequiredCharacters();
dirty = true;
}
public void setFaceVariation(EFaceVariation faceVariation) {
if ((faceVariation != null && !hasFaceVariation) || (faceVariation == null && hasFaceVariation))
dirty = true;
hasFaceVariation = faceVariation != null;
this.faceVariation = faceVariation;
}
/**
* Ratio of the face height to the game screen height. You might want to change
* this together with {@link PlainTextbox#setTextHeightRatio(float)}.
*/
public void setFaceHeightRatio(float faceHeightRatio) {
if (this.faceHeightRatio != faceHeightRatio)
dirty = true;
this.faceHeightRatio = faceHeightRatio;
if (this.faceHeightRatio <= 0.0f)
this.faceHeightRatio = DEFAULT_FACE_HEIGHT_RATIO;
}
@Override
public void setBoxColor(Color boxColor) {
if (boxColor != null) {
ninePatchAllBox.setColor(boxColor);
ninePatchBottomBox.setColor(boxColor);
ninePatchTopBox.setColor(boxColor);
ninePatchBottomLeftBox.setColor(boxColor);
ninePatchBottomRightBox.setColor(boxColor);
ninePatchLeftBox.setColor(boxColor);
ninePatchRightBox.setColor(boxColor);
}
}
@Override
public void setFullHeight(boolean fullHeight) {
super.setFullHeight(fullHeight);
this.requestedFullHeight = fullHeight;
}
@Override
protected void updateBox() {
// Convert ttf font to a bitmap font.
beginTexboxFontLayout();
bitmapFontCacheSpeaker = new BitmapFontCache(bitmapFont, USE_INTEGER_POSITIONS);
// Apply original value for fullHeight property. Need to switch to full
// height when there is a face.
setFullHeight(requestedFullHeight);
// Check if a texture exists for this face variation.
hasFaceVariation = speaker != null && hasFaceVariation && speaker.hasFaceVariation(faceVariation);
// Try to load the face texture.
faceTexture = null;
if (hasFaceVariation) {
faceTexture = ResourceCache.getTexture(speaker.getFaceVariation(faceVariation));
hasFaceVariation = hasFaceVariation && faceTexture != null;
}
// Layout textbox with or without speaker and face.
if (hasSpeakerName && hasFaceVariation) {
// speaker name and face
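// Two-pass layout: lay out the main box at full width first so its height is known, size
// the face box against that height, then re-lay out the main box with its width reduced
// by the face box width before placing the speaker box on top.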
setFullHeight(true);
layoutMainBox(1.0f, ninePatchBottomLeftBox);
layoutFaceBox(mainBoxFrame.height, ninePatchBottomRightBox);
layoutMainBox(1.0f - (faceBoxFrame.width) / (viewportGame.getScreenWidth()),
ninePatchBottomLeftBox);
layoutSpeakerBox(mainBoxFrame.height, ninePatchTopBox);
ninePatchText = ninePatchBottomLeftBox;
ninePatchFace = ninePatchBottomRightBox;
ninePatchSpeaker = ninePatchTopBox;
} else if (hasSpeakerName) {
// speaker name only
layoutMainBox(1.0f, ninePatchBottomBox);
layoutSpeakerBox(mainBoxFrame.height, ninePatchTopBox);
faceBoxFrame.set(0, 0, 0, 0);
ninePatchText = ninePatchBottomBox;
ninePatchSpeaker = ninePatchTopBox;
} else if (hasFaceVariation) {
// face only
setFullHeight(true);
layoutMainBox(1.0f, ninePatchLeftBox);
layoutFaceBox(mainBoxFrame.height, ninePatchRightBox);
layoutMainBox(1.0f - faceBoxFrame.width / viewportGame.getScreenWidth(),
ninePatchLeftBox);
speakerBoxFrame.set(0, 0, 0, 0);
ninePatchText = ninePatchLeftBox;
ninePatchFace = ninePatchRightBox;
} else {
// plain text box
layoutMainBox(1.0f, ninePatchAllBox);
speakerBoxFrame.set(0, 0, 0, 0);
faceBoxFrame.set(0, 0, 0, 0);
ninePatchText = ninePatchAllBox;
}
finishTexboxFontLayout();
// Compute layout for speaker name.
if (hasSpeakerName) {
float verticalPosition = fontType.getVerticalAlignPosition().positionForCentered(speakerBoxContent,
bitmapFont);
bitmapFontCacheSpeaker.clear();
bitmapFontCacheSpeaker.setColor(speaker.getColor());
bitmapFontCacheSpeaker.addText(speaker.getName(), speakerBoxContent.x, verticalPosition, 0,
speaker.getName().length(), speakerBoxContent.width, Align.bottomLeft, false, StringUtils.EMPTY);
}
// Restore current animation state.
currentOffsetY = 0.0f;
leftoverVerticalTranslation = 0.0f;
applySlideEffect();
// Done updating.
dirty = false;
}
@Override
public void mainRender() {
// Draw background first.
ninePatchText.draw(batchPixel, mainBoxFrame.x, mainBoxFrame.y + currentOffsetY, mainBoxFrame.width,
mainBoxFrame.height);
if (hasSpeakerName) {
ninePatchSpeaker.draw(batchPixel, speakerBoxFrame.x, speakerBoxFrame.y + currentOffsetY,
speakerBoxFrame.width, speakerBoxFrame.height);
}
if (hasFaceVariation) {
ninePatchFace.draw(batchPixel, faceBoxFrame.x, faceBoxFrame.y + currentOffsetY, faceBoxFrame.width,
faceBoxFrame.height);
batchPixel.draw(faceTexture, faceBoxContent.x, faceBoxContent.y + currentOffsetY, faceBoxContent.width,
faceBoxContent.height);
}
// Draw text last.
renderTextboxText();
bitmapFontCacheSpeaker.draw(batchPixel);
}
@Override
protected String getSpeakerName() {
return hasSpeakerName ? speaker.getName() : StringUtils.EMPTY;
}
/**
* Computes the layout for the speaker box, positioned starting at the given height.
*
* @param startY
* Y coordinate of the bottom edge of the speaker box.
* @param ninePatch
* The nine patch to be used.
*/
private void layoutSpeakerBox(float startY, NinePatch ninePatch) {
layoutTextArea(1.0f, startY, ninePatch, speakerBoxFrame, speakerBoxContent, 1, 1);
}
private void layoutFaceBox(float height, NinePatch ninePatch) {
faceBoxFrame.height = height;
faceBoxContent.height = faceHeightRatio * viewportGame.getScreenHeight();
faceBoxContent.width = faceBoxContent.height * faceTexture.getRegionWidth() / faceTexture.getRegionHeight();
faceBoxFrame.width = faceBoxContent.width + ninePatch.getPadLeft() + ninePatch.getPadRight();
faceBoxContent.x = viewportGame.getScreenWidth() - faceBoxContent.width - ninePatch.getPadRight();
faceBoxContent.y = ninePatch.getPadBottom();
faceBoxFrame.x = faceBoxContent.x - ninePatch.getPadLeft();
faceBoxFrame.y = 0;
}
@Override
public void dispose() {
super.dispose();
if (bitmapFontCacheSpeaker != null)
bitmapFontCacheSpeaker.clear();
faceTexture = null;
ninePatchFace = null;
ninePatchSpeaker = null;
ninePatchText = null;
bitmapFontCacheSpeaker = null;
}
@Override
public void reset() {
super.reset();
if (bitmapFontCacheSpeaker != null)
bitmapFontCacheSpeaker.clear();
setSpeaker(null);
setFaceVariation(null);
setFaceHeightRatio(DEFAULT_FACE_HEIGHT_RATIO);
setSlideEffect(1.0f);
}
public void translateTextVertically(float dy) {
if (USE_INTEGER_POSITIONS) {
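// Accumulate the requested sub-pixel translation, apply only its rounded integer part and
// carry the remainder to the next call so repeated small translations do not drift.
// Worked example: three calls with dy = 0.4f apply 0 px, 1 px and 0 px (accumulator
// 0.4 -> 0.8 -> 0.2), i.e. 1 px in total for 1.2 px requested.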
leftoverVerticalTranslation += dy;
dy = Math.round(leftoverVerticalTranslation);
leftoverVerticalTranslation -= dy;
}
// Translate main font.
bitmapFont.getCache().translate(0, dy);
// Translate speaker.
bitmapFontCacheSpeaker.translate(0, dy);
}
public void setSlideEffect(float animationFactor) {
this.animationFactor = animationFactor;
}
@Override
protected void applySlideEffect() {
float targetOffsetY = (animationFactor - 1.0f) * (mainBoxFrame.height + speakerBoxFrame.height);
if (targetOffsetY != currentOffsetY) {
translateTextVertically(targetOffsetY - currentOffsetY);
currentOffsetY = targetOffsetY;
}
}
}
|
|
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.gluedatabrew.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/databrew-2017-07-25/ListJobs" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListJobsRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {
/**
* <p>
* The name of a dataset. Using this parameter indicates to return only those jobs that act on the specified
* dataset.
* </p>
*/
private String datasetName;
/**
* <p>
* The maximum number of results to return in this request.
* </p>
*/
private Integer maxResults;
/**
* <p>
* A token generated by DataBrew that specifies where to continue pagination if a previous request was truncated. To
* get the next set of pages, pass in the NextToken value from the response object of the previous page call.
* </p>
*/
private String nextToken;
/**
* <p>
* The name of a project. Using this parameter indicates to return only those jobs that are associated with the
* specified project.
* </p>
*/
private String projectName;
/**
* <p>
* The name of a dataset. Using this parameter indicates to return only those jobs that act on the specified
* dataset.
* </p>
*
* @param datasetName
* The name of a dataset. Using this parameter indicates to return only those jobs that act on the specified
* dataset.
*/
public void setDatasetName(String datasetName) {
this.datasetName = datasetName;
}
/**
* <p>
* The name of a dataset. Using this parameter indicates to return only those jobs that act on the specified
* dataset.
* </p>
*
* @return The name of a dataset. Using this parameter indicates to return only those jobs that act on the specified
* dataset.
*/
public String getDatasetName() {
return this.datasetName;
}
/**
* <p>
* The name of a dataset. Using this parameter indicates to return only those jobs that act on the specified
* dataset.
* </p>
*
* @param datasetName
* The name of a dataset. Using this parameter indicates to return only those jobs that act on the specified
* dataset.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ListJobsRequest withDatasetName(String datasetName) {
setDatasetName(datasetName);
return this;
}
/**
* <p>
* The maximum number of results to return in this request.
* </p>
*
* @param maxResults
* The maximum number of results to return in this request.
*/
public void setMaxResults(Integer maxResults) {
this.maxResults = maxResults;
}
/**
* <p>
* The maximum number of results to return in this request.
* </p>
*
* @return The maximum number of results to return in this request.
*/
public Integer getMaxResults() {
return this.maxResults;
}
/**
* <p>
* The maximum number of results to return in this request.
* </p>
*
* @param maxResults
* The maximum number of results to return in this request.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ListJobsRequest withMaxResults(Integer maxResults) {
setMaxResults(maxResults);
return this;
}
/**
* <p>
* A token generated by DataBrew that specifies where to continue pagination if a previous request was truncated. To
* get the next set of pages, pass in the NextToken value from the response object of the previous page call.
* </p>
*
* @param nextToken
* A token generated by DataBrew that specifies where to continue pagination if a previous request was
* truncated. To get the next set of pages, pass in the NextToken value from the response object of the
* previous page call.
*/
public void setNextToken(String nextToken) {
this.nextToken = nextToken;
}
/**
* <p>
* A token generated by DataBrew that specifies where to continue pagination if a previous request was truncated. To
* get the next set of pages, pass in the NextToken value from the response object of the previous page call.
* </p>
*
* @return A token generated by DataBrew that specifies where to continue pagination if a previous request was
* truncated. To get the next set of pages, pass in the NextToken value from the response object of the
* previous page call.
*/
public String getNextToken() {
return this.nextToken;
}
/**
* <p>
* A token generated by DataBrew that specifies where to continue pagination if a previous request was truncated. To
* get the next set of pages, pass in the NextToken value from the response object of the previous page call.
* </p>
*
* @param nextToken
* A token generated by DataBrew that specifies where to continue pagination if a previous request was
* truncated. To get the next set of pages, pass in the NextToken value from the response object of the
* previous page call.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ListJobsRequest withNextToken(String nextToken) {
setNextToken(nextToken);
return this;
}
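// Pagination sketch (illustrative; client construction and the ListJobsResult accessors
// follow the standard AWS SDK for Java v1 pattern and are assumed here, not defined in
// this file):
//   AWSGlueDataBrew client = AWSGlueDataBrewClientBuilder.defaultClient();
//   ListJobsRequest request = new ListJobsRequest().withProjectName("my-project").withMaxResults(50);
//   String token = null;
//   do {
//       ListJobsResult result = client.listJobs(request.withNextToken(token));
//       result.getJobs().forEach(job -> System.out.println(job.getName()));
//       token = result.getNextToken();
//   } while (token != null);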
/**
* <p>
* The name of a project. Using this parameter indicates to return only those jobs that are associated with the
* specified project.
* </p>
*
* @param projectName
* The name of a project. Using this parameter indicates to return only those jobs that are associated with
* the specified project.
*/
public void setProjectName(String projectName) {
this.projectName = projectName;
}
/**
* <p>
* The name of a project. Using this parameter indicates to return only those jobs that are associated with the
* specified project.
* </p>
*
* @return The name of a project. Using this parameter indicates to return only those jobs that are associated with
* the specified project.
*/
public String getProjectName() {
return this.projectName;
}
/**
* <p>
* The name of a project. Using this parameter indicates to return only those jobs that are associated with the
* specified project.
* </p>
*
* @param projectName
* The name of a project. Using this parameter indicates to return only those jobs that are associated with
* the specified project.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ListJobsRequest withProjectName(String projectName) {
setProjectName(projectName);
return this;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getDatasetName() != null)
sb.append("DatasetName: ").append(getDatasetName()).append(",");
if (getMaxResults() != null)
sb.append("MaxResults: ").append(getMaxResults()).append(",");
if (getNextToken() != null)
sb.append("NextToken: ").append(getNextToken()).append(",");
if (getProjectName() != null)
sb.append("ProjectName: ").append(getProjectName());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof ListJobsRequest == false)
return false;
ListJobsRequest other = (ListJobsRequest) obj;
if (other.getDatasetName() == null ^ this.getDatasetName() == null)
return false;
if (other.getDatasetName() != null && other.getDatasetName().equals(this.getDatasetName()) == false)
return false;
if (other.getMaxResults() == null ^ this.getMaxResults() == null)
return false;
if (other.getMaxResults() != null && other.getMaxResults().equals(this.getMaxResults()) == false)
return false;
if (other.getNextToken() == null ^ this.getNextToken() == null)
return false;
if (other.getNextToken() != null && other.getNextToken().equals(this.getNextToken()) == false)
return false;
if (other.getProjectName() == null ^ this.getProjectName() == null)
return false;
if (other.getProjectName() != null && other.getProjectName().equals(this.getProjectName()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getDatasetName() == null) ? 0 : getDatasetName().hashCode());
hashCode = prime * hashCode + ((getMaxResults() == null) ? 0 : getMaxResults().hashCode());
hashCode = prime * hashCode + ((getNextToken() == null) ? 0 : getNextToken().hashCode());
hashCode = prime * hashCode + ((getProjectName() == null) ? 0 : getProjectName().hashCode());
return hashCode;
}
@Override
public ListJobsRequest clone() {
return (ListJobsRequest) super.clone();
}
}
|
|
/*
* Copyright (c) 2010-2018 Evolveum
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
*
*/
package com.evolveum.midpoint.model.intest.manual;
import static org.testng.AssertJUnit.assertFalse;
import static org.testng.AssertJUnit.assertEquals;
import static org.testng.AssertJUnit.assertNotNull;
import static org.testng.AssertJUnit.assertNull;
import java.io.File;
import com.evolveum.midpoint.prism.PrismPropertyValue;
import com.evolveum.prism.xml.ns._public.types_3.RawType;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.annotation.DirtiesContext.ClassMode;
import org.springframework.test.context.ContextConfiguration;
import org.testng.annotations.Listeners;
import org.testng.annotations.Test;
import com.evolveum.midpoint.prism.PrismObject;
import com.evolveum.midpoint.prism.query.FilterUtils;
import com.evolveum.midpoint.prism.query.ObjectQuery;
import com.evolveum.midpoint.prism.query.builder.QueryBuilder;
import com.evolveum.midpoint.schema.SearchResultList;
import com.evolveum.midpoint.schema.constants.SchemaConstants;
import com.evolveum.midpoint.schema.result.OperationResult;
import com.evolveum.midpoint.task.api.Task;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ActivationStatusType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.OperationResultStatusType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.PendingOperationExecutionStatusType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.PendingOperationType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ResourceType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ShadowKindType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ShadowType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.TaskType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.UserType;
import com.evolveum.prism.xml.ns._public.query_3.SearchFilterType;
/**
* MID-4347
*
* @author Radovan Semancik
*/
@ContextConfiguration(locations = {"classpath:ctx-model-intest-test-main.xml"})
@DirtiesContext(classMode = ClassMode.AFTER_CLASS)
@Listeners({ com.evolveum.midpoint.tools.testng.AlphabeticalMethodInterceptor.class })
public class TestSemiManualGroupingProposed extends TestSemiManualGrouping {
private static final String USER_BIGMOUTH_NAME = "BIGMOUTH";
private static final String USER_BIGMOUTH_FULLNAME = "Shouty Bigmouth";
private String userBigmouthOid;
private String accountBigmouthOid;
private String bigmouthLastCaseOid;
@Override
protected File getResourceFile() {
return RESOURCE_SEMI_MANUAL_GROUPING_PROPOSED_FILE;
}
@Override
public void initSystem(Task initTask, OperationResult initResult) throws Exception {
super.initSystem(initTask, initResult);
// More resources. Not really used. They are here just to confuse the propagation task.
initDummyResourcePirate(RESOURCE_DUMMY_RED_NAME,
RESOURCE_DUMMY_RED_FILE, RESOURCE_DUMMY_RED_OID, initTask, initResult);
initDummyResourcePirate(RESOURCE_DUMMY_BLUE_NAME,
RESOURCE_DUMMY_BLUE_FILE, RESOURCE_DUMMY_BLUE_OID, initTask, initResult);
}
@Test
public void test020ResourcesSanity() throws Exception {
final String TEST_NAME = "test020ResourcesSanity";
displayTestTitle(TEST_NAME);
Task task = createTask(TEST_NAME);
OperationResult result = task.getResult();
SearchResultList<PrismObject<ResourceType>> resources = repositoryService.searchObjects(ResourceType.class, null, null, result);
display("Resources", resources.size() + ": " + resources);
assertEquals("Unexpected number of resources", 3, resources.size());
ObjectQuery query = QueryBuilder.queryFor(ResourceType.class, prismContext)
.item("extension","provisioning").eq("propagated")
.build();
SearchResultList<PrismObject<ResourceType>> propagatedResources = repositoryService.searchObjects(ResourceType.class, query, null, result);
display("Propagated resources", propagatedResources.size() + ": " + propagatedResources);
assertEquals("Unexpected number of propagated resources", 1, propagatedResources.size());
}
@Override
protected void assertNewPropagationTask() throws Exception {
OperationResult result = new OperationResult("assertNewPropagationTask");
PrismObject<TaskType> propTask = repositoryService.getObject(TaskType.class, getPropagationTaskOid(), null, result);
display("Propagation task (new)", propTask);
SearchFilterType filterType = propTask.asObjectable().getObjectRef().getFilter();
display("Propagation task filter", filterType);
assertFalse("Empty filter in propagation task", FilterUtils.isFilterEmpty(filterType));
}
@Override
protected void assertFinishedPropagationTask(Task finishedTask, OperationResultStatusType expectedStatus) {
super.assertFinishedPropagationTask(finishedTask, expectedStatus);
SearchFilterType filterType = finishedTask.getTaskType().getObjectRef().getFilter();
display("Propagation task filter", filterType);
assertEquals("Unexpected propagation task progress", 1, finishedTask.getProgress());
}
/**
* The resource has a caseIgnore matching rule for the username. Make sure everything
* works well with an uppercase username.
* MID-4393
*/
@Test
public void test500AssignBigmouthRoleOne() throws Exception {
final String TEST_NAME = "test500AssignBigmouthRoleOne";
displayTestTitle(TEST_NAME);
// GIVEN
Task task = createTask(TEST_NAME);
OperationResult result = task.getResult();
PrismObject<UserType> userBefore = createUser(USER_BIGMOUTH_NAME, USER_BIGMOUTH_FULLNAME, true);
userBigmouthOid = addObject(userBefore);
display("User before", userBefore);
// WHEN
displayWhen(TEST_NAME);
assignRole(userBigmouthOid, getRoleOneOid(), task, result);
// THEN
displayThen(TEST_NAME);
display("result", result);
bigmouthLastCaseOid = assertInProgress(result);
PrismObject<UserType> userAfter = getUser(userBigmouthOid);
display("User after", userAfter);
accountBigmouthOid = getSingleLinkOid(userAfter);
PendingOperationExecutionStatusType executionStage = PendingOperationExecutionStatusType.EXECUTION_PENDING;
PrismObject<ShadowType> shadowRepo = repositoryService.getObject(ShadowType.class, accountBigmouthOid, null, result);
display("Repo shadow", shadowRepo);
PendingOperationType pendingOperation = assertSinglePendingOperation(shadowRepo, null, null, executionStage);
assertNotNull("No ID in pending operation", pendingOperation.getId());
assertAttribute(shadowRepo, ATTR_USERNAME_QNAME,
new RawType(new PrismPropertyValue(USER_BIGMOUTH_NAME.toLowerCase()), ATTR_USERNAME_QNAME, prismContext));
assertAttributeFromCache(shadowRepo, ATTR_FULLNAME_QNAME,
new RawType(new PrismPropertyValue(USER_BIGMOUTH_FULLNAME), ATTR_FULLNAME_QNAME, prismContext));
assertShadowActivationAdministrativeStatusFromCache(shadowRepo, ActivationStatusType.ENABLED);
assertShadowExists(shadowRepo, false);
assertNoShadowPassword(shadowRepo);
PrismObject<ShadowType> shadowModel = modelService.getObject(ShadowType.class,
accountBigmouthOid, null, task, result);
display("Model shadow", shadowModel);
ShadowType shadowTypeProvisioning = shadowModel.asObjectable();
assertShadowName(shadowModel, USER_BIGMOUTH_NAME);
assertEquals("Wrong kind (provisioning)", ShadowKindType.ACCOUNT, shadowTypeProvisioning.getKind());
assertAttribute(shadowModel, ATTR_USERNAME_QNAME, USER_BIGMOUTH_NAME.toLowerCase());
assertAttributeFromCache(shadowModel, ATTR_FULLNAME_QNAME, USER_BIGMOUTH_FULLNAME);
assertShadowActivationAdministrativeStatusFromCache(shadowModel, ActivationStatusType.ENABLED);
assertShadowExists(shadowModel, false);
assertNoShadowPassword(shadowModel);
PendingOperationType pendingOperationType = assertSinglePendingOperation(shadowModel, null, null, executionStage);
}
/**
* MID-4393
*/
@Test
public void test502RunPropagation() throws Exception {
final String TEST_NAME = "test502RunPropagation";
displayTestTitle(TEST_NAME);
// GIVEN
Task task = createTask(TEST_NAME);
OperationResult result = task.getResult();
clockForward("PT20M");
// WHEN
displayWhen(TEST_NAME);
runPropagation();
// THEN
displayThen(TEST_NAME);
assertSuccess(result);
PendingOperationExecutionStatusType executionStage = PendingOperationExecutionStatusType.EXECUTING;
PrismObject<ShadowType> shadowRepo = repositoryService.getObject(ShadowType.class, accountBigmouthOid, null, result);
display("Repo shadow", shadowRepo);
PendingOperationType pendingOperation = assertSinglePendingOperation(shadowRepo, null, null, executionStage);
assertNotNull("No ID in pending operation", pendingOperation.getId());
assertAttribute(shadowRepo, ATTR_USERNAME_QNAME,
new RawType(new PrismPropertyValue(USER_BIGMOUTH_NAME.toLowerCase()), ATTR_USERNAME_QNAME, prismContext));
assertAttributeFromCache(shadowRepo, ATTR_FULLNAME_QNAME,
new RawType(new PrismPropertyValue(USER_BIGMOUTH_FULLNAME), ATTR_FULLNAME_QNAME, prismContext));
assertShadowActivationAdministrativeStatusFromCache(shadowRepo, ActivationStatusType.ENABLED);
assertShadowExists(shadowRepo, false);
assertNoShadowPassword(shadowRepo);
PrismObject<ShadowType> shadowModel = modelService.getObject(ShadowType.class,
accountBigmouthOid, null, task, result);
display("Model shadow", shadowModel);
ShadowType shadowTypeProvisioning = shadowModel.asObjectable();
assertShadowName(shadowModel, USER_BIGMOUTH_NAME);
assertEquals("Wrong kind (provisioning)", ShadowKindType.ACCOUNT, shadowTypeProvisioning.getKind());
assertAttribute(shadowModel, ATTR_USERNAME_QNAME, USER_BIGMOUTH_NAME.toLowerCase());
assertAttributeFromCache(shadowModel, ATTR_FULLNAME_QNAME, USER_BIGMOUTH_FULLNAME);
assertShadowActivationAdministrativeStatusFromCache(shadowModel, ActivationStatusType.ENABLED);
assertShadowExists(shadowModel, false);
assertNoShadowPassword(shadowModel);
PendingOperationType pendingOperationType = assertSinglePendingOperation(shadowModel, null, null, executionStage);
String pendingOperationRef = pendingOperationType.getAsynchronousOperationReference();
assertNotNull("No async reference in pending operation", pendingOperationRef);
assertCase(pendingOperationRef, SchemaConstants.CASE_STATE_OPEN);
}
// Note: we have left bigmouth here half-created with an open case. It should not do any harm.
}
|
|
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.snapshots.mockstore;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.metadata.SnapshotId;
import org.elasticsearch.common.blobstore.BlobContainer;
import org.elasticsearch.common.blobstore.BlobMetaData;
import org.elasticsearch.common.blobstore.BlobPath;
import org.elasticsearch.common.blobstore.BlobStore;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.snapshots.IndexShardRepository;
import org.elasticsearch.repositories.RepositoryName;
import org.elasticsearch.repositories.RepositorySettings;
import org.elasticsearch.repositories.fs.FsRepository;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
import java.nio.file.Path;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicLong;
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
/**
* A mock snapshot repository based on {@link FsRepository} that can inject random I/O
* failures and block control-file, data-file or initialization operations until unblocked.
*/
public class MockRepository extends FsRepository {
private final AtomicLong failureCounter = new AtomicLong();
public long getFailureCount() {
return failureCounter.get();
}
private final double randomControlIOExceptionRate;
private final double randomDataFileIOExceptionRate;
private final long waitAfterUnblock;
private final MockBlobStore mockBlobStore;
private final String randomPrefix;
private volatile boolean blockOnInitialization;
private volatile boolean blockOnControlFiles;
private volatile boolean blockOnDataFiles;
private volatile boolean blocked = false;
@Inject
public MockRepository(RepositoryName name, RepositorySettings repositorySettings, IndexShardRepository indexShardRepository, ClusterService clusterService, Environment environment) throws IOException {
super(name, overrideSettings(repositorySettings, clusterService), indexShardRepository, environment);
randomControlIOExceptionRate = repositorySettings.settings().getAsDouble("random_control_io_exception_rate", 0.0);
randomDataFileIOExceptionRate = repositorySettings.settings().getAsDouble("random_data_file_io_exception_rate", 0.0);
blockOnControlFiles = repositorySettings.settings().getAsBoolean("block_on_control", false);
blockOnDataFiles = repositorySettings.settings().getAsBoolean("block_on_data", false);
blockOnInitialization = repositorySettings.settings().getAsBoolean("block_on_init", false);
randomPrefix = repositorySettings.settings().get("random", "default");
waitAfterUnblock = repositorySettings.settings().getAsLong("wait_after_unblock", 0L);
logger.info("starting mock repository with random prefix " + randomPrefix);
mockBlobStore = new MockBlobStore(super.blobStore());
}
@Override
public void initializeSnapshot(SnapshotId snapshotId, List<String> indices, MetaData metaData) {
if (blockOnInitialization ) {
blockExecution();
}
super.initializeSnapshot(snapshotId, indices, metaData);
}
private static RepositorySettings overrideSettings(RepositorySettings repositorySettings, ClusterService clusterService) {
if (repositorySettings.settings().getAsBoolean("localize_location", false)) {
return new RepositorySettings(
repositorySettings.globalSettings(),
localizeLocation(repositorySettings.settings(), clusterService));
} else {
return repositorySettings;
}
}
private static Settings localizeLocation(Settings settings, ClusterService clusterService) {
Path location = PathUtils.get(settings.get("location"));
location = location.resolve(clusterService.localNode().getId());
return settingsBuilder().put(settings).put("location", location.toAbsolutePath()).build();
}
private void addFailure() {
failureCounter.incrementAndGet();
}
@Override
protected void doStop() {
unblock();
super.doStop();
}
@Override
protected BlobStore blobStore() {
return mockBlobStore;
}
public void unblock() {
unblockExecution();
}
public void blockOnDataFiles(boolean blocked) {
blockOnDataFiles = blocked;
}
public void blockOnControlFiles(boolean blocked) {
blockOnControlFiles = blocked;
}
public synchronized void unblockExecution() {
if (blocked) {
blocked = false;
// Clear the blocking flags so we will not try to block again
blockOnDataFiles = false;
blockOnControlFiles = false;
blockOnInitialization = false;
this.notifyAll();
}
}
public boolean blocked() {
return blocked;
}
private synchronized boolean blockExecution() {
logger.debug("Blocking execution");
boolean wasBlocked = false;
try {
while (blockOnDataFiles || blockOnControlFiles || blockOnInitialization) {
blocked = true;
this.wait();
wasBlocked = true;
}
} catch (InterruptedException ex) {
Thread.currentThread().interrupt();
}
logger.debug("Unblocking execution");
return wasBlocked;
}
public class MockBlobStore extends BlobStoreWrapper {
ConcurrentMap<String, AtomicLong> accessCounts = new ConcurrentHashMap<>();
private long incrementAndGet(String path) {
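// Thread-safe per-path access counter: the first caller may win the putIfAbsent race and
// return 1 directly; all other callers increment the AtomicLong that is already present.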
AtomicLong value = accessCounts.get(path);
if (value == null) {
value = accessCounts.putIfAbsent(path, new AtomicLong(1));
}
if (value != null) {
return value.incrementAndGet();
}
return 1;
}
public MockBlobStore(BlobStore delegate) {
super(delegate);
}
@Override
public BlobContainer blobContainer(BlobPath path) {
return new MockBlobContainer(super.blobContainer(path));
}
private class MockBlobContainer extends BlobContainerWrapper {
private MessageDigest digest;
private boolean shouldFail(String blobName, double probability) {
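// Deterministic failure decision: the blob path, the per-repository random prefix and a
// per-path access counter are combined and hashed (MD5); the access fails when the hash
// value falls below probability * Integer.MAX_VALUE, so a given configuration and access
// order reproduce the same failure pattern.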
if (probability > 0.0) {
String path = path().add(blobName).buildAsString("/") + "/" + randomPrefix;
path += "/" + incrementAndGet(path);
logger.info("checking [{}] [{}]", path, Math.abs(hashCode(path)) < Integer.MAX_VALUE * probability);
return Math.abs(hashCode(path)) < Integer.MAX_VALUE * probability;
} else {
return false;
}
}
private int hashCode(String path) {
try {
digest = MessageDigest.getInstance("MD5");
byte[] bytes = digest.digest(path.getBytes("UTF-8"));
int i = 0;
return ((bytes[i++] & 0xFF) << 24) | ((bytes[i++] & 0xFF) << 16)
| ((bytes[i++] & 0xFF) << 8) | (bytes[i++] & 0xFF);
} catch (NoSuchAlgorithmException | UnsupportedEncodingException ex) {
throw new ElasticsearchException("cannot calculate hashcode", ex);
}
}
private void maybeIOExceptionOrBlock(String blobName) throws IOException {
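// Blobs whose names start with "__" are treated as snapshot data files; everything else
// (index and snapshot metadata) is treated as a control file and uses the control-file
// failure rate and blocking flag instead.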
if (blobName.startsWith("__")) {
if (shouldFail(blobName, randomDataFileIOExceptionRate)) {
logger.info("throwing random IOException for file [{}] at path [{}]", blobName, path());
addFailure();
throw new IOException("Random IOException");
} else if (blockOnDataFiles) {
logger.info("blocking I/O operation for file [{}] at path [{}]", blobName, path());
if (blockExecution() && waitAfterUnblock > 0) {
try {
// Delay operation after unblocking
// So, we can start node shutdown while this operation is still running.
Thread.sleep(waitAfterUnblock);
} catch (InterruptedException ex) {
//
}
}
}
} else {
if (shouldFail(blobName, randomControlIOExceptionRate)) {
logger.info("throwing random IOException for file [{}] at path [{}]", blobName, path());
addFailure();
throw new IOException("Random IOException");
} else if (blockOnControlFiles) {
logger.info("blocking I/O operation for file [{}] at path [{}]", blobName, path());
if (blockExecution() && waitAfterUnblock > 0) {
try {
// Delay operation after unblocking
// So, we can start node shutdown while this operation is still running.
Thread.sleep(waitAfterUnblock);
} catch (InterruptedException ex) {
//
}
}
}
}
}
public MockBlobContainer(BlobContainer delegate) {
super(delegate);
}
@Override
public boolean blobExists(String blobName) {
return super.blobExists(blobName);
}
@Override
public InputStream openInput(String name) throws IOException {
maybeIOExceptionOrBlock(name);
return super.openInput(name);
}
@Override
public void deleteBlob(String blobName) throws IOException {
maybeIOExceptionOrBlock(blobName);
super.deleteBlob(blobName);
}
@Override
public void deleteBlobsByPrefix(String blobNamePrefix) throws IOException {
maybeIOExceptionOrBlock(blobNamePrefix);
super.deleteBlobsByPrefix(blobNamePrefix);
}
@Override
public Map<String, BlobMetaData> listBlobs() throws IOException {
maybeIOExceptionOrBlock("");
return super.listBlobs();
}
@Override
public Map<String, BlobMetaData> listBlobsByPrefix(String blobNamePrefix) throws IOException {
maybeIOExceptionOrBlock(blobNamePrefix);
return super.listBlobsByPrefix(blobNamePrefix);
}
@Override
public void move(String sourceBlob, String targetBlob) throws IOException {
maybeIOExceptionOrBlock(targetBlob);
super.move(sourceBlob, targetBlob);
}
@Override
public OutputStream createOutput(String blobName) throws IOException {
maybeIOExceptionOrBlock(blobName);
return super.createOutput(blobName);
}
}
}
}
|
|
package com.gdn.venice.facade;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import javax.ejb.EJBException;
import javax.ejb.Stateless;
import javax.ejb.TransactionAttribute;
import javax.ejb.TransactionAttributeType;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.PersistenceContextType;
import javax.persistence.Query;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.XMLConfiguration;
import org.apache.log4j.Logger;
import com.gdn.venice.facade.callback.SessionCallback;
import com.gdn.venice.facade.finder.FinderReturn;
import com.gdn.venice.persistence.VenPaymentType;
import com.djarum.raf.utilities.JPQLAdvancedQueryCriteria;
import com.djarum.raf.utilities.JPQLQueryStringBuilder;
import com.djarum.raf.utilities.Log4jLoggerFactory;
/**
* Session Bean implementation class VenPaymentTypeSessionEJBBean
*
* <p>
* <b>author:</b> <a href="mailto:[email protected]">David Forden</a>
* <p>
* <b>version:</b> 1.0
* <p>
* <b>since:</b> 2011
*
*/
@Stateless(mappedName = "VenPaymentTypeSessionEJBBean")
public class VenPaymentTypeSessionEJBBean implements VenPaymentTypeSessionEJBRemote,
VenPaymentTypeSessionEJBLocal {
/*
* Implements an IOC model for pre/post callbacks to persist, merge, and
* remove operations. The onPrePersist, onPostPersist, onPreMerge,
* onPostMerge, onPreRemove and onPostRemove operations must be implemented
* by the callback class.
*/
private String _sessionCallbackClassName = null;
// A reference to the callback object that has been instantiated
private SessionCallback _callback = null;
protected static Logger _log = null;
// The configuration file to use
private String _configFile = System.getenv("VENICE_HOME")
+ "/conf/module-config.xml";
//The binding array used when binding variables into a JPQL query
private Object[] bindingArray = null;
@PersistenceContext(unitName = "GDN-Venice-Persistence", type = PersistenceContextType.TRANSACTION)
protected EntityManager em;
/**
* Default constructor.
*/
public VenPaymentTypeSessionEJBBean() {
super();
Log4jLoggerFactory loggerFactory = new Log4jLoggerFactory();
_log = loggerFactory
.getLog4JLogger("com.gdn.venice.facade.VenPaymentTypeSessionEJBBean");
// If the configuration is successful then instantiate the callback
if (this.configure())
this.instantiateTriggerCallback();
}
/**
* Reads the venice configuration file and configures the EJB's
* triggerCallbackClassName
*/
private Boolean configure() {
_log.debug("Venice Configuration File:" + _configFile);
try {
XMLConfiguration config = new XMLConfiguration(_configFile);
/*
* Get the index entry for the adapter configuration from the
* configuration file - there will be multiple adapter
* configurations
*/
@SuppressWarnings({ "rawtypes" })
List callbacks = config
.getList("sessionBeanConfig.callback.[@name]");
Integer beanConfigIndex = Integer.MAX_VALUE;
@SuppressWarnings("rawtypes")
Iterator i = callbacks.iterator();
while (i.hasNext()) {
String beanName = (String) i.next();
if (this.getClass().getSimpleName().equals(beanName)) {
beanConfigIndex = callbacks.indexOf(beanName);
_log.debug("Bean configuration for " + beanName
+ " found at " + beanConfigIndex);
}
}
this._sessionCallbackClassName = config
.getString("sessionBeanConfig.callback(" + beanConfigIndex + ").[@class]");
_log.debug("Loaded configuration for _sessionCallbackClassName:"
+ _sessionCallbackClassName);
} catch (ConfigurationException e) {
_log.error("A ConfigurationException occured when processing the configuration file"
+ e.getMessage());
e.printStackTrace();
return Boolean.FALSE;
}
return Boolean.TRUE;
}
/**
* Instantiates the trigger callback handler class
*
* @return
*/
Boolean instantiateTriggerCallback() {
if (_sessionCallbackClassName != null
&& !_sessionCallbackClassName.isEmpty())
try {
Class<?> c = Class.forName(_sessionCallbackClassName);
_callback = (SessionCallback) c.newInstance();
} catch (ClassNotFoundException e) {
_log.error("A ClassNotFoundException occured when trying to instantiate:"
+ this._sessionCallbackClassName);
e.printStackTrace();
return Boolean.FALSE;
} catch (InstantiationException e) {
_log.error("A InstantiationException occured when trying to instantiate:"
+ this._sessionCallbackClassName);
e.printStackTrace();
return Boolean.FALSE;
} catch (IllegalAccessException e) {
_log.error("A IllegalAccessException occured when trying to instantiate:"
+ this._sessionCallbackClassName);
e.printStackTrace();
return Boolean.FALSE;
}
return Boolean.TRUE;
}
/*
* (non-Javadoc)
*
* @see
* com.gdn.venice.facade.VenPaymentTypeSessionEJBRemote#queryByRange(java.lang
* .String, int, int)
*/
@Override
@SuppressWarnings({ "unchecked" })
public List<VenPaymentType> queryByRange(String jpqlStmt, int firstResult,
int maxResults) {
Long startTime = System.currentTimeMillis();
_log.debug("queryByRange()");
Query query = null;
try {
query = em.createQuery(jpqlStmt);
if(this.bindingArray != null){
for(int i = 0; i < bindingArray.length; ++i){
if(bindingArray[i] != null){
query.setParameter(i+1, bindingArray[i]);
}
}
}
} catch (Exception e) {
_log.error("An exception occured when calling em.createQuery():"
+ e.getMessage());
throw new EJBException(e);
}
try {
if (firstResult > 0) {
query = query.setFirstResult(firstResult);
}
if (maxResults > 0) {
query = query.setMaxResults(maxResults);
}
} catch (Exception e) {
_log.error("An exception occured when accessing the result set of a query:"
+ e.getMessage());
throw new EJBException(e);
}
List<VenPaymentType> returnList = (List<VenPaymentType>)query.getResultList();
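// Clear the binding array so a subsequent query does not reuse stale parameter bindings.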
this.bindingArray = null;
Long endTime = System.currentTimeMillis();
Long duration = endTime - startTime;
_log.debug("queryByRange() duration:" + duration + "ms");
return returnList;
}
/*
* (non-Javadoc)
*
* @see
* com.gdn.venice.facade.VenPaymentTypeSessionEJBRemote#persistVenPaymentType(com
* .gdn.venice.persistence.VenPaymentType)
*/
@Override
@TransactionAttribute(TransactionAttributeType.REQUIRED)
public VenPaymentType persistVenPaymentType(VenPaymentType venPaymentType) {
Long startTime = System.currentTimeMillis();
_log.debug("persistVenPaymentType()");
// Call the onPrePersist() callback and throw an exception if it fails
if (this._callback != null) {
if (!this._callback.onPrePersist(venPaymentType)) {
_log.error("An onPrePersist callback operation failed for:"
+ this._sessionCallbackClassName);
throw new EJBException(
"An onPrePersist callback operation failed for:"
+ this._sessionCallbackClassName);
}
}
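// Check whether an entity with the same primary key already exists before persisting.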
VenPaymentType existingVenPaymentType = null;
if (venPaymentType != null && venPaymentType.getPaymentTypeId() != null) {
_log.debug("persistVenPaymentType:em.find()");
try {
existingVenPaymentType = em.find(VenPaymentType.class,
venPaymentType.getPaymentTypeId());
} catch (Exception e) {
_log.error("An exception occured when calling em.find():"
+ e.getMessage());
throw new EJBException(e);
}
}
if (existingVenPaymentType == null) {
_log.debug("persistVenPaymentType:em.persist()");
try {
em.persist(venPaymentType);
} catch (Exception e) {
_log.error("An exception occured when calling em.persist():"
+ e.getMessage());
throw new EJBException(e);
}
_log.debug("persistVenPaymentType:em.flush()");
try {
em.flush();
em.clear();
} catch (Exception e) {
_log.error("An exception occured when calling em.flush():"
+ e.getMessage());
throw new EJBException(e);
}
// Call the onPostPersist() callback and throw an exception if it fails
if (this._callback != null) {
if (!this._callback.onPostPersist(venPaymentType)) {
_log.error("An onPostPersist callback operation failed for:"
+ this._sessionCallbackClassName);
throw new EJBException(
"An onPostPersist callback operation failed for:"
+ this._sessionCallbackClassName);
}
}
Long endTime = System.currentTimeMillis();
Long duration = endTime - startTime;
_log.debug("persistVenPaymentType() duration:" + duration + "ms");
return venPaymentType;
} else {
throw new EJBException("VenPaymentType exists!. VenPaymentType = "
+ venPaymentType.getPaymentTypeId());
}
}
/*
* (non-Javadoc)
*
* @see
* com.gdn.venice.facade.VenPaymentTypeSessionEJBRemote#persistVenPaymentTypeList
* (java.util.List)
*/
@Override
@SuppressWarnings("rawtypes")
@TransactionAttribute(TransactionAttributeType.REQUIRED)
public ArrayList<VenPaymentType> persistVenPaymentTypeList(
List<VenPaymentType> venPaymentTypeList) {
_log.debug("persistVenPaymentTypeList()");
Iterator i = venPaymentTypeList.iterator();
while (i.hasNext()) {
this.persistVenPaymentType((VenPaymentType) i.next());
}
return (ArrayList<VenPaymentType>)venPaymentTypeList;
}
/*
* (non-Javadoc)
*
* @see
* com.gdn.venice.facade.VenPaymentTypeSessionEJBRemote#mergeVenPaymentType(com.
* gdn.venice.persistence.VenPaymentType)
*/
@Override
@TransactionAttribute(TransactionAttributeType.REQUIRED)
public VenPaymentType mergeVenPaymentType(VenPaymentType venPaymentType) {
Long startTime = System.currentTimeMillis();
_log.debug("mergeVenPaymentType()");
// Call the onPreMerge() callback and throw an exception if it fails
if (this._callback != null) {
if (!this._callback.onPreMerge(venPaymentType)) {
_log.error("An onPreMerge callback operation failed for:"
+ this._sessionCallbackClassName);
throw new EJBException(
"An onPreMerge callback operation failed for:"
+ this._sessionCallbackClassName);
}
}
VenPaymentType existing = null;
if (venPaymentType.getPaymentTypeId() != null){
_log.debug("mergeVenPaymentType:em.find()");
existing = em.find(VenPaymentType.class, venPaymentType.getPaymentTypeId());
}
if (existing == null) {
return this.persistVenPaymentType(venPaymentType);
} else {
_log.debug("mergeVenPaymentType:em.merge()");
try {
em.merge(venPaymentType);
} catch (Exception e) {
_log.error("An exception occured when calling em.merge():"
+ e.getMessage());
throw new EJBException(e);
}
_log.debug("mergeVenPaymentType:em.flush()");
try {
em.flush();
em.clear();
} catch (Exception e) {
_log.error("An exception occured when calling em.flush():"
+ e.getMessage());
throw new EJBException(e);
}
VenPaymentType newobject = em.find(VenPaymentType.class,
venPaymentType.getPaymentTypeId());
_log.debug("mergeVenPaymentType():em.refresh");
try {
em.refresh(newobject);
} catch (Exception e) {
_log.error("An exception occured when calling em.refresh():"
+ e.getMessage());
throw new EJBException(e);
}
// Call the onPostMerge() callback and throw an exception if it fails
if (this._callback != null) {
if (!this._callback.onPostMerge(newobject)) {
_log.error("An onPostMerge callback operation failed for:"
+ this._sessionCallbackClassName);
throw new EJBException(
"An onPostMerge callback operation failed for:"
+ this._sessionCallbackClassName);
}
}
Long endTime = System.currentTimeMillis();
Long duration = endTime - startTime;
_log.debug("mergeVenPaymentType() duration:" + duration + "ms");
return newobject;
}
}
/*
* (non-Javadoc)
*
* @see
* com.gdn.venice.facade.VenPaymentTypeSessionEJBRemote#mergeVenPaymentTypeList(
* java.util.List)
*/
@Override
@SuppressWarnings("rawtypes")
@TransactionAttribute(TransactionAttributeType.REQUIRED)
public ArrayList<VenPaymentType> mergeVenPaymentTypeList(
List<VenPaymentType> venPaymentTypeList) {
_log.debug("mergeVenPaymentTypeList()");
Iterator i = venPaymentTypeList.iterator();
while (i.hasNext()) {
this.mergeVenPaymentType((VenPaymentType) i.next());
}
return (ArrayList<VenPaymentType>)venPaymentTypeList;
}
/*
* (non-Javadoc)
*
* @see
* com.gdn.venice.facade.VenPaymentTypeSessionEJBRemote#removeVenPaymentType(com.
* gdn.venice.persistence.VenPaymentType)
*/
@Override
@TransactionAttribute(TransactionAttributeType.REQUIRED)
public void removeVenPaymentType(VenPaymentType venPaymentType) {
Long startTime = System.currentTimeMillis();
_log.debug("removeVenPaymentType()");
// Call the onPreRemove() callback and throw an exception if it fails
if (this._callback != null) {
if (!this._callback.onPreRemove(venPaymentType)) {
_log.error("An onPreRemove callback operation failed for:"
+ this._sessionCallbackClassName);
throw new EJBException(
"An onPreRemove callback operation failed for:"
+ this._sessionCallbackClassName);
}
}
_log.debug("removeVenPaymentType:em.find()");
venPaymentType = em.find(VenPaymentType.class, venPaymentType.getPaymentTypeId());
try {
_log.debug("removeVenPaymentType:em.remove()");
em.remove(venPaymentType);
} catch (Exception e) {
_log.error("An exception occured when calling em.remove():"
+ e.getMessage());
throw new EJBException(e);
}
// Call the onPostRemove() callback and throw an exception if it fails
if (this._callback != null) {
if (!this._callback.onPostRemove(venPaymentType)) {
_log.error("An onPostRemove callback operation failed for:"
+ this._sessionCallbackClassName);
throw new EJBException(
"An onPostRemove callback operation failed for:"
+ this._sessionCallbackClassName);
}
}
_log.debug("removeVenPaymentType:em.flush()");
em.flush();
em.clear();
Long endTime = System.currentTimeMillis();
Long duration = endTime - startTime;
_log.debug("removeVenPaymentType() duration:" + duration + "ms");
}
/*
* (non-Javadoc)
*
* @see
* com.gdn.venice.facade.VenPaymentTypeSessionEJBRemote#removeVenPaymentTypeList(
* java.util.List)
*/
@Override
@SuppressWarnings("rawtypes")
@TransactionAttribute(TransactionAttributeType.REQUIRED)
public void removeVenPaymentTypeList(List<VenPaymentType> venPaymentTypeList) {
_log.debug("removeVenPaymentTypeList()");
Iterator i = venPaymentTypeList.iterator();
while (i.hasNext()) {
this.removeVenPaymentType((VenPaymentType) i.next());
}
}
/*
* (non-Javadoc)
*
* @see
* com.gdn.venice.facade.VenPaymentTypeSessionEJBRemote#findByVenPaymentTypeLike(
* com.gdn.venice.persistence.VenPaymentType, int, int)
*/
@Override
@SuppressWarnings({ "rawtypes", "unchecked" })
public List<VenPaymentType> findByVenPaymentTypeLike(VenPaymentType venPaymentType,
JPQLAdvancedQueryCriteria criteria, int firstResult, int maxResults) {
Long startTime = System.currentTimeMillis();
_log.debug("findByVenPaymentTypeLike()");
JPQLQueryStringBuilder qb = new JPQLQueryStringBuilder(venPaymentType);
HashMap complexTypeBindings = new HashMap();
String stmt = qb.buildQueryString(complexTypeBindings, criteria);
if(criteria != null){
/*
* Get the binding array from the query builder and make
* it available to the queryByRange method
*/
this.bindingArray = qb.getBindingArray();
for(int i = 0; i < qb.getBindingArray().length; i++){
System.out.println("Bindings:" + i + ":" + qb.getBindingArray()[i]);
}
List<VenPaymentType> venPaymentTypeList = this.queryByRange(stmt, firstResult, maxResults);
Long endTime = System.currentTimeMillis();
Long duration = endTime - startTime;
_log.debug("findByVenPaymentTypeLike() duration:" + duration + "ms");
return venPaymentTypeList;
}else{
String errMsg = "A query has been initiated with null criteria.";
_log.error(errMsg);
throw new EJBException(errMsg);
}
}
/*
* (non-Javadoc)
*
* @see
* com.gdn.venice.facade.VenPaymentTypeSessionEJBRemote#findByVenPaymentTypeLikeFR(
* com.gdn.venice.persistence.VenPaymentType, int, int)
*/
@Override
@SuppressWarnings({ "rawtypes", "unchecked" })
public FinderReturn findByVenPaymentTypeLikeFR(VenPaymentType venPaymentType,
JPQLAdvancedQueryCriteria criteria, int firstResult, int maxResults) {
Long startTime = System.currentTimeMillis();
_log.debug("findByVenPaymentTypeLikeFR()");
JPQLQueryStringBuilder qb = new JPQLQueryStringBuilder(venPaymentType);
HashMap complexTypeBindings = new HashMap();
String stmt = qb.buildQueryString(complexTypeBindings, criteria);
if(criteria != null){
/*
* Get the binding array from the query builder and make
* it available to the queryByRange method
*/
this.bindingArray = qb.getBindingArray();
for(int i = 0; i < qb.getBindingArray().length; i++){
System.out.println("Bindings:" + i + ":" + qb.getBindingArray()[i]);
}
//Set the finder return object with the count of the total query rows
FinderReturn fr = new FinderReturn();
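// Derive a count query from the generated JPQL statement to obtain the total number of matching rows.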
String countStmt = "select count(o) " + stmt.substring(stmt.indexOf("from"));
Query query = null;
try {
query = em.createQuery(countStmt);
if(this.bindingArray != null){
for(int i = 0; i < bindingArray.length; ++i){
if(bindingArray[i] != null){
query.setParameter(i+1, bindingArray[i]);
}
}
}
Long totalRows = (Long)query.getSingleResult();
fr.setNumQueryRows(totalRows);
} catch (Exception e) {
_log.error("An exception occured when calling em.createQuery():"
+ e.getMessage());
throw new EJBException(e);
}
//Set the finder return object with the query list
fr.setResultList(this.queryByRange(stmt, firstResult, maxResults));
Long endTime = System.currentTimeMillis();
Long duration = endTime - startTime;
_log.debug("findByVenPaymentTypeLikeFR() duration:" + duration + "ms");
return fr;
}else{
String errMsg = "A query has been initiated with null criteria.";
_log.error(errMsg);
throw new EJBException(errMsg);
}
}
}
|
|
/**
* PrismTech licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with the
* License and with the PrismTech Vortex product. You may obtain a copy of the
* License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License and README for the specific language governing permissions and
* limitations under the License.
*/
package vortex.demo;
import org.apache.commons.lang3.StringUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.storage.StorageLevel;
import org.apache.spark.streaming.Duration;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaPairDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.apache.spark.streaming.twitter.TwitterUtils;
import org.omg.dds.pub.DataWriter;
import org.omg.dds.topic.Topic;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import scala.Tuple2;
import twitter4j.Status;
import java.io.File;
import java.nio.ByteBuffer;
import java.nio.CharBuffer;
import java.nio.charset.Charset;
import java.nio.charset.CharsetEncoder;
import java.util.Arrays;
import java.util.concurrent.atomic.AtomicReference;
import static vortex.commons.util.VConfig.DefaultEntities.defaultDomainParticipant;
import static vortex.commons.util.VConfig.DefaultEntities.defaultPub;
public class VortexSparkTwitterDemo {
private static final Logger LOG = LoggerFactory.getLogger("vortex.demo");
private static final CharsetEncoder encoder = Charset.forName("US-ASCII").newEncoder();
private static AtomicReference<DataWriter<TopTenHashtagsType>> vortexWriter =
new AtomicReference<>(null);
private static final String[] FILTER = {"iot"};
private static final String KEY = StringUtils.join(FILTER, ";");
public static void main(String[] args) {
// URL of the Spark cluster
final String sparkURL = "local[4]"; // use four threads on the local machine
// The directory that will be used for checkpointing
final String defaultCheckpointDir = System.getProperty("java.io.tmpdir") + File.separator + "spark";
final String checkPointDir =
System.getProperty("vortex.spark.checkpointdir", defaultCheckpointDir);
if (!configureTwitterCredentials()) {
LOG.info("Unable to configure Twitter credentials exiting.");
System.exit(1);
}
// Configure Spark and setup the Spark streaming context
final SparkConf conf = new SparkConf().setAppName("VortexSparkTwitterDemo").setMaster(sparkURL);
final Duration batchDuration = new Duration(1000);
final JavaStreamingContext streamingContext = new JavaStreamingContext(conf, batchDuration);
streamingContext.checkpoint(checkPointDir);
// Create a stream of tweets filtered by the FILTER keywords (currently just "iot").
final JavaDStream<Status> tweets
= TwitterUtils.createStream(
streamingContext, FILTER, StorageLevel.MEMORY_ONLY());
final JavaDStream<String> statuses = tweets.map(new Function<Status, String>() {
@Override
public String call(Status status) throws Exception {
return status.getText();
}
});
final JavaDStream<String> words = statuses.flatMap(new FlatMapFunction<String, String>() {
@Override
public Iterable<String> call(String s) throws Exception {
return Arrays.asList(s.split(" "));
}
});
final JavaDStream<String> hashtags = words.filter(new Function<String, Boolean>() {
@Override
public Boolean call(String word) throws Exception {
return word.startsWith("#");
}
});
final JavaPairDStream<String, Integer> tuples = hashtags.mapToPair(new PairFunction<String, String, Integer>() {
@Override
public Tuple2<String, Integer> call(String s) throws Exception {
return new Tuple2<String, Integer>(s, 1);
}
});
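// Count hashtag occurrences over a 5-minute window sliding every second; the second (inverse) function subtracts counts for data leaving the window.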
final JavaPairDStream<String, Integer> counts = tuples.reduceByKeyAndWindow(
new Function2<Integer, Integer, Integer>() {
@Override
public Integer call(Integer i1, Integer i2) throws Exception {
return i1 + i2;
}
},
new Function2<Integer, Integer, Integer>() {
@Override
public Integer call(Integer i1, Integer i2) throws Exception {
return i1 - i2;
}
},
new Duration(60 * 5 * 1000),
new Duration(1 * 1000)
);
final JavaPairDStream<Integer, String> swappedCounts = counts.mapToPair(new PairFunction<Tuple2<String, Integer>, Integer, String>() {
@Override
public Tuple2<Integer, String> call(Tuple2<String, Integer> in) throws Exception {
return in.swap();
}
});
final JavaPairDStream<Integer, String> sortedCounts = swappedCounts.transformToPair(new Function<JavaPairRDD<Integer, String>, JavaPairRDD<Integer, String>>() {
@Override
public JavaPairRDD<Integer, String> call(JavaPairRDD<Integer, String> in) throws Exception {
return in.sortByKey(false);
}
});
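// For each batch, take the top ten hashtags, re-encode them as US-ASCII, print them and publish them to the Vortex "TopTenHashtags" topic.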
sortedCounts.foreach(new Function<JavaPairRDD<Integer, String>, Void>() {
@Override
public Void call(JavaPairRDD<Integer, String> rdd) throws Exception {
int idx = 0;
int[] count = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0};
String[] hashtag = {"", "", "", "", "", "", "", "", "", ""};
StringBuilder sb = new StringBuilder("\nTop 10 hashtags:\n");
for (Tuple2<Integer, String> next : rdd.take(10)) {
count[idx] = next._1();
try {
final ByteBuffer encoded = encoder.encode(CharBuffer.wrap(next._2()));
// Use only the encoded bytes; the backing array may be larger than the encoded content.
hashtag[idx] = new String(encoded.array(), encoded.arrayOffset(), encoded.limit(), "US-ASCII");
} catch (Exception ex) {
hashtag[idx] = "Encoding error";
}
idx++;
sb.append(next.toString()).append("\n");
}
final TopTenHashtagsType stats = new TopTenHashtagsType(KEY, count, hashtag);
System.out.println(sb.toString());
getWriter().write(stats);
return null;
}
});
streamingContext.start();
// Keep the driver alive until the streaming computation is stopped.
streamingContext.awaitTermination();
}
private static boolean configureTwitterCredentials() {
final String[] configKeys = {"consumerKey", "consumerSecret", "accessToken", "accessTokenSecret"};
for (String key : configKeys) {
final String value = System.getProperty(key);
if (StringUtils.isEmpty(value)) {
LOG.error("Error setting OAuth authentication - value for " + key + " not found.");
return false;
} else {
final String oauthKey = "twitter4j.oauth." + key;
System.setProperty(oauthKey, value);
LOG.info("\t{} set as {}", oauthKey, value);
}
}
return true;
}
private static DataWriter<TopTenHashtagsType> getWriter() {
if (vortexWriter.get() == null) {
final Topic<TopTenHashtagsType> topic = defaultDomainParticipant().createTopic("TopTenHashtags", TopTenHashtagsType.class);
final DataWriter<TopTenHashtagsType> writer = defaultPub().createDataWriter(topic);
if (!vortexWriter.compareAndSet(null, writer)) {
writer.close();
}
}
return vortexWriter.get();
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.types.inference;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.ValidationException;
import org.apache.flink.table.functions.FunctionKind;
import org.apache.flink.table.types.AbstractDataType;
import org.apache.flink.table.types.DataType;
import org.apache.flink.table.types.inference.utils.CallContextMock;
import org.apache.flink.table.types.inference.utils.FunctionDefinitionMock;
import org.apache.flink.table.types.utils.DataTypeFactoryMock;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import javax.annotation.Nullable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import static org.apache.flink.core.testutils.FlinkAssertions.anyCauseMatches;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
/** Base class for testing {@link InputTypeStrategy}. */
@RunWith(Parameterized.class)
public abstract class InputTypeStrategiesTestBase {
@Parameterized.Parameter public TestSpec testSpec;
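// Each parameterized run checks the generated signature, the inferred argument types, or an expected validation error, depending on which expectations the TestSpec defines.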
@Test
public void testStrategy() {
if (testSpec.expectedSignature != null) {
assertThat(generateSignature()).isEqualTo(testSpec.expectedSignature);
}
for (List<DataType> actualArgumentTypes : testSpec.actualArgumentTypes) {
if (testSpec.expectedErrorMessage != null) {
assertThatThrownBy(() -> runTypeInference(actualArgumentTypes))
.satisfies(
anyCauseMatches(
ValidationException.class, testSpec.expectedErrorMessage));
} else if (testSpec.expectedArgumentTypes != null) {
assertThat(runTypeInference(actualArgumentTypes).getExpectedArgumentTypes())
.isEqualTo(testSpec.expectedArgumentTypes);
}
}
}
// --------------------------------------------------------------------------------------------
private String generateSignature() {
final FunctionDefinitionMock functionDefinitionMock = new FunctionDefinitionMock();
functionDefinitionMock.functionKind = FunctionKind.SCALAR;
return TypeInferenceUtil.generateSignature(
createTypeInference(), "f", functionDefinitionMock);
}
private TypeInferenceUtil.Result runTypeInference(List<DataType> actualArgumentTypes) {
final FunctionDefinitionMock functionDefinitionMock = new FunctionDefinitionMock();
functionDefinitionMock.functionKind = FunctionKind.SCALAR;
final CallContextMock callContextMock = new CallContextMock();
callContextMock.typeFactory = new DataTypeFactoryMock();
callContextMock.functionDefinition = functionDefinitionMock;
callContextMock.argumentDataTypes = actualArgumentTypes;
callContextMock.argumentLiterals =
IntStream.range(0, actualArgumentTypes.size())
.mapToObj(i -> testSpec.literalPos != null && i == testSpec.literalPos)
.collect(Collectors.toList());
callContextMock.argumentValues =
IntStream.range(0, actualArgumentTypes.size())
.mapToObj(
i ->
(testSpec.literalPos != null && i == testSpec.literalPos)
? Optional.ofNullable(testSpec.literalValue)
: Optional.empty())
.collect(Collectors.toList());
callContextMock.argumentNulls =
IntStream.range(0, actualArgumentTypes.size())
.mapToObj(i -> false)
.collect(Collectors.toList());
callContextMock.name = "f";
callContextMock.outputDataType = Optional.empty();
final TypeInferenceUtil.SurroundingInfo surroundingInfo;
if (testSpec.surroundingStrategy != null) {
final TypeInference outerTypeInference =
TypeInference.newBuilder()
.inputTypeStrategy(testSpec.surroundingStrategy)
.outputTypeStrategy(TypeStrategies.MISSING)
.build();
surroundingInfo =
TypeInferenceUtil.SurroundingInfo.of(
"f_outer",
functionDefinitionMock,
outerTypeInference,
1,
0,
callContextMock.isGroupedAggregation);
} else {
surroundingInfo = null;
}
return TypeInferenceUtil.runTypeInference(
createTypeInference(), callContextMock, surroundingInfo);
}
private TypeInference createTypeInference() {
final TypeInference.Builder builder =
TypeInference.newBuilder()
.inputTypeStrategy(testSpec.strategy)
.outputTypeStrategy(TypeStrategies.explicit(DataTypes.BOOLEAN()));
if (testSpec.namedArguments != null) {
builder.namedArguments(testSpec.namedArguments);
}
if (testSpec.typedArguments != null) {
builder.typedArguments(testSpec.typedArguments);
}
return builder.build();
}
// --------------------------------------------------------------------------------------------
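/*
 * Illustrative usage sketch (assumed, not part of this class): a concrete subclass would
 * supply parameters such as
 *
 *   TestSpec.forStrategy("two ints", someInputTypeStrategy)
 *       .calledWithArgumentTypes(DataTypes.INT(), DataTypes.INT())
 *       .expectSignature("f(INT, INT)")
 *       .expectArgumentTypes(DataTypes.INT(), DataTypes.INT())
 *
 * where someInputTypeStrategy is any InputTypeStrategy under test and the expected
 * signature string is hypothetical.
 */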
/** A specification for tests to execute. */
protected static class TestSpec {
private final @Nullable String description;
private final InputTypeStrategy strategy;
private @Nullable List<String> namedArguments;
private @Nullable List<DataType> typedArguments;
private List<List<DataType>> actualArgumentTypes = new ArrayList<>();
private @Nullable Integer literalPos;
private @Nullable Object literalValue;
private @Nullable InputTypeStrategy surroundingStrategy;
private @Nullable String expectedSignature;
private @Nullable List<DataType> expectedArgumentTypes;
private @Nullable String expectedErrorMessage;
private TestSpec(@Nullable String description, InputTypeStrategy strategy) {
this.description = description;
this.strategy = strategy;
}
public static TestSpec forStrategy(InputTypeStrategy strategy) {
return new TestSpec(null, strategy);
}
public static TestSpec forStrategy(String description, InputTypeStrategy strategy) {
return new TestSpec(description, strategy);
}
public TestSpec namedArguments(String... names) {
this.namedArguments = Arrays.asList(names);
return this;
}
public TestSpec typedArguments(DataType... dataTypes) {
this.typedArguments = Arrays.asList(dataTypes);
return this;
}
public TestSpec surroundingStrategy(InputTypeStrategy surroundingStrategy) {
this.surroundingStrategy = surroundingStrategy;
return this;
}
public TestSpec calledWithArgumentTypes(AbstractDataType<?>... dataTypes) {
this.actualArgumentTypes.add(resolveDataTypes(dataTypes));
return this;
}
public TestSpec calledWithLiteralAt(int pos) {
this.literalPos = pos;
return this;
}
public TestSpec calledWithLiteralAt(int pos, Object value) {
this.literalPos = pos;
this.literalValue = value;
return this;
}
public TestSpec expectSignature(String signature) {
this.expectedSignature = signature;
return this;
}
public TestSpec expectArgumentTypes(AbstractDataType<?>... dataTypes) {
this.expectedArgumentTypes = resolveDataTypes(dataTypes);
return this;
}
public TestSpec expectErrorMessage(String expectedErrorMessage) {
this.expectedErrorMessage = expectedErrorMessage;
return this;
}
private List<DataType> resolveDataTypes(AbstractDataType<?>[] dataTypes) {
final DataTypeFactoryMock factoryMock = new DataTypeFactoryMock();
return Arrays.stream(dataTypes)
.map(factoryMock::createDataType)
.collect(Collectors.toList());
}
@Override
public String toString() {
return description != null ? description : strategy.getClass().getSimpleName();
}
}
}
|
|
/*
* Copyright (C) 2015 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.cloud.dataflow.sdk.transforms.join;
import static com.google.cloud.dataflow.sdk.util.Structs.addObject;
import com.google.api.client.util.Preconditions;
import com.google.cloud.dataflow.sdk.coders.Coder;
import com.google.cloud.dataflow.sdk.coders.CoderException;
import com.google.cloud.dataflow.sdk.coders.IterableCoder;
import com.google.cloud.dataflow.sdk.coders.MapCoder;
import com.google.cloud.dataflow.sdk.coders.StandardCoder;
import com.google.cloud.dataflow.sdk.util.CloudObject;
import com.google.cloud.dataflow.sdk.util.PropertyNames;
import com.google.cloud.dataflow.sdk.util.common.Reiterator;
import com.google.cloud.dataflow.sdk.values.TupleTag;
import com.google.cloud.dataflow.sdk.values.TupleTagList;
import com.google.common.collect.Iterators;
import com.google.common.collect.PeekingIterator;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Objects;
/**
* A row result of a {@link CoGroupByKey}. This is a tuple of {@link Iterable}s produced for
* a given key, and these can be accessed in different ways.
*/
public class CoGbkResult {
/**
* The values for each union tag, indexed by tag. Despite the name, this is a list rather
* than a map: position i holds the Iterable of values carrying union tag i, so the index
* and the embedded union tag are redundant, but for now it makes encoding easier.
*/
private final List<Iterable<?>> valueMap;
private final CoGbkResultSchema schema;
private static final int DEFAULT_IN_MEMORY_ELEMENT_COUNT = 10_000;
private static final Logger LOG = LoggerFactory.getLogger(CoGbkResult.class);
/**
* A row in the PCollection resulting from a CoGroupByKey transform.
* Currently, this row must fit into memory.
*
* @param schema the set of tuple tags used to refer to input tables and
* result values
* @param taggedValues the raw results from a group-by-key
*/
public CoGbkResult(
CoGbkResultSchema schema,
Iterable<RawUnionValue> taggedValues) {
this(schema, taggedValues, DEFAULT_IN_MEMORY_ELEMENT_COUNT);
}
@SuppressWarnings("unchecked")
public CoGbkResult(
CoGbkResultSchema schema,
Iterable<RawUnionValue> taggedValues,
int inMemoryElementCount) {
this.schema = schema;
valueMap = new ArrayList<>();
for (int unionTag = 0; unionTag < schema.size(); unionTag++) {
valueMap.add(new ArrayList<>());
}
// Demultiplex the first inMemoryElementCount tagged union values
// according to their tag.
final Iterator<RawUnionValue> taggedIter = taggedValues.iterator();
int elementCount = 0;
while (taggedIter.hasNext()) {
if (elementCount++ >= inMemoryElementCount && taggedIter instanceof Reiterator) {
// Let the tails be lazy.
break;
}
RawUnionValue value = taggedIter.next();
// Make sure the given union tag has a corresponding tuple tag in the
// schema.
int unionTag = value.getUnionTag();
if (schema.size() <= unionTag) {
throw new IllegalStateException("union tag " + unionTag +
" has no corresponding tuple tag in the result schema");
}
List<Object> valueList = (List<Object>) valueMap.get(unionTag);
valueList.add(value.getValue());
}
if (taggedIter.hasNext()) {
// If we get here, there were more elements than we can afford to
// keep in memory, so we copy the re-iterable of remaining items
// and append filtered views to each of the per-tag lists computed earlier.
LOG.info("CoGbkResult has more than " + inMemoryElementCount + " elements; "
+ "reiteration (which may be slow) is required.");
final Reiterator<RawUnionValue> tail = (Reiterator<RawUnionValue>) taggedIter;
// This is a trinary-state array recording whether a given tag is present in the tail. The
// initial value is null (unknown) for all tags, and the first iteration through the entire
// list will set these values to true or false to avoid needlessly iterating if filtering
// against a given tag would not match anything.
final Boolean[] containsTag = new Boolean[schema.size()];
for (int unionTag = 0; unionTag < schema.size(); unionTag++) {
final int unionTag0 = unionTag;
updateUnionTag(tail, containsTag, unionTag, unionTag0);
}
}
}
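// Replaces the eagerly materialized head list for the given tag with a lazy Iterable that concatenates the in-memory head with a filtered view over the re-iterable tail.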
private <T> void updateUnionTag(
final Reiterator<RawUnionValue> tail, final Boolean[] containsTag,
int unionTag, final int unionTag0) {
@SuppressWarnings("unchecked")
final Iterable<T> head = (Iterable<T>) valueMap.get(unionTag);
valueMap.set(
unionTag,
new Iterable<T>() {
@Override
public Iterator<T> iterator() {
return Iterators.concat(
head.iterator(),
new UnionValueIterator<T>(unionTag0, tail.copy(), containsTag));
}
});
}
public boolean isEmpty() {
for (Iterable<?> tagValues : valueMap) {
if (tagValues.iterator().hasNext()) {
return false;
}
}
return true;
}
/**
* Returns the schema used by this CoGbkResult.
*/
public CoGbkResultSchema getSchema() {
return schema;
}
@Override
public String toString() {
return valueMap.toString();
}
/**
* Returns the values from the table represented by the given
* {@code TupleTag<V>} as an {@code Iterable<V>} (which may be empty if there
* are no results).
* <p>
* If tag was not part of the original CoGroupByKey,
* throws an IllegalArgumentException.
*/
public <V> Iterable<V> getAll(TupleTag<V> tag) {
int index = schema.getIndex(tag);
if (index < 0) {
throw new IllegalArgumentException("TupleTag " + tag +
" is not in the schema");
}
@SuppressWarnings("unchecked")
Iterable<V> unions = (Iterable<V>) valueMap.get(index);
return unions;
}
/**
* If there is a singleton value for the given tag, returns it.
* Otherwise, throws an IllegalArgumentException.
* <p>
* If tag was not part of the original CoGroupByKey,
* throws an IllegalArgumentException.
*/
public <V> V getOnly(TupleTag<V> tag) {
return innerGetOnly(tag, null, false);
}
/**
* If there is a singleton value for the given tag, returns it. If there is
* no value for the given tag, returns the defaultValue.
* <p>
* If tag was not part of the original CoGroupByKey,
* throws an IllegalArgumentException.
*/
public <V> V getOnly(TupleTag<V> tag, V defaultValue) {
return innerGetOnly(tag, defaultValue, true);
}
/**
* A {@link Coder} for {@link CoGbkResult}s.
*/
@SuppressWarnings("serial")
public static class CoGbkResultCoder extends StandardCoder<CoGbkResult> {
private final CoGbkResultSchema schema;
private final UnionCoder unionCoder;
private MapCoder<Integer, List<RawUnionValue>> mapCoder;
/**
* Returns a CoGbkResultCoder for the given schema and unionCoder.
*/
public static CoGbkResultCoder of(
CoGbkResultSchema schema,
UnionCoder unionCoder) {
return new CoGbkResultCoder(schema, unionCoder);
}
@JsonCreator
public static CoGbkResultCoder of(
@JsonProperty(PropertyNames.COMPONENT_ENCODINGS)
List<Coder<?>> components,
@JsonProperty(PropertyNames.CO_GBK_RESULT_SCHEMA) CoGbkResultSchema schema) {
Preconditions.checkArgument(components.size() == 1,
"Expecting 1 component, got " + components.size());
return new CoGbkResultCoder(schema, (UnionCoder) components.get(0));
}
private CoGbkResultCoder(
CoGbkResultSchema tupleTags,
UnionCoder unionCoder) {
this.schema = tupleTags;
this.unionCoder = unionCoder;
}
@Override
public List<? extends Coder<?>> getCoderArguments() {
return null;
}
@Override
public List<? extends Coder<?>> getComponents() {
return Arrays.<Coder<?>>asList(unionCoder);
}
@Override
public CloudObject asCloudObject() {
CloudObject result = super.asCloudObject();
addObject(result, PropertyNames.CO_GBK_RESULT_SCHEMA, schema.asCloudObject());
return result;
}
@Override
@SuppressWarnings("unchecked")
public void encode(
CoGbkResult value,
OutputStream outStream,
Context context) throws CoderException,
IOException {
if (!schema.equals(value.getSchema())) {
throw new CoderException("input schema does not match coder schema");
}
for (int unionTag = 0; unionTag < schema.size(); unionTag++) {
tagListCoder(unionTag).encode(value.valueMap.get(unionTag), outStream, Context.NESTED);
}
}
@Override
public CoGbkResult decode(
InputStream inStream,
Context context)
throws CoderException, IOException {
List<Iterable<?>> valueMap = new ArrayList<>();
for (int unionTag = 0; unionTag < schema.size(); unionTag++) {
valueMap.add(tagListCoder(unionTag).decode(inStream, Context.NESTED));
}
return new CoGbkResult(schema, valueMap);
}
@SuppressWarnings("rawtypes")
private IterableCoder tagListCoder(int unionTag) {
return IterableCoder.of(unionCoder.getComponents().get(unionTag));
}
@Override
public boolean equals(Object other) {
if (!super.equals(other)) {
return false;
}
return schema.equals(((CoGbkResultCoder) other).schema);
}
@Override
public int hashCode() {
return Objects.hashCode(schema);
}
@Override
public void verifyDeterministic() throws NonDeterministicException {
verifyDeterministic(
"CoGbkResult requires the mapCoder to be deterministic", mapCoder);
}
}
//////////////////////////////////////////////////////////////////////////////
// Methods for directly constructing a CoGbkResult
//
// (for example, creating test data for a transform that consumes a
// CoGbkResult)
/**
* Returns a new CoGbkResult that contains just the given tag and given data.
*/
public static <V> CoGbkResult of(TupleTag<V> tag, List<V> data) {
return CoGbkResult.empty().and(tag, data);
}
/**
* Returns a new CoGbkResult based on this, with the given tag and given data
* added to it.
*/
public <V> CoGbkResult and(TupleTag<V> tag, List<V> data) {
if (nextTestUnionId != schema.size()) {
throw new IllegalArgumentException(
"Attempting to call and() on a CoGbkResult apparently not created by"
+ " of().");
}
List<Iterable<?>> valueMap = new ArrayList<>(this.valueMap);
valueMap.add(data);
return new CoGbkResult(
new CoGbkResultSchema(schema.getTupleTagList().and(tag)), valueMap,
nextTestUnionId + 1);
}
/**
* Returns an empty CoGbkResult.
*/
public static <V> CoGbkResult empty() {
return new CoGbkResult(new CoGbkResultSchema(TupleTagList.empty()),
new ArrayList<Iterable<?>>());
}
//////////////////////////////////////////////////////////////////////////////
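// Tracks the next union tag when building results via of()/and(); and() requires it to equal the current schema size.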
private int nextTestUnionId = 0;
private CoGbkResult(
CoGbkResultSchema schema,
List<Iterable<?>> valueMap,
int nextTestUnionId) {
this(schema, valueMap);
this.nextTestUnionId = nextTestUnionId;
}
private CoGbkResult(
CoGbkResultSchema schema,
List<Iterable<?>> valueMap) {
this.schema = schema;
this.valueMap = valueMap;
}
private <V> V innerGetOnly(
TupleTag<V> tag,
V defaultValue,
boolean useDefault) {
int index = schema.getIndex(tag);
if (index < 0) {
throw new IllegalArgumentException("TupleTag " + tag
+ " is not in the schema");
}
@SuppressWarnings("unchecked")
Iterator<V> unions = (Iterator<V>) valueMap.get(index).iterator();
if (!unions.hasNext()) {
if (useDefault) {
return defaultValue;
} else {
throw new IllegalArgumentException("TupleTag " + tag
+ " corresponds to an empty result, and no default was provided");
}
}
V value = unions.next();
if (unions.hasNext()) {
throw new IllegalArgumentException("TupleTag " + tag
+ " corresponds to a non-singleton result");
}
return value;
}
/**
* Lazily filters and recasts an {@code Iterator<RawUnionValue>} into an
* {@code Iterator<V>}, where V is the type of the raw union value's contents.
*/
private static class UnionValueIterator<V> implements Iterator<V> {
private final int tag;
private final PeekingIterator<RawUnionValue> unions;
private final Boolean[] containsTag;
private UnionValueIterator(int tag, Iterator<RawUnionValue> unions, Boolean[] containsTag) {
this.tag = tag;
this.unions = Iterators.peekingIterator(unions);
this.containsTag = containsTag;
}
@Override
public boolean hasNext() {
if (containsTag[tag] == Boolean.FALSE) {
return false;
}
advance();
if (unions.hasNext()) {
return true;
} else {
// Now that we've iterated over all the values, we can resolve all the "unknown" null
// values to false.
for (int i = 0; i < containsTag.length; i++) {
if (containsTag[i] == null) {
containsTag[i] = false;
}
}
return false;
}
}
@Override
@SuppressWarnings("unchecked")
public V next() {
advance();
return (V) unions.next().getValue();
}
private void advance() {
while (unions.hasNext()) {
int curTag = unions.peek().getUnionTag();
containsTag[curTag] = true;
if (curTag == tag) {
break;
}
unions.next();
}
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
}
}
|
|
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.extractor.ts;
import com.google.android.exoplayer.C;
import com.google.android.exoplayer.audio.AudioCapabilities;
import com.google.android.exoplayer.extractor.Extractor;
import com.google.android.exoplayer.extractor.ExtractorInput;
import com.google.android.exoplayer.extractor.ExtractorOutput;
import com.google.android.exoplayer.extractor.PositionHolder;
import com.google.android.exoplayer.extractor.SeekMap;
import com.google.android.exoplayer.util.ParsableBitArray;
import com.google.android.exoplayer.util.ParsableByteArray;
import android.util.Log;
import android.util.SparseArray;
import android.util.SparseBooleanArray;
import java.io.IOException;
/**
* Facilitates the extraction of data from the MPEG-2 TS container format.
*/
public final class TsExtractor implements Extractor {
private static final String TAG = "TsExtractor";
private static final int TS_PACKET_SIZE = 188;
private static final int TS_SYNC_BYTE = 0x47; // First byte of each TS packet.
private static final int TS_PAT_PID = 0;
private static final int TS_STREAM_TYPE_MPA = 0x03;
private static final int TS_STREAM_TYPE_MPA_LSF = 0x04;
private static final int TS_STREAM_TYPE_AAC = 0x0F;
private static final int TS_STREAM_TYPE_ATSC_AC3 = 0x81;
private static final int TS_STREAM_TYPE_ATSC_E_AC3 = 0x87;
private static final int TS_STREAM_TYPE_H264 = 0x1B;
private static final int TS_STREAM_TYPE_H265 = 0x24;
private static final int TS_STREAM_TYPE_ID3 = 0x15;
private static final int TS_STREAM_TYPE_EIA608 = 0x100; // 0xFF + 1
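// PTS values are 33 bits wide, so they wrap around at MAX_PTS; see ptsToTimeUs for wraparound handling.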
private static final long MAX_PTS = 0x1FFFFFFFFL;
private final ParsableByteArray tsPacketBuffer;
private final ParsableBitArray tsScratch;
private final boolean idrKeyframesOnly;
private final long firstSampleTimestampUs;
/* package */ final SparseBooleanArray streamTypes;
/* package */ final SparseBooleanArray allowedPassthroughStreamTypes;
/* package */ final SparseArray<TsPayloadReader> tsPayloadReaders; // Indexed by pid
// Accessed only by the loading thread.
private ExtractorOutput output;
private long timestampOffsetUs;
private long lastPts;
/* package */ Id3Reader id3Reader;
public TsExtractor() {
this(0);
}
public TsExtractor(long firstSampleTimestampUs) {
this(firstSampleTimestampUs, null);
}
public TsExtractor(long firstSampleTimestampUs, AudioCapabilities audioCapabilities) {
this(firstSampleTimestampUs, audioCapabilities, true);
}
public TsExtractor(long firstSampleTimestampUs, AudioCapabilities audioCapabilities,
boolean idrKeyframesOnly) {
this.firstSampleTimestampUs = firstSampleTimestampUs;
this.idrKeyframesOnly = idrKeyframesOnly;
tsScratch = new ParsableBitArray(new byte[3]);
tsPacketBuffer = new ParsableByteArray(TS_PACKET_SIZE);
streamTypes = new SparseBooleanArray();
allowedPassthroughStreamTypes = getPassthroughStreamTypes(audioCapabilities);
tsPayloadReaders = new SparseArray<>();
tsPayloadReaders.put(TS_PAT_PID, new PatReader());
lastPts = Long.MIN_VALUE;
}
// Extractor implementation.
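// Sniffing checks that five consecutive packets start with the 0x47 sync byte at 188-byte intervals.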
@Override
public boolean sniff(ExtractorInput input) throws IOException, InterruptedException {
byte[] scratch = new byte[1];
for (int i = 0; i < 5; i++) {
input.peekFully(scratch, 0, 1);
if ((scratch[0] & 0xFF) != TS_SYNC_BYTE) {
return false;
}
input.advancePeekPosition(TS_PACKET_SIZE - 1);
}
return true;
}
@Override
public void init(ExtractorOutput output) {
this.output = output;
output.seekMap(SeekMap.UNSEEKABLE);
}
@Override
public void seek() {
timestampOffsetUs = 0;
lastPts = Long.MIN_VALUE;
for (int i = 0; i < tsPayloadReaders.size(); i++) {
tsPayloadReaders.valueAt(i).seek();
}
}
@Override
public int read(ExtractorInput input, PositionHolder seekPosition)
throws IOException, InterruptedException {
if (!input.readFully(tsPacketBuffer.data, 0, TS_PACKET_SIZE, true)) {
return RESULT_END_OF_INPUT;
}
// Note: see ISO/IEC 13818-1, section 2.4.3.2 for detailed information on the format of
// the header.
tsPacketBuffer.setPosition(0);
tsPacketBuffer.setLimit(TS_PACKET_SIZE);
int syncByte = tsPacketBuffer.readUnsignedByte();
if (syncByte != TS_SYNC_BYTE) {
return RESULT_CONTINUE;
}
tsPacketBuffer.readBytes(tsScratch, 3);
tsScratch.skipBits(1); // transport_error_indicator
boolean payloadUnitStartIndicator = tsScratch.readBit();
tsScratch.skipBits(1); // transport_priority
int pid = tsScratch.readBits(13);
tsScratch.skipBits(2); // transport_scrambling_control
boolean adaptationFieldExists = tsScratch.readBit();
boolean payloadExists = tsScratch.readBit();
// Last 4 bits of scratch are skipped: continuity_counter
// Skip the adaptation field.
if (adaptationFieldExists) {
int adaptationFieldLength = tsPacketBuffer.readUnsignedByte();
tsPacketBuffer.skipBytes(adaptationFieldLength);
}
// Read the payload.
if (payloadExists) {
TsPayloadReader payloadReader = tsPayloadReaders.get(pid);
if (payloadReader != null) {
payloadReader.consume(tsPacketBuffer, payloadUnitStartIndicator, output);
}
}
return RESULT_CONTINUE;
}
// Internals.
/**
* Adjusts a PTS value to the corresponding time in microseconds, accounting for PTS wraparound.
*
* @param pts The raw PTS value.
* @return The corresponding time in microseconds.
*/
/* package */ long ptsToTimeUs(long pts) {
if (lastPts != Long.MIN_VALUE) {
// The wrap count for the current PTS may be closestWrapCount or (closestWrapCount - 1),
// and we need to snap to the one closest to lastPts.
long closestWrapCount = (lastPts + (MAX_PTS / 2)) / MAX_PTS;
long ptsWrapBelow = pts + (MAX_PTS * (closestWrapCount - 1));
long ptsWrapAbove = pts + (MAX_PTS * closestWrapCount);
pts = Math.abs(ptsWrapBelow - lastPts) < Math.abs(ptsWrapAbove - lastPts)
? ptsWrapBelow : ptsWrapAbove;
}
// Calculate the corresponding timestamp.
long timeUs = (pts * C.MICROS_PER_SECOND) / 90000;
// If we haven't done the initial timestamp adjustment, do it now.
if (lastPts == Long.MIN_VALUE) {
timestampOffsetUs = firstSampleTimestampUs - timeUs;
}
// Record the adjusted PTS to adjust for wraparound next time.
lastPts = pts;
return timeUs + timestampOffsetUs;
}
/**
* Returns a sparse boolean array of stream types that can be played back based on
* {@code audioCapabilities}.
*/
private static SparseBooleanArray getPassthroughStreamTypes(AudioCapabilities audioCapabilities) {
SparseBooleanArray streamTypes = new SparseBooleanArray();
if (audioCapabilities != null) {
if (audioCapabilities.supportsEncoding(C.ENCODING_AC3)) {
streamTypes.put(TS_STREAM_TYPE_ATSC_AC3, true);
}
if (audioCapabilities.supportsEncoding(C.ENCODING_E_AC3)) {
// TODO: Uncomment when Ac3Reader supports enhanced AC-3.
// streamTypes.put(TS_STREAM_TYPE_ATSC_E_AC3, true);
}
}
return streamTypes;
}
/**
* Parses TS packet payload data.
*/
private abstract static class TsPayloadReader {
/**
* Notifies the reader that a seek has occurred.
* <p>
* Following a call to this method, the data passed to the next invocation of
* {@link #consume(ParsableByteArray, boolean, ExtractorOutput)} will not be a continuation of
* the data that was previously passed. Hence the reader should reset any internal state.
*/
public abstract void seek();
/**
* Consumes the payload of a TS packet.
*
* @param data The TS packet. The position will be set to the start of the payload.
* @param payloadUnitStartIndicator Whether payloadUnitStartIndicator was set on the TS packet.
* @param output The output to which parsed data should be written.
*/
public abstract void consume(ParsableByteArray data, boolean payloadUnitStartIndicator,
ExtractorOutput output);
}
/**
* Parses Program Association Table data.
*/
private class PatReader extends TsPayloadReader {
private final ParsableBitArray patScratch;
public PatReader() {
patScratch = new ParsableBitArray(new byte[4]);
}
@Override
public void seek() {
// Do nothing.
}
@Override
public void consume(ParsableByteArray data, boolean payloadUnitStartIndicator,
ExtractorOutput output) {
// Skip pointer.
if (payloadUnitStartIndicator) {
int pointerField = data.readUnsignedByte();
data.skipBytes(pointerField);
}
data.readBytes(patScratch, 3);
patScratch.skipBits(12); // table_id (8), section_syntax_indicator (1), '0' (1), reserved (2)
int sectionLength = patScratch.readBits(12);
// transport_stream_id (16), reserved (2), version_number (5), current_next_indicator (1),
// section_number (8), last_section_number (8)
data.skipBytes(5);
int programCount = (sectionLength - 9) / 4;
for (int i = 0; i < programCount; i++) {
data.readBytes(patScratch, 4);
patScratch.skipBits(19); // program_number (16), reserved (3)
int pid = patScratch.readBits(13);
tsPayloadReaders.put(pid, new PmtReader());
}
// Skip CRC_32.
}
}
/**
* Parses Program Map Table.
*/
private class PmtReader extends TsPayloadReader {
private final ParsableBitArray pmtScratch;
public PmtReader() {
pmtScratch = new ParsableBitArray(new byte[5]);
}
@Override
public void seek() {
// Do nothing.
}
@Override
public void consume(ParsableByteArray data, boolean payloadUnitStartIndicator,
ExtractorOutput output) {
// Skip pointer.
if (payloadUnitStartIndicator) {
int pointerField = data.readUnsignedByte();
data.skipBytes(pointerField);
}
// Note: see ISO/IEC 13818-1, section 2.4.4.8 for detailed information on the format of
// the header.
data.readBytes(pmtScratch, 3);
pmtScratch.skipBits(12); // table_id (8), section_syntax_indicator (1), '0' (1), reserved (2)
int sectionLength = pmtScratch.readBits(12);
// program_number (16), reserved (2), version_number (5), current_next_indicator (1),
// section_number (8), last_section_number (8), reserved (3), PCR_PID (13)
// Skip the rest of the PMT header.
data.skipBytes(7);
data.readBytes(pmtScratch, 2);
pmtScratch.skipBits(4);
int programInfoLength = pmtScratch.readBits(12);
// Skip the descriptors.
data.skipBytes(programInfoLength);
if (id3Reader == null) {
// Set up an ID3 track regardless of whether there's a corresponding entry, in case one
// appears intermittently during playback. See b/20261500.
id3Reader = new Id3Reader(output.track(TS_STREAM_TYPE_ID3));
}
int entriesSize = sectionLength - 9 /* Size of the rest of the fields before descriptors */
- programInfoLength - 4 /* CRC size */;
while (entriesSize > 0) {
data.readBytes(pmtScratch, 5);
int streamType = pmtScratch.readBits(8);
pmtScratch.skipBits(3); // reserved
int elementaryPid = pmtScratch.readBits(13);
pmtScratch.skipBits(4); // reserved
int esInfoLength = pmtScratch.readBits(12);
// Skip the descriptors.
data.skipBytes(esInfoLength);
entriesSize -= esInfoLength + 5;
if (streamTypes.get(streamType)) {
continue;
}
// TODO: Detect and read DVB AC-3 streams with Ac3Reader.
ElementaryStreamReader pesPayloadReader = null;
switch (streamType) {
case TS_STREAM_TYPE_MPA:
pesPayloadReader = new MpegAudioReader(output.track(TS_STREAM_TYPE_MPA));
break;
case TS_STREAM_TYPE_MPA_LSF:
pesPayloadReader = new MpegAudioReader(output.track(TS_STREAM_TYPE_MPA_LSF));
break;
case TS_STREAM_TYPE_AAC:
pesPayloadReader = new AdtsReader(output.track(TS_STREAM_TYPE_AAC));
break;
case TS_STREAM_TYPE_ATSC_E_AC3:
case TS_STREAM_TYPE_ATSC_AC3:
if (!allowedPassthroughStreamTypes.get(streamType)) {
continue;
}
pesPayloadReader = new Ac3Reader(output.track(streamType));
break;
case TS_STREAM_TYPE_H264:
pesPayloadReader = new H264Reader(output.track(TS_STREAM_TYPE_H264),
new SeiReader(output.track(TS_STREAM_TYPE_EIA608)), idrKeyframesOnly);
break;
case TS_STREAM_TYPE_H265:
pesPayloadReader = new H265Reader(output.track(TS_STREAM_TYPE_H265),
new SeiReader(output.track(TS_STREAM_TYPE_EIA608)));
break;
case TS_STREAM_TYPE_ID3:
pesPayloadReader = id3Reader;
break;
}
if (pesPayloadReader != null) {
streamTypes.put(streamType, true);
tsPayloadReaders.put(elementaryPid, new PesReader(pesPayloadReader));
}
}
output.endTracks();
}
}
/**
* Parses PES packet data and extracts samples.
*/
private class PesReader extends TsPayloadReader {
private static final int STATE_FINDING_HEADER = 0;
private static final int STATE_READING_HEADER = 1;
private static final int STATE_READING_HEADER_EXTENSION = 2;
private static final int STATE_READING_BODY = 3;
private static final int HEADER_SIZE = 9;
private static final int MAX_HEADER_EXTENSION_SIZE = 5;
private final ParsableBitArray pesScratch;
private final ElementaryStreamReader pesPayloadReader;
private int state;
private int bytesRead;
private boolean bodyStarted;
private boolean ptsFlag;
private int extendedHeaderLength;
private int payloadSize;
private long timeUs;
public PesReader(ElementaryStreamReader pesPayloadReader) {
this.pesPayloadReader = pesPayloadReader;
pesScratch = new ParsableBitArray(new byte[HEADER_SIZE]);
state = STATE_FINDING_HEADER;
}
@Override
public void seek() {
state = STATE_FINDING_HEADER;
bytesRead = 0;
bodyStarted = false;
pesPayloadReader.seek();
}
@Override
public void consume(ParsableByteArray data, boolean payloadUnitStartIndicator,
ExtractorOutput output) {
if (payloadUnitStartIndicator) {
switch (state) {
case STATE_FINDING_HEADER:
case STATE_READING_HEADER:
// Expected.
break;
case STATE_READING_HEADER_EXTENSION:
Log.w(TAG, "Unexpected start indicator reading extended header");
break;
case STATE_READING_BODY:
// If payloadSize == -1 then the length of the previous packet was unspecified, and so
// we only know that it's finished now that we've seen the start of the next one. This
// is expected. If payloadSize != -1, then the length of the previous packet was known,
// but we didn't receive that amount of data. This is not expected.
if (payloadSize != -1) {
Log.w(TAG, "Unexpected start indicator: expected " + payloadSize + " more bytes");
}
// Either way, if the body was started, notify the reader that it has now finished.
if (bodyStarted) {
pesPayloadReader.packetFinished();
}
break;
}
setState(STATE_READING_HEADER);
}
while (data.bytesLeft() > 0) {
switch (state) {
case STATE_FINDING_HEADER:
data.skipBytes(data.bytesLeft());
break;
case STATE_READING_HEADER:
if (continueRead(data, pesScratch.data, HEADER_SIZE)) {
setState(parseHeader() ? STATE_READING_HEADER_EXTENSION : STATE_FINDING_HEADER);
}
break;
case STATE_READING_HEADER_EXTENSION:
int readLength = Math.min(MAX_HEADER_EXTENSION_SIZE, extendedHeaderLength);
// Read as much of the extended header as we're interested in, and skip the rest.
if (continueRead(data, pesScratch.data, readLength)
&& continueRead(data, null, extendedHeaderLength)) {
parseHeaderExtension();
bodyStarted = false;
setState(STATE_READING_BODY);
}
break;
case STATE_READING_BODY:
readLength = data.bytesLeft();
int padding = payloadSize == -1 ? 0 : readLength - payloadSize;
if (padding > 0) {
readLength -= padding;
data.setLimit(data.getPosition() + readLength);
}
pesPayloadReader.consume(data, timeUs, !bodyStarted);
bodyStarted = true;
if (payloadSize != -1) {
payloadSize -= readLength;
if (payloadSize == 0) {
pesPayloadReader.packetFinished();
setState(STATE_READING_HEADER);
}
}
break;
}
}
}
private void setState(int state) {
this.state = state;
bytesRead = 0;
}
/**
* Continues a read from the provided {@code source} into a given {@code target}. It's assumed
* that the data should be written into {@code target} starting from an offset of zero.
*
* @param source The source from which to read.
* @param target The target into which data is to be read, or {@code null} to skip.
* @param targetLength The target length of the read.
* @return Whether the target length has been reached.
*/
private boolean continueRead(ParsableByteArray source, byte[] target, int targetLength) {
int bytesToRead = Math.min(source.bytesLeft(), targetLength - bytesRead);
if (bytesToRead <= 0) {
return true;
} else if (target == null) {
source.skipBytes(bytesToRead);
} else {
source.readBytes(target, bytesRead, bytesToRead);
}
bytesRead += bytesToRead;
return bytesRead == targetLength;
}
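// For example, with targetLength = HEADER_SIZE (9): a first call on a source with
// only 4 bytes left copies those 4 bytes, leaves bytesRead = 4 and returns false;
// a later call with at least 5 bytes left copies the remaining 5 and returns true,
// at which point the caller can parse the completed header.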
private boolean parseHeader() {
// Note: see ISO/IEC 13818-1, section 2.4.3.6 for detailed information on the format of
// the header.
pesScratch.setPosition(0);
int startCodePrefix = pesScratch.readBits(24);
if (startCodePrefix != 0x000001) {
Log.w(TAG, "Unexpected start code prefix: " + startCodePrefix);
payloadSize = -1;
return false;
}
pesScratch.skipBits(8); // stream_id.
int packetLength = pesScratch.readBits(16);
// First 8 bits are skipped: '10' (2), PES_scrambling_control (2), PES_priority (1),
// data_alignment_indicator (1), copyright (1), original_or_copy (1)
pesScratch.skipBits(8);
ptsFlag = pesScratch.readBit();
// DTS_flag (1), ESCR_flag (1), ES_rate_flag (1), DSM_trick_mode_flag (1),
// additional_copy_info_flag (1), PES_CRC_flag (1), PES_extension_flag (1)
pesScratch.skipBits(7);
extendedHeaderLength = pesScratch.readBits(8);
if (packetLength == 0) {
payloadSize = -1;
} else {
payloadSize = packetLength + 6 /* packetLength does not include the first 6 bytes */
- HEADER_SIZE - extendedHeaderLength;
}
return true;
}
private void parseHeaderExtension() {
pesScratch.setPosition(0);
timeUs = 0;
if (ptsFlag) {
pesScratch.skipBits(4); // '0010' or '0011'
long pts = (long) pesScratch.readBits(3) << 30;
pesScratch.skipBits(1); // marker_bit
pts |= pesScratch.readBits(15) << 15;
pesScratch.skipBits(1); // marker_bit
pts |= pesScratch.readBits(15);
pesScratch.skipBits(1); // marker_bit
timeUs = ptsToTimeUs(pts);
}
}
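// The reads above reassemble the 33-bit PTS (3 + 15 + 15 bits, each group followed
// by a marker bit). PTS is expressed in 90 kHz clock ticks, so ptsToTimeUs is
// assumed to scale it to microseconds, roughly:
//   timeUs = pts * 1000000 / 90000;   // e.g. pts = 90000 -> 1_000_000 us
// (any wraparound adjustment done by ptsToTimeUs is omitted from this sketch).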
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. The ASF licenses this file to You
* under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. For additional information regarding
* copyright in this work, please see the NOTICE file in the top level
* directory of this distribution.
*/
package org.apache.roller.business;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.FilenameFilter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.roller.RollerException;
import org.apache.roller.ThemeNotFoundException;
import org.apache.roller.config.RollerConfig;
import org.apache.roller.model.RollerFactory;
import org.apache.roller.model.ThemeManager;
import org.apache.roller.model.UserManager;
import org.apache.roller.pojos.Theme;
import org.apache.roller.pojos.ThemeTemplate;
import org.apache.roller.pojos.WeblogTemplate;
import org.apache.roller.pojos.WebsiteData;
/**
* Base implementation of a ThemeManager.
*
* This particular implementation reads theme data off the filesystem
* and assumes that those themes are not changeable at runtime.
*/
public class ThemeManagerImpl implements ThemeManager {
private static Log log = LogFactory.getLog(ThemeManagerImpl.class);
private Map themes = null;
protected ThemeManagerImpl() {
// rather than be lazy we are going to load all themes from
// the disk preemptively during initialization and cache them
log.debug("Initializing ThemeManagerImpl");
this.themes = this.loadAllThemesFromDisk();
log.info("Loaded "+this.themes.size()+" themes from disk.");
}
/**
* @see org.apache.roller.model.ThemeManager#getTheme(java.lang.String)
*/
public Theme getTheme(String name)
throws ThemeNotFoundException, RollerException {
Theme theme = (Theme) this.themes.get(name);
if(theme == null)
throw new ThemeNotFoundException("Couldn't find theme ["+name+"]");
return theme;
}
/**
* @see org.apache.roller.model.ThemeManager#getThemeById(java.lang.String)
*/
public Theme getThemeById(String id)
throws ThemeNotFoundException, RollerException {
// In this implementation where themes come from the filesystem we
// know that the name and id for a theme are the same
return this.getTheme(id);
}
/**
* @see org.apache.roller.model.ThemeManager#getThemesList()
*/
public List getThemesList() {
List themes = new ArrayList(this.themes.keySet());
// sort 'em ... the natural sorting order for Strings is alphabetical
Collections.sort(themes);
return themes;
}
/**
* @see org.apache.roller.model.ThemeManager#getEnabledThemesList()
*/
public List getEnabledThemesList() {
Collection all_themes = this.themes.values();
// make a new list of only the enabled themes
List enabled_themes = new ArrayList();
Iterator it = all_themes.iterator();
Theme theme = null;
while(it.hasNext()) {
theme = (Theme) it.next();
if(theme.isEnabled())
enabled_themes.add(theme.getName());
}
// sort 'em ... the natural sorting order for Strings is alphabetical
Collections.sort(enabled_themes);
return enabled_themes;
}
/**
* @see org.apache.roller.model.ThemeManager#getTemplate(String, String)
*/
public ThemeTemplate getTemplate(String theme_name, String template_name)
throws ThemeNotFoundException, RollerException {
// basically we just try and lookup the theme first, then template
Theme theme = this.getTheme(theme_name);
return theme.getTemplate(template_name);
}
/**
* @see org.apache.roller.model.ThemeManager#getTemplateById(java.lang.String)
*/
public ThemeTemplate getTemplateById(String id)
throws ThemeNotFoundException, RollerException {
if(id == null)
throw new ThemeNotFoundException("Theme id was null");
// in our case we expect a template id to be <theme>:<template>
// so extract each piece and do the lookup
String[] split = id.split(":", 2);
if(split.length != 2)
throw new ThemeNotFoundException("Invalid theme id ["+id+"]");
return this.getTemplate(split[0], split[1]);
}
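// For example, the id "basic:Weblog" looks up the "Weblog" template of the
// theme named "basic" (both names are illustrative); an id without a ':'
// separator is rejected above.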
/**
* @see org.apache.roller.model.ThemeManager#getTemplateByLink(java.lang.String)
*/
public ThemeTemplate getTemplateByLink(String theme_name, String template_link)
throws ThemeNotFoundException, RollerException {
// basically we just try and lookup the theme first, then template
Theme theme = this.getTheme(theme_name);
return theme.getTemplateByLink(template_link);
}
/**
* This is a convenience method which loads all the theme data from
* themes stored on the filesystem in the roller webapp /themes/ directory.
*/
private Map loadAllThemesFromDisk() {
Map themes = new HashMap();
// NOTE: we need to figure out how to get the roller context path
String themespath = RollerConfig.getProperty("context.realPath");
if(themespath.endsWith(File.separator))
themespath += "themes";
else
themespath += File.separator + "themes";
// first, get a list of the themes available
File themesdir = new File(themespath);
FilenameFilter filter = new FilenameFilter() {
public boolean accept(File dir, String name) {
File file =
new File(dir.getAbsolutePath() + File.separator + name);
return file.isDirectory();
}
};
String[] themenames = themesdir.list(filter);
if(themenames == null)
themenames = new String[0];
// now go through each theme and read all its templates
Theme theme = null;
for(int i=0; i < themenames.length; i++) {
try {
theme = this.loadThemeFromDisk(themenames[i],
themespath + File.separator + themenames[i]);
themes.put(theme.getName(), theme);
} catch (Throwable unexpected) {
// shouldn't happen, so let's learn why it did
log.error("Problem reading theme " + themenames[i], unexpected);
}
}
return themes;
}
/**
* Another convenience method which knows how to load a single theme
* off the filesystem and return a Theme object
*/
private Theme loadThemeFromDisk(String theme_name, String themepath) {
log.info("Loading theme "+theme_name);
Theme theme = new Theme();
theme.setName(theme_name);
theme.setAuthor("Roller");
theme.setLastEditor("Roller");
theme.setEnabled(true);
// start by getting a list of the .vm files for this theme
File themedir = new File(themepath);
FilenameFilter filter = new FilenameFilter()
{
public boolean accept(File dir, String name)
{
return name.endsWith(".vm");
}
};
String[] templates = themedir.list(filter);
// go through each .vm file and read in its contents to a ThemeTemplate
String template_name = null;
ThemeTemplate theme_template = null;
for (int i=0; i < templates.length; i++) {
// strip off the .vm part
template_name = templates[i].substring(0, templates[i].length() - 3);
File template_file = new File(themepath + File.separator + templates[i]);
// Continue reading theme even if problem encountered with one file
String msg = "read theme template file ["+template_file+"]";
if(!template_file.exists() || !template_file.canRead()) {
log.error("Couldn't " + msg);
continue;
}
char[] chars = null;
int length;
try {
// FileReader reader = new FileReader(template_file);
chars = new char[(int) template_file.length()];
FileInputStream stream = new FileInputStream(template_file);
InputStreamReader reader = new InputStreamReader(stream, "UTF-8");
length = reader.read(chars);
} catch (Exception noprob) {
log.error("Exception while attempting to " + msg);
if (log.isDebugEnabled()) log.debug(noprob);
continue;
}
// Strip "_" from name to form link
boolean navbar = true;
String template_link = template_name;
if (template_name.startsWith("_") && template_name.length() > 1) {
navbar = false;
template_link = template_link.substring(1);
log.debug("--- " + template_link);
}
String decorator = "_decorator";
if("_decorator".equals(template_name)) {
decorator = null;
}
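// For example, a file named "_day.vm" yields the template name "_day", the link
// "day" and navbar = false, while "Weblog.vm" keeps its name as the link and stays
// on the navbar; "_decorator.vm" itself is the only template with a null decorator.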
// construct ThemeTemplate representing this file
// a few restrictions for now:
// - we only allow "velocity" for the template language
// - decorator is always "_decorator" or null
// - all theme templates are considered not hidden
theme_template = new ThemeTemplate(
theme,
theme_name+":"+template_name,
template_name,
template_name,
new String(chars, 0, length),
template_link,
new Date(template_file.lastModified()),
"velocity",
false,
navbar,
decorator);
// add it to the theme
theme.setTemplate(template_name, theme_template);
}
// use the last mod date of the last template file
// as the last mod date of the theme
theme.setLastModified(theme_template.getLastModified());
return theme;
}
/**
* Helper method that copies down the pages from a given theme into a
* user's weblog templates.
*
* @param website The website whose templates should be updated.
* @param theme The theme whose pages should be copied.
* @throws RollerException
*/
public void saveThemePages(WebsiteData website, Theme theme)
throws RollerException {
log.debug("Setting custom templates for website: "+website.getName());
try {
UserManager userMgr = RollerFactory.getRoller().getUserManager();
Collection templates = theme.getTemplates();
Iterator iter = templates.iterator();
ThemeTemplate theme_template = null;
while ( iter.hasNext() ) {
theme_template = (ThemeTemplate) iter.next();
WeblogTemplate template = null;
if(theme_template.getName().equals(WeblogTemplate.DEFAULT_PAGE)) {
// this is the main Weblog template
try {
template = userMgr.getPage(website.getDefaultPageId());
} catch(Exception e) {
// user may not have a default page yet
}
} else {
// any other template
template = userMgr.getPageByName(website, theme_template.getName());
}
if (template != null) {
// User already has page by that name, so overwrite it.
template.setContents(theme_template.getContents());
template.setLink(theme_template.getLink());
} else {
// User does not have page by that name, so create new page.
template = new WeblogTemplate(
null, // id
website, // website
theme_template.getName(), // name
theme_template.getDescription(), // description
theme_template.getLink(), // link
theme_template.getContents(), // contents
new Date(), // last mod
theme_template.getTemplateLanguage(), // temp lang
theme_template.isHidden(), // hidden
theme_template.isNavbar(), // navbar
theme_template.getDecoratorName() // decorator
);
userMgr.savePage( template );
}
}
// now update this website's theme to custom
website.setEditorTheme(Theme.CUSTOM);
// if this is the first time someone is customizing a theme then
// we need to set a default page
if(website.getDefaultPageId() == null ||
website.getDefaultPageId().trim().equals("") ||
website.getDefaultPageId().equals("dummy")) {
// we have to go back to the db to figure out the id
WeblogTemplate template = userMgr.getPageByName(website, "Weblog");
if(template != null) {
log.debug("Setting default page to "+template.getId());
website.setDefaultPageId(template.getId());
}
}
// we also want to set the weblogdayid
WeblogTemplate dayTemplate = userMgr.getPageByName(website, "_day");
if(dayTemplate != null) {
log.debug("Setting default day page to "+dayTemplate.getId());
website.setWeblogDayPageId(dayTemplate.getId());
}
// save our updated website
userMgr.saveWebsite(website);
} catch (Exception e) {
log.error("ERROR in action",e);
throw new RollerException( e );
}
}
}
|
|
package org.apereo.cas.mfa.accepto.web.flow;
import org.apereo.cas.authentication.CoreAuthenticationTestUtils;
import org.apereo.cas.configuration.CasConfigurationProperties;
import org.apereo.cas.mfa.accepto.AccepttoEmailCredential;
import org.apereo.cas.mfa.accepto.BaseAccepttoMultifactorAuthenticationTests;
import org.apereo.cas.util.EncodingUtils;
import org.apereo.cas.util.MockServletContext;
import org.apereo.cas.util.MockWebServer;
import org.apereo.cas.web.flow.CasWebflowConstants;
import org.apereo.cas.web.support.WebUtils;
import com.fasterxml.jackson.databind.ObjectMapper;
import lombok.val;
import org.apereo.inspektr.common.web.ClientInfo;
import org.apereo.inspektr.common.web.ClientInfoHolder;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.core.io.ByteArrayResource;
import org.springframework.http.MediaType;
import org.springframework.mock.web.MockHttpServletRequest;
import org.springframework.mock.web.MockHttpServletResponse;
import org.springframework.webflow.context.servlet.ServletExternalContext;
import org.springframework.webflow.execution.RequestContextHolder;
import org.springframework.webflow.test.MockRequestContext;
import javax.servlet.http.Cookie;
import java.nio.charset.StandardCharsets;
import java.security.KeyPairGenerator;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import static org.junit.jupiter.api.Assertions.*;
/**
* This is {@link AccepttoMultifactorDetermineUserAccountStatusActionTests}.
*
* @author Misagh Moayyed
* @since 6.2.0
*/
@Tag("WebflowMfaActions")
@SpringBootTest(classes = BaseAccepttoMultifactorAuthenticationTests.SharedTestConfiguration.class,
properties = {
"cas.authn.mfa.acceptto.application-id=thisisatestid",
"cas.authn.mfa.acceptto.group-attribute=group",
"cas.authn.mfa.acceptto.email-attribute=email",
"cas.authn.mfa.acceptto.secret=255724611137f7eb0280dd76b0546eea4bca1c7ba1",
"cas.authn.mfa.acceptto.organization-id=thisisatestid",
"cas.authn.mfa.acceptto.organization-secret=255724611137f7eb0280dd76b0546eea4bca1c7ba1",
"cas.authn.mfa.acceptto.registration-api-public-key.location=classpath:publickey.pem"
})
@EnableConfigurationProperties(CasConfigurationProperties.class)
public class AccepttoMultifactorDetermineUserAccountStatusActionTests {
private static final ObjectMapper MAPPER = new ObjectMapper().findAndRegisterModules();
@Test
public void verifyEmpty(@Autowired final CasConfigurationProperties casProperties) throws Exception {
val context = prepareRequestContext();
val keyGen = KeyPairGenerator.getInstance("RSA");
val pair = keyGen.generateKeyPair();
val priv = pair.getPrivate();
val pub = pair.getPublic();
val payload = MAPPER.writeValueAsString(Map.of());
val jwt = EncodingUtils.signJwsRSASha512(priv, payload.getBytes(StandardCharsets.UTF_8), Map.of());
val data = MAPPER.writeValueAsString(Map.of("content", new String(jwt, StandardCharsets.UTF_8)));
try (val webServer = new MockWebServer(5013,
new ByteArrayResource(data.getBytes(StandardCharsets.UTF_8), "REST Output"), MediaType.APPLICATION_JSON_VALUE)) {
webServer.start();
val action = new AccepttoMultifactorDetermineUserAccountStatusAction(casProperties, pub);
val principal = CoreAuthenticationTestUtils.getPrincipal(Map.of(
"email", List.of("[email protected]"),
"group", List.of("staff")));
val authentication = CoreAuthenticationTestUtils.getAuthentication(principal);
WebUtils.putAuthentication(authentication, context);
RequestContextHolder.setRequestContext(context);
val result = action.doExecute(context);
assertEquals(CasWebflowConstants.TRANSITION_ID_DENY, result.getId());
}
}
@Test
public void verifyOperationFail(@Autowired final CasConfigurationProperties casProperties) throws Exception {
val context = prepareRequestContext();
val keyGen = KeyPairGenerator.getInstance("RSA");
val pair = keyGen.generateKeyPair();
val priv = pair.getPrivate();
val pub = pair.getPublic();
val payload = MAPPER.writeValueAsString(Map.of(
"success", "false",
"status", "FAIL",
"eguardian_user_id", "cas-user",
"channel", UUID.randomUUID().toString()));
val jwt = EncodingUtils.signJwsRSASha512(priv, payload.getBytes(StandardCharsets.UTF_8), Map.of());
val data = MAPPER.writeValueAsString(Map.of("content", new String(jwt, StandardCharsets.UTF_8)));
casProperties.getAuthn().getMfa()
.getAcceptto()
.setRegistrationApiUrl("http://localhost:5014");
try (val webServer = new MockWebServer(5014,
new ByteArrayResource(data.getBytes(StandardCharsets.UTF_8), "REST Output"), MediaType.APPLICATION_JSON_VALUE)) {
webServer.start();
val action = new AccepttoMultifactorDetermineUserAccountStatusAction(casProperties, pub);
val principal = CoreAuthenticationTestUtils.getPrincipal(Map.of(
"email", List.of("[email protected]"),
"group", List.of("staff")));
val authentication = CoreAuthenticationTestUtils.getAuthentication(principal);
WebUtils.putAuthentication(authentication, context);
RequestContextHolder.setRequestContext(context);
val result = action.doExecute(context);
assertEquals(CasWebflowConstants.TRANSITION_ID_DENY, result.getId());
}
}
@Test
public void verifyOperationApprove(@Autowired final CasConfigurationProperties casProperties) throws Exception {
val context = prepareRequestContext();
val keyGen = KeyPairGenerator.getInstance("RSA");
val pair = keyGen.generateKeyPair();
val priv = pair.getPrivate();
val pub = pair.getPublic();
val payload = MAPPER.writeValueAsString(Map.of(
"success", "true",
"status", "OK",
"eguardian_user_id", "cas-user",
"channel", UUID.randomUUID().toString(),
"response_code", "approved"));
val jwt = EncodingUtils.signJwsRSASha512(priv, payload.getBytes(StandardCharsets.UTF_8), Map.of());
val data = MAPPER.writeValueAsString(Map.of("content", new String(jwt, StandardCharsets.UTF_8)));
casProperties.getAuthn().getMfa()
.getAcceptto()
.setRegistrationApiUrl("http://localhost:5015");
try (val webServer = new MockWebServer(5015,
new ByteArrayResource(data.getBytes(StandardCharsets.UTF_8), "REST Output"), MediaType.APPLICATION_JSON_VALUE)) {
webServer.start();
val action = new AccepttoMultifactorDetermineUserAccountStatusAction(casProperties, pub);
val principal = CoreAuthenticationTestUtils.getPrincipal(Map.of(
"email", List.of("[email protected]"),
"group", List.of("staff")));
val authentication = CoreAuthenticationTestUtils.getAuthentication(principal);
WebUtils.putAuthentication(authentication, context);
RequestContextHolder.setRequestContext(context);
val result = action.doExecute(context);
assertEquals(CasWebflowConstants.TRANSITION_ID_APPROVE, result.getId());
}
}
@Test
public void verifyOperationSuccess(@Autowired final CasConfigurationProperties casProperties) throws Exception {
val context = prepareRequestContext();
val keyGen = KeyPairGenerator.getInstance("RSA");
val pair = keyGen.generateKeyPair();
val priv = pair.getPrivate();
val pub = pair.getPublic();
val payload = MAPPER.writeValueAsString(Map.of(
"success", "true",
"status", "OK",
"eguardian_user_id", "cas-user",
"response_code", "success",
"channel", UUID.randomUUID().toString()));
val jwt = EncodingUtils.signJwsRSASha512(priv, payload.getBytes(StandardCharsets.UTF_8), Map.of());
val data = MAPPER.writeValueAsString(Map.of("content", new String(jwt, StandardCharsets.UTF_8)));
casProperties.getAuthn().getMfa()
.getAcceptto()
.setRegistrationApiUrl("http://localhost:5017");
try (val webServer = new MockWebServer(5017,
new ByteArrayResource(data.getBytes(StandardCharsets.UTF_8), "REST Output"), MediaType.APPLICATION_JSON_VALUE)) {
webServer.start();
val action = new AccepttoMultifactorDetermineUserAccountStatusAction(casProperties, pub);
val principal = CoreAuthenticationTestUtils.getPrincipal(Map.of(
"email", List.of("[email protected]"),
"group", List.of("staff")));
val authentication = CoreAuthenticationTestUtils.getAuthentication(principal);
WebUtils.putAuthentication(authentication, context);
RequestContextHolder.setRequestContext(context);
val result = action.doExecute(context);
assertEquals(CasWebflowConstants.TRANSITION_ID_SUCCESS, result.getId());
}
}
@Test
public void verifyOperationRegister(@Autowired final CasConfigurationProperties casProperties) throws Exception {
val context = prepareRequestContext();
val keyGen = KeyPairGenerator.getInstance("RSA");
val pair = keyGen.generateKeyPair();
val priv = pair.getPrivate();
val pub = pair.getPublic();
val inviteToken = MAPPER.writeValueAsString(Map.of("invitation_token", UUID.randomUUID().toString()));
val payload = MAPPER.writeValueAsString(Map.of("invite_token", EncodingUtils.encodeBase64(inviteToken),
"success", "true",
"eguardian_user_id", "cas-user",
"channel", UUID.randomUUID().toString(),
"response_code", "pair_device"));
val jwt = EncodingUtils.signJwsRSASha512(priv, payload.getBytes(StandardCharsets.UTF_8), Map.of());
val data = MAPPER.writeValueAsString(Map.of("content", new String(jwt, StandardCharsets.UTF_8)));
casProperties.getAuthn().getMfa()
.getAcceptto()
.setRegistrationApiUrl("http://localhost:5019");
try (val webServer = new MockWebServer(5019,
new ByteArrayResource(data.getBytes(StandardCharsets.UTF_8), "REST Output"), MediaType.APPLICATION_JSON_VALUE)) {
webServer.start();
val action = new AccepttoMultifactorDetermineUserAccountStatusAction(casProperties, pub);
val principal = CoreAuthenticationTestUtils.getPrincipal(Map.of(
"email", List.of("[email protected]"),
"group", List.of("staff")));
val authentication = CoreAuthenticationTestUtils.getAuthentication(principal);
WebUtils.putAuthentication(authentication, context);
RequestContextHolder.setRequestContext(context);
val result = action.doExecute(context);
assertEquals(CasWebflowConstants.TRANSITION_ID_REGISTER, result.getId());
}
}
private static MockRequestContext prepareRequestContext() {
val context = new MockRequestContext();
val request = new MockHttpServletRequest();
request.setRemoteAddr("185.86.151.11");
request.setLocalAddr("185.88.151.11");
request.setCookies(new Cookie("jwt", UUID.randomUUID().toString()));
ClientInfoHolder.setClientInfo(new ClientInfo(request));
val response = new MockHttpServletResponse();
context.setExternalContext(new ServletExternalContext(new MockServletContext(), request, response));
AccepttoWebflowUtils.setEGuardianUserId(context, "eguardian-userid");
WebUtils.putCredential(context, new AccepttoEmailCredential("[email protected]"));
return context;
}
}
|
|
/*
* The MIT License (MIT)
*
* Copyright (c) 2016 - 2020 Thibault Meyer
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.zero_x_baadf00d.play.module.aws.s3.ebean;
import com.amazonaws.services.s3.model.*;
import com.fasterxml.uuid.Generators;
import com.zero_x_baadf00d.play.module.aws.s3.PlayS3;
import io.ebean.Model;
import play.Logger;
import javax.persistence.*;
import java.io.*;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.UUID;
/**
* This abstract class provides all the necessary foundations for
* the implementation of a model using the Amazon PlayS3 plugin.
*
* @author Thibault Meyer
* @author Pierre Adam
* @version 17.02.02
* @since 16.03.13
*/
@MappedSuperclass
public abstract class BaseS3FileModel extends Model implements Cloneable {
/**
* The unique ID of the PlayS3 file.
*
* @since 16.03.13
*/
@Id
@Column(name = "id")
protected UUID id;
/**
* The human readable name of the PlayS3 file.
*
* @since 16.03.13
*/
@Column(name = "name", nullable = false, columnDefinition = "VARCHAR(50)")
protected String name;
/**
* The content type of the PlayS3 file.
*
* @since 16.03.13
*/
@Column(name = "content_type", nullable = false, columnDefinition = "VARCHAR(20)")
protected String contentType;
/**
* Temporary object data. Used to upload
* the object to PlayS3.
*
* @since 16.03.13
*/
@Transient
protected InputStream objectData;
/**
* Is this file private or not. A private file can only be accessed
* by this application.
*
* @since 16.03.13
*/
@Column(name = "is_private", nullable = false, columnDefinition = "BOOLEAN DEFAULT TRUE")
protected boolean isPrivate;
/**
* Subdirectory on the bucket where this file is located.
*
* @since 16.03.13
*/
@Column(name = "sub_directory", nullable = false, columnDefinition = "VARCHAR(25) DEFAULT ''")
protected String subDirectory;
/**
* Name of the bucket where the file is stored in.
*
* @since 16.03.13
*/
@Column(name = "bucket")
protected String bucket;
/**
* Get the ID of this {@code S3File} entry.
*
* @return The ID
* @see UUID
* @since 16.03.13
*/
public UUID getId() {
return this.id;
}
/**
* Get the ID of this {@code S3File} entry as string.
*
* @return The ID as string
* @since 16.03.13
*/
public String getIdAsString() {
return this.id.toString();
}
/**
* Get the filename.
*
* @return The filename
* @since 16.03.13
*/
public String getName() {
return this.name;
}
/**
* Set the filename.
*
* @param name The filename to use
* @since 16.03.13
*/
public void setName(final String name) {
if (this.id == null) {
this.name = name.trim();
}
}
/**
* Get the content type (ie: image/png).
*
* @return The content type
* @since 16.03.13
*/
public String getContentType() {
return this.contentType;
}
/**
* Set the content type (ie: image/png).
*
* @param contentType The content type of the file
* @since 16.03.13
*/
public void setContentType(final String contentType) {
if (this.id == null) {
this.contentType = contentType.trim();
}
}
/**
* Is this file private?
*
* @return {@code true} if private, otherwise, {@code false}
* @since 16.03.13
*/
public boolean isPrivate() {
return this.isPrivate;
}
/**
* Set if this file private or not.
*
* @param aPrivate {@code true} if private, otherwise, {@code false}
* @since 16.03.13
*/
public void setPrivate(final boolean aPrivate) {
if (this.id == null) {
this.isPrivate = aPrivate;
}
}
/**
* Get the subdirectory where the file is located.
*
* @return The subdirectory where the file is located
* @since 16.03.13
*/
public String getSubDirectory() {
return this.subDirectory;
}
/**
* Set the subdirectory where the file will be saved.
*
* @param subDirectory The subdirectory to use
* @since 16.03.13
*/
public void setSubDirectory(final String subDirectory) {
if (this.id == null) {
this.subDirectory = subDirectory.trim();
}
}
/**
* Set the object to send to PlayS3.
*
* @param file The file to upload
* @throws FileNotFoundException If the file does not exist, is a directory rather than a regular file, or for some other reason cannot be opened for reading.
* @since 16.03.13
*/
public void setObject(final File file) throws FileNotFoundException {
if (this.id == null) {
if (this.objectData != null) {
try {
this.objectData.close();
} catch (final IOException ignore) {
}
}
this.objectData = new FileInputStream(file);
}
}
/**
* Set the object to send to PlayS3.
*
* @param inputStream The data to upload
* @since 16.03.13
*/
public void setObject(final InputStream inputStream) {
if (this.id == null) {
if (this.objectData != null) {
try {
this.objectData.close();
} catch (final IOException ignore) {
}
}
this.objectData = inputStream;
}
}
/**
* Get the public URL of this PlayS3 file.
*
* @return The public URL of this PlayS3 file
* @throws MalformedURLException If URL is malformed (check application.conf)
* @since 16.03.13
*/
public URL getUrl() throws MalformedURLException {
return new URL(PlayS3.getPublicUrl() + this.bucket + "/" + this.getActualFileName());
}
/**
* Get the public URL of this PlayS3 file as string.
*
* @return The public URL of this PlayS3 file, otherwise, null
* @since 16.03.13
*/
public String getUrlAsString() {
if (this.id == null) {
return null;
}
try {
return new URL(PlayS3.getPublicUrl() + this.bucket + "/" + this.getActualFileName()).toString();
} catch (final MalformedURLException e) {
return null;
}
}
/**
* Get the actual file name.
*
* @return The actual file name
* @since 16.03.13
*/
protected String getActualFileName() {
if (this.subDirectory == null || this.subDirectory.isEmpty()) {
return String.format("%s", this.id);
}
return String.format("%s/%s", this.subDirectory, this.id);
}
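// For example, with subDirectory = "avatars" (an illustrative value) and an id of
// 123e4567-e89b-12d3-a456-426614174000, the object key on the bucket becomes
// "avatars/123e4567-e89b-12d3-a456-426614174000"; without a subdirectory the key
// is just the id.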
/**
* Save the current object. The file will be uploaded to PlayS3 bucket.
*
* @since 16.03.13
*/
@Override
public void save() {
if (this.id == null) {
this.id = Generators.timeBasedGenerator().generate();
}
if (!PlayS3.isReady()) {
Logger.error("Could not save PlayS3 file because amazonS3 variable is null");
throw new RuntimeException("Could not save");
} else {
this.bucket = PlayS3.getBucketName();
if (this.subDirectory == null) {
this.subDirectory = "";
}
this.subDirectory = this.subDirectory.trim();
// Set cache control and server side encryption
final ObjectMetadata objMetaData = new ObjectMetadata();
objMetaData.setContentType(this.contentType);
objMetaData.setCacheControl("max-age=315360000, public");
objMetaData.setSSEAlgorithm(ObjectMetadata.AES_256_SERVER_SIDE_ENCRYPTION);
try {
objMetaData.setContentLength(this.objectData.available());
} catch (final IOException ex) {
Logger.warn("Can't retrieve stream available size", ex);
} finally {
try {
if (this.objectData.markSupported()) {
this.objectData.reset();
}
} catch (final IOException ex) {
Logger.error("Can't reset stream position", ex);
}
}
// Upload file to PlayS3
final PutObjectRequest putObjectRequest = new PutObjectRequest(this.bucket, this.getActualFileName(), this.objectData, objMetaData);
putObjectRequest.withCannedAcl(this.isPrivate ? CannedAccessControlList.Private : CannedAccessControlList.PublicRead);
PlayS3.getAmazonS3().putObject(putObjectRequest);
try {
if (this.objectData != null) {
this.objectData.close();
}
} catch (final IOException ignore) {
}
// Save object on database
super.save();
}
}
/**
* Delete the remote file.
*
* @since 16.03.13
*/
@PreRemove
public void deleteRemoteFile() {
if (!PlayS3.isReady()) {
Logger.error("Could not delete PlayS3 file because amazonS3 variable is null");
throw new RuntimeException("Could not delete");
} else {
try {
PlayS3.getAmazonS3().deleteObject(this.bucket, getActualFileName());
} catch (final AmazonS3Exception ex) {
Logger.warn("Something goes wrong with Amazon PlayS3", ex);
}
}
}
/**
* Get the file content. In case of error (network error, file not
* found, ...), this method will return null.
*
* @return The file content, otherwise, null
* @see InputStream
* @since 16.03.13
*/
public InputStream getFileContent() {
if (!PlayS3.isReady()) {
Logger.error("Could not get PlayS3 file content because amazonS3 variable is null");
throw new RuntimeException("Could not get file content");
}
final S3Object obj = PlayS3.getAmazonS3().getObject(this.bucket, getActualFileName());
if (obj != null) {
return obj.getObjectContent();
}
return null;
}
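/*
 * Minimal usage sketch, assuming a hypothetical Ebean entity subclass and a
 * local file path (neither is part of this module):
 *
 *   @Entity
 *   public class AvatarS3File extends BaseS3FileModel {
 *   }
 *
 *   AvatarS3File avatar = new AvatarS3File();
 *   avatar.setName("avatar.png");
 *   avatar.setContentType("image/png");
 *   avatar.setSubDirectory("avatars");
 *   avatar.setPrivate(false);
 *   avatar.setObject(new File("/tmp/avatar.png"));
 *   avatar.save(); // uploads the stream to the PlayS3 bucket, then persists the row
 */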
}
|
|
package org.usfirst.frc.team5567.robot;
import edu.wpi.first.wpilibj.GenericHID.Hand;
import edu.wpi.first.wpilibj.IterativeRobot;
import edu.wpi.first.wpilibj.smartdashboard.SendableChooser;
import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard;
import edu.wpi.first.wpilibj.SpeedController;
import edu.wpi.first.wpilibj.XboxController;
import edu.wpi.first.wpilibj.RobotDrive;
import edu.wpi.first.wpilibj.Victor;
import edu.wpi.first.wpilibj.RobotDrive.MotorType;
import edu.wpi.first.wpilibj.Timer;
import edu.wpi.first.wpilibj.Talon;
/**
* The VM is configured to automatically run this class, and to call the
* functions corresponding to each mode, as described in the IterativeRobot
* documentation. If you change the name of this class or the package after
* creating this project, you must also update the manifest file in the resource
* directory.
*/
public class Robot extends IterativeRobot {
final String defaultAuto = "Default";
final String customAuto = "My Auto";
String autoSelected;
SendableChooser<String> chooser = new SendableChooser<>();
//Driver options chooser
//TODO: Verify driver order
final String defaultDriver = "Parker";
final String driverTwo = "Taylor";
final String driverThree = "Michael";
String driverSelected;
SendableChooser<String> driverSelect = new SendableChooser<>();
//Setting drivetrain motor controllers to PWM Ports
int FrontLeft=0;
int FrontRight=1;
int RearLeft=2;
int RearRight=3;
//Setting the winch motor controller to a PWM port
int winchController = 4;
//Setting the shooter motor controller and the ball release motor controller to a PWM port
int shooterController = 5;
int ballReleaseController = 6;
//Setting Xbox controllers to USB ports
int pilotUSBPort = 0;
int copilotUSBPort = 1;
//Creating speed controllers for the drivetrain and assigning a new instance of the
//corresponding motor controller, initialized with the PWM channel to use
SpeedController FLController = new Talon(FrontLeft);
SpeedController FRController = new Victor(FrontRight);
SpeedController RLController = new Talon(RearLeft);
SpeedController RRController = new Talon(RearRight);
//Creating a winch & initializing with the PWM port set by winchController
Winch robotWinch = new Winch(winchController);
//Creating a shooter & initializing with the PWM ports set by shooterController & ballReleaseController
Shooter robotShooter = new Shooter(shooterController,ballReleaseController);
//Creating a new RobotDrive & initializing with the speed controllers
RobotDrive myRobotDrive = new RobotDrive(FLController,FRController,RLController,RRController);
//Creating a timer for elapsed time
Timer myTimer = new Timer();
//Initializing controllers to the correct USB ports
XboxController pilotController = new XboxController(pilotUSBPort);
XboxController copilotController = new XboxController(copilotUSBPort);
//Deadzone threshold for controller
//Prevents unintentional drifting
double threshold = 0.1;
/**
* This function is run when the robot is first started up and should be
* used for any initialization code.
*/
@Override
public void robotInit() {
chooser.addDefault("Default Auto", defaultAuto);
chooser.addObject("My Auto", customAuto);
SmartDashboard.putData("Auto choices", chooser);
//Creates driver selection on dashboard
driverSelect.addDefault("Parker", defaultDriver);
driverSelect.addObject("Taylor", driverTwo);
driverSelect.addObject("Michael", driverThree);
SmartDashboard.putData("Driver Selection", driverSelect);
myRobotDrive.setInvertedMotor(MotorType.kFrontLeft,true);
myRobotDrive.setInvertedMotor(MotorType.kRearLeft,true);
}
/**
* This autonomous (along with the chooser code above) shows how to select
* between different autonomous modes using the dashboard. The sendable
* chooser code works with the Java SmartDashboard. If you prefer the
* LabVIEW Dashboard, remove all of the chooser code and uncomment the
* getString line to get the auto name from the text box below the Gyro
*
* You can add additional auto modes by adding additional comparisons to the
* switch structure below with additional strings. If using the
* SendableChooser make sure to add them to the chooser code above as well.
*/
@Override
public void autonomousInit() {
autoSelected = chooser.getSelected();
// autoSelected = SmartDashboard.getString("Auto Selector",
// defaultAuto);
System.out.println("Auto selected: " + autoSelected);
this.myTimer.reset();
this.myTimer.start();
}
/**
* This function is called periodically during autonomous
*/
@Override
public void autonomousPeriodic() {
switch (autoSelected) {
case customAuto:
// Put custom auto code here
break;
case defaultAuto:
default:
// Put default auto code here
if (myTimer.get() <= 5.00){
myRobotDrive.mecanumDrive_Cartesian(0, 0.5, 0, 0);
}
else {
myRobotDrive.mecanumDrive_Cartesian(0, 0, 0, 0);
}
break;
}
}
@Override
public void teleopInit(){
driverSelected = driverSelect.getSelected();
System.out.println("Driver selected: " + driverSelected);
}
/**
* This function is called periodically during operator control
*/
@Override
public void teleopPeriodic() {
double XLIn = this.pilotController.getX(Hand.kLeft);
double YLIn = this.pilotController.getY(Hand.kLeft);
double LtIn = this.pilotController.getTriggerAxis(Hand.kLeft);
double RtIn = this.pilotController.getTriggerAxis(Hand.kRight);
double XLIn2 = this.pilotController.getX(Hand.kRight);
double TrigDiff = (RtIn-LtIn);
double Rotation = 0.00;
if (XLIn < threshold && XLIn > -threshold){
XLIn = 0;
}
if (YLIn < threshold && YLIn > -threshold){
YLIn = 0;
}
if (XLIn2 < threshold && XLIn2 > -threshold){
XLIn2 = 0;
}
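// For example, with threshold = 0.1 a stick resting at 0.05 is treated as 0 so the
// robot does not creep, while a deliberate input of 0.3 passes through unchanged.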
// TODO: Rotation selection--Port options to dashboard?
// **Could use button to change rotation input
if (Math.abs(XLIn2) > Math.abs(TrigDiff)){
Rotation = XLIn2;
}
else {
Rotation = TrigDiff;
}
myRobotDrive.mecanumDrive_Cartesian(XLIn, YLIn, Rotation, 0);
double BLtIn = this.copilotController.getTriggerAxis(Hand.kLeft);
// double BRtIn = this.copilotController.getTriggerAxis(Hand.kRight);
boolean BRbIn = this.copilotController.getBumper(Hand.kRight);
this.robotWinch.setWinchSpeed(BLtIn);
this.robotShooter.turnShooterOn(BRbIn);
}
/**
* This function is called periodically during test mode
*/
@Override
public void testPeriodic() {
}
}
|
|
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.engine.impl;
import java.util.Map;
import org.camunda.bpm.engine.AuthorizationService;
import org.camunda.bpm.engine.CaseService;
import org.camunda.bpm.engine.DecisionService;
import org.camunda.bpm.engine.ExternalTaskService;
import org.camunda.bpm.engine.FilterService;
import org.camunda.bpm.engine.FormService;
import org.camunda.bpm.engine.HistoryService;
import org.camunda.bpm.engine.IdentityService;
import org.camunda.bpm.engine.ManagementService;
import org.camunda.bpm.engine.ProcessEngine;
import org.camunda.bpm.engine.ProcessEngines;
import org.camunda.bpm.engine.RepositoryService;
import org.camunda.bpm.engine.RuntimeService;
import org.camunda.bpm.engine.TaskService;
import org.camunda.bpm.engine.impl.cfg.ProcessEngineConfigurationImpl;
import org.camunda.bpm.engine.impl.cfg.TransactionContextFactory;
import org.camunda.bpm.engine.impl.el.ExpressionManager;
import org.camunda.bpm.engine.impl.history.HistoryLevel;
import org.camunda.bpm.engine.impl.interceptor.CommandExecutor;
import org.camunda.bpm.engine.impl.interceptor.SessionFactory;
import org.camunda.bpm.engine.impl.jobexecutor.JobExecutor;
import org.camunda.bpm.engine.impl.metrics.reporter.DbMetricsReporter;
/**
* @author Tom Baeyens
*/
public class ProcessEngineImpl implements ProcessEngine {
private final static ProcessEngineLogger LOG = ProcessEngineLogger.INSTANCE;
protected String name;
protected RepositoryService repositoryService;
protected RuntimeService runtimeService;
protected HistoryService historicDataService;
protected IdentityService identityService;
protected TaskService taskService;
protected FormService formService;
protected ManagementService managementService;
protected AuthorizationService authorizationService;
protected CaseService caseService;
protected FilterService filterService;
protected ExternalTaskService externalTaskService;
protected DecisionService decisionService;
protected String databaseSchemaUpdate;
protected JobExecutor jobExecutor;
protected CommandExecutor commandExecutor;
protected CommandExecutor commandExecutorSchemaOperations;
protected Map<Class<?>, SessionFactory> sessionFactories;
protected ExpressionManager expressionManager;
protected HistoryLevel historyLevel;
protected TransactionContextFactory transactionContextFactory;
protected ProcessEngineConfigurationImpl processEngineConfiguration;
public ProcessEngineImpl(ProcessEngineConfigurationImpl processEngineConfiguration) {
this.processEngineConfiguration = processEngineConfiguration;
this.name = processEngineConfiguration.getProcessEngineName();
this.repositoryService = processEngineConfiguration.getRepositoryService();
this.runtimeService = processEngineConfiguration.getRuntimeService();
this.historicDataService = processEngineConfiguration.getHistoryService();
this.identityService = processEngineConfiguration.getIdentityService();
this.taskService = processEngineConfiguration.getTaskService();
this.formService = processEngineConfiguration.getFormService();
this.managementService = processEngineConfiguration.getManagementService();
this.authorizationService = processEngineConfiguration.getAuthorizationService();
this.caseService = processEngineConfiguration.getCaseService();
this.filterService = processEngineConfiguration.getFilterService();
this.externalTaskService = processEngineConfiguration.getExternalTaskService();
this.decisionService = processEngineConfiguration.getDecisionService();
this.databaseSchemaUpdate = processEngineConfiguration.getDatabaseSchemaUpdate();
this.jobExecutor = processEngineConfiguration.getJobExecutor();
this.commandExecutor = processEngineConfiguration.getCommandExecutorTxRequired();
commandExecutorSchemaOperations = processEngineConfiguration.getCommandExecutorSchemaOperations();
this.sessionFactories = processEngineConfiguration.getSessionFactories();
this.historyLevel = processEngineConfiguration.getHistoryLevel();
this.transactionContextFactory = processEngineConfiguration.getTransactionContextFactory();
executeSchemaOperations();
if (name == null) {
LOG.processEngineCreated(ProcessEngines.NAME_DEFAULT);
} else {
LOG.processEngineCreated(name);
}
ProcessEngines.registerProcessEngine(this);
if ((jobExecutor != null)) {
// register process engine with Job Executor
jobExecutor.registerProcessEngine(this);
}
if (processEngineConfiguration.isMetricsEnabled()) {
String reporterId = processEngineConfiguration.getMetricsReporterIdProvider().provideId(this);
DbMetricsReporter dbMetricsReporter = processEngineConfiguration.getDbMetricsReporter();
dbMetricsReporter.setReporterId(reporterId);
if(processEngineConfiguration.isDbMetricsReporterActivate()) {
dbMetricsReporter.start();
}
}
}
protected void executeSchemaOperations() {
commandExecutorSchemaOperations.execute(new SchemaOperationsProcessEngineBuild());
}
public void close() {
ProcessEngines.unregister(this);
if(processEngineConfiguration.isMetricsEnabled()) {
processEngineConfiguration.getDbMetricsReporter().stop();
}
if ((jobExecutor != null)) {
// unregister process engine with Job Executor
jobExecutor.unregisterProcessEngine(this);
}
commandExecutorSchemaOperations.execute(new SchemaOperationProcessEngineClose());
processEngineConfiguration.close();
LOG.processEngineClosed(name);
}
// getters and setters //////////////////////////////////////////////////////
public String getName() {
return name;
}
public IdentityService getIdentityService() {
return identityService;
}
public ManagementService getManagementService() {
return managementService;
}
public TaskService getTaskService() {
return taskService;
}
public HistoryService getHistoryService() {
return historicDataService;
}
public RuntimeService getRuntimeService() {
return runtimeService;
}
public RepositoryService getRepositoryService() {
return repositoryService;
}
public FormService getFormService() {
return formService;
}
public AuthorizationService getAuthorizationService() {
return authorizationService;
}
public CaseService getCaseService() {
return caseService;
}
public FilterService getFilterService() {
return filterService;
}
public ExternalTaskService getExternalTaskService() {
return externalTaskService;
}
public DecisionService getDecisionService() {
return decisionService;
}
public ProcessEngineConfigurationImpl getProcessEngineConfiguration() {
return processEngineConfiguration;
}
}
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.portmap;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.hadoop.oncrpc.RpcAcceptedReply;
import org.apache.hadoop.oncrpc.RpcCall;
import org.apache.hadoop.oncrpc.RpcInfo;
import org.apache.hadoop.oncrpc.RpcProgram;
import org.apache.hadoop.oncrpc.RpcResponse;
import org.apache.hadoop.oncrpc.RpcUtil;
import org.apache.hadoop.oncrpc.XDR;
import org.apache.hadoop.oncrpc.security.VerifierNone;
import org.jboss.netty.buffer.ChannelBuffer;
import org.jboss.netty.buffer.ChannelBuffers;
import org.jboss.netty.channel.ChannelHandlerContext;
import org.jboss.netty.channel.ChannelStateEvent;
import org.jboss.netty.channel.ExceptionEvent;
import org.jboss.netty.channel.MessageEvent;
import org.jboss.netty.channel.group.ChannelGroup;
import org.jboss.netty.handler.timeout.IdleState;
import org.jboss.netty.handler.timeout.IdleStateAwareChannelUpstreamHandler;
import org.jboss.netty.handler.timeout.IdleStateEvent;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
final class RpcProgramPortmap extends IdleStateAwareChannelUpstreamHandler {
static final int PROGRAM = 100000;
static final int VERSION = 2;
static final int PMAPPROC_NULL = 0;
static final int PMAPPROC_SET = 1;
static final int PMAPPROC_UNSET = 2;
static final int PMAPPROC_GETPORT = 3;
static final int PMAPPROC_DUMP = 4;
static final int PMAPPROC_GETVERSADDR = 9;
private static final Logger LOG =
LoggerFactory.getLogger(RpcProgramPortmap.class);
private final ConcurrentHashMap<String, PortmapMapping> map = new ConcurrentHashMap<String, PortmapMapping>();
/** ChannelGroup that remembers all active channels for graceful shutdown. */
private final ChannelGroup allChannels;
RpcProgramPortmap(ChannelGroup allChannels) {
this.allChannels = allChannels;
PortmapMapping m = new PortmapMapping(PROGRAM, VERSION,
PortmapMapping.TRANSPORT_TCP, RpcProgram.RPCB_PORT);
PortmapMapping m1 = new PortmapMapping(PROGRAM, VERSION,
PortmapMapping.TRANSPORT_UDP, RpcProgram.RPCB_PORT);
map.put(PortmapMapping.key(m), m);
map.put(PortmapMapping.key(m1), m1);
}
/**
* This procedure does no work. By convention, procedure zero of any protocol
* takes no parameters and returns no results.
*/
private XDR nullOp(int xid, XDR in, XDR out) {
return PortmapResponse.voidReply(out, xid);
}
/**
* When a program first becomes available on a machine, it registers itself
* with the port mapper program on the same machine. The program passes its
* program number "prog", version number "vers", transport protocol number
* "prot", and the port "port" on which it awaits service request. The
* procedure returns a boolean reply whose value is "TRUE" if the procedure
* successfully established the mapping and "FALSE" otherwise. The procedure
* refuses to establish a mapping if one already exists for the tuple
* "(prog, vers, prot)".
*/
private XDR set(int xid, XDR in, XDR out) {
PortmapMapping mapping = PortmapRequest.mapping(in);
String key = PortmapMapping.key(mapping);
if (LOG.isDebugEnabled()) {
LOG.debug("Portmap set key=" + key);
}
map.put(key, mapping);
return PortmapResponse.intReply(out, xid, mapping.getPort());
}
/**
* When a program becomes unavailable, it should unregister itself with the
* port mapper program on the same machine. The parameters and results have
* meanings identical to those of "PMAPPROC_SET". The protocol and port number
* fields of the argument are ignored.
*/
private XDR unset(int xid, XDR in, XDR out) {
PortmapMapping mapping = PortmapRequest.mapping(in);
String key = PortmapMapping.key(mapping);
if (LOG.isDebugEnabled())
LOG.debug("Portmap remove key=" + key);
map.remove(key);
return PortmapResponse.booleanReply(out, xid, true);
}
/**
* Given a program number "prog", version number "vers", and transport
* protocol number "prot", this procedure returns the port number on which the
* program is awaiting call requests. A port value of zeros means the program
* has not been registered. The "port" field of the argument is ignored.
*/
private XDR getport(int xid, XDR in, XDR out) {
PortmapMapping mapping = PortmapRequest.mapping(in);
String key = PortmapMapping.key(mapping);
if (LOG.isDebugEnabled()) {
LOG.debug("Portmap GETPORT key=" + key + " " + mapping);
}
PortmapMapping value = map.get(key);
int res = 0;
if (value != null) {
res = value.getPort();
if (LOG.isDebugEnabled()) {
LOG.debug("Found mapping for key: " + key + " port:" + res);
}
} else {
LOG.warn("Warning, no mapping for key: " + key);
}
return PortmapResponse.intReply(out, xid, res);
}
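// For example, once an NFS server has registered (prog = 100003, vers = 3,
// prot = tcp, port = 2049) via PMAPPROC_SET, a later PMAPPROC_GETPORT for the same
// (prog, vers, prot) tuple returns 2049, while an unregistered tuple returns 0.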
/**
* This procedure enumerates all entries in the port mapper's database. The
* procedure takes no parameters and returns a list of program, version,
* protocol, and port values.
*/
private XDR dump(int xid, XDR in, XDR out) {
PortmapMapping[] pmapList = map.values().toArray(new PortmapMapping[0]);
return PortmapResponse.pmapList(out, xid, pmapList);
}
@Override
public void messageReceived(ChannelHandlerContext ctx, MessageEvent e)
throws Exception {
RpcInfo info = (RpcInfo) e.getMessage();
RpcCall rpcCall = (RpcCall) info.header();
final int portmapProc = rpcCall.getProcedure();
int xid = rpcCall.getXid();
XDR in = new XDR(info.data().toByteBuffer().asReadOnlyBuffer(),
XDR.State.READING);
XDR out = new XDR();
if (portmapProc == PMAPPROC_NULL) {
out = nullOp(xid, in, out);
} else if (portmapProc == PMAPPROC_SET) {
out = set(xid, in, out);
} else if (portmapProc == PMAPPROC_UNSET) {
out = unset(xid, in, out);
} else if (portmapProc == PMAPPROC_DUMP) {
out = dump(xid, in, out);
} else if (portmapProc == PMAPPROC_GETPORT) {
out = getport(xid, in, out);
} else if (portmapProc == PMAPPROC_GETVERSADDR) {
out = getport(xid, in, out);
} else {
LOG.info("PortmapHandler unknown rpc procedure=" + portmapProc);
RpcAcceptedReply reply = RpcAcceptedReply.getInstance(xid,
RpcAcceptedReply.AcceptState.PROC_UNAVAIL, new VerifierNone());
reply.write(out);
}
ChannelBuffer buf = ChannelBuffers.wrappedBuffer(out.asReadOnlyWrap()
.buffer());
RpcResponse rsp = new RpcResponse(buf, info.remoteAddress());
RpcUtil.sendRpcResponse(ctx, rsp);
}
@Override
public void channelOpen(ChannelHandlerContext ctx, ChannelStateEvent e)
throws Exception {
allChannels.add(e.getChannel());
}
@Override
public void channelIdle(ChannelHandlerContext ctx, IdleStateEvent e)
throws Exception {
if (e.getState() == IdleState.ALL_IDLE) {
e.getChannel().close();
}
}
@Override
public void exceptionCaught(ChannelHandlerContext ctx, ExceptionEvent e) {
LOG.warn("Encountered ", e.getCause());
e.getChannel().close();
}
}
|
|
/*
* Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.apigateway.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
* <p>
* A resource that can be distributed to callers for executing <a>Method</a> resources that require an API key. API keys
* can be mapped to any <a>Stage</a> on any <a>RestApi</a>, which indicates that the callers with the API key can make
* requests to that stage.
* </p>
* <div class="seeAlso"> <a href="http://docs.aws.amazon.com/apigateway/latest/developerguide/how-to-api-keys.html">Use
* API Keys</a> </div>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class CreateApiKeyResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {
/**
* <p>
* The identifier of the API Key.
* </p>
*/
private String id;
/**
* <p>
* The value of the API Key.
* </p>
*/
private String value;
/**
* <p>
* The name of the API Key.
* </p>
*/
private String name;
/**
* <p>
     * An AWS Marketplace customer identifier, when integrating with the AWS SaaS Marketplace.
* </p>
*/
private String customerId;
/**
* <p>
* The description of the API Key.
* </p>
*/
private String description;
/**
* <p>
* Specifies whether the API Key can be used by callers.
* </p>
*/
private Boolean enabled;
/**
* <p>
* The timestamp when the API Key was created.
* </p>
*/
private java.util.Date createdDate;
/**
* <p>
* The timestamp when the API Key was last updated.
* </p>
*/
private java.util.Date lastUpdatedDate;
/**
* <p>
* A list of <a>Stage</a> resources that are associated with the <a>ApiKey</a> resource.
* </p>
*/
private java.util.List<String> stageKeys;
/**
* <p>
* The identifier of the API Key.
* </p>
*
* @param id
* The identifier of the API Key.
*/
public void setId(String id) {
this.id = id;
}
/**
* <p>
* The identifier of the API Key.
* </p>
*
* @return The identifier of the API Key.
*/
public String getId() {
return this.id;
}
/**
* <p>
* The identifier of the API Key.
* </p>
*
* @param id
* The identifier of the API Key.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CreateApiKeyResult withId(String id) {
setId(id);
return this;
}
/**
* <p>
* The value of the API Key.
* </p>
*
* @param value
* The value of the API Key.
*/
public void setValue(String value) {
this.value = value;
}
/**
* <p>
* The value of the API Key.
* </p>
*
* @return The value of the API Key.
*/
public String getValue() {
return this.value;
}
/**
* <p>
* The value of the API Key.
* </p>
*
* @param value
* The value of the API Key.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CreateApiKeyResult withValue(String value) {
setValue(value);
return this;
}
/**
* <p>
* The name of the API Key.
* </p>
*
* @param name
* The name of the API Key.
*/
public void setName(String name) {
this.name = name;
}
/**
* <p>
* The name of the API Key.
* </p>
*
* @return The name of the API Key.
*/
public String getName() {
return this.name;
}
/**
* <p>
* The name of the API Key.
* </p>
*
* @param name
* The name of the API Key.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CreateApiKeyResult withName(String name) {
setName(name);
return this;
}
/**
* <p>
     * An AWS Marketplace customer identifier, when integrating with the AWS SaaS Marketplace.
* </p>
*
* @param customerId
     *        An AWS Marketplace customer identifier, when integrating with the AWS SaaS Marketplace.
*/
public void setCustomerId(String customerId) {
this.customerId = customerId;
}
/**
* <p>
     * An AWS Marketplace customer identifier, when integrating with the AWS SaaS Marketplace.
* </p>
*
     * @return An AWS Marketplace customer identifier, when integrating with the AWS SaaS Marketplace.
*/
public String getCustomerId() {
return this.customerId;
}
/**
* <p>
     * An AWS Marketplace customer identifier, when integrating with the AWS SaaS Marketplace.
* </p>
*
* @param customerId
     *        An AWS Marketplace customer identifier, when integrating with the AWS SaaS Marketplace.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CreateApiKeyResult withCustomerId(String customerId) {
setCustomerId(customerId);
return this;
}
/**
* <p>
* The description of the API Key.
* </p>
*
* @param description
* The description of the API Key.
*/
public void setDescription(String description) {
this.description = description;
}
/**
* <p>
* The description of the API Key.
* </p>
*
* @return The description of the API Key.
*/
public String getDescription() {
return this.description;
}
/**
* <p>
* The description of the API Key.
* </p>
*
* @param description
* The description of the API Key.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CreateApiKeyResult withDescription(String description) {
setDescription(description);
return this;
}
/**
* <p>
* Specifies whether the API Key can be used by callers.
* </p>
*
* @param enabled
* Specifies whether the API Key can be used by callers.
*/
public void setEnabled(Boolean enabled) {
this.enabled = enabled;
}
/**
* <p>
* Specifies whether the API Key can be used by callers.
* </p>
*
* @return Specifies whether the API Key can be used by callers.
*/
public Boolean getEnabled() {
return this.enabled;
}
/**
* <p>
* Specifies whether the API Key can be used by callers.
* </p>
*
* @param enabled
* Specifies whether the API Key can be used by callers.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CreateApiKeyResult withEnabled(Boolean enabled) {
setEnabled(enabled);
return this;
}
/**
* <p>
* Specifies whether the API Key can be used by callers.
* </p>
*
* @return Specifies whether the API Key can be used by callers.
*/
public Boolean isEnabled() {
return this.enabled;
}
/**
* <p>
* The timestamp when the API Key was created.
* </p>
*
* @param createdDate
* The timestamp when the API Key was created.
*/
public void setCreatedDate(java.util.Date createdDate) {
this.createdDate = createdDate;
}
/**
* <p>
* The timestamp when the API Key was created.
* </p>
*
* @return The timestamp when the API Key was created.
*/
public java.util.Date getCreatedDate() {
return this.createdDate;
}
/**
* <p>
* The timestamp when the API Key was created.
* </p>
*
* @param createdDate
* The timestamp when the API Key was created.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CreateApiKeyResult withCreatedDate(java.util.Date createdDate) {
setCreatedDate(createdDate);
return this;
}
/**
* <p>
* The timestamp when the API Key was last updated.
* </p>
*
* @param lastUpdatedDate
* The timestamp when the API Key was last updated.
*/
public void setLastUpdatedDate(java.util.Date lastUpdatedDate) {
this.lastUpdatedDate = lastUpdatedDate;
}
/**
* <p>
* The timestamp when the API Key was last updated.
* </p>
*
* @return The timestamp when the API Key was last updated.
*/
public java.util.Date getLastUpdatedDate() {
return this.lastUpdatedDate;
}
/**
* <p>
* The timestamp when the API Key was last updated.
* </p>
*
* @param lastUpdatedDate
* The timestamp when the API Key was last updated.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CreateApiKeyResult withLastUpdatedDate(java.util.Date lastUpdatedDate) {
setLastUpdatedDate(lastUpdatedDate);
return this;
}
/**
* <p>
* A list of <a>Stage</a> resources that are associated with the <a>ApiKey</a> resource.
* </p>
*
* @return A list of <a>Stage</a> resources that are associated with the <a>ApiKey</a> resource.
*/
public java.util.List<String> getStageKeys() {
return stageKeys;
}
/**
* <p>
* A list of <a>Stage</a> resources that are associated with the <a>ApiKey</a> resource.
* </p>
*
* @param stageKeys
* A list of <a>Stage</a> resources that are associated with the <a>ApiKey</a> resource.
*/
public void setStageKeys(java.util.Collection<String> stageKeys) {
if (stageKeys == null) {
this.stageKeys = null;
return;
}
this.stageKeys = new java.util.ArrayList<String>(stageKeys);
}
/**
* <p>
* A list of <a>Stage</a> resources that are associated with the <a>ApiKey</a> resource.
* </p>
* <p>
* <b>NOTE:</b> This method appends the values to the existing list (if any). Use
* {@link #setStageKeys(java.util.Collection)} or {@link #withStageKeys(java.util.Collection)} if you want to
* override the existing values.
* </p>
*
* @param stageKeys
* A list of <a>Stage</a> resources that are associated with the <a>ApiKey</a> resource.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CreateApiKeyResult withStageKeys(String... stageKeys) {
if (this.stageKeys == null) {
setStageKeys(new java.util.ArrayList<String>(stageKeys.length));
}
for (String ele : stageKeys) {
this.stageKeys.add(ele);
}
return this;
}
/**
* <p>
* A list of <a>Stage</a> resources that are associated with the <a>ApiKey</a> resource.
* </p>
*
* @param stageKeys
* A list of <a>Stage</a> resources that are associated with the <a>ApiKey</a> resource.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CreateApiKeyResult withStageKeys(java.util.Collection<String> stageKeys) {
setStageKeys(stageKeys);
return this;
}
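    // Hedged usage sketch (not part of the generated SDK model): illustrates that the varargs
    // withStageKeys(...) appends to any existing list, while setStageKeys(...) replaces it.
    // The stage-key strings below are made-up placeholders.
    private static CreateApiKeyResult stageKeysUsageSketch() {
        CreateApiKeyResult result = new CreateApiKeyResult()
                .withStageKeys("myRestApiId/dev");                          // list now holds one key
        result.withStageKeys("myRestApiId/test");                           // appends; list now holds two keys
        result.setStageKeys(java.util.Arrays.asList("myRestApiId/prod"));   // replaces the whole list
        return result;
    }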
/**
* Returns a string representation of this object; useful for testing and debugging.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getId() != null)
sb.append("Id: ").append(getId()).append(",");
if (getValue() != null)
sb.append("Value: ").append(getValue()).append(",");
if (getName() != null)
sb.append("Name: ").append(getName()).append(",");
if (getCustomerId() != null)
sb.append("CustomerId: ").append(getCustomerId()).append(",");
if (getDescription() != null)
sb.append("Description: ").append(getDescription()).append(",");
if (getEnabled() != null)
sb.append("Enabled: ").append(getEnabled()).append(",");
if (getCreatedDate() != null)
sb.append("CreatedDate: ").append(getCreatedDate()).append(",");
if (getLastUpdatedDate() != null)
sb.append("LastUpdatedDate: ").append(getLastUpdatedDate()).append(",");
if (getStageKeys() != null)
sb.append("StageKeys: ").append(getStageKeys());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof CreateApiKeyResult == false)
return false;
CreateApiKeyResult other = (CreateApiKeyResult) obj;
if (other.getId() == null ^ this.getId() == null)
return false;
if (other.getId() != null && other.getId().equals(this.getId()) == false)
return false;
if (other.getValue() == null ^ this.getValue() == null)
return false;
if (other.getValue() != null && other.getValue().equals(this.getValue()) == false)
return false;
if (other.getName() == null ^ this.getName() == null)
return false;
if (other.getName() != null && other.getName().equals(this.getName()) == false)
return false;
if (other.getCustomerId() == null ^ this.getCustomerId() == null)
return false;
if (other.getCustomerId() != null && other.getCustomerId().equals(this.getCustomerId()) == false)
return false;
if (other.getDescription() == null ^ this.getDescription() == null)
return false;
if (other.getDescription() != null && other.getDescription().equals(this.getDescription()) == false)
return false;
if (other.getEnabled() == null ^ this.getEnabled() == null)
return false;
if (other.getEnabled() != null && other.getEnabled().equals(this.getEnabled()) == false)
return false;
if (other.getCreatedDate() == null ^ this.getCreatedDate() == null)
return false;
if (other.getCreatedDate() != null && other.getCreatedDate().equals(this.getCreatedDate()) == false)
return false;
if (other.getLastUpdatedDate() == null ^ this.getLastUpdatedDate() == null)
return false;
if (other.getLastUpdatedDate() != null && other.getLastUpdatedDate().equals(this.getLastUpdatedDate()) == false)
return false;
if (other.getStageKeys() == null ^ this.getStageKeys() == null)
return false;
if (other.getStageKeys() != null && other.getStageKeys().equals(this.getStageKeys()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getId() == null) ? 0 : getId().hashCode());
hashCode = prime * hashCode + ((getValue() == null) ? 0 : getValue().hashCode());
hashCode = prime * hashCode + ((getName() == null) ? 0 : getName().hashCode());
hashCode = prime * hashCode + ((getCustomerId() == null) ? 0 : getCustomerId().hashCode());
hashCode = prime * hashCode + ((getDescription() == null) ? 0 : getDescription().hashCode());
hashCode = prime * hashCode + ((getEnabled() == null) ? 0 : getEnabled().hashCode());
hashCode = prime * hashCode + ((getCreatedDate() == null) ? 0 : getCreatedDate().hashCode());
hashCode = prime * hashCode + ((getLastUpdatedDate() == null) ? 0 : getLastUpdatedDate().hashCode());
hashCode = prime * hashCode + ((getStageKeys() == null) ? 0 : getStageKeys().hashCode());
return hashCode;
}
@Override
public CreateApiKeyResult clone() {
try {
return (CreateApiKeyResult) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
}
}
}
|
|
/*
* Copyright 2018 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.workbench.screens.scenariosimulation.client.resources.i18n;
import com.google.gwt.core.client.GWT;
import com.google.gwt.i18n.client.Messages;
/**
* ScenarioSimulationEditor I18N constants
*/
public interface ScenarioSimulationEditorConstants
extends
Messages {
ScenarioSimulationEditorConstants INSTANCE = GWT.create(ScenarioSimulationEditorConstants.class);
String newScenarioSimulationDescription();
String factColumnHeader();
String fieldColumnHeader();
String contextColumnHeader();
String addScenarioSimulation();
String scenarioSimulationEditorTitle();
String scenarioSimulationResourceTypeDescription();
String remove();
String currentlyNotAvailable();
String invalidDefinitionDisabled();
String testTools();
String testReport();
String testEditor();
String testToolsAddButton();
String testToolsAddButtonLabel();
String scenarioCheatSheet();
String ruleCheatSheet1();
String ruleCheatSheet2();
String ruleCheatSheet3();
String ruleCheatSheet4();
String ruleCheatSheet5();
String ruleCheatSheet6();
String ruleCheatSheet7();
String ruleCheatSheet8();
String ruleCheatSheet9();
String ruleCheatSheet10();
String ruleCheatSheet11();
String ruleCheatSheet12();
String ruleCheatSheet13();
String ruleCheatSheet14();
String ruleCheatSheet15();
String ruleCheatSheet16();
String ruleCheatSheet17();
String ruleCheatSheet18();
String ruleCheatSheet19();
String or();
String ruleCheatSheet20();
String ruleCheatSheet21();
String ruleCheatSheet22();
String ruleCheatSheet23();
String dmnCheatSheet1();
String dmnCheatSheet2();
String dmnCheatSheet3();
String dmnCheatSheet4();
String dmnCheatSheet5();
String dmnCheatSheet6();
String dmnCheatSheet7();
String and();
String dmnCheatSheet8();
String dmnCheatSheet9();
String dmnCheatSheet10();
String dmnCheatSheet11();
String dmnCheatSheet12();
String dmnCheatSheet13();
String dmnCheatSheet14();
String dmnCheatSheet15();
String dmnCheatSheet16();
String dmnCheatSheet17();
String dmnCheatSheet18();
String dmnCheatSheet19();
String dmnCheatSheet20();
String dmnCheatSheet21();
String forExample();
String runScenarioSimulation();
String expect();
String insertColumnLeft();
String insertColumnRight();
String deleteColumn();
String deleteInstance();
String duplicateInstance();
String insertRowBelow();
String scenario();
String background();
String given();
String insertRowAbove();
String deleteRow();
String duplicateRow();
String runSingleScenario();
String prependRow();
String appendRow();
String insertLeftmostColumn();
String insertRightmostColumn();
String description();
String insertValue();
String deleteValues();
String dateFormatPlaceholder();
String dateTimeFormatPlaceholder();
String timeFormatPlaceholder();
String dmnDateFormatPlaceholder();
String deleteScenarioMainTitle();
String deleteScenarioMainQuestion();
String deleteScenarioText1();
String deleteScenarioTextQuestion();
String deleteScenarioTextDanger();
String preserveDeleteScenarioMainTitle();
String preserveDeleteScenarioMainQuestion();
String preserveDeleteScenarioText1();
String preserveDeleteScenarioTextQuestion();
String preserveDeleteScenarioTextOption1();
String preserveDeleteScenarioTextOption2();
String preserveValues();
String defineValidType();
String insertExpression();
String changeType();
String changeTypeMainTitle();
String changeTypeMainQuestion();
String changeTypeText1();
String changeTypeTextQuestion();
String changeTypeTextDanger();
String undo();
String redo();
String sourceType();
String chooseDMN();
String chooseValidDMNAsset();
String removeCollectionMainTitle();
String removeCollectionMainQuestion();
String removeCollectionText1();
String removeCollectionQuestion();
String removeCollectionWarningText();
String collectionError();
String selectImportFile();
String importLabel();
String settings();
String name();
String type();
String dmnPathErrorLabel(String dmnPath);
String dmnPathErrorDetailedLabel(String dmnPath, String message);
String missingSelectedType();
String missingDmnPath();
String coverageReport();
String uploadWarning();
String chooseFile();
String importFailedMessage();
String importErrorTitle();
String executed();
String notCovered();
String decisionsEvaluated();
String rulesFired();
String coverageNotSupportedForRule();
String runATestToSeeCoverageReport();
String running();
String simpleTypes();
String complexTypes();
String dataObjectInstances();
String complexCustomInstances();
String simpleCustomInstances();
String reportAvailableLabel();
String reportExecutedLabel();
String reportCoverageLabel();
String numberOfTimesDecisionEvaluated();
String reportAvailableRuleLabel();
String reportExecutedRuleLabel();
String reportCoverageRuleLabel();
String numberOfTimesRulesFired();
String errorReason();
String keep();
String close();
String apply();
String errorPopoverMessageFailedWithError(String expectedValue, String errorValue);
String errorPopoverMessageFailedWithException(String errorMsg);
String errorPopoverGenericCollectionErrorMessage();
String errorPopoverCollectionHTMLFailureMessage(String failureMessage);
String errorPopoverCollectionHTMLField(String field);
String errorPopoverCollectionHTMLValue(String value);
String skipSimulation();
String downloadReportLabel();
String noDecisionsAvailable();
String noRulesAvailable();
String headerTitleEmptyError();
String instanceTitleAssignedError(String title);
String instanceTitleWithPeriodsError();
String propertyTitleAssignedError(String title);
String propertyTitleWithPeriodsError();
String validationErrorTitle();
String validationErrorMessage();
String validationFailedNotification();
String validationSucceed();
String loadContentFailedNotification();
String backgroundTabTitle();
String export();
String backgroundErrorNotification();
String ruleScenarioNotSupportedNotification();
String testToolsDescription();
String testToolObjectSelectionTitle();
String testToolObjectSelectionTooltip();
String testToolClearSelection();
String commonCheatSheetBackground(String background, String model);
String createButton();
String saveButton();
String removeButton();
String cancelButton();
String item();
String addNewListItem();
String addNewMapItem();
String createLabelList();
String createLabelMap();
String collectionListCreation();
String collectionMapCreation();
String createLabelListDescription();
String defineLabelList();
String defineLabelMap();
String defineLabelListDescription();
String createLabelMapDescription();
String defineLabelMapDescription();
String scenarioValidationNodeChangedError(String oldType, String newType);
String scenarioValidationFieldChangedError(String oldType, String newType);
String scenarioValidationFieldAddedConstraintError();
String scenarioValidationFieldRemovedConstraintError();
}
|
|
package sgraph;
import com.jogamp.opengl.GL3;
import org.joml.Matrix4f;
import java.awt.*;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Stack;
import org.joml.Vector4f;
import raytrace.HitRecord;
import raytrace.Ray3D;
import util.Material;
/**
* This node represents the leaf of a scene graph. It is the only type of node that has
* actual geometry to render.
* @author Amit Shesh
*/
public class LeafNode extends AbstractNode
{
/**
* The name of the object instance that this leaf contains. All object instances are stored
* in the scene graph itself, so that an instance can be reused in several leaves
*/
protected String objInstanceName;
/**
* The material associated with the object instance at this leaf
*/
protected util.Material material;
protected String textureName;
public LeafNode(String instanceOf,IScenegraph graph,String name)
{
super(graph,name);
this.objInstanceName = instanceOf;
}
    /**
     * Set the material of each vertex in this object
     */
@Override
public void setMaterial(util.Material mat)
{
material = new util.Material(mat);
}
/**
* Set texture ID of the texture to be used for this leaf
* @param name
*/
@Override
public void setTextureName(String name)
{
textureName = name;
}
    /**
     * Gets the material
     */
public util.Material getMaterial()
{
return material;
}
@Override
public INode clone()
{
LeafNode newclone = new LeafNode(this.objInstanceName,scenegraph,name);
newclone.setMaterial(this.getMaterial());
return newclone;
}
/**
* Delegates to the scene graph for rendering. This has two advantages:
* <ul>
* <li>It keeps the leaf light.</li>
* <li>It abstracts the actual drawing to the specific implementation of the scene graph renderer</li>
* </ul>
* @param context the generic renderer context {@link sgraph.IScenegraphRenderer}
* @param modelView the stack of modelview matrices
* @throws IllegalArgumentException
*/
@Override
public void draw(IScenegraphRenderer context,Stack<Matrix4f> modelView) throws IllegalArgumentException
{
if (objInstanceName.length()>0)
{
context.drawMesh(objInstanceName,material,textureName,modelView.peek());
}
}
@Override
public List<HitRecord> raycast(Ray3D ray, Stack<Matrix4f> transforms) throws IllegalArgumentException {
List<HitRecord> hits = new ArrayList<HitRecord>();
List<HitRecord> objHits;
switch(objInstanceName) {
case "box":
objHits = BoxRaycast(ray, transforms);
if (objHits.size() > 0) {
hits.addAll(objHits);
}
break;
case "sphere":
objHits = SphereRaycast(ray, transforms);
if (objHits.size() > 0) {
hits.addAll(objHits);
}
break;
default:
throw new IllegalArgumentException("RayTracing not yet supported for objects of type: " + objInstanceName);
}
return hits;
}
private List<HitRecord> BoxRaycast(Ray3D ray, Stack<Matrix4f> transforms) {
List<HitRecord> hits = new ArrayList<HitRecord>();
Ray3D transformRay = new Ray3D(ray);
Matrix4f transform = new Matrix4f(transforms.peek());
transformRay.viewToWorld(transform);
HitRecord hit;
float maxT = (float)Double.POSITIVE_INFINITY;
float minT = (float)Double.NEGATIVE_INFINITY;
/*
Ray(start S, vector V)
create ray in view coordinates
S: 0,0,0 always! (when in view coordinate system)
going through near plane pixel (i,j)
So 3D location of that pixel in view coordinates is
V = Vector(x, y, z)
x = i-width/2
y = j-height/2
z = -0.5*height/tan(0.5*FOVY)
*/
/*
Given ray R and object O, does R hit O?
If so: where?
*/
/*
Equation of a plane P: ax + by + cz + d = 0
Px = Sx + tVx
Py = Sy + tVy
Pz = Sz + tVz
t = -(aSx + bSy + cSz + d) / (aVx + bVy + cVz)
If ray || to plane P, divide by zero error
Need t such that S + tV
*/
/*
For box centered at 0 with dimensions (1,1,1)
front plane: -0.5 <= x <= 0.5 ; -0.5 <= y <= 0.5 ; z = 0.5
rear plane: -0.5 <= x <= 0.5 ; -0.5 <= y <= 0.5 ; z = -0.5
left plane: x = -0.5 ; -0.5 <= y <= 0.5 ; -0.5 <= z <= 0.5
right plane: x = 0.5 ; -0.5 <= y <= 0.5 ; -0.5 <= z <= 0.5
top plane: -0.5 <= x <= 0.5 ; y = 0.5 ; -0.5 <= z <= 0.5
bottom plane: -0.5 <= x <= 0.5 ; y = -0.5 ; -0.5 <= z <= 0.5
        tx1 = (-0.5 - Sx) / Vx
        tx2 = ( 0.5 - Sx) / Vx
        tminx = min(tx1, tx2)
        tmaxx = max(tx1, tx2)
        ty1 = (-0.5 - Sy) / Vy
        ty2 = ( 0.5 - Sy) / Vy
        tminy = min(ty1, ty2)
        tmaxy = max(ty1, ty2)
        tz1 = (-0.5 - Sz) / Vz
        tz2 = ( 0.5 - Sz) / Vz
        tminz = min(tz1, tz2)
        tmaxz = max(tz1, tz2)
tmin = max(tminx, tminy, tminz)
tmax = min(tmaxx, tmaxy, tmaxz)
*/
float tx1 = (-0.5f - transformRay.getStart().x) / transformRay.getDirection().x;
float tx2 = (0.5f - transformRay.getStart().x) / transformRay.getDirection().x;
float tminx = Math.min(tx1, tx2);
float tmaxx = Math.max(tx1, tx2);
float ty1 = (-0.5f - transformRay.getStart().y) / transformRay.getDirection().y;
float ty2 = (0.5f - transformRay.getStart().y) / transformRay.getDirection().y;
float tminy = Math.min(ty1, ty2);
float tmaxy = Math.max(ty1, ty2);
float tz1 = (-0.5f - transformRay.getStart().z) / transformRay.getDirection().z;
float tz2 = (0.5f - transformRay.getStart().z) / transformRay.getDirection().z;
float tminz = Math.min(tz1, tz2);
float tmaxz = Math.max(tz1, tz2);
float tmin = Math.max(tminx, Math.max(tminy, Math.max(tminz, minT)));
float tmax = Math.min(tmaxx, Math.min(tmaxy, Math.min(tmaxz, maxT)));
float tEnter;
float tExit;
if (tmin != maxT && tmin > 0 && tmin <= tmax) {
tEnter = tmin;
if (tmax != maxT) {
tExit = tmax;
                // JOML's add/mul mutate the receiver, so compute the hit points on copies
                // instead of mutating (and aliasing) the ray's start and direction vectors.
                Vector4f intersectIn = new Vector4f(transformRay.getDirection()).mul(tmin).add(transformRay.getStart());
                Vector4f intersectOut = new Vector4f(transformRay.getDirection()).mul(tmax).add(transformRay.getStart());
Vector4f normalIn = getBoxNormal(intersectIn);
Vector4f normalOut = getBoxNormal(intersectOut);
Material mat = this.getMaterial();
hit = new HitRecord(tEnter, tExit, intersectIn, intersectOut, normalIn, normalOut, mat, new Matrix4f(transforms.peek()));
hits.add(hit);
}
}
else {
tEnter = tmax;
}
/*
Need Point of Intersection P, Vector N (normal at P), and List<Light> L, all in view coordinate system
To get N, apply the inverse transpose of M on N in object coordinate system
*/
/*
Equation of sphere with center C (Cx,Cy,Cz) and radius r: (X-Cx)^2 + (Y-Cy)^2 + (Z-Cz)^2 = r^2
// to get t:
At^2 + Bt + C = 0
// For any sphere
A = Vx^2 + Vy^2 + Vz^2
B = 2Vx(Sx-Cx) + 2Vy(Sy-Cy) + 2Vz(Sz-Cz)
C = (Sx-Cx)^2 + (Sy-Cy)^2 + (Sz-Cz)^2 - r^2
P = S + tV
N = P - C
// For sphere centered at origin with radius = 1
A = Vx^2 + Vy^2 + Vz^2
B = 2(VxSx + VySy + VzSz)
C = Sx^2 + Sy^2 + Sz^2 - 1
P = S + tV
N = P
// Catch cases:
A = 0 : cannot happen without other mistakes in code
B^2 - 4AC < 0 : means the ray does not hit the sphere
*/
/*
Textures
-PI/2 <= phi <= PI/2
0 <= theta <= 2PI
        X = r*cos(theta)*cos(phi)
        Y = r*sin(phi)
        Z = r*sin(theta)*cos(phi)
        Xo = cos(theta)*cos(phi)
        Yo = sin(phi)
        Zo = sin(theta)*cos(phi)
phi = sin^-1(Yo) ==> t = (phi + PI/2)/PI
theta = tan^-1(-Zo/Xo) ==> s = theta/2PI
*/
/*
Shadows
Color at point P due to light i:
C(P) = (i=0 SIGMA n) { Si }
Si = 1 --> not in shadow
Si = 0 --> in shadow
To determine if a point P is in shadow from light i:
Cast a Ray from P to i.position
*/
return hits;
}
private List<HitRecord> SphereRaycast(Ray3D ray, Stack<Matrix4f> transforms) {
List<HitRecord> hits = new ArrayList<HitRecord>();
Ray3D transformRay = new Ray3D(ray);
Matrix4f transform = new Matrix4f(transforms.peek());
transformRay.viewToWorld(transform);
Vector4f start = transformRay.getStart();
Vector4f direction = transformRay.getDirection();
HitRecord hit;
float maxT = (float)Double.POSITIVE_INFINITY;
float minT = (float)Double.NEGATIVE_INFINITY;
/*
Ray(start S, vector V)
create ray in view coordinates
S: 0,0,0 always! (when in view coordinate system)
going through near plane pixel (i,j)
So 3D location of that pixel in view coordinates is
V = Vector(x, y, z)
x = i-width/2
y = j-height/2
z = -0.5*height/tan(0.5*FOVY)
*/
/*
Given ray R and object O, does R hit O?
If so: where?
*/
/*
Equation of a plane P: ax + by + cz + d = 0
Px = Sx + tVx
Py = Sy + tVy
Pz = Sz + tVz
t = -(aSx + bSy + cSz + d) / (aVx + bVy + cVz)
If ray || to plane P, divide by zero error
*/
/*
For box centered at 0 with dimensions (1,1,1)
front plane: -0.5 <= x <= 0.5 ; -0.5 <= y <= 0.5 ; z = 0.5
rear plane: -0.5 <= x <= 0.5 ; -0.5 <= y <= 0.5 ; z = -0.5
left plane: x = -0.5 ; -0.5 <= y <= 0.5 ; -0.5 <= z <= 0.5
right plane: x = 0.5 ; -0.5 <= y <= 0.5 ; -0.5 <= z <= 0.5
top plane: -0.5 <= x <= 0.5 ; y = 0.5 ; -0.5 <= z <= 0.5
bottom plane: -0.5 <= x <= 0.5 ; y = -0.5 ; -0.5 <= z <= 0.5
        tx1 = (-0.5 - Sx) / Vx
        tx2 = ( 0.5 - Sx) / Vx
        tminx = min(tx1, tx2)
        tmaxx = max(tx1, tx2)
        ty1 = (-0.5 - Sy) / Vy
        ty2 = ( 0.5 - Sy) / Vy
        tminy = min(ty1, ty2)
        tmaxy = max(ty1, ty2)
        tz1 = (-0.5 - Sz) / Vz
        tz2 = ( 0.5 - Sz) / Vz
        tminz = min(tz1, tz2)
        tmaxz = max(tz1, tz2)
tmin = max(tminx, tminy, tminz)
tmax = min(tmaxx, tmaxy, tmaxz)
*/
/*
float tx1 = (-0.5f - start.x) / v.x;
float tx2 = (-0.5f - start.x) / v.x;
float tminx = Math.min(tx1, tx2);
float tmaxx = Math.max(tx1, tx2);
float ty1 = (-0.5f - start.y) / v.y;
float ty2 = (-0.5f - start.y) / v.y;
float tminy = Math.min(ty1, ty2);
float tmaxy = Math.max(ty1, ty2);
float tz1 = (-0.5f - start.z) / v.z;
float tz2 = (-0.5f - start.z) / v.z;
float tminz = Math.min(tz1, tz2);
float tmaxz = Math.max(tz1, tz2);
float tmin = Math.max(tminx, Math.max(tminy, Math.max(tminz, -maxT)));
float tmax = Math.min(tmaxx, Math.min(tmaxy, Math.min(tmaxz, maxT)));
float t;
if (tmin != maxT && tmin > 0) {
t = tmin;
}
else {
t = tmax;
}
*/
/*
Need Point of Intersection P, Vector N (normal at P), and List<Light> L, all in view coordinate system
To get N, apply the inverse transpose of M on N in object coordinate system
*/
// Vector3f p = ray.getStart().add(ray.getDirection().mul(transforms.peek().invert()));
/*
Equation of sphere with center C (Cx,Cy,Cz) and radius r: (X-Cx)^2 + (Y-Cy)^2 + (Z-Cz)^2 = r^2
// to get t:
At^2 + Bt + C = 0
// For any sphere
A = Vx^2 + Vy^2 + Vz^2
B = 2Vx(Sx-Cx) + 2Vy(Sy-Cy) + 2Vz(Sz-Cz)
C = (Sx-Cx)^2 + (Sy-Cy)^2 + (Sz-Cz)^2 - r^2
P = S + tV
N = P - C
// For sphere centered at origin with radius = 1
A = Vx^2 + Vy^2 + Vz^2
B = 2(VxSx + VySy + VzSz)
C = Sx^2 + Sy^2 + Sz^2 - 1
P = S + tV
N = P
// Catch cases:
A = 0 : cannot happen without other mistakes in code
B^2 - 4AC < 0 : means the ray does not hit the sphere
*/
        float a = 1; // assumes the transformed ray direction is normalized; otherwise use direction.lengthSquared()
float b = 2 * (start.dot(direction));
float c = (float)(Math.pow(start.x, 2) + Math.pow(start.y, 2) + Math.pow(start.z, 2) - 1);
/*
float a = transformRay.getDirection().lengthSquared();
float b = 2 * (transformRay.getStart().dot(transformRay.getDirection()));
float c = transformRay.getStart().lengthSquared() - 1;
*/
List<Float> t = quadratic(a,b,c);
if (t.size() == 2) {
if (t.get(0) != (float)Double.POSITIVE_INFINITY) {
float tEnter = t.get(0);
float tExit = t.get(1);
if (tEnter > 0) {
                    // Work on copies: JOML's mul/add mutate the receiver, and start/direction are reused below.
                    Vector4f intersectIn = new Vector4f(direction).mul(tEnter).add(start);
                    Vector4f intersectOut = new Vector4f(direction).mul(tExit).add(start);
hit = new HitRecord(tEnter, tExit, intersectIn, intersectOut, intersectIn, intersectOut, this.getMaterial(), new Matrix4f(transforms.peek()));
hits.add(hit);
}
}
}
else if (t.size() == 1) {
float tEnter = t.get(0);
if (tEnter > 0) {
                Vector4f intersectIn = new Vector4f(transformRay.getDirection()).mul(tEnter).add(transformRay.getStart());
hit = new HitRecord(tEnter, tEnter, intersectIn, intersectIn, intersectIn, intersectIn, this.getMaterial(), new Matrix4f(transforms.peek()));
hits.add(hit);
}
}
/*
Textures
-PI/2 <= phi <= PI/2
0 <= theta <= 2PI
        X = r*cos(theta)*cos(phi)
        Y = r*sin(phi)
        Z = r*sin(theta)*cos(phi)
        Xo = cos(theta)*cos(phi)
        Yo = sin(phi)
        Zo = sin(theta)*cos(phi)
phi = sin^-1(Yo) ==> t = (phi + PI/2)/PI
theta = tan^-1(-Zo/Xo) ==> s = theta/2PI
*/
/*
Shadows
Color at point P due to light i:
C(P) = (i=0 SIGMA n) { Si }
Si = 1 --> not in shadow
Si = 0 --> in shadow
To determine if a point P is in shadow from light i:
Cast a Ray from P to i.position
*/
return hits;
}
    public Vector4f getBoxNormal(Vector4f intersect) {
        // Compare against the unit-box faces with a small tolerance; exact float equality
        // would reject points that land on a face only approximately.
        final float eps = 1e-4f;
        float normalx, normaly, normalz;
        if (Math.abs(Math.abs(intersect.x) - 0.5f) < eps) {
            normalx = intersect.x;
        }
        else {
            normalx = 0;
        }
        if (Math.abs(Math.abs(intersect.y) - 0.5f) < eps) {
            normaly = intersect.y;
        }
        else {
            normaly = 0;
        }
        if (Math.abs(Math.abs(intersect.z) - 0.5f) < eps) {
            normalz = intersect.z;
        }
        else {
            normalz = 0;
        }
        return new Vector4f(normalx, normaly, normalz, 0).normalize();
    }
    public List<Float> quadratic(float a, float b, float c) {
        // Solves a*t^2 + b*t + c = 0; returns the roots in ascending order,
        // or two POSITIVE_INFINITY sentinels when the discriminant is negative.
        List<Float> t = new ArrayList<Float>();
        float t1, t2;
        float discriminant = (float) (Math.pow(b, 2) - (4 * a * c));
        if (discriminant < 0) {
            t1 = (float) Double.POSITIVE_INFINITY;
            t2 = (float) Double.POSITIVE_INFINITY;
            t.add(t1);
            t.add(t2);
        }
        else {
            t1 = (float) (-b + Math.sqrt(discriminant)) / (2 * a);
            t2 = (float) (-b - Math.sqrt(discriminant)) / (2 * a);
            if (t1 > t2) {
                t.add(t2);
                t.add(t1);
            }
            else if (t1 < t2) {
                t.add(t1);
                t.add(t2);
            }
            else {
                t.add(t1);
            }
        }
        return t;
    }
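    // Hedged sketch (not used elsewhere in this class): implements the sphere texture-coordinate
    // mapping sketched in the comments above, phi = asin(Yo), theta = atan2(-Zo, Xo),
    // t = (phi + PI/2) / PI and s = theta / (2*PI), for a point on the unit sphere.
    public float[] sphereTexCoordsSketch(Vector4f pointOnUnitSphere) {
        double phi = Math.asin(pointOnUnitSphere.y);
        double theta = Math.atan2(-pointOnUnitSphere.z, pointOnUnitSphere.x);
        if (theta < 0) {
            theta += 2 * Math.PI; // keep theta in [0, 2*PI) so s stays in [0, 1)
        }
        float s = (float) (theta / (2 * Math.PI));
        float t = (float) ((phi + Math.PI / 2) / Math.PI);
        return new float[]{s, t};
    }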
}
|
|
/***************************************************************************
* Copyright 2015 Kieker Project (http://kieker-monitoring.net)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
***************************************************************************/
package kieker.monitoring.core.controller;
import java.lang.Thread.State;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.util.Queue;
import java.util.concurrent.BlockingQueue;
import kieker.common.configuration.Configuration;
import kieker.common.logging.Log;
import kieker.common.logging.LogFactory;
import kieker.common.record.IMonitoringRecord;
import kieker.monitoring.core.configuration.ConfigurationFactory;
import kieker.monitoring.queue.BlockingQueueDecorator;
import kieker.monitoring.queue.behavior.BlockOnFailedInsertBehavior;
import kieker.monitoring.queue.behavior.CountOnFailedInsertBehavior;
import kieker.monitoring.queue.behavior.DoNotInsertBehavior;
import kieker.monitoring.queue.behavior.InsertBehavior;
import kieker.monitoring.queue.behavior.TerminateOnFailedInsertBehavior;
import kieker.monitoring.queue.putstrategy.PutStrategy;
import kieker.monitoring.queue.putstrategy.SPBlockingPutStrategy;
import kieker.monitoring.queue.takestrategy.SCBlockingTakeStrategy;
import kieker.monitoring.queue.takestrategy.TakeStrategy;
import kieker.monitoring.writer.AbstractMonitoringWriter;
import kieker.monitoring.writer.MonitoringWriterThread;
/**
* @author Andre van Hoorn, Matthias Rohr, Jan Waller, Robert von Massow
*
* @since 1.3
*/
public final class WriterController extends AbstractController implements IWriterController {
public static final String PREFIX = WriterController.class.getName() + ".";
	/** The name of the configuration property determining the size of this writer's queue. */
public static final String RECORD_QUEUE_SIZE = "RecordQueueSize";
	/** The name of the configuration property determining the insert behavior for the writer's queue. */
public static final String RECORD_QUEUE_INSERT_BEHAVIOR = "RecordQueueInsertBehavior";
/** The fully qualified name of the queue to be used for the records. */
public static final String RECORD_QUEUE_FQN = "RecordQueueFQN";
private static final Log LOG = LogFactory.getLog(WriterController.class);
/** Monitoring Writer. */
private AbstractMonitoringWriter monitoringWriter; // NOPMD (so far, cannot be made final due to the MonitoringController)
/** Whether or not to automatically log the metadata record. */
private final boolean logMetadataRecord;
/** the capacity of the queue. */
private final int queueCapacity;
/** the synchronized, blocking queue used for the communication between the monitored application's threads and the writer thread. */
private final BlockingQueue<IMonitoringRecord> writerQueue;
private MonitoringWriterThread monitoringWriterThread; // NOPMD (so far, cannot be made final due to the MonitoringController)
private InsertBehavior<IMonitoringRecord> insertBehavior; // NOPMD (so far, cannot be made final due to the MonitoringController)
// private Disruptor<IMonitoringRecordEvent> disruptor;
// private RingBuffer<IMonitoringRecordEvent> ringBuffer;
// private int numInsertedRecords;
/**
* Creates a new instance of this class using the given parameters.
*
* @param configuration
* The configuration for the controller.
*/
public WriterController(final Configuration configuration) {
super(configuration);
this.logMetadataRecord = configuration.getBooleanProperty(ConfigurationFactory.METADATA);
this.queueCapacity = configuration.getIntProperty(PREFIX + RECORD_QUEUE_SIZE);
final String queueFqn = configuration.getStringProperty(PREFIX + RECORD_QUEUE_FQN);
LOG.info("Initializing writer queue '" + queueFqn + "' with a capacity of (at least) " + this.queueCapacity);
final Queue<IMonitoringRecord> queue = this.newQueue(queueFqn, this.queueCapacity);
if (queue instanceof BlockingQueue) {
this.writerQueue = (BlockingQueue<IMonitoringRecord>) queue;
} else {
final PutStrategy putStrategy = new SPBlockingPutStrategy();
final TakeStrategy takeStrategy = new SCBlockingTakeStrategy();
this.writerQueue = new BlockingQueueDecorator<IMonitoringRecord>(queue, putStrategy, takeStrategy);
}
final String writerClassName = configuration.getStringProperty(ConfigurationFactory.WRITER_CLASSNAME);
this.monitoringWriter = AbstractController.createAndInitialize(AbstractMonitoringWriter.class, writerClassName,
configuration);
if (this.monitoringWriter == null) {
this.terminate();
return; // TODO should throw an exception! and then monitoringWriter can be declared final
// throw new IllegalStateException("monitoringWriter may not be null");
}
this.monitoringWriterThread = new MonitoringWriterThread(this.monitoringWriter, this.writerQueue);
int recordQueueInsertBehavior = configuration.getIntProperty(PREFIX + RECORD_QUEUE_INSERT_BEHAVIOR);
if ((recordQueueInsertBehavior < 0) || (recordQueueInsertBehavior > 4)) {
if (LOG.isWarnEnabled()) {
LOG.warn("Unknown value '" + recordQueueInsertBehavior + "' for " + PREFIX + RECORD_QUEUE_INSERT_BEHAVIOR
+ "; using default value 0");
}
recordQueueInsertBehavior = 0;
}
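		// Value-to-behavior mapping (derived from the switch below): 0/default -> terminate on failed
		// insert, 1 -> block until space is available, 2 -> count failed inserts, 3 -> drop silently,
		// 4 -> disruptor-based behavior (currently commented out, leaving insertBehavior unset).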
switch (recordQueueInsertBehavior) {
case 1:
this.insertBehavior = new BlockOnFailedInsertBehavior<IMonitoringRecord>(this.writerQueue);
break;
case 2:
this.insertBehavior = new CountOnFailedInsertBehavior<IMonitoringRecord>(this.writerQueue);
break;
case 3:
this.insertBehavior = new DoNotInsertBehavior<IMonitoringRecord>();
break;
case 4:
// try {
// this.initDisruptor(configuration);
// } catch (final IOException e) {
// throw new IllegalStateException(e);
// }
// this.insertBehavior = new DisruptorInsertBehavior<IMonitoringRecord>(this.ringBuffer);
break;
default:
this.insertBehavior = new TerminateOnFailedInsertBehavior<IMonitoringRecord>(this.writerQueue);
break;
}
}
// private void initDisruptor(final Configuration configuration) throws IOException {
// final EventFactory<IMonitoringRecordEvent> eventFactory = new NewRecordEventFactory();
// final int bufferSize = 1024;
// final ThreadFactory threadFactory = new ThreadFactory() {
// @Override
// public Thread newThread(final Runnable r) {
// final Thread thread = new Thread(r);
// thread.setDaemon(true);
// return thread;
// }
// };
//
// this.disruptor = new Disruptor<IMonitoringRecordEvent>(eventFactory, bufferSize, threadFactory);
//
// final SharedConnection connection = new SharedConnection(configuration);
// final EventHandler<? super IMonitoringRecordEvent> handler1 = new DisruptorTcpWriter(configuration, connection);
	// // allowing more than one consumer breaks the assumption that the records are sent in order of occurrence
// // final EventHandler<? super IMonitoringRecordEvent> handler2 = new DisruptorTcpWriter(configuration,
// connection);
//
// this.disruptor.handleEventsWith(handler1);
// this.disruptor.start();
//
// this.ringBuffer = this.disruptor.getRingBuffer();
// }
/**
* @param queueFqn
* the fully qualified queue name
* @param capacity
* the (initial) capacity of the queue
	 * @return a new instance of the queue class indicated by <code>queueFqn</code>. Such an instance is created by
	 *         invoking the constructor with a single parameter of type <code>int</code>.
*/
@SuppressWarnings("unchecked")
private Queue<IMonitoringRecord> newQueue(final String queueFqn, final int capacity) {
try {
final Class<?> clazz = Class.forName(queueFqn);
@SuppressWarnings("rawtypes")
final Class<? extends Queue> queueClass = clazz.asSubclass(Queue.class);
@SuppressWarnings("rawtypes")
final Constructor<? extends Queue> constructor = queueClass.getConstructor(int.class);
return constructor.newInstance(capacity);
} catch (final ClassNotFoundException | InstantiationException e) {
LOG.warn("An exception occurred", e);
throw new IllegalStateException(e);
} catch (final NoSuchMethodException | SecurityException e) {
LOG.warn("An exception occurred", e);
throw new IllegalStateException(e);
} catch (final IllegalAccessException | IllegalArgumentException e) {
LOG.warn("An exception occurred", e);
throw new IllegalStateException(e);
} catch (final InvocationTargetException e) {
LOG.warn("An exception occurred", e);
throw new IllegalStateException(e);
}
}
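	// Hedged illustration (not part of Kieker's API): shows what newQueue(..) expects from the
	// configured class, namely a public (int) constructor. The FQN and capacity below are made up.
	@SuppressWarnings("unused")
	private Queue<IMonitoringRecord> exampleQueueResolution() {
		// Equivalent to configuring RecordQueueFQN=java.util.concurrent.ArrayBlockingQueue together
		// with RecordQueueSize=10000; a class that is not a BlockingQueue would instead be wrapped
		// in a BlockingQueueDecorator by the constructor above.
		return this.newQueue("java.util.concurrent.ArrayBlockingQueue", 10000);
	}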
@SuppressWarnings("PMD.DefaultPackage")
// default
boolean isLogMetadataRecord() {
return this.logMetadataRecord;
}
@Override
protected final void init() {
if (LOG.isDebugEnabled()) {
LOG.debug("Initializing Writer Controller");
}
if (this.monitoringWriterThread != null) {
this.monitoringWriterThread.start();
}
}
@Override
protected final void cleanup() {
if (LOG.isDebugEnabled()) {
LOG.debug("Shutting down Writer Controller");
}
if (this.monitoringWriterThread != null) {
this.monitoringWriterThread.terminate();
}
// if (this.disruptor != null) {
// this.disruptor.shutdown();
// }
// LOG.info("block durations: {}" + this.insertBehavior.toString());
// System.out.println("block durations: " + this.insertBehavior.toString());
}
@Override
public final String toString() {
final StringBuilder sb = new StringBuilder(256) // NOPMD (consecutive calls of append with string literals)
.append("WriterController:")
.append("\n\tQueue capacity: ")
.append(this.queueCapacity)
.append("\n\tInsert behavior (a.k.a. QueueFullBehavior): ")
.append(this.insertBehavior.toString())
.append("\n");
if (this.monitoringWriter != null) {
sb.append(this.monitoringWriter.toString());
} else {
sb.append("\tNo Monitoring Writer available");
}
sb.append('\n');
return sb.toString();
}
@Override
public final boolean newMonitoringRecord(final IMonitoringRecord record) {
final boolean recordSent = this.insertBehavior.insert(record);
if (!recordSent) {
LOG.error("Error writing the monitoring data. Will terminate monitoring!");
this.terminate();
}
return recordSent;
}
@Override
public void waitForTermination(final long timeoutInMs) throws InterruptedException {
if (this.monitoringWriterThread != null) {
this.monitoringWriterThread.join(timeoutInMs);
}
}
/**
* Used in tests only.
*/
@SuppressWarnings({ "PMD.DefaultPackage", "PMD.CommentDefaultAccessModifier" })
State getStateOfMonitoringWriterThread() {
return this.monitoringWriterThread.getState();
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.lang.utils;
import org.apache.ignite.internal.IgniteInternalFuture;
import org.apache.ignite.internal.util.GridUnsafe;
import org.apache.ignite.internal.util.OffheapReadWriteLock;
import org.apache.ignite.testframework.GridTestUtils;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import java.util.concurrent.BrokenBarrierException;
import java.util.concurrent.Callable;
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import org.junit.Test;
/**
*
*/
@SuppressWarnings("BusyWait")
public class IgniteOffheapReadWriteLockSelfTest extends GridCommonAbstractTest {
/** */
private static final int TAG_0 = 1;
/** Number of 1-second iterations in every test. */
public static final int ROUNDS_PER_TEST = 5;
/**
* @throws Exception if failed.
*/
@Test
public void testConcurrentUpdatesSingleLock() throws Exception {
final int numPairs = 100;
final Pair[] data = new Pair[numPairs];
for (int i = 0; i < numPairs; i++)
data[i] = new Pair();
final OffheapReadWriteLock lock = new OffheapReadWriteLock(16);
final long ptr = GridUnsafe.allocateMemory(OffheapReadWriteLock.LOCK_SIZE);
lock.init(ptr, TAG_0);
final AtomicInteger reads = new AtomicInteger();
final AtomicInteger writes = new AtomicInteger();
final AtomicBoolean done = new AtomicBoolean(false);
IgniteInternalFuture<Long> fut = GridTestUtils.runMultiThreadedAsync(new Callable<Object>() {
/** {@inheritDoc} */
@Override public Object call() {
try {
ThreadLocalRandom rnd = ThreadLocalRandom.current();
while (!done.get()) {
boolean write = rnd.nextInt(10) < 2;
if (write) {
boolean locked = lock.writeLock(ptr, TAG_0);
try {
// No tag change in this test.
assert locked;
assertTrue(lock.isWriteLocked(ptr));
assertFalse(lock.isReadLocked(ptr));
int idx = rnd.nextInt(numPairs);
int delta = rnd.nextInt(100_000);
data[idx].a += delta;
data[idx].b -= delta;
}
finally {
lock.writeUnlock(ptr, TAG_0);
}
writes.incrementAndGet();
}
else {
boolean locked = lock.readLock(ptr, TAG_0);
try {
assert locked;
assertFalse(lock.isWriteLocked(ptr));
assertTrue(lock.isReadLocked(ptr));
for (int i1 = 0; i1 < data.length; i1++) {
Pair pair = data[i1];
assertEquals("Failed check for index: " + i1, pair.a, -pair.b);
}
}
finally {
lock.readUnlock(ptr);
}
reads.incrementAndGet();
}
}
}
catch (Throwable e) {
e.printStackTrace();
}
return null;
}
}, 32, "tester");
for (int i = 0; i < ROUNDS_PER_TEST; i++) {
Thread.sleep(1_000);
info("Reads: " + reads.getAndSet(0) + ", writes=" + writes.getAndSet(0));
}
done.set(true);
fut.get();
validate(data);
}
/**
* @throws Exception if failed.
*/
@Test
public void testConcurrentUpdatesMultipleLocks() throws Exception {
final int numPairs = 100;
final Pair[] data = new Pair[numPairs];
final OffheapReadWriteLock lock = new OffheapReadWriteLock(16);
final long ptr = GridUnsafe.allocateMemory(OffheapReadWriteLock.LOCK_SIZE * numPairs);
for (int i = 0; i < numPairs; i++) {
data[i] = new Pair();
lock.init(ptr + i * OffheapReadWriteLock.LOCK_SIZE, TAG_0);
}
final AtomicInteger reads = new AtomicInteger();
final AtomicInteger writes = new AtomicInteger();
final AtomicBoolean done = new AtomicBoolean(false);
IgniteInternalFuture<Long> fut = GridTestUtils.runMultiThreadedAsync(new Callable<Object>() {
/** {@inheritDoc} */
@Override public Object call() {
ThreadLocalRandom rnd = ThreadLocalRandom.current();
while (!done.get()) {
boolean write = rnd.nextInt(10) < 2;
int idx = rnd.nextInt(numPairs);
long lPtr = ptr + idx * OffheapReadWriteLock.LOCK_SIZE;
if (write) {
lock.writeLock(lPtr, TAG_0);
try {
assertTrue(lock.isWriteLocked(lPtr));
assertFalse(lock.isReadLocked(lPtr));
int delta = rnd.nextInt(100_000);
data[idx].a += delta;
data[idx].b -= delta;
}
finally {
lock.writeUnlock(lPtr, TAG_0);
}
writes.incrementAndGet();
}
else {
lock.readLock(lPtr, TAG_0);
try {
assertFalse(lock.isWriteLocked(lPtr));
assertTrue(lock.isReadLocked(lPtr));
Pair pair = data[idx];
assertEquals("Failed check for index: " + idx, pair.a, -pair.b);
}
finally {
lock.readUnlock(lPtr);
}
reads.incrementAndGet();
}
}
return null;
}
}, 32, "tester");
for (int i = 0; i < ROUNDS_PER_TEST; i++) {
Thread.sleep(1_000);
info("Reads: " + reads.getAndSet(0) + ", writes=" + writes.getAndSet(0));
}
done.set(true);
fut.get();
validate(data);
}
/**
* @throws Exception if failed.
*/
@Test
public void testLockUpgradeMultipleLocks() throws Exception {
final int numPairs = 100;
final Pair[] data = new Pair[numPairs];
final OffheapReadWriteLock lock = new OffheapReadWriteLock(16);
final long ptr = GridUnsafe.allocateMemory(OffheapReadWriteLock.LOCK_SIZE * numPairs);
for (int i = 0; i < numPairs; i++) {
data[i] = new Pair();
lock.init(ptr + i * OffheapReadWriteLock.LOCK_SIZE, TAG_0);
}
final AtomicInteger reads = new AtomicInteger();
final AtomicInteger writes = new AtomicInteger();
final AtomicInteger successfulUpgrades = new AtomicInteger();
final AtomicBoolean done = new AtomicBoolean(false);
IgniteInternalFuture<Long> fut = GridTestUtils.runMultiThreadedAsync(new Callable<Object>() {
/** {@inheritDoc} */
@Override public Object call() {
ThreadLocalRandom rnd = ThreadLocalRandom.current();
while (!done.get()) {
int idx = rnd.nextInt(numPairs);
long lPtr = ptr + idx * OffheapReadWriteLock.LOCK_SIZE;
boolean locked = lock.readLock(lPtr, TAG_0);
boolean write = false;
try {
assert locked;
Pair pair = data[idx];
assertEquals("Failed check for index: " + idx, pair.a, -pair.b);
write = rnd.nextInt(10) < 2;
if (write) {
// TAG fail will cause NPE.
boolean upg = lock.upgradeToWriteLock(lPtr, TAG_0);
writes.incrementAndGet();
if (upg)
successfulUpgrades.incrementAndGet();
int delta = rnd.nextInt(100_000);
pair.a += delta;
pair.b -= delta;
}
}
finally {
if (write)
lock.writeUnlock(lPtr, TAG_0);
else
lock.readUnlock(lPtr);
}
reads.incrementAndGet();
}
return null;
}
}, 32, "tester");
for (int i = 0; i < ROUNDS_PER_TEST; i++) {
Thread.sleep(1_000);
info("Reads=" + reads.getAndSet(0) + ", writes=" + writes.getAndSet(0) + ", upgrades=" + successfulUpgrades.getAndSet(0));
}
done.set(true);
fut.get();
validate(data);
}
/**
* @throws Exception if failed.
*/
@Test
public void testTagIdUpdateWait() throws Exception {
checkTagIdUpdate(true);
}
/**
* @throws Exception if failed.
*/
@Test
public void testTagIdUpdateContinuous() throws Exception {
checkTagIdUpdate(false);
}
/**
* @throws Exception if failed.
*/
private void checkTagIdUpdate(final boolean waitBeforeSwitch) throws Exception {
final int numPairs = 100;
final Pair[] data = new Pair[numPairs];
for (int i = 0; i < numPairs; i++)
data[i] = new Pair();
final OffheapReadWriteLock lock = new OffheapReadWriteLock(16);
final long ptr = GridUnsafe.allocateMemory(OffheapReadWriteLock.LOCK_SIZE);
lock.init(ptr, TAG_0);
final AtomicInteger reads = new AtomicInteger();
final AtomicInteger writes = new AtomicInteger();
final AtomicBoolean done = new AtomicBoolean(false);
final AtomicBoolean run = new AtomicBoolean(true);
final int threadCnt = 32;
final CyclicBarrier barr = new CyclicBarrier(threadCnt, () -> {if (done.get()) run.set(false);});
IgniteInternalFuture<Long> fut = GridTestUtils.runMultiThreadedAsync(new Callable<Object>() {
/** {@inheritDoc} */
@Override public Object call() {
try {
ThreadLocalRandom rnd = ThreadLocalRandom.current();
int tag = TAG_0;
long lastSwitch = System.currentTimeMillis();
while (run.get()) {
boolean write = rnd.nextInt(10) < 2;
boolean locked;
boolean switched = false;
if (write) {
locked = lock.writeLock(ptr, tag);
if (locked) {
try {
assertTrue(lock.isWriteLocked(ptr));
assertFalse(lock.isReadLocked(ptr));
int idx = rnd.nextInt(numPairs);
int delta = rnd.nextInt(100_000);
data[idx].a += delta;
data[idx].b -= delta;
}
finally {
switched = System.currentTimeMillis() - lastSwitch > 1_000 || !waitBeforeSwitch;
if (switched && waitBeforeSwitch)
info("Switching...");
int tag1 = (tag + (switched ? 1 : 0)) & 0xFFFF;
if (tag1 == 0)
tag1 = 1;
lock.writeUnlock(ptr, tag1);
}
writes.incrementAndGet();
}
}
else {
locked = lock.readLock(ptr, tag);
if (locked) {
try {
assert locked;
assertFalse(lock.isWriteLocked(ptr));
assertTrue(lock.isReadLocked(ptr));
for (int i1 = 0; i1 < data.length; i1++) {
Pair pair = data[i1];
assertEquals("Failed check for index: " + i1, pair.a, -pair.b);
}
}
finally {
lock.readUnlock(ptr);
}
reads.incrementAndGet();
}
}
if (!locked || switched) {
try {
barr.await();
}
catch (BrokenBarrierException e) {
// Done.
e.printStackTrace();
return null;
}
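                        // Advance to the next 16-bit tag, skipping 0 (tags used by this test start at TAG_0 == 1).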
tag = (tag + 1) & 0xFFFF;
if (tag == 0)
tag = 1;
if (waitBeforeSwitch || (!waitBeforeSwitch && tag == 1))
info("Switch to a new tag: " + tag);
lastSwitch = System.currentTimeMillis();
}
}
}
catch (Throwable e) {
e.printStackTrace();
}
return null;
}
}, threadCnt, "tester");
for (int i = 0; i < ROUNDS_PER_TEST; i++) {
Thread.sleep(1_000);
info("Reads: " + reads.getAndSet(0) + ", writes=" + writes.getAndSet(0));
}
done.set(true);
fut.get();
validate(data);
}
/**
* Validates data integrity.
*
* @param data Data to validate.
*/
private void validate(Pair[] data) {
for (int i = 0; i < data.length; i++) {
Pair pair = data[i];
assertEquals("Failed for index: " + i, pair.a, -pair.b);
}
}
    /** Pair of counters updated symmetrically by writers (a += delta, b -= delta), so a == -b must always hold. */
    private static class Pair {
        /** First component; always the negation of {@link #b}. */
        private int a;
        /** Second component; always the negation of {@link #a}. */
        private int b;
    }
}
|
|
package cz.jan.maly.service.implementation;
import bwapi.*;
import bwta.BWTA;
import cz.jan.maly.model.agent.AgentPlayer;
import cz.jan.maly.model.agent.AgentUnit;
import cz.jan.maly.model.game.util.Annotator;
import cz.jan.maly.model.game.wrappers.APlayer;
import cz.jan.maly.model.game.wrappers.AbstractPositionWrapper;
import cz.jan.maly.model.game.wrappers.UnitWrapperFactory;
import cz.jan.maly.model.game.wrappers.WrapperTypeFactory;
import cz.jan.maly.service.*;
import cz.jan.maly.utils.MyLogger;
import lombok.Getter;
import lombok.Setter;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import java.util.logging.Level;
import java.util.stream.Collectors;
/**
* Facade for bot.
* Created by Jan on 28-Dec-16.
*/
@Getter
public class BotFacade extends DefaultBWListener {
//TODO !!!THIS IS A HACK. DO NOT USE IT INSIDE OTHER COMMANDS INTERACTING WITH THE GAME!!!
//class to handle additional commands with observations requests
public static AdditionalCommandToObserveGameProcessor ADDITIONAL_OBSERVATIONS_PROCESSOR;
@Setter
@Getter
private static int gameDefaultSpeed = 0;
@Setter
@Getter
private static long maxFrameExecutionTime = 20;
@Setter
@Getter
private static boolean annotateMap = false;
//keep track of agent units
private final Map<Integer, AgentUnit> agentsWithGameRepresentation = new HashMap<>();
//fields provided by user
private final AgentUnitFactoryCreationStrategy agentUnitFactoryCreationStrategy;
private final PlayerInitializerCreationStrategy playerInitializerCreationStrategy;
private final LocationInitializerCreationStrategy locationInitializerCreationStrategy;
//facade for MAS
private MASFacade masFacade;
//executor of game commands
private GameCommandExecutor gameCommandExecutor;
//to init abstract agents at the beginning of each game
private final AbstractAgentsInitializer abstractAgentsInitializer = new AbstractAgentsInitializerImpl();
//these are created anew with each game
private AgentUnitHandler agentUnitFactory;
private PlayerInitializer playerInitializer;
private LocationInitializer locationInitializer;
//game related fields
private Mirror mirror = new Mirror();
@Getter
private Game game;
@Getter
private Player self;
private Annotator annotator;
public BotFacade(AgentUnitFactoryCreationStrategy agentUnitFactoryCreationStrategy,
PlayerInitializerCreationStrategy playerInitializerCreationStrategy,
LocationInitializerCreationStrategy locationInitializerCreationStrategy) {
this.agentUnitFactoryCreationStrategy = agentUnitFactoryCreationStrategy;
this.playerInitializerCreationStrategy = playerInitializerCreationStrategy;
this.locationInitializerCreationStrategy = locationInitializerCreationStrategy;
MyLogger.setLoggingLevel(Level.WARNING);
}
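// Usage sketch (the *Impl factory classes are assumed to exist in the concrete bot project):
//   BotFacade bot = new BotFacade(AgentUnitHandlerImpl::new,
//       PlayerInitializerImpl::new, LocationInitializerImpl::new);
//   bot.run();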
@Override
public void onStart() {
//load decision points
DecisionLoadingServiceImpl.getInstance();
UnitWrapperFactory.clearCache();
WrapperTypeFactory.clearCache();
AbstractPositionWrapper.clearCache();
//initialize game related data
game = mirror.getGame();
self = game.self();
//initialize command executor
gameCommandExecutor = new GameCommandExecutor(game);
masFacade = new MASFacade(() -> gameCommandExecutor.getCountOfPassedFrames());
ADDITIONAL_OBSERVATIONS_PROCESSOR = new AdditionalCommandToObserveGameProcessor(gameCommandExecutor);
playerInitializer = playerInitializerCreationStrategy.createFactory();
agentUnitFactory = agentUnitFactoryCreationStrategy.createFactory();
locationInitializer = locationInitializerCreationStrategy.createFactory();
//Use BWTA to analyze map
//This may take a few minutes if the map is processed for the first time!
MyLogger.getLogger().info("Analyzing map");
BWTA.readMap();
BWTA.analyze();
MyLogger.getLogger().info("Map data ready");
//init annotation
annotator = new Annotator(game.getPlayers().stream()
.filter(player -> player.isEnemy(self) || player.getID() == self.getID())
.collect(Collectors.toList()), self, game);
//init player as another agent
Optional<APlayer> player = APlayer.wrapPlayer(self);
if (!player.isPresent()) {
MyLogger.getLogger().warning("Could not initiate player.");
throw new RuntimeException("Could not initiate player.");
}
AgentPlayer agentPlayer = playerInitializer.createAgentForPlayer(player.get(), this, game.enemy().getRace());
masFacade.addAgentToSystem(agentPlayer);
//init base location as agents
BWTA.getBaseLocations().stream()
.map(location -> locationInitializer.createAgent(location, this))
.filter(Optional::isPresent)
.map(Optional::get)
.forEach(agentBaseLocation -> masFacade.addAgentToSystem(agentBaseLocation));
//init abstract agents
abstractAgentsInitializer.initializeAbstractAgents(this)
.forEach(agentBaseLocation -> masFacade.addAgentToSystem(agentBaseLocation));
//set game speed to the configured default value
game.setLocalSpeed(getGameDefaultSpeed());
MyLogger.getLogger().info("Local game speed set to " + getGameDefaultSpeed());
}
@Override
public void onUnitCreate(Unit unit) {
if (self.getID() == unit.getPlayer().getID()) {
Optional<AgentUnit> agent = agentUnitFactory.createAgentForUnit(unit, this, game.getFrameCount());
agent.ifPresent(agentObservingGame -> {
agentsWithGameRepresentation.put(unit.getID(), agentObservingGame);
masFacade.addAgentToSystem(agentObservingGame);
});
}
}
@Override
public void onUnitDestroy(Unit unit) {
if (self.getID() == unit.getPlayer().getID()) {
Optional<AgentUnit> agent = Optional.ofNullable(agentsWithGameRepresentation.remove(unit.getID()));
agent.ifPresent(agentObservingGame -> masFacade.removeAgentFromSystem(agentObservingGame, unit.getType().isBuilding()));
}
UnitWrapperFactory.unitDied(unit);
}
@Override
public void onUnitMorph(Unit unit) {
if (self.getID() == unit.getPlayer().getID()) {
Optional<AgentUnit> agent = Optional.ofNullable(agentsWithGameRepresentation.remove(unit.getID()));
agent.ifPresent(agentObservingGame -> masFacade.removeAgentFromSystem(agentObservingGame, true));
onUnitCreate(unit);
}
}
public void run() throws IOException, InterruptedException {
mirror.getModule().setEventListener(this);
mirror.startGame();
}
@Override
public void onEnd(boolean b) {
agentsWithGameRepresentation.clear();
masFacade.terminate();
}
@Override
public void onFrame() {
try {
gameCommandExecutor.actOnFrame();
}
// === Catch any exception so that a single trivial error does not "kill" the bot ====================
catch (Exception e) {
e.printStackTrace();
}
//annotate map
if (annotateMap) {
annotator.annotate();
}
}
//TODO handle more events - unit renegade, visibility
/**
* Contract for strategy to create new AgentUnitHandlerImpl for new game
*/
public interface AgentUnitFactoryCreationStrategy {
/**
* Creates a new factory.
*
* @return a new {@link AgentUnitHandler} for the upcoming game
*/
AgentUnitHandler createFactory();
}
/**
* Contract for strategy to create new LocationInitializer for new game
*/
public interface LocationInitializerCreationStrategy {
/**
* Creates a new factory.
*
* @return a new {@link LocationInitializer} for the upcoming game
*/
LocationInitializer createFactory();
}
/**
* Contract for strategy to create new PlayerInitializer for new game
*/
public interface PlayerInitializerCreationStrategy {
/**
* Creates a new factory.
*
* @return a new {@link PlayerInitializer} for the upcoming game
*/
PlayerInitializer createFactory();
}
}
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.sql;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.camel.Exchange;
import org.apache.camel.impl.DefaultProducer;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.PreparedStatementCallback;
import org.springframework.jdbc.core.PreparedStatementCreator;
import static org.springframework.jdbc.support.JdbcUtils.closeResultSet;
public class SqlProducer extends DefaultProducer {
private final String query;
private String resolvedQuery;
private final JdbcTemplate jdbcTemplate;
private final boolean batch;
private final boolean alwaysPopulateStatement;
private final SqlPrepareStatementStrategy sqlPrepareStatementStrategy;
private final boolean useMessageBodyForSql;
private int parametersCount;
public SqlProducer(SqlEndpoint endpoint, String query, JdbcTemplate jdbcTemplate, SqlPrepareStatementStrategy sqlPrepareStatementStrategy,
boolean batch, boolean alwaysPopulateStatement, boolean useMessageBodyForSql) {
super(endpoint);
this.jdbcTemplate = jdbcTemplate;
this.sqlPrepareStatementStrategy = sqlPrepareStatementStrategy;
this.query = query;
this.batch = batch;
this.alwaysPopulateStatement = alwaysPopulateStatement;
this.useMessageBodyForSql = useMessageBodyForSql;
}
@Override
public SqlEndpoint getEndpoint() {
return (SqlEndpoint) super.getEndpoint();
}
@Override
protected void doStart() throws Exception {
super.doStart();
String placeholder = getEndpoint().isUsePlaceholder() ? getEndpoint().getPlaceholder() : null;
resolvedQuery = SqlHelper.resolveQuery(getEndpoint().getCamelContext(), query, placeholder);
}
public void process(final Exchange exchange) throws Exception {
final String sql;
if (useMessageBodyForSql) {
sql = exchange.getIn().getBody(String.class);
} else {
String queryHeader = exchange.getIn().getHeader(SqlConstants.SQL_QUERY, String.class);
sql = queryHeader != null ? queryHeader : resolvedQuery;
}
final String preparedQuery = sqlPrepareStatementStrategy.prepareQuery(sql, getEndpoint().isAllowNamedParameters(), exchange);
// CAMEL-7313 - check whether to return generated keys
final Boolean shouldRetrieveGeneratedKeys =
exchange.getIn().getHeader(SqlConstants.SQL_RETRIEVE_GENERATED_KEYS, false, Boolean.class);
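// Usage sketch: a caller that wants generated keys returned sets the boolean header
//   SqlConstants.SQL_RETRIEVE_GENERATED_KEYS = true on the incoming message, and may optionally
//   name the key columns via SqlConstants.SQL_GENERATED_COLUMNS, e.g. new String[] {"ID"}
//   (the column name here is purely illustrative).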
PreparedStatementCreator statementCreator = new PreparedStatementCreator() {
@Override
public PreparedStatement createPreparedStatement(Connection con) throws SQLException {
if (!shouldRetrieveGeneratedKeys) {
return con.prepareStatement(preparedQuery);
} else {
Object expectedGeneratedColumns = exchange.getIn().getHeader(SqlConstants.SQL_GENERATED_COLUMNS);
if (expectedGeneratedColumns == null) {
return con.prepareStatement(preparedQuery, Statement.RETURN_GENERATED_KEYS);
} else if (expectedGeneratedColumns instanceof String[]) {
return con.prepareStatement(preparedQuery, (String[]) expectedGeneratedColumns);
} else if (expectedGeneratedColumns instanceof int[]) {
return con.prepareStatement(preparedQuery, (int[]) expectedGeneratedColumns);
} else {
throw new IllegalArgumentException(
"Header specifying expected returning columns isn't an instance of String[] or int[] but "
+ expectedGeneratedColumns.getClass());
}
}
}
};
jdbcTemplate.execute(statementCreator, new PreparedStatementCallback<Map<?, ?>>() {
public Map<?, ?> doInPreparedStatement(PreparedStatement ps) throws SQLException {
ResultSet rs = null;
try {
int expected = parametersCount > 0 ? parametersCount : ps.getParameterMetaData().getParameterCount();
// only populate if really needed
if (alwaysPopulateStatement || expected > 0) {
// transfer incoming message body data to prepared statement parameters, if necessary
if (batch) {
Iterator<?> iterator;
if (useMessageBodyForSql) {
iterator = exchange.getIn().getHeader(SqlConstants.SQL_PARAMETERS, Iterator.class);
} else {
iterator = exchange.getIn().getBody(Iterator.class);
}
while (iterator != null && iterator.hasNext()) {
Object value = iterator.next();
Iterator<?> i = sqlPrepareStatementStrategy.createPopulateIterator(sql, preparedQuery, expected, exchange, value);
sqlPrepareStatementStrategy.populateStatement(ps, i, expected);
ps.addBatch();
}
} else {
Object value;
if (useMessageBodyForSql) {
value = exchange.getIn().getHeader(SqlConstants.SQL_PARAMETERS);
} else {
value = exchange.getIn().getBody();
}
Iterator<?> i = sqlPrepareStatementStrategy.createPopulateIterator(sql, preparedQuery, expected, exchange, value);
sqlPrepareStatementStrategy.populateStatement(ps, i, expected);
}
}
boolean isResultSet = false;
// execute the prepared statement and populate the outgoing message
if (batch) {
int[] updateCounts = ps.executeBatch();
int total = 0;
for (int count : updateCounts) {
total += count;
}
exchange.getIn().setHeader(SqlConstants.SQL_UPDATE_COUNT, total);
} else {
isResultSet = ps.execute();
if (isResultSet) {
// preserve headers first, so we can override the SQL_ROW_COUNT header
exchange.getOut().getHeaders().putAll(exchange.getIn().getHeaders());
rs = ps.getResultSet();
SqlOutputType outputType = getEndpoint().getOutputType();
log.trace("Got result list from query: {}, outputType={}", rs, outputType);
if (outputType == SqlOutputType.SelectList) {
List<?> data = getEndpoint().queryForList(rs, true);
// for noop=true we still want to enrich with the row count header
if (getEndpoint().isNoop()) {
exchange.getOut().setBody(exchange.getIn().getBody());
} else if (getEndpoint().getOutputHeader() != null) {
exchange.getOut().setBody(exchange.getIn().getBody());
exchange.getOut().setHeader(getEndpoint().getOutputHeader(), data);
} else {
exchange.getOut().setBody(data);
}
exchange.getOut().setHeader(SqlConstants.SQL_ROW_COUNT, data.size());
} else if (outputType == SqlOutputType.SelectOne) {
Object data = getEndpoint().queryForObject(rs);
if (data != null) {
// for noop=true we still want to enrich with the row count header
if (getEndpoint().isNoop()) {
exchange.getOut().setBody(exchange.getIn().getBody());
} else if (getEndpoint().getOutputHeader() != null) {
exchange.getOut().setBody(exchange.getIn().getBody());
exchange.getOut().setHeader(getEndpoint().getOutputHeader(), data);
} else {
exchange.getOut().setBody(data);
}
exchange.getOut().setHeader(SqlConstants.SQL_ROW_COUNT, 1);
}
} else {
throw new IllegalArgumentException("Invalid outputType=" + outputType);
}
} else {
exchange.getIn().setHeader(SqlConstants.SQL_UPDATE_COUNT, ps.getUpdateCount());
}
}
if (shouldRetrieveGeneratedKeys) {
// if no OUT message yet then create one and propagate headers
if (!exchange.hasOut()) {
exchange.getOut().getHeaders().putAll(exchange.getIn().getHeaders());
}
if (isResultSet) {
// we won't return generated keys for SELECT statements
exchange.getOut().setHeader(SqlConstants.SQL_GENERATED_KEYS_DATA, Collections.emptyList());
exchange.getOut().setHeader(SqlConstants.SQL_GENERATED_KEYS_ROW_COUNT, 0);
} else {
List<?> generatedKeys = getEndpoint().queryForList(ps.getGeneratedKeys(), false);
exchange.getOut().setHeader(SqlConstants.SQL_GENERATED_KEYS_DATA, generatedKeys);
exchange.getOut().setHeader(SqlConstants.SQL_GENERATED_KEYS_ROW_COUNT, generatedKeys.size());
}
}
// data is set on exchange so return null
return null;
} finally {
closeResultSet(rs);
}
}
});
}
public void setParametersCount(int parametersCount) {
this.parametersCount = parametersCount;
}
}
|
|
package com.koenv.universalminecraftapi.commands;
import com.koenv.universalminecraftapi.ChatColor;
import com.koenv.universalminecraftapi.UniversalMinecraftAPIInterface;
import com.koenv.universalminecraftapi.http.rest.*;
import com.koenv.universalminecraftapi.methods.*;
import com.koenv.universalminecraftapi.permissions.PermissionUtils;
import com.koenv.universalminecraftapi.util.json.JSONArray;
import com.koenv.universalminecraftapi.util.json.JSONObject;
import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import java.lang.reflect.Method;
import java.lang.reflect.Parameter;
import java.lang.reflect.ParameterizedType;
import java.util.Arrays;
import java.util.Collection;
import java.util.stream.Stream;
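/**
 * Command that writes the API documentation (v1 namespaces, classes and streams plus
 * v2 REST resources and operations) to a JSON file, defaulting to {@code <platform>.json}
 * when no file name is given.
 */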
public class CreateApiDocCommand extends Command {
@Override
public void onCommand(UniversalMinecraftAPIInterface uma, CommandSource commandSource, String[] args) {
if (args.length < 1) {
args = new String[]{uma.getProvider().getPlatform() + ".json"}; // default file name: <platform>.json
}
File file = new File(args[0]);
if (file.exists()) {
commandSource.sendMessage(ChatColor.RED, "File " + file.getPath() + " already exists, aborting");
return;
}
// v1 methods
MethodInvoker methodInvoker = uma.getMethodInvoker();
JSONObject root = new JSONObject();
JSONObject v1 = new JSONObject();
JSONArray namespaces = new JSONArray();
methodInvoker.getNamespaces().values().stream().flatMap(map -> map.values().stream()).forEach(method -> namespaces.put(getV1JsonMethod(method)));
v1.put("namespaces", namespaces);
JSONArray classes = new JSONArray();
methodInvoker.getClasses().values().stream().flatMap(map -> map.values().stream()).forEach(method -> classes.put(getV1JsonMethod(method)));
v1.put("classes", classes);
JSONArray streams = new JSONArray();
uma.getStreamManager().getStreams().stream().forEach(streams::put);
v1.put("streams", streams);
root.put("v1", v1);
// v2 methods
RestHandler restHandler = uma.getRestHandler();
JSONObject v2 = new JSONObject();
JSONArray resources = new JSONArray();
restHandler.getResources().stream().forEach(method -> resources.put(getV2RestResourceMethod(method)));
v2.put("resources", resources);
JSONArray operations = new JSONArray();
restHandler.getOperations().values().stream().flatMap(map -> map.values().stream()).forEach(method -> operations.put(getV2RestOperationMethod(method)));
v2.put("operations", operations);
root.put("v2", v2);
JSONObject platform = new JSONObject();
platform.put("name", uma.getProvider().getPlatform());
platform.put("version", uma.getProvider().getPlatformVersion());
platform.put("umaVersion", uma.getProvider().getUMAVersion());
root.put("platform", platform);
try (PrintWriter printWriter = new PrintWriter(file)) {
printWriter.write(root.toString(4));
} catch (IOException e) {
e.printStackTrace();
commandSource.sendMessage(ChatColor.RED, "Failed to write to file: " + e.toString());
return;
}
commandSource.sendMessage(ChatColor.GREEN, "API documentation saved to file " + file.getPath());
}
private JSONObject getV1JsonMethod(AbstractMethod methodEntry) {
JSONObject jsonMethod = new JSONObject();
Method method = methodEntry.getJavaMethod();
jsonMethod.put("name", methodEntry.getName());
int parameterCount = method.getParameters().length;
Stream<Parameter> stream = Arrays.stream(method.getParameters());
if (methodEntry instanceof ClassMethod) {
stream = stream.skip(1);
parameterCount--;
}
if (parameterCount > 0) {
JSONArray arguments = new JSONArray();
stream
.filter(parameter -> !MethodUtils.shouldExcludeFromDoc(parameter))
.forEach(parameter -> {
JSONObject json = new JSONObject();
json.put("name", parameter.getName());
json.put("type", parameter.getType().getSimpleName());
json.put("optional", parameter.getAnnotation(OptionalParam.class) != null);
arguments.put(json);
});
jsonMethod.put("arguments", arguments);
} else {
jsonMethod.put("arguments", new JSONArray());
}
jsonMethod.put("returns", getReturnType(method));
if (methodEntry instanceof ClassMethod) {
jsonMethod.put("operatesOn", ((ClassMethod) methodEntry).getOperatesOn().getSimpleName());
} else if (methodEntry instanceof NamespacedMethod) {
jsonMethod.put("namespace", ((NamespacedMethod) methodEntry).getNamespace());
}
jsonMethod.put("permission", PermissionUtils.getPermissionPath(methodEntry.getJavaMethod()));
return jsonMethod;
}
private JSONObject getV2RestResourceMethod(RestResourceMethod methodEntry) {
JSONObject jsonMethod = new JSONObject();
Method method = methodEntry.getJavaMethod();
jsonMethod.put("path", methodEntry.getPath());
JSONArray pathParams = new JSONArray();
JSONArray queryParams = new JSONArray();
Arrays.stream(method.getParameters())
.filter(parameter -> !MethodUtils.shouldExcludeFromDoc(parameter))
.forEach(parameter -> {
if (parameter.getAnnotation(RestPath.class) != null) {
JSONObject json = new JSONObject();
json.put("name", parameter.getAnnotation(RestPath.class).value());
json.put("type", parameter.getType().getSimpleName());
pathParams.put(json);
} else if (parameter.getAnnotation(RestQuery.class) != null) {
JSONObject json = new JSONObject();
json.put("name", parameter.getAnnotation(RestQuery.class).value());
json.put("type", parameter.getType().getSimpleName());
queryParams.put(json);
}
});
jsonMethod.put("pathParams", pathParams);
jsonMethod.put("queryParams", queryParams);
jsonMethod.put("returns", getReturnType(method));
jsonMethod.put("permission", PermissionUtils.getPermissionPath(method));
return jsonMethod;
}
private JSONObject getV2RestOperationMethod(RestOperationMethod methodEntry) {
JSONObject jsonMethod = new JSONObject();
Method method = methodEntry.getJavaMethod();
jsonMethod.put("path", methodEntry.getPath());
JSONArray bodyParams = new JSONArray();
Arrays.stream(method.getParameters())
.filter(parameter -> !MethodUtils.shouldExcludeFromDoc(parameter))
.forEach(parameter -> {
if (parameter.getAnnotation(RestBody.class) != null) {
JSONObject json = new JSONObject();
json.put("name", parameter.getAnnotation(RestBody.class).value());
json.put("type", parameter.getType().getSimpleName());
bodyParams.put(json);
}
});
jsonMethod.put("bodyParams", bodyParams);
jsonMethod.put("returns", getReturnType(method));
jsonMethod.put("operatesOn", methodEntry.getOperatesOn().getSimpleName());
jsonMethod.put("method", methodEntry.getRestMethod().name());
jsonMethod.put("permission", PermissionUtils.getPermissionPath(method));
return jsonMethod;
}
private String getReturnType(Method method) {
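// Collection return types are documented as "<ElementType>[]"; for example, a List<String> is reported as "String[]".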
if (Collection.class.isAssignableFrom(method.getReturnType())) {
ParameterizedType type = (ParameterizedType) method.getGenericReturnType();
Class<?> realType = (Class<?>) type.getActualTypeArguments()[0];
return realType.getSimpleName() + "[]";
}
return method.getReturnType().getSimpleName();
}
@Override
public boolean hasPermission(CommandSource commandSource) {
return commandSource.hasPermission("universalminecraftapi.command.createapidoc");
}
@Override
public String getDescription() {
return "Create an API documentation file (JSON format)";
}
@Override
public String getUsage() {
return "<filename>";
}
}
|
|
////////////////////////////////////////////////////////////////////////////////
//
// RMG - Reaction Mechanism Generator
//
// Copyright (c) 2002-2011 Prof. William H. Green ([email protected]) and the
// RMG Team ([email protected])
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
//
////////////////////////////////////////////////////////////////////////////////
package jing.rxn;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.ListIterator;
import jing.chem.Species;
import jing.rxnSys.CoreEdgeReactionModel;
import jing.rxnSys.ReactionModel;
import jing.rxnSys.ReactionSystem;
import jing.rxnSys.SystemSnapshot;
/**
* A PDepNetwork object represents a single pressure-dependent reaction network.
* Such a network is made up of three types of reactions: isomerizations
* (A1 --> A2), associations (B + C --> A), and dissociations (A --> B + C).
* Thus each network is defined by
* <ul>
* <li>A list of unimolecular isomers { A1, A2, ... }
* <li>A list of multimolecular isomers { B1 + C1, B2 + C2, ... }
* <li>A list of path reactions (isomerizations, associations, and dissociations)
* </ul>
* PDepNetwork can also be used in a static manner to interact with all of the
* existing PDepNetwork objects at once.
*
* This is a rewrite of the old PDepNetwork class, which was tailored too
* closely to the CHEMDIS way of treating pressure-dependent networks. The new
* class is more general and is tailored to the FAME way of treating
* pressure-dependent networks.
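* <p>
* A minimal usage sketch (hypothetical {@code species} and {@code pathRxn} objects):
* <pre>{@code
* PDepNetwork network = new PDepNetwork();
* network.addIsomer(new PDepIsomer(species));  // species becomes a unimolecular isomer
* network.addReaction(pathRxn, false);         // pathRxn is a PDepReaction connecting two isomers
* boolean recalcNeeded = network.getAltered(); // true after the modifications above
* }</pre>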
* @author jwallen
*/
public class PDepNetwork {
//==========================================================================
//
// Data members
//
/**
* These are used by the network to check the core/edge states of
* individual isomers and species and to generate pathways when isomers
* gain included status.
*/
public static ReactionModel reactionModel;
public static ReactionSystem reactionSystem;
/**
* A count of the number of pressure-dependent networks that have been
* created since the inception of the current instance of RMG.
*/
private static int networkCount = 0;
/**
* A unique identifier integer for the network.
*/
private int id;
/**
* A hash map containing all of the PDepNetworks created. Each network is
* identified by the chemical formula of the unimolecular isomers.
*/
protected static LinkedList<PDepNetwork> networks = new LinkedList<PDepNetwork>();
/**
* Set to true if RMG is ready to allow pressure-dependent networks to be
* created and false if not. A holdover from the original PDepNetwork class.
*/
public static boolean generateNetworks = false;
/**
* The list of unimolecular isomers.
*/
private LinkedList<PDepIsomer> isomerList;
/**
* The list of path reactions (isomerizations, associations, and
* dissociations that directly connect two isomers).
*/
private LinkedList<PDepReaction> pathReactionList;
/**
* The list of net reactions (allowing reactions between two isomers not
* directly connected by a path reaction) that belong in the core or the
* edge of the current reaction model.
*/
private LinkedList<PDepReaction> netReactionList;
/**
* The list of net reactions (allowing reactions between two isomers not
* directly connected by a path reaction) that are neither in the core nor
* on the edge of the current reaction model.
*/
private LinkedList<PDepReaction> nonincludedReactionList;
/**
* True if the network has been modified in such a way as to require a
* new pressure-dependent calculation, and false otherwise. Examples include
* changing the number of isomers or the number of path reactions.
*/
private boolean altered;
//==========================================================================
//
// Constructor
//
/**
* Creates an empty pressure-dependent network. Does not automatically add
* the network to the PDepNetwork.networks collection.
*/
public PDepNetwork() {
isomerList = new LinkedList<PDepIsomer>();
pathReactionList = new LinkedList<PDepReaction>();
netReactionList = new LinkedList<PDepReaction>();
nonincludedReactionList = new LinkedList<PDepReaction>();
altered = false;
id = networkCount + 1;
networkCount++;
}
//==========================================================================
//
// Get accessor methods
//
/**
* Returns the unique identifier for this network.
* @return The unique identifier for this network.
*/
public int getID() {
return id;
}
/**
* Returns a list of all of the species in the network.
* @return The list of the species in the network
*/
public LinkedList<Species> getSpeciesList() {
LinkedList<Species> speciesList = new LinkedList<Species>();
for (Iterator<PDepIsomer> iter = isomerList.iterator(); iter.hasNext(); ) {
PDepIsomer isomer = iter.next();
for (int i = 0; i < isomer.getNumSpecies(); i++) {
if (!speciesList.contains(isomer.getSpecies(i)))
speciesList.add(isomer.getSpecies(i));
}
}
return speciesList;
}
/**
* Returns the list of isomers.
* @return The list of isomers
*/
public LinkedList<PDepIsomer> getIsomers() {
return isomerList;
}
public int getNumUniIsomers() {
int count = 0;
for (Iterator<PDepIsomer> iter = isomerList.iterator(); iter.hasNext(); ) {
PDepIsomer isomer = iter.next();
if (isomer.isUnimolecular())
count++;
}
return count;
}
public int getNumMultiIsomers() {
return isomerList.size() - getNumUniIsomers();
}
/**
* Returns the list of path reactions.
* @return The list of path reactions
*/
public LinkedList<PDepReaction> getPathReactions() {
return pathReactionList;
}
public void removeFromPathReactionList(PDepReaction pdr) {
pathReactionList.remove(pdr);
return;
}
/**
* Returns the list of net reactions (in the core or on the edge).
* @return The list of net reactions
*/
public LinkedList<PDepReaction> getNetReactions() {
return netReactionList;
}
public void removeFromNetReactionList(PDepReaction pdr) {
netReactionList.remove(pdr);
return;
}
/**
* Returns the list of nonincluded reactions (neither in the core nor on
* the edge).
* @return The list of nonincluded reactions
*/
public LinkedList<PDepReaction> getNonincludedReactions() {
return nonincludedReactionList;
}
public void removeFromNonincludedReactionList(PDepReaction pdr) {
nonincludedReactionList.remove(pdr);
return;
}
/**
* Returns the status of the altered flag: true if the network requires a
* new pressure-dependent calculation, false if not.
* @return The status of the altered flag
*/
public boolean getAltered() {
return altered;
}
/**
* Returns the isomer that contains the same species as those in
* speciesList.
* @param speciesList The species to check the isomers for
* @return The isomer that contains the same species as those in
* speciesList
*/
public PDepIsomer getIsomer(LinkedList speciesList) {
if (speciesList.size() == 1) {
for (ListIterator<PDepIsomer> iter = isomerList.listIterator(); iter.hasNext(); ) {
PDepIsomer isomer = iter.next();
if (isomer.getSpeciesList().equals(speciesList))
return isomer;
}
}
return null;
}
/**
* Returns the unimolecular isomer that contains the indicated species.
* @param species The species to check the isomers for
* @return The unimolecular isomer corresponding to species
*/
public PDepIsomer getIsomer(Species species) {
for (ListIterator<PDepIsomer> iter = isomerList.listIterator(); iter.hasNext(); ) {
PDepIsomer isomer = iter.next();
if (isomer.getSpecies(0).equals(species) && isomer.isUnimolecular())
return isomer;
}
return null;
}
public void removeFromIsomerList(PDepIsomer pdi) {
isomerList.remove(pdi);
return;
}
/**
* Checks to see if the network contains species as a unimolecular isomer.
* @param species The species to check for
* @return True if species is a unimolecular isomer in the network, false if
* not
*/
public boolean contains(Species species) {
for (ListIterator<PDepIsomer> iter = isomerList.listIterator(); iter.hasNext(); ) {
PDepIsomer isomer = iter.next();
if (isomer.getSpecies(0).equals(species) && isomer.isUnimolecular())
return true;
}
return false;
}
/**
* Checks to see if the network contains the given reaction (or its reverse)
* as a path reaction.
* @param reaction The reaction to check for
* @return True if the reaction or its reverse is a path reaction in the
* network, false if not
*/
public boolean contains(Reaction reaction) {
if (reaction == null)
return false;
Reaction reverse = reaction.getReverseReaction();
for (ListIterator<PDepReaction> iter = pathReactionList.listIterator(); iter.hasNext(); ) {
PDepReaction rxn = iter.next();
if (reaction.equals(rxn))
return true;
if (reverse != null) {
if (reverse.equals(rxn))
return true;
}
}
return false;
}
//==========================================================================
//
// Set accessor methods
//
/**
* Adds an isomer (unimolecular or multimolecular) to the appropriate
* list in the network if it is not already present.
* @param isomer The isomer to add
*/
public void addIsomer(PDepIsomer isomer) {
// Don't add if isomer is already in network
if (isomerList.contains(isomer))
return;
// Add isomer: keep unimolecular isomers first
if (isomer.isUnimolecular()) {
int index = -1;
for (int i = 0; i < isomerList.size(); i++) {
if (isomerList.get(i).isMultimolecular() && index < 0)
index = i;
}
if (index >= 0)
isomerList.add(index, isomer);
else
isomerList.add(isomer);
}
else
isomerList.add(isomer);
// Mark network as changed so that updated rates can be determined
altered = true;
}
/**
* Adds a path reaction to the network if it is not already present.
* @param newRxn The path reaction to add
* @param addKinetics If the reaction is already present, determines whether its
* kinetics should be added to the existing reaction
*/
public void addReaction(PDepReaction newRxn, boolean addKinetics) {
// Check to ensure that reaction is not already present
for (ListIterator<PDepReaction> iter = pathReactionList.listIterator(); iter.hasNext(); ) {
PDepReaction rxn = iter.next();
if (rxn.equals(newRxn)) {
if (addKinetics) rxn.addAdditionalKinetics(newRxn.getKinetics()[0], 1, false);
return;
}
}
// Add reaction
pathReactionList.add(newRxn);
// Mark network as changed so that updated rates can be determined
altered = true;
}
/**
* Updates the status of the altered flag: true if the network requires a
* new pressure-dependent calculation, false if not.
* @param alt The new status of the altered flag
*/
public void setAltered(boolean alt) {
altered = alt;
}
/**
* Elevates the status of the designated isomer from nonincluded to
* included, and generates pathways for this isomer. Generally a large
* number of pathways are generated.
* @param isomer The isomer to make included.
*/
public void makeIsomerIncluded(PDepIsomer isomer) {
if (!isomer.isUnimolecular() || isomer.getIncluded())
return;
isomer.setIncluded(true);
LinkedHashSet reactionSet = isomer.generatePaths(reactionSystem);
for (Iterator iter = reactionSet.iterator(); iter.hasNext(); ) {
Reaction rxn = (Reaction) iter.next();
if (!contains(rxn)) {
addReactionToNetworks(rxn);
/*
* The reactions (of form A --> B or A --> C + D) could form
* a species that is not otherwise in the edge of the
* CoreEdgeReactionModel.
* We want to leave all of the reactions alone (i.e. not add
* them to the core OR edge) but need to check whether any
* of the new species should be added to the edge.
*/
LinkedList rxnProducts = rxn.getProductList();
for (int numProds=0; numProds<rxnProducts.size(); numProds++) {
if (!((CoreEdgeReactionModel)reactionModel).containsAsReactedSpecies((Species)rxnProducts.get(numProds)))
if (!((CoreEdgeReactionModel)reactionModel).containsAsUnreactedSpecies((Species)rxnProducts.get(numProds)))
((CoreEdgeReactionModel)reactionModel).addUnreactedSpecies((Species)rxnProducts.get(numProds));
}
}
}
}
//==========================================================================
//
// Other methods
//
/**
* Redistributes the net reactions based on the current core and edge
* reaction models. Especially useful when one or more species has been
* moved from the edge to the core since the last pressure-dependent
* calculation.
* @param cerm The current core/edge reaction model
*/
public void updateReactionLists(CoreEdgeReactionModel cerm) throws PDepException {
// Merge the net reaction and nonincluded reaction lists together
// We will recalculate how to distribute them
LinkedList<PDepReaction> reactionList = new LinkedList<PDepReaction>();
for (int i = 0; i < netReactionList.size(); i++) {
PDepReaction rxn = netReactionList.get(i);
reactionList.add(rxn);
}
for (int i = 0; i < nonincludedReactionList.size(); i++) {
PDepReaction rxn = nonincludedReactionList.get(i);
reactionList.add(rxn);
}
netReactionList.clear();
nonincludedReactionList.clear();
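// Re-classify each reaction: it belongs to the net reaction list if either direction is a
// core reaction or if both of its isomers are included; otherwise it is nonincluded.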
for (int i = 0; i < reactionList.size(); i++) {
PDepReaction forward = reactionList.get(i);
PDepReaction reverse = (PDepReaction) forward.getReverseReaction();
if (reverse != null) {
if (forward.isCoreReaction(cerm) || reverse.isCoreReaction(cerm))
netReactionList.add(forward);
else if (forward.getReactant().getIncluded() && forward.getProduct().getIncluded())
netReactionList.add(forward);
else
nonincludedReactionList.add(forward);
}
else {
if (forward.isCoreReaction(cerm))
netReactionList.add(forward);
else if (forward.getReactant().getIncluded() && forward.getProduct().getIncluded())
netReactionList.add(forward);
else
nonincludedReactionList.add(forward);
}
}
}
/**
* Calculates the leak flux for this network. The leak flux is defined as
* the maximum possible flux to all nonincluded species. The maximum
* modifier implies that only the forward reaction to the nonincluded
* species is used to generate the flux, rather than the combined forward
* and backward reaction.
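* <p>
* Sketch of a typical call site (hypothetical {@code tolerance} threshold; exception
* handling omitted):
* <pre>{@code
* double rLeak = network.getLeakFlux(ss);
* if (rLeak > tolerance)
*     network.makeIsomerIncluded(network.getMaxLeakIsomer(ss));
* }</pre>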
* @param ss A system snapshot (T, P, concentrations, etc.) to use to calculate the flux.
* @return The leak flux for this network
*/
public double getLeakFlux(SystemSnapshot ss) {
double rLeak = 0.0;
// If there is only one path reaction (and thus only one nonincluded
// reaction), use the high-pressure limit rate as the flux rather than
// the k(T,P) value to ensure that we are considering the maximum
// possible flux entering the network
if (pathReactionList.size() == 1 && netReactionList.size() == 0) {
PDepReaction rxn = pathReactionList.get(0);
if (!rxn.getProduct().getIncluded())
rLeak += rxn.calculateForwardFlux(ss);
else if (!rxn.getReactant().getIncluded())
rLeak += rxn.calculateReverseFlux(ss);
else
// If both the reactant and the product are included, then the
// leak flux is zero
rLeak = 0.0;
}
// Otherwise use the set of k(T,P) values
else {
for (ListIterator<PDepReaction> iter = nonincludedReactionList.listIterator(); iter.hasNext(); ) {
PDepReaction rxn = iter.next();
if (rxn.getReactant().getIncluded() && !rxn.getProduct().getIncluded())
rLeak += rxn.calculateForwardFlux(ss);
else if (!rxn.getReactant().getIncluded() && rxn.getProduct().getIncluded())
rLeak += rxn.calculateReverseFlux(ss);
}
}
return rLeak;
}
public static double[] getSpeciesLeakFluxes(SystemSnapshot ss, CoreEdgeReactionModel cerm) {
int len = cerm.getMaxSpeciesID() + 1;
double[] leakFlux = new double[len];
for (int n = 0; n < len; n++)
leakFlux[n] = 0.0;
for (ListIterator<PDepNetwork> iter0 = networks.listIterator(); iter0.hasNext(); ) {
PDepNetwork pdn = iter0.next();
// If there is only one path reaction (and thus only one nonincluded
// reaction), use the high-pressure limit rate as the flux rather than
// the k(T,P) value to ensure that we are considering the maximum
// possible flux entering the network
if (pdn.getPathReactions().size() == 1 && pdn.getNetReactions().size() == 0) {
PDepReaction rxn = pdn.getPathReactions().get(0);
if (!rxn.getProduct().getIncluded())
leakFlux[rxn.getProduct().getSpecies(0).getID()] += rxn.calculateForwardFlux(ss);
else if (!rxn.getReactant().getIncluded())
leakFlux[rxn.getReactant().getSpecies(0).getID()] += rxn.calculateReverseFlux(ss);
}
// Otherwise use the set of k(T,P) values
else {
for (ListIterator<PDepReaction> iter = pdn.getNonincludedReactions().listIterator(); iter.hasNext(); ) {
PDepReaction rxn = iter.next();
if (rxn.getReactant().getIncluded() && !rxn.getProduct().getIncluded())
leakFlux[rxn.getProduct().getSpecies(0).getID()] += rxn.calculateForwardFlux(ss);
else if (!rxn.getReactant().getIncluded() && rxn.getProduct().getIncluded())
leakFlux[rxn.getReactant().getSpecies(0).getID()] += rxn.calculateReverseFlux(ss);
}
}
}
return leakFlux;
}
/**
* Calculates the isomer with the largest leak flux for this network. The
* reaction with the maximum flux is used to select the isomer. This isomer
* is the candidate for elevating to included status.
* @param ss A system snapshot (T, P, concentrations, etc.) to use to calculate the flux.
* @return The isomer with the largest leak flux
*/
public PDepIsomer getMaxLeakIsomer(SystemSnapshot ss) throws PDepException {
if (nonincludedReactionList.size() == 0) {
if (pathReactionList.size() == 1 && isomerList.size() == 2) {
PDepIsomer isomer1 = isomerList.get(0);
PDepIsomer isomer2 = isomerList.get(1);
if (isomer1.isUnimolecular() && !isomer1.getIncluded())
return isomer1;
else if (isomer2.isUnimolecular() && !isomer2.getIncluded())
return isomer2;
}
throw new PDepException("Tried to determine nonincluded isomer with maximum leak flux, but there are no nonincluded reactions, so no isomer can be identified.");
}
PDepReaction maxReaction = null;
double maxLeak = 0.0;
for (ListIterator<PDepReaction> iter = nonincludedReactionList.listIterator(); iter.hasNext(); ) {
PDepReaction rxn = iter.next();
if (!rxn.getReactant().getIncluded() || !rxn.getProduct().getIncluded()) {
if (Math.abs(rxn.calculateFlux(ss)) > maxLeak) {
maxReaction = rxn;
maxLeak = rxn.calculateFlux(ss);
}
}
}
if (maxReaction == null)
throw new PDepException("Tried to determine nonincluded isomer with maximum leak flux, but no suitable nonincluded reaction has been found.");
else if (!maxReaction.getReactant().getIncluded())
return maxReaction.getReactant();
else if (!maxReaction.getProduct().getIncluded())
return maxReaction.getProduct();
else
throw new PDepException("Tried to determine nonincluded isomer with maximum leak flux, but nonincluded reaction with maximum leak flux has no nonincluded isomers.");
}
public String getSpeciesType() {
for (Iterator<PDepIsomer> iter = isomerList.iterator(); iter.hasNext(); ) {
PDepIsomer isomer = iter.next();
if (isomer.isUnimolecular())
return isomer.getSpecies(0).getName();
}
return "";
}
public String toString() {
String str = "PDepNetwork #" + Integer.toString(id) + ":\n";
str += "\tIsomers:\n";
for (ListIterator<PDepIsomer> iter = isomerList.listIterator(); iter.hasNext(); ) {
PDepIsomer isomer = iter.next();
str += "\t\t" + isomer.toString() + "\n";
}
str += "\tPath reactions:\n";
for (ListIterator<PDepReaction> iter = pathReactionList.listIterator(); iter.hasNext(); ) {
PDepReaction rxn = iter.next();
str += "\t\t" + rxn.toString() + "\n";
}
str += "\tNet reactions:\n";
for (ListIterator<PDepReaction> iter = netReactionList.listIterator(); iter.hasNext(); ) {
PDepReaction rxn = iter.next();
str += "\t\t" + rxn.toString() + "\n";
}
str += "\tNonincluded reactions:\n";
for (ListIterator<PDepReaction> iter = nonincludedReactionList.listIterator(); iter.hasNext(); ) {
PDepReaction rxn = iter.next();
str += "\t\t" + rxn.toString() + "\n";
}
return str;
}
//==========================================================================
//
// Static methods (for access to PDepNetwork.networks)
//
/**
* Returns the linked list containing the currently-existing pressure-
* dependent networks
* @return The currently-existing pressure-dependent networks
*/
public static LinkedList<PDepNetwork> getNetworks() {
return networks;
}
/**
* Used to add a reaction to the appropriate pressure-dependent network. If
* no such network exists, a new network is created. For isomerization
* reactions connecting two existing networks, the networks are merged. This
* function is to be called whenever a new reaction is added to the edge.
* @param reaction The reaction to add
* @return The network the reaction was added to
*/
public static PDepNetwork addReactionToNetworks(Reaction reaction) {
// Expect that most reactions passed to this function will be already
// present in a network
// Fail if neither the reactant nor the product is unimolecular
Species species = null;
if (reaction.getReactantNumber() == 1)
species = (Species) reaction.getReactantList().get(0);
if (reaction.getProductNumber() == 1)
species = (Species) reaction.getProductList().get(0);
if (species == null)
return null;
if (reaction.getReactantNumber() > 1)
reaction = reaction.getReverseReaction();
PDepNetwork pdn = null;
if (reaction.getProductNumber() == 1) {
// Isomerization reactions should cause networks to be merged together
// This means that each unimolecular isomer should only appear in one network
// Get the appropriate pressure-dependent network(s)
PDepNetwork reac_pdn = null;
PDepNetwork prod_pdn = null;
Species reactant = (Species) reaction.getReactantList().get(0);
Species product = (Species) reaction.getProductList().get(0);
for (ListIterator<PDepNetwork> iter = networks.listIterator(); iter.hasNext(); ) {
PDepNetwork n = iter.next();
if (n.contains(reactant)) {
if (n.getIsomer(reactant).getIncluded())
{
reac_pdn = n;
if (prod_pdn != null) break; // have now found both prod_pdn and reac_pdn.
}
}
if (n.contains(product)) {
if (n.getIsomer(product).getIncluded())
{
prod_pdn = n;
if (reac_pdn != null) break; // have now found both reac_pdn and prod_pdn.
}
}
}
if (reac_pdn != null && prod_pdn != null && reac_pdn != prod_pdn) {
// Two distinct networks found; must join them together
pdn = reac_pdn;
for (int i = 0; i < prod_pdn.getIsomers().size(); i++)
pdn.addIsomer(prod_pdn.getIsomers().get(i));
for (int i = 0; i < prod_pdn.getPathReactions().size(); i++)
pdn.addReaction(prod_pdn.getPathReactions().get(i),false);
// Also remove the second network from the list of networks
networks.remove(prod_pdn);
}
else if (reac_pdn != null && prod_pdn != null && reac_pdn == prod_pdn) {
// Both species already present as unimolecular isomers in the same network, so use that network
pdn = reac_pdn;
}
else if (reac_pdn != null) {
// Only reactant species found in a network, so use that network
pdn = reac_pdn;
}
else if (prod_pdn != null) {
// Only product species found in a network, so use that network
pdn = prod_pdn;
}
else {
// No networks found for either species; will create a new network
pdn = null;
}
}
else if (reaction.getProductNumber() > 1) {
// Dissociation reactions are added to the network containing that unimolecular isomer
// Since each unimolecular isomer should only appear in one network, there should only be one such addition
// If no existing network is found, a new one may be created
// Get the appropriate pressure-dependent network
Species reactant = (Species) reaction.getReactantList().get(0);
for (ListIterator<PDepNetwork> iter = networks.listIterator(); iter.hasNext(); ) {
PDepNetwork n = iter.next();
if (n.contains(reactant)) {
if (n.getIsomer(reactant).getIncluded())
pdn = n;
}
}
}
// If network not found, create a new network
if (pdn == null) {
pdn = new PDepNetwork();
PDepIsomer isomer = new PDepIsomer(species);
pdn.addIsomer(isomer);
networks.add(pdn);
}
// Add the reaction to the network
PDepIsomer reactantIsomer = pdn.getIsomer(reaction.getReactantList());
if (reactantIsomer == null) {
reactantIsomer = new PDepIsomer(reaction.getReactantList());
pdn.addIsomer(reactantIsomer);
}
PDepIsomer productIsomer = pdn.getIsomer(reaction.getProductList());
if (productIsomer == null) {
productIsomer = new PDepIsomer(reaction.getProductList());
pdn.addIsomer(productIsomer);
}
// Always add the reaction in the direction for which we have the kinetics
PDepReaction rxn = null;
if (reaction.isForward()) {
rxn = new PDepReaction(reactantIsomer, productIsomer, reaction);
}
else {
rxn = new PDepReaction(productIsomer, reactantIsomer, reaction.getReverseReaction());
}
pdn.addReaction(rxn,false);
// Fill in partial network if necessary
if (reactantIsomer.isCore((CoreEdgeReactionModel) reactionModel) && reactantIsomer.isUnimolecular())
pdn.makeIsomerIncluded(reactantIsomer);
if (productIsomer.isCore((CoreEdgeReactionModel) reactionModel) && productIsomer.isUnimolecular())
pdn.makeIsomerIncluded(productIsomer);
// Return the created network
return pdn;
}
/**
* Useful for debugging, this function prints the isomers of each network
* to the console window.
*/
public static void printNetworks() {
int index = 0;
for (ListIterator<PDepNetwork> iter0 = networks.listIterator(); iter0.hasNext(); ) {
PDepNetwork pdn = iter0.next();
index++;
System.out.print("Network #" + Integer.toString(index) + ": ");
for (ListIterator<PDepIsomer> iter = pdn.getIsomers().listIterator(); iter.hasNext(); ) {
PDepIsomer isomer = iter.next();
System.out.print(isomer.toString());
if (iter.hasNext())
System.out.print(", ");
}
System.out.print("\n");
}
}
/**
* Checks to see if there are any core reactions hidden amongst those
* net reactions which are found in the pressure-dependent networks.
* This is particularly useful in the initialization of the reaction model,
* in which the core must have at least one reaction in it before the
* dynamic simulator can be executed.
* @param cerm The current core/edge reaction model
* @return True if core reactions are found, false if not
*/
public static boolean hasCoreReactions(CoreEdgeReactionModel cerm) {
return (getCoreReactions(cerm).size() > 0);
}
/**
* Counts the number of core reactions that are hidden amongst those
* net reactions which are found in the pressure-dependent networks.
* This is particularly useful in the initialization of the reaction model,
* in which the core must have at least one reaction in it before the
* dynamic simulator can be executed.
* @param cerm The current core/edge reaction model
* @return The number of core reactions found
*/
public static int getNumCoreReactions(CoreEdgeReactionModel cerm) {
return getCoreReactions(cerm).size();
}
/**
* Returns the core reactions that are hidden amongst those
* net reactions which are found in the pressure-dependent networks.
* This is particularly useful in the initialization of the reaction model,
* in which the core must have at least one reaction in it before the
* dynamic simulator can be executed.
* @param cerm The current core/edge reaction model
* @return The list of core reactions found
*/
public static LinkedList<PDepReaction> getCoreReactions(CoreEdgeReactionModel cerm) {
LinkedList<PDepReaction> coreReactions = new LinkedList<PDepReaction>();
for (ListIterator<PDepNetwork> iter0 = networks.listIterator(); iter0.hasNext(); ) {
PDepNetwork pdn = iter0.next();
for (ListIterator<PDepReaction> iter = pdn.getNetReactions().listIterator(); iter.hasNext(); ) {
PDepReaction rxn = iter.next();
if (rxn.isCoreReaction(cerm) && !coreReactions.contains(rxn))
coreReactions.add(rxn);
}
}
return coreReactions;
}
/**
* Counts the number of edge reactions that are hidden amongst those
* net reactions which are found in the pressure-dependent networks.
* @param cerm The current core/edge reaction model
* @return The number of edge reactions found
*/
public static int getNumEdgeReactions(CoreEdgeReactionModel cerm) {
return getEdgeReactions(cerm).size();
}
/**
* Returns the edge reactions that are hidden amongst those
* net reactions which are found in the pressure-dependent networks.
* @param cerm The current core/edge reaction model
* @return The list of edge reactions found
*/
public static LinkedList<PDepReaction> getEdgeReactions(CoreEdgeReactionModel cerm) {
LinkedList<PDepReaction> edgeReactions = new LinkedList<PDepReaction>();
for (ListIterator<PDepNetwork> iter0 = networks.listIterator(); iter0.hasNext(); ) {
PDepNetwork pdn = iter0.next();
for (ListIterator<PDepReaction> iter = pdn.getNetReactions().listIterator(); iter.hasNext(); ) {
PDepReaction rxn = iter.next();
if (rxn.getReactant().getIncluded() && rxn.getProduct().getIncluded()) {
if (rxn.isEdgeReaction(cerm) && !edgeReactions.contains(rxn))
edgeReactions.add(rxn);
}
}
}
return edgeReactions;
}
/**
* Counts the number of total path reactions in the pressure-dependent
* networks.
* @param cerm The current core/edge reaction model
* @return The number of path reactions found
*/
public static int getNumPathReactions(CoreEdgeReactionModel cerm) {
int count = 0;
for (ListIterator<PDepNetwork> iter0 = networks.listIterator(); iter0.hasNext(); ) {
PDepNetwork pdn = iter0.next();
count += pdn.getPathReactions().size();
}
return count;
}
/**
* Counts the number of total net reactions in the pressure-dependent
* networks, including all core-to-core ("core"), core-to-edge ("edge"), and
* edge-to-edge reactions.
* @param cerm The current core/edge reaction model
* @return The number of net reactions found
*/
public static int getNumNetReactions(CoreEdgeReactionModel cerm) {
int count = 0;
for (ListIterator<PDepNetwork> iter0 = networks.listIterator(); iter0.hasNext(); ) {
PDepNetwork pdn = iter0.next();
count += pdn.getNetReactions().size();
}
return count;
}
/**
* Check whether or not a given species is an included (fully explored)
* unimolecular isomer in any currently-existing network.
* @param species The species to check for included status
* @return true if the species is included in any existing network, false if not
*/
public static boolean isSpeciesIncludedInAnyNetwork(Species species) {
for (Iterator iter = networks.iterator(); iter.hasNext(); ) {
PDepNetwork network = (PDepNetwork) iter.next();
if (network.contains(species)) {
PDepIsomer isomer = network.getIsomer(species);
if (isomer.isUnimolecular() && isomer.getIncluded())
// We've identified a network wherein the species exists as
// a unimolecular isomer, and that its path reactions have
// been fully explored
// This satisfies all of the conditions, so we return true
return true;
}
}
// No suitable match for all conditions was found, so we return false
return false;
}
}
|
|
/*
* Copyright 2010 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.core.reteoo.test;
import org.drools.core.WorkingMemory;
import org.drools.core.base.ClassObjectType;
import org.drools.core.common.DefaultFactHandle;
import org.drools.core.common.InternalFactHandle;
import org.drools.core.common.InternalWorkingMemory;
import org.drools.core.common.PropagationContextFactory;
import org.drools.core.impl.InternalKnowledgeBase;
import org.drools.core.impl.StatefulKnowledgeSessionImpl;
import org.drools.core.reteoo.BetaMemory;
import org.drools.core.reteoo.JoinNode;
import org.drools.core.reteoo.LeftInputAdapterNode;
import org.drools.core.reteoo.LeftTupleImpl;
import org.drools.core.reteoo.ObjectTypeNode;
import org.drools.core.reteoo.builder.BuildContext;
import org.drools.core.reteoo.test.dsl.DslStep;
import org.drools.core.reteoo.test.dsl.NodeTestCase;
import org.drools.core.reteoo.test.dsl.NodeTestCaseResult;
import org.drools.core.reteoo.test.dsl.NodeTestCaseResult.NodeTestResult;
import org.drools.core.reteoo.test.dsl.NodeTestCaseResult.Result;
import org.drools.core.reteoo.test.dsl.NodeTestDef;
import org.drools.core.rule.Declaration;
import org.drools.core.spi.PropagationContext;
import org.drools.core.spi.Tuple;
import org.drools.core.test.model.Person;
import org.drools.core.util.index.TupleList;
import org.junit.Ignore;
import org.junit.Test;
import org.kie.api.runtime.rule.FactHandle;
import org.kie.internal.KnowledgeBaseFactory;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import static org.junit.Assert.*;
@Ignore("phreak")
public class ReteDslTestEngineTest {
@Test
public void testDslCommandBuilder() {
InputStream stream = getClass().getResourceAsStream( "DslTestBuilder.testCase" );
assertNotNull( stream );
NodeTestCase test = null;
try {
test = ReteDslTestEngine.compile( stream );
} catch ( Exception e ) {
e.printStackTrace();
fail( "Unexpected Exception: " + e.getMessage() );
}
assertEquals( "Test Case Name",
test.getName() );
checkSetup( test );
assertEquals( 0,
test.getTearDown().size() );
List<NodeTestDef> tests = test.getTests();
assertEquals( 2,
tests.size() );
NodeTestDef test1 = tests.get( 0 );
assertEquals( "test 1",
test1.getName() );
assertEquals( 22,
test1.getLine() );
DslStep[] steps = test1.getSteps().toArray( new DslStep[0] );
assertEquals( 6,
steps.length );
// step 0
int step = 0;
assertEquals( "assert",
steps[step].getName() );
assertEquals( 1, steps[step].getCommands().size() );
checkCommand( new String[] { "left", "[h0]" },
steps[step].getCommands().get( 0 ) );
// step 1
step = 1;
assertEquals( "col",
steps[step].getName() );
assertEquals( 1, steps[step].getCommands().size() );
checkCommand( new String[] { "leftMemory", "[[h0]]" },
steps[step].getCommands().get( 0 ) );
// step 2
step = 2;
assertEquals( "sink",
steps[step].getName() );
assertEquals( 2, steps[step].getCommands().size() );
checkCommand( new String[] { "verify", "assert", "count", "1" },
steps[step].getCommands().get( 0 ) );
checkCommand( new String[] { "verify", "assert", "tuple0[1]", "is(empty())" },
steps[step].getCommands().get( 1 ) );
// Another test
test1 = tests.get( 1 );
assertEquals( "another test",
test1.getName() );
assertEquals( 38,
test1.getLine() );
steps = test1.getSteps().toArray( new DslStep[0] );
assertEquals( 3,
steps.length );
// step 0
step = 0;
assertEquals( "assert",
steps[step].getName() );
assertEquals( 1, steps[step].getCommands().size() );
checkCommand( new String[] { "left", "[h2]" },
steps[step].getCommands().get( 0 ) );
// step 1
step = 1;
assertEquals( "col",
steps[step].getName() );
assertEquals( 1, steps[step].getCommands().size() );
checkCommand( new String[] { "leftMemory", "[[h2]]" },
steps[step].getCommands().get( 0 ) );
}
private void checkCommand(String[] expected,
String[] actual) {
assertEquals( expected.length, actual.length );
for( int i = 0; i < expected.length; i++ ) {
assertEquals( expected[i], actual[i] );
}
}
private void checkSetup(NodeTestCase test) {
// SETUP
DslStep[] steps = test.getSetup().toArray( new DslStep[0] );
assertEquals( 6,
steps.length );
// step 0
int step = 0;
assertEquals( "LeftTupleSource",
steps[step].getName() );
assertEquals( 7,
steps[step].getLine() );
assertEquals( 1,
steps[step].getCommands().size() );
assertEquals( 1,
steps[step].getCommands().get( 0 ).length );
assertEquals( "left",
steps[step].getCommands().get( 0 )[0] );
// step 1
step = 1;
assertEquals( "ObjectSource",
steps[step].getName() );
assertEquals( 8,
steps[step].getLine() );
assertEquals( 1,
steps[step].getCommands().size() );
assertEquals( 1,
steps[step].getCommands().get( 0 ).length );
assertEquals( "right",
steps[step].getCommands().get( 0 )[0] );
// step 2
step = 2;
assertEquals( "LeftTupleSink",
steps[step].getName() );
assertEquals( 9,
steps[step].getLine() );
assertEquals( 1,
steps[step].getCommands().size() );
assertEquals( 1,
steps[step].getCommands().get( 0 ).length );
assertEquals( "sink",
steps[step].getCommands().get( 0 )[0] );
// step 3
step = 3;
assertEquals( "CollectNode",
steps[step].getName() );
assertEquals( 10,
steps[step].getLine() );
assertEquals( 2,
steps[step].getCommands().size() );
assertEquals( 4,
steps[step].getCommands().get( 0 ).length );
assertEquals( "col",
steps[step].getCommands().get( 0 )[0] );
assertEquals( "left",
steps[step].getCommands().get( 0 )[1] );
assertEquals( "right",
steps[step].getCommands().get( 0 )[2] );
assertEquals( "java.util.ArrayList",
steps[step].getCommands().get( 0 )[3] );
assertEquals( 4,
steps[step].getCommands().get( 1 ).length );
assertEquals( "source",
steps[step].getCommands().get( 1 )[0] );
assertEquals( "type",
steps[step].getCommands().get( 1 )[1] );
assertEquals( "==",
steps[step].getCommands().get( 1 )[2] );
assertEquals( "l1",
steps[step].getCommands().get( 1 )[3] );
// step 4
step = 4;
assertEquals( "Binding",
steps[step].getName() );
assertEquals( 13,
steps[step].getLine() );
assertEquals( 1,
steps[step].getCommands().size() );
assertEquals( 4,
steps[step].getCommands().get( 0 ).length );
assertEquals( "l1",
steps[step].getCommands().get( 0 )[0] );
assertEquals( "0",
steps[step].getCommands().get( 0 )[1] );
assertEquals( "Person",
steps[step].getCommands().get( 0 )[2] );
assertEquals( "likes",
steps[step].getCommands().get( 0 )[3] );
// step 5
step = 5;
assertEquals( "Facts",
steps[step].getName() );
assertEquals( 15,
steps[step].getLine() );
assertEquals( 1,
steps[step].getCommands().size() );
assertEquals( 6,
steps[step].getCommands().get( 0 ).length );
assertEquals( "Person('darth', 35, \"brie\")",
steps[step].getCommands().get( 0 )[0] );
assertEquals( "Cheese('brie', 12)",
steps[step].getCommands().get( 0 )[3] );
}
@Test
public void testObjectTypeNodeStep() throws Exception {
String str = "TestCase 'testOTN'\nTest 'dummy'\n";
str += "ObjectTypeNode :\n";
str += "otn1, java.lang.Integer;\n";
NodeTestResult result = executeTest( str );
ObjectTypeNode otn1 = (ObjectTypeNode) result.context.get( "otn1" );
assertNotNull( otn1 );
assertEquals( new ClassObjectType( Integer.class ),
otn1.getObjectType() );
}
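// Hedged note on the node DSL exercised by these tests (inferred from the cases in this
// class, not a formal grammar): each step is a "<StepName>:" header followed by one command
// per line, with arguments separated by commas and terminated by ';'. Facts declared under
// "Facts:" become handles h0, h1, ... in declaration order, e.g.:
//   Facts:
//     0, 1;
//   assert:
//     otn1, [h0, h1];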
@Test
public void testLeftInputAdapterNodeStep() throws Exception {
String str = "TestCase 'testOTN'\nTest 'dummy'\n";
str += "ObjectTypeNode:\n";
str += " otn1, java.lang.Integer;\n";
str += "LeftInputAdapterNode:\n";
str += " lian0, otn1;";
NodeTestResult result = executeTest( str );
ObjectTypeNode otn1 = (ObjectTypeNode) result.context.get( "otn1" );
LeftInputAdapterNode lian0 = (LeftInputAdapterNode) result.context.get( "lian0" );
assertNotNull( lian0 );
assertSame ( lian0,
otn1.getObjectSinkPropagator().getSinks()[0] );
}
@Test
public void testBindingStep() throws Exception {
String str = "TestCase 'testOTN'\nTest 'dummy'\n";
str += "Binding:\n";
str += " p1, 0, java.lang.Integer, intValue;\n";
NodeTestResult result = executeTest( str );
Declaration p1 = (Declaration) result.context.get( "p1" );
assertNotNull( p1 );
}
@Test
public void testJoinNodeStep() throws Exception {
String str = "TestCase 'testOTN'\nTest 'dummy'\n";
str += "ObjectTypeNode:\n";
str += " otn1, java.lang.Integer;\n";
str += "LeftInputAdapterNode:\n";
str += " lian0, otn1;\n";
str += "ObjectTypeNode:\n";
str += " otn2, java.lang.Integer;\n";
str += "Binding:\n";
str += " p1, 0, java.lang.Integer, intValue;\n";
str += "JoinNode:\n";
str += " join1, lian0, otn2;\n";
str += " intValue, !=, p1;\n";
NodeTestResult result = executeTest( str );
Map<String, Object> map = result.context;
JoinNode join1 = (JoinNode) map.get( "join1" );
assertNotNull( join1 );
InternalKnowledgeBase kBase = (InternalKnowledgeBase) KnowledgeBaseFactory.newKnowledgeBase();
PropagationContextFactory pctxFactory = kBase.getConfiguration().getComponentFactory().getPropagationContextFactory();
PropagationContext context = pctxFactory.createPropagationContext(0, PropagationContext.Type.INSERTION, null, null, null);
StatefulKnowledgeSessionImpl workingMemory = new StatefulKnowledgeSessionImpl( 1L, kBase );
BetaMemory memory = (BetaMemory) workingMemory.getNodeMemory( join1 );
final DefaultFactHandle f0 = new DefaultFactHandle( 0,
0 );
final LeftTupleImpl tuple0 = new LeftTupleImpl( f0,
join1,
true );
// assert tuple, should add one to left memory
join1.assertLeftTuple( tuple0,
context,
workingMemory );
// check memories: left memory is populated, right memory is empty
assertEquals( 1,
memory.getLeftTupleMemory().size() );
assertEquals( 0,
memory.getRightTupleMemory().size() );
// assert another tuple, left memory size should now be 2
final DefaultFactHandle f1 = new DefaultFactHandle( 1,
0 );
final LeftTupleImpl tuple1 = new LeftTupleImpl( f1,
join1,
true );
join1.assertLeftTuple( tuple1,
context,
workingMemory );
assertEquals( 2,
memory.getLeftTupleMemory().size() );
Tuple leftTuple = ((TupleList)memory.getLeftTupleMemory()).getFirst( );
assertEquals( tuple0,
leftTuple );
assertEquals( tuple1,
leftTuple.getNext() );
}
@SuppressWarnings("unchecked")
@Test
public void testFactsStep() throws Exception {
String str = "TestCase 'testOTN'\nTest 'dummy'\n";
str += "Facts:\n";
str += " 1, 2, 'hello',\n";
str += " 'good bye', new java.util.ArrayList();\n";
NodeTestResult result = executeTest( str );
Map<String, Object> map = result.context;
WorkingMemory wm = (WorkingMemory) map.get( "WorkingMemory" );
List<InternalFactHandle> handles = (List<InternalFactHandle>) map.get( "Handles" );
assertNotNull( wm );
assertNotNull( handles );
assertEquals( 5,
handles.size() );
assertEquals( 1,
handles.get( 0 ).getObject() );
assertEquals( 2,
handles.get( 1 ).getObject() );
assertEquals( "hello",
handles.get( 2 ).getObject() );
assertEquals( "good bye",
handles.get( 3 ).getObject() );
assertEquals( new ArrayList<FactHandle>(),
handles.get( 4 ).getObject() );
}
@SuppressWarnings("unchecked")
@Test
public void testWithStep() throws IOException {
String str = "TestCase 'testOTN'\nTest 'dummy'\n";
str += "Facts:\n";
str += " 1, 2, new org.drools.core.test.model.Person('darth', 35),\n";
str += " 'good bye', new java.util.ArrayList();\n";
str += "With:\n";
str += " h2, age = 36, city = 'la',\n";
str += " state = 'ca';\n";
str += " h4, add( 2 );\n";
NodeTestResult result = executeTest( str );
Map<String, Object> map = result.context;
WorkingMemory wm = (WorkingMemory) map.get( "WorkingMemory" );
List<InternalFactHandle> handles = (List<InternalFactHandle>) map.get( "Handles" );
assertNotNull( wm );
assertNotNull( handles );
assertEquals( 5,
handles.size() );
Person p = new Person( "darth", 36);
p.setAge( 36 );
p.setCity( "la" );
p.setState( "ca" );
List<Integer> list = new ArrayList<Integer>();
list.add( 2 );
assertEquals( 1,
handles.get( 0 ).getObject() );
assertEquals( 2,
handles.get( 1 ).getObject() );
assertEquals( p,
handles.get( 2 ).getObject() );
assertEquals( "good bye",
handles.get( 3 ).getObject() );
assertEquals( list,
handles.get( 4 ).getObject() );
}
@Test
public void testBetaNodeAssertOperations() throws IOException {
String str = "TestCase 'testOTN'\nTest 'dummy'\n";
str += "ObjectTypeNode:\n";
str += " otn1, java.lang.Integer;\n";
str += "LeftInputAdapterNode:\n";
str += " lian0, otn1;\n";
str += "ObjectTypeNode:\n";
str += " otn2, java.lang.Integer;\n";
str += "Binding:\n";
str += " p1, 0, java.lang.Integer, intValue;\n";
str += "JoinNode:\n";
str += " join1, lian0, otn2;\n";
str += " intValue, !=, p1;\n";
str += "Facts:\n";
str += " 0, 1, 2, 3;\n";
str += "assert:\n";
str += " otn2,[h0, h2];\n";
str += " otn1,[h1, h3];\n";
NodeTestResult result = executeTest( str );
Map<String, Object> map = result.context;
InternalWorkingMemory wm = (InternalWorkingMemory) map.get( "WorkingMemory" );
JoinNode join1 = (JoinNode) map.get( "join1" );
BetaMemory memory = (BetaMemory) wm.getNodeMemory( join1 );
assertEquals( 2,
memory.getRightTupleMemory().size() );
assertEquals( 2,
memory.getLeftTupleMemory().size() );
}
@Test
public void testBetaNodeRetractOperations() throws IOException {
String str = "TestCase 'testOTN'\nTest 'dummy'\n";
str += "ObjectTypeNode:\n";
str += " otn1, java.lang.Integer;\n";
str += "LeftInputAdapterNode:\n";
str += " lian0, otn1;\n";
str += "ObjectTypeNode:\n";
str += " otn2, java.lang.Integer;\n";
str += "Binding:\n";
str += " p1, 0, java.lang.Integer, intValue;\n";
str += "JoinNode:\n";
str += " join1, lian0, otn2;\n";
str += " intValue, !=, p1;\n";
str += "Facts:\n";
str += " 0, 1, 2, 3;\n";
str += "assert:\n";
str += " otn1,[h1, h3];\n";
str += " otn2,[h0, h2];\n";
str += "retract:\n";
str += " otn1,[h1];\n";
str += " otn2,[h2];\n";
NodeTestResult result = executeTest( str );
Map<String, Object> map = result.context;
InternalWorkingMemory wm = (InternalWorkingMemory) map.get( "WorkingMemory" );
List<InternalFactHandle> handles = (List<InternalFactHandle>) map.get( "Handles" );
JoinNode join1 = (JoinNode) map.get( "join1" );
BetaMemory memory = (BetaMemory) wm.getNodeMemory( join1 );
assertEquals( 1,
memory.getRightTupleMemory().size() );
assertEquals( 1,
memory.getLeftTupleMemory().size() );
}
@Test
public void testBetaNodeSimpleMemoryChecks() throws IOException {
String str = "TestCase 'testOTN'\nTest 'dummy'\n";
str += "ObjectTypeNode:\n";
str += " otn1, java.lang.Integer;\n";
str += "LeftInputAdapterNode:\n";
str += " lian0, otn1;\n";
str += "ObjectTypeNode:\n";
str += " otn2, java.lang.Integer;\n";
str += "Binding:\n";
str += " p1, 0, java.lang.Integer, intValue;\n";
str += "JoinNode:\n";
str += " join1, lian0, otn2;\n";
str += " intValue, !=, p1;\n";
str += "Facts:\n";
str += " 0, 1, 2, 3;\n";
str += "assert:\n";
str += " otn1,[h1, h3];\n";
str += " otn2,[h0, h2];\n";
str += "join1:\n";
str += " leftMemory,[[h1], [h3]];\n";
str += " rightMemory,[h0, h2];\n";
str += "retract:\n";
str += " otn1,[h1];\n";
str += " otn2,[h2];\n";;
str += "join1:\n";
str += " leftMemory,[ [h3] ];\n";
str += " rightMemory,[h0];\n";
NodeTestResult result = executeTest( str );
Map<String, Object> map = result.context;
InternalWorkingMemory wm = (InternalWorkingMemory) map.get( "WorkingMemory" );
List<InternalFactHandle> handles = (List<InternalFactHandle>) map.get( "Handles" );
JoinNode join1 = (JoinNode) map.get( "join1" );
BetaMemory memory = (BetaMemory) wm.getNodeMemory( join1 );
assertEquals( 1,
memory.getRightTupleMemory().size() );
assertEquals( 1,
memory.getLeftTupleMemory().size() );
}
@Test
public void testBetaNodeChainedMemoryChecks() throws IOException {
String str = "TestCase 'testOTN'\nTest 'dummy'\n";
str += "ObjectTypeNode:\n";
str += " otn1, java.lang.Integer;\n";
str += "LeftInputAdapterNode:\n";
str += " lian0, otn1;\n";
str += "ObjectTypeNode:\n";
str += " otn2, java.lang.Integer;\n";
str += "ObjectTypeNode:\n";
str += " otn3, java.lang.Integer;\n";
str += "Binding:\n";
str += " p1, 0, java.lang.Integer, intValue;\n";
str += "JoinNode:\n";
str += " join1, lian0, otn2;\n";
str += " intValue, !=, p1;\n";
str += "JoinNode:\n";
str += " join2, join1, otn3;\n";
str += " intValue, !=, p1;\n";
str += "Facts:\n";
str += " 0, 1, 2, 3, 4;\n";
str += "assert:\n";
str += " otn1, [h1, h3];\n";
str += " otn2, [h0, h2];\n";
str += " otn3, [h4];\n";
str += "join1:\n";
str += " leftMemory, [[h1], [h3]];\n";
str += " rightMemory, [h0, h2];\n";
str += "join2:\n";
str += " leftMemory, [[h1, h0], [h3, h0],\n";
str += " [h1, h2], [h3, h2]];\n";
str += " rightMemory, [h4];\n";
str += "retract:\n";
str += " otn1, [h1];\n";
str += " otn2, [h2];\n";;
str += "join1:\n";
str += " leftMemory, [ [h3] ];\n";
str += " rightMemory, [h0];\n";
str += "join2:\n";
str += " leftMemory, [[h3, h0]];\n";
str += " rightMemory, [h4];\n";
NodeTestResult result = executeTest( str );
Map<String, Object> map = result.context;
InternalWorkingMemory wm = (InternalWorkingMemory) map.get( "WorkingMemory" );
List<InternalFactHandle> handles = (List<InternalFactHandle>) map.get( "Handles" );
JoinNode join1 = (JoinNode) map.get( "join1" );
BetaMemory memory = (BetaMemory) wm.getNodeMemory( join1 );
assertEquals( 1,
memory.getRightTupleMemory().size() );
assertEquals( 1,
memory.getLeftTupleMemory().size() );
}
@Test
public void testBetaNodeChainedMemoryWithIndexChecks() throws IOException {
String str = "TestCase 'testOTN'\nTest 'dummy'\n";
str += "ObjectTypeNode:\n";
str += " otn1, java.lang.Integer;\n";
str += "LeftInputAdapterNode:\n";
str += " lian0, otn1;\n";
str += "ObjectTypeNode:\n";
str += " otn2, java.lang.Integer;\n";
str += "ObjectTypeNode:\n";
str += " otn3, java.lang.Integer;\n";
str += "Binding:\n";
str += " p1, 0, java.lang.Integer, intValue;\n";
str += "JoinNode:\n";
str += " join1, lian0, otn2;\n";
str += " intValue, ==, p1;\n";
str += "JoinNode:\n";
str += " join2, join1, otn3;\n";
str += " intValue, ==, p1;\n";
str += "Facts:\n";
str += " new Integer(0), new Integer(0), new Integer(0), 2, 4;\n";
str += "assert:\n";
str += " otn1, [h1, h3];\n";
str += " otn2, [h0, h2];\n";
str += " otn3, [h4];\n";
str += "join1:\n";
str += " leftMemory, [[h1]];\n";
str += " leftMemory, [[h3]];\n";
str += " rightMemory, [h0, h2];\n";
str += "join2:\n";
str += " leftMemory, [[h1, h0],\n";
str += " [h1, h2]];\n";
str += " rightMemory, [h4];\n";
str += "retract:\n";
str += " otn1, [h2];\n";
str += " otn2, [h3];\n";
str += "join1:\n";
str += " leftMemory, [ [h1] ];\n";
str += " rightMemory, [h0];\n";
str += "join2:\n";
str += " leftMemory, [[h1, h0]];\n";
str += " rightMemory, [h4];\n";
NodeTestResult result = executeTest( str );
Map<String, Object> map = result.context;
InternalWorkingMemory wm = (InternalWorkingMemory) map.get( "WorkingMemory" );
List<InternalFactHandle> handles = (List<InternalFactHandle>) map.get( "Handles" );
JoinNode join1 = (JoinNode) map.get( "join1" );
BetaMemory memory = (BetaMemory) wm.getNodeMemory( join1 );
assertEquals( 1,
memory.getRightTupleMemory().size() );
assertEquals( 1,
memory.getLeftTupleMemory().size() );
}
@Test
public void testBetaNodeModifyOperations() throws IOException {
String str = "TestCase 'testOTN'\nTest 'dummy'\n";
str += "ObjectTypeNode:\n";
str += " otn1, org.drools.core.test.model.Person;\n";
str += "LeftInputAdapterNode:\n";
str += " lian0, otn1;\n";
str += "ObjectTypeNode:\n";
str += " otn2, org.drools.core.test.model.Person;\n";
str += "ObjectTypeNode:\n";
str += " otn3, org.drools.core.test.model.Person;\n";
str += "Binding:\n";
str += " p1, 0, org.drools.core.test.model.Person, age;\n";
str += "JoinNode:\n";
str += " join1, lian0, otn2;\n";
str += " age, ==, p1;\n";
str += "JoinNode:\n";
str += " join2, join1, otn3;\n";
str += " age, ==, p1;\n";
str += "Facts:\n";
str += " new org.drools.core.test.model.Person('darth', 35), new org.drools.core.test.model.Person('bobba', 35),\n";
str += " new org.drools.core.test.model.Person('yoda', 35), new org.drools.core.test.model.Person('luke', 35),\n";
str += " new org.drools.core.test.model.Person('dave', 36);\n";
str += "assert:\n";
str += " otn1, [h1, h3, h4];\n";
str += " otn2, [h0, h2];\n";
str += "join1:\n";
str += " leftMemory, [[h1], [h3]];\n"; // check leftMemory twice, as we have two index buckets
str += " leftMemory, [[h4]];\n";
str += " rightMemory, [h0, h2];\n";
str += "join2:\n";
str += " leftMemory, [[h1, h0], [h3, h0],\n";
str += " [h1, h2], [h3, h2]];\n";
str += " rightMemory, [];\n";
str += "With:\n";
str += " h1, age = 36;\n";
str += "modify:\n";
str += " otn1, [h1];\n";
str += "join1:\n";
str += " leftMemory, [[h3]];\n";
str += " leftMemory, [[h4], [h1]];\n"; // notice it's moved to the new bucket
str += " rightMemory, [h0, h2];\n";
str += "join2:\n";
str += " leftMemory, [[h3, h0],\n";
str += " [h3, h2]];\n";
str += " rightMemory, [];\n";
NodeTestResult result = executeTest( str );
Map<String, Object> map = result.context;
InternalWorkingMemory wm = (InternalWorkingMemory) map.get( "WorkingMemory" );
List<InternalFactHandle> handles = (List<InternalFactHandle>) map.get( "Handles" );
JoinNode join1 = (JoinNode) map.get( "join1" );
BetaMemory memory = (BetaMemory) wm.getNodeMemory( join1 );
assertEquals( 2,
memory.getRightTupleMemory().size() );
assertEquals( 3,
memory.getLeftTupleMemory().size() );
JoinNode join2 = (JoinNode) map.get( "join2" );
memory = (BetaMemory) wm.getNodeMemory( join2 );
assertEquals( 0,
memory.getRightTupleMemory().size() );
assertEquals( 2,
memory.getLeftTupleMemory().size() );
}
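// Hedged note (inferred from the assertions above): join1 uses an '==' constraint on the
// bound 'age' declaration, so its left memory is hash-indexed by age. Facts with age 35 and
// age 36 land in different buckets, which is why leftMemory is verified once per bucket and
// why modifying h1 to age 36 moves it into the second bucket.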
@Test
public void testNotNodeStep() throws IOException {
String str = "TestCase 'testOTN'\nTest 'dummy'\n";
str += "ObjectTypeNode:\n";
str += " otn0, org.drools.core.test.model.Person;\n";
str += "LeftInputAdapterNode:\n";
str += " lian0, otn0;\n";
str += "ObjectTypeNode:\n";
str += " otn1, org.drools.core.test.model.Person;\n";
str += "Binding:\n";
str += " p1, 0, org.drools.core.test.model.Person, age;\n";
str += "NotNode:\n";
str += " not0, lian0, otn1;\n";
str += " age, !=, p1;\n";
str += "LeftTupleSink:\n";
str += " sink, not0;\n";
str += "Facts:\n";
str += " new org.drools.core.test.model.Person('darth', 35), new org.drools.core.test.model.Person('bobba', 35);\n";
str += "assert:\n";
str += " otn0, [h0];\n";
str += " otn1, [h1];\n";
str += "sink:\n";
str += " verify, assertLeft, count, 1;\n";
str += "With:\n";
str += " h1, age = 36;\n";
str += "modify:\n";
str += " otn1, [h1];\n";
str += "sink:\n";
str += " verify, retractLeft, count, 1;\n";
NodeTestResult result = executeTest( str );
Map<String, Object> map = result.context;
}
@Test
public void testConfigOptions() throws IOException {
String str = "TestCase 'testOTN'\nTest 'dummy'\n";
str += "Config:\n";
str += " drools.phreakEnabled, true;\n";
str += "ObjectTypeNode:\n";
str += " otn1, java.lang.Integer;\n";
NodeTestResult result = executeTest( str );
Map<String, Object> map = result.context;
BuildContext buildCtx = (BuildContext) map.get( ReteDslTestEngine.BUILD_CONTEXT );
assertTrue(buildCtx.getKnowledgeBase().getConfiguration().isPhreakEnabled());
}
private void print(DslStep[] steps) {
for ( DslStep command : steps ) {
System.out.println( command );
}
}
private NodeTestResult executeTest(String str) throws IOException {
NodeTestCase testCase = ReteDslTestEngine.compile( str );
if( testCase.hasErrors() ) {
fail( testCase.getErrors().toString() );
}
ReteDslTestEngine tester = new ReteDslTestEngine();
NodeTestCaseResult testCaseResult = tester.run( testCase, null );
NodeTestResult result = testCaseResult.getResults().get( 0 );
if( result.result != Result.SUCCESS ) {
fail( result.getMessages() );
}
return result;
}
}
|
|
// Copyright 2000-2022 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license.
package com.intellij.openapi.util;
import com.intellij.openapi.util.io.PathExecLazyValue;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.util.text.Strings;
import com.intellij.util.lang.JavaVersion;
import com.intellij.util.system.CpuArch;
import org.jetbrains.annotations.ApiStatus;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.File;
import java.util.List;
/**
* Provides information about operating system, system-wide settings, and Java Runtime.
*/
public final class SystemInfo {
public static final String OS_NAME = SystemInfoRt.OS_NAME;
public static final String OS_VERSION = SystemInfoRt.OS_VERSION;
public static final String OS_ARCH = System.getProperty("os.arch");
public static final String JAVA_VERSION = System.getProperty("java.version");
public static final String JAVA_RUNTIME_VERSION = getRtVersion(JAVA_VERSION);
public static final String JAVA_VENDOR = System.getProperty("java.vm.vendor", "Unknown");
private static String getRtVersion(@SuppressWarnings("SameParameterValue") String fallback) {
String rtVersion = System.getProperty("java.runtime.version");
return rtVersion != null && Character.isDigit(rtVersion.charAt(0)) ? rtVersion : fallback;
}
public static final boolean isWindows = SystemInfoRt.isWindows;
public static final boolean isMac = SystemInfoRt.isMac;
public static final boolean isLinux = SystemInfoRt.isLinux;
public static final boolean isFreeBSD = SystemInfoRt.isFreeBSD;
public static final boolean isSolaris = SystemInfoRt.isSolaris;
public static final boolean isUnix = SystemInfoRt.isUnix;
public static final boolean isChromeOS = isLinux && isCrostini();
public static final boolean isOracleJvm = Strings.indexOfIgnoreCase(JAVA_VENDOR, "Oracle", 0) >= 0;
public static final boolean isIbmJvm = Strings.indexOfIgnoreCase(JAVA_VENDOR, "IBM", 0) >= 0;
public static final boolean isAzulJvm = Strings.indexOfIgnoreCase(JAVA_VENDOR, "Azul", 0) >= 0;
public static final boolean isJetBrainsJvm = Strings.indexOfIgnoreCase(JAVA_VENDOR, "JetBrains", 0) >= 0;
public static final boolean isMetalRendering = isMac && Boolean.getBoolean("sun.java2d.metal");
@SuppressWarnings("SpellCheckingInspection")
private static boolean isCrostini() {
return new File("/dev/.cros_milestone").exists();
}
public static boolean isOsVersionAtLeast(@NotNull String version) {
return StringUtil.compareVersionNumbers(OS_VERSION, version) >= 0;
}
public static final boolean isWin7OrNewer = isWindows && isOsVersionAtLeast("6.1");
public static final boolean isWin8OrNewer = isWindows && isOsVersionAtLeast("6.2");
public static final boolean isWin10OrNewer = isWindows && isOsVersionAtLeast("10.0");
public static final boolean isXWindow = SystemInfoRt.isXWindow;
public static final boolean isWayland, isGNOME, isKDE, isXfce, isI3;
static {
// http://askubuntu.com/questions/72549/how-to-determine-which-window-manager-is-running/227669#227669
// https://userbase.kde.org/KDE_System_Administration/Environment_Variables#KDE_FULL_SESSION
if (isXWindow) {
isWayland = System.getenv("WAYLAND_DISPLAY") != null;
String desktop = System.getenv("XDG_CURRENT_DESKTOP"), gdmSession = System.getenv("GDMSESSION");
isGNOME = desktop != null && desktop.contains("GNOME") || gdmSession != null && gdmSession.contains("gnome");
isKDE = !isGNOME && (desktop != null && desktop.contains("KDE") || System.getenv("KDE_FULL_SESSION") != null);
isXfce = !isGNOME && !isKDE && (desktop != null && desktop.contains("XFCE"));
isI3 = !isGNOME && !isKDE && !isXfce && (desktop != null && desktop.contains("i3"));
}
else {
isWayland = isGNOME = isKDE = isXfce = isI3 = false;
}
}
public static final boolean isAppleSystemMenu = isMac && Boolean.getBoolean("apple.laf.useScreenMenuBar");
public static final boolean isJBSystemMenu = isMac && Boolean.getBoolean("jbScreenMenuBar.enabled");
public static final boolean isMacSystemMenu = isAppleSystemMenu || isJBSystemMenu;
public static final boolean isFileSystemCaseSensitive = SystemInfoRt.isFileSystemCaseSensitive;
private static final NotNullLazyValue<Boolean> ourHasXdgOpen = PathExecLazyValue.create("xdg-open");
public static boolean hasXdgOpen() {
return isXWindow && ourHasXdgOpen.getValue();
}
private static final NotNullLazyValue<Boolean> ourHasXdgMime = PathExecLazyValue.create("xdg-mime");
public static boolean hasXdgMime() {
return isXWindow && ourHasXdgMime.getValue();
}
/**
* @deprecated macOS 10.14 is the minimum version.
*/
@Deprecated
@ApiStatus.ScheduledForRemoval(inVersion = "2022.3")
public static final boolean isMacOSYosemite = isMac && isOsVersionAtLeast("10.10");
public static final boolean isMacOSMojave = isMac && isOsVersionAtLeast("10.14");
public static final boolean isMacOSCatalina = isMac && isOsVersionAtLeast("10.15");
public static final boolean isMacOSBigSur = isMac && isOsVersionAtLeast("10.16");
public static final boolean isMacOSMonterey = isMac && isOsVersionAtLeast("12.0");
/**
* The build number is the only more or less stable way to obtain a comparable Windows version.
* See the <a href="https://www.gaijin.at/en/infos/windows-version-numbers">list of builds</a>.
* There is also a <a href="https://en.wikipedia.org/wiki/Windows_10_version_history">Wikipedia article for Windows 10</a>
* and <a href="https://en.wikipedia.org/wiki/Windows_11_version_history">another one for Windows 11</a>.
*
* The release ID (1903, 2004, etc.) is a marketing term and has not been a number since 20H2,
* whereas build numbers have grown steadily since NT 3.1 (see the first link), and this trend
* is unlikely to change.
*/
public static @Nullable Long getWinBuildNumber() {
if (!isWin10OrNewer) {
return null;
}
return WinBuildVersionKt.getWinBuildNumber();
}
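// Illustrative use only (the build number below is an example threshold, not an authoritative constant):
//   Long build = SystemInfo.getWinBuildNumber();
//   boolean isWin11OrNewer = build != null && build >= 22000L;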
public static @NotNull String getMacOSMajorVersion() {
return getMacOSMajorVersion(OS_VERSION);
}
public static String getMacOSMajorVersion(String version) {
int[] parts = getMacOSVersionParts(version);
return String.format("%d.%d", parts[0], parts[1]);
}
public static @NotNull String getMacOSVersionCode() {
return getMacOSVersionCode(OS_VERSION);
}
public static @NotNull String getMacOSMajorVersionCode() {
return getMacOSMajorVersionCode(OS_VERSION);
}
public static @NotNull String getMacOSMinorVersionCode() {
return getMacOSMinorVersionCode(OS_VERSION);
}
public static @NotNull String getMacOSVersionCode(@NotNull String version) {
int[] parts = getMacOSVersionParts(version);
return String.format("%02d%d%d", parts[0], normalize(parts[1]), normalize(parts[2]));
}
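// For example (per the "%02d%d%d" format above, with minor/patch capped at 9 by normalize()):
// getMacOSVersionCode("10.14.6") returns "1096", and getMacOSMajorVersionCode("10.14.6") returns "1090".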
public static @NotNull String getMacOSMajorVersionCode(@NotNull String version) {
int[] parts = getMacOSVersionParts(version);
return String.format("%02d%d%d", parts[0], normalize(parts[1]), 0);
}
public static @NotNull String getMacOSMinorVersionCode(@NotNull String version) {
int[] parts = getMacOSVersionParts(version);
return String.format("%02d%02d", parts[1], parts[2]);
}
private static int[] getMacOSVersionParts(@NotNull String version) {
List<String> parts = StringUtil.split(version, ".");
while (parts.size() < 3) {
parts.add("0");
}
return new int[]{toInt(parts.get(0)), toInt(parts.get(1)), toInt(parts.get(2))};
}
public static String getOsNameAndVersion() {
return (isMac ? "macOS" : OS_NAME) + ' ' + OS_VERSION;
}
private static int normalize(int number) {
return Math.min(number, 9);
}
private static int toInt(String string) {
try {
return Integer.parseInt(string);
}
catch (NumberFormatException e) {
return 0;
}
}
//<editor-fold desc="Deprecated stuff.">
/** @deprecated please use {@link Runtime#version()} (in the platform) or {@link JavaVersion} (in utils) */
@Deprecated
@ApiStatus.ScheduledForRemoval(inVersion = "2022.1")
public static final boolean IS_AT_LEAST_JAVA9 = JavaVersion.current().feature >= 9;
/** @deprecated please use {@link Runtime#version()} (in the platform) or {@link JavaVersion} (in utils) */
@Deprecated
@ApiStatus.ScheduledForRemoval(inVersion = "2022.1")
public static boolean isJavaVersionAtLeast(int major) {
return JavaVersion.current().feature >= major;
}
/** @deprecated please use {@link Runtime#version()} (in the platform) or {@link JavaVersion} (in utils) */
@Deprecated
@ApiStatus.ScheduledForRemoval(inVersion = "2022.1")
public static boolean isJavaVersionAtLeast(int major, int minor, int update) {
return JavaVersion.current().compareTo(JavaVersion.compose(major, minor, update, 0, false)) >= 0;
}
/** @deprecated please use {@link Runtime#version()} (in the platform) or {@link JavaVersion} (in utils) */
@Deprecated
@ApiStatus.ScheduledForRemoval(inVersion = "2022.1")
public static boolean isJavaVersionAtLeast(String v) {
return StringUtil.compareVersionNumbers(JAVA_RUNTIME_VERSION, v) >= 0;
}
/** @deprecated may be inaccurate; please use {@link CpuArch} instead */
@Deprecated
@ApiStatus.ScheduledForRemoval(inVersion = "2022.1")
public static final boolean is32Bit = CpuArch.CURRENT.width == 32;
/** @deprecated may be inaccurate; please use {@link CpuArch} instead */
@Deprecated
@ApiStatus.ScheduledForRemoval(inVersion = "2022.1")
public static final boolean is64Bit = CpuArch.CURRENT.width == 64;
/** @deprecated trivial and mostly outdated */
@Deprecated
@ApiStatus.ScheduledForRemoval(inVersion = "2021.2")
public static final boolean isMacIntel64 = isMac && CpuArch.isIntel64();
/** @deprecated always false */
@Deprecated
@ApiStatus.ScheduledForRemoval(inVersion = "2022.1")
public static final boolean isAppleJvm = false;
/** @deprecated always true (Java 8 requires macOS 10.9+) */
@Deprecated
@ApiStatus.ScheduledForRemoval(inVersion = "2022.1")
public static final boolean isMacOSLeopard = isMac;
/** @deprecated always true (Java 8 requires macOS 10.9+) */
@Deprecated
@ApiStatus.ScheduledForRemoval(inVersion = "2022.1")
public static final boolean isMacOSMountainLion = isMac;
/** @deprecated always true (Java 8 requires Windows Vista / Server 2008) */
@Deprecated
@ApiStatus.ScheduledForRemoval(inVersion = "2022.1")
public static final boolean isWinVistaOrNewer = isWindows;
/** @deprecated always true */
@Deprecated
@ApiStatus.ScheduledForRemoval(inVersion = "2022.1")
public static final boolean areSymLinksSupported = isUnix || isWindows;
//</editor-fold>
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package javax.el;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.HashSet;
import java.util.Set;
/**
* @since EL 3.0
*/
public class ELProcessor {
private static final Set<String> PRIMITIVES = new HashSet<>();
static {
PRIMITIVES.add("boolean");
PRIMITIVES.add("byte");
PRIMITIVES.add("char");
PRIMITIVES.add("double");
PRIMITIVES.add("float");
PRIMITIVES.add("int");
PRIMITIVES.add("long");
PRIMITIVES.add("short");
}
private static final String[] EMPTY_STRING_ARRAY = new String[0];
private final ELManager manager = new ELManager();
private final ELContext context = manager.getELContext();
private final ExpressionFactory factory = ELManager.getExpressionFactory();
public ELManager getELManager() {
return manager;
}
public Object eval(String expression) {
return getValue(expression, Object.class);
}
public Object getValue(String expression, Class<?> expectedType) {
ValueExpression ve = factory.createValueExpression(
context, bracket(expression), expectedType);
return ve.getValue(context);
}
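// Illustrative usage (a sketch assuming standard EL evaluation semantics; names are examples only):
//   ELProcessor elp = new ELProcessor();
//   elp.defineBean("x", 2);
//   Object result = elp.eval("x + 1"); // the expression is wrapped as ${x + 1} by bracket() below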
public void setValue(String expression, Object value) {
ValueExpression ve = factory.createValueExpression(
context, bracket(expression), Object.class);
ve.setValue(context, value);
}
public void setVariable(String variable, String expression) {
if (expression == null) {
manager.setVariable(variable, null);
} else {
ValueExpression ve = factory.createValueExpression(
context, bracket(expression), Object.class);
manager.setVariable(variable, ve);
}
}
public void defineFunction(String prefix, String function, String className,
String methodName) throws ClassNotFoundException,
NoSuchMethodException {
if (prefix == null || function == null || className == null ||
methodName == null) {
throw new NullPointerException(Util.message(
context, "elProcessor.defineFunctionNullParams"));
}
// Check the imports
Class<?> clazz = context.getImportHandler().resolveClass(className);
if (clazz == null) {
clazz = Class.forName(className, true,
Thread.currentThread().getContextClassLoader());
}
if (!Modifier.isPublic(clazz.getModifiers())) {
throw new ClassNotFoundException(Util.message(context,
"elProcessor.defineFunctionInvalidClass", className));
}
MethodSignature sig =
new MethodSignature(context, methodName, className);
if (function.length() == 0) {
function = sig.getName();
}
Method[] methods = clazz.getMethods();
for (Method method : methods) {
if (!Modifier.isStatic(method.getModifiers())) {
continue;
}
if (method.getName().equals(sig.getName())) {
if (sig.getParamTypeNames() == null) {
// Only a name provided, no signature so map the first
// method declared
manager.mapFunction(prefix, function, method);
return;
}
if (sig.getParamTypeNames().length != method.getParameterTypes().length) {
continue;
}
if (sig.getParamTypeNames().length == 0) {
manager.mapFunction(prefix, function, method);
return;
} else {
Class<?>[] types = method.getParameterTypes();
String[] typeNames = sig.getParamTypeNames();
if (types.length == typeNames.length) {
boolean match = true;
for (int i = 0; i < types.length; i++) {
if (i == types.length -1 && method.isVarArgs()) {
String typeName = typeNames[i];
if (typeName.endsWith("...")) {
typeName = typeName.substring(0, typeName.length() - 3);
if (!typeName.equals(types[i].getName())) {
match = false;
}
} else {
match = false;
}
} else if (!types[i].getName().equals(typeNames[i])) {
match = false;
break;
}
}
if (match) {
manager.mapFunction(prefix, function, method);
return;
}
}
}
}
}
throw new NoSuchMethodException(Util.message(context,
"elProcessor.defineFunctionNoMethod", methodName, className));
}
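// Illustrative calls (a sketch; java.lang.Math is used only as an example of a public class with static methods):
//   defineFunction("m", "", "java.lang.Math", "abs");                    // name only: maps the first declared "abs"
//   defineFunction("m", "max", "java.lang.Math", "int max(int, int)");   // "returnType name(paramTypes)" form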
/**
* @throws NullPointerException
* If any of the arguments are null
* @throws NoSuchMethodException
* If the method is not static
*/
public void defineFunction(String prefix, String function, Method method)
throws NoSuchMethodException {
if (prefix == null || function == null || method == null) {
throw new NullPointerException(Util.message(
context, "elProcessor.defineFunctionNullParams"));
}
int modifiers = method.getModifiers();
// Check for public method as well as being static
if (!Modifier.isStatic(modifiers) || !Modifier.isPublic(modifiers)) {
throw new NoSuchMethodException(Util.message(context,
"elProcessor.defineFunctionInvalidMethod", method.getName(),
method.getDeclaringClass().getName()));
}
manager.mapFunction(prefix, function, method);
}
public void defineBean(String name, Object bean) {
manager.defineBean(name, bean);
}
private static String bracket(String expression) {
return "${" + expression + "}";
}
private static class MethodSignature {
private final String name;
private final String[] parameterTypeNames;
public MethodSignature(ELContext context, String methodName,
String className) throws NoSuchMethodException {
int paramIndex = methodName.indexOf('(');
if (paramIndex == -1) {
name = methodName.trim();
parameterTypeNames = null;
} else {
String returnTypeAndName = methodName.substring(0, paramIndex).trim();
// Assume that the return type and the name are separated by
// whitespace. Given the use of trim() above, there should only
// be one sequence of whitespace characters.
int wsPos = -1;
for (int i = 0; i < returnTypeAndName.length(); i++) {
if (Character.isWhitespace(returnTypeAndName.charAt(i))) {
wsPos = i;
break;
}
}
if (wsPos == -1) {
throw new NoSuchMethodException();
}
name = returnTypeAndName.substring(wsPos).trim();
String paramString = methodName.substring(paramIndex).trim();
// We know the params start with '(', check they end with ')'
if (!paramString.endsWith(")")) {
throw new NoSuchMethodException(Util.message(context,
"elProcessor.defineFunctionInvalidParameterList",
paramString, methodName, className));
}
// Trim '(' and ')'
paramString = paramString.substring(1, paramString.length() - 1).trim();
if (paramString.length() == 0) {
parameterTypeNames = EMPTY_STRING_ARRAY;
} else {
parameterTypeNames = paramString.split(",");
ImportHandler importHandler = context.getImportHandler();
for (int i = 0; i < parameterTypeNames.length; i++) {
String parameterTypeName = parameterTypeNames[i].trim();
int dimension = 0;
int bracketPos = parameterTypeName.indexOf('[');
if (bracketPos > -1) {
String parameterTypeNameOnly =
parameterTypeName.substring(0, bracketPos).trim();
while (bracketPos > -1) {
dimension++;
bracketPos = parameterTypeName.indexOf('[', bracketPos+ 1);
}
parameterTypeName = parameterTypeNameOnly;
}
boolean varArgs = false;
if (parameterTypeName.endsWith("...")) {
varArgs = true;
dimension = 1;
parameterTypeName = parameterTypeName.substring(
0, parameterTypeName.length() -3).trim();
}
boolean isPrimitive = PRIMITIVES.contains(parameterTypeName);
if (isPrimitive && dimension > 0) {
// When in an array, class name changes for primitive
switch (parameterTypeName) {
case "boolean":
parameterTypeName = "Z";
break;
case "byte":
parameterTypeName = "B";
break;
case "char":
parameterTypeName = "C";
break;
case "double":
parameterTypeName = "D";
break;
case "float":
parameterTypeName = "F";
break;
case "int":
parameterTypeName = "I";
break;
case "long":
parameterTypeName = "J";
break;
case "short":
parameterTypeName = "S";
break;
default:
// Should never happen
break;
}
} else if (!isPrimitive &&
!parameterTypeName.contains(".")) {
Class<?> clazz = importHandler.resolveClass(
parameterTypeName);
if (clazz == null) {
throw new NoSuchMethodException(Util.message(
context,
"elProcessor.defineFunctionInvalidParameterTypeName",
parameterTypeNames[i], methodName,
className));
}
parameterTypeName = clazz.getName();
}
if (dimension > 0) {
// Convert to array form of class name
StringBuilder sb = new StringBuilder();
for (int j = 0; j < dimension; j++) {
sb.append('[');
}
if (!isPrimitive) {
sb.append('L');
}
sb.append(parameterTypeName);
if (!isPrimitive) {
sb.append(';');
}
parameterTypeName = sb.toString();
}
if (varArgs) {
parameterTypeName += "...";
}
parameterTypeNames[i] = parameterTypeName;
}
}
}
}
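// For reference (matches the conversion above): a declared parameter type of "int[]" is
// normalised to "[I" and "java.lang.String[]" to "[Ljava.lang.String;"; a trailing "..."
// (varargs) counts as one array dimension and the "..." suffix is re-appended afterwards.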
public String getName() {
return name;
}
/**
* @return <code>null</code> if just the method name was specified, an
* empty array if an empty parameter list was specified - i.e. ()
* - otherwise an ordered array of parameter type names
*/
public String[] getParamTypeNames() {
return parameterTypeNames;
}
}
}
|
|
package org.drobos;
import java.awt.Color;
import java.io.File;
import java.io.FileNotFoundException;
import java.util.Scanner;
import javax.swing.JFileChooser;
import javax.swing.JOptionPane;
/**
*
* @author Daikaiser
*/
public class InappExpSimulator extends javax.swing.JFrame {
/**
* Creates new form InappExpSimulator
*/
public InappExpSimulator() {
initComponents();
this.getContentPane().setBackground(Color.BLACK);
this.btnNGramLearn.setBackground(Color.BLACK);
this.btnOpenFile.setBackground(Color.BLACK);
this.btnRecognize.setBackground(Color.BLACK);
this.btnWordLearn.setBackground(Color.BLACK);
}
/**
* This method is called from within the constructor to initialize the form.
* WARNING: Do NOT modify this code. The content of this method is always
* regenerated by the Form Editor.
*/
@SuppressWarnings("unchecked")
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
private void initComponents() {
jScrollPane1 = new javax.swing.JScrollPane();
txaOutput = new javax.swing.JTextArea();
jLabel1 = new javax.swing.JLabel();
jLabel2 = new javax.swing.JLabel();
btnRecognize = new javax.swing.JButton();
btnWordLearn = new javax.swing.JButton();
btnNGramLearn = new javax.swing.JButton();
jScrollPane2 = new javax.swing.JScrollPane();
txaInput = new javax.swing.JTextArea();
btnOpenFile = new javax.swing.JButton();
setDefaultCloseOperation(javax.swing.WindowConstants.EXIT_ON_CLOSE);
setTitle("Inappropriate Expressions Recognition");
setBackground(new java.awt.Color(0, 0, 0));
setForeground(new java.awt.Color(0, 0, 0));
txaOutput.setEditable(false);
txaOutput.setBackground(new java.awt.Color(102, 102, 102));
txaOutput.setColumns(20);
txaOutput.setFont(new java.awt.Font("Monospaced", 0, 18)); // NOI18N
txaOutput.setForeground(new java.awt.Color(255, 255, 255));
txaOutput.setRows(5);
jScrollPane1.setViewportView(txaOutput);
jLabel1.setBackground(java.awt.SystemColor.controlDkShadow);
jLabel1.setFont(new java.awt.Font("Tahoma", 0, 18)); // NOI18N
jLabel1.setForeground(new java.awt.Color(255, 255, 255));
jLabel1.setText("INPUT:");
jLabel2.setFont(new java.awt.Font("Tahoma", 0, 18)); // NOI18N
jLabel2.setForeground(new java.awt.Color(255, 255, 255));
jLabel2.setText("OUTPUT:");
btnRecognize.setBackground(java.awt.SystemColor.activeCaptionText);
btnRecognize.setFont(new java.awt.Font("Tahoma", 0, 14)); // NOI18N
btnRecognize.setForeground(new java.awt.Color(255, 255, 255));
btnRecognize.setText("Recognize");
btnRecognize.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
btnRecognizeActionPerformed(evt);
}
});
btnWordLearn.setBackground(java.awt.SystemColor.activeCaptionText);
btnWordLearn.setFont(new java.awt.Font("Tahoma", 0, 14)); // NOI18N
btnWordLearn.setForeground(new java.awt.Color(255, 255, 255));
btnWordLearn.setText("Word Learn");
btnWordLearn.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
btnWordLearnActionPerformed(evt);
}
});
btnNGramLearn.setBackground(java.awt.SystemColor.activeCaptionText);
btnNGramLearn.setFont(new java.awt.Font("Tahoma", 0, 14)); // NOI18N
btnNGramLearn.setForeground(new java.awt.Color(255, 255, 255));
btnNGramLearn.setText("NGram Learn");
btnNGramLearn.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
btnNGramLearnActionPerformed(evt);
}
});
txaInput.setBackground(new java.awt.Color(102, 102, 102));
txaInput.setColumns(20);
txaInput.setFont(new java.awt.Font("Monospaced", 0, 18)); // NOI18N
txaInput.setForeground(new java.awt.Color(255, 255, 255));
txaInput.setRows(5);
jScrollPane2.setViewportView(txaInput);
btnOpenFile.setBackground(java.awt.SystemColor.activeCaptionText);
btnOpenFile.setFont(new java.awt.Font("Tahoma", 0, 14)); // NOI18N
btnOpenFile.setForeground(new java.awt.Color(255, 255, 255));
btnOpenFile.setText("Open File");
btnOpenFile.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
btnOpenFileActionPerformed(evt);
}
});
javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane());
getContentPane().setLayout(layout);
layout.setHorizontalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup()
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup()
.addGap(101, 101, 101)
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING)
.addComponent(btnRecognize, javax.swing.GroupLayout.PREFERRED_SIZE, 109, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(btnOpenFile, javax.swing.GroupLayout.PREFERRED_SIZE, 109, javax.swing.GroupLayout.PREFERRED_SIZE))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(btnWordLearn, javax.swing.GroupLayout.PREFERRED_SIZE, 109, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(btnNGramLearn, javax.swing.GroupLayout.PREFERRED_SIZE, 109, javax.swing.GroupLayout.PREFERRED_SIZE))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED))
.addGroup(layout.createSequentialGroup()
.addContainerGap()
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false)
.addComponent(jScrollPane2, javax.swing.GroupLayout.DEFAULT_SIZE, 315, Short.MAX_VALUE)
.addComponent(jLabel1, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
.addGap(6, 6, 6)))
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addComponent(jLabel2)
.addGap(0, 0, Short.MAX_VALUE))
.addComponent(jScrollPane1, javax.swing.GroupLayout.DEFAULT_SIZE, 342, Short.MAX_VALUE))
.addContainerGap())
);
layout.setVerticalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addContainerGap()
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(jLabel2)
.addComponent(jLabel1, javax.swing.GroupLayout.PREFERRED_SIZE, 23, javax.swing.GroupLayout.PREFERRED_SIZE))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addComponent(jScrollPane2, javax.swing.GroupLayout.DEFAULT_SIZE, 307, Short.MAX_VALUE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(btnRecognize, javax.swing.GroupLayout.PREFERRED_SIZE, 38, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(btnNGramLearn, javax.swing.GroupLayout.PREFERRED_SIZE, 38, javax.swing.GroupLayout.PREFERRED_SIZE))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(btnWordLearn, javax.swing.GroupLayout.PREFERRED_SIZE, 40, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(btnOpenFile, javax.swing.GroupLayout.PREFERRED_SIZE, 39, javax.swing.GroupLayout.PREFERRED_SIZE)))
.addComponent(jScrollPane1))
.addGap(19, 19, 19))
);
pack();
}// </editor-fold>//GEN-END:initComponents
private void btnRecognizeActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnRecognizeActionPerformed
txaOutput.setText("");
for(String comment:txaInput.getText().split("\n")){
this.txaOutput.setText(txaOutput.getText()+InappExp.recognizeDebugMode(comment)+"\n");
}
JOptionPane.showMessageDialog(null,"Finished Evaluation");
}//GEN-LAST:event_btnRecognizeActionPerformed
private void btnWordLearnActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnWordLearnActionPerformed
new Learner().setVisible(true);
}//GEN-LAST:event_btnWordLearnActionPerformed
private void btnNGramLearnActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnNGramLearnActionPerformed
txaOutput.setText("");
for(String comment:txaInput.getText().split("\n")){
this.txaOutput.setText(txaOutput.getText()+NGramGenerator.generateAndLearn(comment)+"\n");
}
}//GEN-LAST:event_btnNGramLearnActionPerformed
private void btnOpenFileActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnOpenFileActionPerformed
txaInput.setText("");
JFileChooser chooser = new JFileChooser();
int returnVal = chooser.showOpenDialog(null);
if (returnVal == JFileChooser.APPROVE_OPTION) {
try {
Scanner scanFile = new Scanner(new File(chooser.getSelectedFile().getAbsolutePath()));
while (scanFile.hasNextLine()) {
String input = scanFile.nextLine();
txaInput.setText(txaInput.getText()+input+"\n");
}
txaInput.setText(txaInput.getText().trim());
} catch (FileNotFoundException ex) {
JOptionPane.showMessageDialog(null, "You selected an unopenable file");
}
}
}//GEN-LAST:event_btnOpenFileActionPerformed
/**
* @param args the command line arguments
*/
public static void main(String[] args) {
/* Set the Nimbus look and feel */
//<editor-fold defaultstate="collapsed" desc=" Look and feel setting code (optional) ">
/* If Nimbus (introduced in Java SE 6) is not available, stay with the default look and feel.
* For details see http://download.oracle.com/javase/tutorial/uiswing/lookandfeel/plaf.html
*/
try {
for (javax.swing.UIManager.LookAndFeelInfo info : javax.swing.UIManager.getInstalledLookAndFeels()) {
if ("Nimbus".equals(info.getName())) {
javax.swing.UIManager.setLookAndFeel(info.getClassName());
break;
}
}
} catch (ClassNotFoundException ex) {
java.util.logging.Logger.getLogger(InappExpSimulator.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
} catch (InstantiationException ex) {
java.util.logging.Logger.getLogger(InappExpSimulator.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
} catch (IllegalAccessException ex) {
java.util.logging.Logger.getLogger(InappExpSimulator.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
} catch (javax.swing.UnsupportedLookAndFeelException ex) {
java.util.logging.Logger.getLogger(InappExpSimulator.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
}
//</editor-fold>
/* Create and display the form */
java.awt.EventQueue.invokeLater(new Runnable() {
public void run() {
new InappExpSimulator().setVisible(true);
}
});
}
// Variables declaration - do not modify//GEN-BEGIN:variables
private javax.swing.JButton btnNGramLearn;
private javax.swing.JButton btnOpenFile;
private javax.swing.JButton btnRecognize;
private javax.swing.JButton btnWordLearn;
private javax.swing.JLabel jLabel1;
private javax.swing.JLabel jLabel2;
private javax.swing.JScrollPane jScrollPane1;
private javax.swing.JScrollPane jScrollPane2;
private javax.swing.JTextArea txaInput;
private javax.swing.JTextArea txaOutput;
// End of variables declaration//GEN-END:variables
}
|
|
/*
* Copyright 2011 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
/**
* Unit tests for {@link PeepholeReplaceKnownMethods}.
*/
public class PeepholeReplaceKnownMethodsTest extends CompilerTestCase {
public PeepholeReplaceKnownMethodsTest() {
super("");
}
@Override
public void setUp() {
enableLineNumberCheck(true);
}
@Override
public CompilerPass getProcessor(final Compiler compiler) {
CompilerPass peepholePass = new PeepholeOptimizationsPass(compiler,
new PeepholeReplaceKnownMethods());
return peepholePass;
}
public void testStringIndexOf() {
fold("x = 'abcdef'.indexOf('b')", "x = 1");
fold("x = 'abcdefbe'.indexOf('b', 2)", "x = 6");
fold("x = 'abcdef'.indexOf('bcd')", "x = 1");
fold("x = 'abcdefsdfasdfbcdassd'.indexOf('bcd', 4)", "x = 13");
fold("x = 'abcdef'.lastIndexOf('b')", "x = 1");
fold("x = 'abcdefbe'.lastIndexOf('b')", "x = 6");
fold("x = 'abcdefbe'.lastIndexOf('b', 5)", "x = 1");
// A non-string search value is folded via its string representation,
// but a non-string receiver (see the foldSame cases below) is left alone.
fold("x = 'abc1def'.indexOf(1)", "x = 3");
fold("x = 'abcNaNdef'.indexOf(NaN)", "x = 3");
fold("x = 'abcundefineddef'.indexOf(undefined)", "x = 3");
fold("x = 'abcnulldef'.indexOf(null)", "x = 3");
fold("x = 'abctruedef'.indexOf(true)", "x = 3");
// The following testcase fails with JSC_PARSE_ERROR. Hence omitted.
// foldSame("x = 1.indexOf('bcd');");
foldSame("x = NaN.indexOf('bcd')");
foldSame("x = undefined.indexOf('bcd')");
foldSame("x = null.indexOf('bcd')");
foldSame("x = true.indexOf('bcd')");
foldSame("x = false.indexOf('bcd')");
// Avoid dealing with regex or other types.
foldSame("x = 'abcdef'.indexOf(/b./)");
foldSame("x = 'abcdef'.indexOf({a:2})");
foldSame("x = 'abcdef'.indexOf([1,2])");
}
public void testStringJoinAddSparse() {
fold("x = [,,'a'].join(',')", "x = ',,a'");
}
public void testStringJoinAdd() {
fold("x = ['a', 'b', 'c'].join('')", "x = \"abc\"");
fold("x = [].join(',')", "x = \"\"");
fold("x = ['a'].join(',')", "x = \"a\"");
fold("x = ['a', 'b', 'c'].join(',')", "x = \"a,b,c\"");
fold("x = ['a', foo, 'b', 'c'].join(',')",
"x = [\"a\",foo,\"b,c\"].join(\",\")");
fold("x = [foo, 'a', 'b', 'c'].join(',')",
"x = [foo,\"a,b,c\"].join(\",\")");
fold("x = ['a', 'b', 'c', foo].join(',')",
"x = [\"a,b,c\",foo].join(\",\")");
// Works with numbers
fold("x = ['a=', 5].join('')", "x = \"a=5\"");
fold("x = ['a', '5'].join(7)", "x = \"a75\"");
// Works on boolean
fold("x = ['a=', false].join('')", "x = \"a=false\"");
fold("x = ['a', '5'].join(true)", "x = \"atrue5\"");
fold("x = ['a', '5'].join(false)", "x = \"afalse5\"");
// Only optimize if it's a size win.
fold("x = ['a', '5', 'c'].join('a very very very long chain')",
"x = [\"a\",\"5\",\"c\"].join(\"a very very very long chain\")");
// TODO(user): It's possible to fold this better.
foldSame("x = ['', foo].join(',')");
foldSame("x = ['', foo, ''].join(',')");
fold("x = ['', '', foo, ''].join(',')", "x = [',', foo, ''].join(',')");
fold("x = ['', '', foo, '', ''].join(',')",
"x = [',', foo, ','].join(',')");
fold("x = ['', '', foo, '', '', bar].join(',')",
"x = [',', foo, ',', bar].join(',')");
fold("x = [1,2,3].join('abcdef')",
"x = '1abcdef2abcdef3'");
fold("x = [1,2].join()", "x = '1,2'");
fold("x = [null,undefined,''].join(',')", "x = ',,'");
fold("x = [null,undefined,0].join(',')", "x = ',,0'");
// This can be folded but we don't currently.
foldSame("x = [[1,2],[3,4]].join()"); // would like: "x = '1,2,3,4'"
}
public void testStringJoinAdd_b1992789() {
fold("x = ['a'].join('')", "x = \"a\"");
fold("x = [foo()].join('')", "x = '' + foo()");
fold("[foo()].join('')", "'' + foo()");
}
public void testFoldStringSubstr() {
fold("x = 'abcde'.substr(0,2)", "x = 'ab'");
fold("x = 'abcde'.substr(1,2)", "x = 'bc'");
fold("x = 'abcde'['substr'](1,3)", "x = 'bcd'");
fold("x = 'abcde'.substr(2)", "x = 'cde'");
// we should be leaving negative indexes alone for now
foldSame("x = 'abcde'.substr(-1)");
foldSame("x = 'abcde'.substr(1, -2)");
foldSame("x = 'abcde'.substr(1, 2, 3)");
foldSame("x = 'a'.substr(0, 2)");
}
public void testFoldStringSubstring() {
fold("x = 'abcde'.substring(0,2)", "x = 'ab'");
fold("x = 'abcde'.substring(1,2)", "x = 'b'");
fold("x = 'abcde'['substring'](1,3)", "x = 'bc'");
fold("x = 'abcde'.substring(2)", "x = 'cde'");
// we should be leaving negative indexes alone for now
foldSame("x = 'abcde'.substring(-1)");
foldSame("x = 'abcde'.substring(1, -2)");
foldSame("x = 'abcde'.substring(1, 2, 3)");
foldSame("x = 'a'.substring(0, 2)");
}
public void testJoinBug() {
fold("var x = [].join();", "var x = '';");
fold("var x = [x].join();", "var x = '' + x;");
foldSame("var x = [x,y].join();");
foldSame("var x = [x,y,z].join();");
foldSame("shape['matrix'] = [\n" +
" Number(headingCos2).toFixed(4),\n" +
" Number(-headingSin2).toFixed(4),\n" +
" Number(headingSin2 * yScale).toFixed(4),\n" +
" Number(headingCos2 * yScale).toFixed(4),\n" +
" 0,\n" +
" 0\n" +
" ].join()");
}
public void testToUpper() {
fold("'a'.toUpperCase()", "'A'");
fold("'A'.toUpperCase()", "'A'");
fold("'aBcDe'.toUpperCase()", "'ABCDE'");
}
public void testToLower() {
fold("'A'.toLowerCase()", "'a'");
fold("'a'.toLowerCase()", "'a'");
fold("'aBcDe'.toLowerCase()", "'abcde'");
}
public void testFoldParseNumbers() {
enableNormalize();
enableEcmaScript5(true);
fold("x = parseInt('123')", "x = 123");
fold("x = parseInt(' 123')", "x = 123");
fold("x = parseInt('123', 10)", "x = 123");
fold("x = parseInt('0xA')", "x = 10");
fold("x = parseInt('0xA', 16)", "x = 10");
fold("x = parseInt('07', 8)", "x = 7");
fold("x = parseInt('08')", "x = 8");
fold("x = parseFloat('1.23')", "x = 1.23");
fold("x = parseFloat('1.2300')", "x = 1.23");
fold("x = parseFloat(' 0.3333')", "x = 0.3333");
//Mozilla Dev Center test cases
fold("x = parseInt(' 0xF', 16)", "x = 15");
fold("x = parseInt(' F', 16)", "x = 15");
fold("x = parseInt('17', 8)", "x = 15");
fold("x = parseInt('015', 10)", "x = 15");
fold("x = parseInt('1111', 2)", "x = 15");
fold("x = parseInt('12', 13)", "x = 15");
fold("x = parseInt(021, 8)", "x = 15");
fold("x = parseInt(15.99, 10)", "x = 15");
fold("x = parseFloat('3.14')", "x = 3.14");
fold("x = parseFloat(3.14)", "x = 3.14");
//Valid calls - unable to fold
foldSame("x = parseInt('FXX123', 16)");
foldSame("x = parseInt('15*3', 10)");
foldSame("x = parseInt('15e2', 10)");
foldSame("x = parseInt('15px', 10)");
foldSame("x = parseInt('-0x08')");
foldSame("x = parseInt('1', -1)");
foldSame("x = parseFloat('3.14more non-digit characters')");
foldSame("x = parseFloat('314e-2')");
foldSame("x = parseFloat('0.0314E+2')");
foldSame("x = parseFloat('3.333333333333333333333333')");
//Invalid calls
foldSame("x = parseInt('0xa', 10)");
enableEcmaScript5(false);
foldSame("x = parseInt('08')");
}
@Override
protected int getNumRepetitions() {
// Reduce this to 2 if we get better expression evaluators.
return 2;
}
private void foldSame(String js) {
testSame(js);
}
private void fold(String js, String expected) {
test(js, expected);
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.sdk.coders;
import java.math.BigDecimal;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import org.apache.beam.sdk.coders.Coder.NonDeterministicException;
import org.apache.beam.sdk.schemas.Schema;
import org.apache.beam.sdk.schemas.Schema.FieldType;
import org.apache.beam.sdk.testing.CoderProperties;
import org.apache.beam.sdk.values.Row;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Lists;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.junit.Assume;
import org.junit.Ignore;
import org.junit.Test;
/** Unit tests for {@link RowCoder}. */
public class RowCoderTest {
@Test
public void testPrimitiveTypes() throws Exception {
Schema schema =
Schema.builder()
.addByteField("f_byte")
.addInt16Field("f_int16")
.addInt32Field("f_int32")
.addInt64Field("f_int64")
.addDecimalField("f_decimal")
.addFloatField("f_float")
.addDoubleField("f_double")
.addStringField("f_string")
.addDateTimeField("f_datetime")
.addBooleanField("f_boolean")
.build();
DateTime dateTime =
new DateTime().withDate(1979, 03, 14).withTime(1, 2, 3, 4).withZone(DateTimeZone.UTC);
Row row =
Row.withSchema(schema)
.addValues(
(byte) 0, (short) 1, 2, 3L, new BigDecimal(2.3), 1.2f, 3.0d, "str", dateTime, false)
.build();
CoderProperties.coderDecodeEncodeEqual(RowCoder.of(schema), row);
}
@Test
public void testNestedTypes() throws Exception {
Schema nestedSchema = Schema.builder().addInt32Field("f1_int").addStringField("f1_str").build();
Schema schema =
Schema.builder().addInt32Field("f_int").addRowField("nested", nestedSchema).build();
Row nestedRow = Row.withSchema(nestedSchema).addValues(18, "foobar").build();
Row row = Row.withSchema(schema).addValues(42, nestedRow).build();
CoderProperties.coderDecodeEncodeEqual(RowCoder.of(schema), row);
}
@Test
public void testArrays() throws Exception {
Schema schema = Schema.builder().addArrayField("f_array", FieldType.STRING).build();
Row row = Row.withSchema(schema).addArray("one", "two", "three", "four").build();
CoderProperties.coderDecodeEncodeEqual(RowCoder.of(schema), row);
}
@Test
public void testArrayOfRow() throws Exception {
Schema nestedSchema = Schema.builder().addInt32Field("f1_int").addStringField("f1_str").build();
FieldType collectionElementType = FieldType.row(nestedSchema);
Schema schema = Schema.builder().addArrayField("f_array", collectionElementType).build();
Row row =
Row.withSchema(schema)
.addArray(
Row.withSchema(nestedSchema).addValues(1, "one").build(),
Row.withSchema(nestedSchema).addValues(2, "two").build(),
Row.withSchema(nestedSchema).addValues(3, "three").build())
.build();
CoderProperties.coderDecodeEncodeEqual(RowCoder.of(schema), row);
}
@Test
public void testArrayOfArray() throws Exception {
FieldType arrayType = FieldType.array(FieldType.array(FieldType.INT32));
Schema schema = Schema.builder().addField("f_array", arrayType).build();
Row row =
Row.withSchema(schema)
.addArray(
Lists.newArrayList(1, 2, 3, 4),
Lists.newArrayList(5, 6, 7, 8),
Lists.newArrayList(9, 10, 11, 12))
.build();
CoderProperties.coderDecodeEncodeEqual(RowCoder.of(schema), row);
}
@Test(expected = NonDeterministicException.class)
public void testVerifyDeterministic() throws NonDeterministicException {
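// DOUBLE and FLOAT fields rely on floating-point coders, whose encodings
// Beam does not guarantee to be deterministic, so this must throw.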
Schema schema =
Schema.builder()
.addField("f1", FieldType.DOUBLE)
.addField("f2", FieldType.FLOAT)
.addField("f3", FieldType.INT32)
.build();
RowCoder coder = RowCoder.of(schema);
coder.verifyDeterministic();
}
@Test(expected = NonDeterministicException.class)
public void testVerifyDeterministicNestedRow() throws NonDeterministicException {
Schema schema =
Schema.builder()
.addField(
"f1",
FieldType.row(
Schema.builder()
.addField("a1", FieldType.DOUBLE)
.addField("a2", FieldType.INT64)
.build()))
.build();
RowCoder coder = RowCoder.of(schema);
coder.verifyDeterministic();
}
@Test
public void testConsistentWithEqualsBytesField() throws Exception {
Schema schema = Schema.of(Schema.Field.of("f1", FieldType.BYTES));
Row row1 = Row.withSchema(schema).addValue(new byte[] {1, 2, 3, 4}).build();
Row row2 = Row.withSchema(schema).addValue(new byte[] {1, 2, 3, 4}).build();
RowCoder coder = RowCoder.of(schema);
Assume.assumeTrue(coder.consistentWithEquals());
CoderProperties.coderConsistentWithEquals(coder, row1, row2);
}
@Test
@Ignore
public void testConsistentWithEqualsMapWithBytesKeyField() throws Exception {
FieldType fieldType = FieldType.map(FieldType.BYTES, FieldType.INT32);
Schema schema = Schema.of(Schema.Field.of("f1", fieldType));
RowCoder coder = RowCoder.of(schema);
Map<byte[], Integer> map1 = Collections.singletonMap(new byte[] {1, 2, 3, 4}, 1);
Row row1 = Row.withSchema(schema).addValue(map1).build();
Map<byte[], Integer> map2 = Collections.singletonMap(new byte[] {1, 2, 3, 4}, 1);
Row row2 = Row.withSchema(schema).addValue(map2).build();
Assume.assumeTrue(coder.consistentWithEquals());
CoderProperties.coderConsistentWithEquals(coder, row1, row2);
}
@Test
public void testConsistentWithEqualsArrayOfBytes() throws Exception {
FieldType fieldType = FieldType.array(FieldType.BYTES);
Schema schema = Schema.of(Schema.Field.of("f1", fieldType));
RowCoder coder = RowCoder.of(schema);
List<byte[]> list1 = Collections.singletonList(new byte[] {1, 2, 3, 4});
Row row1 = Row.withSchema(schema).addValue(list1).build();
List<byte[]> list2 = Collections.singletonList(new byte[] {1, 2, 3, 4});
Row row2 = Row.withSchema(schema).addValue(list2).build();
Assume.assumeTrue(coder.consistentWithEquals());
CoderProperties.coderConsistentWithEquals(coder, row1, row2);
}
@Test
public void testConsistentWithEqualsArrayOfArrayOfBytes() throws Exception {
FieldType fieldType = FieldType.array(FieldType.array(FieldType.BYTES));
Schema schema = Schema.of(Schema.Field.of("f1", fieldType));
RowCoder coder = RowCoder.of(schema);
List<byte[]> innerList1 = Collections.singletonList(new byte[] {1, 2, 3, 4});
List<List<byte[]>> list1 = Collections.singletonList(innerList1);
Row row1 = Row.withSchema(schema).addValue(list1).build();
List<byte[]> innerList2 = Collections.singletonList(new byte[] {1, 2, 3, 4});
List<List<byte[]>> list2 = Collections.singletonList(innerList2);
Row row2 = Row.withSchema(schema).addValue(list2).build();
Assume.assumeTrue(coder.consistentWithEquals());
CoderProperties.coderConsistentWithEquals(coder, row1, row2);
}
@Test
public void testConsistentWithEqualsArrayWithNull() throws Exception {
Schema schema =
Schema.builder()
.addField("a", Schema.FieldType.array(Schema.FieldType.INT32, true))
.build();
Row row = Row.withSchema(schema).addValue(Arrays.asList(1, null)).build();
CoderProperties.coderDecodeEncodeEqual(RowCoder.of(schema), row);
}
@Test
public void testConsistentWithEqualsMapWithNull() throws Exception {
Schema schema =
Schema.builder()
.addField(
"a",
Schema.FieldType.map(
Schema.FieldType.INT32, Schema.FieldType.INT32.withNullable(true)))
.build();
Row row = Row.withSchema(schema).addValue(Collections.singletonMap(1, null)).build();
CoderProperties.coderDecodeEncodeEqual(RowCoder.of(schema), row);
}
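/**
 * Not part of the original suite: a minimal round-trip sketch, assuming
 * org.apache.beam.sdk.util.CoderUtils is available on the test classpath.
 * It shows the same encode/decode contract that CoderProperties checks above,
 * but done by hand.
 */
@Test
public void testManualRoundTripSketch() throws Exception {
Schema schema = Schema.builder().addInt32Field("f_int").addStringField("f_str").build();
Row row = Row.withSchema(schema).addValues(7, "seven").build();
RowCoder coder = RowCoder.of(schema);
// Encode the row to bytes and decode it back; the result should equal the original.
byte[] bytes = org.apache.beam.sdk.util.CoderUtils.encodeToByteArray(coder, row);
Row decoded = org.apache.beam.sdk.util.CoderUtils.decodeFromByteArray(coder, bytes);
org.junit.Assert.assertEquals(row, decoded);
}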
}
|
|
/**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.bookkeeper.replication;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.bookkeeper.client.AsyncCallback.AddCallback;
import org.apache.bookkeeper.client.BKException;
import org.apache.bookkeeper.client.BookKeeper.DigestType;
import org.apache.bookkeeper.client.LedgerHandle;
import org.apache.bookkeeper.conf.ServerConfiguration;
import org.apache.bookkeeper.meta.ZkLedgerUnderreplicationManager;
import org.apache.bookkeeper.proto.BookieServer;
import org.apache.bookkeeper.proto.DataFormats.UnderreplicatedLedgerFormat;
import org.apache.bookkeeper.replication.ReplicationException.CompatibilityException;
import org.apache.bookkeeper.replication.ReplicationException.UnavailableException;
import org.apache.bookkeeper.test.MultiLedgerManagerTestCase;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.WatchedEvent;
import org.apache.zookeeper.Watcher;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.junit.Assert.*;
/**
 * Tests publishing of under-replicated ledgers by the Auditor bookie node when
 * the corresponding bookies are identified as not running.
*/
public class AuditorLedgerCheckerTest extends MultiLedgerManagerTestCase {
// Depending on the desired amount of logging, select the logger
// by uncommenting one of the two lines below
// private final static Logger LOG = Logger.getRootLogger();
private final static Logger LOG = LoggerFactory
.getLogger(AuditorLedgerCheckerTest.class);
private static final byte[] ledgerPassword = "aaa".getBytes();
private Random rng; // Random Number Generator
private DigestType digestType;
private final String UNDERREPLICATED_PATH = baseClientConf
.getZkLedgersRootPath()
+ "/underreplication/ledgers";
private HashMap<String, AuditorElector> auditorElectors = new HashMap<String, AuditorElector>();
private ZkLedgerUnderreplicationManager urLedgerMgr;
private Set<Long> urLedgerList;
private List<Long> ledgerList;
public AuditorLedgerCheckerTest(String ledgerManagerFactoryClass)
throws IOException, KeeperException, InterruptedException,
CompatibilityException {
super(3);
LOG.info("Running test case using ledger manager : "
+ ledgerManagerFactoryClass);
this.digestType = DigestType.CRC32;
// set ledger manager name
baseConf.setLedgerManagerFactoryClassName(ledgerManagerFactoryClass);
baseClientConf
.setLedgerManagerFactoryClassName(ledgerManagerFactoryClass);
}
@Before
public void setUp() throws Exception {
super.setUp();
urLedgerMgr = new ZkLedgerUnderreplicationManager(baseClientConf, zkc);
startAuditorElectors();
rng = new Random(System.currentTimeMillis()); // Initialize the Random
urLedgerList = new HashSet<Long>();
ledgerList = new ArrayList<Long>(2);
}
@Override
public void tearDown() throws Exception {
stopAuditorElectors();
super.tearDown();
}
private void startAuditorElectors() throws Exception {
for (BookieServer bserver : bs) {
String addr = bserver.getLocalAddress().toString();
AuditorElector auditorElector = new AuditorElector(addr,
baseConf, zkc);
auditorElectors.put(addr, auditorElector);
auditorElector.start();
LOG.debug("Starting Auditor Elector");
}
}
private void stopAuditorElectors() throws Exception {
for (AuditorElector auditorElector : auditorElectors.values()) {
auditorElector.shutdown();
LOG.debug("Stopping Auditor Elector!");
}
}
/**
 * Test publishing of under-replicated ledgers by the auditor bookie.
*/
@Test(timeout=60000)
public void testSimpleLedger() throws Exception {
LedgerHandle lh1 = createAndAddEntriesToLedger();
Long ledgerId = lh1.getId();
LOG.debug("Created ledger : " + ledgerId);
ledgerList.add(ledgerId);
lh1.close();
final CountDownLatch underReplicaLatch = registerUrLedgerWatcher(ledgerList
.size());
int bkShutdownIndex = bs.size() - 1;
String shutdownBookie = shutdownBookie(bkShutdownIndex);
// grace period for publishing the bk-ledger
LOG.debug("Waiting for ledgers to be marked as under replicated");
underReplicaLatch.await(5, TimeUnit.SECONDS);
Map<Long, String> urLedgerData = getUrLedgerData(urLedgerList);
assertEquals("Missed identifying under replicated ledgers", 1,
urLedgerList.size());
/*
* Sample data format present in the under replicated ledger path
*
* {4=replica: "10.18.89.153:5002"}
*/
assertTrue("Ledger is not marked as underreplicated:" + ledgerId,
urLedgerList.contains(ledgerId));
String data = urLedgerData.get(ledgerId);
assertTrue("Bookie " + shutdownBookie
+ "is not listed in the ledger as missing replica :" + data,
data.contains(shutdownBookie));
}
/**
 * Test that a ledger, once published as under-replicated, still exists even
 * after restarting the respective bookie.
*/
@Test(timeout=60000)
public void testRestartBookie() throws Exception {
LedgerHandle lh1 = createAndAddEntriesToLedger();
LedgerHandle lh2 = createAndAddEntriesToLedger();
LOG.debug("Created following ledgers : {}, {}", lh1, lh2);
int bkShutdownIndex = bs.size() - 1;
ServerConfiguration bookieConf1 = bsConfs.get(bkShutdownIndex);
String shutdownBookie = shutdownBookie(bkShutdownIndex);
// restart the failed bookie
bs.add(startBookie(bookieConf1));
waitForLedgerMissingReplicas(lh1.getId(), 10, shutdownBookie);
waitForLedgerMissingReplicas(lh2.getId(), 10, shutdownBookie);
}
/**
 * Test publishing of under-replicated ledgers when multiple bookies fail
 * one after another.
*/
@Test(timeout=60000)
public void testMultipleBookieFailures() throws Exception {
LedgerHandle lh1 = createAndAddEntriesToLedger();
// failing first bookie
shutdownBookie(bs.size() - 1);
// simulate re-replication
doLedgerRereplication(lh1.getId());
// failing another bookie
String shutdownBookie = shutdownBookie(bs.size() - 1);
// grace period for publishing the bk-ledger
LOG.debug("Waiting for ledgers to be marked as under replicated");
assertTrue("Ledger should be missing second replica",
waitForLedgerMissingReplicas(lh1.getId(), 10, shutdownBookie));
}
@Test(timeout = 30000)
public void testToggleLedgerReplication() throws Exception {
LedgerHandle lh1 = createAndAddEntriesToLedger();
ledgerList.add(lh1.getId());
LOG.debug("Created following ledgers : " + ledgerList);
// failing another bookie
CountDownLatch urReplicaLatch = registerUrLedgerWatcher(ledgerList
.size());
// disabling ledger replication
urLedgerMgr.disableLedgerReplication();
ArrayList<String> shutdownBookieList = new ArrayList<String>();
shutdownBookieList.add(shutdownBookie(bs.size() - 1));
shutdownBookieList.add(shutdownBookie(bs.size() - 1));
assertFalse("Ledger replication is not disabled!", urReplicaLatch
.await(1, TimeUnit.SECONDS));
// enabling ledger replication
urLedgerMgr.enableLedgerReplication();
assertTrue("Ledger replication is not enabled!", urReplicaLatch.await(
5, TimeUnit.SECONDS));
}
@Test(timeout = 20000)
public void testDuplicateEnDisableAutoRecovery() throws Exception {
urLedgerMgr.disableLedgerReplication();
try {
urLedgerMgr.disableLedgerReplication();
fail("Must throw exception, since AutoRecovery is already disabled");
} catch (UnavailableException e) {
assertTrue("AutoRecovery is not disabled previously!",
e.getCause() instanceof KeeperException.NodeExistsException);
}
urLedgerMgr.enableLedgerReplication();
try {
urLedgerMgr.enableLedgerReplication();
fail("Must throw exception, since AutoRecovery is already enabled");
} catch (UnavailableException e) {
assertTrue("AutoRecovery is not enabled previously!",
e.getCause() instanceof KeeperException.NoNodeException);
}
}
/**
 * Test that the Auditor considers a read-only bookie as available and does not
 * publish under-replicated ledgers for read-only bookies.
*/
@Test(timeout = 20000)
public void testReadOnlyBookieExclusionFromURLedgersCheck() throws Exception {
LedgerHandle lh = createAndAddEntriesToLedger();
ledgerList.add(lh.getId());
LOG.debug("Created following ledgers : " + ledgerList);
int count = ledgerList.size();
final CountDownLatch underReplicaLatch = registerUrLedgerWatcher(count);
ServerConfiguration bookieConf = bsConfs.get(2);
BookieServer bk = bs.get(2);
bookieConf.setReadOnlyModeEnabled(true);
bk.getBookie().doTransitionToReadOnlyMode();
// grace period for publishing the bk-ledger
LOG.debug("Waiting for Auditor to finish ledger check.");
assertFalse("latch should not have completed", underReplicaLatch.await(5, TimeUnit.SECONDS));
}
/**
 * Test that the Auditor treats a read-only bookie shutdown as a failure and publishes under-replicated ledgers for it.
*/
@Test(timeout = 20000)
public void testReadOnlyBookieShutdown() throws Exception {
LedgerHandle lh = createAndAddEntriesToLedger();
long ledgerId = lh.getId();
ledgerList.add(ledgerId);
LOG.debug("Created following ledgers : " + ledgerList);
int count = ledgerList.size();
final CountDownLatch underReplicaLatch = registerUrLedgerWatcher(count);
int bkIndex = bs.size() - 1;
ServerConfiguration bookieConf = bsConfs.get(bkIndex);
BookieServer bk = bs.get(bkIndex);
bookieConf.setReadOnlyModeEnabled(true);
bk.getBookie().doTransitionToReadOnlyMode();
// grace period for publishing the bk-ledger
LOG.debug("Waiting for Auditor to finish ledger check.");
assertFalse("latch should not have completed", underReplicaLatch.await(5, TimeUnit.SECONDS));
String shutdownBookie = shutdownBookie(bkIndex);
// grace period for publishing the bk-ledger
LOG.debug("Waiting for ledgers to be marked as under replicated");
underReplicaLatch.await(5, TimeUnit.SECONDS);
Map<Long, String> urLedgerData = getUrLedgerData(urLedgerList);
assertEquals("Missed identifying under replicated ledgers", 1, urLedgerList.size());
/*
* Sample data format present in the under replicated ledger path
*
* {4=replica: "10.18.89.153:5002"}
*/
assertTrue("Ledger is not marked as underreplicated:" + ledgerId, urLedgerList.contains(ledgerId));
String data = urLedgerData.get(ledgerId);
assertTrue("Bookie " + shutdownBookie + "is not listed in the ledger as missing replica :" + data,
data.contains(shutdownBookie));
}
/**
 * Wait for the ledger to be marked under-replicated and to be missing all of the specified replicas.
*/
private boolean waitForLedgerMissingReplicas(Long ledgerId, long secondsToWait, String... replicas)
throws Exception {
for (int i = 0; i < secondsToWait; i++) {
try {
UnderreplicatedLedgerFormat data = urLedgerMgr.getLedgerUnreplicationInfo(ledgerId);
boolean all = true;
for (String r : replicas) {
all = all && data.getReplicaList().contains(r);
}
if (all) {
return true;
}
} catch (Exception e) {
// may not find node
}
Thread.sleep(1000);
}
return false;
}
private CountDownLatch registerUrLedgerWatcher(int count)
throws KeeperException, InterruptedException {
final CountDownLatch underReplicaLatch = new CountDownLatch(count);
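// One existence watch is set per ledger znode; ChildWatcher counts the latch
// down when a watched under-replication znode changes (typically on creation).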
for (Long ledgerId : ledgerList) {
Watcher urLedgerWatcher = new ChildWatcher(underReplicaLatch);
String znode = ZkLedgerUnderreplicationManager.getUrLedgerZnode(UNDERREPLICATED_PATH,
ledgerId);
zkc.exists(znode, urLedgerWatcher);
}
return underReplicaLatch;
}
private void doLedgerRereplication(Long... ledgerIds)
throws UnavailableException {
for (int i = 0; i < ledgerIds.length; i++) {
long lid = urLedgerMgr.getLedgerToRereplicate();
assertTrue("Received unexpected ledgerid", Arrays.asList(ledgerIds).contains(lid));
urLedgerMgr.markLedgerReplicated(lid);
urLedgerMgr.releaseUnderreplicatedLedger(lid);
}
}
private String shutdownBookie(int bkShutdownIndex) throws Exception {
BookieServer bkServer = bs.get(bkShutdownIndex);
String bookieAddr = bkServer.getLocalAddress().toString();
LOG.debug("Shutting down bookie:" + bookieAddr);
killBookie(bkShutdownIndex);
auditorElectors.get(bookieAddr).shutdown();
auditorElectors.remove(bookieAddr);
return bookieAddr;
}
private LedgerHandle createAndAddEntriesToLedger() throws BKException,
InterruptedException {
int numEntriesToWrite = 100;
// Create a ledger
LedgerHandle lh = bkc.createLedger(digestType, ledgerPassword);
LOG.info("Ledger ID: " + lh.getId());
addEntry(numEntriesToWrite, lh);
return lh;
}
private void addEntry(int numEntriesToWrite, LedgerHandle lh)
throws InterruptedException, BKException {
final CountDownLatch completeLatch = new CountDownLatch(numEntriesToWrite);
final AtomicInteger rc = new AtomicInteger(BKException.Code.OK);
for (int i = 0; i < numEntriesToWrite; i++) {
ByteBuffer entry = ByteBuffer.allocate(4);
entry.putInt(rng.nextInt(Integer.MAX_VALUE));
entry.position(0);
lh.asyncAddEntry(entry.array(), new AddCallback() {
public void addComplete(int rc2, LedgerHandle lh, long entryId, Object ctx) {
rc.compareAndSet(BKException.Code.OK, rc2);
completeLatch.countDown();
}
}, null);
}
completeLatch.await();
if (rc.get() != BKException.Code.OK) {
throw BKException.create(rc.get());
}
}
private Map<Long, String> getUrLedgerData(Set<Long> urLedgerList)
throws KeeperException, InterruptedException {
Map<Long, String> urLedgerData = new HashMap<Long, String>();
for (Long ledgerId : urLedgerList) {
String znode = ZkLedgerUnderreplicationManager.getUrLedgerZnode(UNDERREPLICATED_PATH,
ledgerId);
byte[] data = zkc.getData(znode, false, null);
urLedgerData.put(ledgerId, new String(data));
}
return urLedgerData;
}
private class ChildWatcher implements Watcher {
private final CountDownLatch underReplicaLatch;
public ChildWatcher(CountDownLatch underReplicaLatch) {
this.underReplicaLatch = underReplicaLatch;
}
@Override
public void process(WatchedEvent event) {
LOG.info("Received notification for the ledger path : "
+ event.getPath());
for (Long ledgerId : ledgerList) {
if (event.getPath().contains(ledgerId + "")) {
urLedgerList.add(Long.valueOf(ledgerId));
}
}
LOG.debug("Count down and waiting for next notification");
// count down and waiting for next notification
underReplicaLatch.countDown();
}
}
}
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.datanode;
import static org.apache.hadoop.hdfs.protocol.DataTransferProtocol.Status.CHECKSUM_OK;
import static org.apache.hadoop.hdfs.protocol.DataTransferProtocol.Status.ERROR;
import static org.apache.hadoop.hdfs.protocol.DataTransferProtocol.Status.ERROR_ACCESS_TOKEN;
import static org.apache.hadoop.hdfs.protocol.DataTransferProtocol.Status.SUCCESS;
import static org.apache.hadoop.hdfs.server.datanode.DataNode.DN_CLIENTTRACE_FORMAT;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.net.SocketException;
import org.apache.commons.logging.Log;
import org.apache.hadoop.hdfs.protocol.Block;
import org.apache.hadoop.hdfs.protocol.DataTransferProtocol;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.hdfs.protocol.FSConstants;
import org.apache.hadoop.hdfs.protocol.DataTransferProtocol.BlockConstructionStage;
import org.apache.hadoop.hdfs.security.BlockAccessToken;
import org.apache.hadoop.hdfs.security.AccessTokenHandler;
import org.apache.hadoop.hdfs.server.common.HdfsConstants;
import org.apache.hadoop.hdfs.server.datanode.FSDatasetInterface.MetaDataInputStream;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.MD5Hash;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.metrics.util.MetricsTimeVaryingInt;
import org.apache.hadoop.metrics.util.MetricsTimeVaryingRate;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.util.DataChecksum;
import org.apache.hadoop.util.StringUtils;
/**
* Thread for processing incoming/outgoing data stream.
*/
class DataXceiver extends DataTransferProtocol.Receiver
implements Runnable, FSConstants {
public static final Log LOG = DataNode.LOG;
static final Log ClientTraceLog = DataNode.ClientTraceLog;
private final Socket s;
private final boolean isLocal; //is a local connection?
private final String remoteAddress; // address of remote side
private final String localAddress; // local address of this daemon
private final DataNode datanode;
private final DataXceiverServer dataXceiverServer;
private long opStartTime; //the start time of receiving an Op
public DataXceiver(Socket s, DataNode datanode,
DataXceiverServer dataXceiverServer) {
this.s = s;
this.isLocal = s.getInetAddress().equals(s.getLocalAddress());
this.datanode = datanode;
this.dataXceiverServer = dataXceiverServer;
dataXceiverServer.childSockets.put(s, s);
remoteAddress = s.getRemoteSocketAddress().toString();
localAddress = s.getLocalSocketAddress().toString();
if (LOG.isDebugEnabled()) {
LOG.debug("Number of active connections is: "
+ datanode.getXceiverCount());
}
}
/** Return the datanode object. */
DataNode getDataNode() {return datanode;}
/**
 * Read/write data from/to the DataXceiverServer.
*/
public void run() {
DataInputStream in=null;
try {
in = new DataInputStream(
new BufferedInputStream(NetUtils.getInputStream(s),
SMALL_BUFFER_SIZE));
final DataTransferProtocol.Op op = readOp(in);
// Make sure the xceiver count is not exceeded
int curXceiverCount = datanode.getXceiverCount();
if (curXceiverCount > dataXceiverServer.maxXceiverCount) {
throw new IOException("xceiverCount " + curXceiverCount
+ " exceeds the limit of concurrent xcievers "
+ dataXceiverServer.maxXceiverCount);
}
opStartTime = DataNode.now();
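// Dispatch to the op-specific handler below (opReadBlock, opWriteBlock,
// opBlockChecksum, opCopyBlock or opReplaceBlock).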
processOp(op, in);
} catch (Throwable t) {
LOG.error(datanode.dnRegistration + ":DataXceiver",t);
} finally {
if (LOG.isDebugEnabled()) {
LOG.debug(datanode.dnRegistration + ":Number of active connections is: "
+ datanode.getXceiverCount());
}
IOUtils.closeStream(in);
IOUtils.closeSocket(s);
dataXceiverServer.childSockets.remove(s);
}
}
/**
* Read a block from the disk.
*/
@Override
protected void opReadBlock(DataInputStream in,
long blockId, long blockGs, long startOffset, long length,
String clientName, BlockAccessToken accessToken) throws IOException {
final Block block = new Block(blockId, 0 , blockGs);
OutputStream baseStream = NetUtils.getOutputStream(s,
datanode.socketWriteTimeout);
DataOutputStream out = new DataOutputStream(
new BufferedOutputStream(baseStream, SMALL_BUFFER_SIZE));
if (datanode.isAccessTokenEnabled
&& !datanode.accessTokenHandler.checkAccess(accessToken, null, blockId,
AccessTokenHandler.AccessMode.READ)) {
try {
ERROR_ACCESS_TOKEN.write(out);
out.flush();
throw new IOException("Access token verification failed, for client "
+ remoteAddress + " for OP_READ_BLOCK for block " + block);
} finally {
IOUtils.closeStream(out);
}
}
// send the block
BlockSender blockSender = null;
final String clientTraceFmt =
clientName.length() > 0 && ClientTraceLog.isInfoEnabled()
? String.format(DN_CLIENTTRACE_FORMAT, localAddress, remoteAddress,
"%d", "HDFS_READ", clientName, "%d",
datanode.dnRegistration.getStorageID(), block, "%d")
: datanode.dnRegistration + " Served block " + block + " to " +
s.getInetAddress();
try {
try {
blockSender = new BlockSender(block, startOffset, length,
true, true, false, datanode, clientTraceFmt);
} catch(IOException e) {
ERROR.write(out);
throw e;
}
SUCCESS.write(out); // send op status
long read = blockSender.sendBlock(out, baseStream, null); // send data
if (blockSender.isBlockReadFully()) {
// See if client verification succeeded.
// This is an optional response from client.
try {
if (DataTransferProtocol.Status.read(in) == CHECKSUM_OK
&& datanode.blockScanner != null) {
datanode.blockScanner.verifiedByClient(block);
}
} catch (IOException ignored) {}
}
datanode.myMetrics.bytesRead.inc((int) read);
datanode.myMetrics.blocksRead.inc();
} catch ( SocketException ignored ) {
// It's OK for the remote side to close the connection at any time.
datanode.myMetrics.blocksRead.inc();
} catch ( IOException ioe ) {
/* What exactly should we do here?
 * Earlier versions called shutdown() on the datanode if there was a disk error.
*/
LOG.warn(datanode.dnRegistration + ":Got exception while serving " +
block + " to " +
s.getInetAddress() + ":\n" +
StringUtils.stringifyException(ioe) );
throw ioe;
} finally {
IOUtils.closeStream(out);
IOUtils.closeStream(blockSender);
}
//update metrics
updateDuration(datanode.myMetrics.readBlockOp);
updateCounter(datanode.myMetrics.readsFromLocalClient,
datanode.myMetrics.readsFromRemoteClient);
}
/**
* Write a block to disk.
*/
@Override
protected void opWriteBlock(DataInputStream in, long blockId, long blockGs,
int pipelineSize, BlockConstructionStage stage,
long newGs, long minBytesRcvd, long maxBytesRcvd,
String client, DatanodeInfo srcDataNode, DatanodeInfo[] targets,
BlockAccessToken accessToken) throws IOException {
if (LOG.isDebugEnabled()) {
LOG.debug("writeBlock receive buf size " + s.getReceiveBufferSize() +
" tcp no delay " + s.getTcpNoDelay());
}
final Block block = new Block(blockId, dataXceiverServer.estimateBlockSize,
blockGs);
LOG.info("Receiving block " + block +
" src: " + remoteAddress +
" dest: " + localAddress);
DataOutputStream replyOut = null; // stream to prev target
replyOut = new DataOutputStream(
NetUtils.getOutputStream(s, datanode.socketWriteTimeout));
if (datanode.isAccessTokenEnabled
&& !datanode.accessTokenHandler.checkAccess(accessToken, null, block
.getBlockId(), AccessTokenHandler.AccessMode.WRITE)) {
try {
if (client.length() != 0) {
ERROR_ACCESS_TOKEN.write(replyOut);
Text.writeString(replyOut, datanode.dnRegistration.getName());
replyOut.flush();
}
throw new IOException("Access token verification failed, for client "
+ remoteAddress + " for OP_WRITE_BLOCK for block " + block);
} finally {
IOUtils.closeStream(replyOut);
}
}
DataOutputStream mirrorOut = null; // stream to next target
DataInputStream mirrorIn = null; // reply from next target
Socket mirrorSock = null; // socket to next target
BlockReceiver blockReceiver = null; // responsible for data handling
String mirrorNode = null; // the name:port of next target
String firstBadLink = ""; // first datanode that failed in connection setup
DataTransferProtocol.Status mirrorInStatus = SUCCESS;
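// Pipeline overview: data arrives from the upstream peer (client or previous
// datanode) on 'in', is forwarded to the next target via mirrorOut/mirrorIn,
// and acks flow back upstream on replyOut.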
try {
if (client.length() == 0 ||
stage != BlockConstructionStage.PIPELINE_CLOSE_RECOVERY) {
// open a block receiver
blockReceiver = new BlockReceiver(block, in,
s.getRemoteSocketAddress().toString(),
s.getLocalSocketAddress().toString(),
stage, newGs, minBytesRcvd, maxBytesRcvd,
client, srcDataNode, datanode);
} else {
datanode.data.recoverClose(block, newGs, minBytesRcvd);
}
//
// Open network conn to backup machine, if
// appropriate
//
if (targets.length > 0) {
InetSocketAddress mirrorTarget = null;
// Connect to backup machine
mirrorNode = targets[0].getName();
mirrorTarget = NetUtils.createSocketAddr(mirrorNode);
mirrorSock = datanode.newSocket();
try {
int timeoutValue = datanode.socketTimeout
+ (HdfsConstants.READ_TIMEOUT_EXTENSION * targets.length);
int writeTimeout = datanode.socketWriteTimeout +
(HdfsConstants.WRITE_TIMEOUT_EXTENSION * targets.length);
NetUtils.connect(mirrorSock, mirrorTarget, timeoutValue);
mirrorSock.setSoTimeout(timeoutValue);
mirrorSock.setSendBufferSize(DEFAULT_DATA_SOCKET_SIZE);
mirrorOut = new DataOutputStream(
new BufferedOutputStream(
NetUtils.getOutputStream(mirrorSock, writeTimeout),
SMALL_BUFFER_SIZE));
mirrorIn = new DataInputStream(NetUtils.getInputStream(mirrorSock));
// Write header: Copied from DFSClient.java!
DataTransferProtocol.Sender.opWriteBlock(mirrorOut,
blockId, blockGs,
pipelineSize, stage, newGs, minBytesRcvd, maxBytesRcvd, client,
srcDataNode, targets, accessToken);
if (blockReceiver != null) { // send checksum header
blockReceiver.writeChecksumHeader(mirrorOut);
}
mirrorOut.flush();
// read connect ack (only for clients, not for replication req)
if (client.length() != 0) {
mirrorInStatus = DataTransferProtocol.Status.read(mirrorIn);
firstBadLink = Text.readString(mirrorIn);
if (LOG.isDebugEnabled() || mirrorInStatus != SUCCESS) {
LOG.info("Datanode " + targets.length +
" got response for connect ack " +
" from downstream datanode with firstbadlink as " +
firstBadLink);
}
}
} catch (IOException e) {
if (client.length() != 0) {
ERROR.write(replyOut);
Text.writeString(replyOut, mirrorNode);
replyOut.flush();
}
IOUtils.closeStream(mirrorOut);
mirrorOut = null;
IOUtils.closeStream(mirrorIn);
mirrorIn = null;
IOUtils.closeSocket(mirrorSock);
mirrorSock = null;
if (client.length() > 0) {
throw e;
} else {
LOG.info(datanode.dnRegistration + ":Exception transfering block " +
block + " to mirror " + mirrorNode +
". continuing without the mirror.\n" +
StringUtils.stringifyException(e));
}
}
}
// send connect ack back to source (only for clients)
if (client.length() != 0) {
if (LOG.isDebugEnabled() || mirrorInStatus != SUCCESS) {
LOG.info("Datanode " + targets.length +
" forwarding connect ack to upstream firstbadlink is " +
firstBadLink);
}
mirrorInStatus.write(replyOut);
Text.writeString(replyOut, firstBadLink);
replyOut.flush();
}
// receive the block and mirror to the next target
if (blockReceiver != null) {
String mirrorAddr = (mirrorSock == null) ? null : mirrorNode;
blockReceiver.receiveBlock(mirrorOut, mirrorIn, replyOut,
mirrorAddr, null, targets.length);
}
// update its generation stamp
if (client.length() != 0 &&
stage == BlockConstructionStage.PIPELINE_CLOSE_RECOVERY) {
block.setGenerationStamp(newGs);
block.setNumBytes(minBytesRcvd);
}
// if this write is for a replication request or recovering
// a failed close for client, then confirm block. For other client-writes,
// the block is finalized in the PacketResponder.
if (client.length() == 0 ||
stage == BlockConstructionStage.PIPELINE_CLOSE_RECOVERY) {
datanode.closeBlock(block, DataNode.EMPTY_DEL_HINT);
LOG.info("Received block " + block +
" src: " + remoteAddress +
" dest: " + localAddress +
" of size " + block.getNumBytes());
}
} catch (IOException ioe) {
LOG.info("writeBlock " + block + " received exception " + ioe);
throw ioe;
} finally {
// close all opened streams
IOUtils.closeStream(mirrorOut);
IOUtils.closeStream(mirrorIn);
IOUtils.closeStream(replyOut);
IOUtils.closeSocket(mirrorSock);
IOUtils.closeStream(blockReceiver);
}
//update metrics
updateDuration(datanode.myMetrics.writeBlockOp);
updateCounter(datanode.myMetrics.writesFromLocalClient,
datanode.myMetrics.writesFromRemoteClient);
}
/**
* Get block checksum (MD5 of CRC32).
*/
@Override
protected void opBlockChecksum(DataInputStream in,
long blockId, long blockGs, BlockAccessToken accessToken) throws IOException {
final Block block = new Block(blockId, 0 , blockGs);
DataOutputStream out = new DataOutputStream(NetUtils.getOutputStream(s,
datanode.socketWriteTimeout));
if (datanode.isAccessTokenEnabled
&& !datanode.accessTokenHandler.checkAccess(accessToken, null, block
.getBlockId(), AccessTokenHandler.AccessMode.READ)) {
try {
ERROR_ACCESS_TOKEN.write(out);
out.flush();
throw new IOException(
"Access token verification failed, for client " + remoteAddress
+ " for OP_BLOCK_CHECKSUM for block " + block);
} finally {
IOUtils.closeStream(out);
}
}
final MetaDataInputStream metadataIn = datanode.data.getMetaDataInputStream(block);
final DataInputStream checksumIn = new DataInputStream(new BufferedInputStream(
metadataIn, BUFFER_SIZE));
try {
//read metadata file
final BlockMetadataHeader header = BlockMetadataHeader.readHeader(checksumIn);
final DataChecksum checksum = header.getChecksum();
final int bytesPerCRC = checksum.getBytesPerChecksum();
final long crcPerBlock = (metadataIn.getLength()
- BlockMetadataHeader.getHeaderSize())/checksum.getChecksumSize();
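// Illustrative arithmetic (assumed values): with 4-byte CRC32 checksums and
// bytesPerCRC = 512, a 64 MB block has 64 * 1024 * 1024 / 512 = 131072 chunks,
// so the metadata file holds 131072 * 4 checksum bytes after the header and
// crcPerBlock evaluates to 131072.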
//compute block checksum
final MD5Hash md5 = MD5Hash.digest(checksumIn);
if (LOG.isDebugEnabled()) {
LOG.debug("block=" + block + ", bytesPerCRC=" + bytesPerCRC
+ ", crcPerBlock=" + crcPerBlock + ", md5=" + md5);
}
//write reply
SUCCESS.write(out);
out.writeInt(bytesPerCRC);
out.writeLong(crcPerBlock);
md5.write(out);
out.flush();
} finally {
IOUtils.closeStream(out);
IOUtils.closeStream(checksumIn);
IOUtils.closeStream(metadataIn);
}
//update metrics
updateDuration(datanode.myMetrics.blockChecksumOp);
}
/**
 * Read a block from the disk and then send it to a destination.
*/
@Override
protected void opCopyBlock(DataInputStream in,
long blockId, long blockGs, BlockAccessToken accessToken) throws IOException {
// Read in the header
Block block = new Block(blockId, 0, blockGs);
if (datanode.isAccessTokenEnabled
&& !datanode.accessTokenHandler.checkAccess(accessToken, null, blockId,
AccessTokenHandler.AccessMode.COPY)) {
LOG.warn("Invalid access token in request from "
+ remoteAddress + " for OP_COPY_BLOCK for block " + block);
sendResponse(s, ERROR_ACCESS_TOKEN, datanode.socketWriteTimeout);
return;
}
if (!dataXceiverServer.balanceThrottler.acquire()) { // not able to start
LOG.info("Not able to copy block " + blockId + " to "
+ s.getRemoteSocketAddress() + " because threads quota is exceeded.");
sendResponse(s, ERROR, datanode.socketWriteTimeout);
return;
}
BlockSender blockSender = null;
DataOutputStream reply = null;
boolean isOpSuccess = true;
try {
// check if the block exists or not
blockSender = new BlockSender(block, 0, -1, false, false, false,
datanode);
// set up response stream
OutputStream baseStream = NetUtils.getOutputStream(
s, datanode.socketWriteTimeout);
reply = new DataOutputStream(new BufferedOutputStream(
baseStream, SMALL_BUFFER_SIZE));
// send status first
SUCCESS.write(reply);
// send block content to the target
long read = blockSender.sendBlock(reply, baseStream,
dataXceiverServer.balanceThrottler);
datanode.myMetrics.bytesRead.inc((int) read);
datanode.myMetrics.blocksRead.inc();
LOG.info("Copied block " + block + " to " + s.getRemoteSocketAddress());
} catch (IOException ioe) {
isOpSuccess = false;
throw ioe;
} finally {
dataXceiverServer.balanceThrottler.release();
if (isOpSuccess) {
try {
// send one last byte to indicate that the resource is cleaned.
reply.writeChar('d');
} catch (IOException ignored) {
}
}
IOUtils.closeStream(reply);
IOUtils.closeStream(blockSender);
}
//update metrics
updateDuration(datanode.myMetrics.copyBlockOp);
}
/**
 * Receive a block and write it to disk, then notify the namenode to
 * remove the copy from the source.
*/
@Override
protected void opReplaceBlock(DataInputStream in,
long blockId, long blockGs, String sourceID, DatanodeInfo proxySource,
BlockAccessToken accessToken) throws IOException {
/* read header */
final Block block = new Block(blockId, dataXceiverServer.estimateBlockSize,
blockGs);
if (datanode.isAccessTokenEnabled
&& !datanode.accessTokenHandler.checkAccess(accessToken, null, blockId,
AccessTokenHandler.AccessMode.REPLACE)) {
LOG.warn("Invalid access token in request from "
+ remoteAddress + " for OP_REPLACE_BLOCK for block " + block);
sendResponse(s, ERROR_ACCESS_TOKEN, datanode.socketWriteTimeout);
return;
}
if (!dataXceiverServer.balanceThrottler.acquire()) { // not able to start
LOG.warn("Not able to receive block " + blockId + " from "
+ s.getRemoteSocketAddress() + " because threads quota is exceeded.");
sendResponse(s, ERROR, datanode.socketWriteTimeout);
return;
}
Socket proxySock = null;
DataOutputStream proxyOut = null;
DataTransferProtocol.Status opStatus = SUCCESS;
BlockReceiver blockReceiver = null;
DataInputStream proxyReply = null;
try {
// get the output stream to the proxy
InetSocketAddress proxyAddr = NetUtils.createSocketAddr(
proxySource.getName());
proxySock = datanode.newSocket();
NetUtils.connect(proxySock, proxyAddr, datanode.socketTimeout);
proxySock.setSoTimeout(datanode.socketTimeout);
OutputStream baseStream = NetUtils.getOutputStream(proxySock,
datanode.socketWriteTimeout);
proxyOut = new DataOutputStream(
new BufferedOutputStream(baseStream, SMALL_BUFFER_SIZE));
/* send request to the proxy */
DataTransferProtocol.Sender.opCopyBlock(proxyOut, block.getBlockId(),
block.getGenerationStamp(), accessToken);
// receive the response from the proxy
proxyReply = new DataInputStream(new BufferedInputStream(
NetUtils.getInputStream(proxySock), BUFFER_SIZE));
final DataTransferProtocol.Status status
= DataTransferProtocol.Status.read(proxyReply);
if (status != SUCCESS) {
if (status == ERROR_ACCESS_TOKEN) {
throw new IOException("Copy block " + block + " from "
+ proxySock.getRemoteSocketAddress()
+ " failed due to access token error");
}
throw new IOException("Copy block " + block + " from "
+ proxySock.getRemoteSocketAddress() + " failed");
}
// open a block receiver and check if the block does not exist
blockReceiver = new BlockReceiver(
block, proxyReply, proxySock.getRemoteSocketAddress().toString(),
proxySock.getLocalSocketAddress().toString(),
null, 0, 0, 0, "", null, datanode);
// receive a block
blockReceiver.receiveBlock(null, null, null, null,
dataXceiverServer.balanceThrottler, -1);
// notify name node
datanode.notifyNamenodeReceivedBlock(block, sourceID);
LOG.info("Moved block " + block +
" from " + s.getRemoteSocketAddress());
} catch (IOException ioe) {
opStatus = ERROR;
throw ioe;
} finally {
// receive the last byte that indicates the proxy released its thread resource
if (opStatus == SUCCESS) {
try {
proxyReply.readChar();
} catch (IOException ignored) {
}
}
// now release the thread resource
dataXceiverServer.balanceThrottler.release();
// send response back
try {
sendResponse(s, opStatus, datanode.socketWriteTimeout);
} catch (IOException ioe) {
LOG.warn("Error writing reply back to " + s.getRemoteSocketAddress());
}
IOUtils.closeStream(proxyOut);
IOUtils.closeStream(blockReceiver);
IOUtils.closeStream(proxyReply);
}
//update metrics
updateDuration(datanode.myMetrics.replaceBlockOp);
}
private void updateDuration(MetricsTimeVaryingRate mtvr) {
mtvr.inc(DataNode.now() - opStartTime);
}
private void updateCounter(MetricsTimeVaryingInt localCounter,
MetricsTimeVaryingInt remoteCounter) {
(isLocal? localCounter: remoteCounter).inc();
}
/**
* Utility function for sending a response.
* @param s socket to write to
* @param opStatus status message to write
* @param timeout send timeout
**/
private void sendResponse(Socket s, DataTransferProtocol.Status opStatus,
long timeout) throws IOException {
DataOutputStream reply =
new DataOutputStream(NetUtils.getOutputStream(s, timeout));
try {
opStatus.write(reply);
reply.flush();
} finally {
IOUtils.closeStream(reply);
}
}
}
|
|
// Copyright 2016 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.rules.proto;
import static com.google.common.truth.Truth.assertThat;
import static com.google.devtools.build.lib.actions.util.ActionsTestUtil.getFirstArtifactEndingWith;
import static com.google.devtools.build.lib.actions.util.ActionsTestUtil.prettyArtifactNames;
import com.google.common.collect.ImmutableList;
import com.google.devtools.build.lib.actions.Action;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.analysis.ConfiguredTarget;
import com.google.devtools.build.lib.analysis.actions.FileWriteAction;
import com.google.devtools.build.lib.analysis.util.BuildViewTestCase;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
@RunWith(JUnit4.class)
public class BazelProtoLibraryTest extends BuildViewTestCase {
@Before
public void setUp() throws Exception {
useConfiguration("--proto_compiler=//proto:compiler");
scratch.file("proto/BUILD", "licenses(['notice'])", "exports_files(['compiler'])");
}
@Test
public void createsDescriptorSets() throws Exception {
scratch.file(
"x/BUILD",
"proto_library(name='alias', deps = ['foo'])",
"proto_library(name='foo', srcs=['foo.proto'])",
"proto_library(name='alias_to_no_srcs', deps = ['no_srcs'])",
"proto_library(name='no_srcs')");
assertThat(getDescriptorOutput("//x:alias").getRootRelativePathString())
.isEqualTo("x/alias-descriptor-set.proto.bin");
assertThat(getDescriptorOutput("//x:foo").getRootRelativePathString())
.isEqualTo("x/foo-descriptor-set.proto.bin");
assertThat(getDescriptorOutput("//x:alias_to_no_srcs").getRootRelativePathString())
.isEqualTo("x/alias_to_no_srcs-descriptor-set.proto.bin");
assertThat(getDescriptorOutput("//x:no_srcs").getRootRelativePathString())
.isEqualTo("x/no_srcs-descriptor-set.proto.bin");
}
@Test
public void descriptorSets_ruleWithSrcsCallsProtoc() throws Exception {
scratch.file("x/BUILD", "proto_library(name='foo', srcs=['foo.proto'])");
Artifact file = getDescriptorOutput("//x:foo");
assertThat(getGeneratingSpawnAction(file).getRemainingArguments())
.containsAllOf(
"-Ix/foo.proto=x/foo.proto",
"--descriptor_set_out=" + file.getExecPathString(),
"x/foo.proto");
}
/** Asserts that we register a FileWriteAction with empty contents if there are no srcs. */
@Test
public void descriptorSets_ruleWithoutSrcsWritesEmptyFile() throws Exception {
scratch.file("x/BUILD", "proto_library(name='no_srcs')");
Action action = getDescriptorWriteAction("//x:no_srcs");
assertThat(action).isInstanceOf(FileWriteAction.class);
assertThat(((FileWriteAction) action).getFileContents()).isEmpty();
}
/**
 * Asserts that the action creating the descriptor set for a rule R takes as input (that is,
 * depends on) the descriptor sets of all transitive dependencies of R.
 *
 * <p>This is needed so that building R breaks when R has a dependency R' that violates strict
 * proto deps.
*/
@Test
public void descriptorSetsDependOnChildren() throws Exception {
scratch.file(
"x/BUILD",
"proto_library(name='alias', deps = ['foo'])",
"proto_library(name='foo', srcs=['foo.proto'], deps = ['bar'])",
"proto_library(name='bar', srcs=['bar.proto'])",
"proto_library(name='alias_to_no_srcs', deps = ['no_srcs'])",
"proto_library(name='no_srcs')");
assertThat(getDepsDescriptorSets(getDescriptorOutput("//x:alias")))
.containsExactly("x/foo-descriptor-set.proto.bin", "x/bar-descriptor-set.proto.bin");
assertThat(getDepsDescriptorSets(getDescriptorOutput("//x:foo")))
.containsExactly("x/bar-descriptor-set.proto.bin");
assertThat(getDepsDescriptorSets(getDescriptorOutput("//x:bar"))).isEmpty();
assertThat(getDepsDescriptorSets(getDescriptorOutput("//x:alias_to_no_srcs")))
.containsExactly("x/no_srcs-descriptor-set.proto.bin");
assertThat(getDepsDescriptorSets(getDescriptorOutput("//x:no_srcs"))).isEmpty();
}
/**
* Returns all of the inputs of the action that generated 'descriptorSet', and which are
* themselves descriptor sets.
*/
private ImmutableList<String> getDepsDescriptorSets(Artifact descriptorSet) {
ImmutableList.Builder<String> result = ImmutableList.builder();
for (String input : prettyArtifactNames(getGeneratingAction(descriptorSet).getInputs())) {
if (input.endsWith("-descriptor-set.proto.bin")) {
result.add(input);
}
}
return result.build();
}
@Test
public void descriptorSetsAreExposedInProvider() throws Exception {
scratch.file(
"x/BUILD",
"proto_library(name='alias', deps = ['foo'])",
"proto_library(name='foo', srcs=['foo.proto'], deps = ['bar'])",
"proto_library(name='bar', srcs=['bar.proto'])",
"proto_library(name='alias_to_no_srcs', deps = ['no_srcs'])",
"proto_library(name='no_srcs')");
{
ProtoSourcesProvider provider =
getConfiguredTarget("//x:alias").getProvider(ProtoSourcesProvider.class);
assertThat(provider.directDescriptorSet().getRootRelativePathString())
.isEqualTo("x/alias-descriptor-set.proto.bin");
assertThat(prettyArtifactNames(provider.transitiveDescriptorSets()))
.containsExactly(
"x/alias-descriptor-set.proto.bin",
"x/foo-descriptor-set.proto.bin",
"x/bar-descriptor-set.proto.bin");
}
{
ProtoSourcesProvider provider =
getConfiguredTarget("//x:foo").getProvider(ProtoSourcesProvider.class);
assertThat(provider.directDescriptorSet().getRootRelativePathString())
.isEqualTo("x/foo-descriptor-set.proto.bin");
assertThat(prettyArtifactNames(provider.transitiveDescriptorSets()))
.containsExactly("x/foo-descriptor-set.proto.bin", "x/bar-descriptor-set.proto.bin");
}
{
ProtoSourcesProvider provider =
getConfiguredTarget("//x:bar").getProvider(ProtoSourcesProvider.class);
assertThat(provider.directDescriptorSet().getRootRelativePathString())
.isEqualTo("x/bar-descriptor-set.proto.bin");
assertThat(prettyArtifactNames(provider.transitiveDescriptorSets()))
.containsExactly("x/bar-descriptor-set.proto.bin");
}
{
ProtoSourcesProvider provider =
getConfiguredTarget("//x:alias_to_no_srcs").getProvider(ProtoSourcesProvider.class);
assertThat(provider.directDescriptorSet().getRootRelativePathString())
.isEqualTo("x/alias_to_no_srcs-descriptor-set.proto.bin");
assertThat(prettyArtifactNames(provider.transitiveDescriptorSets()))
.containsExactly(
"x/alias_to_no_srcs-descriptor-set.proto.bin", "x/no_srcs-descriptor-set.proto.bin");
}
{
ProtoSourcesProvider provider =
getConfiguredTarget("//x:no_srcs").getProvider(ProtoSourcesProvider.class);
assertThat(provider.directDescriptorSet().getRootRelativePathString())
.isEqualTo("x/no_srcs-descriptor-set.proto.bin");
assertThat(prettyArtifactNames(provider.transitiveDescriptorSets()))
.containsExactly("x/no_srcs-descriptor-set.proto.bin");
}
}
@Test
public void testDescriptorSetOutput_strictDeps() throws Exception {
useConfiguration("--proto_compiler=//proto:compiler", "--strict_proto_deps=error");
scratch.file(
"x/BUILD",
"proto_library(name='nodeps', srcs=['nodeps.proto'])",
"proto_library(name='withdeps', srcs=['withdeps.proto'], deps=[':dep1', ':dep2'])",
"proto_library(name='depends_on_alias', srcs=['depends_on_alias.proto'], deps=[':alias'])",
"proto_library(name='alias', deps=[':dep1', ':dep2'])",
"proto_library(name='dep1', srcs=['dep1.proto'])",
"proto_library(name='dep2', srcs=['dep2.proto'])");
assertThat(getGeneratingSpawnAction(getDescriptorOutput("//x:nodeps")).getRemainingArguments())
.containsAllOf("--direct_dependencies", "x/nodeps.proto")
.inOrder();
assertThat(
getGeneratingSpawnAction(getDescriptorOutput("//x:withdeps")).getRemainingArguments())
.containsAllOf("--direct_dependencies", "x/dep1.proto:x/dep2.proto:x/withdeps.proto")
.inOrder();
assertThat(
getGeneratingSpawnAction(getDescriptorOutput("//x:depends_on_alias"))
.getRemainingArguments())
.containsAllOf(
"--direct_dependencies", "x/dep1.proto:x/dep2.proto:x/depends_on_alias.proto")
.inOrder();
}
/**
* When building a proto_library with multiple srcs (say foo.proto and bar.proto), we should allow
* foo.proto to import bar.proto without tripping strict-deps checking. This means that
* --direct_dependencies should list the srcs.
*/
@Test
public void testDescriptorSetOutput_strict_deps_multipleSrcs() throws Exception {
useConfiguration("--proto_compiler=//proto:compiler", "--strict_proto_deps=error");
ConfiguredTarget target =
scratchConfiguredTarget(
"x", "foo", "proto_library(name='foo', srcs=['foo.proto', 'bar.proto'])");
Artifact file = getFirstArtifactEndingWith(getFilesToBuild(target), ".proto.bin");
assertThat(file.getRootRelativePathString()).isEqualTo("x/foo-descriptor-set.proto.bin");
assertThat(getGeneratingSpawnAction(file).getRemainingArguments())
.containsAllOf("--direct_dependencies", "x/foo.proto:x/bar.proto")
.inOrder();
}
@Test
public void testDescriptorSetOutput_strictDeps_disabled() throws Exception {
useConfiguration("--proto_compiler=//proto:compiler", "--strict_proto_deps=off");
scratch.file("x/BUILD", "proto_library(name='foo', srcs=['foo.proto'])");
for (String arg :
getGeneratingSpawnAction(getDescriptorOutput("//x:foo")).getRemainingArguments()) {
assertThat(arg).doesNotContain("--direct_dependencies=");
}
}
private Artifact getDescriptorOutput(String label) throws Exception {
return getFirstArtifactEndingWith(getFilesToBuild(getConfiguredTarget(label)), ".proto.bin");
}
private Action getDescriptorWriteAction(String label) throws Exception {
return getGeneratingAction(getDescriptorOutput(label));
}
}
|
|
/* $Id$
*
* Part of ZonMW project no. 50-53000-98-156
*
* @license
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*
* Copyright (c) 2016 RIVM National Institute for Health and Environment
*/
package nl.rivm.cib.episim.model;
import java.math.BigDecimal;
import java.text.ParseException;
import java.util.NavigableMap;
import javax.inject.Inject;
import javax.inject.Singleton;
import javax.measure.Quantity;
import javax.measure.quantity.Time;
import org.apache.logging.log4j.Logger;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import io.coala.bind.InjectConfig;
import io.coala.bind.LocalBinder;
import io.coala.exception.Thrower;
import io.coala.json.JsonUtil;
import io.coala.log.LogUtil;
import io.coala.math.QuantityUtil;
import io.coala.math.Range;
import io.coala.name.Identified;
import io.coala.random.ProbabilityDistribution;
import io.coala.random.QuantityDistribution;
import io.coala.time.Duration;
import io.coala.time.Instant;
import io.coala.time.Scheduler;
import io.coala.time.Timing;
import io.reactivex.Observable;
import io.reactivex.subjects.PublishSubject;
import io.reactivex.subjects.Subject;
import tec.uom.se.ComparableQuantity;
/**
 * A {@link SocialGatherer} is used to convene and adjourn members, e.g. in
 * transmission spaces.
*
* @version $Id$
* @author Rick van Krevelen
*/
public interface SocialGatherer
extends Identified<String>, JsonSchedulable<SocialGatherer>
{
String TIMING_KEY = "convene-timing";
String DURATION_KEY = "duration-dist";
String SIZE_KEY = "capacity-dist";
String AGES_KEY = "age-filter";
String ASSORTATIVE_KEY = "assortative";
boolean isAssortative();
Range<ComparableQuantity<?>> memberAges();
ProbabilityDistribution<Long> sizeLimitDist();
/**
* Publishes when and for how long people driven by this motor convene, with
* errors diverted to {@link Scheduler#time}
*
 * @return an {@link Observable} stream of convening durations as {@link Quantity} of {@link Time}
*/
Observable<Quantity<Time>> summon();
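// Sketch of a possible subscriber (assumed usage; convene(...) is hypothetical):
//   gatherer.summon().subscribe(
//       dt -> convene( gatherer.sizeLimitDist().draw(), dt ),
//       scheduler()::fail );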
class SimpleGatherer extends Identified.SimpleOrdinal<String>
implements SocialGatherer
{
/** */
private static final Logger LOG = LogUtil
.getLogger( SocialGatherer.SimpleGatherer.class );
@Inject
private ProbabilityDistribution.Parser distParser;
@Inject
private Scheduler scheduler;
@InjectConfig
private JsonNode config;
private ProbabilityDistribution<Long> sizeDist;
private Range<ComparableQuantity<?>> ageRange;
@Override
public Scheduler scheduler()
{
return this.scheduler;
}
@Override
public JsonNode config()
{
return this.config;
}
@Override
public String toString()
{
return stringify();
}
@Override
public String id()
{
return fromConfig( ID_JSON_PROPERTY, "[NOID]" );
}
@Override
public ProbabilityDistribution<Long> sizeLimitDist()
{
if( this.sizeDist == null )
{
final String sizeDist = fromConfig( SIZE_KEY,
"uniform-discrete(50;200)" );
try
{
this.sizeDist = this.distParser
.parse( sizeDist, BigDecimal.class )
.map( v -> ((Number) v).longValue() );
} catch( final ParseException e )
{
scheduler().fail( e );
this.sizeDist = ProbabilityDistribution
.createDeterministic( 100L );
}
}
return this.sizeDist;
}
@Override
@SuppressWarnings( "unchecked" )
public Range<ComparableQuantity<?>> memberAges()
{
if( this.ageRange == null )
{
final String ages = fromConfig( AGES_KEY, "[0 yr;100 yr]" );
try
{
this.ageRange = Range.parse( ages, QuantityUtil::valueOf )
.map( q -> q.asType( Time.class ) );
} catch( final ParseException e )
{
LOG.error( "Problem parsing {}: {}", ages, e.getMessage() );
this.ageRange = Range.infinite();
}
}
return this.ageRange;
}
@Override
public boolean isAssortative()
{
return fromConfig( ASSORTATIVE_KEY, false );
}
private Subject<Instant> summonings = null;
private QuantityDistribution<Time> dist = null;
@Override
public Observable<Quantity<Time>> summon()
{
if( this.summonings == null ) try
{
this.dist = this.distParser.parseQuantity(
fromConfigNonEmpty( DURATION_KEY ), Time.class );
this.summonings = PublishSubject.create();
final String cron = fromConfigNonEmpty( TIMING_KEY );
final Iterable<Instant> timing = Timing.valueOf( cron )
.iterate( scheduler() );
atEach( timing, this.summonings::onNext );
} catch( final Exception e )
{
return Observable.error( e );
}
return this.summonings.map( t -> this.dist.draw() );
}
}
interface Factory<T>
{
String TYPE_KEY = "type";
T create( String name, ObjectNode config ) throws Exception;
default T createOrFail( String name, JsonNode config )
{
try
{
return create( name, (ObjectNode) config );
} catch( final Throwable e )
{
return Thrower.rethrowUnchecked( e );
}
}
default NavigableMap<String, T> createAll( final JsonNode config )
throws Exception
{
// array: generate default numbered name
if( config.isArray() ) return JsonUtil.toMap( (ArrayNode) config,
i -> String.format( "gatherer%02d", i ),
this::createOrFail );
// object: use field names to identify
if( config.isObject() ) return JsonUtil.toMap( (ObjectNode) config,
this::createOrFail );
// unexpected
return Thrower.throwNew( IllegalArgumentException::new,
() -> "Invalid config: " + config );
}
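		/*
		 * Illustrative configuration shapes accepted by createAll above
		 * (sketch with made-up keys and values, not from the original source):
		 *
		 *   // array form: names are generated as gatherer00, gatherer01, ...
		 *   [ { "convene-timing": "0 0 8 ? * MON-FRI", "capacity-dist": "uniform-discrete(50;200)" } ]
		 *
		 *   // object form: the field names become the gatherer names
		 *   { "school": { "convene-timing": "0 0 8 ? * MON-FRI" } }
		 */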
@Singleton
class SimpleBinding implements Factory<SocialGatherer>
{
@Inject
private LocalBinder binder;
@Override
public SocialGatherer create( final String name,
final ObjectNode config )
throws ClassNotFoundException, ParseException
{
final Class<? extends SocialGatherer> type = config
.has( TYPE_KEY )
? Class
.forName( config.get( TYPE_KEY )
.textValue() )
.asSubclass( SocialGatherer.class )
: SimpleGatherer.class;
return this.binder.inject( type,
config.put( Identified.ID_JSON_PROPERTY, name ) );
}
}
}
}
|
|
/*
* Copyright 2013 MovingBlocks
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.terasology.rendering.opengl;
import org.lwjgl.BufferUtils;
import org.lwjgl.opengl.GL11;
import org.lwjgl.opengl.GL13;
import org.lwjgl.opengl.GL15;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.terasology.assets.AssetType;
import org.terasology.assets.ResourceUrn;
import org.terasology.engine.GameThread;
import org.terasology.engine.subsystem.lwjgl.GLBufferPool;
import org.terasology.math.geom.Quat4f;
import org.terasology.math.geom.Vector2f;
import org.terasology.math.geom.Vector3f;
import org.terasology.rendering.VertexBufferObjectUtil;
import org.terasology.rendering.assets.skeletalmesh.Bone;
import org.terasology.rendering.assets.skeletalmesh.SkeletalMesh;
import org.terasology.rendering.assets.skeletalmesh.SkeletalMeshData;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import java.util.Collection;
import java.util.List;
import static org.lwjgl.opengl.GL11.GL_FLOAT;
import static org.lwjgl.opengl.GL11.GL_NORMAL_ARRAY;
import static org.lwjgl.opengl.GL11.GL_TEXTURE_COORD_ARRAY;
import static org.lwjgl.opengl.GL11.GL_UNSIGNED_INT;
import static org.lwjgl.opengl.GL11.GL_VERTEX_ARRAY;
import static org.lwjgl.opengl.GL11.glDisableClientState;
import static org.lwjgl.opengl.GL11.glEnableClientState;
import static org.lwjgl.opengl.GL11.glNormalPointer;
import static org.lwjgl.opengl.GL11.glTexCoordPointer;
import static org.lwjgl.opengl.GL11.glVertexPointer;
/**
* @author Immortius
*/
public class OpenGLSkeletalMesh extends SkeletalMesh {
private static final int TEX_COORD_SIZE = 2;
private static final int VECTOR3_SIZE = 3;
private static final int STRIDE = 24;
private static final int NORMAL_OFFSET = VECTOR3_SIZE * 4;
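    // Layout note: doRender interleaves position and normal data as 3 + 3
    // floats per vertex, so STRIDE = 6 floats * 4 bytes = 24 bytes and the
    // normals begin NORMAL_OFFSET = 3 floats * 4 bytes = 12 bytes into each
    // vertex record.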
private static final Logger logger = LoggerFactory.getLogger(OpenGLSkeletalMesh.class);
private SkeletalMeshData data;
private int vboPosNormBuffer;
private int vboUVBuffer;
private int vboIndexBuffer;
private GLBufferPool bufferPool;
private Vector3f scale;
private Vector3f translate;
public OpenGLSkeletalMesh(ResourceUrn urn, AssetType<?, SkeletalMeshData> assetType, SkeletalMeshData data, GLBufferPool bufferPool) {
super(urn, assetType);
this.bufferPool = bufferPool;
reload(data);
}
public void setScaleTranslate(Vector3f newScale, Vector3f newTranslate) {
this.scale = newScale;
this.translate = newTranslate;
}
@Override
protected void doReload(SkeletalMeshData newData) {
try {
GameThread.synch(() -> {
this.data = newData;
if (vboPosNormBuffer == 0) {
vboPosNormBuffer = bufferPool.get(getUrn().toString());
}
IntBuffer indexBuffer = BufferUtils.createIntBuffer(newData.getIndices().size());
indexBuffer.put(newData.getIndices().toArray());
indexBuffer.flip();
if (vboIndexBuffer == 0) {
vboIndexBuffer = bufferPool.get(getUrn().toString());
}
VertexBufferObjectUtil.bufferVboElementData(vboIndexBuffer, indexBuffer, GL15.GL_STATIC_DRAW);
FloatBuffer uvBuffer = BufferUtils.createFloatBuffer(newData.getUVs().size() * 2);
for (Vector2f uv : newData.getUVs()) {
uvBuffer.put(uv.x);
uvBuffer.put(uv.y);
}
uvBuffer.flip();
if (vboUVBuffer == 0) {
vboUVBuffer = bufferPool.get(getUrn().toString());
}
VertexBufferObjectUtil.bufferVboData(vboUVBuffer, uvBuffer, GL15.GL_STATIC_DRAW);
});
} catch (InterruptedException e) {
logger.error("Failed to reload {}", getUrn(), e);
}
}
@Override
protected void doDispose() {
try {
GameThread.synch(() -> {
if (vboIndexBuffer != 0) {
bufferPool.dispose(vboIndexBuffer);
vboIndexBuffer = 0;
}
if (vboPosNormBuffer != 0) {
bufferPool.dispose(vboPosNormBuffer);
vboPosNormBuffer = 0;
}
if (vboUVBuffer != 0) {
bufferPool.dispose(vboUVBuffer);
vboUVBuffer = 0;
}
});
} catch (InterruptedException e) {
logger.error("Failed to dispose {}", getUrn(), e);
}
}
public void preRender() {
glEnableClientState(GL_VERTEX_ARRAY);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
glEnableClientState(GL_NORMAL_ARRAY);
GL15.glBindBuffer(GL15.GL_ARRAY_BUFFER, vboUVBuffer);
GL13.glClientActiveTexture(GL13.GL_TEXTURE0);
glTexCoordPointer(2, GL11.GL_FLOAT, TEX_COORD_SIZE * 4, 0);
GL15.glBindBuffer(GL15.GL_ELEMENT_ARRAY_BUFFER, vboIndexBuffer);
}
public void postRender() {
glDisableClientState(GL_NORMAL_ARRAY);
glDisableClientState(GL_TEXTURE_COORD_ARRAY);
glDisableClientState(GL_VERTEX_ARRAY);
GL15.glBindBuffer(GL15.GL_ARRAY_BUFFER, 0);
GL15.glBindBuffer(GL15.GL_ELEMENT_ARRAY_BUFFER, 0);
}
public void doRender(List<Vector3f> verts, List<Vector3f> normals) {
FloatBuffer vertBuffer = BufferUtils.createFloatBuffer(verts.size() * 6);
for (int i = 0; i < verts.size(); ++i) {
Vector3f vert = verts.get(i);
vertBuffer.put(vert.x * scale.x + translate.x);
vertBuffer.put(vert.y * scale.y + translate.y);
vertBuffer.put(vert.z * scale.z + translate.z);
Vector3f norm = normals.get(i);
vertBuffer.put(norm.x);
vertBuffer.put(norm.y);
vertBuffer.put(norm.z);
}
vertBuffer.flip();
VertexBufferObjectUtil.bufferVboData(vboPosNormBuffer, vertBuffer, GL15.GL_DYNAMIC_DRAW);
GL15.glBindBuffer(GL15.GL_ARRAY_BUFFER, vboPosNormBuffer);
glVertexPointer(VECTOR3_SIZE, GL_FLOAT, STRIDE, 0);
glNormalPointer(GL_FLOAT, STRIDE, NORMAL_OFFSET);
GL11.glDrawElements(GL11.GL_TRIANGLES, data.getIndices().size(), GL_UNSIGNED_INT, 0);
GL15.glBindBuffer(GL15.GL_ARRAY_BUFFER, 0);
}
public void render() {
preRender();
doRender(data.getBindPoseVertexPositions(), data.getBindPoseVertexNormals());
postRender();
}
public void render(List<Vector3f> bonePositions, List<Quat4f> boneRotations) {
preRender();
doRender(data.getVertexPositions(bonePositions, boneRotations), data.getVertexNormals(bonePositions, boneRotations));
postRender();
}
@Override
public int getVertexCount() {
return data.getVertexCount();
}
@Override
public Collection<Bone> getBones() {
return data.getBones();
}
@Override
public Bone getBone(String boneName) {
return data.getBone(boneName);
}
}
|
|
/*
* Copyright (C) 2007 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package android.location;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import android.os.Bundle;
import android.os.Parcel;
import android.os.Parcelable;
/**
 * A class representing an Address, i.e., a set of Strings describing a location.
 *
 * The address format is a simplified version of xAL (eXtensible Address Language)
* http://www.oasis-open.org/committees/ciq/ciq.html#6
*/
public class Address implements Parcelable {
private Locale mLocale;
private String mFeatureName;
private HashMap<Integer, String> mAddressLines;
private int mMaxAddressLineIndex = -1;
private String mAdminArea;
private String mSubAdminArea;
private String mLocality;
private String mSubLocality;
private String mThoroughfare;
private String mSubThoroughfare;
private String mPremises;
private String mPostalCode;
private String mCountryCode;
private String mCountryName;
private double mLatitude;
private double mLongitude;
private boolean mHasLatitude = false;
private boolean mHasLongitude = false;
private String mPhone;
private String mUrl;
private Bundle mExtras = null;
/**
* Constructs a new Address object set to the given Locale and with all
* other fields initialized to null or false.
*/
public Address(Locale locale) {
mLocale = locale;
}
/**
* Returns the Locale associated with this address.
*/
public Locale getLocale() {
return mLocale;
}
/**
* Returns the largest index currently in use to specify an address line.
* If no address lines are specified, -1 is returned.
*/
public int getMaxAddressLineIndex() {
return mMaxAddressLineIndex;
}
/**
* Returns a line of the address numbered by the given index
* (starting at 0), or null if no such line is present.
*
* @throws IllegalArgumentException if index < 0
*/
public String getAddressLine(int index) {
if (index < 0) {
throw new IllegalArgumentException("index = " + index + " < 0");
}
return mAddressLines == null? null : mAddressLines.get(index);
}
/**
* Sets the line of the address numbered by index (starting at 0) to the
* given String, which may be null.
*
* @throws IllegalArgumentException if index < 0
*/
public void setAddressLine(int index, String line) {
if (index < 0) {
throw new IllegalArgumentException("index = " + index + " < 0");
}
if (mAddressLines == null) {
mAddressLines = new HashMap<Integer, String>();
}
mAddressLines.put(index, line);
if (line == null) {
// We've eliminated a line, recompute the max index
mMaxAddressLineIndex = -1;
for (Integer i : mAddressLines.keySet()) {
mMaxAddressLineIndex = Math.max(mMaxAddressLineIndex, i);
}
} else {
mMaxAddressLineIndex = Math.max(mMaxAddressLineIndex, index);
}
}
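    /*
     * Illustrative usage sketch (not part of the platform documentation):
     * address lines form a sparse, zero-based collection, and an index that
     * was never set simply reads back as null.
     *
     *   Address a = new Address(Locale.US);
     *   a.setAddressLine(0, "1600 Amphitheatre Parkway");
     *   a.setAddressLine(2, "Mountain View, CA 94043");
     *   a.getAddressLine(1);        // null; index 1 was never set
     *   a.getMaxAddressLineIndex(); // 2
     */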
/**
* Returns the feature name of the address, for example, "Golden Gate Bridge", or null
* if it is unknown
*/
public String getFeatureName() {
return mFeatureName;
}
/**
* Sets the feature name of the address to the given String, which may be null
*/
public void setFeatureName(String featureName) {
mFeatureName = featureName;
}
/**
* Returns the administrative area name of the address, for example, "CA", or null if
* it is unknown
*/
public String getAdminArea() {
return mAdminArea;
}
/**
* Sets the administrative area name of the address to the given String, which may be null
*/
public void setAdminArea(String adminArea) {
this.mAdminArea = adminArea;
}
/**
* Returns the sub-administrative area name of the address, for example, "Santa Clara County",
* or null if it is unknown
*/
public String getSubAdminArea() {
return mSubAdminArea;
}
/**
* Sets the sub-administrative area name of the address to the given String, which may be null
*/
public void setSubAdminArea(String subAdminArea) {
this.mSubAdminArea = subAdminArea;
}
/**
* Returns the locality of the address, for example "Mountain View", or null if it is unknown.
*/
public String getLocality() {
return mLocality;
}
/**
* Sets the locality of the address to the given String, which may be null.
*/
public void setLocality(String locality) {
mLocality = locality;
}
/**
* Returns the sub-locality of the address, or null if it is unknown.
* For example, this may correspond to the neighborhood of the locality.
*/
public String getSubLocality() {
return mSubLocality;
}
/**
* Sets the sub-locality of the address to the given String, which may be null.
*/
public void setSubLocality(String sublocality) {
mSubLocality = sublocality;
}
/**
     * Returns the thoroughfare name of the address, for example, "1600 Amphitheatre Parkway",
* which may be null
*/
public String getThoroughfare() {
return mThoroughfare;
}
/**
* Sets the thoroughfare name of the address, which may be null.
*/
public void setThoroughfare(String thoroughfare) {
this.mThoroughfare = thoroughfare;
}
/**
* Returns the sub-thoroughfare name of the address, which may be null.
* This may correspond to the street number of the address.
*/
public String getSubThoroughfare() {
return mSubThoroughfare;
}
/**
* Sets the sub-thoroughfare name of the address, which may be null.
*/
public void setSubThoroughfare(String subthoroughfare) {
this.mSubThoroughfare = subthoroughfare;
}
/**
* Returns the premises of the address, or null if it is unknown.
*/
public String getPremises() {
return mPremises;
}
/**
* Sets the premises of the address to the given String, which may be null.
*/
public void setPremises(String premises) {
mPremises = premises;
}
/**
* Returns the postal code of the address, for example "94110",
* or null if it is unknown.
*/
public String getPostalCode() {
return mPostalCode;
}
/**
* Sets the postal code of the address to the given String, which may
* be null.
*/
public void setPostalCode(String postalCode) {
mPostalCode = postalCode;
}
/**
* Returns the country code of the address, for example "US",
* or null if it is unknown.
*/
public String getCountryCode() {
return mCountryCode;
}
/**
* Sets the country code of the address to the given String, which may
* be null.
*/
public void setCountryCode(String countryCode) {
mCountryCode = countryCode;
}
/**
* Returns the localized country name of the address, for example "Iceland",
* or null if it is unknown.
*/
public String getCountryName() {
return mCountryName;
}
/**
* Sets the country name of the address to the given String, which may
* be null.
*/
public void setCountryName(String countryName) {
mCountryName = countryName;
}
/**
* Returns true if a latitude has been assigned to this Address,
* false otherwise.
*/
public boolean hasLatitude() {
return mHasLatitude;
}
/**
* Returns the latitude of the address if known.
*
* @throws IllegalStateException if this Address has not been assigned
* a latitude.
*/
public double getLatitude() {
if (mHasLatitude) {
return mLatitude;
} else {
throw new IllegalStateException();
}
}
/**
* Sets the latitude associated with this address.
*/
public void setLatitude(double latitude) {
mLatitude = latitude;
mHasLatitude = true;
}
/**
* Removes any latitude associated with this address.
*/
public void clearLatitude() {
mHasLatitude = false;
}
/**
* Returns true if a longitude has been assigned to this Address,
* false otherwise.
*/
public boolean hasLongitude() {
return mHasLongitude;
}
/**
* Returns the longitude of the address if known.
*
* @throws IllegalStateException if this Address has not been assigned
* a longitude.
*/
public double getLongitude() {
if (mHasLongitude) {
return mLongitude;
} else {
throw new IllegalStateException();
}
}
/**
* Sets the longitude associated with this address.
*/
public void setLongitude(double longitude) {
mLongitude = longitude;
mHasLongitude = true;
}
/**
* Removes any longitude associated with this address.
*/
public void clearLongitude() {
mHasLongitude = false;
}
/**
* Returns the phone number of the address if known,
* or null if it is unknown.
*/
public String getPhone() {
return mPhone;
}
/**
* Sets the phone number associated with this address.
*/
public void setPhone(String phone) {
mPhone = phone;
}
/**
* Returns the public URL for the address if known,
* or null if it is unknown.
*/
public String getUrl() {
return mUrl;
}
/**
* Sets the public URL associated with this address.
*/
    public void setUrl(String url) {
        mUrl = url;
}
/**
* Returns additional provider-specific information about the
* address as a Bundle. The keys and values are determined
* by the provider. If no additional information is available,
* null is returned.
*
* <!--
* <p> A number of common key/value pairs are listed
* below. Providers that use any of the keys on this list must
* provide the corresponding value as described below.
*
* <ul>
* </ul>
* -->
*/
public Bundle getExtras() {
return mExtras;
}
/**
* Sets the extra information associated with this fix to the
* given Bundle.
*/
public void setExtras(Bundle extras) {
mExtras = (extras == null) ? null : new Bundle(extras);
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("Address[addressLines=[");
for (int i = 0; i <= mMaxAddressLineIndex; i++) {
if (i > 0) {
sb.append(',');
}
sb.append(i);
sb.append(':');
String line = mAddressLines.get(i);
if (line == null) {
sb.append("null");
} else {
sb.append('\"');
sb.append(line);
sb.append('\"');
}
}
sb.append(']');
sb.append(",feature=");
sb.append(mFeatureName);
sb.append(",admin=");
sb.append(mAdminArea);
sb.append(",sub-admin=");
sb.append(mSubAdminArea);
sb.append(",locality=");
sb.append(mLocality);
sb.append(",thoroughfare=");
sb.append(mThoroughfare);
sb.append(",postalCode=");
sb.append(mPostalCode);
sb.append(",countryCode=");
sb.append(mCountryCode);
sb.append(",countryName=");
sb.append(mCountryName);
sb.append(",hasLatitude=");
sb.append(mHasLatitude);
sb.append(",latitude=");
sb.append(mLatitude);
sb.append(",hasLongitude=");
sb.append(mHasLongitude);
sb.append(",longitude=");
sb.append(mLongitude);
sb.append(",phone=");
sb.append(mPhone);
sb.append(",url=");
sb.append(mUrl);
sb.append(",extras=");
sb.append(mExtras);
sb.append(']');
return sb.toString();
}
public static final Parcelable.Creator<Address> CREATOR =
new Parcelable.Creator<Address>() {
public Address createFromParcel(Parcel in) {
String language = in.readString();
String country = in.readString();
Locale locale = country.length() > 0 ?
new Locale(language, country) :
new Locale(language);
Address a = new Address(locale);
int N = in.readInt();
if (N > 0) {
a.mAddressLines = new HashMap<Integer, String>(N);
for (int i = 0; i < N; i++) {
int index = in.readInt();
String line = in.readString();
a.mAddressLines.put(index, line);
a.mMaxAddressLineIndex =
Math.max(a.mMaxAddressLineIndex, index);
}
} else {
a.mAddressLines = null;
a.mMaxAddressLineIndex = -1;
}
a.mFeatureName = in.readString();
a.mAdminArea = in.readString();
a.mSubAdminArea = in.readString();
a.mLocality = in.readString();
a.mSubLocality = in.readString();
a.mThoroughfare = in.readString();
a.mSubThoroughfare = in.readString();
a.mPremises = in.readString();
a.mPostalCode = in.readString();
a.mCountryCode = in.readString();
a.mCountryName = in.readString();
                    a.mHasLatitude = in.readInt() != 0;
if (a.mHasLatitude) {
a.mLatitude = in.readDouble();
}
                    a.mHasLongitude = in.readInt() != 0;
if (a.mHasLongitude) {
a.mLongitude = in.readDouble();
}
a.mPhone = in.readString();
a.mUrl = in.readString();
a.mExtras = in.readBundle();
return a;
}
public Address[] newArray(int size) {
return new Address[size];
}
};
public int describeContents() {
return (mExtras != null) ? mExtras.describeContents() : 0;
}
public void writeToParcel(Parcel parcel, int flags) {
parcel.writeString(mLocale.getLanguage());
parcel.writeString(mLocale.getCountry());
if (mAddressLines == null) {
parcel.writeInt(0);
} else {
Set<Map.Entry<Integer, String>> entries = mAddressLines.entrySet();
parcel.writeInt(entries.size());
for (Map.Entry<Integer, String> e : entries) {
parcel.writeInt(e.getKey());
parcel.writeString(e.getValue());
}
}
parcel.writeString(mFeatureName);
parcel.writeString(mAdminArea);
parcel.writeString(mSubAdminArea);
parcel.writeString(mLocality);
parcel.writeString(mSubLocality);
parcel.writeString(mThoroughfare);
parcel.writeString(mSubThoroughfare);
parcel.writeString(mPremises);
parcel.writeString(mPostalCode);
parcel.writeString(mCountryCode);
parcel.writeString(mCountryName);
parcel.writeInt(mHasLatitude ? 1 : 0);
if (mHasLatitude) {
parcel.writeDouble(mLatitude);
}
parcel.writeInt(mHasLongitude ? 1 : 0);
if (mHasLongitude){
parcel.writeDouble(mLongitude);
}
parcel.writeString(mPhone);
parcel.writeString(mUrl);
parcel.writeBundle(mExtras);
}
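    /*
     * Illustrative round trip (sketch, not from the original source): the
     * field order written above must match the order read back in
     * CREATOR.createFromParcel.
     *
     *   Parcel p = Parcel.obtain();
     *   address.writeToParcel(p, 0);
     *   p.setDataPosition(0);
     *   Address copy = Address.CREATOR.createFromParcel(p);
     *   p.recycle();
     */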
}
|
|
/*
* Copyright 2013 The GDG Frisbee Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gdg.frisbee.android.fragment;
import android.content.Intent;
import android.net.Uri;
import android.os.Bundle;
import android.support.v7.app.ActionBarActivity;
import android.view.*;
import android.widget.AdapterView;
import android.widget.ListView;
import com.google.android.gms.plus.PlusShare;
import com.google.api.client.googleapis.services.json.CommonGoogleJsonClientRequestInitializer;
import com.google.api.client.http.HttpTransport;
import com.google.api.client.json.JsonFactory;
import com.google.api.client.json.gson.GsonFactory;
import com.google.api.services.plus.Plus;
import com.google.api.services.plus.model.Activity;
import com.google.api.services.plus.model.ActivityFeed;
import java.io.IOException;
import org.gdg.frisbee.android.R;
import org.gdg.frisbee.android.activity.GdgActivity;
import org.gdg.frisbee.android.adapter.NewsAdapter;
import org.gdg.frisbee.android.api.GapiTransportChooser;
import org.gdg.frisbee.android.app.App;
import org.gdg.frisbee.android.cache.ModelCache;
import org.gdg.frisbee.android.task.Builder;
import org.gdg.frisbee.android.task.CommonAsyncTask;
import org.gdg.frisbee.android.utils.PullToRefreshTransformer;
import org.gdg.frisbee.android.utils.Utils;
import org.joda.time.DateTime;
import butterknife.ButterKnife;
import de.keyboardsurfer.android.widget.crouton.Crouton;
import de.keyboardsurfer.android.widget.crouton.Style;
import timber.log.Timber;
import uk.co.senab.actionbarpulltorefresh.library.ActionBarPullToRefresh;
import uk.co.senab.actionbarpulltorefresh.library.Options;
import uk.co.senab.actionbarpulltorefresh.library.PullToRefreshLayout;
import uk.co.senab.actionbarpulltorefresh.library.listeners.OnRefreshListener;
/**
* GDG Aachen
* org.gdg.frisbee.android.fragment
* <p/>
* User: maui
* Date: 20.04.13
* Time: 12:22
*/
public class NewsFragment extends GdgListFragment implements OnRefreshListener {
private static final String LOG_TAG = "GDG-NewsFragment";
final HttpTransport mTransport = GapiTransportChooser.newCompatibleTransport();
final JsonFactory mJsonFactory = new GsonFactory();
private PullToRefreshLayout mPullToRefreshLayout;
private Plus mClient;
private NewsAdapter mAdapter;
private PullToRefreshTransformer mPulltoRefreshTransformer;
public static NewsFragment newInstance(String plusId) {
NewsFragment fragment = new NewsFragment();
Bundle arguments = new Bundle();
arguments.putString("plus_id", plusId);
fragment.setArguments(arguments);
return fragment;
}
@Override
public void onSaveInstanceState(Bundle outState) {
Timber.d("onSaveInstanceState()");
super.onSaveInstanceState(outState);
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
}
@Override
public void onStart() {
super.onStart();
Timber.d("onStart()");
}
@Override
public void onResume() {
super.onResume();
Timber.d("onResume()");
        for(int i = 0; i < getListView().getChildCount(); i++) {
mAdapter.updatePlusOne(getListView().getChildAt(i));
}
}
@Override
public void onPause() {
super.onPause();
Timber.d("onPause()");
}
@Override
public void onStop() {
super.onStop();
Timber.d("onStop()");
}
@Override
public void onActivityCreated(Bundle savedInstanceState) {
super.onActivityCreated(savedInstanceState);
Timber.d("onActivityCreated()");
mClient = new Plus.Builder(mTransport, mJsonFactory, null).setGoogleClientRequestInitializer(new CommonGoogleJsonClientRequestInitializer(getString(R.string.ip_simple_api_access_key))).build();
mAdapter = new NewsAdapter(getActivity(), ((GdgActivity)getActivity()).getGoogleApiClient());
setListAdapter(mAdapter);
registerForContextMenu(getListView());
if(((ActionBarActivity)getActivity()).getSupportActionBar() != null) {
mPullToRefreshLayout = new PullToRefreshLayout(getActivity());
mPulltoRefreshTransformer = new PullToRefreshTransformer();
ActionBarPullToRefresh.from(getActivity())
.options(Options.create()
.headerTransformer(mPulltoRefreshTransformer)
.headerLayout(R.layout.pull_to_refresh)
.build())
.theseChildrenArePullable(android.R.id.list, android.R.id.empty)
.insertLayoutInto((ViewGroup)getView())
.listener(this)
.setup(mPullToRefreshLayout);
}
/*((GdgActivity)getActivity()).getPullToRefreshHelper().addRefreshableView(getListView(), new PullToRefreshAttacher.ViewDelegate() {
@Override
public boolean isScrolledToTop(View view) {
AbsListView absListView = (AbsListView) view;
if (absListView.getCount() == 0) {
return true;
} else if (absListView.getFirstVisiblePosition() == 0) {
final View firstVisibleChild = absListView.getChildAt(0);
return firstVisibleChild != null && firstVisibleChild.getTop() >= 0;
}
return false;
}
}, this);*/
if(getListView() instanceof ListView) {
ListView listView = (ListView) getListView();
listView.setDivider(null);
listView.setDividerHeight(0);
}
if(Utils.isOnline(getActivity())) {
new Builder<String, ActivityFeed>(String.class, ActivityFeed.class)
.addParameter(getArguments().getString("plus_id"))
.setOnPreExecuteListener(new CommonAsyncTask.OnPreExecuteListener() {
@Override
public void onPreExecute() {
setIsLoading(true);
}
})
.setOnBackgroundExecuteListener(new CommonAsyncTask.OnBackgroundExecuteListener<String, ActivityFeed>() {
@Override
public ActivityFeed doInBackground(String... params) {
try {
ActivityFeed feed = (ActivityFeed) App.getInstance().getModelCache().get("news_" + params[0]);
if (feed == null) {
Plus.Activities.List request = mClient.activities().list(params[0], "public");
request.setMaxResults(10L);
request.setFields("nextPageToken,items(id,published,url,object/content,verb,object/attachments,object/actor,annotation,object(plusoners,replies,resharers))");
feed = request.execute();
App.getInstance().getModelCache().put("news_" + params[0], feed, DateTime.now().plusHours(1));
}
return feed;
} catch (IOException e) {
e.printStackTrace();
}
return null;
}
})
.setOnPostExecuteListener(new CommonAsyncTask.OnPostExecuteListener<String, ActivityFeed>() {
@Override
public void onPostExecute(String[] params, ActivityFeed activityFeed) {
if(activityFeed != null) {
mAdapter.addAll(activityFeed.getItems());
setIsLoading(false);
}
}
})
.buildAndExecute();
} else {
App.getInstance().getModelCache().getAsync("news_" +getArguments().getString("plus_id"), false, new ModelCache.CacheListener() {
@Override
public void onGet(Object item) {
ActivityFeed feed = (ActivityFeed)item;
if(isAdded())
Crouton.makeText(getActivity(), getString(R.string.cached_content), Style.INFO).show();
mAdapter.addAll(feed.getItems());
setIsLoading(false);
}
@Override
public void onNotFound(String key) {
if(isAdded())
Crouton.makeText(getActivity(), getString(R.string.offline_alert), Style.ALERT).show();
}
});
}
}
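    /*
     * The online branch above follows a cache-aside pattern (sketch of the
     * flow already implemented there, with `key` standing in for
     * "news_" + plusId):
     *
     *   ActivityFeed feed = (ActivityFeed) App.getInstance().getModelCache().get(key);
     *   if (feed == null) {
     *       feed = mClient.activities().list(plusId, "public").execute();
     *       App.getInstance().getModelCache().put(key, feed, DateTime.now().plusHours(1));
     *   }
     */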
@Override
public void onCreateContextMenu(ContextMenu menu, View v, ContextMenu.ContextMenuInfo menuInfo) {
super.onCreateContextMenu(menu, v, menuInfo);
AdapterView.AdapterContextMenuInfo info = (AdapterView.AdapterContextMenuInfo) menuInfo;
getActivity().getMenuInflater().inflate(R.menu.news_context, menu);
}
@Override
public boolean onContextItemSelected(MenuItem item) {
AdapterView.AdapterContextMenuInfo info = (AdapterView.AdapterContextMenuInfo) item.getMenuInfo();
Activity activity = (Activity) mAdapter.getItem(info.position);
switch(item.getItemId()) {
case R.id.share_with_googleplus:
shareWithGooglePlus(activity);
return true;
default:
return super.onContextItemSelected(item);
}
}
private void shareWithGooglePlus(Activity activity) {
Intent shareIntent = new PlusShare.Builder(getActivity())
.setType("text/plain")
.setContentUrl(Uri.parse(activity.getUrl()))
.getIntent();
startActivityForResult(shareIntent, 0);
}
@Override
public void onViewCreated(View view, Bundle savedInstanceState) {
super.onViewCreated(view, savedInstanceState);
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
Timber.d("onCreateView()");
View v = inflater.inflate(R.layout.fragment_news, null);
ButterKnife.inject(this, v);
return v;
}
@Override
public void onDestroy() {
super.onDestroy();
Timber.d("onDestroy()");
}
@Override
public void onRefreshStarted(View view) {
if(Utils.isOnline(getActivity())) {
new Builder<String, ActivityFeed>(String.class, ActivityFeed.class)
.addParameter(getArguments().getString("plus_id"))
.setOnPreExecuteListener(new CommonAsyncTask.OnPreExecuteListener() {
@Override
public void onPreExecute() {
setIsLoading(true);
}
})
.setOnBackgroundExecuteListener(new CommonAsyncTask.OnBackgroundExecuteListener<String, ActivityFeed>() {
@Override
public ActivityFeed doInBackground(String... params) {
try {
Plus.Activities.List request = mClient.activities().list(params[0], "public");
request.setMaxResults(10L);
request.setFields("nextPageToken,items(id,published,url,object/content,verb,object/attachments,annotation,object(plusoners,replies,resharers))");
ActivityFeed feed = request.execute();
App.getInstance().getModelCache().put("news_" + params[0], feed, DateTime.now().plusHours(1));
return feed;
} catch (IOException e) {
e.printStackTrace();
}
return null;
}
})
.setOnPostExecuteListener(new CommonAsyncTask.OnPostExecuteListener<String, ActivityFeed>() {
@Override
public void onPostExecute(String[] params, ActivityFeed activityFeed) {
if (activityFeed != null) {
mAdapter.replaceAll(activityFeed.getItems(), 0);
setIsLoading(false);
if (getActivity() != null) {
mPullToRefreshLayout.setRefreshComplete();
mPulltoRefreshTransformer.onReset();
}
}
}
})
.buildAndExecute();
}
}
}
|
|
package com.smict.product.data;
import java.io.IOException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;
import com.smict.product.model.*;
import ldc.util.*;
public class LabModeDB {
DBConnect agent = new DBConnect();
Connection conn = null;
Statement Stmt = null;
PreparedStatement pStmt = null;
ResultSet rs = null;
DateUtil dateUtil = new DateUtil();
public String GetHighest_ProductgroupID() throws IOException, Exception{
String sqlQuery = "select MAX(productgroup_id) as productgroup_id from pro_productgroup";
String ResultString = "";
conn = agent.getConnectMYSql();
Stmt = conn.createStatement();
rs = Stmt.executeQuery(sqlQuery);
		if(rs.next()){
			ResultString = rs.getString("productgroup_id");
		}
		if (!rs.isClosed())
			rs.close();
		if (!Stmt.isClosed())
			Stmt.close();
		if (!conn.isClosed())
			conn.close();
		return ResultString;
}
public String PlusOneID_FormatID(String productgroup_id){
if(productgroup_id == null){
productgroup_id = "0001";
}else{
String ResultString_plusone = String.valueOf((Integer.parseInt(productgroup_id)+1));
switch (ResultString_plusone.length()) {
case 1:productgroup_id="000"+ResultString_plusone; break;
case 2:productgroup_id="00"+ResultString_plusone; break;
case 3:productgroup_id="0"+ResultString_plusone; break;
case 4:productgroup_id=ResultString_plusone; break;
}
}
return productgroup_id;
}
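	/*
	 * Equivalent zero-padding sketch (an alternative, not part of the original
	 * class): assuming the id is a non-null numeric string, the switch above
	 * can be expressed with String.format.
	 *
	 *   String next = String.format("%04d", Integer.parseInt(productgroup_id) + 1);
	 */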
public int Addlabmode(String labmode_id, String labmode_name) {
String sqlQuery = "insert into lab_mode (labmode_id,labmode_name) "
+ "values (?,?)";
int rowsupdate = 0;
try {
conn = agent.getConnectMYSql();
conn.setAutoCommit(false);
pStmt = conn.prepareStatement(sqlQuery);
pStmt.setString(1, labmode_id);
pStmt.setString(2, labmode_name);
rowsupdate = pStmt.executeUpdate();
conn.commit();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
} finally {
try {
if (!pStmt.isClosed())
pStmt.close();
if (!conn.isClosed())
conn.close();
} catch (SQLException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
return rowsupdate;
}
public Boolean Deletelabmode(String labmode_id) {
String sqlQuery = "delete from lab_mode where labmode_id = ?";
Boolean delete_success = false;
try {
conn = agent.getConnectMYSql();
pStmt = conn.prepareStatement(sqlQuery);
pStmt.setString(1, labmode_id);
int rowsupdate = pStmt.executeUpdate();
if (rowsupdate > 0)
delete_success = true;
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
} finally {
try {
if (!pStmt.isClosed())
pStmt.close();
if (!conn.isClosed())
conn.close();
} catch (SQLException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
return delete_success;
}
public int Updatelabmode(String labmode_id, String labmode_name, String hdlabmode_id) {
String sqlQuery = "update lab_mode set labmode_id = ? , labmode_name = ? "
+ "where labmode_id = ?";
int rowsupdate = 0;
try {
conn = agent.getConnectMYSql();
conn.setAutoCommit(false);
pStmt = conn.prepareStatement(sqlQuery);
pStmt.setString(1, labmode_id);
pStmt.setString(2, labmode_name);
pStmt.setString(3, hdlabmode_id);
rowsupdate = pStmt.executeUpdate();
conn.commit();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
} finally {
try {
if (!pStmt.isClosed())
pStmt.close();
if (!conn.isClosed())
conn.close();
} catch (SQLException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
return rowsupdate;
}
public List<LabModeModel> Get_LabModeList(String labmode_id, String labmode_name) throws IOException, Exception {
String sqlQuery = "select * from lab_mode where ";
if (new Validate().Check_String_notnull_notempty(labmode_id))
sqlQuery += "labmode_id = '" + labmode_id + "' and ";
if (new Validate().Check_String_notnull_notempty(labmode_name))
sqlQuery += "labmode_name like '%" + labmode_name + "%' and ";
sqlQuery += "labmode_id <> '' ";
conn = agent.getConnectMYSql();
Stmt = conn.createStatement();
rs = Stmt.executeQuery(sqlQuery);
List<LabModeModel> ResultList = new ArrayList<LabModeModel>();
while (rs.next()) {
// vender_id,vender_name,create_by,create_datetime,update_by,update_datetime
ResultList.add(new LabModeModel(rs.getString("labmode_id"), rs.getString("labmode_name")));
}
if (!rs.isClosed())
rs.close();
if (!Stmt.isClosed())
Stmt.close();
if (!conn.isClosed())
conn.close();
return ResultList;
}
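	/*
	 * Note: the WHERE clause above is assembled by string concatenation. A
	 * parameterized sketch for the case where both filters are supplied (an
	 * alternative, not part of the original class) would be:
	 *
	 *   pStmt = conn.prepareStatement(
	 *       "select * from lab_mode where labmode_id = ? and labmode_name like ?");
	 *   pStmt.setString(1, labmode_id);
	 *   pStmt.setString(2, "%" + labmode_name + "%");
	 */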
public List<LabModeModel> Get_treatmentGroup() throws IOException, Exception {
String sqlQuery = "select id,code,name from treatment_group ";
conn = agent.getConnectMYSql();
Stmt = conn.createStatement();
rs = Stmt.executeQuery(sqlQuery);
List<LabModeModel> ResultList = new ArrayList<LabModeModel>();
while (rs.next()) {
LabModeModel treatGmodel = new LabModeModel();
treatGmodel.setTreatG_id(rs.getString("id"));
treatGmodel.setTreatG_name(rs.getString("name"));
treatGmodel.setTreatG_code(rs.getString("code"));
ResultList.add(treatGmodel);
}
if (!rs.isClosed())
rs.close();
if (!Stmt.isClosed())
Stmt.close();
if (!conn.isClosed())
conn.close();
return ResultList;
}
public List<LabModeModel> Get_ScopeGroup(String scopeID) throws IOException, Exception {
String sqlQuery = "select treatment_group.`code`,treatment_group.id,treatment_group.`name` "
+ "FROM "
+ "treatment_master "
+ "INNER JOIN doctor_position_treatment ON (treatment_master.id = doctor_position_treatment.treatment_id "
+ "AND doctor_position_treatment.doc_position_id = "+scopeID+") "
+ "INNER JOIN treatment_category ON treatment_master.category_id = treatment_category.id "
+ "INNER JOIN treatment_group ON treatment_category.group_id = treatment_group.id "
+ "GROUP BY treatment_group.id ";
conn = agent.getConnectMYSql();
Stmt = conn.createStatement();
rs = Stmt.executeQuery(sqlQuery);
List<LabModeModel> ResultList = new ArrayList<LabModeModel>();
while (rs.next()) {
LabModeModel treatGmodel = new LabModeModel();
treatGmodel.setTreatG_id(rs.getString("id"));
treatGmodel.setTreatG_name(rs.getString("name"));
treatGmodel.setTreatG_code(rs.getString("code"));
ResultList.add(treatGmodel);
}
if (!rs.isClosed())
rs.close();
if (!Stmt.isClosed())
Stmt.close();
if (!conn.isClosed())
conn.close();
return ResultList;
}
public List<LabModeModel> Get_ScopeCategory(String scopeID) throws IOException, Exception {
String sqlQuery = "select treatment_category.id,treatment_category.`name`,treatment_category.`code` "
+ "FROM "
+ "treatment_master "
+ "INNER JOIN doctor_position_treatment ON (treatment_master.id = doctor_position_treatment.treatment_id "
+ "AND doctor_position_treatment.doc_position_id = "+scopeID+") "
+ "INNER JOIN treatment_category ON treatment_master.category_id = treatment_category.id "
+ "INNER JOIN treatment_group ON treatment_category.group_id = treatment_group.id "
+ "GROUP BY treatment_category.id ";
conn = agent.getConnectMYSql();
Stmt = conn.createStatement();
rs = Stmt.executeQuery(sqlQuery);
List<LabModeModel> ResultList = new ArrayList<LabModeModel>();
while (rs.next()) {
LabModeModel treatGmodel = new LabModeModel();
treatGmodel.setCategoryID(rs.getString("id"));
treatGmodel.setCategoryName(rs.getString("name"));
treatGmodel.setCategoryCode(rs.getString("code"));
ResultList.add(treatGmodel);
}
if (!rs.isClosed())
rs.close();
if (!Stmt.isClosed())
Stmt.close();
if (!conn.isClosed())
conn.close();
return ResultList;
}
public int Addtreatmentgroup(String code, String name) {
String sqlQuery = "insert into treatment_group (code,name) "
+ "values (?,?)";
int rowsupdate = 0;
try {
conn = agent.getConnectMYSql();
conn.setAutoCommit(false);
pStmt = conn.prepareStatement(sqlQuery);
pStmt.setString(1, code);
pStmt.setString(2, name);
rowsupdate = pStmt.executeUpdate();
conn.commit();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
} finally {
try {
if (!pStmt.isClosed())
pStmt.close();
if (!conn.isClosed())
conn.close();
} catch (SQLException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
return rowsupdate;
}
public Boolean Deletetreatmentgroup(String id) {
String sqlQuery = "delete from treatment_group where id = ?";
Boolean delete_success = false;
try {
conn = agent.getConnectMYSql();
pStmt = conn.prepareStatement(sqlQuery);
pStmt.setString(1, id);
int rowsupdate = pStmt.executeUpdate();
if (rowsupdate > 0)
delete_success = true;
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
} finally {
try {
if (!pStmt.isClosed())
pStmt.close();
if (!conn.isClosed())
conn.close();
} catch (SQLException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
return delete_success;
}
public int Updatetreatmentgroup(String id, String code, String name) {
String sqlQuery = "update treatment_group set code = ? , name = ? "
+ "where id = ?";
int rowsupdate = 0;
try {
conn = agent.getConnectMYSql();
conn.setAutoCommit(false);
pStmt = conn.prepareStatement(sqlQuery);
pStmt.setString(1, code);
pStmt.setString(2, name);
pStmt.setString(3, id);
rowsupdate = pStmt.executeUpdate();
conn.commit();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
} finally {
try {
if (!pStmt.isClosed())
pStmt.close();
if (!conn.isClosed())
conn.close();
} catch (SQLException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
return rowsupdate;
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.locator;
import java.net.InetAddress;
import java.nio.ByteBuffer;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import com.google.common.collect.*;
import org.apache.cassandra.utils.BiMultiValMap;
import org.apache.cassandra.utils.Pair;
import org.apache.cassandra.utils.SortedBiMultiValMap;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.dht.Range;
import org.apache.cassandra.dht.Token;
import org.apache.cassandra.gms.FailureDetector;
import org.apache.cassandra.service.StorageService;
public class TokenMetadata
{
private static final Logger logger = LoggerFactory.getLogger(TokenMetadata.class);
/**
* Maintains token to endpoint map of every node in the cluster.
* Each Token is associated with exactly one Address, but each Address may have
* multiple tokens. Hence, the BiMultiValMap collection.
*/
private final BiMultiValMap<Token, InetAddress> tokenToEndpointMap;
/** Maintains endpoint to host ID map of every node in the cluster */
private final BiMap<InetAddress, UUID> endpointToHostIdMap;
    // Prior to CASSANDRA-603, we just had <tt>Map<Range, InetAddress> pendingRanges</tt>,
// which was added to when a node began bootstrap and removed from when it finished.
//
// This is inadequate when multiple changes are allowed simultaneously. For example,
// suppose that there is a ring of nodes A, C and E, with replication factor 3.
// Node D bootstraps between C and E, so its pending ranges will be E-A, A-C and C-D.
// Now suppose node B bootstraps between A and C at the same time. Its pending ranges
// would be C-E, E-A and A-B. Now both nodes need to be assigned pending range E-A,
// which we would be unable to represent with the old Map. The same thing happens
// even more obviously for any nodes that boot simultaneously between same two nodes.
//
// So, we made two changes:
//
// First, we changed pendingRanges to a <tt>Multimap<Range, InetAddress></tt> (now
// <tt>Map<String, Multimap<Range, InetAddress>></tt>, because replication strategy
// and options are per-KeySpace).
//
// Second, we added the bootstrapTokens and leavingEndpoints collections, so we can
// rebuild pendingRanges from the complete information of what is going on, when
// additional changes are made mid-operation.
//
// Finally, note that recording the tokens of joining nodes in bootstrapTokens also
// means we can detect and reject the addition of multiple nodes at the same token
// before one becomes part of the ring.
private final BiMultiValMap<Token, InetAddress> bootstrapTokens = new BiMultiValMap<Token, InetAddress>();
// (don't need to record Token here since it's still part of tokenToEndpointMap until it's done leaving)
private final Set<InetAddress> leavingEndpoints = new HashSet<InetAddress>();
// this is a cache of the calculation from {tokenToEndpointMap, bootstrapTokens, leavingEndpoints}
private final ConcurrentMap<String, Multimap<Range<Token>, InetAddress>> pendingRanges = new ConcurrentHashMap<String, Multimap<Range<Token>, InetAddress>>();
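    /*
     * Illustrative shape of pendingRanges (sketch of the structure described
     * above, with made-up values): per keyspace, a Multimap lets one range
     * carry several pending endpoints at once, which a plain
     * Map<Range, InetAddress> could not.
     *
     *   "Keyspace1" -> { (E,A] -> {B, D},  (A,C] -> {B, D},  (C,D] -> {D} }
     */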
// nodes which are migrating to the new tokens in the ring
private final Set<Pair<Token, InetAddress>> movingEndpoints = new HashSet<Pair<Token, InetAddress>>();
// tokens which are migrating to new endpoints
private final ConcurrentMap<Token, InetAddress> relocatingTokens = new ConcurrentHashMap<Token, InetAddress>();
/* Use this lock for manipulating the token map */
private final ReadWriteLock lock = new ReentrantReadWriteLock(true);
private volatile ArrayList<Token> sortedTokens;
private final Topology topology;
/* list of subscribers that are notified when the tokenToEndpointMap changed */
private final CopyOnWriteArrayList<AbstractReplicationStrategy> subscribers = new CopyOnWriteArrayList<AbstractReplicationStrategy>();
private static final Comparator<InetAddress> inetaddressCmp = new Comparator<InetAddress>()
{
public int compare(InetAddress o1, InetAddress o2)
{
return ByteBuffer.wrap(o1.getAddress()).compareTo(ByteBuffer.wrap(o2.getAddress()));
}
};
public TokenMetadata()
{
this(SortedBiMultiValMap.<Token, InetAddress>create(null, inetaddressCmp),
HashBiMap.<InetAddress, UUID>create(),
new Topology());
}
private TokenMetadata(BiMultiValMap<Token, InetAddress> tokenToEndpointMap, BiMap<InetAddress, UUID> endpointsMap, Topology topology)
{
this.tokenToEndpointMap = tokenToEndpointMap;
this.topology = topology;
endpointToHostIdMap = endpointsMap;
sortedTokens = sortTokens();
}
private ArrayList<Token> sortTokens()
{
return new ArrayList<Token>(tokenToEndpointMap.keySet());
}
/** @return the number of nodes bootstrapping into source's primary range */
public int pendingRangeChanges(InetAddress source)
{
int n = 0;
Collection<Range<Token>> sourceRanges = getPrimaryRangesFor(getTokens(source));
lock.readLock().lock();
try
{
for (Token token : bootstrapTokens.keySet())
for (Range<Token> range : sourceRanges)
if (range.contains(token))
n++;
}
finally
{
lock.readLock().unlock();
}
return n;
}
/**
* Update token map with a single token/endpoint pair in normal state.
*/
public void updateNormalToken(Token token, InetAddress endpoint)
{
updateNormalTokens(Collections.singleton(token), endpoint);
}
public void updateNormalTokens(Collection<Token> tokens, InetAddress endpoint)
{
Multimap<InetAddress, Token> endpointTokens = HashMultimap.create();
for (Token token : tokens)
endpointTokens.put(endpoint, token);
updateNormalTokens(endpointTokens);
}
/**
* Update token map with a set of token/endpoint pairs in normal state.
*
* Prefer this whenever there are multiple pairs to update, as each update (whether a single or multiple)
* is expensive (CASSANDRA-3831).
*
* @param endpointTokens
*/
public void updateNormalTokens(Multimap<InetAddress, Token> endpointTokens)
{
if (endpointTokens.isEmpty())
return;
lock.writeLock().lock();
try
{
boolean shouldSortTokens = false;
for (InetAddress endpoint : endpointTokens.keySet())
{
Collection<Token> tokens = endpointTokens.get(endpoint);
assert tokens != null && !tokens.isEmpty();
bootstrapTokens.removeValue(endpoint);
tokenToEndpointMap.removeValue(endpoint);
topology.addEndpoint(endpoint);
leavingEndpoints.remove(endpoint);
removeFromMoving(endpoint); // also removing this endpoint from moving
for (Token token : tokens)
{
InetAddress prev = tokenToEndpointMap.put(token, endpoint);
if (!endpoint.equals(prev))
{
if (prev != null)
logger.warn("Token " + token + " changing ownership from " + prev + " to " + endpoint);
shouldSortTokens = true;
}
}
}
if (shouldSortTokens)
sortedTokens = sortTokens();
}
finally
{
lock.writeLock().unlock();
}
}
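    /*
     * Illustrative batching sketch (not from the original source): collecting
     * all pairs into one Multimap pays the re-sort cost a single time, as the
     * Javadoc above recommends. The tokenMetadata variable is hypothetical.
     *
     *   Multimap<InetAddress, Token> batch = HashMultimap.create();
     *   batch.put(endpoint1, tokenA);
     *   batch.put(endpoint2, tokenB);
     *   tokenMetadata.updateNormalTokens(batch);
     */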
/**
* Store an end-point to host ID mapping. Each ID must be unique, and
* cannot be changed after the fact.
*
* @param hostId
* @param endpoint
*/
public void updateHostId(UUID hostId, InetAddress endpoint)
{
assert hostId != null;
assert endpoint != null;
lock.writeLock().lock();
try
{
InetAddress storedEp = endpointToHostIdMap.inverse().get(hostId);
if (storedEp != null)
{
if (!storedEp.equals(endpoint) && (FailureDetector.instance.isAlive(storedEp)))
{
throw new RuntimeException(String.format("Host ID collision between active endpoint %s and %s (id=%s)",
storedEp,
endpoint,
hostId));
}
}
UUID storedId = endpointToHostIdMap.get(endpoint);
if ((storedId != null) && (!storedId.equals(hostId)))
logger.warn("Changing {}'s host ID from {} to {}", endpoint, storedId, hostId);
endpointToHostIdMap.forcePut(endpoint, hostId);
}
finally
{
lock.writeLock().unlock();
}
}
/** Return the unique host ID for an end-point. */
public UUID getHostId(InetAddress endpoint)
{
lock.readLock().lock();
try
{
return endpointToHostIdMap.get(endpoint);
}
finally
{
lock.readLock().unlock();
}
}
/** Return the end-point for a unique host ID */
public InetAddress getEndpointForHostId(UUID hostId)
{
lock.readLock().lock();
try
{
return endpointToHostIdMap.inverse().get(hostId);
}
finally
{
lock.readLock().unlock();
}
}
/** @return a copy of the endpoint-to-id map for read-only operations */
public Map<InetAddress, UUID> getEndpointToHostIdMapForReading()
{
lock.readLock().lock();
try
{
Map<InetAddress, UUID> readMap = new HashMap<InetAddress, UUID>();
readMap.putAll(endpointToHostIdMap);
return readMap;
}
finally
{
lock.readLock().unlock();
}
}
@Deprecated
public void addBootstrapToken(Token token, InetAddress endpoint)
{
addBootstrapTokens(Collections.singleton(token), endpoint);
}
public void addBootstrapTokens(Collection<Token> tokens, InetAddress endpoint)
{
assert tokens != null && !tokens.isEmpty();
assert endpoint != null;
lock.writeLock().lock();
try
{
InetAddress oldEndpoint;
for (Token token : tokens)
{
oldEndpoint = bootstrapTokens.get(token);
if (oldEndpoint != null && !oldEndpoint.equals(endpoint))
throw new RuntimeException("Bootstrap Token collision between " + oldEndpoint + " and " + endpoint + " (token " + token);
oldEndpoint = tokenToEndpointMap.get(token);
if (oldEndpoint != null && !oldEndpoint.equals(endpoint))
throw new RuntimeException("Bootstrap Token collision between " + oldEndpoint + " and " + endpoint + " (token " + token);
}
bootstrapTokens.removeValue(endpoint);
for (Token token : tokens)
bootstrapTokens.put(token, endpoint);
}
finally
{
lock.writeLock().unlock();
}
}
public void removeBootstrapTokens(Collection<Token> tokens)
{
assert tokens != null && !tokens.isEmpty();
lock.writeLock().lock();
try
{
for (Token token : tokens)
bootstrapTokens.remove(token);
}
finally
{
lock.writeLock().unlock();
}
}
public void addLeavingEndpoint(InetAddress endpoint)
{
assert endpoint != null;
lock.writeLock().lock();
try
{
leavingEndpoints.add(endpoint);
}
finally
{
lock.writeLock().unlock();
}
}
/**
* Add a new moving endpoint
* @param token token which is node moving to
* @param endpoint address of the moving node
*/
public void addMovingEndpoint(Token token, InetAddress endpoint)
{
assert endpoint != null;
lock.writeLock().lock();
try
{
movingEndpoints.add(Pair.create(token, endpoint));
}
finally
{
lock.writeLock().unlock();
}
}
/**
* Add new relocating ranges (tokens moving from their respective endpoints, to another).
* @param tokens tokens being moved
* @param endpoint destination of moves
*/
public void addRelocatingTokens(Collection<Token> tokens, InetAddress endpoint)
{
assert endpoint != null;
assert tokens != null && tokens.size() > 0;
lock.writeLock().lock();
try
{
for (Token token : tokens)
{
InetAddress prev = relocatingTokens.put(token, endpoint);
if (prev != null && !prev.equals(endpoint))
logger.warn("Relocation of {} to {} overwrites previous to {}", new Object[]{token, endpoint, prev});
}
}
finally
{
lock.writeLock().unlock();
}
}
public void removeEndpoint(InetAddress endpoint)
{
assert endpoint != null;
lock.writeLock().lock();
try
{
bootstrapTokens.removeValue(endpoint);
tokenToEndpointMap.removeValue(endpoint);
topology.removeEndpoint(endpoint);
leavingEndpoints.remove(endpoint);
endpointToHostIdMap.remove(endpoint);
sortedTokens = sortTokens();
invalidateCaches();
}
finally
{
lock.writeLock().unlock();
}
}
/**
* Remove pair of token/address from moving endpoints
* @param endpoint address of the moving node
*/
public void removeFromMoving(InetAddress endpoint)
{
assert endpoint != null;
lock.writeLock().lock();
try
{
for (Pair<Token, InetAddress> pair : movingEndpoints)
{
if (pair.right.equals(endpoint))
{
movingEndpoints.remove(pair);
break;
}
}
invalidateCaches();
}
finally
{
lock.writeLock().unlock();
}
}
/**
* Remove pair of token/address from relocating ranges.
* @param endpoint
*/
public void removeFromRelocating(Token token, InetAddress endpoint)
{
assert endpoint != null;
assert token != null;
lock.writeLock().lock();
try
{
InetAddress previous = relocatingTokens.remove(token);
if (previous == null)
{
logger.debug("Cannot remove {}, not found among the relocating (previously removed?)", token);
}
else if (!previous.equals(endpoint))
{
logger.warn(
"Removal of relocating token {} with mismatched endpoint ({} != {})",
new Object[]{token, endpoint, previous});
}
}
finally
{
lock.writeLock().unlock();
}
}
public Collection<Token> getTokens(InetAddress endpoint)
{
assert endpoint != null;
assert isMember(endpoint); // don't want to return nulls
lock.readLock().lock();
try
{
return new ArrayList<Token>(tokenToEndpointMap.inverse().get(endpoint));
}
finally
{
lock.readLock().unlock();
}
}
@Deprecated
public Token getToken(InetAddress endpoint)
{
return getTokens(endpoint).iterator().next();
}
public boolean isMember(InetAddress endpoint)
{
assert endpoint != null;
lock.readLock().lock();
try
{
return tokenToEndpointMap.inverse().containsKey(endpoint);
}
finally
{
lock.readLock().unlock();
}
}
public boolean isLeaving(InetAddress endpoint)
{
assert endpoint != null;
lock.readLock().lock();
try
{
return leavingEndpoints.contains(endpoint);
}
finally
{
lock.readLock().unlock();
}
}
public boolean isMoving(InetAddress endpoint)
{
assert endpoint != null;
lock.readLock().lock();
try
{
for (Pair<Token, InetAddress> pair : movingEndpoints)
{
if (pair.right.equals(endpoint))
return true;
}
return false;
}
finally
{
lock.readLock().unlock();
}
}
public boolean isRelocating(Token token)
{
assert token != null;
lock.readLock().lock();
try
{
return relocatingTokens.containsKey(token);
}
finally
{
lock.readLock().unlock();
}
}
/**
* Create a copy of TokenMetadata with only tokenToEndpointMap. That is, pending ranges,
* bootstrap tokens and leaving endpoints are not included in the copy.
*/
public TokenMetadata cloneOnlyTokenMap()
{
lock.readLock().lock();
try
{
return new TokenMetadata(SortedBiMultiValMap.<Token, InetAddress>create(tokenToEndpointMap, null, inetaddressCmp),
HashBiMap.create(endpointToHostIdMap),
new Topology(topology));
}
finally
{
lock.readLock().unlock();
}
}
/**
* Create a copy of TokenMetadata with tokenToEndpointMap reflecting situation after all
* current leave operations have finished.
*
* @return new token metadata
*/
public TokenMetadata cloneAfterAllLeft()
{
lock.readLock().lock();
try
{
TokenMetadata allLeftMetadata = cloneOnlyTokenMap();
for (InetAddress endpoint : leavingEndpoints)
allLeftMetadata.removeEndpoint(endpoint);
return allLeftMetadata;
}
finally
{
lock.readLock().unlock();
}
}
/**
* Create a copy of TokenMetadata with tokenToEndpointMap reflecting situation after all
* current leave, move, and relocate operations have finished.
*
* @return new token metadata
*/
public TokenMetadata cloneAfterAllSettled()
{
lock.readLock().lock();
try
{
TokenMetadata metadata = cloneOnlyTokenMap();
for (InetAddress endpoint : leavingEndpoints)
metadata.removeEndpoint(endpoint);
for (Pair<Token, InetAddress> pair : movingEndpoints)
metadata.updateNormalToken(pair.left, pair.right);
for (Map.Entry<Token, InetAddress> relocating: relocatingTokens.entrySet())
metadata.updateNormalToken(relocating.getKey(), relocating.getValue());
return metadata;
}
finally
{
lock.readLock().unlock();
}
}
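/*
 * Illustrative sketch (not part of the original source): the three clone methods give
 * progressively more "settled" views of the ring. The variable names below are hypothetical;
 * the live TokenMetadata is obtained the same way it is elsewhere in this class.
 *
 *   TokenMetadata tm = StorageService.instance.getTokenMetadata();
 *   TokenMetadata current    = tm.cloneOnlyTokenMap();    // normal tokens only, no pending state
 *   TokenMetadata afterLeave = tm.cloneAfterAllLeft();    // leaving endpoints removed
 *   TokenMetadata settled    = tm.cloneAfterAllSettled(); // leaves, moves and relocations applied
 */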
public InetAddress getEndpoint(Token token)
{
lock.readLock().lock();
try
{
return tokenToEndpointMap.get(token);
}
finally
{
lock.readLock().unlock();
}
}
public Collection<Range<Token>> getPrimaryRangesFor(Collection<Token> tokens)
{
Collection<Range<Token>> ranges = new ArrayList<Range<Token>>(tokens.size());
for (Token right : tokens)
ranges.add(new Range<Token>(getPredecessor(right), right));
return ranges;
}
@Deprecated
public Range<Token> getPrimaryRangeFor(Token right)
{
return getPrimaryRangesFor(Arrays.asList(right)).iterator().next();
}
public ArrayList<Token> sortedTokens()
{
return sortedTokens;
}
private Multimap<Range<Token>, InetAddress> getPendingRangesMM(String table)
{
Multimap<Range<Token>, InetAddress> map = pendingRanges.get(table);
if (map == null)
{
map = HashMultimap.create();
Multimap<Range<Token>, InetAddress> priorMap = pendingRanges.putIfAbsent(table, map);
if (priorMap != null)
map = priorMap;
}
return map;
}
/** a mutable map may be returned but caller should not modify it */
public Map<Range<Token>, Collection<InetAddress>> getPendingRanges(String table)
{
return getPendingRangesMM(table).asMap();
}
public List<Range<Token>> getPendingRanges(String table, InetAddress endpoint)
{
List<Range<Token>> ranges = new ArrayList<Range<Token>>();
for (Map.Entry<Range<Token>, InetAddress> entry : getPendingRangesMM(table).entries())
{
if (entry.getValue().equals(endpoint))
{
ranges.add(entry.getKey());
}
}
return ranges;
}
public void setPendingRanges(String table, Multimap<Range<Token>, InetAddress> rangeMap)
{
pendingRanges.put(table, rangeMap);
}
public Token getPredecessor(Token token)
{
List tokens = sortedTokens();
int index = Collections.binarySearch(tokens, token);
assert index >= 0 : token + " not found in " + StringUtils.join(tokenToEndpointMap.keySet(), ", ");
return (Token) (index == 0 ? tokens.get(tokens.size() - 1) : tokens.get(index - 1));
}
public Token getSuccessor(Token token)
{
List tokens = sortedTokens();
int index = Collections.binarySearch(tokens, token);
assert index >= 0 : token + " not found in " + StringUtils.join(tokenToEndpointMap.keySet(), ", ");
return (Token) ((index == (tokens.size() - 1)) ? tokens.get(0) : tokens.get(index + 1));
}
/** @return a copy of the bootstrapping tokens map */
public BiMultiValMap<Token, InetAddress> getBootstrapTokens()
{
lock.readLock().lock();
try
{
return new BiMultiValMap<Token, InetAddress>(bootstrapTokens);
}
finally
{
lock.readLock().unlock();
}
}
public Set<InetAddress> getAllEndpoints()
{
lock.readLock().lock();
try
{
return ImmutableSet.copyOf(endpointToHostIdMap.keySet());
}
finally
{
lock.readLock().unlock();
}
}
/** @return an immutable copy of the leaving endpoints */
public Set<InetAddress> getLeavingEndpoints()
{
lock.readLock().lock();
try
{
return ImmutableSet.copyOf(leavingEndpoints);
}
finally
{
lock.readLock().unlock();
}
}
/**
 * Endpoints which are migrating to new tokens.
 * @return set of token/address pairs for the moving endpoints
*/
public Set<Pair<Token, InetAddress>> getMovingEndpoints()
{
lock.readLock().lock();
try
{
return ImmutableSet.copyOf(movingEndpoints);
}
finally
{
lock.readLock().unlock();
}
}
/**
 * Tokens which are being relocated to new endpoints.
 * @return map from relocating token to its destination endpoint (the live map; callers should not modify it)
*/
public Map<Token, InetAddress> getRelocatingRanges()
{
return relocatingTokens;
}
public static int firstTokenIndex(final ArrayList ring, Token start, boolean insertMin)
{
assert ring.size() > 0;
// insert the minimum token (at index == -1) if we were asked to include it and it isn't a member of the ring
int i = Collections.binarySearch(ring, start);
if (i < 0)
{
i = (i + 1) * (-1);
if (i >= ring.size())
i = insertMin ? -1 : 0;
}
return i;
}
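/*
 * Worked example (added for illustration, not in the original source), for a ring [10, 20, 30]:
 *
 *   firstTokenIndex(ring, 25, false): binarySearch returns -3 (insertion point 2),
 *       so i = (-3 + 1) * -1 = 2, selecting token 30 (the first token >= 25).
 *   firstTokenIndex(ring, 35, false): binarySearch returns -4, i = 3 >= ring.size(),
 *       so the index wraps to 0 (token 10).
 *   firstTokenIndex(ring, 35, true): as above, but returns -1, which callers such as
 *       ringIterator interpret as "emit the partitioner's minimum token first".
 */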
public static Token firstToken(final ArrayList<Token> ring, Token start)
{
return ring.get(firstTokenIndex(ring, start, false));
}
/**
* iterator over the Tokens in the given ring, starting with the token for the node owning start
* (which does not have to be a Token in the ring)
* @param includeMin True if the minimum token should be returned in the ring even if it has no owner.
*/
public static Iterator<Token> ringIterator(final ArrayList<Token> ring, Token start, boolean includeMin)
{
if (ring.isEmpty())
return includeMin ? Iterators.singletonIterator(StorageService.getPartitioner().getMinimumToken())
: Iterators.<Token>emptyIterator();
final boolean insertMin = includeMin && !ring.get(0).isMinimum();
final int startIndex = firstTokenIndex(ring, start, insertMin);
return new AbstractIterator<Token>()
{
int j = startIndex;
protected Token computeNext()
{
if (j < -1)
return endOfData();
try
{
// return minimum for index == -1
if (j == -1)
return StorageService.getPartitioner().getMinimumToken();
// return ring token for other indexes
return ring.get(j);
}
finally
{
j++;
if (j == ring.size())
j = insertMin ? -1 : 0;
if (j == startIndex)
// end iteration
j = -2;
}
}
};
}
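/*
 * Usage sketch (added for illustration, not in the original source). The variables metadata and
 * searchToken and the process(...) call are hypothetical.
 *
 *   ArrayList<Token> ring = metadata.sortedTokens();
 *   Iterator<Token> iter = TokenMetadata.ringIterator(ring, searchToken, false);
 *   while (iter.hasNext())
 *       process(iter.next()); // visits each ring token once, in ring order from searchToken's owner
 */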
/** used by tests */
public void clearUnsafe()
{
bootstrapTokens.clear();
tokenToEndpointMap.clear();
topology.clear();
leavingEndpoints.clear();
pendingRanges.clear();
endpointToHostIdMap.clear();
invalidateCaches();
}
public String toString()
{
StringBuilder sb = new StringBuilder();
lock.readLock().lock();
try
{
Set<InetAddress> eps = tokenToEndpointMap.inverse().keySet();
if (!eps.isEmpty())
{
sb.append("Normal Tokens:");
sb.append(System.getProperty("line.separator"));
for (InetAddress ep : eps)
{
sb.append(ep);
sb.append(":");
sb.append(tokenToEndpointMap.inverse().get(ep));
sb.append(System.getProperty("line.separator"));
}
}
if (!bootstrapTokens.isEmpty())
{
sb.append("Bootstrapping Tokens:" );
sb.append(System.getProperty("line.separator"));
for (Map.Entry<Token, InetAddress> entry : bootstrapTokens.entrySet())
{
sb.append(entry.getValue()).append(":").append(entry.getKey());
sb.append(System.getProperty("line.separator"));
}
}
if (!leavingEndpoints.isEmpty())
{
sb.append("Leaving Endpoints:");
sb.append(System.getProperty("line.separator"));
for (InetAddress ep : leavingEndpoints)
{
sb.append(ep);
sb.append(System.getProperty("line.separator"));
}
}
if (!pendingRanges.isEmpty())
{
sb.append("Pending Ranges:");
sb.append(System.getProperty("line.separator"));
sb.append(printPendingRanges());
}
}
finally
{
lock.readLock().unlock();
}
return sb.toString();
}
public String printPendingRanges()
{
StringBuilder sb = new StringBuilder();
for (Map.Entry<String, Multimap<Range<Token>, InetAddress>> entry : pendingRanges.entrySet())
{
for (Map.Entry<Range<Token>, InetAddress> rmap : entry.getValue().entries())
{
sb.append(rmap.getValue()).append(":").append(rmap.getKey());
sb.append(System.getProperty("line.separator"));
}
}
return sb.toString();
}
public String printRelocatingRanges()
{
StringBuilder sb = new StringBuilder();
for (Map.Entry<Token, InetAddress> entry : relocatingTokens.entrySet())
sb.append(String.format("%s:%s%n", entry.getKey(), entry.getValue()));
return sb.toString();
}
public void invalidateCaches()
{
for (AbstractReplicationStrategy subscriber : subscribers)
{
subscriber.invalidateCachedTokenEndpointValues();
}
}
public void register(AbstractReplicationStrategy subscriber)
{
subscribers.add(subscriber);
}
public void unregister(AbstractReplicationStrategy subscriber)
{
subscribers.remove(subscriber);
}
public Collection<InetAddress> pendingEndpointsFor(Token token, String table)
{
Map<Range<Token>, Collection<InetAddress>> ranges = getPendingRanges(table);
if (ranges.isEmpty())
return Collections.emptyList();
Set<InetAddress> endpoints = new HashSet<InetAddress>();
for (Map.Entry<Range<Token>, Collection<InetAddress>> entry : ranges.entrySet())
{
if (entry.getKey().contains(token))
endpoints.addAll(entry.getValue());
}
return endpoints;
}
/**
* @deprecated retained for benefit of old tests
*/
public Collection<InetAddress> getWriteEndpoints(Token token, String table, Collection<InetAddress> naturalEndpoints)
{
return ImmutableList.copyOf(Iterables.concat(naturalEndpoints, pendingEndpointsFor(token, table)));
}
/** @return an endpoint to token multimap representation of tokenToEndpointMap (a copy) */
public Multimap<InetAddress, Token> getEndpointToTokenMapForReading()
{
lock.readLock().lock();
try
{
Multimap<InetAddress, Token> cloned = HashMultimap.create();
for (Map.Entry<Token, InetAddress> entry : tokenToEndpointMap.entrySet())
cloned.put(entry.getValue(), entry.getKey());
return cloned;
}
finally
{
lock.readLock().unlock();
}
}
/**
* @return a (stable copy, won't be modified) Token to Endpoint map for all the normal and bootstrapping nodes
* in the cluster.
*/
public Map<Token, InetAddress> getNormalAndBootstrappingTokenToEndpointMap()
{
lock.readLock().lock();
try
{
Map<Token, InetAddress> map = new HashMap<Token, InetAddress>(tokenToEndpointMap.size() + bootstrapTokens.size());
map.putAll(tokenToEndpointMap);
map.putAll(bootstrapTokens);
return map;
}
finally
{
lock.readLock().unlock();
}
}
/**
* @return the Topology map of nodes to DCs + Racks
*
* This is only allowed when a copy has been made of TokenMetadata, to avoid concurrent modifications
* when Topology methods are subsequently used by the caller.
*/
public Topology getTopology()
{
assert this != StorageService.instance.getTokenMetadata();
return topology;
}
/**
* Tracks the assignment of racks and endpoints in each datacenter for all the "normal" endpoints
* in this TokenMetadata. This allows faster calculation of endpoints in NetworkTopologyStrategy.
*/
public static class Topology
{
/** multi-map of DC to endpoints in that DC */
private final Multimap<String, InetAddress> dcEndpoints;
/** map of DC to multi-map of rack to endpoints in that rack */
private final Map<String, Multimap<String, InetAddress>> dcRacks;
/** reverse-lookup map for endpoint to current known dc/rack assignment */
private final Map<InetAddress, Pair<String, String>> currentLocations;
protected Topology()
{
dcEndpoints = HashMultimap.create();
dcRacks = new HashMap<String, Multimap<String, InetAddress>>();
currentLocations = new HashMap<InetAddress, Pair<String, String>>();
}
protected void clear()
{
dcEndpoints.clear();
dcRacks.clear();
currentLocations.clear();
}
/**
 * Construct a deep copy of other.
*/
protected Topology(Topology other)
{
dcEndpoints = HashMultimap.create(other.dcEndpoints);
dcRacks = new HashMap<String, Multimap<String, InetAddress>>();
for (String dc : other.dcRacks.keySet())
dcRacks.put(dc, HashMultimap.create(other.dcRacks.get(dc)));
currentLocations = new HashMap<InetAddress, Pair<String, String>>(other.currentLocations);
}
/**
* Stores current DC/rack assignment for ep
*/
protected void addEndpoint(InetAddress ep)
{
IEndpointSnitch snitch = DatabaseDescriptor.getEndpointSnitch();
String dc = snitch.getDatacenter(ep);
String rack = snitch.getRack(ep);
Pair<String, String> current = currentLocations.get(ep);
if (current != null)
{
if (current.left.equals(dc) && current.right.equals(rack))
return;
dcRacks.get(current.left).remove(current.right, ep);
dcEndpoints.remove(current.left, ep);
}
dcEndpoints.put(dc, ep);
if (!dcRacks.containsKey(dc))
dcRacks.put(dc, HashMultimap.<String, InetAddress>create());
dcRacks.get(dc).put(rack, ep);
currentLocations.put(ep, Pair.create(dc, rack));
}
/**
* Removes current DC/rack assignment for ep
*/
protected void removeEndpoint(InetAddress ep)
{
if (!currentLocations.containsKey(ep))
return;
Pair<String, String> current = currentLocations.remove(ep);
dcEndpoints.remove(current.left, ep);
dcRacks.get(current.left).remove(current.right, ep);
}
/**
* @return multi-map of DC to endpoints in that DC
*/
public Multimap<String, InetAddress> getDatacenterEndpoints()
{
return dcEndpoints;
}
/**
* @return map of DC to multi-map of rack to endpoints in that rack
*/
public Map<String, Multimap<String, InetAddress>> getDatacenterRacks()
{
return dcRacks;
}
}
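/*
 * Usage sketch (added for illustration, not in the original source): a replication strategy can
 * consult a Topology copy instead of re-querying the snitch for every endpoint. The datacenter
 * name "DC1" and the variable names are hypothetical.
 *
 *   Topology topology = tokenMetadata.cloneOnlyTokenMap().getTopology();
 *   Multimap<String, InetAddress> dcEndpoints = topology.getDatacenterEndpoints();
 *   Multimap<String, InetAddress> racksInDc1 = topology.getDatacenterRacks().get("DC1");
 */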
}
|
|
/*
* Copyright (c) 2016-2017, Salesforce.com, Inc.
* All rights reserved.
* Licensed under the BSD 3-Clause license.
* For full license text, see the LICENSE.txt file in repo root
* or https://opensource.org/licenses/BSD-3-Clause
*/
package com.salesforce.pyplyn.duct.etl.extract.refocus;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.*;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.util.Collections;
import java.util.List;
import org.mockito.ArgumentCaptor;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import com.salesforce.pyplyn.client.UnauthorizedException;
import com.salesforce.pyplyn.duct.com.salesforce.pyplyn.test.AppBootstrapFixtures;
import com.salesforce.pyplyn.duct.etl.configuration.ConfigurationUpdateManager;
import com.salesforce.pyplyn.model.Transmutation;
import com.salesforce.pyplyn.status.MeterType;
import com.salesforce.refocus.model.ImmutableSample;
import com.salesforce.refocus.model.Sample;
/**
 * Tests for {@link RefocusExtractProcessor}.
*
* @author Mihai Bojin <[email protected]>
* @since 5.0
*/
public class RefocusExtractProcessorTest {
AppBootstrapFixtures fixtures;
@BeforeMethod
public void setUp() throws Exception {
// ARRANGE
fixtures = new AppBootstrapFixtures();
}
@Test
public void testProcessShouldFailWhenClientIsMissing() throws Exception {
// ARRANGE
// bootstrap
fixtures.appConfigMocks()
.runOnce();
fixtures.oneRefocusToRefocusConfiguration()
.callRealRefocusExtractProcessor()
.refocusClientCanNotAuth()
.initializeFixtures();
// init app
ConfigurationUpdateManager manager = fixtures.configurationManager();
// ACT
manager.run();
fixtures.awaitUntilAllTasksHaveBeenProcessed(true);
// ASSERT
// since the Refocus client cannot authenticate, expect RefocusExtractProcessor to have logged a failure
verify(fixtures.systemStatus(), times(1)).meter("Refocus", MeterType.ExtractFailure);
}
@Test
public void testDefaultValueProvidedOnTimeout() throws Exception {
// ARRANGE
Sample timedOutValue = ImmutableSample.builder()
.name("subject|aspect")
.updatedAt(ZonedDateTime.now(ZoneOffset.UTC).toString())
.value(RefocusExtractProcessor.RESPONSE_TIMEOUT)
.build();
// bootstrap
fixtures.appConfigMocks()
.runOnce();
fixtures.oneRefocusToRefocusConfigurationWithDefaultValue(1.2d)
.callRealRefocusExtractProcessor()
.refocusClientReturns(Collections.singletonList(timedOutValue))
.initializeFixtures();
// init app
ConfigurationUpdateManager manager = fixtures.configurationManager();
// ACT
manager.run();
fixtures.awaitUntilAllTasksHaveBeenProcessed(true);
// ASSERT
// expect no failure: the timed-out sample should be replaced with the configured default value
verify(fixtures.systemStatus(), times(0)).meter("Refocus", MeterType.ExtractFailure);
verify(fixtures.systemStatus(), times(0)).meter("Refocus", MeterType.ExtractNoDataReturned);
@SuppressWarnings("unchecked")
ArgumentCaptor<List<Transmutation>> dataCaptor = ArgumentCaptor.forClass(List.class);
verify(fixtures.refocusLoadProcessor()).executeAsync(dataCaptor.capture(), any());
List<Transmutation> data = dataCaptor.getValue();
assertThat(data, hasSize(1));
Transmutation result = data.get(0);
assertThat(result.name(), equalTo("subject|aspect"));
assertThat(result.value(), equalTo(1.2d));
assertThat(result.originalValue(), equalTo(1.2d));
assertThat(result.metadata().messages(), hasItem(containsString("Default value")));
}
@Test
public void testSamplesAreCached() throws Exception {
// ARRANGE
// create a sample
Sample sample = ImmutableSample.builder()
.name("subject|aspect")
.updatedAt(ZonedDateTime.now(ZoneOffset.UTC).toString())
.value("1.2")
.build();
// bootstrap
fixtures.appConfigMocks()
.runOnce();
fixtures.oneRefocusToRefocusConfigurationWithCache()
.realSampleCache()
.callRealRefocusExtractProcessor()
.refocusClientReturns(Collections.singletonList(sample))
.initializeFixtures();
// init app
ConfigurationUpdateManager manager = fixtures.configurationManager();
manager.run();
fixtures.awaitUntilAllTasksHaveBeenProcessed(false);
// ACT
manager = fixtures.initConfigurationManager().configurationManager();
manager.run();
fixtures.awaitUntilAllTasksHaveBeenProcessed(true);
// ASSERT
// expect both runs to succeed, with the sample cached after the first run and served from cache on the second
verify(fixtures.systemStatus(), times(2)).meter("Refocus", MeterType.ExtractSuccess);
verify(fixtures.sampleCache(), times(2)).isCached("subject|aspect");
verify(fixtures.sampleCache(), times(1)).cache(any(), anyLong());
}
@Test
public void testTimedOutSamplesAreNotCached() throws Exception {
// ARRANGE
// create a sample
Sample sample = ImmutableSample.builder()
.name("subject|aspect")
.updatedAt(ZonedDateTime.now(ZoneOffset.UTC).toString())
.value(RefocusExtractProcessor.RESPONSE_TIMEOUT)
.build();
// bootstrap
fixtures.appConfigMocks()
.runOnce();
fixtures.oneRefocusToRefocusConfigurationWithCache()
.realSampleCache()
.callRealRefocusExtractProcessor()
.refocusClientReturns(Collections.singletonList(sample))
.initializeFixtures();
// init app
ConfigurationUpdateManager manager = fixtures.configurationManager();
// ACT
manager.run();
fixtures.awaitUntilAllTasksHaveBeenProcessed(true);
// ASSERT
// expect the cache to be consulted, but the timed-out sample must not be cached
verify(fixtures.sampleCache(), times(1)).isCached("subject|aspect");
verify(fixtures.sampleCache(), times(0)).cache(any(), anyLong());
}
@Test
public void testProcessShouldFailWhenDataInvalid() throws Exception {
// create a sample
Sample badSample = ImmutableSample.builder()
.name("subject|aspect")
.updatedAt("INVALID_DATE")
.build();
assertFailureWithSample(badSample);
}
@Test
public void testProcessShouldFailWithTimedOutSample() throws Exception {
// create a sample
Sample badSample = ImmutableSample.builder()
.name("subject|aspect")
.updatedAt(ZonedDateTime.now(ZoneOffset.UTC).toString())
.value(RefocusExtractProcessor.RESPONSE_TIMEOUT)
.build();
assertFailureWithSample(badSample);
}
@Test
public void testProcessShouldFailWithInvalidSampleValue() throws Exception {
// create a sample
Sample badSample = ImmutableSample.builder()
.name("subject|aspect")
.updatedAt(ZonedDateTime.now(ZoneOffset.UTC).toString())
.value("INVALID")
.build();
assertFailureWithSample(badSample);
}
@Test
public void testProcessShouldFailWhenGetSampleThrowsException() throws Exception {
// ARRANGE
// bootstrap
fixtures.appConfigMocks()
.runOnce();
fixtures.oneRefocusToRefocusConfiguration()
.callRealRefocusExtractProcessor()
.refocusClientThrowsExceptionOnGetSample()
.initializeFixtures();
// init app
ConfigurationUpdateManager manager = fixtures.configurationManager();
// ACT
manager.run();
fixtures.awaitUntilAllTasksHaveBeenProcessed(true);
// ASSERT
// since getSample throws an exception, expect RefocusExtractProcessor to have logged a failure
verify(fixtures.systemStatus(), times(1)).meter("Refocus", MeterType.ExtractFailure);
}
/**
 * Executes a test that expects a failure when a bad sample is returned from the endpoint
*/
private void assertFailureWithSample(Sample badSample) throws UnauthorizedException, InterruptedException {
// ARRANGE
// bootstrap
fixtures.appConfigMocks()
.runOnce();
fixtures.oneRefocusToRefocusConfiguration()
.callRealRefocusExtractProcessor()
.refocusClientReturns(Collections.singletonList(badSample))
.initializeFixtures();
// init app
ConfigurationUpdateManager manager = fixtures.configurationManager();
// ACT
manager.run();
fixtures.awaitUntilAllTasksHaveBeenProcessed(true);
// ASSERT
// since the returned sample is unusable, expect a logged failure and a no-data meter update
verify(fixtures.systemStatus(), times(1)).meter("Refocus", MeterType.ExtractFailure);
verify(fixtures.systemStatus(), times(1)).meter("Refocus", MeterType.ExtractNoDataReturned);
}
}
|
|
/*
* Copyright (C) 2015 Basil Miller
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package college.wyk.app.commons;
import android.animation.Animator;
import android.animation.AnimatorListenerAdapter;
import android.animation.ArgbEvaluator;
import android.animation.ValueAnimator;
import android.annotation.SuppressLint;
import android.content.Context;
import android.content.res.Configuration;
import android.content.res.TypedArray;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Rect;
import android.graphics.RectF;
import android.graphics.Typeface;
import android.os.Parcel;
import android.os.Parcelable;
import android.support.v4.view.ViewCompat;
import android.support.v4.view.ViewPager;
import android.text.TextPaint;
import android.text.TextUtils;
import android.util.AttributeSet;
import android.view.MotionEvent;
import android.view.View;
import android.view.animation.AccelerateDecelerateInterpolator;
import android.view.animation.Interpolator;
import android.view.animation.LinearInterpolator;
import android.widget.Scroller;
import java.lang.reflect.Field;
import java.util.Arrays;
import java.util.Random;
import college.wyk.app.R;
/**
* Created by GIGAMOLE on 24.03.2016.
*/
@SuppressWarnings("unused")
public class NavigationTabStrip extends View implements ViewPager.OnPageChangeListener {
// NTS constants
private final static int HIGH_QUALITY_FLAGS = Paint.ANTI_ALIAS_FLAG | Paint.DITHER_FLAG;
private final static String PREVIEW_TITLE = "Title";
private final static int INVALID_INDEX = -1;
// Default variables
private final static int DEFAULT_ANIMATION_DURATION = 350;
private final static float DEFAULT_STRIP_FACTOR = 2.5F;
private final static float DEFAULT_STRIP_WEIGHT = 10.0F;
private final static float DEFAULT_CORNER_RADIUS = 5.0F;
private final static int DEFAULT_INACTIVE_COLOR = Color.GRAY;
private final static int DEFAULT_ACTIVE_COLOR = Color.WHITE;
private final static int DEFAULT_STRIP_COLOR = Color.RED;
private final static int DEFAULT_TITLE_SIZE = 0;
// Title size as a fraction of the view height
private final static float TITLE_SIZE_FRACTION = 0.35F;
// Max and min fraction
private final static float MIN_FRACTION = 0.0F;
private final static float MAX_FRACTION = 1.0F;
// NTS and strip bounds
private final RectF mBounds = new RectF();
private final RectF mStripBounds = new RectF();
private final Rect mTitleBounds = new Rect();
// Main paint
private final Paint mStripPaint = new Paint(HIGH_QUALITY_FLAGS) {
{
setStyle(Style.FILL);
}
};
// Paint for tab titles
private final Paint mTitlePaint = new TextPaint(HIGH_QUALITY_FLAGS) {
{
setTextAlign(Align.CENTER);
setTypeface(Typeface.create(Typeface.DEFAULT, Typeface.BOLD));
}
};
// Variables for animator
private final ValueAnimator mAnimator = new ValueAnimator();
private final ArgbEvaluator mColorEvaluator = new ArgbEvaluator();
private final ResizeInterpolator mResizeInterpolator = new ResizeInterpolator();
private int mAnimationDuration;
// NTS titles
private String[] mTitles;
// Variables for ViewPager
private ViewPager mViewPager;
private ViewPager.OnPageChangeListener mOnPageChangeListener;
private int mScrollState;
// Tab listener
private OnTabStripSelectedIndexListener mOnTabStripSelectedIndexListener;
private ValueAnimator.AnimatorListener mAnimatorListener;
// Variables for sizes
private float mTabSize;
// Tab title size and margin
private float mTitleSize;
// Strip type and gravity
private StripType mStripType;
private StripGravity mStripGravity;
// Corners radius for rect mode
private float mStripWeight;
private float mCornersRadius;
// Indexes
private int mLastIndex = INVALID_INDEX;
private int mIndex = INVALID_INDEX;
// General fraction value
private float mFraction;
// Coordinates of strip
private float mStartStripX;
private float mEndStripX;
private float mStripLeft;
private float mStripRight;
// Whether the strip runs standalone (bar mode) or as a ViewPager indicator
private boolean mIsViewPagerMode;
// Whether the selection is moving towards a lower index (controls which strip edge leads the resize)
private boolean mIsResizeIn;
// Detect if we get action down event
private boolean mIsActionDown;
// Whether the down event landed on the currently selected tab
private boolean mIsTabActionDown;
// Whether the index was set from the tab bar rather than from the ViewPager
private boolean mIsSetIndexFromTabBar;
// Color variables
private int mInactiveColor;
private int mActiveColor;
// Custom typeface
private Typeface mTypeface;
public NavigationTabStrip(final Context context) {
this(context, null);
}
public NavigationTabStrip(final Context context, final AttributeSet attrs) {
this(context, attrs, 0);
}
public NavigationTabStrip(final Context context, final AttributeSet attrs, final int defStyleAttr) {
super(context, attrs, defStyleAttr);
// Init NTS
// Always draw
setWillNotDraw(false);
// Speed and fix for pre 17 API
ViewCompat.setLayerType(this, ViewCompat.LAYER_TYPE_SOFTWARE, null);
setLayerType(LAYER_TYPE_SOFTWARE, null);
final TypedArray typedArray = context.obtainStyledAttributes(attrs, R.styleable.NavigationTabStrip);
try {
setStripColor(
typedArray.getColor(R.styleable.NavigationTabStrip_nts_color, DEFAULT_STRIP_COLOR)
);
setTitleSize(
typedArray.getDimension(R.styleable.NavigationTabStrip_nts_size, DEFAULT_TITLE_SIZE)
);
setStripWeight(
typedArray.getDimension(R.styleable.NavigationTabStrip_nts_weight, DEFAULT_STRIP_WEIGHT)
);
setStripFactor(
typedArray.getFloat(R.styleable.NavigationTabStrip_nts_factor, DEFAULT_STRIP_FACTOR)
);
setStripType(
typedArray.getInt(R.styleable.NavigationTabStrip_nts_type, StripType.LINE_INDEX)
);
setStripGravity(
typedArray.getInt(R.styleable.NavigationTabStrip_nts_gravity, StripGravity.BOTTOM_INDEX)
);
// setTypeface(typedArray.getString(R.styleable.NavigationTabStrip_nts_typeface));
setTypeface(Typeface.create(Typeface.DEFAULT, Typeface.BOLD));
setInactiveColor(
typedArray.getColor(
R.styleable.NavigationTabStrip_nts_inactive_color, DEFAULT_INACTIVE_COLOR
)
);
setActiveColor(
typedArray.getColor(
R.styleable.NavigationTabStrip_nts_active_color, DEFAULT_ACTIVE_COLOR
)
);
setAnimationDuration(
typedArray.getInteger(
R.styleable.NavigationTabStrip_nts_animation_duration, DEFAULT_ANIMATION_DURATION
)
);
setCornersRadius(
typedArray.getDimension(
R.styleable.NavigationTabStrip_nts_corners_radius, DEFAULT_CORNER_RADIUS
)
);
// Get titles
String[] titles = null;
try {
final int titlesResId = typedArray.getResourceId(
R.styleable.NavigationTabStrip_nts_titles, 0
);
titles = titlesResId == 0 ? null :
typedArray.getResources().getStringArray(titlesResId);
} catch (Exception exception) {
titles = null;
exception.printStackTrace();
} finally {
if (titles == null) {
if (isInEditMode()) {
titles = new String[new Random().nextInt(5) + 1];
Arrays.fill(titles, PREVIEW_TITLE);
} else titles = new String[0];
}
setTitles(titles);
}
// Init animator
mAnimator.setFloatValues(MIN_FRACTION, MAX_FRACTION);
mAnimator.setInterpolator(new LinearInterpolator());
mAnimator.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() {
@Override
public void onAnimationUpdate(final ValueAnimator animation) {
updateIndicatorPosition((Float) animation.getAnimatedValue());
}
});
} finally {
typedArray.recycle();
}
}
public int getAnimationDuration() {
return mAnimationDuration;
}
public void setAnimationDuration(final int animationDuration) {
mAnimationDuration = animationDuration;
mAnimator.setDuration(mAnimationDuration);
resetScroller();
}
public String[] getTitles() {
return mTitles;
}
public void setTitles(final String... titles) {
for (int i = 0; i < titles.length; i++) titles[i] = titles[i].toUpperCase();
mTitles = titles;
requestLayout();
}
public void setTitles(final int... titleResIds) {
final String[] titles = new String[titleResIds.length];
for (int i = 0; i < titleResIds.length; i++)
titles[i] = getResources().getString(titleResIds[i]);
setTitles(titles);
}
public int getStripColor() {
return mStripPaint.getColor();
}
public void setStripColor(final int color) {
mStripPaint.setColor(color);
postInvalidate();
}
public void setStripWeight(final float stripWeight) {
mStripWeight = stripWeight;
requestLayout();
}
public StripGravity getStripGravity() {
return mStripGravity;
}
public void setStripGravity(final StripGravity stripGravity) {
mStripGravity = stripGravity;
requestLayout();
}
private void setStripGravity(final int index) {
switch (index) {
case StripGravity.TOP_INDEX:
setStripGravity(StripGravity.TOP);
break;
case StripGravity.BOTTOM_INDEX:
default:
setStripGravity(StripGravity.BOTTOM);
break;
}
}
public StripType getStripType() {
return mStripType;
}
public void setStripType(final StripType stripType) {
mStripType = stripType;
requestLayout();
}
private void setStripType(final int index) {
switch (index) {
case StripType.POINT_INDEX:
setStripType(StripType.POINT);
break;
case StripType.LINE_INDEX:
default:
setStripType(StripType.LINE);
break;
}
}
public float getStripFactor() {
return mResizeInterpolator.getFactor();
}
public void setStripFactor(final float factor) {
mResizeInterpolator.setFactor(factor);
}
public Typeface getTypeface() {
return mTypeface;
}
public void setTypeface(final Typeface typeface) {
mTypeface = typeface;
mTitlePaint.setTypeface(typeface);
postInvalidate();
}
public void setTypeface(final String typeface) {
if (TextUtils.isEmpty(typeface)) return;
Typeface tempTypeface;
try {
tempTypeface = Typeface.createFromAsset(getContext().getAssets(), typeface);
} catch (Exception e) {
tempTypeface = Typeface.create(Typeface.DEFAULT, Typeface.BOLD);
e.printStackTrace();
}
setTypeface(tempTypeface);
}
public int getActiveColor() {
return mActiveColor;
}
public void setActiveColor(final int activeColor) {
mActiveColor = activeColor;
postInvalidate();
}
public int getInactiveColor() {
return mInactiveColor;
}
public void setInactiveColor(final int inactiveColor) {
mInactiveColor = inactiveColor;
postInvalidate();
}
public float getCornersRadius() {
return mCornersRadius;
}
public void setCornersRadius(final float cornersRadius) {
mCornersRadius = cornersRadius;
postInvalidate();
}
public float getTitleSize() {
return mTitleSize;
}
public void setTitleSize(final float titleSize) {
mTitleSize = titleSize;
mTitlePaint.setTextSize(titleSize);
postInvalidate();
}
public OnTabStripSelectedIndexListener getOnTabStripSelectedIndexListener() {
return mOnTabStripSelectedIndexListener;
}
// Set the selected-index listener, which is notified when a tab selection starts and ends
public void setOnTabStripSelectedIndexListener(final OnTabStripSelectedIndexListener onTabStripSelectedIndexListener) {
mOnTabStripSelectedIndexListener = onTabStripSelectedIndexListener;
if (mAnimatorListener == null)
mAnimatorListener = new AnimatorListenerAdapter() {
@Override
public void onAnimationStart(final Animator animation) {
if (mOnTabStripSelectedIndexListener != null)
mOnTabStripSelectedIndexListener.onStartTabSelected(mTitles[mIndex], mIndex);
animation.removeListener(this);
animation.addListener(this);
}
@Override
public void onAnimationEnd(final Animator animation) {
if (mIsViewPagerMode) return;
animation.removeListener(this);
animation.addListener(this);
if (mOnTabStripSelectedIndexListener != null)
mOnTabStripSelectedIndexListener.onEndTabSelected(mTitles[mIndex], mIndex);
}
};
mAnimator.removeListener(mAnimatorListener);
mAnimator.addListener(mAnimatorListener);
}
public void setViewPager(final ViewPager viewPager) {
// Detect whether ViewPager mode
if (viewPager == null) {
mIsViewPagerMode = false;
return;
}
if (viewPager.equals(mViewPager)) return;
if (mViewPager != null) //noinspection deprecation
mViewPager.setOnPageChangeListener(null);
if (viewPager.getAdapter() == null)
throw new IllegalStateException("ViewPager does not provide adapter instance.");
mIsViewPagerMode = true;
mViewPager = viewPager;
mViewPager.addOnPageChangeListener(this);
resetScroller();
postInvalidate();
}
public void setViewPager(final ViewPager viewPager, int index) {
setViewPager(viewPager);
mIndex = index;
if (mIsViewPagerMode) mViewPager.setCurrentItem(index, true);
postInvalidate();
}
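/*
 * Usage sketch (added for illustration, not part of the original source): typical wiring from an
 * Activity or Fragment. The id R.id.tab_strip and the viewPager variable are hypothetical.
 *
 *   NavigationTabStrip strip = (NavigationTabStrip) findViewById(R.id.tab_strip);
 *   strip.setTitles("Feed", "Profile");
 *   strip.setViewPager(viewPager, 0); // attach to a ViewPager with an adapter set, select the first tab
 *   strip.setOnTabStripSelectedIndexListener(new OnTabStripSelectedIndexListener() {
 *       @Override public void onStartTabSelected(final String title, final int index) { }
 *       @Override public void onEndTabSelected(final String title, final int index) { }
 *   });
 */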
// Replace the ViewPager's scroller via reflection so its scroll duration matches the animation duration
private void resetScroller() {
if (mViewPager == null) return;
try {
final Field scrollerField = ViewPager.class.getDeclaredField("mScroller");
scrollerField.setAccessible(true);
final ResizeViewPagerScroller scroller = new ResizeViewPagerScroller(getContext());
scrollerField.set(mViewPager, scroller);
} catch (Exception e) {
e.printStackTrace();
}
}
public void setOnPageChangeListener(final ViewPager.OnPageChangeListener listener) {
mOnPageChangeListener = listener;
}
public int getTabIndex() {
return mIndex;
}
public void setTabIndex(int index) {
setTabIndex(index, false);
}
// Set tab index from touch or programmatically
public void setTabIndex(int tabIndex, boolean isForce) {
if (mAnimator.isRunning()) return;
if (mTitles.length == 0) return;
int index = tabIndex;
boolean force = isForce;
// This check allows the strip to start with no tab selected
if (mIndex == INVALID_INDEX) force = true;
// Nothing to do if the index is unchanged
if (index == mIndex) return;
// Clamp the index to the valid tab range
index = Math.max(0, Math.min(index, mTitles.length - 1));
mIsResizeIn = index < mIndex;
mLastIndex = mIndex;
mIndex = index;
mIsSetIndexFromTabBar = true;
if (mIsViewPagerMode) {
if (mViewPager == null) throw new IllegalStateException("ViewPager is null.");
mViewPager.setCurrentItem(index, !force);
}
// Set startX and endX for animation, where we animate two sides of rect with different interpolation
mStartStripX = mStripLeft;
mEndStripX = (mIndex * mTabSize) + (mStripType == StripType.POINT ? mTabSize * 0.5F : 0.0F);
// If force is set, update immediately; otherwise animate.
// Forcing is useful when setting the index in onCreate() or similar,
// where you would otherwise have to post() this call.
if (force) {
updateIndicatorPosition(MAX_FRACTION);
// Force onPageScrolled listener and refresh VP
if (mIsViewPagerMode) {
if (!mViewPager.isFakeDragging()) mViewPager.beginFakeDrag();
if (mViewPager.isFakeDragging()) {
mViewPager.fakeDragBy(0.0F);
mViewPager.endFakeDrag();
}
}
} else mAnimator.start();
}
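/*
 * Usage sketch (added for illustration, not part of the original source): the force flag decides
 * whether the strip animates to the new index or jumps there immediately. strip below is a
 * hypothetical NavigationTabStrip instance.
 *
 *   strip.setTabIndex(2);       // animates the strip (and the attached ViewPager, if any)
 *   strip.setTabIndex(2, true); // jumps without animation, e.g. when restoring state in onCreate()
 */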
// Deselect active index and reset pointer
public void deselect() {
mLastIndex = INVALID_INDEX;
mIndex = INVALID_INDEX;
mStartStripX = INVALID_INDEX * mTabSize;
mEndStripX = mStartStripX;
updateIndicatorPosition(MIN_FRACTION);
}
private void updateIndicatorPosition(final float fraction) {
// Update general fraction
mFraction = fraction;
// Set the strip left side coordinate
mStripLeft =
mStartStripX + (mResizeInterpolator.getResizeInterpolation(fraction, mIsResizeIn) *
(mEndStripX - mStartStripX));
// Set the strip right side coordinate
mStripRight =
(mStartStripX + (mStripType == StripType.LINE ? mTabSize : mStripWeight)) +
(mResizeInterpolator.getResizeInterpolation(fraction, !mIsResizeIn) *
(mEndStripX - mStartStripX));
// Update NTS
postInvalidate();
}
// Update NTS
private void notifyDataSetChanged() {
requestLayout();
postInvalidate();
}
@Override
public boolean onTouchEvent(final MotionEvent event) {
// Return if animation is running
if (mAnimator.isRunning()) return true;
// Ignore touches while the pager is not idle
if (mScrollState != ViewPager.SCROLL_STATE_IDLE) return true;
switch (event.getAction()) {
case MotionEvent.ACTION_DOWN:
// Action down touch
mIsActionDown = true;
if (!mIsViewPagerMode) break;
// Detect whether the touch went down on the currently selected tab, so a later move can drag it
mIsTabActionDown = (int) (event.getX() / mTabSize) == mIndex;
break;
case MotionEvent.ACTION_MOVE:
// If the selected tab was touched, follow the finger
if (mIsTabActionDown) {
mViewPager.setCurrentItem((int) (event.getX() / mTabSize), true);
break;
}
if (mIsActionDown) break;
case MotionEvent.ACTION_UP:
// On release, select the tab under the touch x-coordinate
if (mIsActionDown) setTabIndex((int) (event.getX() / mTabSize));
case MotionEvent.ACTION_CANCEL:
case MotionEvent.ACTION_OUTSIDE:
default:
// Reset action touch variables
mIsTabActionDown = false;
mIsActionDown = false;
break;
}
return true;
}
@SuppressLint("DrawAllocation")
@Override
protected void onMeasure(final int widthMeasureSpec, final int heightMeasureSpec) {
super.onMeasure(widthMeasureSpec, heightMeasureSpec);
// Get measure size
final float width = MeasureSpec.getSize(widthMeasureSpec);
final float height = MeasureSpec.getSize(heightMeasureSpec);
// Set bounds for NTS
mBounds.set(0.0F, 0.0F, width, height);
if (mTitles.length == 0 || width == 0 || height == 0) return;
// Each tab gets an equal share of the available width
mTabSize = width / (float) mTitles.length;
if ((int) mTitleSize == DEFAULT_TITLE_SIZE)
setTitleSize((height - mStripWeight) * TITLE_SIZE_FRACTION);
// Set start position of strip for preview or on start
if (isInEditMode() || !mIsViewPagerMode) {
mIsSetIndexFromTabBar = true;
// Set random in preview mode
if (isInEditMode()) mIndex = new Random().nextInt(mTitles.length);
mStartStripX =
(mIndex * mTabSize) + (mStripType == StripType.POINT ? mTabSize * 0.5F : 0.0F);
mEndStripX = mStartStripX;
updateIndicatorPosition(MAX_FRACTION);
}
}
@Override
protected void onDraw(final Canvas canvas) {
// Set bound of strip
mStripBounds.set(
mStripLeft - (mStripType == StripType.POINT ? mStripWeight * 0.5F : 0.0F),
mStripGravity == StripGravity.BOTTOM ? mBounds.height() - mStripWeight : 0.0F,
mStripRight - (mStripType == StripType.POINT ? mStripWeight * 0.5F : 0.0F),
mStripGravity == StripGravity.BOTTOM ? mBounds.height() : mStripWeight
);
// Draw strip
if (mCornersRadius == 0) canvas.drawRect(mStripBounds, mStripPaint);
else canvas.drawRoundRect(mStripBounds, mCornersRadius, mCornersRadius, mStripPaint);
// Draw tab titles
for (int i = 0; i < mTitles.length; i++) {
final String title = mTitles[i];
final float leftTitleOffset = (mTabSize * i) + (mTabSize * 0.5F);
mTitlePaint.getTextBounds(title, 0, title.length(), mTitleBounds);
final float topTitleOffset = (mBounds.height() - mStripWeight) * 0.5F +
mTitleBounds.height() * 0.5F - mTitleBounds.bottom;
// Get the interpolated fractions for the current and the previously selected tab
final float interpolation = mResizeInterpolator.getResizeInterpolation(mFraction, true);
final float lastInterpolation = mResizeInterpolator.getResizeInterpolation(mFraction, false);
// Check whether the selection came from a touch on the strip or from the ViewPager;
// ViewPager's onPageScrolled reports indices differently, hence the two branches.
if (mIsSetIndexFromTabBar) {
if (mIndex == i) updateCurrentTitle(interpolation);
else if (mLastIndex == i) updateLastTitle(lastInterpolation);
else updateInactiveTitle();
} else {
if (i != mIndex && i != mIndex + 1) updateInactiveTitle();
else if (i == mIndex + 1) updateCurrentTitle(interpolation);
else if (i == mIndex) updateLastTitle(lastInterpolation);
}
canvas.drawText(
title, leftTitleOffset,
topTitleOffset + (mStripGravity == StripGravity.TOP ? mStripWeight : 0.0F),
mTitlePaint
);
}
}
// Blend the currently selected title color from inactive to active
private void updateCurrentTitle(final float interpolation) {
mTitlePaint.setColor(
(int) mColorEvaluator.evaluate(interpolation, mInactiveColor, mActiveColor)
);
}
// Blend the previously selected title color from active back to inactive
private void updateLastTitle(final float lastInterpolation) {
mTitlePaint.setColor(
(int) mColorEvaluator.evaluate(lastInterpolation, mActiveColor, mInactiveColor)
);
}
// Paint all other titles with the inactive color
private void updateInactiveTitle() {
mTitlePaint.setColor(mInactiveColor);
}
@Override
public void onPageScrolled(int position, float positionOffset, final int positionOffsetPixels) {
if (mOnPageChangeListener != null)
mOnPageChangeListener.onPageScrolled(position, positionOffset, positionOffsetPixels);
// Skip this block while the change is driven by a tab selection on the strip (the animator handles it)
if (!mIsSetIndexFromTabBar) {
mIsResizeIn = position < mIndex;
mLastIndex = mIndex;
mIndex = position;
mStartStripX =
(position * mTabSize) + (mStripType == StripType.POINT ? mTabSize * 0.5F : 0.0F);
mEndStripX = mStartStripX + mTabSize;
updateIndicatorPosition(positionOffset);
}
// Stop scrolling on animation end and reset values
if (!mAnimator.isRunning() && mIsSetIndexFromTabBar) {
mFraction = MIN_FRACTION;
mIsSetIndexFromTabBar = false;
}
}
@Override
public void onPageSelected(final int position) {
// This method is empty, because we call onPageSelected() when scroll state is idle
}
@Override
public void onPageScrollStateChanged(final int state) {
// When the ViewPager becomes idle, notify listeners of the final selection
mScrollState = state;
if (state == ViewPager.SCROLL_STATE_IDLE) {
if (mOnPageChangeListener != null) mOnPageChangeListener.onPageSelected(mIndex);
if (mIsViewPagerMode && mOnTabStripSelectedIndexListener != null)
mOnTabStripSelectedIndexListener.onEndTabSelected(mTitles[mIndex], mIndex);
}
if (mOnPageChangeListener != null) mOnPageChangeListener.onPageScrollStateChanged(state);
}
@Override
public void onRestoreInstanceState(Parcelable state) {
final SavedState savedState = (SavedState) state;
super.onRestoreInstanceState(savedState.getSuperState());
mIndex = savedState.index;
requestLayout();
}
@Override
public Parcelable onSaveInstanceState() {
final Parcelable superState = super.onSaveInstanceState();
final SavedState savedState = new SavedState(superState);
savedState.index = mIndex;
return savedState;
}
@Override
protected void onConfigurationChanged(final Configuration newConfig) {
// Re-measure on rotation and other configuration changes
super.onConfigurationChanged(newConfig);
requestLayout();
// Re-select the current index once the new layout pass has run
final int tempIndex = mIndex;
deselect();
post(new Runnable() {
@Override
public void run() {
setTabIndex(tempIndex, true);
}
});
}
// NTS strip type
public enum StripType {
LINE, POINT;
private final static int LINE_INDEX = 0;
private final static int POINT_INDEX = 1;
}
// NTS strip gravity
public enum StripGravity {
BOTTOM, TOP;
private final static int BOTTOM_INDEX = 0;
private final static int TOP_INDEX = 1;
}
// Out listener for selected index
public interface OnTabStripSelectedIndexListener {
void onStartTabSelected(final String title, final int index);
void onEndTabSelected(final String title, final int index);
}
// Save current index instance
private static class SavedState extends BaseSavedState {
@SuppressWarnings("UnusedDeclaration")
public static final Creator<SavedState> CREATOR = new Creator<SavedState>() {
@Override
public SavedState createFromParcel(Parcel in) {
return new SavedState(in);
}
@Override
public SavedState[] newArray(int size) {
return new SavedState[size];
}
};
private int index;
public SavedState(Parcelable superState) {
super(superState);
}
private SavedState(Parcel in) {
super(in);
index = in.readInt();
}
@Override
public void writeToParcel(Parcel dest, int flags) {
super.writeToParcel(dest, flags);
dest.writeInt(index);
}
}
// Resize interpolator that produces the smooth strip stretch from the original design inspiration.
// It behaves like an accelerate/decelerate interpolator with a configurable factor.
private static class ResizeInterpolator implements Interpolator {
// Spring factor
private float mFactor;
// Whether the current interpolation is for the resize-in direction
private boolean mResizeIn;
public float getFactor() {
return mFactor;
}
public void setFactor(final float factor) {
mFactor = factor;
}
@Override
public float getInterpolation(final float input) {
if (mResizeIn) return (float) (1.0F - Math.pow((1.0F - input), 2.0F * mFactor));
else return (float) (Math.pow(input, 2.0F * mFactor));
}
public float getResizeInterpolation(final float input, final boolean resizeIn) {
mResizeIn = resizeIn;
return getInterpolation(input);
}
}
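/*
 * Worked example (added for illustration, not in the original source): with the default factor
 * 2.5 the exponent is 2 * 2.5 = 5, so at input 0.5 the "resize in" edge has progressed to
 * 1 - (1 - 0.5)^5 ~= 0.97 while the "resize out" edge is only at 0.5^5 ~= 0.03. Feeding the same
 * fraction to both strip edges with opposite resizeIn flags is what stretches the strip towards
 * the target tab before it contracts again.
 */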
// Custom scroller with custom scroll duration
private class ResizeViewPagerScroller extends Scroller {
public ResizeViewPagerScroller(Context context) {
super(context, new AccelerateDecelerateInterpolator());
}
@Override
public void startScroll(int startX, int startY, int dx, int dy, int duration) {
super.startScroll(startX, startY, dx, dy, mAnimationDuration);
}
@Override
public void startScroll(int startX, int startY, int dx, int dy) {
super.startScroll(startX, startY, dx, dy, mAnimationDuration);
}
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.cache.wan;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.io.InputStream;
import java.util.Arrays;
import org.apache.geode.DataSerializer;
import org.apache.geode.InternalGemFireError;
import org.apache.geode.cache.CacheEvent;
import org.apache.geode.cache.CacheFactory;
import org.apache.geode.cache.Operation;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.asyncqueue.AsyncEvent;
import org.apache.geode.cache.util.ObjectSizer;
import org.apache.geode.cache.wan.EventSequenceID;
import org.apache.geode.internal.cache.CachedDeserializable;
import org.apache.geode.internal.cache.CachedDeserializableFactory;
import org.apache.geode.internal.cache.Conflatable;
import org.apache.geode.internal.cache.EntryEventImpl;
import org.apache.geode.internal.cache.EnumListenerEvent;
import org.apache.geode.internal.cache.EventID;
import org.apache.geode.internal.cache.LocalRegion;
import org.apache.geode.internal.cache.Token;
import org.apache.geode.internal.cache.WrappedCallbackArgument;
import org.apache.geode.internal.cache.tier.sockets.Message;
import org.apache.geode.internal.lang.ObjectUtils;
import org.apache.geode.internal.offheap.OffHeapHelper;
import org.apache.geode.internal.offheap.ReferenceCountHelper;
import org.apache.geode.internal.offheap.Releasable;
import org.apache.geode.internal.offheap.StoredObject;
import org.apache.geode.internal.offheap.annotations.OffHeapIdentifier;
import org.apache.geode.internal.offheap.annotations.Released;
import org.apache.geode.internal.offheap.annotations.Retained;
import org.apache.geode.internal.offheap.annotations.Unretained;
import org.apache.geode.internal.serialization.DataSerializableFixedID;
import org.apache.geode.internal.serialization.DeserializationContext;
import org.apache.geode.internal.serialization.SerializationContext;
import org.apache.geode.internal.serialization.StaticSerialization;
import org.apache.geode.internal.serialization.Version;
import org.apache.geode.internal.serialization.VersionedDataInputStream;
import org.apache.geode.internal.size.Sizeable;
/**
* Class <code>GatewaySenderEventImpl</code> represents an event sent between
 * <code>GatewaySender</code>s.
 *
 * @since GemFire 7.0
 */
public class GatewaySenderEventImpl
implements AsyncEvent, DataSerializableFixedID, Conflatable, Sizeable, Releasable {
private static final long serialVersionUID = -5690172020872255422L;
protected static final Object TOKEN_NULL = new Object();
// This should track the current version, but it was mistakenly hard-coded to 0x11
// (GEODE_120_ORDINAL) from 1.2.0 until just before 1.9.0.
protected static final short VERSION = Version.GEODE_1_9_0.ordinal();
protected EnumListenerEvent operation;
protected Object substituteValue;
/**
* The action to be taken (e.g. AFTER_CREATE)
*/
protected int action;
/**
* The operation detail of EntryEvent (e.g. LOAD, PUTALL etc.)
*/
protected int operationDetail;
/**
* The number of parts for the <code>Message</code>
*
* @see Message
*/
protected int numberOfParts;
/**
* The identifier of this event
*/
protected EventID id;
/**
* The <code>Region</code> that was updated
*/
private transient LocalRegion region;
/**
* The name of the region being affected by this event
*/
protected String regionPath;
/**
* The key being affected by this event
*/
protected Object key;
/**
* The serialized new value for this event's key. May not be computed at construction time.
*/
protected volatile byte[] value;
/**
* The "object" form of the value. Will be null after this object is deserialized.
*/
@Retained(OffHeapIdentifier.GATEWAY_SENDER_EVENT_IMPL_VALUE)
protected transient Object valueObj;
protected transient boolean valueObjReleased;
private transient boolean serializedValueNotAvailable;
/**
* Whether the value is a serialized object or just a byte[]
*/
protected byte valueIsObject;
/**
* The callback argument for this event
*/
protected GatewaySenderEventCallbackArgument callbackArgument;
/**
* The version timestamp
*/
protected long versionTimeStamp;
/**
* Whether this event is a possible duplicate
*/
protected boolean possibleDuplicate;
/**
 * Whether this event has been acknowledged after the ack is received by AckReaderThread. Currently
 * this is only used for PDX-related GatewaySenderEvents, but it can be extended to other
 * GatewaySenderEvents.
*/
protected volatile boolean isAcked;
/**
 * Whether this event has been dispatched by the dispatcher. Currently this is only used for
 * PDX-related GatewaySenderEvents, but it can be extended to other GatewaySenderEvents.
*/
protected volatile boolean isDispatched;
/**
* The creation timestamp in ms
*/
protected long creationTime;
/**
 * For ParallelGatewaySender we need the bucketId of the PartitionedRegion on which the update
 * operation was applied.
*/
protected int bucketId;
protected Long shadowKey = Long.valueOf(-1L);
protected boolean isInitialized;
private transient boolean isConcurrencyConflict = false;
private short version;
/**
* Is this thread in the process of serializing this event?
*/
public static final ThreadLocal isSerializingValue = new ThreadLocal() {
@Override
protected Object initialValue() {
return Boolean.FALSE;
}
};
private static final int CREATE_ACTION = 0;
private static final int UPDATE_ACTION = 1;
private static final int DESTROY_ACTION = 2;
private static final int VERSION_ACTION = 3;
private static final int INVALIDATE_ACTION = 5;
/**
* Static constants for Operation detail of EntryEvent.
*/
private static final int OP_DETAIL_NONE = 10;
private static final int OP_DETAIL_LOCAL_LOAD = 11;
private static final int OP_DETAIL_NET_LOAD = 12;
private static final int OP_DETAIL_PUTALL = 13;
private static final int OP_DETAIL_REMOVEALL = 14;
private static final int DEFAULT_SERIALIZED_VALUE_SIZE = -1;
private volatile int serializedValueSize = DEFAULT_SERIALIZED_VALUE_SIZE;
// /**
// * Is this thread in the process of deserializing this event?
// */
// public static final ThreadLocal isDeserializingValue = new ThreadLocal() {
// @Override
// protected Object initialValue() {
// return Boolean.FALSE;
// }
// };
/**
* Constructor. No-arg constructor for data serialization.
*
* @see DataSerializer
*/
public GatewaySenderEventImpl() {}
/**
 * Constructor. Creates an initialized <code>GatewaySenderEventImpl</code>
 *
 * @param operation The operation for this event (e.g. AFTER_CREATE)
 * @param event The <code>CacheEvent</code> on which this <code>GatewaySenderEventImpl</code> is based
* @param substituteValue The value to be enqueued instead of the value in the event.
*
*/
@Retained
public GatewaySenderEventImpl(EnumListenerEvent operation, CacheEvent event,
Object substituteValue) throws IOException {
this(operation, event, substituteValue, true);
}
@Retained
public GatewaySenderEventImpl(EnumListenerEvent operation, CacheEvent event,
Object substituteValue, boolean initialize, int bucketId) throws IOException {
this(operation, event, substituteValue, initialize);
this.bucketId = bucketId;
}
/**
* Constructor.
*
* @param operation The operation for this event (e.g. AFTER_CREATE)
 * @param ce The <code>CacheEvent</code> on which this <code>GatewaySenderEventImpl</code> is based
* @param substituteValue The value to be enqueued instead of the value in the event.
* @param initialize Whether to initialize this instance
*
*/
@Retained
public GatewaySenderEventImpl(EnumListenerEvent operation, CacheEvent ce, Object substituteValue,
boolean initialize) throws IOException {
// Set the operation and event
final EntryEventImpl event = (EntryEventImpl) ce;
this.operation = operation;
this.substituteValue = substituteValue;
// Initialize the region name. This is being done here because the event
// can get serialized/deserialized (for some reason) between the time
// it is set above and used (in initialize). If this happens, the
// region is null because it is a transient field of the event.
this.region = (LocalRegion) event.getRegion();
this.regionPath = this.region.getFullPath();
// Initialize the unique id
initializeId(event);
// Initialize possible duplicate
this.possibleDuplicate = event.isPossibleDuplicate();
// Initialize ack and dispatch status of events
this.isAcked = false;
this.isDispatched = false;
// Initialize the creation timestamp
this.creationTime = System.currentTimeMillis();
if (event.getVersionTag() != null && event.getVersionTag().hasValidVersion()) {
this.versionTimeStamp = event.getVersionTag().getVersionTimeStamp();
}
// Set key
// System.out.println("this._entryEvent: " + event);
// System.out.println("this._entryEvent.getKey(): " +
// event.getKey());
this.key = event.getKey();
initializeValue(event);
// Set the callback arg
this.callbackArgument = (GatewaySenderEventCallbackArgument) event.getRawCallbackArgument();
// Initialize the action and number of parts (called after _callbackArgument
// is set above)
initializeAction(this.operation);
// initialize the operation detail
initializeOperationDetail(event.getOperation());
setShadowKey(event.getTailKey());
if (initialize) {
initialize();
}
this.isConcurrencyConflict = event.isConcurrencyConflict();
}
/**
* Used to create a heap copy of an offHeap event. Note that this constructor produces an instance
* that does not need to be released.
*/
protected GatewaySenderEventImpl(GatewaySenderEventImpl offHeapEvent) {
this.operation = offHeapEvent.operation;
this.action = offHeapEvent.action;
this.numberOfParts = offHeapEvent.numberOfParts;
this.id = offHeapEvent.id;
this.region = offHeapEvent.region;
this.regionPath = offHeapEvent.regionPath;
this.key = offHeapEvent.key;
this.callbackArgument = offHeapEvent.callbackArgument;
this.versionTimeStamp = offHeapEvent.versionTimeStamp;
this.possibleDuplicate = offHeapEvent.possibleDuplicate;
this.isAcked = offHeapEvent.isAcked;
this.isDispatched = offHeapEvent.isDispatched;
this.creationTime = offHeapEvent.creationTime;
this.bucketId = offHeapEvent.bucketId;
this.shadowKey = offHeapEvent.shadowKey;
this.isInitialized = offHeapEvent.isInitialized;
this.valueObj = null;
this.valueObjReleased = false;
this.valueIsObject = offHeapEvent.valueIsObject;
this.value = offHeapEvent.getSerializedValue();
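// Note (added commentary): getSerializedValue() materializes the value as a heap byte[], so this
// copy holds no off-heap reference and, as the constructor javadoc states, never needs release().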
}
/**
* Returns this event's action
*
* @return this event's action
*/
public int getAction() {
return this.action;
}
/**
* Returns this event's operation
*
* @return this event's operation
*/
@Override
public Operation getOperation() {
Operation op = null;
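// Map the stored action/operationDetail pair back to a public Operation.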
switch (this.action) {
case CREATE_ACTION:
switch (this.operationDetail) {
case OP_DETAIL_LOCAL_LOAD:
op = Operation.LOCAL_LOAD_CREATE;
break;
case OP_DETAIL_NET_LOAD:
op = Operation.NET_LOAD_CREATE;
break;
case OP_DETAIL_PUTALL:
op = Operation.PUTALL_CREATE;
break;
case OP_DETAIL_NONE:
op = Operation.CREATE;
break;
// any other operationDetail is treated the same as OP_DETAIL_NONE
default:
op = Operation.CREATE;
break;
}
break;
case UPDATE_ACTION:
switch (this.operationDetail) {
case OP_DETAIL_LOCAL_LOAD:
op = Operation.LOCAL_LOAD_UPDATE;
break;
case OP_DETAIL_NET_LOAD:
op = Operation.NET_LOAD_UPDATE;
break;
case OP_DETAIL_PUTALL:
op = Operation.PUTALL_UPDATE;
break;
case OP_DETAIL_NONE:
op = Operation.UPDATE;
break;
// any other operationDetail is treated the same as OP_DETAIL_NONE
default:
op = Operation.UPDATE;
break;
}
break;
case DESTROY_ACTION:
if (this.operationDetail == OP_DETAIL_REMOVEALL) {
op = Operation.REMOVEALL_DESTROY;
} else {
op = Operation.DESTROY;
}
break;
case VERSION_ACTION:
op = Operation.UPDATE_VERSION_STAMP;
break;
case INVALIDATE_ACTION:
op = Operation.INVALIDATE;
break;
}
return op;
}
public Object getSubstituteValue() {
return this.substituteValue;
}
public EnumListenerEvent getEnumListenerEvent() {
return this.operation;
}
/**
* Return this event's region name
*
* @return this event's region name
*/
public String getRegionPath() {
return this.regionPath;
}
public boolean isInitialized() {
return this.isInitialized;
}
/**
* Returns this event's key
*
* @return this event's key
*/
@Override
public Object getKey() {
// TODO: Asif: Ideally we would throw an exception if the key is TOKEN_UN_INITIALIZED, but for
// now we retain the GFE behaviour of returning null when getKey is invoked on an uninitialized
// gateway event.
return isInitialized() ? this.key : null;
}
/**
* Returns whether this event's value is a serialized object
*
* @return whether this event's value is a serialized object
*/
public byte getValueIsObject() {
return this.valueIsObject;
}
/**
* Return this event's callback argument
*
* @return this event's callback argument
*/
@Override
public Object getCallbackArgument() {
Object result = getSenderCallbackArgument();
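// Unwrap nested WrappedCallbackArguments until we reach the original callback argument.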
while (result instanceof WrappedCallbackArgument) {
WrappedCallbackArgument wca = (WrappedCallbackArgument) result;
result = wca.getOriginalCallbackArg();
}
return result;
}
public GatewaySenderEventCallbackArgument getSenderCallbackArgument() {
return this.callbackArgument;
}
/**
* Return this event's number of parts
*
* @return this event's number of parts
*/
public int getNumberOfParts() {
return this.numberOfParts;
}
/**
* Return the currently held form of the object. May return a retained OFF_HEAP_REFERENCE.
*/
@Retained
public Object getRawValue() {
@Retained(OffHeapIdentifier.GATEWAY_SENDER_EVENT_IMPL_VALUE)
Object result = this.value;
if (result == null) {
result = this.substituteValue;
if (result == null) {
result = this.valueObj;
if (result instanceof StoredObject && ((StoredObject) result).hasRefCount()) {
if (this.valueObjReleased) {
result = null;
} else {
StoredObject ohref = (StoredObject) result;
if (!ohref.retain()) {
result = null;
} else if (this.valueObjReleased) {
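// The value was released concurrently after our successful retain; undo the retain and report the value as gone.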
ohref.release();
result = null;
}
}
}
}
}
return result;
}
/**
* Return this event's deserialized value
*
* @return this event's deserialized value
*/
@Override
public Object getDeserializedValue() {
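// A valueIsObject of 0x00 indicates the value is a raw byte[] rather than a serialized object,
// so the bytes themselves are the deserialized form.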
if (this.valueIsObject == 0x00) {
Object result = this.value;
if (result == null) {
@Unretained(OffHeapIdentifier.GATEWAY_SENDER_EVENT_IMPL_VALUE)
Object so = this.valueObj;
if (this.valueObjReleased) {
throw new IllegalStateException(
"Value is no longer available. getDeserializedValue must be called before processEvents returns.");
}
if (so instanceof StoredObject) {
return ((StoredObject) so).getValueAsDeserializedHeapObject();
} else {
throw new IllegalStateException(
"expected valueObj field to be an instance of StoredObject but it was " + so);
}
}
return result;
} else {
Object vo = this.valueObj;
if (vo != null) {
if (vo instanceof StoredObject) {
@Unretained(OffHeapIdentifier.GATEWAY_SENDER_EVENT_IMPL_VALUE)
StoredObject so = (StoredObject) vo;
return so.getValueAsDeserializedHeapObject();
} else {
return vo; // it is already deserialized
}
} else {
if (this.value != null) {
Object result = EntryEventImpl.deserialize(this.value);
this.valueObj = result;
return result;
} else if (this.substituteValue != null) {
// If the substitute value is set, return it.
return this.substituteValue;
} else {
if (this.valueObjReleased) {
throw new IllegalStateException(
"Value is no longer available. getDeserializedValue must be called before processEvents returns.");
}
// both value and valueObj are null but we did not free it.
return null;
}
}
}
}
/**
* Returns the value in the form of a String. This should be used by code that wants to log the
* value; it is intended for debugging only.
*/
public String getValueAsString(boolean deserialize) {
Object v = this.value;
if (v == null) {
v = this.substituteValue;
}
if (deserialize) {
try {
v = getDeserializedValue();
} catch (Exception e) {
return "Could not convert value to string because " + e;
} catch (InternalGemFireError e) { // catch this error for bug 49147
return "Could not convert value to string because " + e;
}
}
if (v == null) {
@Unretained(OffHeapIdentifier.GATEWAY_SENDER_EVENT_IMPL_VALUE)
Object ov = this.valueObj;
if (ov instanceof CachedDeserializable) {
return ((CachedDeserializable) ov).getStringForm();
}
}
if (v != null) {
if (v instanceof byte[]) {
byte[] bav = (byte[]) v;
// Using Arrays.toString(bav) can cause us to run out of memory
return "byte[" + bav.length + "]";
} else {
return v.toString();
}
} else {
return "";
}
}
public boolean isSerializedValueNotAvailable() {
return this.serializedValueNotAvailable;
}
/**
* If the value owned by this event is just bytes, return that byte array; otherwise serialize the
* value object and return the serialized bytes. Use {@link #getValueIsObject()} to determine if
* the result is raw or serialized bytes.
*/
@Override
public byte[] getSerializedValue() {
byte[] result = this.value;
if (result == null) {
if (this.substituteValue != null) {
// The substitute value is set. Serialize it
isSerializingValue.set(Boolean.TRUE);
result = EntryEventImpl.serialize(this.substituteValue);
isSerializingValue.set(Boolean.FALSE);
return result;
}
@Unretained(OffHeapIdentifier.GATEWAY_SENDER_EVENT_IMPL_VALUE)
Object vo = this.valueObj;
if (vo instanceof StoredObject) {
synchronized (this) {
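// Re-check under the lock: another thread may already have converted and cached the heap byte array.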
result = this.value;
if (result == null) {
StoredObject so = (StoredObject) vo;
result = so.getValueAsHeapByteArray();
this.value = result;
}
}
} else {
synchronized (this) {
result = this.value;
if (result == null && vo != null && !(vo instanceof Token)) {
isSerializingValue.set(Boolean.TRUE);
result = EntryEventImpl.serialize(vo);
isSerializingValue.set(Boolean.FALSE);
this.value = result;
} else if (result == null) {
if (this.valueObjReleased) {
this.serializedValueNotAvailable = true;
throw new IllegalStateException(
"Value is no longer available. getSerializedValue must be called before processEvents returns.");
}
}
}
}
}
return result;
}
public void setPossibleDuplicate(boolean possibleDuplicate) {
this.possibleDuplicate = possibleDuplicate;
}
@Override
public boolean getPossibleDuplicate() {
return this.possibleDuplicate;
}
public long getCreationTime() {
return this.creationTime;
}
@Override
public int getDSFID() {
return GATEWAY_SENDER_EVENT_IMPL;
}
@Override
public void toData(DataOutput out,
SerializationContext context) throws IOException {
toDataPre_GEODE_1_9_0_0(out, context);
DataSerializer.writeBoolean(this.isConcurrencyConflict, out);
}
public void toDataPre_GEODE_1_9_0_0(DataOutput out, SerializationContext context)
throws IOException {
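// The field order written below must match the read order in fromDataPre_GEODE_1_9_0_0.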
// Make sure we are initialized before we serialize.
initialize();
out.writeShort(VERSION);
out.writeInt(this.action);
out.writeInt(this.numberOfParts);
// out.writeUTF(this._id);
context.getSerializer().writeObject(this.id, out);
DataSerializer.writeString(this.regionPath, out);
out.writeByte(this.valueIsObject);
serializeKey(out, context);
DataSerializer.writeByteArray(getSerializedValue(), out);
context.getSerializer().writeObject(this.callbackArgument, out);
out.writeBoolean(this.possibleDuplicate);
out.writeLong(this.creationTime);
out.writeInt(this.bucketId);
out.writeLong(this.shadowKey);
out.writeLong(getVersionTimeStamp());
}
protected void serializeKey(DataOutput out,
SerializationContext context) throws IOException {
context.getSerializer().writeObject(this.key, out);
}
@Override
public void fromData(DataInput in,
DeserializationContext context) throws IOException, ClassNotFoundException {
fromDataPre_GEODE_1_9_0_0(in, context);
if (version >= Version.GEODE_1_9_0.ordinal()) {
this.isConcurrencyConflict = DataSerializer.readBoolean(in);
}
}
public void fromDataPre_GEODE_1_9_0_0(DataInput in, DeserializationContext context)
throws IOException, ClassNotFoundException {
version = in.readShort();
this.isInitialized = true;
this.action = in.readInt();
this.numberOfParts = in.readInt();
// this._id = in.readUTF();
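// When the payload was written with a pre-0x11 VERSION but the stream itself reports
// Version.CURRENT, wrap it so the event id and subsequent fields are read with the
// GFE 7.0.1 wire format.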
if (version < 0x11 && (in instanceof InputStream)
&& StaticSerialization.getVersionForDataStream(in) == Version.CURRENT) {
in = new VersionedDataInputStream((InputStream) in, Version.GFE_701);
}
this.id = (EventID) context.getDeserializer().readObject(in);
// TODO: Asif: Check if this violates Barry's logic of not assigning the VM-specific
// Token.FROM_GATEWAY and retaining the serialized Token.FROM_GATEWAY
// this._id.setFromGateway(false);
this.regionPath = DataSerializer.readString(in);
this.valueIsObject = in.readByte();
deserializeKey(in, context);
this.value = DataSerializer.readByteArray(in);
this.callbackArgument =
(GatewaySenderEventCallbackArgument) context.getDeserializer().readObject(in);
this.possibleDuplicate = in.readBoolean();
this.creationTime = in.readLong();
this.bucketId = in.readInt();
this.shadowKey = in.readLong();
this.versionTimeStamp = in.readLong();
// TODO should this call initializeKey()?
}
protected void deserializeKey(DataInput in,
DeserializationContext context) throws IOException, ClassNotFoundException {
this.key = context.getDeserializer().readObject(in);
}
@Override
public String toString() {
StringBuilder builder = new StringBuilder();
builder.append("GatewaySenderEventImpl[").append("id=").append(this.id).append(";action=")
.append(this.action).append(";operation=").append(getOperation()).append(";region=")
.append(this.regionPath).append(";key=").append(this.key).append(";value=")
.append(getValueAsString(true)).append(";valueIsObject=").append(this.valueIsObject)
.append(";numberOfParts=").append(this.numberOfParts).append(";callbackArgument=")
.append(this.callbackArgument).append(";possibleDuplicate=").append(this.possibleDuplicate)
.append(";creationTime=").append(this.creationTime).append(";shadowKey=")
.append(this.shadowKey).append(";timeStamp=").append(this.versionTimeStamp)
.append(";acked=").append(this.isAcked).append(";dispatched=").append(this.isDispatched)
.append(";bucketId=").append(this.bucketId).append(";isConcurrencyConflict=")
.append(this.isConcurrencyConflict).append("]");
return builder.toString();
}
public String toSmallString() {
StringBuilder builder = new StringBuilder();
builder.append("GatewaySenderEventImpl[").append("id=").append(this.id).append(";operation=")
.append(getOperation()).append(";region=").append(this.regionPath).append(";key=")
.append(this.key).append(";shadowKey=").append(this.shadowKey).append(";bucketId=")
.append(this.bucketId).append("]");
return builder.toString();
}
public static boolean isSerializingValue() {
return ((Boolean) isSerializingValue.get()).booleanValue();
}
// public static boolean isDeserializingValue() {
// return ((Boolean)isDeserializingValue.get()).booleanValue();
// }
// / Conflatable interface methods ///
/**
* Determines whether or not to conflate this message. This method will answer true IFF the
* message's operation is AFTER_UPDATE and its region has conflation enabled. Otherwise, this
* method will answer false. Messages whose operation is AFTER_CREATE, AFTER_DESTROY,
* AFTER_INVALIDATE or AFTER_REGION_DESTROY are not conflated.
*
* @return Whether to conflate this message
*/
@Override
public boolean shouldBeConflated() {
// If the message is an update, it may be conflatable. If it is a
// create, destroy, invalidate or destroy-region, it is not conflatable.
// Only updates are conflated.
return isUpdate();
}
@Override
public String getRegionToConflate() {
return this.regionPath;
}
@Override
public Object getKeyToConflate() {
return this.key;
}
@Override
public Object getValueToConflate() {
// All current uses of this method are for logging, so return the string form of the value
// instead of the actual value.
return this.getValueAsString(true);
}
@Override
public void setLatestValue(Object value) {
// Currently this method is never used.
// If someone does want to use it in the future
// then the implementation needs to be updated
// to correctly update value, valueObj, and valueIsObject
throw new UnsupportedOperationException();
}
// / End Conflatable interface methods ///
/**
* Returns whether this <code>GatewayEvent</code> represents an update.
*
* @return whether this <code>GatewayEvent</code> represents an update
*/
protected boolean isUpdate() {
// This event can be in one of three states:
// - in memory primary (initialized)
// - in memory secondary (not initialized)
// - evicted to disk, read back in (initialized)
// In the first case, both the operation and action are set.
// In the second case, only the operation is set.
// In the third case, only the action is set.
return this.operation == null ? this.action == UPDATE_ACTION
: this.operation == EnumListenerEvent.AFTER_UPDATE;
}
/**
* Returns whether this <code>GatewayEvent</code> represents a create.
*
* @return whether this <code>GatewayEvent</code> represents a create
*/
protected boolean isCreate() {
// See the comment in isUpdate() for additional details
return this.operation == null ? this.action == CREATE_ACTION
: this.operation == EnumListenerEvent.AFTER_CREATE;
}
/**
* Returns whether this <code>GatewayEvent</code> represents a destroy.
*
* @return whether this <code>GatewayEvent</code> represents a destroy
*/
protected boolean isDestroy() {
// See the comment in isUpdate() for additional details
return this.operation == null ? this.action == DESTROY_ACTION
: this.operation == EnumListenerEvent.AFTER_DESTROY;
}
/**
* Initialize the unique identifier for this event. This id is used by the receiving
* <code>Gateway</code> to keep track of which events have been processed. Duplicates can be
* dropped.
*/
private void initializeId(EntryEventImpl event) {
// CS43_HA
this.id = event.getEventId();
// TODO:ASIF :Once stabilized remove the check below
if (this.id == null) {
throw new IllegalStateException(
"No event id is available for this gateway event.");
}
}
/**
* Initialize this instance. Get the useful parts of the input operation and event.
*/
public void initialize() {
if (isInitialized()) {
return;
}
this.isInitialized = true;
}
// Initializes the value object. This method needs a second look because the serialization of the
// value looks unnecessary.
@Retained(OffHeapIdentifier.GATEWAY_SENDER_EVENT_IMPL_VALUE)
protected void initializeValue(EntryEventImpl event) throws IOException {
// Set the value to be a byte[] representation of either the value or
// substituteValue (if set).
if (this.substituteValue == null) {
// If the value is already serialized, use it.
this.valueIsObject = 0x01;
/**
* so ends up being stored in this.valueObj
*/
@Retained(OffHeapIdentifier.GATEWAY_SENDER_EVENT_IMPL_VALUE)
StoredObject so = null;
{
ReferenceCountHelper.setReferenceCountOwner(this);
so = event.getOffHeapNewValue();
ReferenceCountHelper.setReferenceCountOwner(null);
}
if (so != null) {
// if (so != null && !event.hasDelta()) {
// Since GatewaySenderEventImpl instances can live for a long time in the gateway region queue,
// we do not want the StoredObject to be one that keeps the heap form cached.
so = so.getStoredObjectWithoutHeapForm(); // fixes 51999
this.valueObj = so;
if (!so.isSerialized()) {
this.valueIsObject = 0x00;
}
} else if (event.getCachedSerializedNewValue() != null) {
// We want this to have lower precedence than StoredObject so that the gateway
// can share a reference to the off-heap value.
this.value = event.getCachedSerializedNewValue();
} else {
final Object newValue = event.getRawNewValue();
assert !(newValue instanceof StoredObject); // since we already called getOffHeapNewValue()
// and it returned null
if (newValue instanceof CachedDeserializable) {
this.value = ((CachedDeserializable) newValue).getSerializedValue();
} else if (newValue instanceof byte[]) {
// The value is byte[]. Set _valueIsObject flag to 0x00 (not an object)
this.value = (byte[]) newValue;
this.valueIsObject = 0x00;
} else {
// The value is an object. It will be serialized later when getSerializedValue is called.
this.valueObj = newValue;
// to prevent bug 48281 we need to serialize it now
this.getSerializedValue();
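// The serialized bytes cached in this.value by getSerializedValue() are now the canonical form,
// so the temporary heap reference can be dropped.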
this.valueObj = null;
}
}
} else {
// The substituteValue is set. Use it.
if (this.substituteValue instanceof byte[]) {
// The substituteValue is byte[]. Set valueIsObject flag to 0x00 (not an object)
this.value = (byte[]) this.substituteValue;
this.valueIsObject = 0x00;
} else if (this.substituteValue == TOKEN_NULL) {
// The substituteValue represents null. Set the value and substituteValue to null.
this.value = null;
this.substituteValue = null;
this.valueIsObject = 0x01;
} else {
// The substituteValue is an object. Leave it as is.
this.valueIsObject = 0x01;
}
}
}
protected boolean shouldApplyDelta() {
return false;
}
/**
* Initialize this event's action and number of parts
*
* @param operation The operation from which to initialize this event's action and number of parts
*/
protected void initializeAction(EnumListenerEvent operation) {
if (operation == EnumListenerEvent.AFTER_CREATE) {
// Initialize after create action
this.action = CREATE_ACTION;
// Initialize number of parts
// part 1 = action
// part 2 = posDup flag
// part 3 = regionName
// part 4 = eventId
// part 5 = key
// part 6 = value (create and update only)
// part 7 = whether callbackArgument is non-null
// part 8 = callbackArgument (if non-null)
// part 9 = versionTimeStamp;
this.numberOfParts = (this.callbackArgument == null) ? 8 : 9;
} else if (operation == EnumListenerEvent.AFTER_UPDATE) {
// Initialize after update action
this.action = UPDATE_ACTION;
// Initialize number of parts
this.numberOfParts = (this.callbackArgument == null) ? 8 : 9;
} else if (operation == EnumListenerEvent.AFTER_DESTROY) {
// Initialize after destroy action
this.action = DESTROY_ACTION;
// Initialize number of parts
// Since there is no value, there is one less part
this.numberOfParts = (this.callbackArgument == null) ? 7 : 8;
} else if (operation == EnumListenerEvent.TIMESTAMP_UPDATE) {
// Initialize timestamp update (version) action
this.action = VERSION_ACTION;
// Initialize number of parts
// Since there is no value, there is one less part
this.numberOfParts = (this.callbackArgument == null) ? 7 : 8;
} else if (operation == EnumListenerEvent.AFTER_INVALIDATE) {
// Initialize after invalidate action
this.action = INVALIDATE_ACTION;
// Initialize number of parts
// Since there is no value, there is one less part
this.numberOfParts = (this.callbackArgument == null) ? 7 : 8;
}
}
private void initializeOperationDetail(Operation operation) {
if (operation.isLocalLoad()) {
operationDetail = OP_DETAIL_LOCAL_LOAD;
} else if (operation.isNetLoad()) {
operationDetail = OP_DETAIL_NET_LOAD;
} else if (operation.isPutAll()) {
operationDetail = OP_DETAIL_PUTALL;
} else if (operation.isRemoveAll()) {
operationDetail = OP_DETAIL_REMOVEALL;
} else {
operationDetail = OP_DETAIL_NONE;
}
}
@Override
public EventID getEventId() {
return this.id;
}
/**
* Return the EventSequenceID of the Event
*
*/
@Override
public EventSequenceID getEventSequenceID() {
return new EventSequenceID(id.getMembershipID(), id.getThreadID(), id.getSequenceID());
}
public long getVersionTimeStamp() {
return this.versionTimeStamp;
}
@Override
public int getSizeInBytes() {
// Calculate the size of this event. This is used for overflow to disk.
// The sizes of the following variables are calculated:
//
// - the value (byte[])
// - the original callback argument (Object)
// - primitive and object instance variable references
//
// The sizes of the following variables are not calculated:
// - the key because it is a reference
// - the region and regionName because they are references
// - the operation because it is a reference
// - the entry event because it is nulled prior to calling this method
// The sizes of instances of the following internal datatypes were estimated
// using a NullDataOutputStream and hardcoded into this method:
// - the id (an instance of EventId)
// - the callbackArgument (an instance of GatewayEventCallbackArgument)
int size = 0;
// Add this event overhead
size += Sizeable.PER_OBJECT_OVERHEAD;
// Add object references
// _id reference = 4 bytes
// _region reference = 4 bytes
// _regionName reference = 4 bytes
// _key reference = 4 bytes
// _callbackArgument reference = 4 bytes
// _operation reference = 4 bytes
// _entryEvent reference = 4 bytes
size += 28;
// Add primitive references
// int _action = 4 bytes
// int _numberOfParts = 4 bytes
// byte _valueIsObject = 1 byte
// boolean _possibleDuplicate = 1 byte
// int bucketId = 4 bytes
// long shadowKey = 8 bytes
// long creationTime = 8 bytes
size += 30;
// Add the id (an instance of EventId)
// The hardcoded value below was estimated using a NullDataOutputStream
size += Sizeable.PER_OBJECT_OVERHEAD + 56;
// The value (a byte[])
size += getSerializedValueSize();
// The callback argument (a GatewayEventCallbackArgument wrapping an Object
// which is the original callback argument)
// The hardcoded value below represents the GatewayEventCallbackArgument
// and was estimated using a NullDataOutputStream
size += Sizeable.PER_OBJECT_OVERHEAD + 194;
// The sizeOf call gets the size of the input callback argument.
size += Sizeable.PER_OBJECT_OVERHEAD + sizeOf(getCallbackArgument());
// the version timestamp
size += 8;
return size;
}
private int sizeOf(Object obj) {
int size = 0;
if (obj == null) {
return size;
}
if (obj instanceof String) {
size = ObjectSizer.DEFAULT.sizeof(obj);
} else if (obj instanceof Integer) {
size = 4; // estimate
} else if (obj instanceof Long) {
size = 8; // estimate
} else {
size = CachedDeserializableFactory.calcMemSize(obj) - Sizeable.PER_OBJECT_OVERHEAD;
}
return size;
}
// Asif: If this GatewayEvent is serialized to a node where the region may not be present, or is
// not created yet, and the gateway event queue is persistent, then even if we try to set the
// region in fromData we may still get null. Although the product does not use this method
// anywhere, rather than change the interface we adjust the implementation slightly.
@Override
public Region<?, ?> getRegion() {
// The region will be null mostly for the other node where the gateway event
// is serialized
return this.region != null ? this.region
: CacheFactory.getAnyInstance().getRegion(this.regionPath);
}
public int getBucketId() {
return bucketId;
}
public boolean isConcurrencyConflict() {
return isConcurrencyConflict;
}
/**
* @param tailKey the tailKey to set
*/
public void setShadowKey(Long tailKey) {
this.shadowKey = tailKey;
}
/**
* @return the tailKey
*/
public Long getShadowKey() {
return this.shadowKey;
}
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (!(obj instanceof GatewaySenderEventImpl)) {
return false;
}
GatewaySenderEventImpl that = (GatewaySenderEventImpl) obj;
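// Compare the same fields that hashCode() below folds in, so equal events always hash alike.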
return this.shadowKey.equals(that.shadowKey)
&& this.id.equals(that.id)
&& this.bucketId == that.bucketId
&& this.action == that.action
&& this.regionPath.equals(that.regionPath)
&& this.key.equals(that.key)
&& Arrays.equals(this.value, that.value);
}
public int hashCode() {
int hashCode = 17;
hashCode = 37 * hashCode + ObjectUtils.hashCode(this.shadowKey);
hashCode = 37 * hashCode + ObjectUtils.hashCode(this.id);
hashCode = 37 * hashCode + this.bucketId;
hashCode = 37 * hashCode + this.action;
hashCode = 37 * hashCode + ObjectUtils.hashCode(this.regionPath);
hashCode = 37 * hashCode + ObjectUtils.hashCode(this.key);
hashCode = 37 * hashCode + (this.value == null ? 0 : Arrays.hashCode(this.value));
return hashCode;
}
@Override
public Version[] getSerializationVersions() {
return new Version[] {Version.GEODE_1_9_0};
}
public int getSerializedValueSize() {
int localSerializedValueSize = this.serializedValueSize;
if (localSerializedValueSize != DEFAULT_SERIALIZED_VALUE_SIZE) {
return localSerializedValueSize;
}
@Unretained(OffHeapIdentifier.GATEWAY_SENDER_EVENT_IMPL_VALUE)
Object vo = this.valueObj;
if (vo instanceof StoredObject) {
localSerializedValueSize = ((StoredObject) vo).getSizeInBytes();
} else {
if (this.substituteValue != null) {
localSerializedValueSize = sizeOf(this.substituteValue);
} else {
localSerializedValueSize = CachedDeserializableFactory.calcMemSize(getSerializedValue());
}
}
this.serializedValueSize = localSerializedValueSize;
return localSerializedValueSize;
}
@Override
@Released(OffHeapIdentifier.GATEWAY_SENDER_EVENT_IMPL_VALUE)
public synchronized void release() {
@Released(OffHeapIdentifier.GATEWAY_SENDER_EVENT_IMPL_VALUE)
Object vo = this.valueObj;
if (OffHeapHelper.releaseAndTrackOwner(vo, this)) {
this.valueObj = null;
this.valueObjReleased = true;
}
}
public static void release(
@Released(OffHeapIdentifier.GATEWAY_SENDER_EVENT_IMPL_VALUE) Object o) {
if (o instanceof GatewaySenderEventImpl) {
((GatewaySenderEventImpl) o).release();
}
}
/**
* Make a heap copy of this off-heap event and return it. A copy only needs to be made if the
* event's value is stored off-heap. If it is already on the java heap then just return "this". If
* it was stored off-heap and is no longer available (because it was released) then return null.
*/
public GatewaySenderEventImpl makeHeapCopyIfOffHeap() {
if (this.value != null || this.substituteValue != null) {
// we have the value stored on the heap so return this
return this;
} else {
Object v = this.valueObj;
if (v == null) {
if (this.valueObjReleased) {
// this means that the original off heap value was freed
return null;
} else {
return this;
}
}
if (v instanceof StoredObject && ((StoredObject) v).hasRefCount()) {
try {
return makeCopy();
} catch (IllegalStateException ex) {
// this means that the original off heap value was freed
return null;
}
} else {
// the valueObj does not use refCounts so just return this.
return this;
}
}
}
protected GatewaySenderEventImpl makeCopy() {
return new GatewaySenderEventImpl(this);
}
public void copyOffHeapValue() {
if (this.value == null) {
this.value = getSerializedValue();
}
}
}
|
|
/*
* Copyright 2017 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jbpm.runtime.manager.impl;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Properties;
import java.util.Set;
import javax.persistence.EntityManagerFactory;
import org.jbpm.runtime.manager.impl.jpa.EntityManagerFactoryManager;
import org.jbpm.runtime.manager.util.TestUtil;
import org.jbpm.services.task.identity.JBossUserGroupCallbackImpl;
import org.jbpm.test.listener.process.NodeLeftCountDownProcessEventListener;
import org.jbpm.test.util.AbstractBaseTest;
import org.kie.test.util.db.PoolingDataSourceWrapper;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.kie.api.event.process.DefaultProcessEventListener;
import org.kie.api.event.process.ProcessEventListener;
import org.kie.api.event.process.ProcessNodeLeftEvent;
import org.kie.api.event.process.ProcessStartedEvent;
import org.kie.api.io.ResourceType;
import org.kie.api.runtime.KieSession;
import org.kie.api.runtime.manager.RuntimeEngine;
import org.kie.api.runtime.manager.RuntimeEnvironment;
import org.kie.api.runtime.manager.RuntimeEnvironmentBuilder;
import org.kie.api.runtime.manager.RuntimeManager;
import org.kie.api.runtime.manager.RuntimeManagerFactory;
import org.kie.api.runtime.manager.audit.AuditService;
import org.kie.api.runtime.manager.audit.ProcessInstanceLog;
import org.kie.api.runtime.process.ProcessInstance;
import org.kie.api.task.UserGroupCallback;
import org.kie.internal.io.ResourceFactory;
import org.kie.internal.runtime.manager.InternalRuntimeManager;
import org.kie.internal.runtime.manager.context.CaseContext;
import org.kie.internal.runtime.manager.context.ProcessInstanceIdContext;
public class PerCaseRuntimeManagerTest extends AbstractBaseTest {
private PoolingDataSourceWrapper pds;
private UserGroupCallback userGroupCallback;
private RuntimeManager manager;
private EntityManagerFactory emf;
@Before
public void setup() {
Properties properties= new Properties();
properties.setProperty("mary", "HR");
properties.setProperty("john", "HR");
userGroupCallback = new JBossUserGroupCallbackImpl(properties);
pds = TestUtil.setupPoolingDataSource();
emf = EntityManagerFactoryManager.get().getOrCreate("org.jbpm.persistence.jpa");
}
@After
public void teardown() {
manager.close();
EntityManagerFactoryManager.get().clear();
pds.close();
}
@Test
public void testCreationOfSession() {
final Set<Long> ksessionUsed = new HashSet<Long>();
RuntimeEnvironment environment = RuntimeEnvironmentBuilder.Factory.get()
.newDefaultBuilder()
.userGroupCallback(userGroupCallback)
.entityManagerFactory(emf)
.addAsset(ResourceFactory.newClassPathResource("BPMN2-ScriptTask.bpmn2"), ResourceType.BPMN2)
.addAsset(ResourceFactory.newClassPathResource("BPMN2-UserTask.bpmn2"), ResourceType.BPMN2)
.registerableItemsFactory(new DefaultRegisterableItemsFactory(){
@Override
public List<ProcessEventListener> getProcessEventListeners(RuntimeEngine runtime) {
List<ProcessEventListener> listeners = super.getProcessEventListeners(runtime);
listeners.add(new DefaultProcessEventListener(){
@Override
public void beforeProcessStarted(ProcessStartedEvent event) {
ksessionUsed.add(((KieSession)event.getKieRuntime()).getIdentifier());
}
});
return listeners;
}
})
.get();
manager = RuntimeManagerFactory.Factory.get().newPerCaseRuntimeManager(environment);
assertNotNull(manager);
// ksession for process instance #1
// since there is no process instance yet we need to get new session
RuntimeEngine runtime = manager.getRuntimeEngine(CaseContext.get("Case-1"));
KieSession ksession = runtime.getKieSession();
assertNotNull(ksession);
long ksession1Id = ksession.getIdentifier();
assertTrue(ksession1Id > 0);
// ksession for process instance #2
// since there is no process instance yet we need to get new session
RuntimeEngine runtime2 = manager.getRuntimeEngine(CaseContext.get("Case-2"));
KieSession ksession2 = runtime2.getKieSession();
assertNotNull(ksession2);
long ksession2Id = ksession2.getIdentifier();
assertTrue(ksession2Id > ksession1Id);
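// distinct cases are served by distinct ksessions, hence the different identifiers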
ProcessInstance pi1 = ksession.startProcess("UserTask");
ProcessInstance pi2 = ksession2.startProcess("UserTask");
// both processes started
assertEquals(ProcessInstance.STATE_ACTIVE, pi1.getState());
assertEquals(ProcessInstance.STATE_ACTIVE, pi2.getState());
manager.disposeRuntimeEngine(runtime);
manager.disposeRuntimeEngine(runtime2);
runtime = manager.getRuntimeEngine(ProcessInstanceIdContext.get(pi1.getId()));
ksession = runtime.getKieSession();
assertEquals(ksession1Id, ksession.getIdentifier());
runtime2 = manager.getRuntimeEngine(ProcessInstanceIdContext.get(pi2.getId()));
ksession2 = runtime2.getKieSession();
assertEquals(ksession2Id, ksession2.getIdentifier());
manager.disposeRuntimeEngine(runtime);
manager.disposeRuntimeEngine(runtime2);
// now let's check by case context
runtime = manager.getRuntimeEngine(CaseContext.get("Case-1"));
ksession = runtime.getKieSession();
assertEquals(ksession1Id, ksession.getIdentifier());
runtime2 = manager.getRuntimeEngine(CaseContext.get("Case-2"));
ksession2 = runtime2.getKieSession();
assertEquals(ksession2Id, ksession2.getIdentifier());
assertEquals(2, ksessionUsed.size());
assertTrue(ksessionUsed.contains(ksession1Id));
assertTrue(ksessionUsed.contains(ksession2Id));
manager.disposeRuntimeEngine(runtime);
manager.disposeRuntimeEngine(runtime2);
manager.close();
}
@Test
public void testEventSignalingBetweenProcessesWithPeristence() {
final Set<Long> ksessionUsed = new HashSet<Long>();
RuntimeEnvironment environment = RuntimeEnvironmentBuilder.Factory.get()
.newDefaultBuilder()
.userGroupCallback(userGroupCallback)
.addAsset(ResourceFactory.newClassPathResource("events/throw-an-event.bpmn"), ResourceType.BPMN2)
.addAsset(ResourceFactory.newClassPathResource("events/start-on-event.bpmn"), ResourceType.BPMN2)
.registerableItemsFactory(new DefaultRegisterableItemsFactory(){
@Override
public List<ProcessEventListener> getProcessEventListeners(RuntimeEngine runtime) {
List<ProcessEventListener> listeners = super.getProcessEventListeners(runtime);
listeners.add(new DefaultProcessEventListener(){
@Override
public void beforeProcessStarted(ProcessStartedEvent event) {
ksessionUsed.add(((KieSession)event.getKieRuntime()).getIdentifier());
}
});
return listeners;
}
})
.get();
manager = RuntimeManagerFactory.Factory.get().newPerCaseRuntimeManager(environment);
assertNotNull(manager);
RuntimeEngine runtime = manager.getRuntimeEngine(CaseContext.get("Case-1"));
KieSession ksession = runtime.getKieSession();
long ksession1Id = ksession.getIdentifier();
assertNotNull(ksession);
ksession.startProcess("com.sample.bpmn.hello");
AuditService auditService = runtime.getAuditService();
List<? extends ProcessInstanceLog> throwProcessLogs = auditService.findProcessInstances("com.sample.bpmn.hello");
List<? extends ProcessInstanceLog> catchProcessLogs = auditService.findProcessInstances("com.sample.bpmn.Second");
assertNotNull(throwProcessLogs);
assertEquals(1, throwProcessLogs.size());
assertEquals(ProcessInstance.STATE_COMPLETED, throwProcessLogs.get(0).getStatus().intValue());
assertNotNull(catchProcessLogs);
assertEquals(1, catchProcessLogs.size());
assertEquals(ProcessInstance.STATE_COMPLETED, catchProcessLogs.get(0).getStatus().intValue());
assertEquals(1, ksessionUsed.size());
assertEquals(ksession1Id, ksessionUsed.iterator().next().longValue());
manager.disposeRuntimeEngine(runtime);
manager.close();
}
@Test
public void testExecuteReusableSubprocess() {
final Set<Long> ksessionUsed = new HashSet<Long>();
RuntimeEnvironment environment = RuntimeEnvironmentBuilder.Factory.get()
.newDefaultBuilder()
.userGroupCallback(userGroupCallback)
.addAsset(ResourceFactory.newClassPathResource("BPMN2-CallActivity.bpmn2"), ResourceType.BPMN2)
.addAsset(ResourceFactory.newClassPathResource("BPMN2-CallActivitySubProcess.bpmn2"), ResourceType.BPMN2)
.registerableItemsFactory(new DefaultRegisterableItemsFactory(){
@Override
public List<ProcessEventListener> getProcessEventListeners(RuntimeEngine runtime) {
List<ProcessEventListener> listeners = super.getProcessEventListeners(runtime);
listeners.add(new DefaultProcessEventListener(){
@Override
public void beforeProcessStarted(ProcessStartedEvent event) {
ksessionUsed.add(((KieSession)event.getKieRuntime()).getIdentifier());
}
});
return listeners;
}
})
.get();
manager = RuntimeManagerFactory.Factory.get().newPerCaseRuntimeManager(environment);
assertNotNull(manager);
// since there is no process instance yet we need to get new session
RuntimeEngine runtime = manager.getRuntimeEngine(CaseContext.get("Case-1"));
KieSession ksession = runtime.getKieSession();
assertNotNull(ksession);
long ksession1Id = ksession.getIdentifier();
assertTrue(ksession1Id == 2);
ProcessInstance pi1 = ksession.startProcess("ParentProcess");
assertEquals(ProcessInstance.STATE_ACTIVE, pi1.getState());
manager.disposeRuntimeEngine(runtime);
runtime = manager.getRuntimeEngine(CaseContext.get("Case-1"));
ksession = runtime.getKieSession();
ksession.getWorkItemManager().completeWorkItem(1, null);
AuditService logService = runtime.getAuditService();
List<? extends ProcessInstanceLog> logs = logService.findActiveProcessInstances("ParentProcess");
assertNotNull(logs);
assertEquals(0, logs.size());
logs = logService.findActiveProcessInstances("SubProcess");
assertNotNull(logs);
assertEquals(0, logs.size());
logs = logService.findProcessInstances("ParentProcess");
assertNotNull(logs);
assertEquals(1, logs.size());
String externalId = logs.get(0).getExternalId();
assertEquals(manager.getIdentifier(), externalId);
logs = logService.findProcessInstances("SubProcess");
assertNotNull(logs);
assertEquals(1, logs.size());
externalId = logs.get(0).getExternalId();
assertEquals(manager.getIdentifier(), externalId);
assertEquals(1, ksessionUsed.size());
assertEquals(ksession1Id, ksessionUsed.iterator().next().longValue());
manager.disposeRuntimeEngine(runtime);
manager.close();
}
@Test
public void testMultipleProcessesInSingleCaseCompletedInSequence() {
final Set<Long> ksessionUsed = new HashSet<Long>();
RuntimeEnvironment environment = RuntimeEnvironmentBuilder.Factory.get()
.newDefaultBuilder()
.userGroupCallback(userGroupCallback)
.entityManagerFactory(emf)
.addAsset(ResourceFactory.newClassPathResource("BPMN2-ScriptTask.bpmn2"), ResourceType.BPMN2)
.registerableItemsFactory(new DefaultRegisterableItemsFactory(){
@Override
public List<ProcessEventListener> getProcessEventListeners(RuntimeEngine runtime) {
List<ProcessEventListener> listeners = super.getProcessEventListeners(runtime);
listeners.add(new DefaultProcessEventListener(){
@Override
public void beforeProcessStarted(ProcessStartedEvent event) {
ksessionUsed.add(((KieSession)event.getKieRuntime()).getIdentifier());
}
});
return listeners;
}
})
.get();
manager = RuntimeManagerFactory.Factory.get().newPerCaseRuntimeManager(environment);
assertNotNull(manager);
// ksession for process instance #1
// since there is no process instance yet we need to get new session
RuntimeEngine runtime = manager.getRuntimeEngine(CaseContext.get("Case-1"));
KieSession ksession = runtime.getKieSession();
assertNotNull(ksession);
long ksession1Id = ksession.getIdentifier();
assertTrue(ksession1Id > 0);
ProcessInstance pi1 = ksession.startProcess("ScriptTask");
assertEquals(ProcessInstance.STATE_COMPLETED, pi1.getState());
manager.disposeRuntimeEngine(runtime);
runtime = manager.getRuntimeEngine(CaseContext.get("Case-1"));
ksession = runtime.getKieSession();
ProcessInstance pi2 = ksession.startProcess("ScriptTask");
assertEquals(ProcessInstance.STATE_COMPLETED, pi2.getState());
// there should be only one ksession used
assertEquals(1, ksessionUsed.size());
assertEquals(ksession1Id, ksessionUsed.iterator().next().longValue());
manager.close();
}
@Test(timeout=10000)
public void testTimerOnPerCaseManager() throws Exception {
final Set<Long> ksessionUsed = new HashSet<Long>();
final NodeLeftCountDownProcessEventListener countDownListener = new NodeLeftCountDownProcessEventListener("timer", 3);
final List<Long> timerExpirations = new ArrayList<Long>();
RuntimeEnvironment environment = RuntimeEnvironmentBuilder.Factory.get()
.newDefaultBuilder()
.userGroupCallback(userGroupCallback)
.registerableItemsFactory(new DefaultRegisterableItemsFactory(){
@Override
public List<ProcessEventListener> getProcessEventListeners(
RuntimeEngine runtime) {
List<ProcessEventListener> listeners = super.getProcessEventListeners(runtime);
listeners.add(new DefaultProcessEventListener(){
@Override
public void afterNodeLeft(ProcessNodeLeftEvent event) {
if (event.getNodeInstance().getNodeName().equals("timer")) {
timerExpirations.add(event.getProcessInstance().getId());
ksessionUsed.add(((KieSession)event.getKieRuntime()).getIdentifier());
}
}
});
listeners.add(new DefaultProcessEventListener(){
@Override
public void beforeProcessStarted(ProcessStartedEvent event) {
ksessionUsed.add(((KieSession)event.getKieRuntime()).getIdentifier());
}
});
listeners.add(countDownListener);
return listeners;
}
})
.addAsset(ResourceFactory.newClassPathResource("BPMN2-IntermediateCatchEventTimerCycle3.bpmn2"), ResourceType.BPMN2)
.get();
manager = RuntimeManagerFactory.Factory.get().newPerCaseRuntimeManager(environment);
assertNotNull(manager);
// ksession for process instance #1
// since there is no process instance yet we need to get new session
RuntimeEngine runtime = manager.getRuntimeEngine(CaseContext.get("Case-1"));
KieSession ksession = runtime.getKieSession();
long ksession1Id = ksession.getIdentifier();
ProcessInstance pi1 = ksession.startProcess("IntermediateCatchEvent");
// process started
assertEquals(ProcessInstance.STATE_ACTIVE, pi1.getState());
manager.disposeRuntimeEngine(runtime);
// wait a bit for some timers to fire
countDownListener.waitTillCompleted();
runtime = manager.getRuntimeEngine(CaseContext.get("Case-1"));
ksession = runtime.getKieSession();
ksession.abortProcessInstance(pi1.getId());
manager.disposeRuntimeEngine(runtime);
manager.close();
// there should be only one ksession used
assertEquals(1, ksessionUsed.size());
assertEquals(ksession1Id, ksessionUsed.iterator().next().longValue());
}
@Test
public void testSignalEventWithDeactivate() {
RuntimeEnvironment environment = RuntimeEnvironmentBuilder.Factory.get()
.newDefaultBuilder()
.userGroupCallback(userGroupCallback)
.addAsset(ResourceFactory.newClassPathResource("events/start-on-event.bpmn"), ResourceType.BPMN2)
.get();
manager = RuntimeManagerFactory.Factory.get().newPerCaseRuntimeManager(environment);
assertNotNull(manager);
RuntimeEngine runtime1 = manager.getRuntimeEngine(CaseContext.get("Case-1"));
KieSession ksession1 = runtime1.getKieSession();
ksession1.signalEvent("SampleEvent", null);
List<? extends ProcessInstanceLog> logs = runtime1.getAuditService().findProcessInstances();
assertEquals(1, logs.size());
manager.disposeRuntimeEngine(runtime1);
((InternalRuntimeManager) manager).deactivate();
runtime1 = manager.getRuntimeEngine(CaseContext.get("Case-1"));
ksession1 = runtime1.getKieSession();
ksession1.signalEvent("SampleEvent", null);
logs = runtime1.getAuditService().findProcessInstances();
assertEquals(1, logs.size());
manager.disposeRuntimeEngine(runtime1);
((InternalRuntimeManager) manager).activate();
runtime1 = manager.getRuntimeEngine(CaseContext.get("Case-1"));
ksession1 = runtime1.getKieSession();
ksession1.signalEvent("SampleEvent", null);
logs = runtime1.getAuditService().findProcessInstances();
assertEquals(2, logs.size());
manager.disposeRuntimeEngine(runtime1);
}
@Test(timeout=10000)
public void testTimerStartWithDeactivate() {
final NodeLeftCountDownProcessEventListener countDownListener = new NodeLeftCountDownProcessEventListener("Hello", 1);
RuntimeEnvironment environment = RuntimeEnvironmentBuilder.Factory.get()
.newDefaultBuilder()
.userGroupCallback(userGroupCallback)
.addAsset(ResourceFactory.newClassPathResource("BPMN2-TimerStart.bpmn2"), ResourceType.BPMN2)
.registerableItemsFactory(new DefaultRegisterableItemsFactory(){
@Override
public List<ProcessEventListener> getProcessEventListeners(RuntimeEngine runtime) {
List<ProcessEventListener> listeners = super.getProcessEventListeners(runtime);
listeners.add(countDownListener);
return listeners;
}
})
.get();
manager = RuntimeManagerFactory.Factory.get().newPerCaseRuntimeManager(environment);
assertNotNull(manager);
countDownListener.waitTillCompleted();
RuntimeEngine runtime1 = manager.getRuntimeEngine(CaseContext.get("Case-1"));
List<? extends ProcessInstanceLog> logs = runtime1.getAuditService().findProcessInstances();
assertEquals(1, logs.size());
manager.disposeRuntimeEngine(runtime1);
((InternalRuntimeManager) manager).deactivate();
countDownListener.reset(1);
countDownListener.waitTillCompleted(2000);
runtime1 = manager.getRuntimeEngine(CaseContext.get("Case-1"));
logs = runtime1.getAuditService().findProcessInstances();
assertEquals(1, logs.size());
manager.disposeRuntimeEngine(runtime1);
((InternalRuntimeManager) manager).activate();
countDownListener.reset(1);
countDownListener.waitTillCompleted();
runtime1 = manager.getRuntimeEngine(CaseContext.get("Case-1"));
logs = runtime1.getAuditService().findProcessInstances();
assertEquals(2, logs.size());
manager.disposeRuntimeEngine(runtime1);
}
}
|
|
/**********************************************************************************
* $URL$
* $Id$
***********************************************************************************
*
* Copyright (c) 2003, 2004, 2005, 2006, 2007, 2008 The Sakai Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**********************************************************************************/
package org.sakaiproject.web.tool;
import java.io.File;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.Locale;
import java.util.Properties;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.validator.UrlValidator;
import org.sakaiproject.authz.api.AuthzGroup;
import org.sakaiproject.authz.api.GroupNotDefinedException;
import org.sakaiproject.authz.api.Role;
import org.sakaiproject.authz.api.AuthzGroupService;
import org.sakaiproject.cheftool.Context;
import org.sakaiproject.cheftool.JetspeedRunData;
import org.sakaiproject.cheftool.RunData;
import org.sakaiproject.cheftool.VelocityPortlet;
import org.sakaiproject.cheftool.VelocityPortletPaneledAction;
import org.sakaiproject.component.cover.ComponentManager;
import org.sakaiproject.component.cover.ServerConfigurationService;
import org.sakaiproject.entity.api.Reference;
import org.sakaiproject.entity.cover.EntityManager;
import org.sakaiproject.event.api.EventTrackingService;
import org.sakaiproject.event.api.NotificationService;
import org.sakaiproject.event.api.SessionState;
import org.sakaiproject.exception.IdUnusedException;
import org.sakaiproject.site.api.Site;
import org.sakaiproject.site.api.SitePage;
import org.sakaiproject.site.api.ToolConfiguration;
import org.sakaiproject.site.cover.SiteService;
import org.sakaiproject.tool.api.Placement;
import org.sakaiproject.tool.api.Session;
import org.sakaiproject.tool.api.ToolSession;
import org.sakaiproject.tool.cover.SessionManager;
import org.sakaiproject.tool.cover.ToolManager;
import org.sakaiproject.user.api.User;
import org.sakaiproject.user.api.UserNotDefinedException;
import org.sakaiproject.user.cover.UserDirectoryService;
import org.sakaiproject.util.FormattedText;
import org.sakaiproject.util.ResourceLoader;
/**
* <p>
* IFrameAction is the Sakai tool to place any web content in an IFrame on the page.
* </p>
* <p>
* Four special modes are supported - these pick the URL content from special places:
* </p>
* <ul>
* <li>"site" - to show the services "server.info.url" configuration URL setting</li>
* <li>"workspace" - to show the configured "myworkspace.info.url" URL, introducing a my workspace to users</li>
* <li>"worksite" - to show the current site's "getInfoUrlFull()" setting</li>
* <li>"annotatedurl" - to show a link to a configured target url, with a description following the link. Aid in redirection.</li>
* </ul>
*/
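// Illustrative placement configuration (hypothetical values): initState() below reads placement
// properties such as source=https://example.org/page.html, height=600px, or one of the special
// modes, e.g. special=worksite.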
public class IFrameAction extends VelocityPortletPaneledAction
{
private static Log M_log = LogFactory.getLog(IFrameAction.class);
/** Event for accessing the web-content tool */
protected final static String EVENT_ACCESS_WEB_CONTENT = "webcontent.read";
/** Event for modifying the web-content tool configuration */
protected final static String EVENT_REVISE_WEB_CONTENT = "webcontent.revise";
/** Resource bundle using current language locale */
protected static ResourceLoader rb = new ResourceLoader("iframe");
/** The source URL, in state, config and context. */
protected final static String SOURCE = "source";
/** The value in state and context for the source URL to actually used, as computed from special and URL. */
protected final static String URL = "url";
/** The height, in state, config and context. */
protected final static String HEIGHT = "height";
/** The custom height from user input * */
protected final static String CUSTOM_HEIGHT = "customNumberField";
/** The special attribute, in state, config and context. */
protected final static String SPECIAL = "special";
/** The Annotated URL Tool's url attribute, in state, config and context. */
protected final static String TARGETPAGE_URL = "TargetPageUrl";
/** The Annotated URL Tool's popup attribute, in state, config and context. */
protected final static String TARGETPAGE_POPUP = "TargetPagePopup";
/** The Annotated URL Tool's name attribute, in state, config and context. */
protected final static String TARGETPAGE_NAME = "TargetPageName";
/** The Annotated URL Tool's text attribute, in state, config and context. */
protected final static String ANNOTATED_TEXT = "desp";
/** Support an external url defined in sakai.properties, in state, config and context. */
protected final static String SAKAI_PROPERTIES_URL_KEY = "sakai.properties.url.key";
/** If set, always hide the OPTIONS button */
protected final static String HIDE_OPTIONS = "hide.options";
/** Special value for site. */
protected final static String SPECIAL_SITE = "site";
/** Special value for Annotated URL Tool. */
protected final static String SPECIAL_ANNOTATEDURL = "annotatedurl";
/** Special value for myworkspace. */
protected final static String SPECIAL_WORKSPACE = "workspace";
/** Special value for worksite. */
protected final static String SPECIAL_WORKSITE = "worksite";
/** The title, in state and context. */
protected final static String TITLE = "title";
/**
* Whether to pass through the PID to the URL displayed in the IFRAME. This enables integration, since the application in the IFRAME will know what site and tool it is part of.
*/
private final static String PASS_PID = "passthroughPID";
/** Valid digits for custom height from user input **/
protected static final String VALID_DIGITS = "0123456789";
/** Choices of pixels displayed in the customization page */
public String[] ourPixels = { "300px", "450px", "600px", "750px", "900px", "1200px", "1800px", "2400px" };
/** Attributes for web content tool page title **/
private static final String STATE_PAGE_TITLE = "pageTitle";
private static final String FORM_PAGE_TITLE = "title-of-page";
private static final String FORM_TOOL_TITLE = "title-of-tool";
private static final int MAX_TITLE_LENGTH = 99;
private static final int MAX_SITE_INFO_URL_LENGTH = 255;
/**
* Expand macros to insert session information into the URL?
*/
private final static String MACRO_EXPANSION = "expandMacros";
/** Macro name: Site id (GUID) */
protected static final String MACRO_SITE_ID = "${SITE_ID}";
/** Macro name: User id */
protected static final String MACRO_USER_ID = "${USER_ID}";
/** Macro name: User enterprise id */
protected static final String MACRO_USER_EID = "${USER_EID}";
/** Macro name: First name */
protected static final String MACRO_USER_FIRST_NAME = "${USER_FIRST_NAME}";
/** Macro name: Last name */
protected static final String MACRO_USER_LAST_NAME = "${USER_LAST_NAME}";
/** Macro name: Role */
protected static final String MACRO_USER_ROLE = "${USER_ROLE}";
/** Macro name: Session */
protected static final String MACRO_SESSION_ID = "${SESSION_ID}";
private static final String MACRO_CLASS_SITE_PROP = "SITE_PROP:";
private static final String IFRAME_ALLOWED_MACROS_PROPERTY = "iframe.allowed.macros";
private static final String MACRO_DEFAULT_ALLOWED = "${USER_ID},${USER_EID},${USER_FIRST_NAME},${USER_LAST_NAME},${SITE_ID},${USER_ROLE}";
private static ArrayList allowedMacrosList;
// initialize list of approved macros for replacement within URL
static
{
allowedMacrosList = new ArrayList();
final String allowedMacros =
ServerConfigurationService.getString(IFRAME_ALLOWED_MACROS_PROPERTY, MACRO_DEFAULT_ALLOWED);
String parts[] = allowedMacros.split(",");
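// Note: entries are added verbatim (not trimmed), so the property value is expected to be a
// comma-separated list without surrounding whitespace.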
if(parts != null) {
for(int i = 0; i < parts.length; i++) {
allowedMacrosList.add(parts[i]);
}
}
}
/** For tracking event */
private static EventTrackingService m_eventTrackingService = null;
private AuthzGroupService authzGroupService;
/**
* Populate the state with configuration settings
*/
protected void initState(SessionState state, VelocityPortlet portlet, JetspeedRunData rundata)
{
// TODO: we might want to keep this from running for each request - but by letting it run we get fresh info each time... -ggolden
super.initState(state, portlet, rundata);
Placement placement = ToolManager.getCurrentPlacement();
Properties config = placement.getConfig();
// set the pass_pid parameter
boolean passPid = false;
String passPidStr = config.getProperty(PASS_PID, "false");
state.removeAttribute(PASS_PID);
if ("true".equalsIgnoreCase(passPidStr))
{
state.setAttribute(PASS_PID, Boolean.TRUE);
passPid = true;
}
// Assume macro expansion (disable on request)
boolean macroExpansion = true;
String macroExpansionStr = config.getProperty(MACRO_EXPANSION, "true");
state.removeAttribute(MACRO_EXPANSION);
if ("false".equalsIgnoreCase(macroExpansionStr))
{
state.setAttribute(MACRO_EXPANSION, Boolean.FALSE);
macroExpansion = false;
}
// set the special setting
String special = config.getProperty(SPECIAL);
final String sakaiPropertiesUrlKey = config.getProperty(SAKAI_PROPERTIES_URL_KEY);
final String hideOptions = config.getProperty(HIDE_OPTIONS);
// check for an older way the ChefWebPagePortlet took parameters, converting to our "special" values
if (special == null)
{
if ("true".equals(config.getProperty("site")))
{
special = SPECIAL_SITE;
}
else if ("true".equals(config.getProperty("workspace")))
{
special = SPECIAL_WORKSPACE;
}
else if ("true".equals(config.getProperty("worksite")))
{
special = SPECIAL_WORKSITE;
}
else if ("true".equals(config.getProperty("annotatedurl")))
{
special = SPECIAL_ANNOTATEDURL;
}
}
state.removeAttribute(SPECIAL);
if ((special != null) && (special.trim().length() > 0))
{
state.setAttribute(SPECIAL, special);
}
state.removeAttribute(HIDE_OPTIONS);
if ((hideOptions != null) && (hideOptions.trim().length() > 0))
{
state.setAttribute(HIDE_OPTIONS, hideOptions);
}
// set the source url setting
String source = StringUtils.trimToNull(config.getProperty(SOURCE));
// check for an older way the ChefWebPagePortlet took parameters, converting to our "source" value
if (source == null)
{
source = StringUtils.trimToNull(config.getProperty("url"));
}
// store the raw as-configured source url
state.removeAttribute(SOURCE);
if (source != null)
{
state.setAttribute(SOURCE, source);
}
// compute working URL, modified from the configuration URL if special
String url = sourceUrl(special, source, placement.getContext(), macroExpansion, passPid, placement.getId(), sakaiPropertiesUrlKey);
state.setAttribute(URL, url);
// set the height
state.setAttribute(HEIGHT, config.getProperty(HEIGHT, "600px"));
state.setAttribute(ANNOTATED_TEXT, config.getProperty(ANNOTATED_TEXT, ""));
// set Annotated URL Tool attributes if TargetPageUrl is defined
if(config.getProperty(TARGETPAGE_URL)!=null)
{
state.setAttribute(TARGETPAGE_URL,config.getProperty(TARGETPAGE_URL));
state.setAttribute(TARGETPAGE_NAME,config.getProperty(TARGETPAGE_NAME));
state.setAttribute(TARGETPAGE_POPUP,config.getProperty(TARGETPAGE_POPUP));
}
// set the title
state.setAttribute(TITLE, placement.getTitle());
if (state.getAttribute(STATE_PAGE_TITLE) == null)
{
SitePage p = SiteService.findPage(getCurrentSitePageId());
state.setAttribute(STATE_PAGE_TITLE, p.getTitle());
}
// if events found in tool registration file put them in state
if((StringUtils.trimToNull(config.getProperty(EVENT_ACCESS_WEB_CONTENT)) != null)) {
state.setAttribute(EVENT_ACCESS_WEB_CONTENT, config.getProperty(EVENT_ACCESS_WEB_CONTENT));
}
if((StringUtils.trimToNull(config.getProperty(EVENT_REVISE_WEB_CONTENT)) != null)) {
state.setAttribute(EVENT_REVISE_WEB_CONTENT, config.getProperty(EVENT_REVISE_WEB_CONTENT));
}
if (m_eventTrackingService == null)
{
m_eventTrackingService = (EventTrackingService) ComponentManager.get("org.sakaiproject.event.api.EventTrackingService");
}
if (authzGroupService == null)
{
authzGroupService = ComponentManager.get(AuthzGroupService.class);
}
}
/**
* Get the current site page our current tool is placed on.
*
* @return The site page id on which our tool is placed.
*/
protected String getCurrentSitePageId()
{
ToolSession ts = SessionManager.getCurrentToolSession();
if (ts != null)
{
ToolConfiguration tool = SiteService.findTool(ts.getPlacementId());
if (tool != null)
{
return tool.getPageId();
}
}
return null;
}
/** Construct and return localized filepath, if it exists
**/
private String getLocalizedURL(String property) {
String filename = ServerConfigurationService.getString(property);
if ( filename == null || filename.trim().length()==0 )
return filename;
else
filename = filename.trim();
int extIndex = filename.lastIndexOf(".") >= 0 ? filename.lastIndexOf(".") : filename.length()-1;
String ext = filename.substring(extIndex);
String doc = filename.substring(0,extIndex);
Locale locale = new ResourceLoader().getLocale();
if (locale != null){
// check if localized file exists for current language/locale/variant
String localizedFile = doc + "_" + locale.toString() + ext;
String filePath = getServletConfig().getServletContext().getRealPath( ".."+localizedFile );
if ( (new File(filePath)).exists() )
return localizedFile;
// otherwise, check if localized file exists for current language
localizedFile = doc + "_" + locale.getLanguage() + ext;
filePath = getServletConfig().getServletContext().getRealPath( ".."+localizedFile );
if ( (new File(filePath)).exists() )
return localizedFile;
}
return filename;
}
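// Illustrative (hypothetical) resolution order for the lookup above, assuming the property
// "server.info.url" is set to "/library/content/server_info.html" and the current locale is fr_CA:
//   1. /library/content/server_info_fr_CA.html   (full language_COUNTRY form)
//   2. /library/content/server_info_fr.html      (language only)
//   3. /library/content/server_info.html         (the configured default)
// The first candidate that exists on disk is returned; otherwise the configured filename is returned unchanged.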
/**
* Compute the actual URL we will use, based on the configured special and source URLs
*/
protected String sourceUrl(String special, String source, String context, boolean macroExpansion, boolean passPid, String pid, String sakaiPropertiesUrlKey)
{
String rv = StringUtils.trimToNull(source);
// if marked for "site", use the site intro from the properties
if (SPECIAL_SITE.equals(special))
{
rv = StringUtils.trimToNull(getLocalizedURL("server.info.url"));
}
// if marked for "workspace", use the "user" site info from the properties
else if (SPECIAL_WORKSPACE.equals(special))
{
rv = StringUtils.trimToNull(getLocalizedURL("myworkspace.info.url"));
}
// if marked for "worksite", use the setting from the site's definition
else if (SPECIAL_WORKSITE.equals(special))
{
// set the url to the site of this request's config'ed url
try
{
// get the site's info URL, if defined
Site s = SiteService.getSite(context);
rv = StringUtils.trimToNull(s.getInfoUrlFull());
// compute the info url for the site if it has no specific InfoUrl
if (rv == null)
{
// access will show the site description or title...
rv = ServerConfigurationService.getAccessUrl() + s.getReference();
}
}
catch (Exception any)
{
}
}
else if (sakaiPropertiesUrlKey != null && sakaiPropertiesUrlKey.length() > 1)
{
// set the url to a string defined in sakai.properties
rv = StringUtils.trimToNull(ServerConfigurationService.getString(sakaiPropertiesUrlKey));
}
// if it's not special, and we have no value yet, set it to the webcontent instruction page, as configured
if (rv == null || rv.equals("http://") || rv.equals("https://"))
{
rv = StringUtils.trimToNull(getLocalizedURL("webcontent.instructions.url"));
}
if (rv != null)
{
// accept a partial reference url (i.e. "/content/group/sakai/test.gif"), convert to full url
rv = convertReferenceUrl(rv);
// pass the PID through on the URL, IF configured to do so
if (passPid)
{
if (rv.indexOf("?") < 0)
{
rv = rv + "?";
}
else
{
rv = rv + "&";
}
rv = rv + "pid=" + pid;
}
if (macroExpansion)
{
rv = doMacroExpansion(rv);
}
}
return rv;
}
/**
* If the url is a valid reference, convert it to a URL, else return it unchanged.
*/
protected String convertReferenceUrl(String url)
{
// make a reference
Reference ref = EntityManager.newReference(url);
// if it didn't recognize this, return it unchanged
if (ref.isKnownType())
{
// return the reference's url
String refUrl = ref.getUrl();
if (refUrl != null)
{
return refUrl;
}
}
return url;
}
/**
* Get the current user id
* @throws SessionDataException
* @return User id
*/
private String getUserId() throws SessionDataException
{
Session session = SessionManager.getCurrentSession();
if (session == null)
{
throw new SessionDataException("No current user session");
}
return session.getUserId();
}
/**
* Get the current session id
* @throws SessionDataException
* @return Session id
*/
private String getSessionId() throws SessionDataException
{
Session session = SessionManager.getCurrentSession();
if (session == null)
{
throw new SessionDataException("No current user session");
}
return session.getId();
}
/**
* Get the current user eid
* @throws SessionDataException
* @return User eid
*/
private String getUserEid() throws SessionDataException
{
Session session = SessionManager.getCurrentSession();
if (session == null)
{
throw new SessionDataException("No current user session");
}
return session.getUserEid();
}
/**
* Get current User information
* @return {@link User} data
* @throws IdUnusedException
* @throws SessionDataException
* @throws UserNotDefinedException
*/
private User getUser() throws IdUnusedException, SessionDataException, UserNotDefinedException
{
return UserDirectoryService.getUser(this.getUserId());
}
/**
* Get the current site id
* @throws SessionDataException
* @return Site id (GUID)
*/
private String getSiteId() throws SessionDataException
{
Placement placement = ToolManager.getCurrentPlacement();
if (placement == null)
{
throw new SessionDataException("No current tool placement");
}
return placement.getContext();
}
/**
* Fetch the user role in the current site
* @return Role
* @throws IdUnusedException
* @throws SessionDataException
* @throws GroupNotDefinedException
*/
private String getUserRole() throws IdUnusedException, SessionDataException, GroupNotDefinedException
{
AuthzGroup group;
Role role;
group = authzGroupService.getAuthzGroup("/site/" + getSiteId());
if (group == null)
{
throw new SessionDataException("No current group");
}
role = group.getUserRole(this.getUserId());
if (role == null)
{
throw new SessionDataException("No current role");
}
return role.getId();
}
/**
* Get a site property by name
*
* @param name Property name
* @return The property value (null if none)
* @throws IdUnusedException
* @throws SessionDataException
*/
private String getSiteProperty(String name) throws IdUnusedException, SessionDataException
{
Site site;
site = SiteService.getSite(getSiteId());
return site.getProperties().getProperty(name);
}
/**
* Lookup value for requested macro name
*/
private String getMacroValue(String macroName)
{
try
{
if (macroName.equals(MACRO_USER_ID))
{
return this.getUserId();
}
if (macroName.equals(MACRO_USER_EID))
{
return this.getUserEid();
}
if (macroName.equals(MACRO_USER_FIRST_NAME))
{
return this.getUser().getFirstName();
}
if (macroName.equals(MACRO_USER_LAST_NAME))
{
return this.getUser().getLastName();
}
if (macroName.equals(MACRO_SITE_ID))
{
return getSiteId();
}
if (macroName.equals(MACRO_USER_ROLE))
{
return this.getUserRole();
}
if (macroName.equals(MACRO_SESSION_ID))
{
return this.getSessionId();
}
if (macroName.startsWith("${"+MACRO_CLASS_SITE_PROP))
{
macroName = macroName.substring(2); // Remove leading "${"
macroName = macroName.substring(0, macroName.length()-1); // Remove trailing "}"
// at this point we have "SITE_PROP:some-property-name"
// separate the property name from the prefix then return the property value
String[] sitePropertyKey = macroName.split(":");
if (sitePropertyKey != null && sitePropertyKey.length > 1) {
String sitePropertyValue = getSiteProperty(sitePropertyKey[1]);
return (sitePropertyValue == null) ? "" : sitePropertyValue;
}
}
}
catch (Throwable throwable)
{
return "";
}
/*
* An unsupported macro: use the original text "as is"
*/
return macroName;
}
/**
* Expand one macro reference
* @param text Expand macros found in this text
* @param macroName Macro name
*/
private void expand(StringBuilder sb, String macroName)
{
int index;
/*
* Replace every occurrence of the macro in the parameter list
*/
index = sb.indexOf(macroName);
while (index != -1)
{
String macroValue = URLEncoder.encode(getMacroValue(macroName));
sb.replace(index, (index + macroName.length()), macroValue);
index = sb.indexOf(macroName, (index + macroValue.length()));
}
}
/**
* Expand macros, inserting session and site information
* @param originalText Expand macros found in this text
* @return [possibly] Updated text
*/
private String doMacroExpansion(String originalText)
{
StringBuilder sb;
/*
* Quit now if no macros are embedded in the text
*/
if (originalText.indexOf("${") == -1)
{
return originalText;
}
/*
* Expand each macro
*/
sb = new StringBuilder(originalText);
Iterator i = allowedMacrosList.iterator();
while(i.hasNext()) {
String macro = (String) i.next();
expand(sb, macro);
}
return sb.toString();
}
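// Illustrative (hypothetical) example of the expansion performed above, assuming MACRO_USER_ID is the
// literal "${USER_ID}" and MACRO_CLASS_SITE_PROP names the "SITE_PROP" prefix:
//   doMacroExpansion("https://example.org/tool?user=${USER_ID}&dept=${SITE_PROP:department}")
//     -> "https://example.org/tool?user=abc123&dept=Physics"   (values here are made up)
// Each substituted value is URL-encoded by expand(); macros not in allowedMacrosList are left in place "as is".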
/**
* Setup the velocity context and choose the template for the response.
*/
public String buildMainPanelContext(VelocityPortlet portlet, Context context, RunData rundata, SessionState state)
{
// do options if we are in options mode
if (MODE_OPTIONS.equals(state.getAttribute(STATE_MODE)))
{
return buildOptionsPanelContext(portlet, context, rundata, state);
}
// if we rely on state (like all the other tools), we won't pick up any changes others make to the configuration till we are refreshed... -ggolden
// set our configuration into the context for the vm
String url = (String) state.getAttribute(URL);
String special = (String) state.getAttribute(SPECIAL);
context.put(URL, url);
context.put(HEIGHT, state.getAttribute(HEIGHT));
if(url != null && url.startsWith("http:") && ServerConfigurationService.getServerUrl().startsWith("https:")){
context.put("popup", true);
}
//for annotatedurl
context.put(TARGETPAGE_URL, state.getAttribute(TARGETPAGE_URL));
context.put(TARGETPAGE_POPUP, state.getAttribute(TARGETPAGE_POPUP));
context.put(TARGETPAGE_NAME, state.getAttribute(TARGETPAGE_NAME));
context.put(ANNOTATED_TEXT, state.getAttribute(ANNOTATED_TEXT));
// set the resource bundle with our strings
context.put("tlang", rb);
// setup for the options menu if needed
String hideOptions = (String) state.getAttribute(HIDE_OPTIONS);
if (hideOptions != null && "true".equalsIgnoreCase(hideOptions))
{
// always hide Options menu if hide.options is specified
} else if (SiteService.allowUpdateSite(ToolManager.getCurrentPlacement().getContext()))
{
context.put("options_title", ToolManager.getCurrentPlacement().getTitle() + " " + rb.getString("gen.options"));
}
// tracking event
String siteId = "";
try
{
Site s = SiteService.getSite(ToolManager.getCurrentPlacement().getContext());
siteId = s.getId();
}
catch (Throwable e)
{
}
if (special == null)
{
if(state.getAttribute(EVENT_ACCESS_WEB_CONTENT) == null) {
// this is a Web Content tool
m_eventTrackingService.post(m_eventTrackingService.newEvent(EVENT_ACCESS_WEB_CONTENT, url, siteId, false, NotificationService.NOTI_NONE));
}
else {
// event in tool registration file will be used
m_eventTrackingService.post(m_eventTrackingService.newEvent((String)state.getAttribute(EVENT_ACCESS_WEB_CONTENT), url, siteId, false, NotificationService.NOTI_NONE));
}
}
else {
if(state.getAttribute(EVENT_ACCESS_WEB_CONTENT) != null) {
// special and event in tool registration file
m_eventTrackingService.post(m_eventTrackingService.newEvent((String)state.getAttribute(EVENT_ACCESS_WEB_CONTENT), url, siteId, false, NotificationService.NOTI_NONE));
}
}
return (String) getContext(rundata).get("template");
}
/**
* Setup the velocity context and choose the template for options.
*/
public String buildOptionsPanelContext(VelocityPortlet portlet, Context context, RunData data, SessionState state)
{
// provide the source, and let the user edit, if not special
String special = (String) state.getAttribute(SPECIAL);
String source = "";
String siteId = "";
if (special == null)
{
source = (String) state.getAttribute(SOURCE);
if (source == null) source = "";
context.put(SOURCE, source);
context.put("heading", rb.getString("gen.custom"));
}
// set the heading based on special
else
{
if (SPECIAL_SITE.equals(special))
{
context.put("heading", rb.getString("gen.custom.site"));
}
else if (SPECIAL_WORKSPACE.equals(special))
{
context.put("heading", rb.getString("gen.custom.workspace"));
}
else if (SPECIAL_WORKSITE.equals(special))
{
context.put("heading", rb.getString("gen.custom.worksite"));
// for worksite, also include the Site's infourl and description
try
{
Site s = SiteService.getSite(ToolManager.getCurrentPlacement().getContext());
siteId = s.getId();
String infoUrl = StringUtils.trimToNull(s.getInfoUrl());
if (infoUrl != null)
{
context.put("info_url", infoUrl);
}
String description = StringUtils.trimToNull(s.getDescription());
if (description != null)
{
description = FormattedText.escapeHtmlFormattedTextarea(description);
context.put("description", description);
}
}
catch (Throwable e)
{
}
}
else if (SPECIAL_ANNOTATEDURL.equals(special))
{
context.put("heading", rb.getString("gen.custom.annotatedurl"));
// for Annotated URL Tool page, also include the description
try
{
String desp = state.getAttribute(ANNOTATED_TEXT).toString();
context.put("description", desp);
}
catch (Throwable e)
{
}
}
else
{
context.put("heading", rb.getString("gen.custom"));
}
}
boolean selected = false;
String height = state.getAttribute(HEIGHT).toString();
for (int i = 0; i < ourPixels.length; i++)
{
if (height.equals(ourPixels[i]))
{
selected = true;
break;
}
}
if (!selected)
{
String[] strings = height.trim().split("px");
context.put("custom_height", strings[0]);
height = rb.getString("gen.heisomelse");
}
context.put(HEIGHT, height);
context.put(TITLE, state.getAttribute(TITLE));
context.put("tlang", rb);
context.put("doUpdate", BUTTON + "doConfigure_update");
context.put("doCancel", BUTTON + "doCancel");
context.put("form_tool_title", FORM_TOOL_TITLE);
context.put("form_page_title", FORM_PAGE_TITLE);
// if we are part of a site, and the only tool on the page, offer the popup to edit
Placement placement = ToolManager.getCurrentPlacement();
ToolConfiguration toolConfig = SiteService.findTool(placement.getId());
if ((state.getAttribute(SPECIAL) == null) && (toolConfig != null))
{
try
{
Site site = SiteService.getSite(toolConfig.getSiteId());
siteId = site.getId();
SitePage page = site.getPage(toolConfig.getPageId());
// if this is the only tool on that page, update the page's title also
if ((page.getTools() != null) && (page.getTools().size() == 1))
{
context.put("showPopup", Boolean.TRUE);
context.put("popup", Boolean.valueOf(page.isPopUp()));
context.put("pageTitleEditable", Boolean.TRUE);
context.put("page_title", (String) state.getAttribute(STATE_PAGE_TITLE));
}
}
catch (Throwable e)
{
}
}
// pick the "-customize" template based on the standard template name
String template = (String) getContext(data).get("template");
// pick the site customize template if we are in that mode
if (SPECIAL_WORKSITE.equals(special))
{
template = template + "-site-customize";
}
else if (SPECIAL_WORKSPACE.equals(special))
{
template = template + "-customize";
}
else if (SPECIAL_ANNOTATEDURL.equals(special))
{
template = template + "-annotatedurl-customize";
}
else
{
template = template + "-customize";
}
// tracking event
if(siteId.length() == 0) {
try
{
Site s = SiteService.getSite(ToolManager.getCurrentPlacement().getContext());
siteId = s.getId();
}
catch (Throwable e)
{
}
}
if (special == null)
{
if(state.getAttribute(EVENT_REVISE_WEB_CONTENT) == null) {
// this is a Web Content tool
m_eventTrackingService.post(m_eventTrackingService.newEvent(EVENT_REVISE_WEB_CONTENT, source, siteId, true, NotificationService.NOTI_NONE));
}
else {
// event in tool registration file will be used
m_eventTrackingService.post(m_eventTrackingService.newEvent((String)state.getAttribute(EVENT_REVISE_WEB_CONTENT), source, siteId, true, NotificationService.NOTI_NONE));
}
}
else {
if(state.getAttribute(EVENT_REVISE_WEB_CONTENT) != null) {
// special and event in tool registration file
m_eventTrackingService.post(m_eventTrackingService.newEvent((String)state.getAttribute(EVENT_REVISE_WEB_CONTENT), source, siteId, true, NotificationService.NOTI_NONE));
}
}
// output the max limit
context.put("max_length_title", MAX_TITLE_LENGTH);
context.put("max_length_info_url", MAX_SITE_INFO_URL_LENGTH);
return template;
}
/**
* Handle the configure context's update button
*/
public void doConfigure_update(RunData data, Context context)
{
// TODO: if we do limit the initState() calls, we need to make sure we get a new one after this call -ggolden
String peid = ((JetspeedRunData) data).getJs_peid();
SessionState state = ((JetspeedRunData) data).getPortletSessionState(peid);
Placement placement = ToolManager.getCurrentPlacement();
// get the site toolConfiguration, if this is part of a site.
ToolConfiguration toolConfig = SiteService.findTool(placement.getId());
// height
String height = data.getParameters().getString(HEIGHT);
if (height.equals(rb.getString("gen.heisomelse")))
{
String customHeight = data.getParameters().getString(CUSTOM_HEIGHT);
if ((customHeight != null) && (!customHeight.equals("")))
{
if (!checkDigits(customHeight))
{
addAlert(state, rb.getString("java.alert.pleentval"));
return;
}
height = customHeight + "px";
state.setAttribute(HEIGHT, height);
placement.getPlacementConfig().setProperty(HEIGHT, height);
}
else
{
addAlert(state, rb.getString("java.alert.pleentval"));
return;
}
}
else if (SPECIAL_ANNOTATEDURL.equals(state.getAttribute(SPECIAL)))
{
// update the site info
try
{
String desp = data.getParameters().getString("description");
state.setAttribute(ANNOTATED_TEXT, desp);
placement.getPlacementConfig().setProperty(ANNOTATED_TEXT, desp);
}
catch (Throwable e)
{
}
}
else
{
state.setAttribute(HEIGHT, height);
placement.getPlacementConfig().setProperty(HEIGHT, height);
}
// title
String title = data.getParameters().getString(TITLE);
if (StringUtils.isBlank(title))
{
addAlert(state, rb.getString("gen.tootit.empty"));
return;
// SAK-19515 check for LENGTH of tool title
}
else if (title.length() > MAX_TITLE_LENGTH)
{
addAlert(state, rb.getString("gen.tootit.toolong"));
return;
}
placement.setTitle(title);
// site info url
String infoUrl = StringUtils.trimToNull(data.getParameters().getString("infourl"));
if (infoUrl != null && infoUrl.length() > MAX_SITE_INFO_URL_LENGTH)
{
addAlert(state, rb.getString("gen.info.url.toolong"));
return;
}
try
{
Site site = SiteService.getSite(toolConfig.getSiteId());
SitePage page = site.getPage(toolConfig.getPageId());
page.setTitleCustom(true);
// for web content tool, if it is a site page tool, and the only tool on the page, update the page title / popup.
if ((state.getAttribute(SPECIAL) == null) && (toolConfig != null))
{
// if this is the only tool on that page, update the page's title also
if ((page.getTools() != null) && (page.getTools().size() == 1))
{
String newPageTitle = data.getParameters().getString(FORM_PAGE_TITLE);
if (StringUtils.isBlank(newPageTitle))
{
addAlert(state, rb.getString("gen.pagtit.empty"));
return;
}
else if (newPageTitle.length() > MAX_TITLE_LENGTH)
{
addAlert(state, rb.getString("gen.pagtit.toolong"));
return;
}
page.setTitle(newPageTitle);
state.setAttribute(STATE_PAGE_TITLE, newPageTitle);
// popup
boolean popup = data.getParameters().getBoolean("popup");
page.setPopup(popup);
}
}
SiteService.save(site);
}
catch (Exception ignore)
{
M_log.warn("doConfigure_update: " + ignore);
}
// read source if we are not special
if (state.getAttribute(SPECIAL) == null)
{
String source = StringUtils.trimToEmpty(data.getParameters().getString(SOURCE));
// User entered nothing in the source box; give the user an alert
if (StringUtils.isBlank(source))
{
addAlert(state, rb.getString("gen.url.empty"));
return;
}
if ((!source.startsWith("/")) && (source.indexOf("://") == -1))
{
source = "http://" + source;
}
// Validate the url
UrlValidator urlValidator = new UrlValidator();
if (!urlValidator.isValid(source))
{
addAlert(state, rb.getString("gen.url.invalid"));
return;
}
// update state
placement.getPlacementConfig().setProperty(SOURCE, source);
}
else if (SPECIAL_WORKSITE.equals(state.getAttribute(SPECIAL)))
{
if ((infoUrl != null) && (infoUrl.length() > 0) && (!infoUrl.startsWith("/")) && (infoUrl.indexOf("://") == -1))
{
infoUrl = "http://" + infoUrl;
}
String description = StringUtils.trimToNull(data.getParameters().getString("description"));
description = FormattedText.processEscapedHtml(description);
// update the site info
try
{
SiteService.saveSiteInfo(ToolManager.getCurrentPlacement().getContext(), description, infoUrl);
}
catch (Throwable e)
{
M_log.warn("doConfigure_update: " + e);
}
}
// save
// TODO: we might have just saved the entire site, so this would not be needed -ggolden
placement.save();
// we are done with customization... back to the main mode
state.removeAttribute(STATE_MODE);
// refresh the whole page, since popup and title may have changed
scheduleTopRefresh();
}
/**
* doCancel is called for form input tags type="submit" named="eventSubmit_doCancel"; it cancels the options process
*/
public void doCancel(RunData data, Context context)
{
// access the portlet element id to find our state
String peid = ((JetspeedRunData) data).getJs_peid();
SessionState state = ((JetspeedRunData) data).getPortletSessionState(peid);
// we are done with customization... back to the main mode
state.removeAttribute(STATE_MODE);
state.removeAttribute(STATE_MESSAGE);
}
/**
* Check if the string from user input contains any characters other than digits
*
* @param height
* String from user input
* @return True if all characters are digits, false otherwise.
*/
private boolean checkDigits(String height)
{
for (int i = 0; i < height.length(); i++)
{
if (VALID_DIGITS.indexOf(height.charAt(i)) == -1) return false;
}
return true;
}
/**
* Note a "local" problem (we failed to get session or site data)
*/
private static class SessionDataException extends Exception
{
public SessionDataException(String text)
{
super(text);
}
}
}
|
|
/*
* Copyright (c) 2008-2014 MongoDB, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mongodb;
import org.junit.Before;
import org.junit.Test;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import static com.mongodb.ClusterConnectionMode.Multiple;
import static com.mongodb.ClusterType.ReplicaSet;
import static com.mongodb.ServerConnectionState.Connected;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
public class ReadPreferenceTest {
private static final int FOUR_MEG = 4 * 1024 * 1024;
private static final String HOST = "localhost";
private ServerDescription primary, secondary, otherSecondary;
private ClusterDescription set;
private ClusterDescription setNoSecondary;
private ClusterDescription setNoPrimary;
@Before
public void setUp() throws IOException {
final Tags tags1 = new Tags("foo", "1").append("bar", "2").append("baz", "1");
final Tags tags2 = new Tags("foo", "1").append("bar", "2").append("baz", "2");
final Tags tags3 = new Tags("foo", "1").append("bar", "2").append("baz", "3");
final long acceptableLatencyMS = 15;
final long bestPingTime = 50;
final long acceptablePingTime = bestPingTime + (acceptableLatencyMS / 2);
final long unacceptablePingTime = bestPingTime + acceptableLatencyMS + 1;
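// With these values the acceptable latency window is [50ms, 65ms]: the "acceptable" ping time works
// out to 50 + 15/2 = 57ms (inside the window) and the "unacceptable" one to 50 + 15 + 1 = 66ms (just outside).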
primary = ServerDescription.builder().state(Connected).address(new ServerAddress(HOST, 27017))
.averagePingTime(acceptablePingTime * 1000000L, java.util.concurrent.TimeUnit.NANOSECONDS)
.ok(true)
.type(ServerType.ReplicaSetPrimary)
.tags(tags1)
.maxDocumentSize(FOUR_MEG).build();
secondary = ServerDescription.builder().state(Connected).address(new ServerAddress(HOST, 27018))
.averagePingTime(bestPingTime * 1000000L, java.util.concurrent.TimeUnit.NANOSECONDS)
.ok(true)
.type(ServerType.ReplicaSetSecondary)
.tags(tags2)
.maxDocumentSize(FOUR_MEG).build();
otherSecondary = ServerDescription.builder().state(Connected).address(new ServerAddress(HOST, 27019))
.averagePingTime(unacceptablePingTime * 1000000L, java.util.concurrent.TimeUnit.NANOSECONDS)
.ok(true)
.type(ServerType.ReplicaSetSecondary)
.tags(tags3)
.maxDocumentSize(FOUR_MEG)
.build();
final List<ServerDescription> nodeList = new ArrayList<ServerDescription>();
nodeList.add(primary);
nodeList.add(secondary);
nodeList.add(otherSecondary);
set = new ClusterDescription(Multiple, ReplicaSet, nodeList);
setNoPrimary = new ClusterDescription(Multiple, ReplicaSet, Arrays.asList(secondary, otherSecondary));
setNoSecondary = new ClusterDescription(Multiple, ReplicaSet, Arrays.asList(primary));
}
@Test
public void testStaticPreferences() {
assertEquals(new BasicDBObject("mode", "primary"), ReadPreference.primary().toDBObject());
assertEquals(new BasicDBObject("mode", "secondary"), ReadPreference.secondary().toDBObject());
assertEquals(new BasicDBObject("mode", "secondaryPreferred"), ReadPreference.secondaryPreferred().toDBObject());
assertEquals(new BasicDBObject("mode", "primaryPreferred"), ReadPreference.primaryPreferred().toDBObject());
assertEquals(new BasicDBObject("mode", "nearest"), ReadPreference.nearest().toDBObject());
}
@Test
public void testPrimaryReadPreference() {
assertEquals(1, ReadPreference.primary().choose(set).size());
assertEquals(primary, ReadPreference.primary().choose(set).get(0));
assertTrue(ReadPreference.primary().choose(setNoPrimary).isEmpty());
}
@Test
public void testSecondaryReadPreference() {
assertTrue(ReadPreference.secondary().toString().startsWith("secondary"));
List<ServerDescription> candidates = ReadPreference.secondary().choose(set);
assertEquals(2, candidates.size());
assertTrue(candidates.contains(secondary));
assertTrue(candidates.contains(otherSecondary));
candidates = ReadPreference.secondary().choose(setNoSecondary);
assertTrue(candidates.isEmpty());
// Test secondary mode, with tags
// List<String> stringList = Arrays.asList("foo", "bar");
// List<TagMap> tagsList2 = Arrays.asList(TagMap.singleton("foo", "bar"), TagMap.singleton("bar", "baz"));
// List<Map<String, String>> tagsList3 = Arrays.asList(Collections.singletonMap("foo", "1"));
// List<Map<String, String>> tagsList4 = Arrays.asList(Collections.<String, String>singletonMap("foo", "1"));
ReadPreference pref = ReadPreference.secondary(new BasicDBObject("foo", "1"), new BasicDBObject("bar", "2"));
assertTrue(pref.toString().startsWith("secondary"));
candidates = pref.choose(set);
assertTrue((candidates.get(0).equals(secondary) || candidates.get(0).equals(otherSecondary)) && !candidates.get(0).equals(primary));
pref = ReadPreference.secondary(new BasicDBObject("baz", "1"));
assertTrue(pref.choose(set).isEmpty());
pref = ReadPreference.secondary(new BasicDBObject("baz", "2"));
assertTrue(pref.choose(set).get(0).equals(secondary));
pref = ReadPreference.secondary(new BasicDBObject("madeup", "1"));
// assertEquals(Collections.<String, String>singletonMap("mode", "secondary")
// .append("tags", Arrays.asList(Collections.<String, String>singletonMap("madeup", "1"))),
// pref.toDBObject());
assertTrue(pref.choose(set).isEmpty());
}
@Test
public void testPrimaryPreferredMode() {
ReadPreference pref = ReadPreference.primaryPreferred();
List<ServerDescription> candidates = pref.choose(set);
assertEquals(1, candidates.size());
assertEquals(primary, candidates.get(0));
candidates = pref.choose(setNoPrimary);
assertEquals(2, candidates.size());
assertTrue(candidates.contains(secondary));
assertTrue(candidates.contains(otherSecondary));
pref = ReadPreference.primaryPreferred(new BasicDBObject("baz", "2"));
assertEquals(1, pref.choose(set).size());
assertEquals(primary, pref.choose(set).get(0));
assertEquals(1, pref.choose(setNoPrimary).size());
assertEquals(secondary, pref.choose(setNoPrimary).get(0));
}
@Test
public void testSecondaryPreferredMode() {
ReadPreference pref = ReadPreference.secondary(new BasicDBObject("baz", "2"));
assertTrue(pref.choose(set).get(0).equals(secondary));
// test that the primary is returned if no secondaries match the tag
pref = ReadPreference.secondaryPreferred(new BasicDBObject("madeup", "1"));
assertTrue(pref.choose(set).get(0).equals(primary));
pref = ReadPreference.secondaryPreferred();
final List<ServerDescription> candidates = pref.choose(set);
assertEquals(2, candidates.size());
assertTrue(candidates.contains(secondary));
assertTrue(candidates.contains(otherSecondary));
assertTrue(ReadPreference.secondaryPreferred().choose(setNoSecondary).contains(primary));
}
@Test
public void testNearestMode() {
ReadPreference pref = ReadPreference.nearest();
assertEquals(3, pref.choose(set).size());
pref = ReadPreference.nearest(new BasicDBObject("baz", "1"));
assertTrue(pref.choose(set).get(0).equals(primary));
pref = ReadPreference.nearest(new BasicDBObject("baz", "2"));
assertTrue(pref.choose(set).get(0).equals(secondary));
pref = ReadPreference.nearest(new BasicDBObject("madeup", "1"));
// assertEquals(new Tags("mode", "nearest")
// .append("tags", Arrays.asList(new Tags("madeup", "1"))),
// pref.toDBObject());
assertTrue(pref.choose(set).isEmpty());
}
@Test
public void testValueOf() {
assertEquals(ReadPreference.primary(), ReadPreference.valueOf("primary"));
assertEquals(ReadPreference.secondary(), ReadPreference.valueOf("secondary"));
assertEquals(ReadPreference.primaryPreferred(), ReadPreference.valueOf("primaryPreferred"));
assertEquals(ReadPreference.secondaryPreferred(), ReadPreference.valueOf("secondaryPreferred"));
assertEquals(ReadPreference.nearest(), ReadPreference.valueOf("nearest"));
final DBObject first = new BasicDBObject("dy", "ny");
assertEquals(ReadPreference.secondary(first), ReadPreference.valueOf("secondary", first));
assertEquals(ReadPreference.primaryPreferred(first),
ReadPreference.valueOf("primaryPreferred", first));
assertEquals(ReadPreference.secondaryPreferred(first),
ReadPreference.valueOf("secondaryPreferred", first));
assertEquals(ReadPreference.nearest(first), ReadPreference.valueOf("nearest", first));
}
@Test
public void testGetName() {
assertEquals("primary", ReadPreference.primary().getName());
assertEquals("secondary", ReadPreference.secondary().getName());
assertEquals("primaryPreferred", ReadPreference.primaryPreferred().getName());
assertEquals("secondaryPreferred", ReadPreference.secondaryPreferred().getName());
assertEquals("nearest", ReadPreference.nearest().getName());
final DBObject first = new BasicDBObject("dy", "ny");
assertEquals(ReadPreference.secondary(first), ReadPreference.valueOf("secondary", first));
assertEquals(ReadPreference.primaryPreferred(first), ReadPreference.valueOf("primaryPreferred", first));
assertEquals(ReadPreference.secondaryPreferred(first), ReadPreference.valueOf("secondaryPreferred", first));
assertEquals(ReadPreference.nearest(first), ReadPreference.valueOf("nearest", first));
}
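// A small additional sketch (not part of the original suite): with a primary available, primaryPreferred
// ignores an unmatched tag set and still returns the primary, mirroring the assertions above; with no
// primary and no matching secondary, nothing is chosen.
@Test
public void testPrimaryPreferredWithUnmatchedTags() {
final ReadPreference pref = ReadPreference.primaryPreferred(new BasicDBObject("madeup", "1"));
assertEquals(primary, pref.choose(set).get(0));
assertTrue(pref.choose(setNoPrimary).isEmpty());
}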
}
|
|
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInsight.daemon.quickFix;
import com.intellij.codeInsight.daemon.LightDaemonAnalyzerTestCase;
import com.intellij.codeInsight.daemon.impl.HighlightInfo;
import com.intellij.codeInsight.daemon.impl.HighlightInfoType;
import com.intellij.codeInsight.intention.IntentionAction;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.ModuleRootManager;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.CharsetToolkit;
import com.intellij.psi.PsiFile;
import com.intellij.psi.util.PsiUtil;
import com.intellij.testFramework.LightPlatformCodeInsightTestCase;
import com.intellij.testFramework.LightPlatformTestCase;
import com.intellij.testFramework.fixtures.impl.CodeInsightTestFixtureImpl;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.ObjectUtils;
import com.intellij.util.ui.UIUtil;
import junit.framework.ComparisonFailure;
import one.util.streamex.StreamEx;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.junit.Assert;
import java.io.File;
import java.io.IOException;
import java.util.List;
public abstract class LightQuickFixTestCase extends LightDaemonAnalyzerTestCase {
@NonNls protected static final String BEFORE_PREFIX = "before";
@NonNls protected static final String AFTER_PREFIX = "after";
private static QuickFixTestCase myWrapper;
@Override
protected void tearDown() throws Exception {
myWrapper = null;
super.tearDown();
}
protected boolean shouldBeAvailableAfterExecution() {
return false;
}
private static void doTestFor(@NotNull String testName, @NotNull QuickFixTestCase quickFixTestCase) {
final String relativePath = ObjectUtils.notNull(quickFixTestCase.getBasePath(), "") + "/" + BEFORE_PREFIX + testName;
final String testFullPath = quickFixTestCase.getTestDataPath().replace(File.separatorChar, '/') + relativePath;
final File testFile = new File(testFullPath);
CommandProcessor.getInstance().executeCommand(quickFixTestCase.getProject(), () -> {
try {
String contents = StringUtil.convertLineSeparators(FileUtil.loadFile(testFile, CharsetToolkit.UTF8_CHARSET));
quickFixTestCase.configureFromFileText(testFile.getName(), contents);
quickFixTestCase.bringRealEditorBack();
final ActionHint actionHint = quickFixTestCase.parseActionHintImpl(quickFixTestCase.getFile(), contents);
quickFixTestCase.beforeActionStarted(testName, contents);
try {
myWrapper = quickFixTestCase;
quickFixTestCase.doAction(actionHint, testFullPath, testName);
}
finally {
myWrapper = null;
quickFixTestCase.afterActionCompleted(testName, contents);
}
}
catch (ComparisonFailure e) {
throw e;
}
catch (Throwable e) {
e.printStackTrace();
Assert.fail(testName + " failed");
}
}, "", "");
}
protected void afterActionCompleted(final String testName, final String contents) {
}
protected void beforeActionStarted(final String testName, final String contents) {
}
public static void doAction(@NotNull ActionHint actionHint,
String testFullPath,
String testName,
QuickFixTestCase quickFix) throws Exception {
IntentionAction action = actionHint.findAndCheck(quickFix.getAvailableActions(),
() -> getTestInfo(testFullPath, quickFix));
if (action != null) {
String text = action.getText();
quickFix.invoke(action);
UIUtil.dispatchAllInvocationEvents();
UIUtil.dispatchAllInvocationEvents();
if (!quickFix.shouldBeAvailableAfterExecution()) {
final IntentionAction afterAction = quickFix.findActionWithText(text);
if (afterAction != null) {
fail("Action '" + text + "' is still available after its invocation in test " + testFullPath);
}
}
String expectedFilePath = ObjectUtils.notNull(quickFix.getBasePath(), "") + "/" + AFTER_PREFIX + testName;
quickFix.checkResultByFile("In file :" + expectedFilePath, expectedFilePath, false);
String familyName = action.getFamilyName();
if (StringUtil.isEmptyOrSpaces(familyName)) {
fail("Action '" + text + "' provides empty family name which means that user would see action with empty presentable text in Inspection Results");
}
}
}
private static String getTestInfo(String testFullPath, QuickFixTestCase quickFix) {
String infos = StreamEx.of(quickFix.doHighlighting())
.filter(info -> info.getSeverity() != HighlightInfoType.SYMBOL_TYPE_SEVERITY)
.map(info -> {
String fixes = "";
if (info.quickFixActionRanges != null) {
fixes = StreamEx.of(info.quickFixActionRanges)
.map(p -> p.getSecond()+" "+p.getFirst())
.mapLastOrElse("|- "::concat, "\\- "::concat)
.map(str -> " " + str + "\n")
.joining();
}
return info.getSeverity() +
": (" + info.getStartOffset() + "," + info.getEndOffset() + ") '" +
info.getText() + "': " + info.getDescription() + "\n" + fixes;
})
.joining(" ");
return "Test: " + testFullPath + "\n" +
"Language level: " + PsiUtil.getLanguageLevel(quickFix.getProject()) + "\n" +
(quickFix.getProject().equals(getProject()) ? ("SDK: " + ModuleRootManager.getInstance(getModule()).getSdk() + "\n") : "") +
"Infos: " + infos;
}
protected void doAction(@NotNull ActionHint actionHint, final String testFullPath, final String testName)
throws Exception {
doAction(actionHint, testFullPath, testName, myWrapper);
}
protected void doAction(@NotNull String actionName) {
final List<IntentionAction> available = getAvailableActions();
final IntentionAction action = findActionWithText(available, actionName);
assertNotNull("Action '" + actionName + "' not found among " + available, action);
invoke(action);
}
protected static void invoke(@NotNull IntentionAction action) throws IncorrectOperationException {
CodeInsightTestFixtureImpl.invokeIntention(action, getFile(), getEditor(), action.getText());
}
protected IntentionAction findActionAndCheck(@NotNull ActionHint hint, String testFullPath) {
return hint.findAndCheck(getAvailableActions(), () -> "Test: "+testFullPath);
}
protected IntentionAction findActionWithText(@NotNull String text) {
return findActionWithText(getAvailableActions(), text);
}
public static IntentionAction findActionWithText(@NotNull List<IntentionAction> actions, @NotNull String text) {
for (IntentionAction action : actions) {
if (text.equals(action.getText())) {
return action;
}
}
return null;
}
/**
* @deprecated use {@link LightQuickFixParameterizedTestCase}
* to get separate tests for all data files in testData directory.
*/
protected void doAllTests() {
doAllTests(createWrapper());
}
public static void doAllTests(QuickFixTestCase testCase) {
final File[] files = getBeforeTestFiles(testCase);
for (File file : files) {
final String testName = file.getName().substring(BEFORE_PREFIX.length());
doTestFor(testName, testCase);
}
}
@NotNull
public static File[] getBeforeTestFiles(@NotNull QuickFixTestCase testCase) {
assertNotNull("getBasePath() should not return null!", testCase.getBasePath());
final String testDirPath = testCase.getTestDataPath().replace(File.separatorChar, '/') + testCase.getBasePath();
File testDir = new File(testDirPath);
final File[] files = testDir.listFiles((dir, name) -> name.startsWith(BEFORE_PREFIX));
if (files == null || files.length == 0) {
fail("Test files not found in " + testDirPath);
}
return files;
}
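// Sketch of the test-data layout the two methods above expect (file names are hypothetical):
//   <testDataPath><basePath>/beforeFoo.java  -- input file; its contents include the action hint
//   <testDataPath><basePath>/afterFoo.java   -- expected result compared via checkResultByFile()
// doAllTests() picks up every "before*" file and calls doTestFor() with the remaining "Foo.java" suffix.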
protected void doSingleTest(@NotNull String fileSuffix) {
doTestFor(fileSuffix, createWrapper());
}
protected void doSingleTest(String fileSuffix, String testDataPath) {
doTestFor(fileSuffix, createWrapper(testDataPath));
}
protected ActionHint parseActionHintImpl(@NotNull PsiFile file, @NotNull String contents) {
return ActionHint.parse(file, contents);
}
@NotNull
protected QuickFixTestCase createWrapper() {
return createWrapper(null);
}
@NotNull
protected QuickFixTestCase createWrapper(final String testDataPath) {
return new QuickFixTestCase() {
public String myTestDataPath = testDataPath;
@Override
public String getBasePath() {
return LightQuickFixTestCase.this.getBasePath();
}
@NotNull
@Override
public String getTestDataPath() {
if (myTestDataPath == null) {
myTestDataPath = LightQuickFixTestCase.this.getTestDataPath();
}
return myTestDataPath;
}
@NotNull
@Override
public ActionHint parseActionHintImpl(@NotNull PsiFile file, @NotNull String contents) {
return LightQuickFixTestCase.this.parseActionHintImpl(file, contents);
}
@Override
public void beforeActionStarted(@NotNull String testName, @NotNull String contents) {
LightQuickFixTestCase.this.beforeActionStarted(testName, contents);
}
@Override
public void afterActionCompleted(@NotNull String testName, @NotNull String contents) {
LightQuickFixTestCase.this.afterActionCompleted(testName, contents);
}
@Override
public void doAction(@NotNull ActionHint actionHint, @NotNull String testFullPath, @NotNull String testName) throws Exception {
LightQuickFixTestCase.this.doAction(actionHint, testFullPath, testName);
}
@Override
public void checkResultByFile(@NotNull String message, @NotNull String expectedFilePath, boolean ignoreTrailingSpaces) throws Exception {
LightQuickFixTestCase.this.checkResultByFile(message, expectedFilePath, ignoreTrailingSpaces);
}
@Override
public IntentionAction findActionWithText(@NotNull String text) {
return LightQuickFixTestCase.this.findActionWithText(text);
}
@Override
public boolean shouldBeAvailableAfterExecution() {
return LightQuickFixTestCase.this.shouldBeAvailableAfterExecution();
}
@Override
public void invoke(@NotNull IntentionAction action) {
LightQuickFixTestCase.invoke(action);
}
@NotNull
@Override
public List<HighlightInfo> doHighlighting() {
return LightQuickFixTestCase.this.doHighlighting();
}
@NotNull
@Override
public List<IntentionAction> getAvailableActions() {
return LightQuickFixTestCase.this.getAvailableActions();
}
@Override
public void configureFromFileText(@NotNull String name, @NotNull String contents) throws IOException {
LightPlatformCodeInsightTestCase.configureFromFileText(name, contents, true);
}
@Override
public PsiFile getFile() {
return LightPlatformCodeInsightTestCase.getFile();
}
@Override
public Project getProject() {
return LightPlatformTestCase.getProject();
}
@Override
public void bringRealEditorBack() {
LightPlatformCodeInsightTestCase.bringRealEditorBack();
}
};
}
protected List<IntentionAction> getAvailableActions() {
doHighlighting();
return getAvailableActions(getEditor(), getFile());
}
@NotNull
public static List<IntentionAction> getAvailableActions(@NotNull Editor editor, @NotNull PsiFile file) {
return CodeInsightTestFixtureImpl.getAvailableIntentions(editor, file);
}
@NonNls protected String getBasePath() {return null;}
}
|
|
/**
* @author xiafan
*/
package imc.disxmldb.xupdate;
import imc.disxmldb.CollectionStore;
import imc.disxmldb.config.SysConfig;
import imc.disxmldb.config.XMLMetaData;
import imc.disxmldb.dom.AttributeNode;
import imc.disxmldb.dom.AttributeNodeImpl;
import imc.disxmldb.dom.ElementNode;
import imc.disxmldb.dom.ElementNodeImpl;
import imc.disxmldb.dom.XMLNode;
import imc.disxmldb.dom.typesystem.ValueType;
import imc.disxmldb.dom.numbering.INumberingSchema;
import imc.disxmldb.dom.numbering.NumberingSchema;
import imc.disxmldb.dom.typesystem.TypeResolver;
import imc.disxmldb.xupdate.XUpdateProcessor.AppendContext;
import java.lang.management.ManagementFactory;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.TimeoutException;
import javax.management.MBeanServer;
import javax.management.ObjectName;
import org.apache.cassandra.db.RowMutation;
import org.apache.cassandra.service.StorageProxy;
import org.xml.sax.Attributes;
import org.xml.sax.SAXException;
import org.xml.sax.SAXParseException;
import org.xml.sax.helpers.DefaultHandler;
/**
* This class implements the append command of the XUpdate query. It uses SAX to
* parse the XML segment and encodes the nodes using the numbering schema. The
* encoded XML nodes are then stored in a distributed fashion.
*/
public class AppendHandler extends DefaultHandler implements AppendHandlerMBean {
public static final String ROOTNODE = "root";
public static final String ROOTNDOE_START_TAG = "<" + ROOTNODE + ">";
public static final String ROOTNODE_END_TAG = "</" + ROOTNODE + ">";
/*
* double rangeSize = SysConfig.DEFAULT_RANGE_SIZE; double[] range = new
* double[2]; double rangeBound = 0.0;
*/
INumberingSchema numbering;
int nodeCount = 0;
CollectionStore colStore = null;
XMLMetaData metaData = null;
ElementNode rootNode = null;
XMLNode lastNode = null;
XMLNode curNode = null;
List<RowMutation> mutations = new LinkedList<RowMutation>();
RowMutation mutation = null;
StringBuilder value = new StringBuilder();
List<XMLNode> rootAppendedNodes = new LinkedList<XMLNode>();
Exception ex = null;
int level = -1;
/*
* static { MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
* try { mbs.registerMBean(new AppendHandler(), new ObjectName(
* "imc.disxmldb.xupdate:type=AppendHandler")); } catch (Exception e) {
* throw new RuntimeException(e); } } private AppendHandler() {
*
* }
*/
public AppendHandler(CollectionStore colStore, XMLMetaData metaData,
double[] range, int nodeCount, ElementNode parent) {
this.nodeCount = nodeCount;
this.numbering = new NumberingSchema(range,
SysConfig.DEFAULT_RANGE_SIZE, nodeCount);
/*
* this.range[0] = range[0]; this.range[1] = range[1];
*
* if (range[1] == Double.MAX_VALUE) this.range[1] = range[0];
*
* rangeBound = range[1];
*/
this.colStore = colStore;
this.metaData = metaData;
rootNode = parent;
curNode = (XMLNode) parent;
/*
* if (nodeCount * SysConfig.DEFAULT_RANGE_SIZE * 4 < (range[1] -
* range[0])) { rangeSize = SysConfig.DEFAULT_RANGE_SIZE; } else {
* rangeSize = (range[1] - range[0]) / (nodeCount * 3.2); }
* this.range[1] = this.range[0];
*/
}
@Override
public void ignorableWhitespace(char ch[], int start, int length)
throws SAXException {
value.append(ch, start, length);
}
@Override
public void characters(char[] ch, int start, int length)
throws SAXException {
value.append(ch, start, length);
}
@Override
public void endDocument() throws SAXException {
/*
* if (lastNode.getRange()[1] > rangeBound) { throw new SAXException(
* "append node fails, which is caused by the lack of coding range for the xml nodes"
* ); }
*/
if (numbering.isOverflow()) {
throw new SAXException(
"append node fails, which is caused by the lack of coding range for the xml nodes");
}
try {
/*
* if (mutations.size() > 0) {
* mutations.add(colStore.ElementNodeMutation(metaData, rootNode));
* StorageProxy.mutate(mutations,
* SysConstant.XMLSTORE_CONSISTENCY_LEVEL); }
*/
if (mutation != null) {
mutation = colStore.ElementNodeMutation(metaData, rootNode,
mutation);
StorageProxy.mutate(Arrays.asList(mutation),
SysConfig.XMLSTORE_CONSISTENCY_LEVEL);
}
metaData.syncMaxNodeID();
metaData.increXMLNodeNum(nodeCount);
} catch (Exception e) {
/*
* TODO in this case, we should rollback the mutations that have
* been applied.
*/
ex = e;
throw new SAXException(e);
}
}
@Override
public void endElement(String uri, String localName, String qName)
throws SAXException {
if (level > 0) {
if (value.length() != 0) {
String tmp = value.toString().trim();
ValueType type = TypeResolver.resolve(tmp);
curNode.setValueType(type);
curNode.setValue(ValueType.getValidator(type)
.fromString(tmp));
value = new StringBuilder();
}
/*
* range[1] += rangeSize; curNode.setRangeUpper(range[1]);
*/
curNode.setRangeUpper(numbering.endElement());
/*
* mutations.add(colStore.ElementNodeMutation(metaData,
* (ElementNode) curNode));
*/
mutation = colStore.ElementNodeMutation(metaData,
(ElementNode) curNode, mutation);
XMLNode pre = curNode;
curNode = curNode.getParent();
if (curNode == rootNode) {
lastNode = pre;
}
}
level--;
}
@Override
public void endPrefixMapping(String prefix) throws SAXException {
}
@Override
public void startDocument() throws SAXException {
}
@Override
public void startElement(String namespaceURI, String localName,
String qName, Attributes attributes) throws SAXException {
level++;
if (level > 0) {
String tagName = null;
if (namespaceURI.length() > 0) {
tagName = localName;
} else {
tagName = qName;
}
XMLNode parent = curNode;
curNode = new ElementNodeImpl(tagName,
metaData.increAndGetMaxNodeID(), parent.getLevel() + 1,
parent);
((ElementNode) parent).addChild(curNode);
if (parent == rootNode)
rootAppendedNodes.add(curNode);
curNode.setRangeLower(numbering.startElement());
/*
* range[0] = range[1] + rangeSize; curNode.setRangeLower(range[0]);
* range[1] = range[0];
*/
for (int i = 0; i < attributes.getLength(); i++) {
/*
* range[0] = range[1] + rangeSize; range[1] = range[0] +
* rangeSize;
*/
XMLNode attr = new AttributeNodeImpl(attributes.getQName(i),
metaData.increAndGetMaxNodeID(),
curNode.getLevel() + 1, curNode);
((ElementNode) curNode).addAttribute(attr);
ValueType type = TypeResolver.resolve(attributes.getValue(i));
attr.setValueType(type);
attr.setValue(attributes.getValue(i));
/*
* attr.setRangeLower(range[0]); attr.setRangeUpper(range[1]);
*/
attr.setRangeLower(numbering.startAttribute());
attr.setRangeUpper(numbering.endAttribute());
// mutations.add(colStore.AttribNodeMutation(metaData, attr));
mutation = colStore
.AttribNodeMutation(metaData, attr, mutation);
}
}
}
@Override
public void startPrefixMapping(String prefix, String uri)
throws SAXException {
}
@Override
public void warning(SAXParseException exception) throws SAXException {
String msg = "warning at (" + exception.getLineNumber() + ","
+ exception.getColumnNumber() + ") : " + exception.getMessage();
throw new SAXException(msg, exception);
}
@Override
public void error(SAXParseException exception) throws SAXException {
String msg = "error at (" + exception.getLineNumber() + ","
+ exception.getColumnNumber() + ") : " + exception.getMessage();
throw new SAXException(msg, exception);
}
@Override
public void fatalError(SAXParseException exception) throws SAXException {
String msg = "fatal error at (" + exception.getLineNumber() + ","
+ exception.getColumnNumber() + ") : " + exception.getMessage();
throw new SAXException(msg, exception);
}
public void rollBack() {
// if the failure was caused by a timeout, some node is busy now,
// so wait for some time before rolling back
if (ex != null && ex instanceof TimeoutException) {
try {
Thread.sleep(10000);
} catch (InterruptedException e) {
}
}
AppendRollBack rollBack = new AppendRollBack(colStore, metaData,
rootNode, rootAppendedNodes);
rollBack.rollback();
}
}
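// Illustrative (hypothetical) driver for the handler above. The ROOTNODE constants suggest the appended
// fragment is wrapped in an artificial <root> element so a single SAX parse can deliver several sibling
// nodes under the target parent:
//   SAXParser parser = SAXParserFactory.newInstance().newSAXParser();
//   String wrapped = AppendHandler.ROOTNDOE_START_TAG + fragment + AppendHandler.ROOTNODE_END_TAG;
//   parser.parse(new InputSource(new StringReader(wrapped)), handler);
//   // on failure: handler.rollBack();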
|
|
package com.squeezer.asr2application;
import android.content.Intent;
import android.content.SharedPreferences;
import android.os.AsyncTask;
import android.preference.PreferenceManager;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentTransaction;
import android.support.v7.app.ActionBarActivity;
import android.support.v7.app.ActionBar;
import android.support.v4.app.Fragment;
import android.os.Bundle;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.os.Build;
import android.widget.Button;
import android.widget.TextView;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.squeezer.asr2application.core.JokeWrapper;
import com.squeezer.asr2application.fragment.Fragment2;
import com.squeezer.asr2application.fragment.Fragment3;
import com.squeezer.asr2application.fragment.MainFragment;
import com.squeezer.asr2application.fragment.dialog.AddDialogFragment;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
public class MainActivity extends ActionBarActivity implements MainFragment.OnButtonClicked, AddDialogFragment.OnAddFragmentInteractionListener, View.OnClickListener {
public static final String EXTRA_NAME_KEY = "name";
public static final String EXTRA_LAST_NAME_KEY = "last_name";
public static final String EXTRA_AGE_KEY = "age";
public static final int RESULT_REQUEST_CODE = 1000;
private static final String PREFERENCE_BOTTOM_FRAGMENT_KEY = "bottom_layout_key";
private static final String PREFERENCE_BOTTOM_FRAGMENT_VALUE_1 = "fragment_2";
private static final String PREFERENCE_BOTTOM_FRAGMENT_VALUE_2 = "fragment_3";
private SharedPreferences mSharedPref;
private TextView mJokeText;
private Button mJokeButton;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
Log.v("slim", "onCreate");
setContentView(R.layout.activity_main);
if (savedInstanceState == null) {
Log.v("slim", "savedInstanceState == null");
} else {
Log.v("slim", "savedInstanceState != null");
}
getSupportFragmentManager().beginTransaction()
.add(R.id.container, MainFragment.newInstance(this))
.commit();
mSharedPref = PreferenceManager.getDefaultSharedPreferences(this);
String defaultBottomContent = mSharedPref.getString(PREFERENCE_BOTTOM_FRAGMENT_KEY, PREFERENCE_BOTTOM_FRAGMENT_VALUE_1);
if (defaultBottomContent.equals(PREFERENCE_BOTTOM_FRAGMENT_VALUE_1)) {
getSupportFragmentManager().beginTransaction()
.add(R.id.bottom_layout, new Fragment2())
.commit();
} else {
getSupportFragmentManager().beginTransaction()
.add(R.id.bottom_layout, new Fragment3())
.commit();
}
mJokeText = (TextView) findViewById(R.id.text);
mJokeButton = (Button) findViewById(R.id.joke_button);
mJokeButton.setOnClickListener(this);
}
@Override
protected void onStart() {
super.onStart();
Log.v("slim", "onStart");
}
@Override
protected void onResume() {
super.onResume();
Log.v("slim", "onResume");
}
@Override
protected void onPause() {
super.onPause();
Log.v("slim", "onPause");
}
@Override
protected void onStop() {
super.onStop();
Log.v("slim", "onStop");
}
@Override
protected void onDestroy() {
super.onDestroy();
Log.v("slim", "onDestroy");
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.menu_main, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
int id = item.getItemId();
//noinspection SimplifiableIfStatement
switch (id) {
case R.id.action_settings:
Intent intent = new Intent(getApplicationContext(), Activity2.class);
Bundle bundle = new Bundle();
bundle.putInt(EXTRA_AGE_KEY, 25);
bundle.putCharSequence(EXTRA_NAME_KEY, "Slim");
bundle.putCharSequence(EXTRA_LAST_NAME_KEY, "BH");
intent.putExtras(bundle);
startActivityForResult(intent, RESULT_REQUEST_CODE);
break;
case R.id.action_add:
AddDialogFragment addDialogFragment = AddDialogFragment.newInstance(this);
addDialogFragment.show(getSupportFragmentManager(), "test");
break;
case R.id.action_about:
Intent intent3 = new Intent(getApplicationContext(), Activity3.class);
startActivity(intent3);
break;
case R.id.action_player:
Intent intent6 = new Intent(getApplicationContext(), Activity6.class);
startActivity(intent6);
break;
case R.id.action_view:
Intent intent5 = new Intent(getApplicationContext(), Activity5.class);
startActivity(intent5);
break;
}
return super.onOptionsItemSelected(item);
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
if (requestCode == RESULT_REQUEST_CODE) {
Log.v("slim", "onActivityResult called with request code = RESULT_REQUEST_CODE and result code = " + resultCode);
//DO here what ever you want
}
}
@Override
public void buttonClicked() {
SharedPreferences.Editor prefEditor = mSharedPref.edit();
prefEditor.putString(PREFERENCE_BOTTOM_FRAGMENT_KEY, PREFERENCE_BOTTOM_FRAGMENT_VALUE_2);
prefEditor.commit();
}
@Override
public void onOk(String title, String description) {
Log.v("slim", "ok clicked");
}
@Override
public void onClick(View v) {
switch (v.getId()) {
case R.id.joke_button:
retrieveJoke();
break;
}
}
private void retrieveJoke() {
JokeRequestAsyncTask jokeTask = new JokeRequestAsyncTask();
jokeTask.execute();
}
private class JokeRequestAsyncTask extends AsyncTask<Void, Void, String> {
@Override
protected String doInBackground(Void... params) {
String result = null;
HttpURLConnection con = null;
InputStream is = null;
try {
String wsUrl = "http://api.icndb.com/jokes/random";
con = (HttpURLConnection) (new URL(wsUrl)).openConnection();
con.setRequestMethod("GET");
con.setDoInput(true);
// setDoOutput(true) is not needed for a GET and on many HttpURLConnection implementations it forces the request to become a POST
con.connect();
// read the response
StringBuilder buffer = new StringBuilder();
is = con.getInputStream();
BufferedReader br = new BufferedReader(new InputStreamReader(is));
String line = null;
while ((line = br.readLine()) != null) {
buffer.append(line + "\r\n");
}
                result = buffer.toString();
            } catch (MalformedURLException e) {
                e.printStackTrace();
            } catch (IOException e) {
                e.printStackTrace();
            } finally {
                // Always release the stream and the connection, even if reading fails part-way.
                if (is != null) {
                    try {
                        is.close();
                    } catch (IOException ignored) {
                    }
                }
                if (con != null) {
                    con.disconnect();
                }
            }
            return result;
}
        @Override
        protected void onPostExecute(String data) {
            if (data == null) {
                // The request failed; keep the current text.
                return;
            }
            Gson gson = new GsonBuilder().create();
            JokeWrapper jokeWrapper = gson.fromJson(data, JokeWrapper.class);
            mJokeText.setText(jokeWrapper.getJoke());
        }
}
}
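// --- Illustrative sketch only (not part of the original file) ---
// JokeWrapper is referenced above but not shown here. A minimal Gson model for it
// could look like the class below, assuming the icndb.com response has the shape
// {"type":"success","value":{"id":123,"joke":"..."}}. The name JokeWrapperSketch and
// its fields are hypothetical.
class JokeWrapperSketch {
    String type;
    Value value;

    static class Value {
        long id;
        String joke;
    }

    String getJoke() {
        // Guard against a missing "value" object in the parsed response.
        return value != null ? value.joke : null;
    }
}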
|
|
package devlight.io.library.behavior;
import android.animation.ObjectAnimator;
import android.animation.ValueAnimator;
import android.os.Build;
import android.support.design.widget.CoordinatorLayout;
import android.support.design.widget.FloatingActionButton;
import android.support.design.widget.Snackbar;
import android.support.v4.view.ViewCompat;
import android.support.v4.view.ViewPropertyAnimatorCompat;
import android.support.v4.view.ViewPropertyAnimatorUpdateListener;
import android.support.v4.view.animation.LinearOutSlowInInterpolator;
import android.view.View;
import android.view.ViewGroup;
import android.view.animation.Interpolator;
import devlight.io.library.ntb.NavigationTabBar;
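// CoordinatorLayout behavior that translates the NavigationTabBar off-screen or back
// depending on the nested scroll direction, and keeps dependent Snackbar and
// FloatingActionButton bottom margins in sync with the bar's current translation.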
public class NavigationTabBarBehavior extends VerticalScrollingBehavior<NavigationTabBar> {
private final static Interpolator INTERPOLATOR = new LinearOutSlowInInterpolator();
private final static int ANIMATION_DURATION = 300;
private ViewPropertyAnimatorCompat mTranslationAnimator;
private ObjectAnimator mTranslationObjectAnimator;
private Snackbar.SnackbarLayout mSnackBarLayout;
private FloatingActionButton mFloatingActionButton;
private int mSnackBarHeight = -1;
private float
mTargetOffset = 0,
mFabTargetOffset = 0,
mFabDefaultBottomMargin = 0;
private boolean mHidden;
private boolean mFabBottomMarginInitialized;
private boolean mBehaviorTranslationEnabled = true;
public NavigationTabBarBehavior(final boolean behaviorTranslationEnabled) {
super();
this.mBehaviorTranslationEnabled = behaviorTranslationEnabled;
}
@Override
public boolean onLayoutChild(CoordinatorLayout parent, NavigationTabBar child, int layoutDirection) {
return super.onLayoutChild(parent, child, layoutDirection);
}
@Override
public boolean onDependentViewChanged(CoordinatorLayout parent, NavigationTabBar child, View dependency) {
return super.onDependentViewChanged(parent, child, dependency);
}
@Override
public void onDependentViewRemoved(CoordinatorLayout parent, NavigationTabBar child, View dependency) {
super.onDependentViewRemoved(parent, child, dependency);
}
@Override
public boolean layoutDependsOn(CoordinatorLayout parent, NavigationTabBar child, View dependency) {
updateSnackBar(child, dependency);
updateFloatingActionButton(dependency);
return super.layoutDependsOn(parent, child, dependency);
}
@Override
public void onNestedVerticalOverScroll() {
        // Intentionally left empty; overridden only to satisfy the base class contract.
}
@Override
public void onDirectionNestedPreScroll() {
        // Intentionally left empty; overridden only to satisfy the base class contract.
}
@Override
protected boolean onNestedDirectionFling() {
return false;
}
@Override
public void onNestedScroll(CoordinatorLayout coordinatorLayout, NavigationTabBar child, View target, int dxConsumed, int dyConsumed, int dxUnconsumed, int dyUnconsumed) {
super.onNestedScroll(coordinatorLayout, child, target, dxConsumed, dyConsumed, dxUnconsumed, dyUnconsumed);
if (dyConsumed < 0) handleDirection(child, ScrollDirection.SCROLL_DIRECTION_DOWN);
else if (dyConsumed > 0) handleDirection(child, ScrollDirection.SCROLL_DIRECTION_UP);
}
@Override
public boolean onStartNestedScroll(CoordinatorLayout coordinatorLayout, NavigationTabBar child, View directTargetChild, View target, int nestedScrollAxes) {
return nestedScrollAxes == ViewCompat.SCROLL_AXIS_VERTICAL || super.onStartNestedScroll(coordinatorLayout, child, directTargetChild, target, nestedScrollAxes);
}
// Handle scroll direction
private void handleDirection(NavigationTabBar child, int scrollDirection) {
if (!mBehaviorTranslationEnabled) return;
if (scrollDirection == ScrollDirection.SCROLL_DIRECTION_DOWN && mHidden) {
mHidden = false;
animateOffset(child, 0, false, true);
} else if (scrollDirection == ScrollDirection.SCROLL_DIRECTION_UP && !mHidden) {
mHidden = true;
animateOffset(child, child.getHeight(), false, true);
}
}
// Animate offset
private void animateOffset(final NavigationTabBar child, final int offset, boolean forceAnimation, boolean withAnimation) {
if (!mBehaviorTranslationEnabled && !forceAnimation) return;
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
ensureOrCancelObjectAnimation(child, offset, withAnimation);
mTranslationObjectAnimator.start();
} else {
ensureOrCancelAnimator(child, withAnimation);
mTranslationAnimator.translationY(offset).start();
}
}
// Manage animation for Android >= KITKAT
private void ensureOrCancelAnimator(final NavigationTabBar child, boolean withAnimation) {
if (mTranslationAnimator == null) {
mTranslationAnimator = ViewCompat.animate(child);
mTranslationAnimator.setDuration(withAnimation ? ANIMATION_DURATION : 0);
mTranslationAnimator.setUpdateListener(new ViewPropertyAnimatorUpdateListener() {
@Override
public void onAnimationUpdate(View view) {
// Animate snack bar
if (mSnackBarLayout != null && mSnackBarLayout.getLayoutParams() instanceof ViewGroup.MarginLayoutParams) {
mTargetOffset = child.getBarHeight() - view.getTranslationY();
final ViewGroup.MarginLayoutParams p =
(ViewGroup.MarginLayoutParams) mSnackBarLayout.getLayoutParams();
p.setMargins(p.leftMargin, p.topMargin, p.rightMargin, (int) mTargetOffset);
mSnackBarLayout.requestLayout();
}
// Animate Floating Action Button
if (mFloatingActionButton != null && mFloatingActionButton.getLayoutParams() instanceof ViewGroup.MarginLayoutParams) {
final ViewGroup.MarginLayoutParams p =
(ViewGroup.MarginLayoutParams) mFloatingActionButton.getLayoutParams();
mFabTargetOffset = mFabDefaultBottomMargin - view.getTranslationY();
p.setMargins(p.leftMargin, p.topMargin, p.rightMargin, (int) mFabTargetOffset);
mFloatingActionButton.requestLayout();
}
}
});
mTranslationAnimator.setInterpolator(INTERPOLATOR);
} else {
mTranslationAnimator.setDuration(withAnimation ? ANIMATION_DURATION : 0);
mTranslationAnimator.cancel();
}
}
private static ObjectAnimator objectAnimatorOfTranslationY(View target, int offset) {
final ObjectAnimator res;
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH)
res = ObjectAnimator.ofFloat(target, View.TRANSLATION_Y, offset);
else {
res = new ObjectAnimator();
res.setTarget(target);
res.setPropertyName("translationY");
res.setFloatValues(offset);
}
return res;
}
// Manage animation for Android < KITKAT
private void ensureOrCancelObjectAnimation(final NavigationTabBar child, final int offset, boolean withAnimation) {
if (mTranslationObjectAnimator != null) mTranslationObjectAnimator.cancel();
mTranslationObjectAnimator = objectAnimatorOfTranslationY(child, offset);
mTranslationObjectAnimator.setDuration(withAnimation ? ANIMATION_DURATION : 0);
mTranslationObjectAnimator.setInterpolator(INTERPOLATOR);
mTranslationObjectAnimator.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() {
@Override
public void onAnimationUpdate(ValueAnimator animation) {
if (mSnackBarLayout != null && mSnackBarLayout.getLayoutParams() instanceof ViewGroup.MarginLayoutParams) {
mTargetOffset = child.getBarHeight() - child.getTranslationY();
final ViewGroup.MarginLayoutParams p =
(ViewGroup.MarginLayoutParams) mSnackBarLayout.getLayoutParams();
p.setMargins(p.leftMargin, p.topMargin, p.rightMargin, (int) mTargetOffset);
mSnackBarLayout.requestLayout();
}
// Animate Floating Action Button
if (mFloatingActionButton != null && mFloatingActionButton.getLayoutParams() instanceof ViewGroup.MarginLayoutParams) {
mFabTargetOffset = mFabDefaultBottomMargin - child.getTranslationY();
final ViewGroup.MarginLayoutParams p =
(ViewGroup.MarginLayoutParams) mFloatingActionButton.getLayoutParams();
p.setMargins(p.leftMargin, p.topMargin, p.rightMargin, (int) mFabTargetOffset);
mFloatingActionButton.requestLayout();
}
}
});
}
@SuppressWarnings("unused")
public static NavigationTabBarBehavior from(NavigationTabBar view) {
final ViewGroup.LayoutParams params = view.getLayoutParams();
if (!(params instanceof CoordinatorLayout.LayoutParams))
throw new IllegalArgumentException("The view is not a child of CoordinatorLayout");
final CoordinatorLayout.Behavior behavior =
((CoordinatorLayout.LayoutParams) params).getBehavior();
if (!(behavior instanceof NavigationTabBarBehavior))
throw new IllegalArgumentException(
"The view is not associated with NavigationTabBarBehavior");
return (NavigationTabBarBehavior) behavior;
}
    // Enable or disable the behavior translation
public void setBehaviorTranslationEnabled(boolean behaviorTranslationEnabled) {
this.mBehaviorTranslationEnabled = behaviorTranslationEnabled;
}
// Hide NTB with animation
public void hideView(NavigationTabBar view, int offset, boolean withAnimation) {
if (!mHidden) {
mHidden = true;
animateOffset(view, offset, true, withAnimation);
}
}
// Reset NTB position with animation
@SuppressWarnings("SameParameterValue")
public void resetOffset(NavigationTabBar view, boolean withAnimation) {
if (mHidden) {
mHidden = false;
animateOffset(view, 0, true, withAnimation);
}
}
// Update snack bar bottom margin
private void updateSnackBar(final NavigationTabBar child, View dependency) {
        if (dependency instanceof Snackbar.SnackbarLayout) {
mSnackBarLayout = (Snackbar.SnackbarLayout) dependency;
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
mSnackBarLayout.addOnLayoutChangeListener(new View.OnLayoutChangeListener() {
@Override
public void onLayoutChange(View v, int left, int top, int right, int bottom,
int oldLeft, int oldTop, int oldRight, int oldBottom) {
if (mFloatingActionButton != null &&
mFloatingActionButton.getLayoutParams() instanceof ViewGroup.MarginLayoutParams) {
mFabTargetOffset = mFabDefaultBottomMargin - child.getTranslationY();
final ViewGroup.MarginLayoutParams p =
(ViewGroup.MarginLayoutParams) mFloatingActionButton.getLayoutParams();
p.setMargins(p.leftMargin, p.topMargin, p.rightMargin, (int) mFabTargetOffset);
mFloatingActionButton.requestLayout();
}
}
});
}
if (mSnackBarHeight == -1) mSnackBarHeight = dependency.getHeight();
final int targetMargin = (int) (child.getBarHeight() - child.getTranslationY());
child.bringToFront();
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
dependency.setStateListAnimator(null);
dependency.setElevation(0.0F);
}
if (dependency.getLayoutParams() instanceof ViewGroup.MarginLayoutParams) {
final ViewGroup.MarginLayoutParams p =
(ViewGroup.MarginLayoutParams) dependency.getLayoutParams();
p.setMargins(p.leftMargin, p.topMargin, p.rightMargin, targetMargin);
dependency.requestLayout();
}
}
}
// Update floating action button bottom margin
private void updateFloatingActionButton(final View dependency) {
        if (dependency instanceof FloatingActionButton) {
mFloatingActionButton = (FloatingActionButton) dependency;
if (!mFabBottomMarginInitialized &&
dependency.getLayoutParams() instanceof ViewGroup.MarginLayoutParams) {
mFabBottomMarginInitialized = true;
final ViewGroup.MarginLayoutParams p =
(ViewGroup.MarginLayoutParams) dependency.getLayoutParams();
mFabDefaultBottomMargin = p.bottomMargin;
}
}
}
}
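// --- Illustrative usage sketch only (not part of the library) ---
// One way the behavior could be attached in code, assuming the NavigationTabBar is a
// direct child of a CoordinatorLayout. The class and parameter names below are
// hypothetical.
class AttachNavigationTabBarBehaviorSample {
    static void attach(NavigationTabBar tabBar, boolean translationEnabled) {
        // CoordinatorLayout children carry their Behavior on their LayoutParams.
        final CoordinatorLayout.LayoutParams params =
                (CoordinatorLayout.LayoutParams) tabBar.getLayoutParams();
        params.setBehavior(new NavigationTabBarBehavior(translationEnabled));
        tabBar.requestLayout();
    }
}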
|
|
/*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package com.phonegap;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import android.app.Activity;
import android.content.ContentValues;
import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.media.MediaPlayer;
import android.net.Uri;
import android.util.Log;
import com.phonegap.api.LOG;
import com.phonegap.api.Plugin;
import com.phonegap.api.PluginResult;
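/**
 * PhoneGap plugin that launches the platform capture activities (audio, image, video)
 * and returns the captured media files to JavaScript through PluginResult callbacks.
 */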
public class Capture extends Plugin {
private static final String VIDEO_3GPP = "video/3gpp";
private static final String AUDIO_3GPP = "audio/3gpp";
private static final String IMAGE_JPEG = "image/jpeg";
private static final int CAPTURE_AUDIO = 0; // Constant for capture audio
private static final int CAPTURE_IMAGE = 1; // Constant for capture image
private static final int CAPTURE_VIDEO = 2; // Constant for capture video
private static final String LOG_TAG = "Capture";
private static final int CAPTURE_INTERNAL_ERR = 0;
private static final int CAPTURE_APPLICATION_BUSY = 1;
private static final int CAPTURE_INVALID_ARGUMENT = 2;
private static final int CAPTURE_NO_MEDIA_FILES = 3;
private static final int CAPTURE_NOT_SUPPORTED = 20;
private String callbackId; // The ID of the callback to be invoked with our result
private long limit; // the number of pics/vids/clips to take
private double duration; // optional duration parameter for video recording
private JSONArray results; // The array of results to be returned to the user
private Uri imageUri; // Uri of captured image
@Override
public PluginResult execute(String action, JSONArray args, String callbackId) {
this.callbackId = callbackId;
this.limit = 1;
this.duration = 0.0f;
this.results = new JSONArray();
JSONObject options = args.optJSONObject(0);
if (options != null) {
limit = options.optLong("limit", 1);
duration = options.optDouble("duration", 0.0f);
}
if (action.equals("getFormatData")) {
try {
JSONObject obj = getFormatData(args.getString(0), args.getString(1));
return new PluginResult(PluginResult.Status.OK, obj);
} catch (JSONException e) {
return new PluginResult(PluginResult.Status.ERROR);
}
}
else if (action.equals("captureAudio")) {
this.captureAudio();
}
else if (action.equals("captureImage")) {
this.captureImage();
}
else if (action.equals("captureVideo")) {
this.captureVideo(duration);
}
PluginResult r = new PluginResult(PluginResult.Status.NO_RESULT);
r.setKeepCallback(true);
return r;
}
/**
     * Provides the media file data depending on its mime type
*
* @param filePath path to the file
* @param mimeType of the file
* @return a MediaFileData object
*/
private JSONObject getFormatData(String filePath, String mimeType) {
JSONObject obj = new JSONObject();
try {
// setup defaults
obj.put("height", 0);
obj.put("width", 0);
obj.put("bitrate", 0);
obj.put("duration", 0);
obj.put("codecs", "");
// If the mimeType isn't set the rest will fail
// so let's see if we can determine it.
if (mimeType == null || mimeType.equals("")) {
mimeType = FileUtils.getMimeType(filePath);
}
Log.d(LOG_TAG, "Mime type = " + mimeType);
if (mimeType.equals(IMAGE_JPEG) || filePath.endsWith(".jpg")) {
obj = getImageData(filePath, obj);
}
else if (mimeType.endsWith(AUDIO_3GPP)) {
obj = getAudioVideoData(filePath, obj, false);
}
else if (mimeType.equals(VIDEO_3GPP)) {
obj = getAudioVideoData(filePath, obj, true);
}
}
catch (JSONException e) {
Log.d(LOG_TAG, "Error: setting media file data object");
}
return obj;
}
/**
* Get the Image specific attributes
*
* @param filePath path to the file
* @param obj represents the Media File Data
* @return a JSONObject that represents the Media File Data
* @throws JSONException
*/
private JSONObject getImageData(String filePath, JSONObject obj) throws JSONException {
Bitmap bitmap = BitmapFactory.decodeFile(filePath);
obj.put("height", bitmap.getHeight());
obj.put("width", bitmap.getWidth());
return obj;
}
/**
     * Get the audio/video specific attributes
*
* @param filePath path to the file
* @param obj represents the Media File Data
* @param video if true get video attributes as well
* @return a JSONObject that represents the Media File Data
* @throws JSONException
*/
private JSONObject getAudioVideoData(String filePath, JSONObject obj, boolean video) throws JSONException {
MediaPlayer player = new MediaPlayer();
try {
player.setDataSource(filePath);
player.prepare();
obj.put("duration", player.getDuration());
if (video) {
obj.put("height", player.getVideoHeight());
obj.put("width", player.getVideoWidth());
}
}
catch (IOException e) {
Log.d(LOG_TAG, "Error: loading video file");
}
return obj;
}
/**
* Sets up an intent to capture audio. Result handled by onActivityResult()
*/
private void captureAudio() {
Intent intent = new Intent(android.provider.MediaStore.Audio.Media.RECORD_SOUND_ACTION);
this.ctx.startActivityForResult((Plugin) this, intent, CAPTURE_AUDIO);
}
/**
* Sets up an intent to capture images. Result handled by onActivityResult()
*/
private void captureImage() {
Intent intent = new Intent(android.provider.MediaStore.ACTION_IMAGE_CAPTURE);
// Specify file so that large image is captured and returned
File photo = new File(DirectoryManager.getTempDirectoryPath(ctx), "Capture.jpg");
intent.putExtra(android.provider.MediaStore.EXTRA_OUTPUT, Uri.fromFile(photo));
this.imageUri = Uri.fromFile(photo);
this.ctx.startActivityForResult((Plugin) this, intent, CAPTURE_IMAGE);
}
/**
* Sets up an intent to capture video. Result handled by onActivityResult()
*/
private void captureVideo(double duration) {
Intent intent = new Intent(android.provider.MediaStore.ACTION_VIDEO_CAPTURE);
// Introduced in API 8
//intent.putExtra(android.provider.MediaStore.EXTRA_DURATION_LIMIT, duration);
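        // Hedged note: EXTRA_DURATION_LIMIT (added in API 8) expects the limit in whole
        // seconds as an int, so re-enabling the line above would also need a cast such
        // as (int) duration and, ideally, a runtime API-level check.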
this.ctx.startActivityForResult((Plugin) this, intent, CAPTURE_VIDEO);
}
/**
     * Called when the capture activity exits.
*
* @param requestCode The request code originally supplied to startActivityForResult(),
* allowing you to identify who this result came from.
* @param resultCode The integer result code returned by the child activity through its setResult().
* @param intent An Intent, which can return result data to the caller (various data can be attached to Intent "extras").
* @throws JSONException
*/
public void onActivityResult(int requestCode, int resultCode, Intent intent) {
// Result received okay
if (resultCode == Activity.RESULT_OK) {
// An audio clip was requested
if (requestCode == CAPTURE_AUDIO) {
// Get the uri of the audio clip
Uri data = intent.getData();
// create a file object from the uri
results.put(createMediaFile(data));
if (results.length() >= limit) {
// Send Uri back to JavaScript for listening to audio
this.success(new PluginResult(PluginResult.Status.OK, results, "navigator.device.capture._castMediaFile"), this.callbackId);
} else {
// still need to capture more audio clips
captureAudio();
}
} else if (requestCode == CAPTURE_IMAGE) {
// For some reason if I try to do:
// Uri data = intent.getData();
// It crashes in the emulator and on my phone with a null pointer exception
// To work around it I had to grab the code from CameraLauncher.java
try {
// Create an ExifHelper to save the exif data that is lost during compression
ExifHelper exif = new ExifHelper();
exif.createInFile(DirectoryManager.getTempDirectoryPath(ctx) + "/Capture.jpg");
exif.readExifData();
// Read in bitmap of captured image
Bitmap bitmap = android.provider.MediaStore.Images.Media.getBitmap(this.ctx.getContentResolver(), imageUri);
// Create entry in media store for image
// (Don't use insertImage() because it uses default compression setting of 50 - no way to change it)
ContentValues values = new ContentValues();
values.put(android.provider.MediaStore.Images.Media.MIME_TYPE, IMAGE_JPEG);
Uri uri = null;
try {
uri = this.ctx.getContentResolver().insert(android.provider.MediaStore.Images.Media.EXTERNAL_CONTENT_URI, values);
} catch (UnsupportedOperationException e) {
LOG.d(LOG_TAG, "Can't write to external media storage.");
try {
uri = this.ctx.getContentResolver().insert(android.provider.MediaStore.Images.Media.INTERNAL_CONTENT_URI, values);
} catch (UnsupportedOperationException ex) {
LOG.d(LOG_TAG, "Can't write to internal media storage.");
this.fail(createErrorObject(CAPTURE_INTERNAL_ERR, "Error capturing image - no media storage found."));
return;
}
}
// Add compressed version of captured image to returned media store Uri
OutputStream os = this.ctx.getContentResolver().openOutputStream(uri);
bitmap.compress(Bitmap.CompressFormat.JPEG, 100, os);
os.close();
bitmap.recycle();
bitmap = null;
System.gc();
// Restore exif data to file
exif.createOutFile(FileUtils.getRealPathFromURI(uri, this.ctx));
exif.writeExifData();
// Add image to results
results.put(createMediaFile(uri));
if (results.length() >= limit) {
// Send Uri back to JavaScript for viewing image
this.success(new PluginResult(PluginResult.Status.OK, results, "navigator.device.capture._castMediaFile"), this.callbackId);
} else {
// still need to capture more images
captureImage();
}
} catch (IOException e) {
e.printStackTrace();
this.fail(createErrorObject(CAPTURE_INTERNAL_ERR, "Error capturing image."));
}
} else if (requestCode == CAPTURE_VIDEO) {
// Get the uri of the video clip
Uri data = intent.getData();
// create a file object from the uri
results.put(createMediaFile(data));
if (results.length() >= limit) {
// Send Uri back to JavaScript for viewing video
this.success(new PluginResult(PluginResult.Status.OK, results, "navigator.device.capture._castMediaFile"), this.callbackId);
} else {
// still need to capture more video clips
captureVideo(duration);
}
}
}
// If canceled
else if (resultCode == Activity.RESULT_CANCELED) {
// If we have partial results send them back to the user
if (results.length() > 0) {
this.success(new PluginResult(PluginResult.Status.OK, results, "navigator.device.capture._castMediaFile"), this.callbackId);
}
// user canceled the action
else {
this.fail(createErrorObject(CAPTURE_NO_MEDIA_FILES, "Canceled."));
}
}
// If something else
else {
// If we have partial results send them back to the user
if (results.length() > 0) {
this.success(new PluginResult(PluginResult.Status.OK, results, "navigator.device.capture._castMediaFile"), this.callbackId);
}
// something bad happened
else {
this.fail(createErrorObject(CAPTURE_NO_MEDIA_FILES, "Did not complete!"));
}
}
}
/**
* Creates a JSONObject that represents a File from the Uri
*
* @param data the Uri of the audio/image/video
* @return a JSONObject that represents a File
*/
private JSONObject createMediaFile(Uri data){
File fp = new File(FileUtils.getRealPathFromURI(data, this.ctx));
JSONObject obj = new JSONObject();
try {
// File properties
obj.put("name", fp.getName());
obj.put("fullPath", fp.getAbsolutePath());
// Because of an issue with MimeTypeMap.getMimeTypeFromExtension() all .3gpp files
// are reported as video/3gpp. I'm doing this hacky check of the URI to see if it
// is stored in the audio or video content store.
if (fp.getAbsoluteFile().toString().endsWith(".3gp") || fp.getAbsoluteFile().toString().endsWith(".3gpp")) {
if (data.toString().contains("/audio/")) {
obj.put("type", AUDIO_3GPP);
} else {
obj.put("type", VIDEO_3GPP);
}
} else {
obj.put("type", FileUtils.getMimeType(fp.getAbsolutePath()));
}
obj.put("lastModifiedDate", fp.lastModified());
obj.put("size", fp.length());
} catch (JSONException e) {
// this will never happen
e.printStackTrace();
}
return obj;
}
private JSONObject createErrorObject(int code, String message) {
JSONObject obj = new JSONObject();
try {
obj.put("code", code);
obj.put("message", message);
} catch (JSONException e) {
// This will never happen
}
return obj;
}
/**
* Send error message to JavaScript.
*
* @param err
*/
public void fail(JSONObject err) {
this.error(new PluginResult(PluginResult.Status.ERROR, err), this.callbackId);
}
}
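// --- Illustrative sketch only (not part of the plugin) ---
// Shows the options array that execute() reads its "limit" and "duration" values from;
// the JavaScript bridge normally supplies this. The class name and the sample values
// below are hypothetical.
class CaptureArgsSample {
    static JSONArray captureVideoArgs() throws JSONException {
        JSONObject options = new JSONObject();
        options.put("limit", 2);        // read via options.optLong("limit", 1)
        options.put("duration", 10.0);  // read via options.optDouble("duration", 0.0f)
        return new JSONArray().put(options);
    }
}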
|
|
/*
* Copyright 2000-2017 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
*/
package com.intellij.find;
import com.intellij.find.editorHeaderActions.ContextAwareShortcutProvider;
import com.intellij.find.editorHeaderActions.ShowMoreOptions;
import com.intellij.find.editorHeaderActions.Utils;
import com.intellij.find.editorHeaderActions.VariantsCompletionAction;
import com.intellij.icons.AllIcons;
import com.intellij.ide.DataManager;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.actionSystem.impl.ActionToolbarImpl;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.editor.impl.EditorHeaderComponent;
import com.intellij.openapi.keymap.KeymapUtil;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.BooleanGetter;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.wm.IdeFocusManager;
import com.intellij.ui.DocumentAdapter;
import com.intellij.ui.LightColors;
import com.intellij.ui.OnePixelSplitter;
import com.intellij.ui.SearchTextField;
import com.intellij.ui.components.panels.NonOpaquePanel;
import com.intellij.ui.components.panels.Wrapper;
import com.intellij.ui.speedSearch.SpeedSearchSupply;
import com.intellij.util.EventDispatcher;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.JBUI;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.event.DocumentEvent;
import javax.swing.text.JTextComponent;
import java.awt.*;
import java.awt.event.*;
import java.util.EventListener;
import java.util.List;
import static java.awt.event.InputEvent.CTRL_DOWN_MASK;
import static java.awt.event.InputEvent.META_DOWN_MASK;
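// Editor header component hosting the search and replace text fields together with their
// action toolbars; instances are assembled through the nested Builder (see buildFor()).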
public class SearchReplaceComponent extends EditorHeaderComponent implements DataProvider {
private final EventDispatcher<Listener> myEventDispatcher = EventDispatcher.create(Listener.class);
private final MyTextComponentWrapper mySearchFieldWrapper;
private JTextComponent mySearchTextComponent;
private final MyTextComponentWrapper myReplaceFieldWrapper;
private JTextComponent myReplaceTextComponent;
private final JPanel myLeftPanel;
private final JPanel myRightPanel;
private final DefaultActionGroup mySearchFieldActions;
private final ActionToolbarImpl mySearchActionsToolbar1;
private final ActionToolbarImpl mySearchActionsToolbar2;
private final ActionToolbarImpl.PopupStateModifier mySearchToolbar1PopupStateModifier;
private final DefaultActionGroup myReplaceFieldActions;
private final ActionToolbarImpl myReplaceActionsToolbar1;
private final ActionToolbarImpl myReplaceActionsToolbar2;
private final JPanel myReplaceToolbarWrapper;
private final Project myProject;
private final JComponent myTargetComponent;
private final Runnable myCloseAction;
private final Runnable myReplaceAction;
private final DataProvider myDataProviderDelegate;
private boolean myMultilineMode;
private String myStatusText = "";
@NotNull
public static Builder buildFor(@Nullable Project project, @NotNull JComponent component) {
return new Builder(project, component);
}
private SearchReplaceComponent(@Nullable Project project,
@NotNull JComponent targetComponent,
@NotNull DefaultActionGroup searchToolbar1Actions,
@NotNull final BooleanGetter searchToolbar1ModifiedFlagGetter,
@NotNull DefaultActionGroup searchToolbar2Actions,
@NotNull DefaultActionGroup searchFieldActions,
@NotNull DefaultActionGroup replaceToolbar1Actions,
@NotNull DefaultActionGroup replaceToolbar2Actions,
@NotNull DefaultActionGroup replaceFieldActions,
@Nullable Runnable replaceAction,
@Nullable Runnable closeAction,
@Nullable DataProvider dataProvider) {
myProject = project;
myTargetComponent = targetComponent;
mySearchFieldActions = searchFieldActions;
myReplaceFieldActions = replaceFieldActions;
myReplaceAction = replaceAction;
myCloseAction = closeAction;
mySearchToolbar1PopupStateModifier = new ActionToolbarImpl.PopupStateModifier() {
@Override
public int getModifiedPopupState() {
return ActionButtonComponent.PUSHED;
}
@Override
public boolean willModify() {
return searchToolbar1ModifiedFlagGetter.get();
}
};
mySearchFieldWrapper = new MyTextComponentWrapper() {
@Override
public void setContent(JComponent wrapped) {
super.setContent(wrapped);
mySearchTextComponent = unwrapTextComponent(wrapped);
}
};
myReplaceFieldWrapper = new MyTextComponentWrapper() {
@Override
public void setContent(JComponent wrapped) {
super.setContent(wrapped);
myReplaceTextComponent = unwrapTextComponent(wrapped);
}
};
myLeftPanel = new NonOpaquePanel(new BorderLayout());
myLeftPanel.setBorder(JBUI.Borders.emptyLeft(6));
myLeftPanel.add(mySearchFieldWrapper, BorderLayout.NORTH);
myLeftPanel.add(myReplaceFieldWrapper, BorderLayout.SOUTH);
mySearchActionsToolbar1 = createSearchToolbar1(searchToolbar1Actions);
Wrapper searchToolbarWrapper1 = new NonOpaquePanel(new BorderLayout());
searchToolbarWrapper1.add(mySearchActionsToolbar1, BorderLayout.WEST);
mySearchActionsToolbar2 = createSearchToolbar2(searchToolbar2Actions);
Wrapper searchToolbarWrapper2 = new Wrapper(mySearchActionsToolbar2);
mySearchActionsToolbar2.setBorder(JBUI.Borders.emptyLeft(16));
JPanel searchPair = new NonOpaquePanel(new BorderLayout()).setVerticalSizeReferent(mySearchFieldWrapper);
searchPair.add(searchToolbarWrapper1, BorderLayout.WEST);
searchPair.add(searchToolbarWrapper2, BorderLayout.CENTER);
myReplaceActionsToolbar1 = createReplaceToolbar1(replaceToolbar1Actions);
Wrapper replaceToolbarWrapper1 = new Wrapper(myReplaceActionsToolbar1).setVerticalSizeReferent(myReplaceFieldWrapper);
myReplaceActionsToolbar2 = createReplaceToolbar2(replaceToolbar2Actions);
Wrapper replaceToolbarWrapper2 = new Wrapper(myReplaceActionsToolbar2).setVerticalSizeReferent(myReplaceFieldWrapper);
myReplaceActionsToolbar2.setBorder(JBUI.Borders.emptyLeft(16));
myReplaceToolbarWrapper = new NonOpaquePanel(new BorderLayout());
myReplaceToolbarWrapper.add(replaceToolbarWrapper1, BorderLayout.WEST);
myReplaceToolbarWrapper.add(replaceToolbarWrapper2, BorderLayout.CENTER);
searchToolbarWrapper1.setHorizontalSizeReferent(replaceToolbarWrapper1);
JLabel closeLabel = new JLabel(null, AllIcons.Actions.Cross, SwingConstants.RIGHT);
closeLabel.setBorder(JBUI.Borders.empty(5));
closeLabel.setVerticalAlignment(SwingConstants.TOP);
closeLabel.addMouseListener(new MouseAdapter() {
@Override
public void mousePressed(final MouseEvent e) {
close();
}
});
closeLabel.setToolTipText("Close search bar (Escape)");
searchPair.add(new Wrapper.North(closeLabel), BorderLayout.EAST);
myRightPanel = new NonOpaquePanel(new BorderLayout());
myRightPanel.add(searchPair, BorderLayout.NORTH);
myRightPanel.add(myReplaceToolbarWrapper, BorderLayout.CENTER);
OnePixelSplitter splitter = new OnePixelSplitter(false, .25F);
myRightPanel.setBorder(JBUI.Borders.emptyLeft(6));
splitter.setFirstComponent(myLeftPanel);
splitter.setSecondComponent(myRightPanel);
splitter.setHonorComponentsMinimumSize(true);
splitter.setAndLoadSplitterProportionKey("FindSplitterProportion");
splitter.setOpaque(false);
splitter.getDivider().setOpaque(false);
add(splitter, BorderLayout.CENTER);
update("", "", false, false);
// it's assigned after all action updates so that actions don't get access to uninitialized components
myDataProviderDelegate = dataProvider;
setFocusCycleRoot(true);
setFocusTraversalPolicy(new LayoutFocusTraversalPolicy());
}
public void resetUndoRedoActions() {
UIUtil.resetUndoRedoActions(mySearchTextComponent);
UIUtil.resetUndoRedoActions(myReplaceTextComponent);
}
@Override
public void removeNotify() {
super.removeNotify();
addTextToRecent(mySearchTextComponent);
if (myReplaceTextComponent != null) {
addTextToRecent(myReplaceTextComponent);
}
}
public void requestFocusInTheSearchFieldAndSelectContent(Project project) {
mySearchTextComponent.selectAll();
IdeFocusManager.getInstance(project).requestFocus(mySearchTextComponent, true);
if (myReplaceTextComponent != null) {
myReplaceTextComponent.selectAll();
}
}
public void setStatusText(@NotNull String status) {
myStatusText = status;
}
@NotNull
public String getStatusText() {
return myStatusText;
}
public void replace() {
if (myReplaceAction != null) {
myReplaceAction.run();
}
}
public void close() {
if (myCloseAction != null) {
myCloseAction.run();
}
}
public void setRegularBackground() {
mySearchTextComponent.setBackground(UIUtil.getTextFieldBackground());
}
public void setNotFoundBackground() {
mySearchTextComponent.setBackground(LightColors.RED);
}
@Override
public Insets getInsets() {
Insets insets = super.getInsets();
if (UIUtil.isUnderGTKLookAndFeel()) {
insets.top += 1;
insets.bottom += 2;
}
return insets;
}
@Nullable
@Override
public Object getData(@NonNls String dataId) {
if (SpeedSearchSupply.SPEED_SEARCH_CURRENT_QUERY.is(dataId)) {
return mySearchTextComponent.getText();
}
return myDataProviderDelegate != null ? myDataProviderDelegate.getData(dataId) : null;
}
public Project getProject() {
return myProject;
}
public void addListener(@NotNull Listener listener) {
myEventDispatcher.addListener(listener);
}
public boolean isMultiline() {
return myMultilineMode;
}
private void setMultilineInternal(boolean multiline) {
boolean stateChanged = multiline != myMultilineMode;
myMultilineMode = multiline;
if (stateChanged) {
multilineStateChanged();
}
}
@NotNull
public JTextComponent getSearchTextComponent() {
return mySearchTextComponent;
}
@NotNull
public JTextComponent getReplaceTextComponent() {
return myReplaceTextComponent;
}
private void updateSearchComponent(@NotNull String textToSet) {
if (!updateTextComponent(true)) {
String existingText = mySearchTextComponent.getText();
if (!existingText.equals(textToSet)) {
mySearchTextComponent.setText(textToSet);
        // textToSet should be selected even if there was no selection before (if there is a selection, setText() will retain it)
if (existingText.length() == 0) mySearchTextComponent.selectAll();
}
return;
}
mySearchTextComponent.getDocument().addDocumentListener(new DocumentAdapter() {
@Override
protected void textChanged(DocumentEvent e) {
ApplicationManager.getApplication().invokeLater(() -> searchFieldDocumentChanged());
}
});
mySearchTextComponent.registerKeyboardAction(new ActionListener() {
@Override
public void actionPerformed(final ActionEvent e) {
if (StringUtil.isEmpty(mySearchTextComponent.getText())) {
close();
}
else {
IdeFocusManager.getInstance(myProject).requestFocus(myTargetComponent, true);
addTextToRecent(mySearchTextComponent);
}
}
}, KeyStroke.getKeyStroke(KeyEvent.VK_ENTER, SystemInfo.isMac ? META_DOWN_MASK : CTRL_DOWN_MASK),
JComponent.WHEN_FOCUSED);
new VariantsCompletionAction(mySearchTextComponent); // It registers a shortcut set automatically on construction
}
private void updateReplaceComponent(@NotNull String textToSet) {
if (!updateTextComponent(false)) {
String existingText = myReplaceTextComponent.getText();
if (!existingText.equals(textToSet)) {
myReplaceTextComponent.setText(textToSet);
if (existingText.length() == 0) myReplaceTextComponent.selectAll();
}
return;
}
myReplaceTextComponent.setText(textToSet);
myReplaceTextComponent.getDocument().addDocumentListener(new DocumentAdapter() {
@Override
protected void textChanged(DocumentEvent e) {
ApplicationManager.getApplication().invokeLater(() -> replaceFieldDocumentChanged());
}
});
if (!isMultiline()) {
installReplaceOnEnterAction(myReplaceTextComponent);
}
new VariantsCompletionAction(myReplaceTextComponent);
myReplaceFieldWrapper.revalidate();
myReplaceFieldWrapper.repaint();
}
public void update(@NotNull String findText, @NotNull String replaceText, boolean replaceMode, boolean multiline) {
setMultilineInternal(multiline);
boolean needToResetSearchFocus = mySearchTextComponent != null && mySearchTextComponent.hasFocus();
boolean needToResetReplaceFocus = myReplaceTextComponent != null && myReplaceTextComponent.hasFocus();
updateSearchComponent(findText);
updateReplaceComponent(replaceText);
if (replaceMode) {
if (myReplaceFieldWrapper.getParent() == null) {
myLeftPanel.add(myReplaceFieldWrapper, BorderLayout.CENTER);
}
if (myReplaceToolbarWrapper.getParent() == null) {
myRightPanel.add(myReplaceToolbarWrapper, BorderLayout.CENTER);
}
if (needToResetReplaceFocus) {
myReplaceTextComponent.requestFocusInWindow();
}
}
else {
if (myReplaceFieldWrapper.getParent() != null) {
myLeftPanel.remove(myReplaceFieldWrapper);
}
if (myReplaceToolbarWrapper.getParent() != null) {
myRightPanel.remove(myReplaceToolbarWrapper);
}
}
if (needToResetSearchFocus) mySearchTextComponent.requestFocusInWindow();
updateBindings();
updateActions();
revalidate();
repaint();
}
public void updateActions() {
mySearchActionsToolbar1.updateActionsImmediately();
mySearchActionsToolbar2.updateActionsImmediately();
myReplaceActionsToolbar1.updateActionsImmediately();
myReplaceActionsToolbar2.updateActionsImmediately();
}
public void addTextToRecent(@NotNull JTextComponent textField) {
final String text = textField.getText();
if (text.length() > 0) {
FindInProjectSettings findInProjectSettings = FindInProjectSettings.getInstance(myProject);
if (textField == mySearchTextComponent) {
findInProjectSettings.addStringToFind(text);
if (mySearchFieldWrapper.getTargetComponent() instanceof SearchTextField) {
((SearchTextField)mySearchFieldWrapper.getTargetComponent()).addCurrentTextToHistory();
}
}
else {
findInProjectSettings.addStringToReplace(text);
if (myReplaceFieldWrapper.getTargetComponent() instanceof SearchTextField) {
((SearchTextField)myReplaceFieldWrapper.getTargetComponent()).addCurrentTextToHistory();
}
}
}
}
private boolean updateTextComponent(boolean search) {
JTextComponent oldComponent = search ? mySearchTextComponent : myReplaceTextComponent;
if (oldComponent != null) return false;
final MyTextComponentWrapper wrapper = search ? mySearchFieldWrapper : myReplaceFieldWrapper;
final JTextComponent textComponent;
SearchTextArea textArea = new SearchTextArea(search);
textComponent = textArea.getTextArea();
((JTextArea)textComponent).setRows(isMultiline() ? 2 : 1);
wrapper.setContent(textArea);
UIUtil.addUndoRedoActions(textComponent);
textComponent.putClientProperty("AuxEditorComponent", Boolean.TRUE);
textComponent.setBackground(UIUtil.getTextFieldBackground());
textComponent.addFocusListener(new FocusListener() {
@Override
public void focusGained(final FocusEvent e) {
textComponent.repaint();
}
@Override
public void focusLost(final FocusEvent e) {
textComponent.repaint();
}
});
installCloseOnEscapeAction(textComponent);
return true;
}
private void searchFieldDocumentChanged() {
if (mySearchTextComponent instanceof JTextArea) {
adjustRows((JTextArea)mySearchTextComponent);
}
myEventDispatcher.getMulticaster().searchFieldDocumentChanged();
}
private void replaceFieldDocumentChanged() {
if (myReplaceTextComponent instanceof JTextArea) {
adjustRows((JTextArea)myReplaceTextComponent);
}
myReplaceActionsToolbar2.invalidate();
doLayout();
myEventDispatcher.getMulticaster().replaceFieldDocumentChanged();
}
private void multilineStateChanged() {
myEventDispatcher.getMulticaster().multilineStateChanged();
}
private static void adjustRows(@NotNull JTextArea area) {
area.setRows(Math.max(1, Math.min(3, StringUtil.countChars(area.getText(), '\n') + 1)));
}
private void installCloseOnEscapeAction(@NotNull JTextComponent c) {
new AnAction() {
@Override
public void actionPerformed(AnActionEvent e) {
close();
}
}.registerCustomShortcutSet(KeymapUtil.getActiveKeymapShortcuts(IdeActions.ACTION_EDITOR_ESCAPE), c);
}
private void installReplaceOnEnterAction(@NotNull JTextComponent c) {
ActionListener action = new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
replace();
}
};
c.registerKeyboardAction(action, KeyStroke.getKeyStroke(KeyEvent.VK_ENTER, 0), JComponent.WHEN_FOCUSED);
}
private void updateBindings() {
updateBindings(mySearchFieldActions, mySearchFieldWrapper);
updateBindings(mySearchActionsToolbar1, mySearchFieldWrapper);
updateBindings(mySearchActionsToolbar2, mySearchFieldWrapper);
updateBindings(myReplaceFieldActions, myReplaceFieldWrapper);
updateBindings(myReplaceActionsToolbar1, myReplaceToolbarWrapper);
updateBindings(myReplaceActionsToolbar2, myReplaceToolbarWrapper);
}
private void updateBindings(@NotNull DefaultActionGroup group, @NotNull JComponent shortcutHolder) {
updateBindings(ContainerUtil.immutableList(group.getChildActionsOrStubs()), shortcutHolder);
}
private void updateBindings(@NotNull ActionToolbarImpl toolbar, @NotNull JComponent shortcutHolder) {
updateBindings(toolbar.getActions(), shortcutHolder);
}
private void updateBindings(@NotNull List<? extends AnAction> actions, @NotNull JComponent shortcutHolder) {
DataContext context = DataManager.getInstance().getDataContext(this);
for (AnAction action : actions) {
ShortcutSet shortcut = null;
if (action instanceof ContextAwareShortcutProvider) {
shortcut = ((ContextAwareShortcutProvider)action).getShortcut(context);
}
else if (action instanceof ShortcutProvider) {
shortcut = ((ShortcutProvider)action).getShortcut();
}
if (shortcut != null) {
action.registerCustomShortcutSet(shortcut, shortcutHolder);
}
}
}
@NotNull
private ActionToolbarImpl createSearchToolbar1(@NotNull DefaultActionGroup group) {
ActionToolbarImpl toolbar = createToolbar(group);
toolbar.setForceMinimumSize(true);
toolbar.setReservePlaceAutoPopupIcon(false);
toolbar.setSecondaryButtonPopupStateModifier(mySearchToolbar1PopupStateModifier);
toolbar.setSecondaryActionsTooltip("Show Filter Popup (" + KeymapUtil.getShortcutText(ShowMoreOptions.SHORT_CUT) + ")");
toolbar.setSecondaryActionsIcon(AllIcons.General.Filter);
new ShowMoreOptions(toolbar, mySearchFieldWrapper);
return toolbar;
}
@NotNull
private ActionToolbarImpl createSearchToolbar2(@NotNull DefaultActionGroup group) {
return createToolbar(group);
}
@NotNull
private ActionToolbarImpl createReplaceToolbar1(@NotNull DefaultActionGroup group) {
ActionToolbarImpl toolbar = createToolbar(group);
toolbar.setForceMinimumSize(true);
toolbar.setReservePlaceAutoPopupIcon(false);
return toolbar;
}
@NotNull
private ActionToolbarImpl createReplaceToolbar2(@NotNull DefaultActionGroup group) {
return createToolbar(group);
}
@NotNull
private ActionToolbarImpl createToolbar(@NotNull ActionGroup group) {
return tweakToolbar((ActionToolbarImpl)ActionManager.getInstance().createActionToolbar(ActionPlaces.EDITOR_TOOLBAR, group, true));
}
@NotNull
private ActionToolbarImpl tweakToolbar(@NotNull ActionToolbarImpl toolbar) {
toolbar.setTargetComponent(this);
toolbar.setLayoutPolicy(ActionToolbar.AUTO_LAYOUT_POLICY);
toolbar.setBorder(null);
Utils.setSmallerFontForChildren(toolbar);
return toolbar;
}
public interface Listener extends EventListener {
void searchFieldDocumentChanged();
void replaceFieldDocumentChanged();
void multilineStateChanged();
}
public static class Builder {
private final Project myProject;
private final JComponent myTargetComponent;
private DataProvider myDataProvider;
private Runnable myReplaceAction;
private Runnable myCloseAction;
private DefaultActionGroup mySearchActions = new DefaultActionGroup("search bar 1", false);
private DefaultActionGroup myExtraSearchActions = new DefaultActionGroup("search bar 2", false);
private DefaultActionGroup mySearchFieldActions = new DefaultActionGroup("search field actions", false);
private BooleanGetter mySearchToolbarModifiedFlagGetter = BooleanGetter.FALSE;
private DefaultActionGroup myReplaceActions = new DefaultActionGroup("replace bar 1", false);
    private DefaultActionGroup myExtraReplaceActions = new DefaultActionGroup("replace bar 2", false);
private DefaultActionGroup myReplaceFieldActions = new DefaultActionGroup("replace field actions", false);
private Builder(@Nullable Project project, @NotNull JComponent component) {
myProject = project;
myTargetComponent = component;
}
@NotNull
public Builder withDataProvider(@NotNull DataProvider provider) {
myDataProvider = provider;
return this;
}
@NotNull
public Builder withReplaceAction(@NotNull Runnable action) {
myReplaceAction = action;
return this;
}
@NotNull
public Builder withCloseAction(@NotNull Runnable action) {
myCloseAction = action;
return this;
}
@NotNull
public Builder addSearchFieldActions(@NotNull AnAction... actions) {
mySearchFieldActions.addAll(actions);
return this;
}
@NotNull
public Builder addReplaceFieldActions(@NotNull AnAction... actions) {
myReplaceFieldActions.addAll(actions);
return this;
}
@NotNull
public Builder addPrimarySearchActions(@NotNull AnAction... actions) {
mySearchActions.addAll(actions);
return this;
}
@NotNull
public Builder addSecondarySearchActions(@NotNull AnAction... actions) {
for (AnAction action : actions) {
mySearchActions.addAction(action).setAsSecondary(true);
}
return this;
}
@NotNull
public Builder withSecondarySearchActionsIsModifiedGetter(@NotNull BooleanGetter getter) {
mySearchToolbarModifiedFlagGetter = getter;
return this;
}
@NotNull
public Builder addExtraSearchActions(@NotNull AnAction... actions) {
myExtraSearchActions.addAll(actions);
return this;
}
@NotNull
public Builder addPrimaryReplaceActions(@NotNull AnAction... actions) {
myReplaceActions.addAll(actions);
return this;
}
@NotNull
public Builder addExtraReplaceAction(@NotNull AnAction... actions) {
myExtraReplaceActions.addAll(actions);
return this;
}
@NotNull
public SearchReplaceComponent build() {
return new SearchReplaceComponent(myProject,
myTargetComponent,
mySearchActions,
mySearchToolbarModifiedFlagGetter,
myExtraSearchActions,
mySearchFieldActions,
myReplaceActions,
myExtraReplaceActions,
myReplaceFieldActions,
myReplaceAction,
myCloseAction,
myDataProvider);
}
}
private static class MyTextComponentWrapper extends Wrapper {
@Nullable
public JTextComponent getTextComponent() {
JComponent wrapped = getTargetComponent();
return wrapped != null ? unwrapTextComponent(wrapped) : null;
}
@NotNull
protected static JTextComponent unwrapTextComponent(@NotNull JComponent wrapped) {
if (wrapped instanceof SearchTextField) {
return ((SearchTextField)wrapped).getTextEditor();
}
if (wrapped instanceof SearchTextArea) {
return ((SearchTextArea)wrapped).getTextArea();
}
throw new AssertionError();
}
}
}
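// --- Illustrative usage sketch only (not part of the original file) ---
// A minimal way a caller could assemble the component via the Builder; the class,
// method, and parameter names below are hypothetical, and the close/replace bodies
// are placeholders.
class SearchReplaceComponentUsageSketch {
    static SearchReplaceComponent create(Project project, JComponent editorComponent) {
        return SearchReplaceComponent
                .buildFor(project, editorComponent)
                .withCloseAction(() -> { /* hide the header */ })
                .withReplaceAction(() -> { /* perform one replacement */ })
                .build();
    }
}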
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.streams.state.internals;
import java.util.Optional;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.Metric;
import org.apache.kafka.common.MetricName;
import org.apache.kafka.common.header.Headers;
import org.apache.kafka.common.header.internals.RecordHeader;
import org.apache.kafka.common.header.internals.RecordHeaders;
import org.apache.kafka.common.metrics.MetricConfig;
import org.apache.kafka.common.metrics.Metrics;
import org.apache.kafka.common.metrics.Sensor.RecordingLevel;
import org.apache.kafka.common.record.RecordBatch;
import org.apache.kafka.common.record.TimestampType;
import org.apache.kafka.common.serialization.Deserializer;
import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.serialization.Serializer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.kafka.common.utils.Bytes;
import org.apache.kafka.common.utils.MockTime;
import org.apache.kafka.common.utils.Time;
import org.apache.kafka.common.utils.Utils;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.StreamsConfig.InternalConfig;
import org.apache.kafka.streams.errors.InvalidStateStoreException;
import org.apache.kafka.streams.errors.ProcessorStateException;
import org.apache.kafka.streams.processor.StateStoreContext;
import org.apache.kafka.streams.processor.TaskId;
import org.apache.kafka.streams.processor.internals.ChangelogRecordDeserializationHelper;
import org.apache.kafka.streams.processor.internals.ProcessorRecordContext;
import org.apache.kafka.streams.processor.internals.metrics.StreamsMetricsImpl;
import org.apache.kafka.streams.query.Position;
import org.apache.kafka.streams.state.KeyValueIterator;
import org.apache.kafka.streams.state.KeyValueStore;
import org.apache.kafka.streams.state.RocksDBConfigSetter;
import org.apache.kafka.streams.state.StoreBuilder;
import org.apache.kafka.streams.state.Stores;
import org.apache.kafka.streams.state.internals.metrics.RocksDBMetricsRecorder;
import org.apache.kafka.test.InternalMockProcessorContext;
import org.apache.kafka.test.MockRocksDbConfigSetter;
import org.apache.kafka.test.StreamsTestUtils;
import org.apache.kafka.test.TestUtils;
import org.easymock.EasyMock;
import org.hamcrest.Matchers;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.jupiter.api.Assertions;
import org.rocksdb.BlockBasedTableConfig;
import org.rocksdb.BloomFilter;
import org.rocksdb.Cache;
import org.rocksdb.Filter;
import org.rocksdb.LRUCache;
import org.rocksdb.Options;
import org.rocksdb.PlainTableConfig;
import org.rocksdb.Statistics;
import java.io.File;
import java.io.IOException;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.UUID;
import java.util.stream.Collectors;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.apache.kafka.common.utils.Utils.mkEntry;
import static org.apache.kafka.common.utils.Utils.mkMap;
import static org.easymock.EasyMock.eq;
import static org.easymock.EasyMock.isNull;
import static org.easymock.EasyMock.mock;
import static org.easymock.EasyMock.notNull;
import static org.easymock.EasyMock.reset;
import static org.hamcrest.CoreMatchers.either;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.hasEntry;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertThrows;
import static org.junit.Assert.assertTrue;
import static org.powermock.api.easymock.PowerMock.replay;
import static org.powermock.api.easymock.PowerMock.verify;
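// Unit tests for RocksDBStore: metrics-recorder wiring, RocksDBConfigSetter handling,
// restore behavior, and basic key-value operations.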
@SuppressWarnings("unchecked")
public class RocksDBStoreTest extends AbstractKeyValueStoreTest {
private static boolean enableBloomFilters = false;
final static String DB_NAME = "db-name";
final static String METRICS_SCOPE = "metrics-scope";
private File dir;
private final Time time = new MockTime();
private final Serializer<String> stringSerializer = new StringSerializer();
private final Deserializer<String> stringDeserializer = new StringDeserializer();
private final RocksDBMetricsRecorder metricsRecorder = mock(RocksDBMetricsRecorder.class);
InternalMockProcessorContext context;
RocksDBStore rocksDBStore;
@Before
public void setUp() {
final Properties props = StreamsTestUtils.getStreamsConfig();
props.put(StreamsConfig.ROCKSDB_CONFIG_SETTER_CLASS_CONFIG, MockRocksDbConfigSetter.class);
dir = TestUtils.tempDirectory();
context = new InternalMockProcessorContext<>(
dir,
Serdes.String(),
Serdes.String(),
new StreamsConfig(props)
);
rocksDBStore = getRocksDBStore();
}
@After
public void tearDown() {
rocksDBStore.close();
}
@Override
protected <K, V> KeyValueStore<K, V> createKeyValueStore(final StateStoreContext context) {
final StoreBuilder<KeyValueStore<K, V>> storeBuilder = Stores.keyValueStoreBuilder(
Stores.persistentKeyValueStore("my-store"),
(Serde<K>) context.keySerde(),
(Serde<V>) context.valueSerde());
final KeyValueStore<K, V> store = storeBuilder.build();
store.init(context, store);
return store;
}
RocksDBStore getRocksDBStore() {
return new RocksDBStore(DB_NAME, METRICS_SCOPE);
}
private RocksDBStore getRocksDBStoreWithRocksDBMetricsRecorder() {
return new RocksDBStore(DB_NAME, METRICS_SCOPE, metricsRecorder);
}
private InternalMockProcessorContext getProcessorContext(final Properties streamsProps) {
return new InternalMockProcessorContext(
TestUtils.tempDirectory(),
new StreamsConfig(streamsProps)
);
}
private InternalMockProcessorContext getProcessorContext(
final RecordingLevel recordingLevel,
final Class<? extends RocksDBConfigSetter> rocksDBConfigSetterClass) {
final Properties streamsProps = StreamsTestUtils.getStreamsConfig();
streamsProps.setProperty(StreamsConfig.METRICS_RECORDING_LEVEL_CONFIG, recordingLevel.name());
streamsProps.put(StreamsConfig.ROCKSDB_CONFIG_SETTER_CLASS_CONFIG, rocksDBConfigSetterClass);
return getProcessorContext(streamsProps);
}
private InternalMockProcessorContext getProcessorContext(final RecordingLevel recordingLevel) {
final Properties streamsProps = StreamsTestUtils.getStreamsConfig();
streamsProps.setProperty(StreamsConfig.METRICS_RECORDING_LEVEL_CONFIG, recordingLevel.name());
return getProcessorContext(streamsProps);
}
@Test
public void shouldAddValueProvidersWithoutStatisticsToInjectedMetricsRecorderWhenRecordingLevelInfo() {
rocksDBStore = getRocksDBStoreWithRocksDBMetricsRecorder();
context = getProcessorContext(RecordingLevel.INFO);
reset(metricsRecorder);
metricsRecorder.addValueProviders(eq(DB_NAME), notNull(), notNull(), isNull());
replay(metricsRecorder);
rocksDBStore.openDB(context.appConfigs(), context.stateDir());
verify(metricsRecorder);
reset(metricsRecorder);
}
@Test
public void shouldAddValueProvidersWithStatisticsToInjectedMetricsRecorderWhenRecordingLevelDebug() {
rocksDBStore = getRocksDBStoreWithRocksDBMetricsRecorder();
context = getProcessorContext(RecordingLevel.DEBUG);
reset(metricsRecorder);
metricsRecorder.addValueProviders(eq(DB_NAME), notNull(), notNull(), notNull());
replay(metricsRecorder);
rocksDBStore.openDB(context.appConfigs(), context.stateDir());
verify(metricsRecorder);
reset(metricsRecorder);
}
@Test
public void shouldRemoveValueProvidersFromInjectedMetricsRecorderOnClose() {
rocksDBStore = getRocksDBStoreWithRocksDBMetricsRecorder();
try {
context = getProcessorContext(RecordingLevel.DEBUG);
rocksDBStore.openDB(context.appConfigs(), context.stateDir());
reset(metricsRecorder);
metricsRecorder.removeValueProviders(DB_NAME);
replay(metricsRecorder);
} finally {
rocksDBStore.close();
}
verify(metricsRecorder);
}
public static class RocksDBConfigSetterWithUserProvidedStatistics implements RocksDBConfigSetter {
public RocksDBConfigSetterWithUserProvidedStatistics(){}
public void setConfig(final String storeName, final Options options, final Map<String, Object> configs) {
options.setStatistics(new Statistics());
}
public void close(final String storeName, final Options options) {
options.statistics().close();
}
}
@Test
public void shouldNotSetStatisticsInValueProvidersWhenUserProvidesStatistics() {
rocksDBStore = getRocksDBStoreWithRocksDBMetricsRecorder();
context = getProcessorContext(RecordingLevel.DEBUG, RocksDBConfigSetterWithUserProvidedStatistics.class);
metricsRecorder.addValueProviders(eq(DB_NAME), notNull(), notNull(), isNull());
replay(metricsRecorder);
rocksDBStore.openDB(context.appConfigs(), context.stateDir());
verify(metricsRecorder);
reset(metricsRecorder);
}
public static class RocksDBConfigSetterWithUserProvidedNewBlockBasedTableFormatConfig implements RocksDBConfigSetter {
public RocksDBConfigSetterWithUserProvidedNewBlockBasedTableFormatConfig(){}
public void setConfig(final String storeName, final Options options, final Map<String, Object> configs) {
options.setTableFormatConfig(new BlockBasedTableConfig());
}
public void close(final String storeName, final Options options) {
options.statistics().close();
}
}
@Test
public void shouldThrowWhenUserProvidesNewBlockBasedTableFormatConfig() {
rocksDBStore = getRocksDBStoreWithRocksDBMetricsRecorder();
context = getProcessorContext(
RecordingLevel.DEBUG,
RocksDBConfigSetterWithUserProvidedNewBlockBasedTableFormatConfig.class
);
assertThrows(
"The used block-based table format configuration does not expose the " +
"block cache. Use the BlockBasedTableConfig instance provided by Options#tableFormatConfig() to configure " +
"the block-based table format of RocksDB. Do not provide a new instance of BlockBasedTableConfig to " +
"the RocksDB options.",
ProcessorStateException.class,
() -> rocksDBStore.openDB(context.appConfigs(), context.stateDir())
);
}
public static class RocksDBConfigSetterWithUserProvidedNewPlainTableFormatConfig implements RocksDBConfigSetter {
public RocksDBConfigSetterWithUserProvidedNewPlainTableFormatConfig(){}
public void setConfig(final String storeName, final Options options, final Map<String, Object> configs) {
options.setTableFormatConfig(new PlainTableConfig());
}
public void close(final String storeName, final Options options) {
options.statistics().close();
}
}
@Test
public void shouldNotSetCacheInValueProvidersWhenUserProvidesPlainTableFormatConfig() {
rocksDBStore = getRocksDBStoreWithRocksDBMetricsRecorder();
context = getProcessorContext(
RecordingLevel.DEBUG,
RocksDBConfigSetterWithUserProvidedNewPlainTableFormatConfig.class
);
metricsRecorder.addValueProviders(eq(DB_NAME), notNull(), isNull(), notNull());
replay(metricsRecorder);
rocksDBStore.openDB(context.appConfigs(), context.stateDir());
verify(metricsRecorder);
reset(metricsRecorder);
}
@Test
public void shouldNotThrowExceptionOnRestoreWhenThereIsPreExistingRocksDbFiles() {
rocksDBStore.init((StateStoreContext) context, rocksDBStore);
rocksDBStore.put(new Bytes("existingKey".getBytes(UTF_8)), "existingValue".getBytes(UTF_8));
rocksDBStore.flush();
final List<KeyValue<byte[], byte[]>> restoreBytes = new ArrayList<>();
final byte[] restoredKey = "restoredKey".getBytes(UTF_8);
final byte[] restoredValue = "restoredValue".getBytes(UTF_8);
restoreBytes.add(KeyValue.pair(restoredKey, restoredValue));
context.restore(DB_NAME, restoreBytes);
assertThat(
stringDeserializer.deserialize(
null,
rocksDBStore.get(new Bytes(stringSerializer.serialize(null, "restoredKey")))),
equalTo("restoredValue"));
}
@Test
public void shouldCallRocksDbConfigSetter() {
MockRocksDbConfigSetter.called = false;
final Properties props = StreamsTestUtils.getStreamsConfig();
props.put(StreamsConfig.ROCKSDB_CONFIG_SETTER_CLASS_CONFIG, MockRocksDbConfigSetter.class);
final Object param = new Object();
props.put("abc.def", param);
final InternalMockProcessorContext context = new InternalMockProcessorContext(
dir,
Serdes.String(),
Serdes.String(),
new StreamsConfig(props)
);
rocksDBStore.init((StateStoreContext) context, rocksDBStore);
assertTrue(MockRocksDbConfigSetter.called);
assertThat(MockRocksDbConfigSetter.configMap.get("abc.def"), equalTo(param));
}
@Test
public void shouldThrowProcessorStateExceptionOnOpeningReadOnlyDir() {
final File tmpDir = TestUtils.tempDirectory();
final InternalMockProcessorContext tmpContext = new InternalMockProcessorContext(tmpDir, new StreamsConfig(StreamsTestUtils.getStreamsConfig()));
assertTrue(tmpDir.setReadOnly());
assertThrows(ProcessorStateException.class, () -> rocksDBStore.openDB(tmpContext.appConfigs(), tmpContext.stateDir()));
}
@Test
public void shouldPutAll() {
final List<KeyValue<Bytes, byte[]>> entries = new ArrayList<>();
entries.add(new KeyValue<>(
new Bytes(stringSerializer.serialize(null, "1")),
stringSerializer.serialize(null, "a")));
entries.add(new KeyValue<>(
new Bytes(stringSerializer.serialize(null, "2")),
stringSerializer.serialize(null, "b")));
entries.add(new KeyValue<>(
new Bytes(stringSerializer.serialize(null, "3")),
stringSerializer.serialize(null, "c")));
rocksDBStore.init((StateStoreContext) context, rocksDBStore);
rocksDBStore.putAll(entries);
rocksDBStore.flush();
assertEquals(
"a",
stringDeserializer.deserialize(
null,
rocksDBStore.get(new Bytes(stringSerializer.serialize(null, "1")))));
assertEquals(
"b",
stringDeserializer.deserialize(
null,
rocksDBStore.get(new Bytes(stringSerializer.serialize(null, "2")))));
assertEquals(
"c",
stringDeserializer.deserialize(
null,
rocksDBStore.get(new Bytes(stringSerializer.serialize(null, "3")))));
}
@Test
public void shouldMatchPositionAfterPut() {
rocksDBStore.init((StateStoreContext) context, rocksDBStore);
context.setRecordContext(new ProcessorRecordContext(0, 1, 0, "", new RecordHeaders()));
rocksDBStore.put(new Bytes(stringSerializer.serialize(null, "one")), stringSerializer.serialize(null, "A"));
context.setRecordContext(new ProcessorRecordContext(0, 2, 0, "", new RecordHeaders()));
rocksDBStore.put(new Bytes(stringSerializer.serialize(null, "two")), stringSerializer.serialize(null, "B"));
context.setRecordContext(new ProcessorRecordContext(0, 3, 0, "", new RecordHeaders()));
rocksDBStore.put(new Bytes(stringSerializer.serialize(null, "three")), stringSerializer.serialize(null, "C"));
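// The three puts above are tagged with offsets 1, 2, and 3 on partition 0 of topic "", so the
// store position should have advanced to offset 3 for that partition.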
final Position expected = Position.fromMap(mkMap(mkEntry("", mkMap(mkEntry(0, 3L)))));
final Position actual = rocksDBStore.getPosition();
assertEquals(expected, actual);
}
@Test
public void shouldReturnKeysWithGivenPrefix() {
final List<KeyValue<Bytes, byte[]>> entries = new ArrayList<>();
entries.add(new KeyValue<>(
new Bytes(stringSerializer.serialize(null, "k1")),
stringSerializer.serialize(null, "a")));
entries.add(new KeyValue<>(
new Bytes(stringSerializer.serialize(null, "prefix_3")),
stringSerializer.serialize(null, "b")));
entries.add(new KeyValue<>(
new Bytes(stringSerializer.serialize(null, "k2")),
stringSerializer.serialize(null, "c")));
entries.add(new KeyValue<>(
new Bytes(stringSerializer.serialize(null, "prefix_2")),
stringSerializer.serialize(null, "d")));
entries.add(new KeyValue<>(
new Bytes(stringSerializer.serialize(null, "k3")),
stringSerializer.serialize(null, "e")));
entries.add(new KeyValue<>(
new Bytes(stringSerializer.serialize(null, "prefix_1")),
stringSerializer.serialize(null, "f")));
rocksDBStore.init((StateStoreContext) context, rocksDBStore);
rocksDBStore.putAll(entries);
rocksDBStore.flush();
try (final KeyValueIterator<Bytes, byte[]> keysWithPrefix = rocksDBStore.prefixScan("prefix", stringSerializer)) {
final List<String> valuesWithPrefix = new ArrayList<>();
int numberOfKeysReturned = 0;
while (keysWithPrefix.hasNext()) {
final KeyValue<Bytes, byte[]> next = keysWithPrefix.next();
valuesWithPrefix.add(new String(next.value));
numberOfKeysReturned++;
}
assertThat(numberOfKeysReturned, is(3));
assertThat(valuesWithPrefix.get(0), is("f"));
assertThat(valuesWithPrefix.get(1), is("d"));
assertThat(valuesWithPrefix.get(2), is("b"));
}
}
@Test
public void shouldReturnKeysWithGivenPrefixExcludingNextKeyLargestKey() {
final List<KeyValue<Bytes, byte[]>> entries = new ArrayList<>();
entries.add(new KeyValue<>(
new Bytes(stringSerializer.serialize(null, "abc")),
stringSerializer.serialize(null, "f")));
entries.add(new KeyValue<>(
new Bytes(stringSerializer.serialize(null, "abcd")),
stringSerializer.serialize(null, "f")));
entries.add(new KeyValue<>(
new Bytes(stringSerializer.serialize(null, "abce")),
stringSerializer.serialize(null, "f")));
rocksDBStore.init((StateStoreContext) context, rocksDBStore);
rocksDBStore.putAll(entries);
rocksDBStore.flush();
try (final KeyValueIterator<Bytes, byte[]> keysWithPrefixAsabcd = rocksDBStore.prefixScan("abcd", stringSerializer)) {
int numberOfKeysReturned = 0;
while (keysWithPrefixAsabcd.hasNext()) {
keysWithPrefixAsabcd.next().key.get();
numberOfKeysReturned++;
}
assertThat(numberOfKeysReturned, is(1));
}
}
@Test
public void shouldReturnUUIDsWithStringPrefix() {
final List<KeyValue<Bytes, byte[]>> entries = new ArrayList<>();
final Serializer<UUID> uuidSerializer = Serdes.UUID().serializer();
final UUID uuid1 = UUID.randomUUID();
final UUID uuid2 = UUID.randomUUID();
final String prefix = uuid1.toString().substring(0, 4);
final int numMatches = uuid2.toString().substring(0, 4).equals(prefix) ? 2 : 1;
entries.add(new KeyValue<>(
new Bytes(uuidSerializer.serialize(null, uuid1)),
stringSerializer.serialize(null, "a")));
entries.add(new KeyValue<>(
new Bytes(uuidSerializer.serialize(null, uuid2)),
stringSerializer.serialize(null, "b")));
rocksDBStore.init((StateStoreContext) context, rocksDBStore);
rocksDBStore.putAll(entries);
rocksDBStore.flush();
try (final KeyValueIterator<Bytes, byte[]> keysWithPrefix = rocksDBStore.prefixScan(prefix, stringSerializer)) {
final List<String> valuesWithPrefix = new ArrayList<>();
int numberOfKeysReturned = 0;
while (keysWithPrefix.hasNext()) {
final KeyValue<Bytes, byte[]> next = keysWithPrefix.next();
valuesWithPrefix.add(new String(next.value));
numberOfKeysReturned++;
}
assertThat(numberOfKeysReturned, is(numMatches));
if (numMatches == 2) {
assertThat(valuesWithPrefix.get(0), either(is("a")).or(is("b")));
} else {
assertThat(valuesWithPrefix.get(0), is("a"));
}
}
}
@Test
public void shouldReturnNoKeys() {
final List<KeyValue<Bytes, byte[]>> entries = new ArrayList<>();
entries.add(new KeyValue<>(
new Bytes(stringSerializer.serialize(null, "a")),
stringSerializer.serialize(null, "a")));
entries.add(new KeyValue<>(
new Bytes(stringSerializer.serialize(null, "b")),
stringSerializer.serialize(null, "c")));
entries.add(new KeyValue<>(
new Bytes(stringSerializer.serialize(null, "c")),
stringSerializer.serialize(null, "e")));
rocksDBStore.init((StateStoreContext) context, rocksDBStore);
rocksDBStore.putAll(entries);
rocksDBStore.flush();
try (final KeyValueIterator<Bytes, byte[]> keysWithPrefix = rocksDBStore.prefixScan("d", stringSerializer)) {
int numberOfKeysReturned = 0;
while (keysWithPrefix.hasNext()) {
keysWithPrefix.next();
numberOfKeysReturned++;
}
assertThat(numberOfKeysReturned, is(0));
}
}
@Test
public void shouldRestoreAll() {
final List<KeyValue<byte[], byte[]>> entries = getKeyValueEntries();
rocksDBStore.init((StateStoreContext) context, rocksDBStore);
context.restore(rocksDBStore.name(), entries);
assertEquals(
"a",
stringDeserializer.deserialize(
null,
rocksDBStore.get(new Bytes(stringSerializer.serialize(null, "1")))));
assertEquals(
"b",
stringDeserializer.deserialize(
null,
rocksDBStore.get(new Bytes(stringSerializer.serialize(null, "2")))));
assertEquals(
"c",
stringDeserializer.deserialize(
null,
rocksDBStore.get(new Bytes(stringSerializer.serialize(null, "3")))));
}
@Test
public void shouldPutOnlyIfAbsentValue() {
rocksDBStore.init((StateStoreContext) context, rocksDBStore);
final Bytes keyBytes = new Bytes(stringSerializer.serialize(null, "one"));
final byte[] valueBytes = stringSerializer.serialize(null, "A");
final byte[] valueBytesUpdate = stringSerializer.serialize(null, "B");
rocksDBStore.putIfAbsent(keyBytes, valueBytes);
rocksDBStore.putIfAbsent(keyBytes, valueBytesUpdate);
final String retrievedValue = stringDeserializer.deserialize(null, rocksDBStore.get(keyBytes));
assertEquals("A", retrievedValue);
}
@Test
public void shouldHandleDeletesOnRestoreAll() {
final List<KeyValue<byte[], byte[]>> entries = getKeyValueEntries();
entries.add(new KeyValue<>("1".getBytes(UTF_8), null));
rocksDBStore.init((StateStoreContext) context, rocksDBStore);
context.restore(rocksDBStore.name(), entries);
try (final KeyValueIterator<Bytes, byte[]> iterator = rocksDBStore.all()) {
final Set<String> keys = new HashSet<>();
while (iterator.hasNext()) {
keys.add(stringDeserializer.deserialize(null, iterator.next().key.get()));
}
assertThat(keys, equalTo(Utils.mkSet("2", "3")));
}
}
@Test
public void shouldHandleDeletesAndPutBackOnRestoreAll() {
final List<KeyValue<byte[], byte[]>> entries = new ArrayList<>();
entries.add(new KeyValue<>("1".getBytes(UTF_8), "a".getBytes(UTF_8)));
entries.add(new KeyValue<>("2".getBytes(UTF_8), "b".getBytes(UTF_8)));
// this will be deleted
entries.add(new KeyValue<>("1".getBytes(UTF_8), null));
entries.add(new KeyValue<>("3".getBytes(UTF_8), "c".getBytes(UTF_8)));
// this will restore key "1" as WriteBatch applies updates in order
entries.add(new KeyValue<>("1".getBytes(UTF_8), "restored".getBytes(UTF_8)));
rocksDBStore.init((StateStoreContext) context, rocksDBStore);
context.restore(rocksDBStore.name(), entries);
try (final KeyValueIterator<Bytes, byte[]> iterator = rocksDBStore.all()) {
final Set<String> keys = new HashSet<>();
while (iterator.hasNext()) {
keys.add(stringDeserializer.deserialize(null, iterator.next().key.get()));
}
assertThat(keys, equalTo(Utils.mkSet("1", "2", "3")));
assertEquals(
"restored",
stringDeserializer.deserialize(
null,
rocksDBStore.get(new Bytes(stringSerializer.serialize(null, "1")))));
assertEquals(
"b",
stringDeserializer.deserialize(
null,
rocksDBStore.get(new Bytes(stringSerializer.serialize(null, "2")))));
assertEquals(
"c",
stringDeserializer.deserialize(
null,
rocksDBStore.get(new Bytes(stringSerializer.serialize(null, "3")))));
}
}
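// Illustrative sketch, not part of the original test: restoration applies changelog records through a
// RocksDB WriteBatch, which replays operations in insertion order, so a delete followed by a later put
// of the same key leaves the put visible. Assuming an open org.rocksdb.RocksDB handle named db
// (put/delete/write may throw RocksDBException):
//
// try (final WriteBatch batch = new WriteBatch();
//      final WriteOptions writeOptions = new WriteOptions()) {
//     batch.delete("1".getBytes(UTF_8));
//     batch.put("1".getBytes(UTF_8), "restored".getBytes(UTF_8));
//     db.write(writeOptions, batch); // key "1" now maps to "restored"
// }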
@Test
public void shouldRestoreThenDeleteOnRestoreAll() {
final List<KeyValue<byte[], byte[]>> entries = getKeyValueEntries();
rocksDBStore.init((StateStoreContext) context, rocksDBStore);
context.restore(rocksDBStore.name(), entries);
assertEquals(
"a",
stringDeserializer.deserialize(
null,
rocksDBStore.get(new Bytes(stringSerializer.serialize(null, "1")))));
assertEquals(
"b",
stringDeserializer.deserialize(
null,
rocksDBStore.get(new Bytes(stringSerializer.serialize(null, "2")))));
assertEquals(
"c",
stringDeserializer.deserialize(
null,
rocksDBStore.get(new Bytes(stringSerializer.serialize(null, "3")))));
entries.clear();
entries.add(new KeyValue<>("2".getBytes(UTF_8), "b".getBytes(UTF_8)));
entries.add(new KeyValue<>("3".getBytes(UTF_8), "c".getBytes(UTF_8)));
entries.add(new KeyValue<>("1".getBytes(UTF_8), null));
context.restore(rocksDBStore.name(), entries);
try (final KeyValueIterator<Bytes, byte[]> iterator = rocksDBStore.all()) {
final Set<String> keys = new HashSet<>();
while (iterator.hasNext()) {
keys.add(stringDeserializer.deserialize(null, iterator.next().key.get()));
}
assertThat(keys, equalTo(Utils.mkSet("2", "3")));
}
}
@Test
public void shouldThrowNullPointerExceptionOnNullPut() {
rocksDBStore.init((StateStoreContext) context, rocksDBStore);
assertThrows(
NullPointerException.class,
() -> rocksDBStore.put(null, stringSerializer.serialize(null, "someVal")));
}
@Test
public void shouldThrowNullPointerExceptionOnNullPutAll() {
rocksDBStore.init((StateStoreContext) context, rocksDBStore);
assertThrows(
NullPointerException.class,
() -> rocksDBStore.putAll(Arrays.asList(new KeyValue<Bytes, byte[]>(null, stringSerializer.serialize(null, "someVal")))));
}
@Test
public void shouldThrowNullPointerExceptionOnNullGet() {
rocksDBStore.init((StateStoreContext) context, rocksDBStore);
assertThrows(
NullPointerException.class,
() -> rocksDBStore.get(null));
}
@Test
public void shouldThrowNullPointerExceptionOnDelete() {
rocksDBStore.init((StateStoreContext) context, rocksDBStore);
assertThrows(
NullPointerException.class,
() -> rocksDBStore.delete(null));
}
@Test
public void shouldReturnValueOnRange() {
rocksDBStore.init((StateStoreContext) context, rocksDBStore);
final KeyValue<String, String> kv0 = new KeyValue<>("0", "zero");
final KeyValue<String, String> kv1 = new KeyValue<>("1", "one");
final KeyValue<String, String> kv2 = new KeyValue<>("2", "two");
rocksDBStore.put(new Bytes(kv0.key.getBytes(UTF_8)), kv0.value.getBytes(UTF_8));
rocksDBStore.put(new Bytes(kv1.key.getBytes(UTF_8)), kv1.value.getBytes(UTF_8));
rocksDBStore.put(new Bytes(kv2.key.getBytes(UTF_8)), kv2.value.getBytes(UTF_8));
final LinkedList<KeyValue<String, String>> expectedContents = new LinkedList<>();
expectedContents.add(kv0);
expectedContents.add(kv1);
try (final KeyValueIterator<Bytes, byte[]> iterator = rocksDBStore.range(null, new Bytes(stringSerializer.serialize(null, "1")))) {
assertEquals(expectedContents, getDeserializedList(iterator));
}
}
@Test
public void shouldThrowProcessorStateExceptionOnPutDeletedDir() throws IOException {
rocksDBStore.init((StateStoreContext) context, rocksDBStore);
Utils.delete(dir);
rocksDBStore.put(
new Bytes(stringSerializer.serialize(null, "anyKey")),
stringSerializer.serialize(null, "anyValue"));
assertThrows(ProcessorStateException.class, () -> rocksDBStore.flush());
}
@Test
public void shouldHandleToggleOfEnablingBloomFilters() {
final Properties props = StreamsTestUtils.getStreamsConfig();
props.put(StreamsConfig.ROCKSDB_CONFIG_SETTER_CLASS_CONFIG, TestingBloomFilterRocksDBConfigSetter.class);
dir = TestUtils.tempDirectory();
context = new InternalMockProcessorContext(dir,
Serdes.String(),
Serdes.String(),
new StreamsConfig(props));
enableBloomFilters = false;
rocksDBStore.init((StateStoreContext) context, rocksDBStore);
final List<String> expectedValues = new ArrayList<>();
expectedValues.add("a");
expectedValues.add("b");
expectedValues.add("c");
final List<KeyValue<byte[], byte[]>> keyValues = getKeyValueEntries();
for (final KeyValue<byte[], byte[]> keyValue : keyValues) {
rocksDBStore.put(new Bytes(keyValue.key), keyValue.value);
}
int expectedIndex = 0;
for (final KeyValue<byte[], byte[]> keyValue : keyValues) {
final byte[] valBytes = rocksDBStore.get(new Bytes(keyValue.key));
assertThat(new String(valBytes, UTF_8), is(expectedValues.get(expectedIndex++)));
}
assertFalse(TestingBloomFilterRocksDBConfigSetter.bloomFiltersSet);
rocksDBStore.close();
expectedIndex = 0;
// reopen with Bloom Filters enabled
// should open fine without errors
enableBloomFilters = true;
rocksDBStore.init((StateStoreContext) context, rocksDBStore);
for (final KeyValue<byte[], byte[]> keyValue : keyValues) {
final byte[] valBytes = rocksDBStore.get(new Bytes(keyValue.key));
assertThat(new String(valBytes, UTF_8), is(expectedValues.get(expectedIndex++)));
}
assertTrue(TestingBloomFilterRocksDBConfigSetter.bloomFiltersSet);
}
@Test
public void shouldVerifyThatMetricsRecordedFromStatisticsGetMeasurementsFromRocksDB() {
final TaskId taskId = new TaskId(0, 0);
final Metrics metrics = new Metrics(new MetricConfig().recordLevel(RecordingLevel.DEBUG));
final StreamsMetricsImpl streamsMetrics =
new StreamsMetricsImpl(metrics, "test-application", StreamsConfig.METRICS_LATEST, time);
context = EasyMock.niceMock(InternalMockProcessorContext.class);
EasyMock.expect(context.metrics()).andStubReturn(streamsMetrics);
EasyMock.expect(context.taskId()).andStubReturn(taskId);
EasyMock.expect(context.appConfigs())
.andStubReturn(new StreamsConfig(StreamsTestUtils.getStreamsConfig()).originals());
EasyMock.expect(context.stateDir()).andStubReturn(dir);
final MonotonicProcessorRecordContext processorRecordContext = new MonotonicProcessorRecordContext("test", 0);
EasyMock.expect(context.recordMetadata()).andStubReturn(Optional.of(processorRecordContext));
EasyMock.replay(context);
rocksDBStore.init((StateStoreContext) context, rocksDBStore);
final byte[] key = "hello".getBytes();
final byte[] value = "world".getBytes();
rocksDBStore.put(Bytes.wrap(key), value);
streamsMetrics.rocksDBMetricsRecordingTrigger().run();
final Metric bytesWrittenTotal = metrics.metric(new MetricName(
"bytes-written-total",
StreamsMetricsImpl.STATE_STORE_LEVEL_GROUP,
"description is not verified",
streamsMetrics.storeLevelTagMap(taskId.toString(), METRICS_SCOPE, DB_NAME)
));
assertThat((double) bytesWrittenTotal.metricValue(), greaterThan(0d));
}
@Test
public void shouldVerifyThatMetricsRecordedFromPropertiesGetMeasurementsFromRocksDB() {
final TaskId taskId = new TaskId(0, 0);
final Metrics metrics = new Metrics(new MetricConfig().recordLevel(RecordingLevel.INFO));
final StreamsMetricsImpl streamsMetrics =
new StreamsMetricsImpl(metrics, "test-application", StreamsConfig.METRICS_LATEST, time);
context = EasyMock.niceMock(InternalMockProcessorContext.class);
EasyMock.expect(context.metrics()).andStubReturn(streamsMetrics);
EasyMock.expect(context.taskId()).andStubReturn(taskId);
EasyMock.expect(context.appConfigs())
.andStubReturn(new StreamsConfig(StreamsTestUtils.getStreamsConfig()).originals());
EasyMock.expect(context.stateDir()).andStubReturn(dir);
final MonotonicProcessorRecordContext processorRecordContext = new MonotonicProcessorRecordContext("test", 0);
EasyMock.expect(context.recordMetadata()).andStubReturn(Optional.of(processorRecordContext));
EasyMock.replay(context);
rocksDBStore.init((StateStoreContext) context, rocksDBStore);
final byte[] key = "hello".getBytes();
final byte[] value = "world".getBytes();
rocksDBStore.put(Bytes.wrap(key), value);
final Metric numberOfEntriesActiveMemTable = metrics.metric(new MetricName(
"num-entries-active-mem-table",
StreamsMetricsImpl.STATE_STORE_LEVEL_GROUP,
"description is not verified",
streamsMetrics.storeLevelTagMap(taskId.toString(), METRICS_SCOPE, DB_NAME)
));
assertThat(numberOfEntriesActiveMemTable, notNullValue());
assertThat((BigInteger) numberOfEntriesActiveMemTable.metricValue(), greaterThan(BigInteger.valueOf(0)));
}
@Test
public void shouldVerifyThatPropertyBasedMetricsUseValidPropertyName() {
final TaskId taskId = new TaskId(0, 0);
final Metrics metrics = new Metrics(new MetricConfig().recordLevel(RecordingLevel.INFO));
final StreamsMetricsImpl streamsMetrics =
new StreamsMetricsImpl(metrics, "test-application", StreamsConfig.METRICS_LATEST, time);
final Properties props = StreamsTestUtils.getStreamsConfig();
context = EasyMock.niceMock(InternalMockProcessorContext.class);
EasyMock.expect(context.metrics()).andStubReturn(streamsMetrics);
EasyMock.expect(context.taskId()).andStubReturn(taskId);
EasyMock.expect(context.appConfigs()).andStubReturn(new StreamsConfig(props).originals());
EasyMock.expect(context.stateDir()).andStubReturn(dir);
EasyMock.replay(context);
rocksDBStore.init((StateStoreContext) context, rocksDBStore);
final List<String> propertyNames = Arrays.asList(
"num-entries-active-mem-table",
"num-deletes-active-mem-table",
"num-entries-imm-mem-tables",
"num-deletes-imm-mem-tables",
"num-immutable-mem-table",
"cur-size-active-mem-table",
"cur-size-all-mem-tables",
"size-all-mem-tables",
"mem-table-flush-pending",
"num-running-flushes",
"compaction-pending",
"num-running-compactions",
"estimate-pending-compaction-bytes",
"total-sst-files-size",
"live-sst-files-size",
"num-live-versions",
"block-cache-capacity",
"block-cache-usage",
"block-cache-pinned-usage",
"estimate-num-keys",
"estimate-table-readers-mem",
"background-errors"
);
for (final String propertyName : propertyNames) {
final Metric metric = metrics.metric(new MetricName(
propertyName,
StreamsMetricsImpl.STATE_STORE_LEVEL_GROUP,
"description is not verified",
streamsMetrics.storeLevelTagMap(taskId.toString(), METRICS_SCOPE, DB_NAME)
));
assertThat("Metric " + propertyName + " not found!", metric, notNullValue());
metric.metricValue();
}
}
@Test
public void shouldPerformRangeQueriesWithCachingDisabled() {
context.setTime(1L);
store.put(1, "hi");
store.put(2, "goodbye");
try (final KeyValueIterator<Integer, String> range = store.range(1, 2)) {
assertEquals("hi", range.next().value);
assertEquals("goodbye", range.next().value);
assertFalse(range.hasNext());
}
}
@Test
public void shouldPerformAllQueriesWithCachingDisabled() {
context.setTime(1L);
store.put(1, "hi");
store.put(2, "goodbye");
try (final KeyValueIterator<Integer, String> range = store.all()) {
assertEquals("hi", range.next().value);
assertEquals("goodbye", range.next().value);
assertFalse(range.hasNext());
}
}
@Test
public void shouldCloseOpenRangeIteratorsWhenStoreClosedAndThrowInvalidStateStoreOnHasNextAndNext() {
context.setTime(1L);
store.put(1, "hi");
store.put(2, "goodbye");
try (final KeyValueIterator<Integer, String> iteratorOne = store.range(1, 5);
final KeyValueIterator<Integer, String> iteratorTwo = store.range(1, 4)) {
assertTrue(iteratorOne.hasNext());
assertTrue(iteratorTwo.hasNext());
store.close();
Assertions.assertThrows(InvalidStateStoreException.class, () -> iteratorOne.hasNext());
Assertions.assertThrows(InvalidStateStoreException.class, () -> iteratorOne.next());
Assertions.assertThrows(InvalidStateStoreException.class, () -> iteratorTwo.hasNext());
Assertions.assertThrows(InvalidStateStoreException.class, () -> iteratorTwo.next());
}
}
@Test
public void shouldRestoreRecordsAndConsistencyVectorSingleTopic() {
final List<ConsumerRecord<byte[], byte[]>> entries = getChangelogRecords();
final Properties props = StreamsTestUtils.getStreamsConfig();
props.put(StreamsConfig.ROCKSDB_CONFIG_SETTER_CLASS_CONFIG, MockRocksDbConfigSetter.class);
props.put(InternalConfig.IQ_CONSISTENCY_OFFSET_VECTOR_ENABLED, true);
dir = TestUtils.tempDirectory();
context = new InternalMockProcessorContext<>(
dir,
Serdes.String(),
Serdes.String(),
new StreamsConfig(props)
);
rocksDBStore.init((StateStoreContext) context, rocksDBStore);
context.restoreWithHeaders(rocksDBStore.name(), entries);
assertEquals(
"a",
stringDeserializer.deserialize(
null,
rocksDBStore.get(new Bytes(stringSerializer.serialize(null, "1")))));
assertEquals(
"b",
stringDeserializer.deserialize(
null,
rocksDBStore.get(new Bytes(stringSerializer.serialize(null, "2")))));
assertEquals(
"c",
stringDeserializer.deserialize(
null,
rocksDBStore.get(new Bytes(stringSerializer.serialize(null, "3")))));
assertThat(rocksDBStore.getPosition(), Matchers.notNullValue());
assertThat(rocksDBStore.getPosition().getPartitionPositions(""), Matchers.notNullValue());
assertThat(rocksDBStore.getPosition().getPartitionPositions(""), hasEntry(0, 3L));
}
@Test
public void shouldRestoreRecordsAndConsistencyVectorMultipleTopics() {
final List<ConsumerRecord<byte[], byte[]>> entries = getChangelogRecordsMultipleTopics();
final Properties props = StreamsTestUtils.getStreamsConfig();
props.put(StreamsConfig.ROCKSDB_CONFIG_SETTER_CLASS_CONFIG, MockRocksDbConfigSetter.class);
props.put(InternalConfig.IQ_CONSISTENCY_OFFSET_VECTOR_ENABLED, true);
dir = TestUtils.tempDirectory();
context = new InternalMockProcessorContext<>(
dir,
Serdes.String(),
Serdes.String(),
new StreamsConfig(props)
);
rocksDBStore.init((StateStoreContext) context, rocksDBStore);
context.restoreWithHeaders(rocksDBStore.name(), entries);
assertEquals(
"a",
stringDeserializer.deserialize(
null,
rocksDBStore.get(new Bytes(stringSerializer.serialize(null, "1")))));
assertEquals(
"b",
stringDeserializer.deserialize(
null,
rocksDBStore.get(new Bytes(stringSerializer.serialize(null, "2")))));
assertEquals(
"c",
stringDeserializer.deserialize(
null,
rocksDBStore.get(new Bytes(stringSerializer.serialize(null, "3")))));
assertThat(rocksDBStore.getPosition(), Matchers.notNullValue());
assertThat(rocksDBStore.getPosition().getPartitionPositions("A"), Matchers.notNullValue());
assertThat(rocksDBStore.getPosition().getPartitionPositions("A"), hasEntry(0, 3L));
assertThat(rocksDBStore.getPosition().getPartitionPositions("B"), Matchers.notNullValue());
assertThat(rocksDBStore.getPosition().getPartitionPositions("B"), hasEntry(0, 2L));
}
@Test
public void shouldHandleTombstoneRecords() {
final List<ConsumerRecord<byte[], byte[]>> entries = getChangelogRecordsWithTombstones();
final Properties props = StreamsTestUtils.getStreamsConfig();
props.put(StreamsConfig.ROCKSDB_CONFIG_SETTER_CLASS_CONFIG, MockRocksDbConfigSetter.class);
props.put(InternalConfig.IQ_CONSISTENCY_OFFSET_VECTOR_ENABLED, true);
dir = TestUtils.tempDirectory();
context = new InternalMockProcessorContext<>(
dir,
Serdes.String(),
Serdes.String(),
new StreamsConfig(props)
);
rocksDBStore.init((StateStoreContext) context, rocksDBStore);
context.restoreWithHeaders(rocksDBStore.name(), entries);
assertNull(stringDeserializer.deserialize(
null,
rocksDBStore.get(new Bytes(stringSerializer.serialize(null, "1")))));
assertThat(rocksDBStore.getPosition(), Matchers.notNullValue());
assertThat(rocksDBStore.getPosition().getPartitionPositions("A"), hasEntry(0, 2L));
}
@Test
public void shouldNotThrowWhenRestoringOnMissingHeaders() {
final List<KeyValue<byte[], byte[]>> entries = getChangelogRecordsWithoutHeaders();
final Properties props = StreamsTestUtils.getStreamsConfig();
props.put(StreamsConfig.ROCKSDB_CONFIG_SETTER_CLASS_CONFIG, MockRocksDbConfigSetter.class);
props.put(InternalConfig.IQ_CONSISTENCY_OFFSET_VECTOR_ENABLED, true);
dir = TestUtils.tempDirectory();
context = new InternalMockProcessorContext<>(
dir,
Serdes.String(),
Serdes.String(),
new StreamsConfig(props)
);
rocksDBStore.init((StateStoreContext) context, rocksDBStore);
context.restore(rocksDBStore.name(), entries);
assertThat(rocksDBStore.getPosition(), is(Position.emptyPosition()));
}
private List<ConsumerRecord<byte[], byte[]>> getChangelogRecords() {
final List<ConsumerRecord<byte[], byte[]>> entries = new ArrayList<>();
entries.add(createChangelogRecord("1".getBytes(UTF_8), "a".getBytes(UTF_8), "", 0, 1));
entries.add(createChangelogRecord("2".getBytes(UTF_8), "b".getBytes(UTF_8), "", 0, 2));
entries.add(createChangelogRecord("3".getBytes(UTF_8), "c".getBytes(UTF_8), "", 0, 3));
return entries;
}
private List<ConsumerRecord<byte[], byte[]>> getChangelogRecordsMultipleTopics() {
final List<ConsumerRecord<byte[], byte[]>> entries = new ArrayList<>();
entries.add(createChangelogRecord("1".getBytes(UTF_8), "a".getBytes(UTF_8), "A", 0, 1));
entries.add(createChangelogRecord("2".getBytes(UTF_8), "b".getBytes(UTF_8), "B", 0, 2));
entries.add(createChangelogRecord("3".getBytes(UTF_8), "c".getBytes(UTF_8), "A", 0, 3));
return entries;
}
private List<ConsumerRecord<byte[], byte[]>> getChangelogRecordsWithTombstones() {
final List<ConsumerRecord<byte[], byte[]>> entries = new ArrayList<>();
entries.add(createChangelogRecord("1".getBytes(UTF_8), "a".getBytes(UTF_8), "A", 0, 1));
entries.add(createChangelogRecord("1".getBytes(UTF_8), null, "A", 0, 2));
return entries;
}
private List<KeyValue<byte[], byte[]>> getChangelogRecordsWithoutHeaders() {
final List<KeyValue<byte[], byte[]>> entries = new ArrayList<>();
entries.add(new KeyValue<>("1".getBytes(UTF_8), "a".getBytes(UTF_8)));
entries.add(new KeyValue<>("2".getBytes(UTF_8), "b".getBytes(UTF_8)));
entries.add(new KeyValue<>("3".getBytes(UTF_8), "c".getBytes(UTF_8)));
return entries;
}
private ConsumerRecord<byte[], byte[]> createChangelogRecord(
final byte[] key, final byte[] value, final String topic, final int partition, final long offset) {
final Headers headers = new RecordHeaders();
Position position = Position.emptyPosition();
position = position.withComponent(topic, partition, offset);
headers.add(ChangelogRecordDeserializationHelper.CHANGELOG_VERSION_HEADER_RECORD_CONSISTENCY);
headers.add(new RecordHeader(
ChangelogRecordDeserializationHelper.CHANGELOG_POSITION_HEADER_KEY,
PositionSerde.serialize(position).array()));
return new ConsumerRecord<>("", 0, 0L, RecordBatch.NO_TIMESTAMP, TimestampType.NO_TIMESTAMP_TYPE, -1, -1,
key, value, headers, Optional.empty());
}
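// Illustrative sketch, assuming the standard Kafka Streams helpers: the position attached to the
// headers above could be read back from a consumed record along these lines:
//
// final Header positionHeader =
//     record.headers().lastHeader(ChangelogRecordDeserializationHelper.CHANGELOG_POSITION_HEADER_KEY);
// final Position restored = PositionSerde.deserialize(ByteBuffer.wrap(positionHeader.value()));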
public static class TestingBloomFilterRocksDBConfigSetter implements RocksDBConfigSetter {
static boolean bloomFiltersSet;
static Filter filter;
static Cache cache;
@Override
public void setConfig(final String storeName, final Options options, final Map<String, Object> configs) {
final BlockBasedTableConfig tableConfig = (BlockBasedTableConfig) options.tableFormatConfig();
cache = new LRUCache(50 * 1024 * 1024L);
tableConfig.setBlockCache(cache);
tableConfig.setBlockSize(4096L);
if (enableBloomFilters) {
filter = new BloomFilter();
tableConfig.setFilterPolicy(filter);
options.optimizeFiltersForHits();
bloomFiltersSet = true;
} else {
options.setOptimizeFiltersForHits(false);
bloomFiltersSet = false;
}
options.setTableFormatConfig(tableConfig);
}
@Override
public void close(final String storeName, final Options options) {
if (filter != null) {
filter.close();
}
cache.close();
}
}
private List<KeyValue<byte[], byte[]>> getKeyValueEntries() {
final List<KeyValue<byte[], byte[]>> entries = new ArrayList<>();
entries.add(new KeyValue<>("1".getBytes(UTF_8), "a".getBytes(UTF_8)));
entries.add(new KeyValue<>("2".getBytes(UTF_8), "b".getBytes(UTF_8)));
entries.add(new KeyValue<>("3".getBytes(UTF_8), "c".getBytes(UTF_8)));
return entries;
}
private List<KeyValue<String, String>> getDeserializedList(final KeyValueIterator<Bytes, byte[]> iter) {
final List<KeyValue<Bytes, byte[]>> bytes = Utils.toList(iter);
return bytes.stream()
.map(kv -> new KeyValue<String, String>(kv.key.toString(), stringDeserializer.deserialize(null, kv.value)))
.collect(Collectors.toList());
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jclouds.openstack.nova.v2_0.compute.extensions;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertTrue;
import java.net.URI;
import java.util.Properties;
import java.util.Set;
import org.jclouds.compute.domain.SecurityGroup;
import org.jclouds.compute.extensions.SecurityGroupExtension;
import org.jclouds.domain.LocationBuilder;
import org.jclouds.domain.LocationScope;
import org.jclouds.http.HttpRequest;
import org.jclouds.http.HttpResponse;
import org.jclouds.net.domain.IpPermission;
import org.jclouds.net.domain.IpProtocol;
import org.jclouds.openstack.nova.v2_0.internal.BaseNovaComputeServiceExpectTest;
import org.testng.annotations.Test;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableMap.Builder;
import com.google.common.collect.ImmutableMultimap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.LinkedHashMultimap;
import com.google.common.collect.Multimap;
import com.google.common.collect.Sets;
@Test(groups = "unit", testName = "NovaSecurityGroupExtensionExpectTest")
public class NovaSecurityGroupExtensionExpectTest extends BaseNovaComputeServiceExpectTest {
protected String region = "az-1.region-a.geo-1";
protected HttpRequest list = HttpRequest.builder().method("GET").endpoint(
URI.create("https://az-1.region-a.geo-1.compute.hpcloudsvc.com/v2/3456/os-security-groups")).headers(
ImmutableMultimap.<String, String> builder().put("Accept", "application/json").put("X-Auth-Token",
authToken).build()).build();
protected HttpResponse listResponse = HttpResponse.builder().statusCode(200).payload(
payloadFromResource("/securitygroup_list_extension.json")).build();
@Override
protected Properties setupProperties() {
Properties overrides = super.setupProperties();
overrides.setProperty("jclouds.regions", region);
return overrides;
}
public void testListSecurityGroups() {
Builder<HttpRequest, HttpResponse> requestResponseMap = ImmutableMap.<HttpRequest, HttpResponse> builder();
requestResponseMap.put(keystoneAuthWithUsernameAndPasswordAndTenantName, responseWithKeystoneAccess);
requestResponseMap.put(extensionsOfNovaRequest, extensionsOfNovaResponse);
requestResponseMap.put(list, listResponse).build();
SecurityGroupExtension extension = requestsSendResponses(requestResponseMap.build()).getSecurityGroupExtension().get();
Set<SecurityGroup> groups = extension.listSecurityGroups();
assertEquals(groups.size(), 1);
}
public void testListSecurityGroupsInLocation() {
HttpRequest list = HttpRequest.builder().method("GET").endpoint(
URI.create("https://az-1.region-a.geo-1.compute.hpcloudsvc.com/v2/3456/os-security-groups")).headers(
ImmutableMultimap.<String, String> builder().put("Accept", "application/json").put("X-Auth-Token",
authToken).build()).build();
HttpResponse listResponse = HttpResponse.builder().statusCode(200).payload(
payloadFromResource("/securitygroup_list.json")).build();
Builder<HttpRequest, HttpResponse> requestResponseMap = ImmutableMap.<HttpRequest, HttpResponse> builder();
requestResponseMap.put(keystoneAuthWithUsernameAndPasswordAndTenantName, responseWithKeystoneAccess);
requestResponseMap.put(extensionsOfNovaRequest, extensionsOfNovaResponse);
requestResponseMap.put(list, listResponse).build();
SecurityGroupExtension extension = requestsSendResponses(requestResponseMap.build()).getSecurityGroupExtension().get();
Set<SecurityGroup> groups = extension.listSecurityGroupsInLocation(new LocationBuilder()
.scope(LocationScope.REGION)
.id(region)
.description("region")
.build());
assertEquals(groups.size(), 1);
}
public void testListSecurityGroupsForNode() {
HttpRequest serverReq = HttpRequest.builder().method("GET").endpoint(
URI.create("https://az-1.region-a.geo-1.compute.hpcloudsvc.com/v2/3456/os-create-server-ext/8d0a6ca5-8849-4b3d-b86e-f24c92490ebb"))
.headers(
ImmutableMultimap.<String, String> builder().put("Accept", "application/json").put("X-Auth-Token",
authToken).build()).build();
HttpResponse serverResponse = HttpResponse.builder().statusCode(200).payload(
payloadFromResource("/server_with_security_groups_extension.json")).build();
HttpRequest list = HttpRequest.builder().method("GET").endpoint(
URI.create("https://az-1.region-a.geo-1.compute.hpcloudsvc.com/v2/3456/os-security-groups")).headers(
ImmutableMultimap.<String, String> builder().put("Accept", "application/json").put("X-Auth-Token",
authToken).build()).build();
HttpResponse listResponse = HttpResponse.builder().statusCode(200).payload(
payloadFromResource("/securitygroup_list.json")).build();
Builder<HttpRequest, HttpResponse> requestResponseMap = ImmutableMap.<HttpRequest, HttpResponse> builder();
requestResponseMap.put(keystoneAuthWithUsernameAndPasswordAndTenantName, responseWithKeystoneAccess);
requestResponseMap.put(extensionsOfNovaRequest, extensionsOfNovaResponse);
requestResponseMap.put(serverReq, serverResponse);
requestResponseMap.put(list, listResponse).build();
SecurityGroupExtension extension = requestsSendResponses(requestResponseMap.build()).getSecurityGroupExtension().get();
Set<SecurityGroup> groups = extension.listSecurityGroupsForNode(region + "/8d0a6ca5-8849-4b3d-b86e-f24c92490ebb");
assertEquals(groups.size(), 1);
}
public void testGetSecurityGroupById() {
HttpRequest getSecurityGroup = HttpRequest.builder().method("GET").endpoint(
URI.create("https://az-1.region-a.geo-1.compute.hpcloudsvc.com/v2/3456/os-security-groups/160")).headers(
ImmutableMultimap.<String, String> builder().put("Accept", "application/json").put("X-Auth-Token",
authToken).build()).build();
HttpResponse getSecurityGroupResponse = HttpResponse.builder().statusCode(200).payload(
payloadFromResource("/securitygroup_details_extension.json")).build();
Builder<HttpRequest, HttpResponse> requestResponseMap = ImmutableMap.<HttpRequest, HttpResponse> builder();
requestResponseMap.put(keystoneAuthWithUsernameAndPasswordAndTenantName, responseWithKeystoneAccess);
requestResponseMap.put(extensionsOfNovaRequest, extensionsOfNovaResponse);
requestResponseMap.put(getSecurityGroup, getSecurityGroupResponse);
requestResponseMap.put(list, listResponse).build();
SecurityGroupExtension extension = requestsSendResponses(requestResponseMap.build()).getSecurityGroupExtension().get();
SecurityGroup group = extension.getSecurityGroupById(region + "/160");
assertEquals(group.getId(), region + "/160");
}
public void testCreateSecurityGroup() {
HttpRequest getSecurityGroup = HttpRequest.builder().method("GET").endpoint(
URI.create("https://az-1.region-a.geo-1.compute.hpcloudsvc.com/v2/3456/os-security-groups/160")).headers(
ImmutableMultimap.<String, String> builder().put("Accept", "application/json").put("X-Auth-Token",
authToken).build()).build();
HttpResponse getSecurityGroupResponse = HttpResponse.builder().statusCode(200).payload(
payloadFromResource("/securitygroup_details_extension.json")).build();
HttpRequest create = HttpRequest.builder().method("POST").endpoint(
URI.create("https://az-1.region-a.geo-1.compute.hpcloudsvc.com/v2/3456/os-security-groups")).headers(
ImmutableMultimap.<String, String> builder().put("Accept", "application/json").put("X-Auth-Token",
authToken).build())
.payload(
payloadFromStringWithContentType(
"{\"security_group\":{\"name\":\"jclouds-test\",\"description\":\"jclouds-test\"}}",
"application/json")).build();
HttpResponse createResponse = HttpResponse.builder().statusCode(200).payload(
payloadFromResource("/securitygroup_created.json")).build();
HttpRequest list = HttpRequest.builder().method("GET").endpoint(
URI.create("https://az-1.region-a.geo-1.compute.hpcloudsvc.com/v2/3456/os-security-groups")).headers(
ImmutableMultimap.<String, String>builder().put("Accept", "application/json").put("X-Auth-Token",
authToken).build()).build();
HttpResponse listResponse = HttpResponse.builder().statusCode(200).payload(
payloadFromResource("/securitygroup_list_extension.json")).build();
Builder<HttpRequest, HttpResponse> requestResponseMap = ImmutableMap.<HttpRequest, HttpResponse> builder();
requestResponseMap.put(keystoneAuthWithUsernameAndPasswordAndTenantName, responseWithKeystoneAccess);
requestResponseMap.put(extensionsOfNovaRequest, extensionsOfNovaResponse);
requestResponseMap.put(create, createResponse);
requestResponseMap.put(list, listResponse);
requestResponseMap.put(getSecurityGroup, getSecurityGroupResponse).build();
SecurityGroupExtension extension = requestsSendResponses(requestResponseMap.build()).getSecurityGroupExtension().get();
SecurityGroup group = extension.createSecurityGroup("test", new LocationBuilder()
.scope(LocationScope.REGION)
.id(region)
.description("region")
.build());
assertEquals(group.getId(), region + "/160");
}
public void testRemoveSecurityGroup() {
HttpRequest delete = HttpRequest.builder().method("DELETE").endpoint(
URI.create("https://az-1.region-a.geo-1.compute.hpcloudsvc.com/v2/3456/os-security-groups/160"))
.headers(
ImmutableMultimap.<String, String>builder().put("Accept", "application/json")
.put("X-Auth-Token", authToken).build()).build();
HttpResponse deleteResponse = HttpResponse.builder().statusCode(202).build();
HttpRequest getSecurityGroup = HttpRequest.builder().method("GET").endpoint(
URI.create("https://az-1.region-a.geo-1.compute.hpcloudsvc.com/v2/3456/os-security-groups/160")).headers(
ImmutableMultimap.<String, String> builder().put("Accept", "application/json").put("X-Auth-Token",
authToken).build()).build();
HttpResponse getSecurityGroupResponse = HttpResponse.builder().statusCode(200).payload(
payloadFromResource("/securitygroup_details_extension.json")).build();
Builder<HttpRequest, HttpResponse> requestResponseMap = ImmutableMap.<HttpRequest, HttpResponse> builder();
requestResponseMap.put(keystoneAuthWithUsernameAndPasswordAndTenantName, responseWithKeystoneAccess);
requestResponseMap.put(extensionsOfNovaRequest, extensionsOfNovaResponse);
requestResponseMap.put(getSecurityGroup, getSecurityGroupResponse);
requestResponseMap.put(delete, deleteResponse).build();
SecurityGroupExtension extension = requestsSendResponses(requestResponseMap.build()).getSecurityGroupExtension().get();
assertTrue(extension.removeSecurityGroup(region + "/160"), "Expected removal of security group to be successful");
}
public void testAddIpPermissionCidrFromIpPermission() {
HttpRequest createRule = HttpRequest
.builder()
.method("POST")
.endpoint("https://az-1.region-a.geo-1.compute.hpcloudsvc.com/v2/3456/os-security-group-rules")
.addHeader("Accept", "application/json")
.addHeader("X-Auth-Token", authToken)
.payload(
payloadFromStringWithContentType(
"{\"security_group_rule\":{\"parent_group_id\":\"160\",\"cidr\":\"10.2.6.0/24\",\"ip_protocol\":\"tcp\",\"from_port\":\"22\",\"to_port\":\"22\"}}",
"application/json")).build();
HttpResponse createRuleResponse = HttpResponse.builder().statusCode(200).payload(
payloadFromResource("/securitygrouprule_created_cidr.json")).build();
HttpRequest getSecurityGroup = HttpRequest.builder().method("GET").endpoint(
URI.create("https://az-1.region-a.geo-1.compute.hpcloudsvc.com/v2/3456/os-security-groups/160")).headers(
ImmutableMultimap.<String, String> builder().put("Accept", "application/json").put("X-Auth-Token",
authToken).build()).build();
HttpResponse getSecurityGroupNoRulesResponse = HttpResponse.builder().statusCode(200).payload(
payloadFromResource("/securitygroup_details_extension_norules.json")).build();
HttpResponse getSecurityGroupResponse = HttpResponse.builder().statusCode(200).payload(
payloadFromResource("/securitygroup_details_extension.json")).build();
SecurityGroupExtension extension = orderedRequestsSendResponses(ImmutableList.of(keystoneAuthWithUsernameAndPasswordAndTenantName,
extensionsOfNovaRequest, getSecurityGroup, createRule, getSecurityGroup, list, list),
ImmutableList.of(responseWithKeystoneAccess, extensionsOfNovaResponse, getSecurityGroupNoRulesResponse,
createRuleResponse, getSecurityGroupResponse, listResponse, listResponse)).getSecurityGroupExtension().get();
IpPermission.Builder builder = IpPermission.builder();
builder.ipProtocol(IpProtocol.TCP);
builder.fromPort(22);
builder.toPort(22);
builder.cidrBlock("10.2.6.0/24");
IpPermission perm = builder.build();
SecurityGroup origGroup = extension.getSecurityGroupById(region + "/160");
assertNotNull(origGroup);
SecurityGroup newGroup = extension.addIpPermission(perm, origGroup);
assertNotNull(newGroup);
}
public void testAddIpPermissionCidrFromParams() {
HttpRequest createRule = HttpRequest
.builder()
.method("POST")
.endpoint("https://az-1.region-a.geo-1.compute.hpcloudsvc.com/v2/3456/os-security-group-rules")
.addHeader("Accept", "application/json")
.addHeader("X-Auth-Token", authToken)
.payload(
payloadFromStringWithContentType(
"{\"security_group_rule\":{\"parent_group_id\":\"160\",\"cidr\":\"10.2.6.0/24\",\"ip_protocol\":\"tcp\",\"from_port\":\"22\",\"to_port\":\"22\"}}",
"application/json")).build();
HttpResponse createRuleResponse = HttpResponse.builder().statusCode(200).payload(
payloadFromResource("/securitygrouprule_created_cidr.json")).build();
HttpRequest getSecurityGroup = HttpRequest.builder().method("GET").endpoint(
URI.create("https://az-1.region-a.geo-1.compute.hpcloudsvc.com/v2/3456/os-security-groups/160")).headers(
ImmutableMultimap.<String, String> builder().put("Accept", "application/json").put("X-Auth-Token",
authToken).build()).build();
HttpResponse getSecurityGroupNoRulesResponse = HttpResponse.builder().statusCode(200).payload(
payloadFromResource("/securitygroup_details_extension_norules.json")).build();
HttpResponse getSecurityGroupResponse = HttpResponse.builder().statusCode(200).payload(
payloadFromResource("/securitygroup_details_extension.json")).build();
SecurityGroupExtension extension = orderedRequestsSendResponses(ImmutableList.of(keystoneAuthWithUsernameAndPasswordAndTenantName,
extensionsOfNovaRequest, getSecurityGroup, createRule, getSecurityGroup, list, list),
ImmutableList.of(responseWithKeystoneAccess, extensionsOfNovaResponse, getSecurityGroupNoRulesResponse,
createRuleResponse, getSecurityGroupResponse, listResponse, listResponse)).getSecurityGroupExtension().get();
SecurityGroup origGroup = extension.getSecurityGroupById(region + "/160");
assertNotNull(origGroup);
SecurityGroup newGroup = extension.addIpPermission(IpProtocol.TCP,
22,
22,
emptyMultimap(),
ImmutableSet.of("10.2.6.0/24"),
emptyStringSet(),
origGroup);
assertNotNull(newGroup);
}
public void testAddIpPermissionGroupFromIpPermission() {
HttpRequest createRule = HttpRequest
.builder()
.method("POST")
.endpoint("https://az-1.region-a.geo-1.compute.hpcloudsvc.com/v2/3456/os-security-group-rules")
.addHeader("Accept", "application/json")
.addHeader("X-Auth-Token", authToken)
.payload(
payloadFromStringWithContentType(
"{\"security_group_rule\":{\"group_id\":\"11111\",\"parent_group_id\":\"160\",\"ip_protocol\":\"tcp\",\"from_port\":\"22\",\"to_port\":\"22\"}}",
"application/json")).build();
HttpResponse createRuleResponse = HttpResponse.builder().statusCode(200).payload(
payloadFromResource("/securitygrouprule_created_group.json")).build();
HttpRequest getSecurityGroup = HttpRequest.builder().method("GET").endpoint(
URI.create("https://az-1.region-a.geo-1.compute.hpcloudsvc.com/v2/3456/os-security-groups/160")).headers(
ImmutableMultimap.<String, String> builder().put("Accept", "application/json").put("X-Auth-Token",
authToken).build()).build();
HttpResponse getSecurityGroupNoRulesResponse = HttpResponse.builder().statusCode(200).payload(
payloadFromResource("/securitygroup_details_extension_norules.json")).build();
HttpResponse getSecurityGroupResponse = HttpResponse.builder().statusCode(200).payload(
payloadFromResource("/securitygroup_details_extension.json")).build();
SecurityGroupExtension extension = orderedRequestsSendResponses(ImmutableList.of(keystoneAuthWithUsernameAndPasswordAndTenantName,
extensionsOfNovaRequest, getSecurityGroup, createRule, getSecurityGroup, list, list),
ImmutableList.of(responseWithKeystoneAccess, extensionsOfNovaResponse, getSecurityGroupNoRulesResponse,
createRuleResponse, getSecurityGroupResponse, listResponse, listResponse)).getSecurityGroupExtension().get();
IpPermission.Builder builder = IpPermission.builder();
builder.ipProtocol(IpProtocol.TCP);
builder.fromPort(22);
builder.toPort(22);
builder.groupId("admin/11111");
IpPermission perm = builder.build();
SecurityGroup origGroup = extension.getSecurityGroupById(region + "/160");
assertNotNull(origGroup);
SecurityGroup newGroup = extension.addIpPermission(perm, origGroup);
assertNotNull(newGroup);
}
public void testAddIpPermissionGroupFromParams() {
HttpRequest createRule = HttpRequest
.builder()
.method("POST")
.endpoint("https://az-1.region-a.geo-1.compute.hpcloudsvc.com/v2/3456/os-security-group-rules")
.addHeader("Accept", "application/json")
.addHeader("X-Auth-Token", authToken)
.payload(
payloadFromStringWithContentType(
"{\"security_group_rule\":{\"group_id\":\"11111\",\"parent_group_id\":\"160\",\"ip_protocol\":\"tcp\",\"from_port\":\"22\",\"to_port\":\"22\"}}",
"application/json")).build();
HttpResponse createRuleResponse = HttpResponse.builder().statusCode(200).payload(
payloadFromResource("/securitygrouprule_created_group.json")).build();
HttpRequest getSecurityGroup = HttpRequest.builder().method("GET").endpoint(
URI.create("https://az-1.region-a.geo-1.compute.hpcloudsvc.com/v2/3456/os-security-groups/160")).headers(
ImmutableMultimap.<String, String> builder().put("Accept", "application/json").put("X-Auth-Token",
authToken).build()).build();
HttpResponse getSecurityGroupNoRulesResponse = HttpResponse.builder().statusCode(200).payload(
payloadFromResource("/securitygroup_details_extension_norules.json")).build();
HttpResponse getSecurityGroupResponse = HttpResponse.builder().statusCode(200).payload(
payloadFromResource("/securitygroup_details_extension.json")).build();
SecurityGroupExtension extension = orderedRequestsSendResponses(ImmutableList.of(keystoneAuthWithUsernameAndPasswordAndTenantName,
extensionsOfNovaRequest, getSecurityGroup, createRule, getSecurityGroup, list, list),
ImmutableList.of(responseWithKeystoneAccess, extensionsOfNovaResponse, getSecurityGroupNoRulesResponse,
createRuleResponse, getSecurityGroupResponse, listResponse, listResponse)).getSecurityGroupExtension().get();
SecurityGroup origGroup = extension.getSecurityGroupById(region + "/160");
assertNotNull(origGroup);
SecurityGroup newGroup = extension.addIpPermission(IpProtocol.TCP,
22,
22,
emptyMultimap(),
emptyStringSet(),
ImmutableSet.of("admin/11111"),
origGroup);
assertNotNull(newGroup);
}
private Multimap<String, String> emptyMultimap() {
return LinkedHashMultimap.create();
}
private Set<String> emptyStringSet() {
return Sets.newLinkedHashSet();
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.management.internal.cli.functions;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.apache.geode.DataSerializer;
import org.apache.geode.internal.DataSerializableFixedID;
import org.apache.geode.internal.Version;
import org.apache.geode.management.internal.configuration.domain.XmlEntity;
public class CliFunctionResult implements Comparable<CliFunctionResult>, DataSerializableFixedID {
private String memberIdOrName;
private Serializable[] serializables = new String[0];
private Object resultObject;
private XmlEntity xmlEntity;
private byte[] byteData = new byte[0];
private StatusState state;
public enum StatusState {
OK, ERROR, IGNORABLE
}
@Deprecated
public CliFunctionResult() {}
@Deprecated
public CliFunctionResult(final String memberIdOrName) {
this.memberIdOrName = memberIdOrName;
this.state = StatusState.OK;
}
@Deprecated
public CliFunctionResult(final String memberIdOrName, final Serializable[] serializables) {
this.memberIdOrName = memberIdOrName;
this.serializables = serializables;
this.state = StatusState.OK;
}
@Deprecated
public CliFunctionResult(final String memberIdOrName, final XmlEntity xmlEntity) {
this.memberIdOrName = memberIdOrName;
this.xmlEntity = xmlEntity;
this.state = StatusState.OK;
}
@Deprecated
public CliFunctionResult(final String memberIdOrName, final XmlEntity xmlEntity,
final Serializable[] serializables) {
this.memberIdOrName = memberIdOrName;
this.xmlEntity = xmlEntity;
this.serializables = serializables;
this.state = StatusState.OK;
}
@Deprecated
public CliFunctionResult(final String memberIdOrName, XmlEntity xmlEntity, final String message) {
this.memberIdOrName = memberIdOrName;
this.xmlEntity = xmlEntity;
if (message != null) {
this.serializables = new String[] {message};
}
this.state = StatusState.OK;
}
/**
* @deprecated Use {@code CliFunctionResult(String, StatusState, String)} instead
*/
@Deprecated
public CliFunctionResult(final String memberIdOrName, final boolean successful,
final String message) {
this(memberIdOrName, successful ? StatusState.OK : StatusState.ERROR, message);
}
public CliFunctionResult(final String memberIdOrName, final StatusState state,
final String message) {
this.memberIdOrName = memberIdOrName;
this.state = state;
if (message != null) {
this.serializables = new String[] {message};
}
}
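/**
* Creates a result carrying an arbitrary result object and an optional message. If the result
* object is a {@link Throwable} the state is {@link StatusState#ERROR}, otherwise {@link StatusState#OK}.
* Illustrative usage (the names below are hypothetical):
*
* <pre>
* return new CliFunctionResult(member.getId(), createdRegion.getName(), "Region created");
* </pre>
*/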
public CliFunctionResult(final String memberIdOrName, final Object resultObject,
final String message) {
this.memberIdOrName = memberIdOrName;
this.resultObject = resultObject;
if (message != null) {
this.serializables = new String[] {message};
}
if (resultObject instanceof Throwable) {
this.state = StatusState.ERROR;
} else {
this.state = StatusState.OK;
}
}
public CliFunctionResult(final String memberIdOrName, final Object resultObject) {
this(memberIdOrName, resultObject, null);
}
public String getMemberIdOrName() {
return this.memberIdOrName;
}
@Deprecated
public String getMessage() {
if (this.serializables.length == 0 || !(this.serializables[0] instanceof String)) {
return null;
}
return (String) this.serializables[0];
}
public String getStatus(boolean skipIgnore) {
if (state == StatusState.IGNORABLE) {
return skipIgnore ? "IGNORED" : "ERROR";
}
return state.name();
}
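/**
* Equivalent to {@code getStatus(true)}: an {@code IGNORABLE} result is reported as {@code "IGNORED"},
* while {@code OK} and {@code ERROR} results return their enum names.
*/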
public String getStatus() {
return getStatus(true);
}
public String getStatusMessage() {
String message = getMessage();
if (isSuccessful()) {
return message;
}
String errorMessage = "";
if (message != null
&& (resultObject == null || !((Throwable) resultObject).getMessage().contains(message))) {
errorMessage = message;
}
if (resultObject != null) {
errorMessage = errorMessage.trim() + " " + ((Throwable) resultObject).getClass().getName()
+ ": " + ((Throwable) resultObject).getMessage();
}
return errorMessage;
}
/**
* This can be removed once all commands are using ResultModel.
*/
@Deprecated
public String getLegacyStatus() {
String message = getMessage();
if (isSuccessful()) {
return message;
}
String errorMessage = "ERROR: ";
if (message != null
&& (resultObject == null || !((Throwable) resultObject).getMessage().contains(message))) {
errorMessage += message;
}
if (resultObject != null) {
errorMessage = errorMessage.trim() + " " + ((Throwable) resultObject).getClass().getName()
+ ": " + ((Throwable) resultObject).getMessage();
}
return errorMessage;
}
@Deprecated
public Serializable[] getSerializables() {
return this.serializables;
}
@Deprecated
public Throwable getThrowable() {
if (isSuccessful()) {
return null;
}
return ((Throwable) resultObject);
}
public Object getResultObject() {
return resultObject;
}
@Override
public int getDSFID() {
return DataSerializableFixedID.CLI_FUNCTION_RESULT;
}
@Override
public void toData(DataOutput out) throws IOException {
toDataPre_GEODE_1_6_0_0(out);
DataSerializer.writeEnum(this.state, out);
}
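// Descriptive note (added for clarity, not in the original source): toData/fromData keep the
// pre-1.6.0 wire format intact and append the StatusState enum at the end, so newer members can
// still exchange this object with members that only understand the boolean "successful" flag
// written by toDataPre_GEODE_1_6_0_0 and read back by fromDataPre_GEODE_1_6_0_0 below.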
public void toDataPre_GEODE_1_6_0_0(DataOutput out) throws IOException {
DataSerializer.writeString(this.memberIdOrName, out);
DataSerializer.writePrimitiveBoolean(this.isSuccessful(), out);
DataSerializer.writeObject(this.xmlEntity, out);
DataSerializer.writeObjectArray(this.serializables, out);
DataSerializer.writeObject(this.resultObject, out);
DataSerializer.writeByteArray(this.byteData, out);
}
public void toDataPre_GFE_8_0_0_0(DataOutput out) throws IOException {
DataSerializer.writeString(this.memberIdOrName, out);
DataSerializer.writeObjectArray(this.serializables, out);
DataSerializer.writeObject(this.resultObject, out);
}
@Override
public void fromData(DataInput in) throws IOException, ClassNotFoundException {
fromDataPre_GEODE_1_6_0_0(in);
this.state = DataSerializer.readEnum(StatusState.class, in);
}
public void fromDataPre_GEODE_1_6_0_0(DataInput in) throws IOException, ClassNotFoundException {
this.memberIdOrName = DataSerializer.readString(in);
this.state = DataSerializer.readPrimitiveBoolean(in) ? StatusState.OK : StatusState.ERROR;
this.xmlEntity = DataSerializer.readObject(in);
this.serializables = (Serializable[]) DataSerializer.readObjectArray(in);
this.resultObject = DataSerializer.readObject(in);
this.byteData = DataSerializer.readByteArray(in);
}
public void fromDataPre_GFE_8_0_0_0(DataInput in) throws IOException, ClassNotFoundException {
this.memberIdOrName = DataSerializer.readString(in);
this.resultObject = DataSerializer.readObject(in);
this.serializables = (Serializable[]) DataSerializer.readObjectArray(in);
}
public boolean isSuccessful() {
return this.state == StatusState.OK;
}
public boolean isIgnorableFailure() {
return this.state == StatusState.IGNORABLE;
}
@Deprecated
public XmlEntity getXmlEntity() {
return this.xmlEntity;
}
@Deprecated
public byte[] getByteData() {
return this.byteData;
}
@Override
public int compareTo(CliFunctionResult o) {
if (this.memberIdOrName == null && o.memberIdOrName == null) {
return 0;
}
if (this.memberIdOrName == null && o.memberIdOrName != null) {
return -1;
}
if (this.memberIdOrName != null && o.memberIdOrName == null) {
return 1;
}
return getMemberIdOrName().compareTo(o.memberIdOrName);
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((this.memberIdOrName == null) ? 0 : this.memberIdOrName.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
CliFunctionResult other = (CliFunctionResult) obj;
if (this.memberIdOrName == null) {
if (other.memberIdOrName != null)
return false;
} else if (!this.memberIdOrName.equals(other.memberIdOrName))
return false;
return true;
}
@Override
public String toString() {
return "CliFunctionResult [memberId=" + this.memberIdOrName + ", successful="
+ this.isSuccessful() + ", xmlEntity=" + this.xmlEntity + ", serializables="
+ Arrays.toString(this.serializables) + ", throwable=" + this.resultObject + ", byteData="
+ Arrays.toString(this.byteData) + "]";
}
/**
* Remove elements from the list that are not instances of CliFunctionResult and then sort the
* results.
*
* @param results The results to clean.
* @return The cleaned results.
*/
public static List<CliFunctionResult> cleanResults(List<?> results) {
List<CliFunctionResult> returnResults = new ArrayList<CliFunctionResult>(results.size());
for (Object result : results) {
if (result instanceof CliFunctionResult) {
returnResults.add((CliFunctionResult) result);
}
}
Collections.sort(returnResults);
return returnResults;
}
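// Illustrative usage (added comment, not part of the original class): given a raw function result
// payload that may mix member results with other objects, e.g.
//
//   List<?> raw = Arrays.asList(new CliFunctionResult("m2"), "unexpected", new CliFunctionResult("m1"));
//   List<CliFunctionResult> clean = CliFunctionResult.cleanResults(raw);
//   // clean contains only the two CliFunctionResults, sorted by memberIdOrName: m1, m2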
@Override
public Version[] getSerializationVersions() {
return new Version[] {Version.GFE_80};
}
}
|
|
/*
* www.javagl.de - Geom - Geometry utilities
*
* Copyright (c) 2013-2015 Marco Hutter - http://www.javagl.de
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
package de.javagl.geom;
import java.awt.geom.AffineTransform;
import java.awt.geom.Line2D;
import java.awt.geom.NoninvertibleTransformException;
import java.awt.geom.Point2D;
import java.awt.geom.Rectangle2D;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.Locale;
/**
* Utility methods related to points
*/
public class Points
{
/**
* A comparator that compares points colexicographically
* by their y- and x-coordinates
*/
static final Comparator<Point2D> YX_COMPARATOR =
new Comparator<Point2D>()
{
@Override
public int compare(Point2D p0, Point2D p1)
{
int cy = Double.compare(p0.getY(), p1.getY());
if (cy != 0)
{
return cy;
}
return Double.compare(p0.getX(), p1.getX());
}
};
/**
* Returns a comparator that compares points colexicographically.
* That means that it compares the points by their y-coordinate,
* and, if these are equal, by their x-coordinate
*
* @return The comparator
*/
public static Comparator<Point2D> colexicographicalComparator()
{
return YX_COMPARATOR;
}
/**
* A comparator that compares points lexicographically
* by their x- and y-coordinates
*/
static final Comparator<Point2D> XY_COMPARATOR =
new Comparator<Point2D>()
{
@Override
public int compare(Point2D p0, Point2D p1)
{
int cx = Double.compare(p0.getX(), p1.getX());
if (cx != 0)
{
return cx;
}
return Double.compare(p0.getY(), p1.getY());
}
};
/**
* Returns a comparator that compares points lexicographically.
* That means that it compares the points by their x-coordinate,
* and, if these are equal, by their y-coordinate
*
* @return The comparator
*/
public static Comparator<Point2D> lexicographicalComparator()
{
return XY_COMPARATOR;
}
/**
* Creates a comparator that compares points by the angle that the line
* between the given center and the point has to the x-axis.
*
* @param center The center
* @return The comparator
*/
public static Comparator<Point2D> byAngleComparator(
Point2D center)
{
return byAngleComparator(center.getX(), center.getY());
}
/**
* Creates a comparator that compares points by the angle that the line
* between the specified center and the point has to the x-axis.
*
* @param centerX The x-coordinate of the center
* @param centerY The y-coordinate of the center
* @return The comparator
*/
public static Comparator<Point2D> byAngleComparator(
double centerX, double centerY)
{
return new Comparator<Point2D>()
{
@Override
public int compare(Point2D p0, Point2D p1)
{
double angle0 = Lines.normalizeAngle(Lines.angleToX(
centerX, centerY, p0.getX(), p0.getY()));
double angle1 = Lines.normalizeAngle(Lines.angleToX(
centerX, centerY, p1.getX(), p1.getY()));
return Double.compare(angle0, angle1);
}
};
}
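// Illustrative usage (added comment, not part of the original class; assumes the Lines utility
// referenced above normalizes angles into a single full turn):
//
//   List<Point2D> ps = new ArrayList<Point2D>(Arrays.asList(
//       new Point2D.Double(1, 0), new Point2D.Double(0, 1), new Point2D.Double(-1, 0)));
//   Collections.sort(ps, Points.byAngleComparator(0, 0));
//   // points are now ordered by increasing angle of the line center-to-point against the x-axis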
/**
* Returns a comparator that compares points by their distance to
* the specified reference point
*
* @param x The x-coordinate of the reference point
* @param y The y-coordinate of the reference point
* @return The comparator
*/
public static Comparator<Point2D> byDistanceComparator(
double x, double y)
{
return new Comparator<Point2D>()
{
@Override
public int compare(Point2D p0, Point2D p1)
{
double dx0 = p0.getX() - x;
double dy0 = p0.getY() - y;
double dx1 = p1.getX() - x;
double dy1 = p1.getY() - y;
double d0 = dx0 * dx0 + dy0 * dy0;
double d1 = dx1 * dx1 + dy1 * dy1;
return Double.compare(d0, d1);
}
};
}
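// Descriptive note (added): the comparator above compares squared distances (dx*dx + dy*dy),
// which avoids Math.sqrt and yields the same ordering, since squaring is monotonic for
// non-negative values. For example:
//
//   List<Point2D> ps = new ArrayList<Point2D>(Arrays.asList(
//       new Point2D.Double(3, 0), new Point2D.Double(1, 1)));
//   Collections.sort(ps, Points.byDistanceComparator(0, 0));
//   // (1,1) comes first: squared distance 2 < 9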
/**
* Returns a comparator that compares points by their distance to
* the given reference point
*
* @param reference The reference point
* @return The comparator
*/
public static Comparator<Point2D> byDistanceComparator(Point2D reference)
{
return byDistanceComparator(reference.getX(), reference.getY());
}
/**
* Returns a comparator that compares points by their distance to
* the origin
*
* @return The comparator
*/
public static Comparator<Point2D> byDistanceToOriginComparator()
{
return byDistanceComparator(0, 0);
}
/**
* Returns a comparator that compares points by their distance to
* the specified line
*
* @param line The line
* @return The comparator
*/
public static Comparator<Point2D> byDistanceToLineComparator(
Line2D line)
{
return byDistanceToLineComparator(
line.getX1(), line.getY1(), line.getX2(), line.getY2());
}
/**
* Returns a comparator that compares points by their distance to
* the specified line
*
* @param p0 The start point of the line
* @param p1 The end point of the line
* @return The comparator
*/
public static Comparator<Point2D> byDistanceToLineComparator(
Point2D p0, Point2D p1)
{
return byDistanceToLineComparator(
p0.getX(), p0.getY(), p1.getX(), p1.getY());
}
/**
* Returns a comparator that compares points by their distance to
* the specified line
*
* @param x0 The x-coordinate of the start point of the line
* @param y0 The y-coordinate of the start point of the line
* @param x1 The x-coordinate of the end point of the line
* @param y1 The y-coordinate of the end point of the line
* @return The comparator
*/
public static Comparator<Point2D> byDistanceToLineComparator(
double x0, double y0, double x1, double y1)
{
return new Comparator<Point2D>()
{
@Override
public int compare(Point2D p0, Point2D p1)
{
double d0 = Line2D.ptLineDistSq(
x0, y0, x1, y1, p0.getX(), p0.getY());
double d1 = Line2D.ptLineDistSq(
x0, y0, x1, y1, p1.getX(), p1.getY());
return Double.compare(d0, d1);
}
};
}
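// Descriptive note (added): Line2D.ptLineDistSq measures the squared distance to the infinite
// line through (x0,y0)-(x1,y1), not to the finite segment (that would be ptSegDistSq), so points
// beyond the endpoints are still compared by their perpendicular distance to the line.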
/**
* Computes the difference between the given points and stores the
* result in the given result point. If the given result point
* is <code>null</code>, a new point will be created and returned.
*
* @param p0 The first point
* @param p1 The second point
* @param result The result
* @return The result
*/
public static Point2D sub(Point2D p0, Point2D p1, Point2D result)
{
if (result == null)
{
result = new Point2D.Double();
}
result.setLocation(p0.getX()-p1.getX(), p0.getY()-p1.getY());
return result;
}
/**
* Computes the sum of the given points and stores the
* result in the given result point. If the given result point
* is <code>null</code>, a new point will be created and returned.
*
* @param p0 The first point
* @param p1 The second point
* @param result The result
* @return The result
*/
public static Point2D add(Point2D p0, Point2D p1, Point2D result)
{
if (result == null)
{
result = new Point2D.Double();
}
result.setLocation(p0.getX()+p1.getX(), p0.getY()+p1.getY());
return result;
}
/**
* Scales the given point with the given factor and stores the
* result in the given result point. If the given result point
* is <code>null</code>, a new point will be created and returned.
*
* @param p0 The point
* @param factor The scaling factor
* @param result The result
* @return The result
*/
public static Point2D scale(Point2D p0, double factor, Point2D result)
{
if (result == null)
{
result = new Point2D.Double();
}
result.setLocation(p0.getX()*factor, p0.getY()*factor);
return result;
}
/**
* Computes <code>p0 + factor * p1</code> and stores the
* result in the given result point. If the given result point
* is <code>null</code>, a new point will be created and returned.
*
* @param p0 The first point
* @param factor The scaling factor for the second point
* @param p1 The second point
* @param result The result
* @return The result
*/
public static Point2D addScaled(
Point2D p0, double factor, Point2D p1, Point2D result)
{
if (result == null)
{
result = new Point2D.Double();
}
result.setLocation(
p0.getX()+factor*p1.getX(),
p0.getY()+factor*p1.getY());
return result;
}
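// Descriptive note (added): like sub, add and scale above, this method follows the usual
// "result parameter" pattern -- passing null allocates a fresh Point2D.Double, while passing an
// existing point reuses it and avoids allocation in tight loops, e.g.
//
//   Point2D tmp = new Point2D.Double();
//   Points.addScaled(p, dt, velocity, tmp);   // writes into tmp, no new allocation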
/**
* Interpolates linearly between the given points, and stores the
* result in the given result point. If the given result point
* is <code>null</code>, a new point will be created and returned.
*
* @param p0 The first point
* @param p1 The second point
* @param alpha The position between the points (usually between 0 and 1)
* @param result The result
* @return The result
*/
public static Point2D interpolate(
Point2D p0, Point2D p1, double alpha, Point2D result)
{
return interpolate(p0.getX(), p0.getY(), p1.getX(), p1.getY(),
alpha, result);
}
/**
* Interpolates linearly between the specified points, and stores the
* result in the given result point. If the given result point
* is <code>null</code>, a new point will be created and returned.
*
* @param x0 The x-coordinate of the first point
* @param y0 The y-coordinate of the first point
* @param x1 The x-coordinate of the second point
* @param y1 The y-coordinate of the second point
* @param alpha The position between the points (usually between 0 and 1)
* @param result The result
* @return The result
*/
public static Point2D interpolate(
double x0, double y0, double x1, double y1,
double alpha, Point2D result)
{
if (result == null)
{
result = new Point2D.Double();
}
double dx = x1 - x0;
double dy = y1 - y0;
result.setLocation(x0 + alpha * dx, y0 + alpha * dy);
return result;
}
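// Descriptive note (added): this is plain linear interpolation,
//   result = (x0 + alpha * (x1 - x0), y0 + alpha * (y1 - y0))
// so alpha = 0 yields the first point, alpha = 1 the second, and alpha = 0.5 the midpoint;
// values outside [0, 1] extrapolate along the same line.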
/**
* Transforms the given point with the inverse of the given transform,
* and stores the result in the given destination point. If the given
* destination point is <code>null</code>, a new point will be created
* and returned.
*
* @param at The affine transform
* @param pSrc The source point
* @param pDst The destination point
* @return The destination point
* @throws IllegalArgumentException If the given transform is not
* invertible
*/
public static Point2D inverseTransform(
AffineTransform at, Point2D pSrc, Point2D pDst)
{
try
{
return at.inverseTransform(pSrc, pDst);
}
catch (NoninvertibleTransformException e)
{
throw new IllegalArgumentException(
"Non-invertible transform", e);
}
}
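// Descriptive note (added): this is a small convenience wrapper that turns the checked
// NoninvertibleTransformException of AffineTransform#inverseTransform into an unchecked
// IllegalArgumentException, so callers that know their transform is invertible do not need a
// try/catch block.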
/**
* Transforms all the given points with the given affine transform,
* and returns the results
*
* @param at The affine transform
* @param points The input points
* @return The transformed points
*/
public static List<Point2D> transform(
AffineTransform at, Iterable<? extends Point2D> points)
{
List<Point2D> result = new ArrayList<Point2D>();
for (Point2D p : points)
{
Point2D tp = at.transform(p, null);
result.add(tp);
}
return result;
}
/**
* Compute the bounding box of the given points
*
* @param points The input points
* @return The bounding box of the given points
*/
public static Rectangle2D computeBounds(
Iterable<? extends Point2D> points)
{
double minX = Double.MAX_VALUE;
double minY = Double.MAX_VALUE;
double maxX = -Double.MAX_VALUE;
double maxY = -Double.MAX_VALUE;
for (Point2D p : points)
{
double x = p.getX();
double y = p.getY();
minX = Math.min(minX, x);
minY = Math.min(minY, y);
maxX = Math.max(maxX, x);
maxY = Math.max(maxY, y);
}
return new Rectangle2D.Double(minX, minY, maxX-minX, maxY-minY);
}
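// Descriptive note (added): if the iterable is empty, the min/max values are never updated and
// the returned rectangle is degenerate (located at Double.MAX_VALUE with negative extents), so
// callers should make sure that at least one point is passed in.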
/**
* Computes the center of gravity of the given sequence of points. This
* is simply the average of all points. If the given sequence is empty,
* then <code>null</code> is returned.
*
* @param points The points
* @return The center of gravity
*/
public static Point2D computeCenterOfGravity(
Iterable<? extends Point2D> points)
{
int counter = 0;
double sumX = 0.0;
double sumY = 0.0;
for (Point2D point : points)
{
sumX += point.getX();
sumY += point.getY();
counter++;
}
if (counter == 0)
{
return null;
}
return new Point2D.Double(sumX / counter, sumY / counter);
}
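// Illustrative usage (added comment, not part of the original class):
//
//   Point2D c = Points.computeCenterOfGravity(Arrays.asList(
//       new Point2D.Double(0, 0), new Point2D.Double(2, 0), new Point2D.Double(1, 3)));
//   // c is (1.0, 1.0) -- the average of the x- and y-coordinates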
/**
* Creates a short string representation of the given point
*
* @param point The point
* @return The string
*/
public static String toString(Point2D point)
{
return toString(point, "%f");
}
/**
* Creates a short string representation of the given point, using
* the given format for the coordinates
*
* @param point The point
* @param format The format
* @return The string
*/
public static String toString(Point2D point, String format)
{
return toString(point, Locale.getDefault(), format);
}
/**
* Creates a short string representation of the given point, using
* the given format for the coordinates
*
* @param point The point
* @param locale The locale
* @param format The format
* @return The string
*/
public static String toString(Point2D point, Locale locale, String format)
{
String sx = String.format(locale, format, point.getX());
String sy = String.format(locale, format, point.getY());
return "("+sx+","+sy+")";
}
/**
* Private constructor to prevent instantiation
*/
private Points()
{
// Private constructor to prevent instantiation
}
}
|
|
package com.mkl.eu.service.service.persistence.oe.eco;
import com.mkl.eu.service.service.persistence.oe.IEntity;
import com.mkl.eu.service.service.persistence.oe.country.PlayableCountryEntity;
import org.hibernate.annotations.GenericGenerator;
import javax.persistence.*;
import java.io.Serializable;
/**
* Economical sheet of a given player at a given turn.
*
* @author MKL
*/
@Entity
@Table(name = "ECONOMICAL_SHEET")
public class EconomicalSheetEntity implements IEntity, Serializable {
/** Id. */
private Long id;
/** Owner of the sheet. */
private PlayableCountryEntity country;
/** Turn of the sheet. */
private Integer turn;
/*********************************************************************************************************
* Economic record sheet A - Royal Treasure *
*********************************************************************************************************/
/** Royal treasure at start of turn. Line 1 of sheet A. */
private Integer rtStart;
/** Royal treasure after events. Line 2 of sheet A. */
private Integer rtEvents;
/** Gifts and loans between players. Line 3 of sheet A. */
private Integer loans;
/** Wood and slaves (negative if bought, positive if sold). Line 4 of sheet A. */
private Integer woodSlaves;
/** Diplomatic actions. Line 5 of sheet A. */
private Integer diploActions;
/** Diplomatic reactions. Line 6 of sheet A. */
private Integer diploReactions;
/** Subsidies and dowries. Line 7 of sheet A. */
private Integer subsidies;
/** Royal treasure after diplomacy. Line 8 of sheet A. */
private Integer rtDiplo;
/** Pillages and privateers. Line 9 of sheet A. */
private Integer pillages;
/** Gold from ROTW and convoys. Line 10 of sheet A. */
private Integer goldRotw;
/** Exceptional taxes. Line 12 of sheet A. */
private Integer excTaxes;
/** Royal treasure before Exchequer. Line 13 of sheet A. */
private Integer rtBefExch;
/** Column of the Exchequer test. */
private Integer exchequerColumn;
/** Bonus of the Exchequer test. */
private Integer exchequerBonus;
/** Unmodified die of the exchequer test. */
private Integer exchequerDie;
/** Regular income. Line 15 of sheet A. */
private Integer regularIncome;
/** Prestige income. Line 16 of sheet A. */
private Integer prestigeIncome;
/** Maximum national loan. Line 17 of sheet A. */
private Integer maxNatLoan;
/** Maximum international loan. Line 18 of sheet A. */
private Integer maxInterLoan;
/** Remaining expenses. Line 20 of sheet A. */
private Integer remainingExpenses;
/** Prestige spent on expenses. Line 21 of sheet A. */
private Integer prestigeSpent;
/** National loan spent on expenses. Line 22 of sheet A. */
private Integer natLoan;
/** International loan spent on expenses. Line 23 of sheet A. */
private Integer interLoan;
/** Royal treasure balance. Line 24 of sheet A. */
private Integer rtBalance;
/** Royal treasure after Exchequer test. Line 25 of sheet A. */
private Integer rtAftExch;
/** Prestige spent in victory points. Line 26 of sheet A. */
private Integer prestigeVP;
/** Wealth. Line 27 of sheet A. */
private Integer wealth;
/** Period wealth. Line 28 of sheet A. */
private Integer periodWealth;
/** Stability improvement expense. Line 29 of sheet A. */
private Integer stab;
/** Modifier for stability improvement, without taking into account the investment. */
private Integer stabModifier;
/** Unmodified die roll for stability improvement. */
private Integer stabDie;
/** Ransom, peace treaties (negative if expense, positive if income). */
private Integer peace;
/** Royal treasure after peace. */
private Integer rtPeace;
/** Inflation. X% of |RT|, minimum X. Line 32 of sheet A. */
private Integer inflation;
/** Royal treasure at the end of the turn. Line 33 of sheet A. */
private Integer rtEnd;
/*********************************************************************************************************
* Economic record sheet C - Loans *
*********************************************************************************************************/
/** New international loans. Line 1 of sheet C. */
private Integer interLoanNew;
/** International loans interests. Line 2 of sheet C. */
private Integer interLoanInterests;
/** International loans refunds. Line 3 of sheet C. */
private Integer interLoanRefund;
/** International bankruptcy. Line 4 of sheet C. */
private Integer interBankrupt;
/** National loans at start of turn. Line 5 of sheet C. */
private Integer natLoanStart;
/** National loans interests. Line 6 of sheet C. */
private Integer natLoanInterest;
/** National loans bankruptcy. Line 7 of sheet C. */
private Integer natLoanBankrupt;
/** National loans refunds. Line 8 of sheet C. */
private Integer natLoanRefund;
/** New national loans. Line 9 of sheet C. */
private Integer natLoanNew;
/** National loans at end of turn. Line 10 of sheet C. */
private Integer natLoanEnd;
/*********************************************************************************************************
* Economic record sheet B - Income *
*********************************************************************************************************/
/*********************************************************************************************************
* Income *
*********************************************************************************************************/
/*********************************************************************************************************
* Land income *
*********************************************************************************************************/
/** Provinces income. Line 1 of sheet B. */
private Integer provincesIncome;
/** Vassal provinces income. Line 2 of sheet B. */
private Integer vassalIncome;
/** Income lost due to pillages, revolts or pashas. Line 3 of sheet B. */
private Integer lostIncome;
/** Variation of land income due to events. Can be negative. Line 4 of sheet B. */
private Integer eventLandIncome;
/** Land income. Summary of the provinces, vassal, lost and event incomes. Line 5 of sheet B. */
private Integer landIncome;
/*********************************************************************************************************
* Domestic income *
*********************************************************************************************************/
/** Manufactures income. Line 6 of sheet B. */
private Integer mnuIncome;
/** European gold mines income. Line 7 of sheet B. */
private Integer goldIncome;
/** Industrial income. Summary of the manufactures and European gold mines income. Line 8 of sheet B. */
private Integer industrialIncome;
/*********************************************************************************************************
* Trade income *
*********************************************************************************************************/
/** Domestic trade income. Based on provinces and vassal income crossed with the DTI. Line 9 of sheet B. */
private Integer domTradeIncome;
/** Foreign trade income. Based on trade refusal crossed with the FTI. Line 10 of sheet B. */
private Integer forTradeIncome;
/** Fleet level income. Line 11 of sheet B. */
private Integer fleetLevelIncome;
/** Fleet monopoly income. Line 12 of sheet B. */
private Integer fleetMonopIncome;
/** Trade centres income. Line 14 of sheet B. */
private Integer tradeCenterIncome;
/** Trade centres losses. Line 15 of sheet B. */
private Integer tradeCenterLoss;
/** Trade income. Summary of the various trade incomes. Line 16 of sheet B. */
private Integer tradeIncome;
/*********************************************************************************************************
* ROTW Income *
*********************************************************************************************************/
/** Colonies income (without exotic resources). Line 17 of sheet B. */
private Integer colIncome;
/** Trading posts income (without exotic resources). Line 18 of sheet B. */
private Integer tpIncome;
/** Exotic resources income. Line 19 of sheet B. */
private Integer exoResIncome;
/** ROTW income. Summary of the colonies, trading posts and exotic resources income. Line 21 of sheet B. */
private Integer rotwIncome;
/*********************************************************************************************************
* Other Income *
*********************************************************************************************************/
/** Special income (Portugal in annexation for Spain for example). Line 22 of sheet B. */
private Integer specialIncome;
/** Sum of the other summaries income. Line 23 of sheet B. */
private Integer income;
/** Variation of income due to events. Can be negative. Line 24 of sheet B. */
private Integer eventIncome;
/** Gross income. Line 25 of sheet B. */
private Integer grossIncome;
/*********************************************************************************************************
* Expenses *
*********************************************************************************************************/
/** Loan interests. Line 26 of sheet B. */
private Integer interestExpense;
/** Mandatory loan refund. Line 27 of sheet B. */
private Integer mandRefundExpense;
/** RT Collapse if negative. Line 28 of sheet B. */
private Integer rtCollapse;
/*********************************************************************************************************
* Administrative Expenses *
*********************************************************************************************************/
/** Optional loan refund. Line 29 of sheet B. */
private Integer optRefundExpense;
/** Unit maintenance. Line 30 of sheet B. */
private Integer unitMaintExpense;
/** Fortresses and praesidios maintenance. Line 31 of sheet B. */
private Integer fortMaintExpense;
/** Missions maintenance. Line 32 of sheet B. */
private Integer missMaintExpense;
/** Units purchase. Line 33 of sheet B. */
private Integer unitPurchExpense;
/** Fortresses purchase. Line 34 of sheet B. */
private Integer fortPurchExpense;
/** Administrative actions. Line 35 of sheet B. */
private Integer adminActExpense;
/** Administrative reactions. Line 36 of sheet B. */
private Integer adminReactExpense;
/** Other expenses. Line 37 of sheet B. */
private Integer otherExpense;
/** Administrative total. Summary of administrative expenses. Line 38 of sheet B. */
private Integer admTotalExpense;
/** Exceptional taxes modifier. Line 39 of sheet B. */
private Integer excTaxesMod;
/*********************************************************************************************************
* Military Expenses *
*********************************************************************************************************/
/** Passive campaigns. Line 40 of sheet B. */
private Integer passCampExpense;
/** Active campaigns. Line 41 of sheet B. */
private Integer actCampExpense;
/** Major campaigns. Line 42 of sheet B. */
private Integer majCampExpense;
/** Multiple campaigns. Line 43 of sheet B. */
private Integer multCampExpense;
/** Exceptional recruitment. Line 44 of sheet B. */
private Integer excRecruitExpense;
/** Naval refit. Line 45 of sheet B. */
private Integer navalRefitExpense;
/** Praesidios build/upgrade. Line 46 of sheet B. */
private Integer praesidioExpense;
/** Summary of the military expenses. Line 47 of sheet B. */
private Integer militaryExpense;
/** Total expenses. Summary of all the expenses. Line 48 of sheet B. */
private Integer expenses;
/** @return the id. */
@Id
@GeneratedValue(strategy = GenerationType.AUTO, generator = "native")
@GenericGenerator(name = "native", strategy = "native")
@Column(name = "ID")
public Long getId() {
return id;
}
/** @param id the id to set. */
public void setId(Long id) {
this.id = id;
}
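/*
 * Note (added for clarity, not in the original source): the "native" GenericGenerator used on the
 * id above is the usual Hibernate idiom for letting the database dialect choose the id strategy
 * (an identity column on MySQL-like databases, a sequence elsewhere) instead of relying on the
 * default behavior of GenerationType.AUTO alone.
 */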
/** @return the country. */
@ManyToOne
@JoinColumn(name = "ID_COUNTRY")
public PlayableCountryEntity getCountry() {
return country;
}
/** @param country the country to set. */
public void setCountry(PlayableCountryEntity country) {
this.country = country;
}
/** @return the turn. */
@Column(name = "TURN")
public Integer getTurn() {
return turn;
}
/** @param turn the turn to set. */
public void setTurn(Integer turn) {
this.turn = turn;
}
/** @return the rtStart. */
@Column(name = "RT_START")
public Integer getRtStart() {
return rtStart;
}
/** @param rtStart the rtStart to set. */
public void setRtStart(Integer rtStart) {
this.rtStart = rtStart;
}
/** @return the rtEvents. */
@Column(name = "RT_EVENTS")
public Integer getRtEvents() {
return rtEvents;
}
/** @param rtEvents the rtEvents to set. */
public void setRtEvents(Integer rtEvents) {
this.rtEvents = rtEvents;
}
/** @return the loans. */
@Column(name = "LOANS")
public Integer getLoans() {
return loans;
}
/** @param loans the loans to set. */
public void setLoans(Integer loans) {
this.loans = loans;
}
/** @return the woodSlaves. */
@Column(name = "WOOD_SLAVES")
public Integer getWoodSlaves() {
return woodSlaves;
}
/** @param woodSlaves the woodSlaves to set. */
public void setWoodSlaves(Integer woodSlaves) {
this.woodSlaves = woodSlaves;
}
/** @return the diploActions. */
@Column(name = "DIPLO_ACTIONS")
public Integer getDiploActions() {
return diploActions;
}
/** @param diploActions the diploActions to set. */
public void setDiploActions(Integer diploActions) {
this.diploActions = diploActions;
}
/** @return the diploReactions. */
@Column(name = "DIPLO_REACTIONS")
public Integer getDiploReactions() {
return diploReactions;
}
/** @param diploReactions the diploReactions to set. */
public void setDiploReactions(Integer diploReactions) {
this.diploReactions = diploReactions;
}
/** @return the subsidies. */
@Column(name = "SUBSIDIES")
public Integer getSubsidies() {
return subsidies;
}
/** @param subsidies the subsidies to set. */
public void setSubsidies(Integer subsidies) {
this.subsidies = subsidies;
}
/** @return the rtDiplo. */
@Column(name = "RT_DIPLO")
public Integer getRtDiplo() {
return rtDiplo;
}
/** @param rtDiplo the rtDiplo to set. */
public void setRtDiplo(Integer rtDiplo) {
this.rtDiplo = rtDiplo;
}
/** @return the pillages. */
@Column(name = "PILLAGES")
public Integer getPillages() {
return pillages;
}
/** @param pillages the pillages to set. */
public void setPillages(Integer pillages) {
this.pillages = pillages;
}
/** @return the goldRotw. */
@Column(name = "GOLD_ROTW")
public Integer getGoldRotw() {
return goldRotw;
}
/** @param goldRotw the goldRotw to set. */
public void setGoldRotw(Integer goldRotw) {
this.goldRotw = goldRotw;
}
/** @return the excTaxes. */
@Column(name = "EXC_TAXES")
public Integer getExcTaxes() {
return excTaxes;
}
/** @param excTaxes the excTaxes to set. */
public void setExcTaxes(Integer excTaxes) {
this.excTaxes = excTaxes;
}
/** @return the rtBefExch. */
@Column(name = "RT_BEF_EXCH")
public Integer getRtBefExch() {
return rtBefExch;
}
/** @param rtBefExch the rtBefExch to set. */
public void setRtBefExch(Integer rtBefExch) {
this.rtBefExch = rtBefExch;
}
/** @return the exchequerColumn. */
@Column(name = "EXCHEQUER_COLUMN")
public Integer getExchequerColumn() {
return exchequerColumn;
}
/** @param exchequerColumn the exchequerColumn to set. */
public void setExchequerColumn(Integer exchequerColumn) {
this.exchequerColumn = exchequerColumn;
}
/** @return the exchequerBonus. */
@Column(name = "EXCHEQUER_BONUS")
public Integer getExchequerBonus() {
return exchequerBonus;
}
/** @param exchequerBonus the exchequerBonus to set. */
public void setExchequerBonus(Integer exchequerBonus) {
this.exchequerBonus = exchequerBonus;
}
/** @return the exchequerDie. */
@Column(name = "EXCHEQUER_DIE")
public Integer getExchequerDie() {
return exchequerDie;
}
/** @param exchequerDie the exchequerDie to set. */
public void setExchequerDie(Integer exchequerDie) {
this.exchequerDie = exchequerDie;
}
/** @return the regularIncome. */
@Column(name = "REGULAR_INCOME")
public Integer getRegularIncome() {
return regularIncome;
}
/** @param regularIncome the regularIncome to set. */
public void setRegularIncome(Integer regularIncome) {
this.regularIncome = regularIncome;
}
/** @return the prestigeIncome. */
@Column(name = "PRESTIGE_INCOME")
public Integer getPrestigeIncome() {
return prestigeIncome;
}
/** @param prestigeIncome the prestigeIncome to set. */
public void setPrestigeIncome(Integer prestigeIncome) {
this.prestigeIncome = prestigeIncome;
}
/** @return the maxNatLoan. */
@Column(name = "MAX_NAT_LOAN")
public Integer getMaxNatLoan() {
return maxNatLoan;
}
/** @param maxNatLoan the maxNatLoan to set. */
public void setMaxNatLoan(Integer maxNatLoan) {
this.maxNatLoan = maxNatLoan;
}
/** @return the maxInterLoan. */
@Column(name = "MAX_INTER_LOAN")
public Integer getMaxInterLoan() {
return maxInterLoan;
}
/** @param maxInterLoan the maxInterLoan to set. */
public void setMaxInterLoan(Integer maxInterLoan) {
this.maxInterLoan = maxInterLoan;
}
/** @return the remainingExpenses. */
@Column(name = "REMAINING_EXPENSES")
public Integer getRemainingExpenses() {
return remainingExpenses;
}
/** @param remainingExpenses the remainingExpenses to set. */
public void setRemainingExpenses(Integer remainingExpenses) {
this.remainingExpenses = remainingExpenses;
}
/** @return the prestigeSpent. */
@Column(name = "PRESTIGE_SPENT")
public Integer getPrestigeSpent() {
return prestigeSpent;
}
/** @param prestigeSpent the prestigeSpent to set. */
public void setPrestigeSpent(Integer prestigeSpent) {
this.prestigeSpent = prestigeSpent;
}
/** @return the natLoan. */
@Column(name = "NAT_LOAN")
public Integer getNatLoan() {
return natLoan;
}
/** @param natLoan the natLoan to set. */
public void setNatLoan(Integer natLoan) {
this.natLoan = natLoan;
}
/** @return the interLoan. */
@Column(name = "INTER_LOAN")
public Integer getInterLoan() {
return interLoan;
}
/** @param interLoan the interLoan to set. */
public void setInterLoan(Integer interLoan) {
this.interLoan = interLoan;
}
/** @return the rtBalance. */
@Column(name = "RT_BALANCE")
public Integer getRtBalance() {
return rtBalance;
}
/** @param rtBalance the rtBalance to set. */
public void setRtBalance(Integer rtBalance) {
this.rtBalance = rtBalance;
}
/** @return the rtAftExch. */
@Column(name = "RT_AFT_EXCH")
public Integer getRtAftExch() {
return rtAftExch;
}
/** @param rtAftExch the rtAftExch to set. */
public void setRtAftExch(Integer rtAftExch) {
this.rtAftExch = rtAftExch;
}
/** @return the prestigeVP. */
@Column(name = "PRESTIGE_VP")
public Integer getPrestigeVP() {
return prestigeVP;
}
/** @param prestigeVP the prestigeVP to set. */
public void setPrestigeVP(Integer prestigeVP) {
this.prestigeVP = prestigeVP;
}
/** @return the wealth. */
@Column(name = "WEALTH")
public Integer getWealth() {
return wealth;
}
/** @param wealth the wealth to set. */
public void setWealth(Integer wealth) {
this.wealth = wealth;
}
/** @return the periodWealth. */
@Column(name = "PERIOD_WEALTH")
public Integer getPeriodWealth() {
return periodWealth;
}
/** @param periodWealth the periodWealth to set. */
public void setPeriodWealth(Integer periodWealth) {
this.periodWealth = periodWealth;
}
/** @return the stab. */
@Column(name = "STAB")
public Integer getStab() {
return stab;
}
/** @param stab the stab to set. */
public void setStab(Integer stab) {
this.stab = stab;
}
/** @return the stabModifier. */
public Integer getStabModifier() {
return stabModifier;
}
/** @param stabModifier the stabModifier to set. */
public void setStabModifier(Integer stabModifier) {
this.stabModifier = stabModifier;
}
/** @return the stabDie. */
public Integer getStabDie() {
return stabDie;
}
/** @param stabDie the stabDie to set. */
public void setStabDie(Integer stabDie) {
this.stabDie = stabDie;
}
/** @return the peace. */
@Column(name = "PEACE")
public Integer getPeace() {
return peace;
}
/** @param peace the peace to set. */
public void setPeace(Integer peace) {
this.peace = peace;
}
/** @return the rtPeace. */
@Column(name = "RT_PEACE")
public Integer getRtPeace() {
return rtPeace;
}
/** @param rtPeace the rtPeace to set. */
public void setRtPeace(Integer rtPeace) {
this.rtPeace = rtPeace;
}
/** @return the inflation. */
@Column(name = "INFLATION")
public Integer getInflation() {
return inflation;
}
/** @param inflation the inflation to set. */
public void setInflation(Integer inflation) {
this.inflation = inflation;
}
/** @return the rtEnd. */
@Column(name = "RT_ENT")
public Integer getRtEnd() {
return rtEnd;
}
/** @param rtEnd the rtEnd to set. */
public void setRtEnd(Integer rtEnd) {
this.rtEnd = rtEnd;
}
/** @return the interLoanNew. */
@Column(name = "INTER_LOAN_NEW")
public Integer getInterLoanNew() {
return interLoanNew;
}
/** @param interLoanNew the interLoanNew to set. */
public void setInterLoanNew(Integer interLoanNew) {
this.interLoanNew = interLoanNew;
}
/** @return the interLoanInterests. */
@Column(name = "INTER_LOAN_INTERESTS")
public Integer getInterLoanInterests() {
return interLoanInterests;
}
/** @param interLoanInterests the interLoanInterests to set. */
public void setInterLoanInterests(Integer interLoanInterests) {
this.interLoanInterests = interLoanInterests;
}
/** @return the interLoanRefund. */
@Column(name = "INTER_LOAN_REFUND")
public Integer getInterLoanRefund() {
return interLoanRefund;
}
/** @param interLoanRefund the interLoanRefund to set. */
public void setInterLoanRefund(Integer interLoanRefund) {
this.interLoanRefund = interLoanRefund;
}
/** @return the interBankrupt. */
@Column(name = "INTER_BANKRUPT")
public Integer getInterBankrupt() {
return interBankrupt;
}
/** @param interBankrupt the interBankrupt to set. */
public void setInterBankrupt(Integer interBankrupt) {
this.interBankrupt = interBankrupt;
}
/** @return the natLoanStart. */
@Column(name = "NAT_LOAN_START")
public Integer getNatLoanStart() {
return natLoanStart;
}
/** @param natLoanStart the natLoanStart to set. */
public void setNatLoanStart(Integer natLoanStart) {
this.natLoanStart = natLoanStart;
}
/** @return the natLoanInterest. */
@Column(name = "NAT_LOAN_INTERESTS")
public Integer getNatLoanInterest() {
return natLoanInterest;
}
/** @param natLoanInterest the natLoanInterest to set. */
public void setNatLoanInterest(Integer natLoanInterest) {
this.natLoanInterest = natLoanInterest;
}
/** @return the natLoanBankrupt. */
@Column(name = "NAT_LOAN_BANKRUPT")
public Integer getNatLoanBankrupt() {
return natLoanBankrupt;
}
/** @param natLoanBankrupt the natLoanBankrupt to set. */
public void setNatLoanBankrupt(Integer natLoanBankrupt) {
this.natLoanBankrupt = natLoanBankrupt;
}
/** @return the natLoanRefund. */
@Column(name = "NAT_LOAN_REFUND")
public Integer getNatLoanRefund() {
return natLoanRefund;
}
/** @param natLoanRefund the natLoanRefund to set. */
public void setNatLoanRefund(Integer natLoanRefund) {
this.natLoanRefund = natLoanRefund;
}
/** @return the natLoanNew. */
@Column(name = "NAT_LOAN_NEW")
public Integer getNatLoanNew() {
return natLoanNew;
}
/** @param natLoanNew the natLoanNew to set. */
public void setNatLoanNew(Integer natLoanNew) {
this.natLoanNew = natLoanNew;
}
/** @return the natLoanEnd. */
@Column(name = "NAT_LOAN_END")
public Integer getNatLoanEnd() {
return natLoanEnd;
}
/** @param natLoanEnd the natLoanEnd to set. */
public void setNatLoanEnd(Integer natLoanEnd) {
this.natLoanEnd = natLoanEnd;
}
/** @return the provincesIncome. */
@Column(name = "PROVINCES_INCOME")
public Integer getProvincesIncome() {
return provincesIncome;
}
/** @param provincesIncome the provincesIncome to set. */
public void setProvincesIncome(Integer provincesIncome) {
this.provincesIncome = provincesIncome;
}
/** @return the vassalIncome. */
@Column(name = "VASSAL_INCOME")
public Integer getVassalIncome() {
return vassalIncome;
}
/** @param vassalIncome the vassalIncome to set. */
public void setVassalIncome(Integer vassalIncome) {
this.vassalIncome = vassalIncome;
}
/** @return the lostIncome. */
@Column(name = "LOST_INCOME")
public Integer getLostIncome() {
return lostIncome;
}
/** @param lostIncome the lostIncome to set. */
public void setLostIncome(Integer lostIncome) {
this.lostIncome = lostIncome;
}
/** @return the eventLandIncome. */
@Column(name = "EVENT_LAND_INCOME")
public Integer getEventLandIncome() {
return eventLandIncome;
}
/** @param eventLandIncome the eventLandIncome to set. */
public void setEventLandIncome(Integer eventLandIncome) {
this.eventLandIncome = eventLandIncome;
}
/** @return the landIncome. */
@Column(name = "LAND_INCOME")
public Integer getLandIncome() {
return landIncome;
}
/** @param landIncome the landIncome to set. */
public void setLandIncome(Integer landIncome) {
this.landIncome = landIncome;
}
/** @return the mnuIncome. */
@Column(name = "MNU_INCOME")
public Integer getMnuIncome() {
return mnuIncome;
}
/** @param mnuIncome the mnuIncome to set. */
public void setMnuIncome(Integer mnuIncome) {
this.mnuIncome = mnuIncome;
}
/** @return the goldIncome. */
@Column(name = "GOLD_INCOME")
public Integer getGoldIncome() {
return goldIncome;
}
/** @param goldIncome the goldIncome to set. */
public void setGoldIncome(Integer goldIncome) {
this.goldIncome = goldIncome;
}
/** @return the industrialIncome. */
@Column(name = "INDUSTRIAL_INCOME")
public Integer getIndustrialIncome() {
return industrialIncome;
}
/** @param industrialIncome the industrialIncome to set. */
public void setIndustrialIncome(Integer industrialIncome) {
this.industrialIncome = industrialIncome;
}
/** @return the domTradeIncome. */
@Column(name = "DOM_TRADE_INCOME")
public Integer getDomTradeIncome() {
return domTradeIncome;
}
/** @param domTradeIncome the domTradeIncome to set. */
public void setDomTradeIncome(Integer domTradeIncome) {
this.domTradeIncome = domTradeIncome;
}
/** @return the forTradeIncome. */
@Column(name = "FOR_TRADE_INCOME")
public Integer getForTradeIncome() {
return forTradeIncome;
}
/** @param forTradeIncome the forTradeIncome to set. */
public void setForTradeIncome(Integer forTradeIncome) {
this.forTradeIncome = forTradeIncome;
}
/** @return the fleetLevelIncome. */
@Column(name = "FLEET_LEVEL_INCOME")
public Integer getFleetLevelIncome() {
return fleetLevelIncome;
}
/** @param fleetLevelIncome the fleetLevelIncome to set. */
public void setFleetLevelIncome(Integer fleetLevelIncome) {
this.fleetLevelIncome = fleetLevelIncome;
}
/** @return the fleetMonopIncome. */
@Column(name = "FLEET_MONOP_INCOME")
public Integer getFleetMonopIncome() {
return fleetMonopIncome;
}
/** @param fleetMonopIncome the fleetMonopIncome to set. */
public void setFleetMonopIncome(Integer fleetMonopIncome) {
this.fleetMonopIncome = fleetMonopIncome;
}
/** @return the tradeCenterIncome. */
@Column(name = "TRADE_CENTER_INCOME")
public Integer getTradeCenterIncome() {
return tradeCenterIncome;
}
/** @param tradeCenterIncome the tradeCenterIncome to set. */
public void setTradeCenterIncome(Integer tradeCenterIncome) {
this.tradeCenterIncome = tradeCenterIncome;
}
/** @return the tradeCenterLoss. */
@Column(name = "TRADE_CENTER_LOSS")
public Integer getTradeCenterLoss() {
return tradeCenterLoss;
}
/** @param tradeCenterLoss the tradeCenterLoss to set. */
public void setTradeCenterLoss(Integer tradeCenterLoss) {
this.tradeCenterLoss = tradeCenterLoss;
}
/** @return the tradeIncome. */
@Column(name = "TRADE_INCOME")
public Integer getTradeIncome() {
return tradeIncome;
}
/** @param tradeIncome the tradeIncome to set. */
public void setTradeIncome(Integer tradeIncome) {
this.tradeIncome = tradeIncome;
}
/** @return the colIncome. */
@Column(name = "COL_INCOME")
public Integer getColIncome() {
return colIncome;
}
/** @param colIncome the colIncome to set. */
public void setColIncome(Integer colIncome) {
this.colIncome = colIncome;
}
/** @return the tpIncome. */
@Column(name = "TP_INCOME")
public Integer getTpIncome() {
return tpIncome;
}
/** @param tpIncome the tpIncome to set. */
public void setTpIncome(Integer tpIncome) {
this.tpIncome = tpIncome;
}
/** @return the exoResIncome. */
@Column(name = "EXO_RES_INCOME")
public Integer getExoResIncome() {
return exoResIncome;
}
/** @param exoResIncome the exoResIncome to set. */
public void setExoResIncome(Integer exoResIncome) {
this.exoResIncome = exoResIncome;
}
/** @return the rotwIncome. */
@Column(name = "ROTW_INCOME")
public Integer getRotwIncome() {
return rotwIncome;
}
/** @param rotwIncome the rotwIncome to set. */
public void setRotwIncome(Integer rotwIncome) {
this.rotwIncome = rotwIncome;
}
/** @return the specialIncome. */
@Column(name = "SPECIAL_INCOME")
public Integer getSpecialIncome() {
return specialIncome;
}
/** @param specialIncome the specialIncome to set. */
public void setSpecialIncome(Integer specialIncome) {
this.specialIncome = specialIncome;
}
/** @return the income. */
@Column(name = "INCOME")
public Integer getIncome() {
return income;
}
/** @param income the income to set. */
public void setIncome(Integer income) {
this.income = income;
}
/** @return the eventIncome. */
@Column(name = "EVENT_INCOME")
public Integer getEventIncome() {
return eventIncome;
}
/** @param eventIncome the eventIncome to set. */
public void setEventIncome(Integer eventIncome) {
this.eventIncome = eventIncome;
}
/** @return the grossIncome. */
@Column(name = "GROSS_INCOME")
public Integer getGrossIncome() {
return grossIncome;
}
/** @param grossIncome the grossIncome to set. */
public void setGrossIncome(Integer grossIncome) {
this.grossIncome = grossIncome;
}
/** @return the interestExpense. */
@Column(name = "INTEREST_EXPENSE")
public Integer getInterestExpense() {
return interestExpense;
}
/** @param interestExpense the interestExpense to set. */
public void setInterestExpense(Integer interestExpense) {
this.interestExpense = interestExpense;
}
/** @return the mandRefundExpense. */
@Column(name = "MAND_REFUND_EXPENSE")
public Integer getMandRefundExpense() {
return mandRefundExpense;
}
/** @param mandRefundExpense the mandRefundExpense to set. */
public void setMandRefundExpense(Integer mandRefundExpense) {
this.mandRefundExpense = mandRefundExpense;
}
/** @return the rtCollapse. */
@Column(name = "RT_COLLAPSE")
public Integer getRtCollapse() {
return rtCollapse;
}
/** @param rtCollapse the rtCollapse to set. */
public void setRtCollapse(Integer rtCollapse) {
this.rtCollapse = rtCollapse;
}
/** @return the optRefundExpense. */
@Column(name = "OPT_REFUND_EXPENSE")
public Integer getOptRefundExpense() {
return optRefundExpense;
}
/** @param optRefundExpense the optRefundExpense to set. */
public void setOptRefundExpense(Integer optRefundExpense) {
this.optRefundExpense = optRefundExpense;
}
/** @return the unitMaintExpense. */
@Column(name = "UNIT_MAINT_EXPENSE")
public Integer getUnitMaintExpense() {
return unitMaintExpense;
}
/** @param unitMaintExpense the unitMaintExpense to set. */
public void setUnitMaintExpense(Integer unitMaintExpense) {
this.unitMaintExpense = unitMaintExpense;
}
/** @return the fortMaintExpense. */
@Column(name = "FORT_MAINT_EXPENSE")
public Integer getFortMaintExpense() {
return fortMaintExpense;
}
/** @param fortMaintExpense the fortMaintExpense to set. */
public void setFortMaintExpense(Integer fortMaintExpense) {
this.fortMaintExpense = fortMaintExpense;
}
/** @return the missMaintExpense. */
@Column(name = "MISS_MAINT_EXPENSE")
public Integer getMissMaintExpense() {
return missMaintExpense;
}
/** @param missMaintExpense the missMaintExpense to set. */
public void setMissMaintExpense(Integer missMaintExpense) {
this.missMaintExpense = missMaintExpense;
}
/** @return the unitPurchExpense. */
@Column(name = "UNIT_PURCH_EXPENSE")
public Integer getUnitPurchExpense() {
return unitPurchExpense;
}
/** @param unitPurchExpense the unitPurchExpense to set. */
public void setUnitPurchExpense(Integer unitPurchExpense) {
this.unitPurchExpense = unitPurchExpense;
}
/** @return the fortPurchExpense. */
@Column(name = "FORT_PURCH_EXPENSE")
public Integer getFortPurchExpense() {
return fortPurchExpense;
}
/** @param fortPurchExpense the fortPurchExpense to set. */
public void setFortPurchExpense(Integer fortPurchExpense) {
this.fortPurchExpense = fortPurchExpense;
}
/** @return the adminActExpense. */
@Column(name = "ADMIN_ACT_EXPENSE")
public Integer getAdminActExpense() {
return adminActExpense;
}
/** @param adminActExpense the adminActExpense to set. */
public void setAdminActExpense(Integer adminActExpense) {
this.adminActExpense = adminActExpense;
}
/** @return the adminReactExpense. */
@Column(name = "ADMIN_REACT_EXPENSE")
public Integer getAdminReactExpense() {
return adminReactExpense;
}
/** @param adminReactExpense the adminReactExpense to set. */
public void setAdminReactExpense(Integer adminReactExpense) {
this.adminReactExpense = adminReactExpense;
}
/** @return the otherExpense. */
@Column(name = "OTHER_EXPENSE")
public Integer getOtherExpense() {
return otherExpense;
}
/** @param otherExpense the otherExpense to set. */
public void setOtherExpense(Integer otherExpense) {
this.otherExpense = otherExpense;
}
/** @return the admTotalExpense. */
@Column(name = "ADM_TOTAL_EXPENSE")
public Integer getAdmTotalExpense() {
return admTotalExpense;
}
/** @param admTotalExpense the admTotalExpense to set. */
public void setAdmTotalExpense(Integer admTotalExpense) {
this.admTotalExpense = admTotalExpense;
}
/** @return the excTaxesMod. */
@Column(name = "EXC_TAXES_MOD")
public Integer getExcTaxesMod() {
return excTaxesMod;
}
/** @param excTaxesMod the excTaxesMod to set. */
public void setExcTaxesMod(Integer excTaxesMod) {
this.excTaxesMod = excTaxesMod;
}
/** @return the passCampExpense. */
@Column(name = "PASS_CAMP_EXPENSE")
public Integer getPassCampExpense() {
return passCampExpense;
}
/** @param passCampExpense the passCampExpense to set. */
public void setPassCampExpense(Integer passCampExpense) {
this.passCampExpense = passCampExpense;
}
/** @return the actCampExpense. */
@Column(name = "ACT_CAMP_EXPENSE")
public Integer getActCampExpense() {
return actCampExpense;
}
/** @param actCampExpense the actCampExpense to set. */
public void setActCampExpense(Integer actCampExpense) {
this.actCampExpense = actCampExpense;
}
/** @return the majCampExpense. */
@Column(name = "MAJ_CAMP_EXPENSE")
public Integer getMajCampExpense() {
return majCampExpense;
}
/** @param majCampExpense the majCampExpense to set. */
public void setMajCampExpense(Integer majCampExpense) {
this.majCampExpense = majCampExpense;
}
/** @return the multCampExpense. */
@Column(name = "MULT_CAMP_EXPENSE")
public Integer getMultCampExpense() {
return multCampExpense;
}
/** @param multCampExpense the multCampExpense to set. */
public void setMultCampExpense(Integer multCampExpense) {
this.multCampExpense = multCampExpense;
}
/** @return the excRecruitExpense. */
@Column(name = "EXC_RECRUIT_EXPENSE")
public Integer getExcRecruitExpense() {
return excRecruitExpense;
}
/** @param excRecruitExpense the excRecruitExpense to set. */
public void setExcRecruitExpense(Integer excRecruitExpense) {
this.excRecruitExpense = excRecruitExpense;
}
/** @return the navalRefitExpense. */
@Column(name = "NAVAL_REFIT_EXPENSE")
public Integer getNavalRefitExpense() {
return navalRefitExpense;
}
/** @param navalRefitExpense the navalRefitExpense to set. */
public void setNavalRefitExpense(Integer navalRefitExpense) {
this.navalRefitExpense = navalRefitExpense;
}
/** @return the praesidioExpense. */
@Column(name = "PRAESIDIO_EXPENSE")
public Integer getPraesidioExpense() {
return praesidioExpense;
}
/** @param praesidioExpense the praesidioExpense to set. */
public void setPraesidioExpense(Integer praesidioExpense) {
this.praesidioExpense = praesidioExpense;
}
/** @return the militaryExpense. */
@Column(name = "MILITARY_EXPENSE")
public Integer getMilitaryExpense() {
return militaryExpense;
}
/** @param militaryExpense the militaryExpense to set. */
public void setMilitaryExpense(Integer militaryExpense) {
this.militaryExpense = militaryExpense;
}
/** @return the expenses. */
@Column(name = "EXPENSES")
public Integer getExpenses() {
return expenses;
}
/** @param expenses the expenses to set. */
public void setExpenses(Integer expenses) {
this.expenses = expenses;
}
}
|
|
/*
* Copyright 2012-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.actuate.endpoint.mvc;
import java.lang.reflect.Method;
import java.util.Arrays;
import java.util.Collection;
import org.junit.Before;
import org.junit.Test;
import org.springframework.boot.actuate.endpoint.AbstractEndpoint;
import org.springframework.context.support.StaticApplicationContext;
import org.springframework.mock.web.MockHttpServletRequest;
import org.springframework.util.ReflectionUtils;
import org.springframework.web.HttpRequestMethodNotSupportedException;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.method.HandlerMethod;
import static org.assertj.core.api.Assertions.assertThat;
/**
* Tests for {@link EndpointHandlerMapping}.
*
* @author Phillip Webb
* @author Dave Syer
*/
public class EndpointHandlerMappingTests {
private final StaticApplicationContext context = new StaticApplicationContext();
private Method method;
@Before
public void init() throws Exception {
this.method = ReflectionUtils.findMethod(TestMvcEndpoint.class, "invoke");
}
@Test
public void withoutPrefix() throws Exception {
TestMvcEndpoint endpointA = new TestMvcEndpoint(new TestEndpoint("a"));
TestMvcEndpoint endpointB = new TestMvcEndpoint(new TestEndpoint("b"));
EndpointHandlerMapping mapping = new EndpointHandlerMapping(
Arrays.asList(endpointA, endpointB));
mapping.setApplicationContext(this.context);
mapping.afterPropertiesSet();
assertThat(mapping.getHandler(request("GET", "/a")).getHandler())
.isEqualTo(new HandlerMethod(endpointA, this.method));
assertThat(mapping.getHandler(request("GET", "/b")).getHandler())
.isEqualTo(new HandlerMethod(endpointB, this.method));
assertThat(mapping.getHandler(request("GET", "/c"))).isNull();
}
@Test
public void withPrefix() throws Exception {
TestMvcEndpoint endpointA = new TestMvcEndpoint(new TestEndpoint("a"));
TestMvcEndpoint endpointB = new TestMvcEndpoint(new TestEndpoint("b"));
EndpointHandlerMapping mapping = new EndpointHandlerMapping(
Arrays.asList(endpointA, endpointB));
mapping.setApplicationContext(this.context);
mapping.setPrefix("/a");
mapping.afterPropertiesSet();
assertThat(mapping.getHandler(new MockHttpServletRequest("GET", "/a/a"))
.getHandler()).isEqualTo(new HandlerMethod(endpointA, this.method));
assertThat(mapping.getHandler(new MockHttpServletRequest("GET", "/a/b"))
.getHandler()).isEqualTo(new HandlerMethod(endpointB, this.method));
assertThat(mapping.getHandler(request("GET", "/a"))).isNull();
}
@Test(expected = HttpRequestMethodNotSupportedException.class)
public void onlyGetHttpMethodForNonActionEndpoints() throws Exception {
TestMvcEndpoint endpoint = new TestMvcEndpoint(new TestEndpoint("a"));
EndpointHandlerMapping mapping = new EndpointHandlerMapping(
Arrays.asList(endpoint));
mapping.setApplicationContext(this.context);
mapping.afterPropertiesSet();
assertThat(mapping.getHandler(request("GET", "/a"))).isNotNull();
assertThat(mapping.getHandler(request("POST", "/a"))).isNull();
}
@Test
public void postHttpMethodForActionEndpoints() throws Exception {
TestActionEndpoint endpoint = new TestActionEndpoint(new TestEndpoint("a"));
EndpointHandlerMapping mapping = new EndpointHandlerMapping(
Arrays.asList(endpoint));
mapping.setApplicationContext(this.context);
mapping.afterPropertiesSet();
assertThat(mapping.getHandler(request("POST", "/a"))).isNotNull();
}
@Test(expected = HttpRequestMethodNotSupportedException.class)
public void onlyPostHttpMethodForActionEndpoints() throws Exception {
TestActionEndpoint endpoint = new TestActionEndpoint(new TestEndpoint("a"));
EndpointHandlerMapping mapping = new EndpointHandlerMapping(
Arrays.asList(endpoint));
mapping.setApplicationContext(this.context);
mapping.afterPropertiesSet();
assertThat(mapping.getHandler(request("POST", "/a"))).isNotNull();
assertThat(mapping.getHandler(request("GET", "/a"))).isNull();
}
@Test
public void disabled() throws Exception {
TestMvcEndpoint endpoint = new TestMvcEndpoint(new TestEndpoint("a"));
EndpointHandlerMapping mapping = new EndpointHandlerMapping(
Arrays.asList(endpoint));
mapping.setDisabled(true);
mapping.setApplicationContext(this.context);
mapping.afterPropertiesSet();
assertThat(mapping.getHandler(request("GET", "/a"))).isNull();
}
@Test
public void duplicatePath() throws Exception {
TestMvcEndpoint endpoint = new TestMvcEndpoint(new TestEndpoint("a"));
TestActionEndpoint other = new TestActionEndpoint(new TestEndpoint("a"));
EndpointHandlerMapping mapping = new EndpointHandlerMapping(
Arrays.asList(endpoint, other));
mapping.setDisabled(true);
mapping.setApplicationContext(this.context);
mapping.afterPropertiesSet();
assertThat(mapping.getHandler(request("GET", "/a"))).isNull();
assertThat(mapping.getHandler(request("POST", "/a"))).isNull();
}
@Test
public void getEndpointsForSpecifiedType() throws Exception {
TestMvcEndpoint endpoint = new TestMvcEndpoint(new TestEndpoint("a"));
TestActionEndpoint other = new TestActionEndpoint(new TestEndpoint("b"));
EndpointHandlerMapping mapping = new EndpointHandlerMapping(
Arrays.asList(endpoint, other));
assertThat(mapping.getEndpoints(TestMvcEndpoint.class)).containsExactly(endpoint);
}
@Test
public void pathNotMappedWhenGetPathReturnsNull() throws Exception {
TestMvcEndpoint endpoint = new TestMvcEndpoint(new TestEndpoint("a"));
TestActionEndpoint other = new TestActionEndpoint(new TestEndpoint("b"));
EndpointHandlerMapping mapping = new TestEndpointHandlerMapping(
Arrays.asList(endpoint, other));
mapping.setApplicationContext(this.context);
mapping.afterPropertiesSet();
assertThat(mapping.getHandlerMethods()).hasSize(1);
assertThat(mapping.getHandler(request("GET", "/a"))).isNull();
assertThat(mapping.getHandler(request("POST", "/b"))).isNotNull();
}
private MockHttpServletRequest request(String method, String requestURI) {
return new MockHttpServletRequest(method, requestURI);
}
private static class TestEndpoint extends AbstractEndpoint<Object> {
TestEndpoint(String id) {
super(id);
}
@Override
public Object invoke() {
return null;
}
}
private static class TestMvcEndpoint extends EndpointMvcAdapter {
TestMvcEndpoint(TestEndpoint delegate) {
super(delegate);
}
}
private static class TestActionEndpoint extends EndpointMvcAdapter {
TestActionEndpoint(TestEndpoint delegate) {
super(delegate);
}
@Override
@PostMapping
public Object invoke() {
return null;
}
}
static class TestEndpointHandlerMapping extends EndpointHandlerMapping {
TestEndpointHandlerMapping(Collection<? extends MvcEndpoint> endpoints) {
super(endpoints);
}
@Override
protected String getPath(MvcEndpoint endpoint) {
if (endpoint instanceof TestActionEndpoint) {
return super.getPath(endpoint);
}
return null;
}
}
}
|
|
/*
* Copyright 2016 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License, version
* 2.0 (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package io.netty.handler.flow;
import java.util.ArrayDeque;
import java.util.Queue;
import io.netty.channel.ChannelConfig;
import io.netty.channel.ChannelDuplexHandler;
import io.netty.channel.ChannelHandler;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.ByteToMessageDecoder;
import io.netty.handler.codec.MessageToByteEncoder;
import io.netty.util.ReferenceCountUtil;
import io.netty.util.internal.ObjectPool;
import io.netty.util.internal.ObjectPool.Handle;
import io.netty.util.internal.ObjectPool.ObjectCreator;
import io.netty.util.internal.logging.InternalLogger;
import io.netty.util.internal.logging.InternalLoggerFactory;
/**
* The {@link FlowControlHandler} ensures that only one message per {@code read()} is sent downstream.
*
* Classes such as {@link ByteToMessageDecoder} or {@link MessageToByteEncoder} are free to emit as
* many events as they like for any given input. A channel's auto reading configuration doesn't usually
* apply in these scenarios. This causes problems for downstream {@link ChannelHandler}s that would
* like to hold off on subsequent events while they're processing the current one. A common example is the
* {@code HttpObjectDecoder}, which will very often fire an {@code HttpRequest} that is immediately followed
* by a {@code LastHttpContent} event.
*
* <pre>{@code
* ChannelPipeline pipeline = ...;
*
* pipeline.addLast(new HttpServerCodec());
* pipeline.addLast(new FlowControlHandler());
*
* pipeline.addLast(new MyExampleHandler());
*
* class MyExampleHandler extends ChannelInboundHandlerAdapter {
* @Override
* public void channelRead(ChannelHandlerContext ctx, Object msg) {
* if (msg instanceof HttpRequest) {
* ctx.channel().config().setAutoRead(false);
*
* // The FlowControlHandler will hold any subsequent events that
* // were emitted by HttpObjectDecoder until auto reading is turned
* // back on or Channel#read() is being called.
* }
* }
* }
* }</pre>
*
* @see ChannelConfig#setAutoRead(boolean)
*/
public class FlowControlHandler extends ChannelDuplexHandler {
private static final InternalLogger logger = InternalLoggerFactory.getInstance(FlowControlHandler.class);
private final boolean releaseMessages;
private RecyclableArrayDeque queue;
private ChannelConfig config;
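// Set by read() when a user-triggered read found the queue empty; it tells the next
// channelRead() to relay at least one message even if auto-read is disabled.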
private boolean shouldConsume;
public FlowControlHandler() {
this(true);
}
public FlowControlHandler(boolean releaseMessages) {
this.releaseMessages = releaseMessages;
}
/**
* Determine if the underlying {@link Queue} is empty. This method exists for
* testing, debugging and inspection purposes and it is not thread-safe!
*/
boolean isQueueEmpty() {
return queue == null || queue.isEmpty();
}
/**
* Releases all messages and destroys the {@link Queue}.
*/
private void destroy() {
if (queue != null) {
if (!queue.isEmpty()) {
logger.trace("Non-empty queue: {}", queue);
if (releaseMessages) {
Object msg;
while ((msg = queue.poll()) != null) {
ReferenceCountUtil.safeRelease(msg);
}
}
}
queue.recycle();
queue = null;
}
}
@Override
public void handlerAdded(ChannelHandlerContext ctx) throws Exception {
config = ctx.channel().config();
}
@Override
public void handlerRemoved(ChannelHandlerContext ctx) throws Exception {
super.handlerRemoved(ctx);
if (!isQueueEmpty()) {
dequeue(ctx, queue.size());
}
destroy();
}
@Override
public void channelInactive(ChannelHandlerContext ctx) throws Exception {
destroy();
ctx.fireChannelInactive();
}
@Override
public void read(ChannelHandlerContext ctx) throws Exception {
if (dequeue(ctx, 1) == 0) {
// It seems no messages were consumed. We need to read() some
// messages from upstream and once one arrives it needs to be
// relayed downstream to keep the flow going.
shouldConsume = true;
ctx.read();
}
}
@Override
public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
if (queue == null) {
queue = RecyclableArrayDeque.newInstance();
}
queue.offer(msg);
// We just received one message. Do we need to relay it regardless
// of the auto reading configuration? The answer is yes if this
// method was called as a result of a prior read() call.
int minConsume = shouldConsume ? 1 : 0;
shouldConsume = false;
dequeue(ctx, minConsume);
}
@Override
public void channelReadComplete(ChannelHandlerContext ctx) throws Exception {
if (isQueueEmpty()) {
ctx.fireChannelReadComplete();
} else {
// Don't relay completion events from upstream as they
// make no sense in this context. See dequeue() where
// a new set of completion events is being produced.
}
}
/**
* Dequeues one or many (or none) messages depending on the channel's auto
* reading state and returns the number of messages that were consumed from
* the internal queue.
*
* The {@code minConsume} argument is used to force {@code dequeue()} into
* consuming that number of messages regardless of the channel's auto
* reading configuration.
*
* @see #read(ChannelHandlerContext)
* @see #channelRead(ChannelHandlerContext, Object)
*/
private int dequeue(ChannelHandlerContext ctx, int minConsume) {
int consumed = 0;
// fireChannelRead(...) may call ctx.read() and so this method may be re-entered. Because of this we need
// to check whether the queue was set to null in the meantime and, if so, break out of the loop.
while (queue != null && (consumed < minConsume || config.isAutoRead())) {
Object msg = queue.poll();
if (msg == null) {
break;
}
++consumed;
ctx.fireChannelRead(msg);
}
// We're firing a completion event every time one (or more)
// messages were consumed and the queue ended up being drained
// to an empty state.
if (queue != null && queue.isEmpty()) {
queue.recycle();
queue = null;
if (consumed > 0) {
ctx.fireChannelReadComplete();
}
}
return consumed;
}
/**
* A recyclable {@link ArrayDeque}.
*/
private static final class RecyclableArrayDeque extends ArrayDeque<Object> {
private static final long serialVersionUID = 0L;
/**
* A value of {@code 2} should be a good choice for most scenarios.
*/
private static final int DEFAULT_NUM_ELEMENTS = 2;
private static final ObjectPool<RecyclableArrayDeque> RECYCLER = ObjectPool.newPool(
new ObjectCreator<RecyclableArrayDeque>() {
@Override
public RecyclableArrayDeque newObject(Handle<RecyclableArrayDeque> handle) {
return new RecyclableArrayDeque(DEFAULT_NUM_ELEMENTS, handle);
}
});
public static RecyclableArrayDeque newInstance() {
return RECYCLER.get();
}
private final Handle<RecyclableArrayDeque> handle;
private RecyclableArrayDeque(int numElements, Handle<RecyclableArrayDeque> handle) {
super(numElements);
this.handle = handle;
}
public void recycle() {
clear();
handle.recycle(this);
}
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.jdbc.store.file;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.sql.SQLException;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Executor;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.activemq.artemis.api.core.ActiveMQBuffer;
import org.apache.activemq.artemis.api.core.ActiveMQBuffers;
import org.apache.activemq.artemis.api.core.ActiveMQException;
import org.apache.activemq.artemis.api.core.ActiveMQExceptionType;
import org.apache.activemq.artemis.core.io.IOCallback;
import org.apache.activemq.artemis.core.io.SequentialFile;
import org.apache.activemq.artemis.core.io.buffer.TimedBuffer;
import org.apache.activemq.artemis.core.journal.EncodingSupport;
import org.apache.activemq.artemis.core.journal.impl.SimpleWaitIOCallback;
import org.jboss.logging.Logger;
public class JDBCSequentialFile implements SequentialFile {
private static final Logger logger = Logger.getLogger(JDBCSequentialFile.class);
private final String filename;
private final String extension;
private AtomicBoolean isOpen = new AtomicBoolean(false);
private AtomicBoolean isLoaded = new AtomicBoolean(false);
private long id = -1;
private long readPosition = 0;
private long writePosition = 0;
private final Executor executor;
private final JDBCSequentialFileFactory fileFactory;
private final Object writeLock;
private final JDBCSequentialFileFactoryDriver dbDriver;
// Allows DB Drivers to cache meta data.
private final Map<Object, Object> metaData = new ConcurrentHashMap<>();
JDBCSequentialFile(final JDBCSequentialFileFactory fileFactory,
final String filename,
final Executor executor,
final JDBCSequentialFileFactoryDriver driver,
final Object writeLock) throws SQLException {
this.fileFactory = fileFactory;
this.filename = filename;
this.extension = filename.contains(".") ? filename.substring(filename.lastIndexOf(".") + 1, filename.length()) : "";
this.executor = executor;
this.writeLock = writeLock;
this.dbDriver = driver;
}
void setWritePosition(long writePosition) {
this.writePosition = writePosition;
}
@Override
public boolean isOpen() {
return isOpen.get();
}
@Override
public boolean exists() {
if (isLoaded.get()) return true;
try {
return fileFactory.listFiles(extension).contains(filename);
} catch (Exception e) {
logger.warn(e.getMessage(), e);
fileFactory.onIOError(e, "Error checking JDBC file exists.", this);
return false;
}
}
@Override
public void open() throws Exception {
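// Flag the file as open only if load() succeeds; load() is idempotent and only
// opens the underlying database record the first time (guarded by isLoaded).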
isOpen.compareAndSet(false, load());
}
private boolean load() {
try {
if (isLoaded.compareAndSet(false, true)) {
dbDriver.openFile(this);
}
return true;
} catch (SQLException e) {
isLoaded.set(false);
fileFactory.onIOError(e, "Error attempting to open JDBC file.", this);
}
return false;
}
@Override
public void open(int maxIO, boolean useExecutor) throws Exception {
open();
}
@Override
public boolean fits(int size) {
return writePosition + size <= dbDriver.getMaxSize();
}
@Override
public int calculateBlockStart(int position) throws Exception {
return 0;
}
@Override
public String getFileName() {
return filename;
}
@Override
public void fill(int size) throws Exception {
// Do nothing
}
@Override
public ByteBuffer map(int position, long size) throws IOException {
return null;
}
@Override
public void delete() throws IOException, InterruptedException, ActiveMQException {
try {
synchronized (writeLock) {
if (load()) {
dbDriver.deleteFile(this);
}
}
} catch (SQLException e) {
fileFactory.onIOError(e, "Error deleting JDBC file.", this);
}
}
private synchronized int internalWrite(byte[] data, IOCallback callback, boolean append) {
try {
open();
synchronized (writeLock) {
int noBytes = dbDriver.writeToFile(this, data, append);
seek(append ? writePosition + noBytes : noBytes);
if (logger.isTraceEnabled()) {
logger.trace("Write: ID: " + this.getId() + " FileName: " + this.getFileName() + " Size: " + size());
}
if (callback != null)
callback.done();
return noBytes;
}
} catch (Exception e) {
if (callback != null)
callback.onError(ActiveMQExceptionType.IO_ERROR.getCode(), e.getMessage());
fileFactory.onIOError(e, "Error writing to JDBC file.", this);
}
return 0;
}
public synchronized int internalWrite(ActiveMQBuffer buffer, IOCallback callback) {
return internalWrite(buffer, callback, true);
}
public synchronized int internalWrite(ActiveMQBuffer buffer, IOCallback callback, boolean append) {
byte[] data = new byte[buffer.readableBytes()];
buffer.readBytes(data);
return internalWrite(data, callback, append);
}
private synchronized int internalWrite(ByteBuffer buffer, IOCallback callback) {
return internalWrite(buffer.array(), callback, true);
}
private void scheduleWrite(final ActiveMQBuffer bytes, final IOCallback callback, boolean append) {
executor.execute(() -> {
internalWrite(bytes, callback, append);
});
}
private void scheduleWrite(final ByteBuffer bytes, final IOCallback callback) {
executor.execute(() -> {
internalWrite(bytes, callback);
});
}
synchronized void seek(long noBytes) {
writePosition = noBytes;
}
public void write(ActiveMQBuffer bytes, boolean sync, IOCallback callback, boolean append) throws Exception {
// We ignore sync since we schedule writes straight away.
scheduleWrite(bytes, callback, append);
}
@Override
public void write(ActiveMQBuffer bytes, boolean sync, IOCallback callback) throws Exception {
write(bytes, sync, callback, true);
}
@Override
public void write(ActiveMQBuffer bytes, boolean sync) throws Exception {
write(bytes, sync, null);
}
@Override
public void write(EncodingSupport bytes, boolean sync, IOCallback callback) throws Exception {
ActiveMQBuffer data = ActiveMQBuffers.fixedBuffer(bytes.getEncodeSize());
bytes.encode(data);
write(data, sync, callback, true);
}
@Override
public void write(EncodingSupport bytes, boolean sync) throws Exception {
write(bytes, sync, null);
}
@Override
public void writeDirect(ByteBuffer bytes, boolean sync, IOCallback callback) {
if (callback == null) {
SimpleWaitIOCallback waitIOCallback = new SimpleWaitIOCallback();
try {
scheduleWrite(bytes, waitIOCallback);
waitIOCallback.waitCompletion();
} catch (Exception e) {
waitIOCallback.onError(ActiveMQExceptionType.IO_ERROR.getCode(), "Error writing to JDBC file.");
fileFactory.onIOError(e, "Failed to write to file.", this);
}
} else {
scheduleWrite(bytes, callback);
}
}
@Override
public void blockingWriteDirect(ByteBuffer bytes, boolean sync, boolean releaseBuffer) {
writeDirect(bytes, sync, null);
}
@Override
public void writeDirect(ByteBuffer bytes, boolean sync) throws Exception {
writeDirect(bytes, sync, null);
// Are we meant to block here?
}
@Override
public synchronized int read(ByteBuffer bytes, final IOCallback callback) throws SQLException {
synchronized (writeLock) {
try {
int read = dbDriver.readFromFile(this, bytes);
readPosition += read;
if (callback != null)
callback.done();
return read;
} catch (SQLException e) {
if (callback != null)
callback.onError(ActiveMQExceptionType.IO_ERROR.getCode(), e.getMessage());
fileFactory.onIOError(e, "Error reading from JDBC file.", this);
}
return 0;
}
}
@Override
public int read(ByteBuffer bytes) throws Exception {
return read(bytes, null);
}
@Override
public void position(long pos) throws IOException {
readPosition = pos;
}
@Override
public long position() {
return readPosition;
}
@Override
public void close() throws Exception {
close(true, true);
}
@Override
public void close(boolean waitOnSync, boolean block) throws Exception {
isOpen.set(false);
if (waitOnSync) {
sync();
}
fileFactory.sequentialFileClosed(this);
}
@Override
public void sync() throws IOException {
final SimpleWaitIOCallback callback = new SimpleWaitIOCallback();
executor.execute(callback::done);
try {
callback.waitCompletion();
} catch (Exception e) {
callback.onError(ActiveMQExceptionType.IO_ERROR.getCode(), "Error during JDBC file sync.");
fileFactory.onIOError(e, "Error during JDBC file sync.", this);
}
}
@Override
public long size() throws Exception {
load();
return writePosition;
}
@Override
public void renameTo(String newFileName) throws Exception {
synchronized (writeLock) {
try {
dbDriver.renameFile(this, newFileName);
} catch (SQLException e) {
fileFactory.onIOError(e, "Error renaming JDBC file.", this);
}
}
}
@Override
public SequentialFile cloneFile() {
try {
JDBCSequentialFile clone = new JDBCSequentialFile(fileFactory, filename, executor, dbDriver, writeLock);
clone.setWritePosition(this.writePosition);
return clone;
} catch (Exception e) {
fileFactory.onIOError(e, "Error cloning JDBC file.", this);
}
return null;
}
@Override
public void copyTo(SequentialFile cloneFile) throws Exception {
JDBCSequentialFile clone = (JDBCSequentialFile) cloneFile;
try {
synchronized (writeLock) {
if (logger.isTraceEnabled()) {
logger.trace("JDBC Copying File. From: " + this + " To: " + cloneFile);
}
clone.open();
dbDriver.copyFileData(this, clone);
clone.setWritePosition(writePosition);
}
} catch (Exception e) {
fileFactory.onIOError(e, "Error copying JDBC file.", this);
}
}
public long getId() {
return id;
}
public void setId(long id) {
this.id = id;
}
public String getFilename() {
return filename;
}
public String getExtension() {
return extension;
}
// Only Used by Journal, no need to implement.
@Override
public void setTimedBuffer(TimedBuffer buffer) {
}
// Only Used by replication, no need to implement.
@Override
public File getJavaFile() {
return null;
}
public void addMetaData(Object key, Object value) {
metaData.put(key, value);
}
public Object getMetaData(Object key) {
return metaData.get(key);
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.processors;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.charset.Charset;
import java.util.Arrays;
import java.util.Map;
import java.util.StringTokenizer;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.commons.compress.archivers.jar.JarArchiveEntry;
import org.apache.commons.compress.archivers.jar.JarArchiveOutputStream;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.hive.conf.HiveVariableSource;
import org.apache.hadoop.hive.conf.VariableSubstitution;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
import org.apache.hadoop.hive.ql.session.SessionState.ResourceType;
import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.Project;
import org.apache.tools.ant.types.Path;
import org.codehaus.groovy.ant.Groovyc;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.io.Files;
/**
* Processor allows users to build code inside a hive session, then
* use this code as a UDF, Serde, or even a more complex entity like an
* input format or hook.
*
* Note: This class is stateful and not thread safe. Create a new instance for
* each invocation of CompileProcessor.
*
*/
public class CompileProcessor implements CommandProcessor {
public static final Logger LOG = LoggerFactory.getLogger(CompileProcessor.class.getName());
public static final LogHelper console = new LogHelper(LOG);
public static final String IO_TMP_DIR = "java.io.tmpdir";
public static final String GROOVY = "GROOVY";
public static final String AS = "AS";
public static final String NAMED = "NAMED";
private static final String SYNTAX = "syntax: COMPILE ` some code here ` AS groovy NAMED something.groovy";
private static final AtomicInteger runCount;
/**
* The language of the compiled code. Used to select the appropriate compiler.
*/
private String lang;
/**
* The code to be compiled
*/
private String code;
/**
* The name of the file the code will be written to
*/
private String named;
/**
* The entire command sent to the processor
*/
private String command;
/**
* Used as part of a file name to help avoid collisions.
*/
private int myId;
static {
runCount = new AtomicInteger(0);
}
/**
* User supplies dynamic code in this format:
* COMPILE ` some code here ` AS groovy NAMED something.groovy;
* CompileProcessor will compile and package this code into a jar. The jar
* will be added to the session state via the session state's
* ADD RESOURCE command.
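* <p>
* An illustrative example of a complete command (the Groovy class and the file name
* below are placeholders, not anything Hive itself provides):
* <pre>
* COMPILE `public class Upper { public String eval(String s) { return s.toUpperCase(); } }` AS GROOVY NAMED Upper.groovy;
* </pre>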
* @param command a String to be compiled
* @return CommandProcessorResponse with 0 for success and 1 for failure
*/
@Override
public CommandProcessorResponse run(String command) {
SessionState ss = SessionState.get();
this.command = command;
CommandProcessorResponse authErrResp =
CommandUtil.authorizeCommand(ss, HiveOperationType.COMPILE, Arrays.asList(command));
if(authErrResp != null){
// there was an authorization issue
return authErrResp;
}
myId = runCount.getAndIncrement();
try {
parse(ss);
} catch (CompileProcessorException e) {
return CommandProcessorResponse.create(e);
}
CommandProcessorResponse result = null;
try {
result = compile(ss);
} catch (CompileProcessorException e) {
return CommandProcessorResponse.create(e);
}
return result;
}
/**
* Parses the supplied command into its code, language, and file-name parts.
* @param ss the current session state, used for variable substitution
* @throws CompileProcessorException if the command does not match the expected syntax
*/
@VisibleForTesting
void parse(SessionState ss) throws CompileProcessorException {
if (ss != null){
command = new VariableSubstitution(new HiveVariableSource() {
@Override
public Map<String, String> getHiveVariable() {
return SessionState.get().getHiveVariables();
}
}).substitute(ss.getConf(), command);
}
if (command == null || command.length() == 0) {
throw new CompileProcessorException("Command was empty");
}
StringBuilder toCompile = new StringBuilder();
int startPosition = 0;
int endPosition = -1;
/* TODO Escape handling may be changed by a follow on.
* The largest issue is ; which are treated as statement
* terminators for the cli. Once the cli is fixed this
* code should be re-investigated
*/
while (command.charAt(startPosition++) != '`' && startPosition < command.length()){
}
if (startPosition == command.length()){
throw new CompileProcessorException(SYNTAX);
}
for (int i = startPosition; i < command.length(); i++) {
if (command.charAt(i) == '\\') {
toCompile.append(command.charAt(i + 1));
i = i + 1;
continue;
} else if (command.charAt(i) == '`'){
endPosition = i;
break;
} else {
toCompile.append(command.charAt(i));
}
}
if (endPosition == -1){
throw new CompileProcessorException(SYNTAX);
}
StringTokenizer st = new StringTokenizer(command.substring(endPosition+1), " ");
if (st.countTokens() != 4){
throw new CompileProcessorException(SYNTAX);
}
String shouldBeAs = st.nextToken();
if (!shouldBeAs.equalsIgnoreCase(AS)){
throw new CompileProcessorException(SYNTAX);
}
setLang(st.nextToken());
if (!lang.equalsIgnoreCase(GROOVY)){
throw new CompileProcessorException("Cannot compile " + lang + ". Hive can only compile " + GROOVY);
}
String shouldBeNamed = st.nextToken();
if (!shouldBeNamed.equalsIgnoreCase(NAMED)){
throw new CompileProcessorException(SYNTAX);
}
setNamed(st.nextToken());
setCode(toCompile.toString());
}
/**
* Writes the statement to a source file, compiles it, and packages the result into a jar.
* @param ss the current session state
* @return response with code 0 for success and 1 for failure
* @throws CompileProcessorException if compilation or packaging fails
*/
@VisibleForTesting
CommandProcessorResponse compile(SessionState ss) throws CompileProcessorException {
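// Overall flow: write the supplied source into a per-invocation temp directory,
// compile it with the Ant Groovyc task, package the resulting class files into a
// jar, and register that jar with the session via ADD RESOURCE.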
Project proj = new Project();
String ioTempDir = System.getProperty(IO_TMP_DIR);
File ioTempFile = new File(ioTempDir);
if (!ioTempFile.exists()){
throw new CompileProcessorException(ioTempDir + " does not exist");
}
if (!ioTempFile.isDirectory() || !ioTempFile.canWrite()){
throw new CompileProcessorException(ioTempDir + " is not a writable directory");
}
Groovyc g = new Groovyc();
long runStamp = System.currentTimeMillis();
String jarId = myId + "_" + runStamp;
g.setProject(proj);
Path sourcePath = new Path(proj);
File destination = new File(ioTempFile, jarId + "out");
g.setDestdir(destination);
File input = new File(ioTempFile, jarId + "in");
sourcePath.setLocation(input);
g.setSrcdir(sourcePath);
input.mkdir();
File fileToWrite = new File(input, this.named);
try {
Files.write(this.code, fileToWrite, Charset.forName("UTF-8"));
} catch (IOException e1) {
throw new CompileProcessorException("writing file", e1);
}
destination.mkdir();
try {
g.execute();
} catch (BuildException ex){
throw new CompileProcessorException("Problem compiling", ex);
}
File testArchive = new File(ioTempFile, jarId + ".jar");
JarArchiveOutputStream out = null;
try {
out = new JarArchiveOutputStream(new FileOutputStream(testArchive));
for (File f: destination.listFiles()){
JarArchiveEntry jentry = new JarArchiveEntry(f.getName());
FileInputStream fis = new FileInputStream(f);
out.putArchiveEntry(jentry);
IOUtils.copy(fis, out);
fis.close();
out.closeArchiveEntry();
}
out.finish();
} catch (IOException e) {
throw new CompileProcessorException("Exception while writing jar", e);
} finally {
if (out!=null){
try {
out.close();
} catch (IOException WhatCanYouDo) {
}
}
}
if (ss != null){
ss.add_resource(ResourceType.JAR, testArchive.getAbsolutePath());
}
CommandProcessorResponse good = new CommandProcessorResponse(0, testArchive.getAbsolutePath(), null);
return good;
}
public String getLang() {
return lang;
}
public void setLang(String lang) {
this.lang = lang;
}
public String getCode() {
return code;
}
public void setCode(String code) {
this.code = code;
}
public String getNamed() {
return named;
}
public void setNamed(String named) {
this.named = named;
}
public String getCommand() {
return command;
}
class CompileProcessorException extends HiveException {
private static final long serialVersionUID = 1L;
CompileProcessorException(String s, Throwable t) {
super(s, t);
}
CompileProcessorException(String s) {
super(s);
}
}
@Override
public void close() throws Exception {
}
}
|
|
/*
* Copyright 2002-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.security.config.annotation.web.configurers;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.mock.web.MockHttpSession;
import org.springframework.security.authentication.AuthenticationTrustResolver;
import org.springframework.security.config.annotation.ObjectPostProcessor;
import org.springframework.security.config.annotation.authentication.builders.AuthenticationManagerBuilder;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity;
import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter;
import org.springframework.security.config.http.SessionCreationPolicy;
import org.springframework.security.config.test.SpringTestContext;
import org.springframework.security.config.test.SpringTestContextExtension;
import org.springframework.security.core.context.SecurityContext;
import org.springframework.security.core.session.SessionRegistry;
import org.springframework.security.core.userdetails.PasswordEncodedUser;
import org.springframework.security.web.authentication.session.ChangeSessionIdAuthenticationStrategy;
import org.springframework.security.web.authentication.session.CompositeSessionAuthenticationStrategy;
import org.springframework.security.web.authentication.session.ConcurrentSessionControlAuthenticationStrategy;
import org.springframework.security.web.authentication.session.RegisterSessionAuthenticationStrategy;
import org.springframework.security.web.context.HttpRequestResponseHolder;
import org.springframework.security.web.context.SecurityContextRepository;
import org.springframework.security.web.savedrequest.RequestCache;
import org.springframework.security.web.session.ConcurrentSessionFilter;
import org.springframework.security.web.session.HttpSessionDestroyedEvent;
import org.springframework.security.web.session.SessionManagementFilter;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.MvcResult;
import org.springframework.test.web.servlet.request.MockHttpServletRequestBuilder;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.BDDMockito.given;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoInteractions;
import static org.springframework.security.config.Customizer.withDefaults;
import static org.springframework.security.test.web.servlet.request.SecurityMockMvcRequestPostProcessors.csrf;
import static org.springframework.security.test.web.servlet.request.SecurityMockMvcRequestPostProcessors.httpBasic;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.redirectedUrl;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
/**
* Tests for {@link SessionManagementConfigurer}
*
* @author Rob Winch
* @author Eleftheria Stein
*/
@ExtendWith(SpringTestContextExtension.class)
public class SessionManagementConfigurerTests {
public final SpringTestContext spring = new SpringTestContext(this);
@Autowired
MockMvc mvc;
@Test
public void sessionManagementWhenConfiguredThenDoesNotOverrideRequestCache() throws Exception {
SessionManagementRequestCacheConfig.REQUEST_CACHE = mock(RequestCache.class);
this.spring.register(SessionManagementRequestCacheConfig.class).autowire();
this.mvc.perform(get("/"));
verify(SessionManagementRequestCacheConfig.REQUEST_CACHE).getMatchingRequest(any(HttpServletRequest.class),
any(HttpServletResponse.class));
}
@Test
public void sessionManagementWhenConfiguredThenDoesNotOverrideSecurityContextRepository() throws Exception {
SessionManagementSecurityContextRepositoryConfig.SECURITY_CONTEXT_REPO = mock(SecurityContextRepository.class);
given(SessionManagementSecurityContextRepositoryConfig.SECURITY_CONTEXT_REPO
.loadContext(any(HttpRequestResponseHolder.class))).willReturn(mock(SecurityContext.class));
this.spring.register(SessionManagementSecurityContextRepositoryConfig.class).autowire();
this.mvc.perform(get("/"));
verify(SessionManagementSecurityContextRepositoryConfig.SECURITY_CONTEXT_REPO)
.saveContext(any(SecurityContext.class), any(HttpServletRequest.class), any(HttpServletResponse.class));
}
@Test
public void sessionManagementWhenInvokedTwiceThenUsesOriginalSessionCreationPolicy() throws Exception {
this.spring.register(InvokeTwiceDoesNotOverride.class).autowire();
MvcResult mvcResult = this.mvc.perform(get("/")).andReturn();
HttpSession session = mvcResult.getRequest().getSession(false);
assertThat(session).isNull();
}
// SEC-2137
@Test
public void getWhenSessionFixationDisabledAndConcurrencyControlEnabledThenSessionIsNotInvalidated()
throws Exception {
this.spring.register(DisableSessionFixationEnableConcurrencyControlConfig.class).autowire();
MockHttpSession session = new MockHttpSession();
String sessionId = session.getId();
// @formatter:off
MockHttpServletRequestBuilder request = get("/")
.with(httpBasic("user", "password"))
.session(session);
MvcResult mvcResult = this.mvc.perform(request)
.andExpect(status().isNotFound())
.andReturn();
// @formatter:on
assertThat(mvcResult.getRequest().getSession().getId()).isEqualTo(sessionId);
}
@Test
public void authenticateWhenNewSessionFixationProtectionInLambdaThenCreatesNewSession() throws Exception {
this.spring.register(SFPNewSessionInLambdaConfig.class).autowire();
MockHttpSession givenSession = new MockHttpSession();
String givenSessionId = givenSession.getId();
givenSession.setAttribute("name", "value");
// @formatter:off
MockHttpServletRequestBuilder request = get("/auth")
.session(givenSession)
.with(httpBasic("user", "password"));
MockHttpSession resultingSession = (MockHttpSession) this.mvc.perform(request)
.andExpect(status().isNotFound())
.andReturn()
.getRequest()
.getSession(false);
// @formatter:on
assertThat(givenSessionId).isNotEqualTo(resultingSession.getId());
assertThat(resultingSession.getAttribute("name")).isNull();
}
@Test
public void loginWhenUserLoggedInAndMaxSessionsIsOneThenLoginPrevented() throws Exception {
this.spring.register(ConcurrencyControlConfig.class).autowire();
// @formatter:off
MockHttpServletRequestBuilder firstRequest = post("/login")
.with(csrf())
.param("username", "user")
.param("password", "password");
this.mvc.perform(firstRequest);
MockHttpServletRequestBuilder secondRequest = post("/login")
.with(csrf())
.param("username", "user")
.param("password", "password");
this.mvc.perform(secondRequest)
.andExpect(status().isFound())
.andExpect(redirectedUrl("/login?error"));
// @formatter:on
}
@Test
public void loginWhenUserSessionExpiredAndMaxSessionsIsOneThenLoggedIn() throws Exception {
this.spring.register(ConcurrencyControlConfig.class).autowire();
// @formatter:off
MockHttpServletRequestBuilder firstRequest = post("/login")
.with(csrf())
.param("username", "user")
.param("password", "password");
MvcResult mvcResult = this.mvc.perform(firstRequest)
.andReturn();
// @formatter:on
HttpSession authenticatedSession = mvcResult.getRequest().getSession();
this.spring.getContext().publishEvent(new HttpSessionDestroyedEvent(authenticatedSession));
// @formatter:off
MockHttpServletRequestBuilder secondRequest = post("/login")
.with(csrf())
.param("username", "user")
.param("password", "password");
this.mvc.perform(secondRequest)
.andExpect(status().isFound())
.andExpect(redirectedUrl("/"));
// @formatter:on
}
@Test
public void loginWhenUserLoggedInAndMaxSessionsOneInLambdaThenLoginPrevented() throws Exception {
this.spring.register(ConcurrencyControlInLambdaConfig.class).autowire();
// @formatter:off
MockHttpServletRequestBuilder firstRequest = post("/login")
.with(csrf())
.param("username", "user")
.param("password", "password");
// @formatter:on
this.mvc.perform(firstRequest);
// @formatter:off
MockHttpServletRequestBuilder secondRequest = post("/login")
.with(csrf())
.param("username", "user")
.param("password", "password");
this.mvc.perform(secondRequest)
.andExpect(status().isFound())
.andExpect(redirectedUrl("/login?error"));
// @formatter:on
}
@Test
public void requestWhenSessionCreationPolicyStateLessInLambdaThenNoSessionCreated() throws Exception {
this.spring.register(SessionCreationPolicyStateLessInLambdaConfig.class).autowire();
MvcResult mvcResult = this.mvc.perform(get("/")).andReturn();
HttpSession session = mvcResult.getRequest().getSession(false);
assertThat(session).isNull();
}
@Test
public void configureWhenRegisteringObjectPostProcessorThenInvokedOnSessionManagementFilter() {
ObjectPostProcessorConfig.objectPostProcessor = spy(ReflectingObjectPostProcessor.class);
this.spring.register(ObjectPostProcessorConfig.class).autowire();
verify(ObjectPostProcessorConfig.objectPostProcessor).postProcess(any(SessionManagementFilter.class));
}
@Test
public void configureWhenRegisteringObjectPostProcessorThenInvokedOnConcurrentSessionFilter() {
ObjectPostProcessorConfig.objectPostProcessor = spy(ReflectingObjectPostProcessor.class);
this.spring.register(ObjectPostProcessorConfig.class).autowire();
verify(ObjectPostProcessorConfig.objectPostProcessor).postProcess(any(ConcurrentSessionFilter.class));
}
@Test
public void configureWhenRegisteringObjectPostProcessorThenInvokedOnConcurrentSessionControlAuthenticationStrategy() {
ObjectPostProcessorConfig.objectPostProcessor = spy(ReflectingObjectPostProcessor.class);
this.spring.register(ObjectPostProcessorConfig.class).autowire();
verify(ObjectPostProcessorConfig.objectPostProcessor)
.postProcess(any(ConcurrentSessionControlAuthenticationStrategy.class));
}
@Test
public void configureWhenRegisteringObjectPostProcessorThenInvokedOnCompositeSessionAuthenticationStrategy() {
ObjectPostProcessorConfig.objectPostProcessor = spy(ReflectingObjectPostProcessor.class);
this.spring.register(ObjectPostProcessorConfig.class).autowire();
verify(ObjectPostProcessorConfig.objectPostProcessor)
.postProcess(any(CompositeSessionAuthenticationStrategy.class));
}
@Test
public void configureWhenRegisteringObjectPostProcessorThenInvokedOnRegisterSessionAuthenticationStrategy() {
ObjectPostProcessorConfig.objectPostProcessor = spy(ReflectingObjectPostProcessor.class);
this.spring.register(ObjectPostProcessorConfig.class).autowire();
verify(ObjectPostProcessorConfig.objectPostProcessor)
.postProcess(any(RegisterSessionAuthenticationStrategy.class));
}
@Test
public void configureWhenRegisteringObjectPostProcessorThenInvokedOnChangeSessionIdAuthenticationStrategy() {
ObjectPostProcessorConfig.objectPostProcessor = spy(ReflectingObjectPostProcessor.class);
this.spring.register(ObjectPostProcessorConfig.class).autowire();
verify(ObjectPostProcessorConfig.objectPostProcessor)
.postProcess(any(ChangeSessionIdAuthenticationStrategy.class));
}
@Test
public void getWhenAnonymousRequestAndTrustResolverSharedObjectReturnsAnonymousFalseThenSessionIsSaved()
throws Exception {
SharedTrustResolverConfig.TR = mock(AuthenticationTrustResolver.class);
given(SharedTrustResolverConfig.TR.isAnonymous(any())).willReturn(false);
this.spring.register(SharedTrustResolverConfig.class).autowire();
MvcResult mvcResult = this.mvc.perform(get("/")).andReturn();
assertThat(mvcResult.getRequest().getSession(false)).isNotNull();
}
@Test
public void whenOneSessionRegistryBeanThenUseIt() throws Exception {
SessionRegistryOneBeanConfig.SESSION_REGISTRY = mock(SessionRegistry.class);
this.spring.register(SessionRegistryOneBeanConfig.class).autowire();
MockHttpSession session = new MockHttpSession(this.spring.getContext().getServletContext());
this.mvc.perform(get("/").session(session));
verify(SessionRegistryOneBeanConfig.SESSION_REGISTRY).getSessionInformation(session.getId());
}
@Test
public void whenTwoSessionRegistryBeansThenUseNeither() throws Exception {
SessionRegistryTwoBeansConfig.SESSION_REGISTRY_ONE = mock(SessionRegistry.class);
SessionRegistryTwoBeansConfig.SESSION_REGISTRY_TWO = mock(SessionRegistry.class);
this.spring.register(SessionRegistryTwoBeansConfig.class).autowire();
MockHttpSession session = new MockHttpSession(this.spring.getContext().getServletContext());
this.mvc.perform(get("/").session(session));
verifyNoInteractions(SessionRegistryTwoBeansConfig.SESSION_REGISTRY_ONE);
verifyNoInteractions(SessionRegistryTwoBeansConfig.SESSION_REGISTRY_TWO);
}
@EnableWebSecurity
static class SessionManagementRequestCacheConfig extends WebSecurityConfigurerAdapter {
static RequestCache REQUEST_CACHE;
@Override
protected void configure(HttpSecurity http) throws Exception {
// @formatter:off
http
.requestCache()
.requestCache(REQUEST_CACHE)
.and()
.sessionManagement()
.sessionCreationPolicy(SessionCreationPolicy.STATELESS);
// @formatter:on
}
}
@EnableWebSecurity
static class SessionManagementSecurityContextRepositoryConfig extends WebSecurityConfigurerAdapter {
static SecurityContextRepository SECURITY_CONTEXT_REPO;
@Override
protected void configure(HttpSecurity http) throws Exception {
// @formatter:off
http
.securityContext()
.securityContextRepository(SECURITY_CONTEXT_REPO)
.and()
.sessionManagement()
.sessionCreationPolicy(SessionCreationPolicy.STATELESS);
// @formatter:on
}
}
@EnableWebSecurity
static class InvokeTwiceDoesNotOverride extends WebSecurityConfigurerAdapter {
@Override
protected void configure(HttpSecurity http) throws Exception {
// @formatter:off
http
.sessionManagement()
.sessionCreationPolicy(SessionCreationPolicy.STATELESS)
.and()
.sessionManagement();
// @formatter:on
}
}
@EnableWebSecurity
static class DisableSessionFixationEnableConcurrencyControlConfig extends WebSecurityConfigurerAdapter {
@Override
public void configure(HttpSecurity http) throws Exception {
// @formatter:off
http
.httpBasic()
.and()
.sessionManagement()
.sessionFixation().none()
.maximumSessions(1);
// @formatter:on
}
@Override
protected void configure(AuthenticationManagerBuilder auth) throws Exception {
// @formatter:off
auth
.inMemoryAuthentication()
.withUser(PasswordEncodedUser.user());
// @formatter:on
}
}
@EnableWebSecurity
static class SFPNewSessionInLambdaConfig extends WebSecurityConfigurerAdapter {
@Override
protected void configure(HttpSecurity http) throws Exception {
// @formatter:off
http
.sessionManagement((sessionManagement) ->
sessionManagement
.sessionFixation((sessionFixation) ->
sessionFixation.newSession()
)
)
.httpBasic(withDefaults());
// @formatter:on
}
@Override
protected void configure(AuthenticationManagerBuilder auth) throws Exception {
// @formatter:off
auth
.inMemoryAuthentication()
.withUser(PasswordEncodedUser.user());
// @formatter:on
}
}
@EnableWebSecurity
static class ConcurrencyControlConfig extends WebSecurityConfigurerAdapter {
@Override
public void configure(HttpSecurity http) throws Exception {
// @formatter:off
http
.formLogin()
.and()
.sessionManagement()
.maximumSessions(1)
.maxSessionsPreventsLogin(true);
// @formatter:on
}
@Override
protected void configure(AuthenticationManagerBuilder auth) throws Exception {
// @formatter:off
auth
.inMemoryAuthentication()
.withUser(PasswordEncodedUser.user());
// @formatter:on
}
}
@EnableWebSecurity
static class ConcurrencyControlInLambdaConfig extends WebSecurityConfigurerAdapter {
@Override
public void configure(HttpSecurity http) throws Exception {
// @formatter:off
http
.formLogin(withDefaults())
.sessionManagement((sessionManagement) ->
sessionManagement
.sessionConcurrency((sessionConcurrency) ->
sessionConcurrency
.maximumSessions(1)
.maxSessionsPreventsLogin(true)
)
);
// @formatter:on
}
@Override
protected void configure(AuthenticationManagerBuilder auth) throws Exception {
// @formatter:off
auth
.inMemoryAuthentication()
.withUser(PasswordEncodedUser.user());
// @formatter:on
}
}
@EnableWebSecurity
static class SessionCreationPolicyStateLessInLambdaConfig extends WebSecurityConfigurerAdapter {
@Override
protected void configure(HttpSecurity http) throws Exception {
// @formatter:off
http
.sessionManagement((sessionManagement) ->
sessionManagement
.sessionCreationPolicy(SessionCreationPolicy.STATELESS)
);
// @formatter:on
}
}
@EnableWebSecurity
static class ObjectPostProcessorConfig extends WebSecurityConfigurerAdapter {
static ObjectPostProcessor<Object> objectPostProcessor;
@Override
protected void configure(HttpSecurity http) throws Exception {
// @formatter:off
http
.sessionManagement()
.maximumSessions(1);
// @formatter:on
}
@Bean
static ObjectPostProcessor<Object> objectPostProcessor() {
return objectPostProcessor;
}
}
static class ReflectingObjectPostProcessor implements ObjectPostProcessor<Object> {
@Override
public <O> O postProcess(O object) {
return object;
}
}
@EnableWebSecurity
static class SharedTrustResolverConfig extends WebSecurityConfigurerAdapter {
static AuthenticationTrustResolver TR;
@Override
protected void configure(HttpSecurity http) {
// @formatter:off
http
.setSharedObject(AuthenticationTrustResolver.class, TR);
// @formatter:on
}
}
@EnableWebSecurity
static class SessionRegistryOneBeanConfig extends WebSecurityConfigurerAdapter {
private static SessionRegistry SESSION_REGISTRY;
@Override
protected void configure(HttpSecurity http) throws Exception {
// @formatter:off
http
.sessionManagement()
.maximumSessions(1);
// @formatter:on
}
@Bean
SessionRegistry sessionRegistry() {
return SESSION_REGISTRY;
}
}
@EnableWebSecurity
static class SessionRegistryTwoBeansConfig extends WebSecurityConfigurerAdapter {
private static SessionRegistry SESSION_REGISTRY_ONE;
private static SessionRegistry SESSION_REGISTRY_TWO;
@Override
protected void configure(HttpSecurity http) throws Exception {
// @formatter:off
http
.sessionManagement()
.maximumSessions(1);
// @formatter:on
}
@Bean
SessionRegistry sessionRegistryOne() {
return SESSION_REGISTRY_ONE;
}
@Bean
SessionRegistry sessionRegistryTwo() {
return SESSION_REGISTRY_TWO;
}
}
}
|
|
/*
* Copyright (c) 2007-2008, debug-commons team
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package org.rubyforge.debugcommons;
import java.io.IOException;
import java.io.PrintWriter;
import java.net.ConnectException;
import java.net.Socket;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.rubyforge.debugcommons.model.ExceptionSuspensionPoint;
import org.rubyforge.debugcommons.model.IRubyBreakpoint;
import org.rubyforge.debugcommons.model.IRubyExceptionBreakpoint;
import org.rubyforge.debugcommons.model.IRubyLineBreakpoint;
import org.rubyforge.debugcommons.model.SuspensionPoint;
import org.rubyforge.debugcommons.model.RubyThreadInfo;
import org.rubyforge.debugcommons.model.RubyDebugTarget;
import org.rubyforge.debugcommons.model.RubyFrame;
import org.rubyforge.debugcommons.model.RubyFrameInfo;
import org.rubyforge.debugcommons.model.RubyThread;
import org.rubyforge.debugcommons.model.RubyVariable;
import org.rubyforge.debugcommons.model.RubyVariableInfo;
public final class RubyDebuggerProxy {
private static final Logger LOGGER = Logger.getLogger(RubyDebuggerProxy.class.getName());
public static enum DebuggerType { CLASSIC_DEBUGGER, RUBY_DEBUG }
public static final DebuggerType CLASSIC_DEBUGGER = DebuggerType.CLASSIC_DEBUGGER;
public static final DebuggerType RUBY_DEBUG = DebuggerType.RUBY_DEBUG;
public static final List<RubyDebuggerProxy> PROXIES = new CopyOnWriteArrayList<RubyDebuggerProxy>();
private final List<RubyDebugEventListener> listeners;
private final Map<Integer, IRubyLineBreakpoint> breakpointsIDs;
private final int timeout;
private final DebuggerType debuggerType;
private RubyDebugTarget debugTarget;
private Socket commandSocket;
private boolean finished;
private PrintWriter commandWriter;
private ICommandFactory commandFactory;
private ReadersSupport readersSupport;
private boolean supportsCondition;
private Object variablesLock = new Object();
// catchpoint removing is not supported by backend yet, handle it in the
// debug-commons-java until the support is added
// http://rubyforge.org/tracker/index.php?func=detail&aid=20237&group_id=1900&atid=7436
private Set<String> removedCatchpoints;
public RubyDebuggerProxy(final DebuggerType debuggerType) {
this(debuggerType, 10); // default reading timeout 10s
}
public RubyDebuggerProxy(final DebuggerType debuggerType, final int timeout) {
this.debuggerType = debuggerType;
this.listeners = new CopyOnWriteArrayList<RubyDebugEventListener>();
this.breakpointsIDs = new HashMap<Integer, IRubyLineBreakpoint>();
this.removedCatchpoints = new HashSet<String>();
this.timeout = timeout;
this.readersSupport = new ReadersSupport(timeout);
}
public void setDebugTarget(RubyDebugTarget debugTarget) throws IOException, RubyDebuggerException {
this.debugTarget = debugTarget;
LOGGER.fine("Proxy target: " + debugTarget);
}
public RubyDebugTarget getDebugTarget() {
return debugTarget;
}
/** <b>Package-private</b> for unit tests only. */
ReadersSupport getReadersSupport() {
return readersSupport;
}
/**
* Set initial breakpoints and start the debugging process stopping (and
* firing event to the {@link #addRubyDebugEventListener}) on the first
* breakpoint.
*
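* <p>
* A rough usage sketch (the <code>target</code>, <code>listener</code> and
* <code>lineBreakpoint</code> variables are placeholders, not part of this API):
* <pre>
* RubyDebuggerProxy proxy = new RubyDebuggerProxy(RubyDebuggerProxy.RUBY_DEBUG, 15);
* proxy.setDebugTarget(target);              // RubyDebugTarget wrapping the spawned debuggee
* proxy.addRubyDebugEventListener(listener); // receives RubyDebugEvents fired by the proxy
* proxy.attach(new IRubyBreakpoint[] { lineBreakpoint });
* </pre>
*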
* @param initialBreakpoints initial set of breakpoints to be set before
* triggering the debugging
*/
public void attach(final IRubyBreakpoint[] initialBreakpoints) throws RubyDebuggerException {
try {
switch(debuggerType) {
case CLASSIC_DEBUGGER:
attachToClassicDebugger(initialBreakpoints);
break;
case RUBY_DEBUG:
attachToRubyDebug(initialBreakpoints);
break;
default:
throw new IllegalStateException("Unhandled debugger type: " + debuggerType);
}
} catch (RubyDebuggerException e) {
PROXIES.remove(this);
throw e;
}
startSuspensionReaderLoop();
}
/**
* Whether the client may send commands to the proxy. Returns false when the
* debuggee has finished (either normally or unexpectedly, e.g. it was killed)
* or the proxy has not started yet.
*/
public synchronized boolean isReady() {
return !finished && commandWriter != null && debugTarget.isAvailable();
}
private synchronized void attachToClassicDebugger(final IRubyBreakpoint[] initialBreakpoints) throws RubyDebuggerException {
try {
commandFactory = new ClassicDebuggerCommandFactory();
readersSupport.startCommandLoop(getCommandSocket().getInputStream());
commandWriter = new PrintWriter(getCommandSocket().getOutputStream(), true);
setBreakpoints(initialBreakpoints);
sendCommand("cont");
} catch (IOException ex) {
throw new RubyDebuggerException(ex);
}
}
private synchronized void attachToRubyDebug(final IRubyBreakpoint[] initialBreakpoints) throws RubyDebuggerException {
try {
commandFactory = new RubyDebugCommandFactory();
readersSupport.startCommandLoop(getCommandSocket().getInputStream());
commandWriter = new PrintWriter(getCommandSocket().getOutputStream(), true);
setBreakpoints(initialBreakpoints);
sendCommand("start");
} catch (IOException ex) {
throw new RubyDebuggerException(ex);
}
}
public void fireDebugEvent(final RubyDebugEvent e) {
for (RubyDebugEventListener listener : listeners) {
listener.onDebugEvent(e);
}
}
public void addRubyDebugEventListener(final RubyDebugEventListener listener) {
listeners.add(listener);
}
public void removeRubyDebugEventListener(final RubyDebugEventListener listener) {
listeners.remove(listener);
}
private PrintWriter getCommandWriter() throws RubyDebuggerException {
assert commandWriter != null : "Proxy has to be started before using the writer";
return commandWriter;
}
protected void setBreakpoints(final IRubyBreakpoint[] breakpoints) throws RubyDebuggerException {
for (IRubyBreakpoint breakpoint: breakpoints) {
addBreakpoint(breakpoint);
}
}
public synchronized void addBreakpoint(final IRubyBreakpoint breakpoint) {
LOGGER.fine("Adding breakpoint: " + breakpoint);
if (!isReady()) {
LOGGER.fine("Session and/or debuggee is not ready, skipping addition of breakpoint: " + breakpoint);
return;
}
assert breakpoint != null : "breakpoint cannot be null";
try {
if (breakpoint instanceof IRubyLineBreakpoint) {
IRubyLineBreakpoint lineBreakpoint = (IRubyLineBreakpoint) breakpoint;
String command = commandFactory.createAddBreakpoint(
lineBreakpoint.getFilePath(), lineBreakpoint.getLineNumber());
sendCommand(command);
Integer id = getReadersSupport().readAddedBreakpointNo();
String condition = lineBreakpoint.getCondition();
if (condition != null && supportsCondition) {
command = commandFactory.createSetCondition(id, condition);
if (command != null) {
sendCommand(command);
getReadersSupport().readConditionSet(); // read response
} else {
LOGGER.info("conditional breakpoints are not supported by backend");
}
}
if (!breakpoint.isEnabled()) {
disableBreakpoint(breakpoint);
}
breakpointsIDs.put(id, lineBreakpoint);
} else if (breakpoint instanceof IRubyExceptionBreakpoint) {
IRubyExceptionBreakpoint excBreakpoint = (IRubyExceptionBreakpoint) breakpoint;
// just 're-enable' if contained in removedCatchpoints
if (!removedCatchpoints.remove(excBreakpoint.getException())) {
String command = commandFactory.createCatchOn(excBreakpoint);
sendCommand(command);
getReadersSupport().readCatchpointSet(); // read response
}
} else {
throw new IllegalArgumentException("Unknown breakpoint type: " + breakpoint);
}
} catch (final RubyDebuggerException ex) {
if (isReady()) {
LOGGER.log(Level.WARNING, "Cannot add breakpoint to: " + getDebugTarget(), ex);
}
}
}
private void disableBreakpoint(final IRubyBreakpoint breakpoint) {
LOGGER.fine("Disabling breakpoint: " + breakpoint);
if (!isReady()) {
LOGGER.fine("Session and/or debuggee is not ready, skipping addition of breakpoint: " + breakpoint);
return;
}
try {
if (breakpoint instanceof IRubyLineBreakpoint) {
IRubyLineBreakpoint lineBreakpoint = (IRubyLineBreakpoint) breakpoint;
Integer id = findBreakpointId(lineBreakpoint);
String command = commandFactory.createDisableBreakpoint(id);
if (command != null) {
sendCommand(command);
getReadersSupport().readDisabledBreakpointNo(id);
} else {
LOGGER.info("disabling breakpoints is nor supported by backend");
}
} else {
removeBreakpoint(breakpoint);
}
} catch (RubyDebuggerException e) {
LOGGER.log(Level.SEVERE, "Exception during disabling breakpoint.", e);
}
}
private void enableBreakpoint(final IRubyBreakpoint breakpoint) {
LOGGER.fine("Enabling breakpoint: " + breakpoint);
if (!isReady()) {
LOGGER.fine("Session and/or debuggee is not ready, skipping addition of breakpoint: " + breakpoint);
return;
}
try {
if (breakpoint instanceof IRubyLineBreakpoint) {
IRubyLineBreakpoint lineBreakpoint = (IRubyLineBreakpoint) breakpoint;
Integer id = findBreakpointId(lineBreakpoint);
String command = commandFactory.createEnableBreakpoint(id);
if (command != null) {
sendCommand(command);
getReadersSupport().readEnabledBreakpointNo(id);
} else {
LOGGER.info("disabling breakpoints is nor supported by backend");
}
} else {
addBreakpoint(breakpoint);
}
} catch (RubyDebuggerException e) {
LOGGER.log(Level.SEVERE, "Exception during enabling breakpoint.", e);
}
}
public synchronized void removeBreakpoint(final IRubyBreakpoint breakpoint) {
removeBreakpoint(breakpoint, false);
}
/**
* Remove the given breakpoint from this debugging session.
*
* @param breakpoint breakpoint to be removed
* @param silent whether info message should be omitted if the breakpoint
* has not been set in this session
*/
public synchronized void removeBreakpoint(final IRubyBreakpoint breakpoint, boolean silent) {
LOGGER.fine("Removing breakpoint: " + breakpoint);
if (!isReady()) {
LOGGER.fine("Session and/or debuggee is not ready, skipping removing of breakpoint: " + breakpoint);
return;
}
if (breakpoint instanceof IRubyLineBreakpoint) {
IRubyLineBreakpoint lineBreakpoint = (IRubyLineBreakpoint) breakpoint;
Integer id = findBreakpointId(lineBreakpoint);
if (id != null) {
String command = commandFactory.createRemoveBreakpoint(id);
try {
sendCommand(command);
getReadersSupport().waitForRemovedBreakpoint(id);
breakpointsIDs.remove(id);
LOGGER.fine("Breakpoint " + breakpoint + " with id " + id + " successfully removed");
} catch (RubyDebuggerException e) {
LOGGER.log(Level.SEVERE, "Exception during removing breakpoint.", e);
}
} else if (!silent) {
LOGGER.fine("Breakpoint [" + breakpoint + "] cannot be removed since " +
"its ID cannot be found. Might have been already removed.");
}
} else if (breakpoint instanceof IRubyExceptionBreakpoint) {
// catchpoint removing is not supported by backend yet, handle in
// the debug-commons-java until the support is added
// http://rubyforge.org/tracker/index.php?func=detail&aid=20237&group_id=1900&atid=7436
IRubyExceptionBreakpoint catchpoint = (IRubyExceptionBreakpoint) breakpoint;
removedCatchpoints.add(catchpoint.getException());
} else {
throw new IllegalArgumentException("Unknown breakpoint type: " + breakpoint);
}
}
/**
* Update the given breakpoint. Use when <em>enabled</em> property has
* changed.
* @param breakpoint breakpoint to be updated
*/
public void updateBreakpoint(IRubyBreakpoint breakpoint) {
if (breakpoint.isEnabled()) {
enableBreakpoint(breakpoint);
} else {
disableBreakpoint(breakpoint);
}
}
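    // Illustrative example (the setEnabled call is a hypothetical mutator on the
    // frontend's breakpoint object; updateBreakpoint itself only dispatches to
    // enableBreakpoint/disableBreakpoint based on isEnabled()):
    //   breakpoint.setEnabled(false);
    //   proxy.updateBreakpoint(breakpoint); // sends the disable command to the backend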
/**
* Find ID under which the given breakpoint is known in the current
* debugging session.
*
* @param wantedBP breakpoint to search for
* @return found ID; might be <tt>null</tt> if none is found
*/
private synchronized Integer findBreakpointId(final IRubyLineBreakpoint wantedBP) {
for (Map.Entry<Integer, IRubyLineBreakpoint> breakpointID : breakpointsIDs.entrySet()) {
IRubyLineBreakpoint bp = breakpointID.getValue();
int id = breakpointID.getKey();
if (wantedBP.getFilePath().equals(bp.getFilePath()) &&
wantedBP.getLineNumber() == bp.getLineNumber()) {
return id;
}
}
return null;
}
private void startSuspensionReaderLoop() {
new SuspensionReaderLoop().start();
}
public Socket getCommandSocket() throws RubyDebuggerException {
if (commandSocket == null) {
commandSocket = attach();
}
return commandSocket;
}
public void resume(final RubyThread thread) {
try {
sendCommand(commandFactory.createResume(thread));
} catch (RubyDebuggerException e) {
LOGGER.log(Level.SEVERE, "resuming of " + thread.getId() + " failed", e);
}
}
private synchronized void sendCommand(final String s) throws RubyDebuggerException {
LOGGER.fine("Sending command debugger: " + s);
if (!isReady()) {
throw new RubyDebuggerException("Trying to send a command [" + s +
"] to non-started or finished proxy (debuggee: " + getDebugTarget() + ", output: \n\n" +
Util.dumpAndDestroyProcess(debugTarget));
}
getCommandWriter().println(s);
}
public void sendStepOver(RubyFrame frame, boolean forceNewLine) {
try {
if (forceNewLine) {
sendCommand(commandFactory.createForcedStepOver(frame));
} else {
sendCommand(commandFactory.createStepOver(frame));
}
} catch (RubyDebuggerException e) {
LOGGER.log(Level.SEVERE, "Stepping failed", e);
}
}
public void sendStepReturnEnd(RubyFrame frame) {
try {
sendCommand(commandFactory.createStepReturn(frame));
} catch (RubyDebuggerException e) {
LOGGER.log(Level.SEVERE, "Stepping failed", e);
}
}
public void sendStepIntoEnd(RubyFrame frame, boolean forceNewLine) {
try {
if (forceNewLine) {
sendCommand(commandFactory.createForcedStepInto(frame));
} else {
sendCommand(commandFactory.createStepInto(frame));
}
} catch (RubyDebuggerException e) {
LOGGER.log(Level.SEVERE, "Stepping failed", e);
}
}
public RubyThreadInfo[] readThreadInfo() throws RubyDebuggerException {
sendCommand(commandFactory.createReadThreads());
return getReadersSupport().readThreads();
}
public RubyFrame[] readFrames(RubyThread thread) throws RubyDebuggerException {
RubyFrameInfo[] infos;
try {
sendCommand(commandFactory.createReadFrames(thread));
infos = getReadersSupport().readFrames();
} catch (RubyDebuggerException e) {
if (isReady()) {
throw e;
}
LOGGER.fine("Session and/or debuggee is not ready, returning empty thread list.");
infos = new RubyFrameInfo[0];
}
RubyFrame[] frames = new RubyFrame[infos.length];
for (int i = 0; i < infos.length; i++) {
RubyFrameInfo info = infos[i];
frames[i] = new RubyFrame(thread, info);
}
return frames;
}
public RubyVariable[] readVariables(RubyFrame frame) throws RubyDebuggerException {
RubyVariableInfo[] infos;
synchronized (variablesLock) {
sendCommand(commandFactory.createReadLocalVariables(frame));
infos = getReadersSupport().readVariables();
}
RubyVariable[] variables= new RubyVariable[infos.length];
for (int i = 0; i < infos.length; i++) {
RubyVariableInfo info = infos[i];
variables[i] = new RubyVariable(info, frame);
}
return variables;
}
public RubyVariable[] readInstanceVariables(final RubyVariable variable) throws RubyDebuggerException {
RubyVariableInfo[] infos;
synchronized (variablesLock) {
sendCommand(commandFactory.createReadInstanceVariable(variable));
infos = getReadersSupport().readVariables();
}
RubyVariable[] variables= new RubyVariable[infos.length];
for (int i = 0; i < infos.length; i++) {
RubyVariableInfo info = infos[i];
variables[i] = new RubyVariable(info, variable);
}
return variables;
}
public RubyVariable[] readGlobalVariables() throws RubyDebuggerException {
RubyVariableInfo[] infos;
synchronized (variablesLock) {
sendCommand(commandFactory.createReadGlobalVariables());
infos = getReadersSupport().readVariables();
}
RubyVariable[] variables= new RubyVariable[infos.length];
for (int i = 0; i < infos.length; i++) {
RubyVariableInfo info = infos[i];
variables[i] = new RubyVariable(this, info);
}
return variables;
}
public RubyVariable inspectExpression(RubyFrame frame, String expression) throws RubyDebuggerException {
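        // Escape literal newlines so a multi-line expression travels as a single
        // line over the line-based command protocol (commands are written with
        // println in sendCommand).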
expression = expression.replaceAll("\n", "\\\\n");
RubyVariableInfo[] infos;
synchronized (variablesLock) {
sendCommand(commandFactory.createInspect(frame, expression));
infos = getReadersSupport().readVariables();
}
return infos.length == 0 ? null : new RubyVariable(infos[0], frame);
}
public void finish(final boolean forced) {
synchronized(this) {
if (finished) {
                // possible if the client calls this explicitly and then a second time from RubyLoop
LOGGER.fine("Trying to finish the same proxy more than once: " + this);
return;
}
if (getDebugTarget().isRemote()) {
// TBD rather detach
sendExit();
}
PROXIES.remove(RubyDebuggerProxy.this);
if (forced) {
sendExit();
try {
                    // Needed to let the IO readers read the last pieces of the
                    // input and output streams.
Thread.sleep(500);
} catch (InterruptedException e) {
LOGGER.log(Level.INFO, "Interrupted during IO readers waiting", e);
}
RubyDebugTarget target = getDebugTarget();
if (!target.isRemote()) {
LOGGER.fine("Destroying process: " + target);
target.getProcess().destroy();
}
}
finished = true;
}
fireDebugEvent(RubyDebugEvent.createTerminateEvent());
}
private synchronized void sendExit() {
if (commandSocket != null && debugTarget.isAvailable()) {
try {
sendCommand("exit");
} catch (RubyDebuggerException ex) {
LOGGER.fine("'exit' command failed. Remote process? -> " + debugTarget.isRemote());
if (!debugTarget.isRemote()) {
LOGGER.fine("'exit' command failed. Process running? -> " + debugTarget.isRunning());
}
}
}
}
public synchronized void jump(final int line) {
try {
sendCommand("jump " + line);
}
catch (final RubyDebuggerException ex) {
if (isReady()) {
LOGGER.log(Level.WARNING, "Cannot jump", ex);
}
}
}
public synchronized void threadPause(final int id) {
try {
sendCommand("pause " + id);
}
catch (final RubyDebuggerException ex) {
if (isReady()) {
LOGGER.log(Level.WARNING, "Cannot pause", ex);
}
}
}
public synchronized void setType(final RubyVariable var, final String new_type) {
try {
sendCommand("set_type " + var.getName() + " " + new_type);
}
catch (final RubyDebuggerException ex) {
if (isReady()) {
LOGGER.log(Level.WARNING, "Cannot set_type", ex);
}
}
}
/**
* Tries to attach to the <code>target</code>'s process and gives up in
* <code>timeout</code> seconds.
*/
private Socket attach() throws RubyDebuggerException {
int port = debugTarget.getPort();
String host = debugTarget.getHost();
Socket socket = null;
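        // Retry arithmetic: timeout * 2 attempts with a 500 ms pause after each
        // failed attempt gives up after roughly 'timeout' seconds, as promised
        // by the Javadoc above.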
for (int tryCount = (timeout*2), i = 0; i < tryCount && socket == null; i++) {
try {
socket = new Socket(host, port);
LOGGER.finest("Successfully attached to " + host + ':' + port);
} catch (ConnectException e) {
synchronized (this) {
if (finished) { // terminated by frontend before process started
throw new RubyDebuggerException("Process was terminated before debugger connection was established.");
}
if (i == tryCount - 1) {
failWithInfo(e);
}
}
try {
if (debugTarget.isAvailable()) {
LOGGER.finest("Cannot connect to " + host + ':' + port + ". Trying again...(" + (tryCount - i - 1) + ')');
Thread.sleep(500);
} else {
failWithInfo(e);
}
} catch (InterruptedException e1) {
LOGGER.log(Level.INFO, "Interrupted during attaching.", e1);
Thread.currentThread().interrupt();
}
} catch (IOException e) {
throw new RubyDebuggerException(e);
}
}
return socket;
}
private void failWithInfo(ConnectException e) throws RubyDebuggerException {
String info = debugTarget.isRemote()
? "[Remote Process at " + debugTarget.getHost() + ':' + debugTarget.getPort() + "]"
: Util.dumpAndDestroyProcess(debugTarget);
throw new RubyDebuggerException("Cannot connect to the debugged process at port "
+ debugTarget.getPort() + " in " + timeout + "s:\n\n" + info, e);
}
    /**
     * Tells the proxy whether breakpoint conditions are supported. Older
     * engine versions do not support them.
     */
void setConditionSupport(final boolean supportsCondition) {
this.supportsCondition = supportsCondition;
}
private class SuspensionReaderLoop extends Thread {
SuspensionReaderLoop() {
this.setName("RubyDebuggerLoop [" + System.currentTimeMillis() + ']');
}
public void suspensionOccurred(final SuspensionPoint hit) {
new Thread() {
public @Override void run() {
debugTarget.suspensionOccurred(hit);
}
}.start();
}
public @Override void run() {
LOGGER.finest("Waiting for breakpoints.");
while (true) {
SuspensionPoint sp = getReadersSupport().readSuspension();
if (sp == SuspensionPoint.END) {
break;
}
LOGGER.finest(sp.toString());
// see removedCatchpoints's JavaDoc
if (sp.isException()) {
ExceptionSuspensionPoint exceptionSP = (ExceptionSuspensionPoint) sp;
if (removedCatchpoints.contains(exceptionSP.getExceptionType())) {
RubyThread thread = getDebugTarget().getThreadById(sp.getThreadId());
if (thread != null) {
RubyDebuggerProxy.this.resume(thread);
continue;
}
}
}
if (!RubyDebuggerProxy.this.isReady()) { // flush events after proxy is finished
LOGGER.info("Session and/or debuggee is not ready, ignoring backend event - suspension point: " + sp);
} else {
SuspensionReaderLoop.this.suspensionOccurred(sp);
}
}
boolean unexpectedFail = getReadersSupport().isUnexpectedFail();
if (unexpectedFail) {
LOGGER.warning("Unexpected fail. Debuggee: " + getDebugTarget() +
", output: \n\n" + Util.dumpAndDestroyProcess(debugTarget));
}
finish(unexpectedFail);
LOGGER.finest("Socket reader loop finished.");
}
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache.distributed.dht;
import java.util.ArrayList;
import java.util.Collection;
import java.util.TreeMap;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.TimeUnit;
import org.apache.ignite.cache.eviction.lru.LruEvictionPolicy;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.internal.IgniteEx;
import org.apache.ignite.internal.IgniteKernal;
import org.apache.ignite.internal.processors.cache.IgniteInternalCache;
import org.apache.ignite.internal.processors.cache.transactions.IgniteTxManager;
import org.apache.ignite.internal.util.typedef.G;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi;
import org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinder;
import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import static org.apache.ignite.cache.CacheAtomicityMode.TRANSACTIONAL;
import static org.apache.ignite.cache.CacheMode.PARTITIONED;
import static org.apache.ignite.cache.CacheWriteSynchronizationMode.PRIMARY_SYNC;
/**
* Tests explicit lock.
*/
public class IgniteCacheMultiTxLockSelfTest extends GridCommonAbstractTest {
/** */
public static final String CACHE_NAME = "part_cache";
/** IP finder. */
private static final TcpDiscoveryIpFinder ipFinder = new TcpDiscoveryVmIpFinder(true);
/** */
private volatile boolean run = true;
/** */
private boolean client;
/** {@inheritDoc} */
@Override protected void afterTestsStopped() throws Exception {
stopAllGrids();
assertEquals(0, G.allGrids().size());
}
/** {@inheritDoc} */
@Override protected IgniteConfiguration getConfiguration(String gridName) throws Exception {
IgniteConfiguration c = super.getConfiguration(gridName);
TcpDiscoverySpi disco = new TcpDiscoverySpi();
disco.setIpFinder(ipFinder);
c.setDiscoverySpi(disco);
CacheConfiguration ccfg = new CacheConfiguration();
ccfg.setName(CACHE_NAME);
ccfg.setAtomicityMode(TRANSACTIONAL);
ccfg.setWriteSynchronizationMode(PRIMARY_SYNC);
ccfg.setBackups(2);
ccfg.setCacheMode(PARTITIONED);
ccfg.setStartSize(100000);
LruEvictionPolicy plc = new LruEvictionPolicy();
plc.setMaxSize(100000);
ccfg.setEvictionPolicy(plc);
ccfg.setEvictSynchronized(true);
c.setCacheConfiguration(ccfg);
c.setClientMode(client);
return c;
}
/**
* @throws Exception If failed.
*/
public void testExplicitLockOneKey() throws Exception {
checkExplicitLock(1, false);
}
/**
* @throws Exception If failed.
*/
public void testExplicitLockManyKeys() throws Exception {
checkExplicitLock(4, false);
}
/**
* @throws Exception If failed.
*/
public void testExplicitLockManyKeysWithClient() throws Exception {
checkExplicitLock(4, true);
}
/**
* @param keys Number of keys.
     * @param testClient If {@code true}, one node is started in client mode.
* @throws Exception If failed.
*/
public void checkExplicitLock(int keys, boolean testClient) throws Exception {
Collection<Thread> threads = new ArrayList<>();
try {
// Start grid 1.
IgniteEx grid1 = startGrid(1);
assertFalse(grid1.configuration().isClientMode());
threads.add(runCacheOperations(grid1.cachex(CACHE_NAME), keys));
TimeUnit.SECONDS.sleep(3L);
client = testClient; // If test client start on node in client mode.
// Start grid 2.
IgniteEx grid2 = startGrid(2);
assertEquals((Object)testClient, grid2.configuration().isClientMode());
client = false;
threads.add(runCacheOperations(grid2.cachex(CACHE_NAME), keys));
TimeUnit.SECONDS.sleep(3L);
// Start grid 3.
IgniteEx grid3 = startGrid(3);
assertFalse(grid3.configuration().isClientMode());
if (testClient)
log.info("Started client node: " + grid3.name());
threads.add(runCacheOperations(grid3.cachex(CACHE_NAME), keys));
TimeUnit.SECONDS.sleep(3L);
// Start grid 4.
IgniteEx grid4 = startGrid(4);
assertFalse(grid4.configuration().isClientMode());
threads.add(runCacheOperations(grid4.cachex(CACHE_NAME), keys));
TimeUnit.SECONDS.sleep(3L);
stopThreads(threads);
for (int i = 1; i <= 4; i++) {
IgniteTxManager tm = ((IgniteKernal)grid(i)).internalCache(CACHE_NAME).context().tm();
assertEquals("txMap is not empty:" + i, 0, tm.idMapSize());
}
}
finally {
stopAllGrids();
}
}
/**
     * @param threads Threads which will be stopped.
*/
private void stopThreads(Iterable<Thread> threads) {
try {
run = false;
for (Thread thread : threads)
thread.join();
}
catch (Exception e) {
U.error(log(), "Couldn't stop threads.", e);
}
}
/**
* @param cache Cache.
* @param keys Number of keys.
* @return Running thread.
*/
@SuppressWarnings("TypeMayBeWeakened")
private Thread runCacheOperations(final IgniteInternalCache<Object,Object> cache, final int keys) {
Thread t = new Thread() {
@Override public void run() {
while (run) {
TreeMap<Integer, String> vals = generateValues(keys);
try {
// Explicit lock.
cache.lock(vals.firstKey(), 0);
try {
// Put or remove.
if (ThreadLocalRandom.current().nextDouble(1) < 0.65)
cache.putAll(vals);
else
cache.removeAll(vals.keySet());
}
catch (Exception e) {
U.error(log(), "Failed cache operation.", e);
}
finally {
cache.unlock(vals.firstKey());
}
U.sleep(100);
}
catch (Exception e){
U.error(log(), "Failed unlock.", e);
}
}
}
};
t.start();
return t;
}
/**
* @param cnt Number of keys to generate.
* @return Map.
*/
private TreeMap<Integer, String> generateValues(int cnt) {
TreeMap<Integer, String> res = new TreeMap<>();
ThreadLocalRandom rnd = ThreadLocalRandom.current();
while (res.size() < cnt) {
int key = rnd.nextInt(0, 100);
res.put(key, String.valueOf(key));
}
return res;
}
}
|
|
// Copyright 2018 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.fhir.protogen;
import static java.nio.charset.StandardCharsets.UTF_8;
import com.beust.jcommander.JCommander;
import com.beust.jcommander.Parameter;
import com.beust.jcommander.ParameterException;
import com.google.common.base.CaseFormat;
import com.google.common.collect.ImmutableList;
import com.google.fhir.common.AnnotationUtils;
import com.google.fhir.common.InvalidFhirException;
import com.google.fhir.proto.Annotations;
import com.google.fhir.proto.Annotations.FhirVersion;
import com.google.fhir.proto.PackageInfo;
import com.google.fhir.r4.core.StructureDefinition;
import com.google.fhir.r4.core.StructureDefinitionKindCode;
import com.google.fhir.r4.core.TypeDerivationRuleCode;
import com.google.protobuf.DescriptorProtos.DescriptorProto;
import com.google.protobuf.DescriptorProtos.FileDescriptorProto;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
/**
* A class that runs ProtoGenerator on the specified inputs, turning FHIR StructureDefinition files
* into proto descriptors. Depending on settings, either the descriptors, the .proto file, or both
* will be emitted.
*/
class ProtoGeneratorMain {
private final Args args;
private static final String EXTENSION_STRUCTURE_DEFINITION_URL =
"http://hl7.org/fhir/StructureDefinition/Extension";
private static class Args {
@Parameter(
names = {"--output_directory"},
description = "Directory where generated output will be saved"
)
private String outputDirectory = ".";
@Parameter(
names = {"--directory_in_source"},
description =
"The directory in the source tree that the proto files will be located. "
+ "This allows for intra-package imports, like codes and extensions.")
private String directoryInSource = null;
@Parameter(
names = {"--filter"},
description =
"Filter for types of definitions in input package to use. If set, must be one of"
+ " resource, profile, extension, datatype.")
private String filter = null;
@Parameter(
names = {"--sort"},
description = "If true, will sort messages within a file by message name.")
private boolean sort = false;
@Parameter(
names = {"--stu3_core_dep"},
description =
"FhirPackage for core STU3 Definitions. See --fhir_definition_dep flag for"
+ " documentaton on format.")
private String stu3CoreDep = null;
@Parameter(
names = {"--r4_core_dep"},
description =
"FhirPackage for core R4 Definitions. See --fhir_definition_dep flag for documentaton"
+ " on format.")
private String r4CoreDep = null;
@Parameter(
names = {"--fhir_definition_dep"},
description =
"List of FhirPackages that this package depends on. These should be zips containing a"
+ " PackageInfo prototxt file, along with resources defined by the package, such"
+ " as the ones generated by the fhir_package rule in protogen.bzl")
private List<String> fhirDefinitionDepList = new ArrayList<>();
@Parameter(
names = {"--additional_import"},
description = "Non-core FHIR Packages to add.")
private List<String> additionalImports = new ArrayList<>();
@Parameter(
names = {"--output_name"},
description =
"Name for output proto files. If writing a single resource proto file, will write to"
+ " {$output_name.proto}. If splitting resources, will write all resources in"
+ " separate files to {$output_name.zip}. If writing a separate extensions file,"
+ " will output ${output_name}_extensions.proto.")
private String outputName = "output";
@Parameter(
names = {"--input_package"},
description = "Input FHIR package",
required = true)
private String inputPackageLocation = null;
@Parameter(
names = {"--exclude"},
description = "Ids of input StructureDefinitions to ignore.")
private List<String> excludeIds = new ArrayList<>();
private Set<FhirPackage> getDependencies() throws IOException, InvalidFhirException {
Set<FhirPackage> packages = new HashSet<>();
for (String fhirDefinitionDep : fhirDefinitionDepList) {
packages.add(FhirPackage.load(fhirDefinitionDep));
}
return packages;
}
}
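  // Illustrative invocation (not from the original sources). The flag names
  // match the @Parameter definitions above; the file paths are hypothetical
  // placeholders:
  //
  //   java com.google.fhir.protogen.ProtoGeneratorMain \
  //       --input_package my_profiles_package.zip \
  //       --r4_core_dep fhir_r4_core_package.zip \
  //       --directory_in_source proto/myproject/fhir \
  //       --output_directory /tmp/out \
  //       --output_name my_profiles
  //
  // With these flags the generated protos are written to /tmp/out/my_profiles.zip
  // (see run() below).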
ProtoGeneratorMain(Args args) {
this.args = args;
}
void run() throws IOException, InvalidFhirException {
Set<FhirPackage> fhirPackages = args.getDependencies();
FhirPackage unfilteredInputPackage = FhirPackage.load(args.inputPackageLocation);
fhirPackages.add(unfilteredInputPackage);
FhirPackage inputPackage =
args.filter == null
? unfilteredInputPackage
: applyFilter(unfilteredInputPackage, args.filter);
PackageInfo packageInfo = inputPackage.packageInfo;
if (packageInfo.getProtoPackage().isEmpty()
|| packageInfo.getFhirVersion() == FhirVersion.FHIR_VERSION_UNKNOWN) {
throw new IllegalArgumentException(
"package_info must contain at least a proto_package and fhir_version.");
}
// Add in core FHIR types (e.g., datatypes and unprofiled resources)
switch (packageInfo.getFhirVersion()) {
case STU3:
if (args.stu3CoreDep == null) {
throw new IllegalArgumentException(
"Package is for STU3, but --stu3_core_dep is not specified.");
}
fhirPackages.add(FhirPackage.load(args.stu3CoreDep));
break;
case R4:
if (args.r4CoreDep == null) {
throw new IllegalArgumentException(
"Package is for R4, but --r4_core_dep is not specified.");
}
fhirPackages.add(FhirPackage.load(args.r4CoreDep));
break;
default:
throw new IllegalArgumentException(
"FHIR version not supported by ProfileGenerator: " + packageInfo.getFhirVersion());
}
List<StructureDefinition> inputDefinitions =
inputPackage.structureDefinitions.stream()
.filter(def -> !args.excludeIds.contains(def.getId().getValue()))
.collect(Collectors.toList());
// Generate the proto file.
System.out.println("Generating proto descriptors...");
ValueSetGenerator valueSetGenerator = new ValueSetGenerator(packageInfo, fhirPackages);
ProtoGenerator generator =
packageInfo.getFhirVersion() != FhirVersion.R4
? new ProtoGenerator(
packageInfo,
args.directoryInSource + "/" + args.outputName + "_codes.proto",
fhirPackages)
: new ProtoGenerator(
packageInfo,
args.directoryInSource + "/" + args.outputName + "_codes.proto",
fhirPackages,
valueSetGenerator);
ProtoFilePrinter printer = new ProtoFilePrinter(packageInfo);
try (ZipOutputStream zipOutputStream =
new ZipOutputStream(
new FileOutputStream(new File(args.outputDirectory, args.outputName + ".zip")))) {
if (inputPackage != null && args.filter == null) {
// We're generating a whole input package. Make sure to add codes if there are any to
// generate.
FileDescriptorProto codesFileProto =
valueSetGenerator.generateCodeSystemAndValueSetsFile(inputPackage);
if (!codesFileProto.getMessageTypeList().isEmpty()) {
try {
zipOutputStream.putNextEntry(new ZipEntry(args.outputName + "_codes.proto"));
byte[] entryBytes = printer.print(codesFileProto).getBytes(UTF_8);
zipOutputStream.write(entryBytes, 0, entryBytes.length);
} finally {
zipOutputStream.closeEntry();
}
}
}
switch (packageInfo.getFileSplittingBehavior()) {
case DEFAULT_SPLITTING_BEHAVIOR:
case NO_SPLITTING:
writeWithCombinedExtensionsAndResourcesFile(
inputDefinitions, generator, printer, packageInfo, zipOutputStream);
break;
case SEPARATE_EXTENSIONS:
writeWithSeparateExtensionsFile(
inputDefinitions, generator, printer, packageInfo, zipOutputStream);
break;
case SPLIT_RESOURCES:
writeSplitResources(inputDefinitions, generator, printer, packageInfo, zipOutputStream);
break;
case UNRECOGNIZED:
throw new IllegalArgumentException(
"Unrecognized file splitting behavior: " + packageInfo.getFileSplittingBehavior());
}
}
}
private static FhirPackage applyFilter(FhirPackage fhirPackage, String type) {
Predicate<StructureDefinition> isDatatype =
def ->
def.getKind().getValue() == StructureDefinitionKindCode.Value.PRIMITIVE_TYPE
|| def.getKind().getValue() == StructureDefinitionKindCode.Value.COMPLEX_TYPE;
Predicate<StructureDefinition> isResource = def -> isResource(def);
Predicate<StructureDefinition> isConstraint =
def -> def.getDerivation().getValue() == TypeDerivationRuleCode.Value.CONSTRAINT;
Predicate<StructureDefinition> isExtensionProfile =
isConstraint.and(
def ->
def.getBaseDefinition()
.getValue()
.equals("http://hl7.org/fhir/StructureDefinition/Extension"));
switch (type) {
case "datatype":
return fhirPackage.filterResources(isDatatype.and(isExtensionProfile.negate()));
case "extension":
return fhirPackage.filterResources(isExtensionProfile);
case "resource":
return fhirPackage.filterResources(isResource.and(isConstraint.negate()));
case "profile":
return fhirPackage.filterResources(isResource.and(isConstraint));
default:
throw new IllegalArgumentException("Unrecognized filter: " + type);
}
}
private void writeWithCombinedExtensionsAndResourcesFile(
List<StructureDefinition> definitions,
ProtoGenerator generator,
ProtoFilePrinter printer,
PackageInfo packageInfo,
ZipOutputStream zipOutputStream)
throws IOException, InvalidFhirException {
FileDescriptorProto proto =
generator.generateFileDescriptor(definitions, args.additionalImports);
if (packageInfo.getLocalContainedResource()) {
proto = generator.addContainedResource(proto, proto.getMessageTypeList());
}
addZipEntry(args.outputName + ".proto", proto, printer, zipOutputStream);
}
private void writeWithSeparateExtensionsFile(
List<StructureDefinition> definitions,
ProtoGenerator generator,
ProtoFilePrinter printer,
PackageInfo packageInfo,
ZipOutputStream zipOutputStream)
throws IOException, InvalidFhirException {
List<StructureDefinition> extensions = new ArrayList<>();
List<StructureDefinition> profiles = new ArrayList<>();
for (StructureDefinition structDef : definitions) {
if (structDef.getBaseDefinition().getValue().equals(EXTENSION_STRUCTURE_DEFINITION_URL)) {
extensions.add(structDef);
} else {
profiles.add(structDef);
}
}
if (!extensions.isEmpty()) {
addZipEntry(
args.outputName + "_extensions.proto",
generator.generateFileDescriptor(extensions),
printer,
zipOutputStream);
args.additionalImports.add(
args.directoryInSource + "/" + args.outputName + "_extensions.proto");
}
if (!profiles.isEmpty()) {
FileDescriptorProto mainFileProto =
generator.generateFileDescriptor(profiles, args.additionalImports);
if (packageInfo.getLocalContainedResource()) {
mainFileProto =
generator.addContainedResource(mainFileProto, mainFileProto.getMessageTypeList());
}
addZipEntry(args.outputName + ".proto", mainFileProto, printer, zipOutputStream);
}
}
private void writeSplitResources(
List<StructureDefinition> definitions,
ProtoGenerator generator,
ProtoFilePrinter printer,
PackageInfo packageInfo,
ZipOutputStream zipOutputStream)
throws IOException, InvalidFhirException {
// Divide into three categories.
// Extensions and datatypes will be printed into a single aggregate file each,
// while resources will be printed into one file per resource.
    // Note primitives are included in datatypes here.
List<StructureDefinition> extensions = new ArrayList<>();
List<StructureDefinition> datatypes = new ArrayList<>();
List<StructureDefinition> resources = new ArrayList<>();
for (StructureDefinition structDef : definitions) {
StructureDefinitionKindCode.Value kind = structDef.getKind().getValue();
if (structDef.getBaseDefinition().getValue().equals(EXTENSION_STRUCTURE_DEFINITION_URL)) {
extensions.add(structDef);
} else if (isResource(structDef)) {
resources.add(structDef);
} else if (kind == StructureDefinitionKindCode.Value.PRIMITIVE_TYPE
|| kind == StructureDefinitionKindCode.Value.COMPLEX_TYPE) {
datatypes.add(structDef);
}
}
if (!extensions.isEmpty()) {
addZipEntry(
args.outputName + "_extensions.proto",
generator.generateFileDescriptor(extensions),
printer,
zipOutputStream);
args.additionalImports.add(
args.directoryInSource + "/" + args.outputName + "_extensions.proto");
}
if (!datatypes.isEmpty()) {
addZipEntry(
"datatypes.proto", generator.generateFileDescriptor(datatypes), printer, zipOutputStream);
}
// TODO: Move Contained Resource logic into ProtoGenerator.java
if (!resources.isEmpty()) {
List<DescriptorProto> containedTypes = new ArrayList<>();
// Note that in the case where there is a contained resource that is local to a proto set,
// (the usual case), we need to define the ContainedResource proto in the same file as the
// Bundle proto to avoid a circular dependency. Since we need to define all other resources
// before we can define ContainedResource, we defer printing the Bundle file until after
// all other resources are generated, and after we've added in ContainedResource.
FileDescriptorProto deferredBundleFile = null;
for (StructureDefinition structDef : resources) {
FileDescriptorProto fileProto =
generator.generateFileDescriptor(ImmutableList.of(structDef), args.additionalImports);
DescriptorProto type = fileProto.getMessageType(0);
String filename =
resourceNameToFileName(
GeneratorUtils.getTypeName(structDef, packageInfo.getFhirVersion()), generator);
if (type.getName().equals("Bundle")) {
deferredBundleFile = fileProto;
} else {
addZipEntry(filename, fileProto, printer, zipOutputStream);
}
if (!type.getOptions().getExtension(Annotations.isAbstractType)) {
containedTypes.add(type);
}
}
if (deferredBundleFile != null) {
if (packageInfo.getLocalContainedResource()) {
FileDescriptorProto.Builder fileBuilder =
generator.addContainedResource(deferredBundleFile, containedTypes).toBuilder();
String importRoot = args.directoryInSource;
while (importRoot.contains("/../")) {
// resolve foo/bar/baz/../../quux into foo/quux
importRoot = importRoot.replaceAll("/[^/]*/\\.\\./", "/");
}
for (DescriptorProto type : containedTypes) {
if (!type.getName().equals("Bundle")) {
fileBuilder.addDependency(
new File(importRoot, resourceNameToFileName(type.getName(), generator))
.toString());
}
}
addZipEntry(
"bundle_and_contained_resource.proto", fileBuilder.build(), printer, zipOutputStream);
} else {
addZipEntry("bundle.proto", deferredBundleFile, printer, zipOutputStream);
}
}
}
}
private void addZipEntry(
String filename,
FileDescriptorProto fileProto,
ProtoFilePrinter printer,
ZipOutputStream zipOutputStream)
throws IOException {
try {
zipOutputStream.putNextEntry(new ZipEntry(filename));
byte[] entryBytes = printer.print(maybeSortFile(fileProto)).getBytes(UTF_8);
zipOutputStream.write(entryBytes, 0, entryBytes.length);
} finally {
zipOutputStream.closeEntry();
}
}
String resourceNameToFileName(String resourceName, ProtoGenerator generator) {
return CaseFormat.UPPER_CAMEL.to(
CaseFormat.LOWER_UNDERSCORE,
GeneratorUtils.resolveAcronyms(GeneratorUtils.toFieldTypeCase(resourceName)))
+ ".proto";
}
private FileDescriptorProto maybeSortFile(FileDescriptorProto proto) {
if (args.sort) {
return proto.toBuilder()
.clearMessageType()
.addAllMessageType(
proto.getMessageTypeList().stream()
.sorted(
(a, b) -> {
boolean aIsPrimitive = AnnotationUtils.isPrimitiveType(a);
boolean bIsPrimitive = AnnotationUtils.isPrimitiveType(b);
if (aIsPrimitive != bIsPrimitive) {
return aIsPrimitive ? -1 : 1;
} else {
return a.getName().compareTo(b.getName());
}
})
.collect(Collectors.toList()))
.build();
}
return proto;
}
private static boolean isResource(StructureDefinition definition) {
// Despite being categorized as "Logical" rather than a "Resource",
// MetadataResource is included here for historical reasons (and lack of a better place...)
return definition.getKind().getValue() == StructureDefinitionKindCode.Value.RESOURCE
|| definition.getId().getValue().equals("MetadataResource");
}
public static void main(String[] argv) throws IOException, InvalidFhirException {
// Each non-flag argument is assumed to be an input file.
Args args = new Args();
JCommander jcommander = new JCommander(args);
try {
jcommander.parse(argv);
} catch (ParameterException exception) {
System.err.printf("Invalid usage: %s\n", exception.getMessage());
System.exit(1);
}
new ProtoGeneratorMain(args).run();
}
}
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.apache.kafka.connect.runtime;
import org.apache.kafka.common.utils.SystemTime;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.utils.Utils;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.runtime.standalone.StandaloneConfig;
import org.apache.kafka.connect.source.SourceRecord;
import org.apache.kafka.connect.source.SourceTask;
import org.apache.kafka.connect.source.SourceTaskContext;
import org.apache.kafka.connect.storage.Converter;
import org.apache.kafka.connect.storage.OffsetStorageReader;
import org.apache.kafka.connect.storage.OffsetStorageWriter;
import org.apache.kafka.connect.util.Callback;
import org.apache.kafka.connect.util.ConnectorTaskId;
import org.apache.kafka.connect.util.ThreadedTest;
import org.easymock.Capture;
import org.easymock.EasyMock;
import org.easymock.IAnswer;
import org.easymock.IExpectationSetters;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.powermock.api.easymock.PowerMock;
import org.powermock.api.easymock.annotation.Mock;
import org.powermock.modules.junit4.PowerMockRunner;
import org.powermock.reflect.Whitebox;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertNull;
@RunWith(PowerMockRunner.class)
public class WorkerSourceTaskTest extends ThreadedTest {
private static final String TOPIC = "topic";
private static final Map<String, byte[]> PARTITION = Collections.singletonMap("key", "partition".getBytes());
private static final Map<String, Integer> OFFSET = Collections.singletonMap("key", 12);
// Connect-format data
private static final Schema KEY_SCHEMA = Schema.INT32_SCHEMA;
private static final Integer KEY = -1;
private static final Schema RECORD_SCHEMA = Schema.INT64_SCHEMA;
private static final Long RECORD = 12L;
// Serialized data. The actual format of this data doesn't matter -- we just want to see that the right version
// is used in the right place.
private static final byte[] SERIALIZED_KEY = "converted-key".getBytes();
private static final byte[] SERIALIZED_RECORD = "converted-record".getBytes();
private ExecutorService executor = Executors.newSingleThreadExecutor();
private ConnectorTaskId taskId = new ConnectorTaskId("job", 0);
private WorkerConfig config;
@Mock private SourceTask sourceTask;
@Mock private Converter keyConverter;
@Mock private Converter valueConverter;
@Mock private KafkaProducer<byte[], byte[]> producer;
@Mock private OffsetStorageReader offsetReader;
@Mock private OffsetStorageWriter offsetWriter;
private WorkerSourceTask workerTask;
@Mock private Future<RecordMetadata> sendFuture;
private Capture<org.apache.kafka.clients.producer.Callback> producerCallbacks;
private static final Map<String, String> EMPTY_TASK_PROPS = Collections.emptyMap();
private static final List<SourceRecord> RECORDS = Arrays.asList(
new SourceRecord(PARTITION, OFFSET, "topic", null, KEY_SCHEMA, KEY, RECORD_SCHEMA, RECORD)
);
@Override
public void setup() {
super.setup();
Map<String, String> workerProps = new HashMap<>();
workerProps.put("key.converter", "org.apache.kafka.connect.json.JsonConverter");
workerProps.put("value.converter", "org.apache.kafka.connect.json.JsonConverter");
workerProps.put("internal.key.converter", "org.apache.kafka.connect.json.JsonConverter");
workerProps.put("internal.value.converter", "org.apache.kafka.connect.json.JsonConverter");
workerProps.put("internal.key.converter.schemas.enable", "false");
workerProps.put("internal.value.converter.schemas.enable", "false");
config = new StandaloneConfig(workerProps);
producerCallbacks = EasyMock.newCapture();
}
private void createWorkerTask() {
workerTask = new WorkerSourceTask(taskId, sourceTask, keyConverter, valueConverter, producer,
offsetReader, offsetWriter, config, new SystemTime());
}
@Test
public void testPollsInBackground() throws Exception {
createWorkerTask();
sourceTask.initialize(EasyMock.anyObject(SourceTaskContext.class));
EasyMock.expectLastCall();
sourceTask.start(EMPTY_TASK_PROPS);
EasyMock.expectLastCall();
final CountDownLatch pollLatch = expectPolls(10);
// In this test, we don't flush, so nothing goes any further than the offset writer
sourceTask.stop();
EasyMock.expectLastCall();
expectOffsetFlush(true);
PowerMock.replayAll();
workerTask.initialize(EMPTY_TASK_PROPS);
executor.submit(workerTask);
awaitPolls(pollLatch);
workerTask.stop();
assertEquals(true, workerTask.awaitStop(1000));
PowerMock.verifyAll();
}
@Test
public void testCommit() throws Exception {
// Test that the task commits properly when prompted
createWorkerTask();
sourceTask.initialize(EasyMock.anyObject(SourceTaskContext.class));
EasyMock.expectLastCall();
sourceTask.start(EMPTY_TASK_PROPS);
EasyMock.expectLastCall();
// We'll wait for some data, then trigger a flush
final CountDownLatch pollLatch = expectPolls(1);
expectOffsetFlush(true);
sourceTask.stop();
EasyMock.expectLastCall();
expectOffsetFlush(true);
PowerMock.replayAll();
workerTask.initialize(EMPTY_TASK_PROPS);
executor.submit(workerTask);
awaitPolls(pollLatch);
assertTrue(workerTask.commitOffsets());
workerTask.stop();
assertEquals(true, workerTask.awaitStop(1000));
PowerMock.verifyAll();
}
@Test
public void testCommitFailure() throws Exception {
// Test that the task commits properly when prompted
createWorkerTask();
sourceTask.initialize(EasyMock.anyObject(SourceTaskContext.class));
EasyMock.expectLastCall();
sourceTask.start(EMPTY_TASK_PROPS);
EasyMock.expectLastCall();
// We'll wait for some data, then trigger a flush
final CountDownLatch pollLatch = expectPolls(1);
expectOffsetFlush(false);
sourceTask.stop();
EasyMock.expectLastCall();
expectOffsetFlush(true);
PowerMock.replayAll();
workerTask.initialize(EMPTY_TASK_PROPS);
executor.submit(workerTask);
awaitPolls(pollLatch);
assertFalse(workerTask.commitOffsets());
workerTask.stop();
assertEquals(true, workerTask.awaitStop(1000));
PowerMock.verifyAll();
}
@Test
public void testSendRecordsConvertsData() throws Exception {
createWorkerTask();
List<SourceRecord> records = new ArrayList<>();
// Can just use the same record for key and value
records.add(new SourceRecord(PARTITION, OFFSET, "topic", null, KEY_SCHEMA, KEY, RECORD_SCHEMA, RECORD));
Capture<ProducerRecord<byte[], byte[]>> sent = expectSendRecordAnyTimes();
PowerMock.replayAll();
Whitebox.setInternalState(workerTask, "toSend", records);
Whitebox.invokeMethod(workerTask, "sendRecords");
assertEquals(SERIALIZED_KEY, sent.getValue().key());
assertEquals(SERIALIZED_RECORD, sent.getValue().value());
PowerMock.verifyAll();
}
@Test
public void testSendRecordsRetries() throws Exception {
createWorkerTask();
// Differentiate only by Kafka partition so we can reuse conversion expectations
SourceRecord record1 = new SourceRecord(PARTITION, OFFSET, "topic", 1, KEY_SCHEMA, KEY, RECORD_SCHEMA, RECORD);
SourceRecord record2 = new SourceRecord(PARTITION, OFFSET, "topic", 2, KEY_SCHEMA, KEY, RECORD_SCHEMA, RECORD);
SourceRecord record3 = new SourceRecord(PARTITION, OFFSET, "topic", 3, KEY_SCHEMA, KEY, RECORD_SCHEMA, RECORD);
// First round
expectSendRecordOnce(false);
// Any Producer retriable exception should work here
expectSendRecordSyncFailure(new org.apache.kafka.common.errors.TimeoutException("retriable sync failure"));
// Second round
expectSendRecordOnce(true);
expectSendRecordOnce(false);
PowerMock.replayAll();
// Try to send 3, make first pass, second fail. Should save last two
Whitebox.setInternalState(workerTask, "toSend", Arrays.asList(record1, record2, record3));
Whitebox.invokeMethod(workerTask, "sendRecords");
assertEquals(true, Whitebox.getInternalState(workerTask, "lastSendFailed"));
assertEquals(Arrays.asList(record2, record3), Whitebox.getInternalState(workerTask, "toSend"));
// Next they all succeed
Whitebox.invokeMethod(workerTask, "sendRecords");
assertEquals(false, Whitebox.getInternalState(workerTask, "lastSendFailed"));
assertNull(Whitebox.getInternalState(workerTask, "toSend"));
PowerMock.verifyAll();
}
@Test
public void testSlowTaskStart() throws Exception {
final CountDownLatch startupLatch = new CountDownLatch(1);
createWorkerTask();
sourceTask.initialize(EasyMock.anyObject(SourceTaskContext.class));
EasyMock.expectLastCall();
sourceTask.start(EMPTY_TASK_PROPS);
EasyMock.expectLastCall().andAnswer(new IAnswer<Object>() {
@Override
public Object answer() throws Throwable {
startupLatch.countDown();
Utils.sleep(100);
return null;
}
});
sourceTask.stop();
EasyMock.expectLastCall();
PowerMock.replayAll();
workerTask.initialize(EMPTY_TASK_PROPS);
executor.submit(workerTask);
// Stopping immediately while the other thread has work to do should result in no polling, no offset commits,
// exiting the work thread immediately, and the stop() method will be invoked in the background thread since it
// cannot be invoked immediately in the thread trying to stop the task.
startupLatch.await(1000, TimeUnit.MILLISECONDS);
workerTask.stop();
assertEquals(true, workerTask.awaitStop(1000));
PowerMock.verifyAll();
}
private CountDownLatch expectPolls(int count) throws InterruptedException {
final CountDownLatch latch = new CountDownLatch(count);
// Note that we stub these to allow any number of calls because the thread will continue to
// run. The count passed in + latch returned just makes sure we get *at least* that number of
// calls
EasyMock.expect(sourceTask.poll())
.andStubAnswer(new IAnswer<List<SourceRecord>>() {
@Override
public List<SourceRecord> answer() throws Throwable {
latch.countDown();
return RECORDS;
}
});
// Fallout of the poll() call
expectSendRecordAnyTimes();
return latch;
}
private void expectSendRecordSyncFailure(Throwable error) throws InterruptedException {
expectConvertKeyValue(false);
offsetWriter.offset(PARTITION, OFFSET);
PowerMock.expectLastCall();
EasyMock.expect(
producer.send(EasyMock.anyObject(ProducerRecord.class),
EasyMock.anyObject(org.apache.kafka.clients.producer.Callback.class)))
.andThrow(error);
}
private Capture<ProducerRecord<byte[], byte[]>> expectSendRecordAnyTimes() throws InterruptedException {
return expectSendRecord(true, false);
}
private Capture<ProducerRecord<byte[], byte[]>> expectSendRecordOnce(boolean isRetry) throws InterruptedException {
return expectSendRecord(false, isRetry);
}
private Capture<ProducerRecord<byte[], byte[]>> expectSendRecord(boolean anyTimes, boolean isRetry) throws InterruptedException {
expectConvertKeyValue(anyTimes);
Capture<ProducerRecord<byte[], byte[]>> sent = EasyMock.newCapture();
// 1. Offset data is passed to the offset storage.
if (!isRetry) {
offsetWriter.offset(PARTITION, OFFSET);
if (anyTimes)
PowerMock.expectLastCall().anyTimes();
else
PowerMock.expectLastCall();
}
// 2. Converted data passed to the producer, which will need callbacks invoked for flush to work
IExpectationSetters<Future<RecordMetadata>> expect = EasyMock.expect(
producer.send(EasyMock.capture(sent),
EasyMock.capture(producerCallbacks)));
IAnswer<Future<RecordMetadata>> expectResponse = new IAnswer<Future<RecordMetadata>>() {
@Override
public Future<RecordMetadata> answer() throws Throwable {
synchronized (producerCallbacks) {
for (org.apache.kafka.clients.producer.Callback cb : producerCallbacks.getValues()) {
cb.onCompletion(new RecordMetadata(new TopicPartition("foo", 0), 0, 0), null);
}
producerCallbacks.reset();
}
return sendFuture;
}
};
if (anyTimes)
expect.andStubAnswer(expectResponse);
else
expect.andAnswer(expectResponse);
return sent;
}
private void expectConvertKeyValue(boolean anyTimes) {
IExpectationSetters<byte[]> convertKeyExpect = EasyMock.expect(keyConverter.fromConnectData(TOPIC, KEY_SCHEMA, KEY));
if (anyTimes)
convertKeyExpect.andStubReturn(SERIALIZED_KEY);
else
convertKeyExpect.andReturn(SERIALIZED_KEY);
IExpectationSetters<byte[]> convertValueExpect = EasyMock.expect(valueConverter.fromConnectData(TOPIC, RECORD_SCHEMA, RECORD));
if (anyTimes)
convertValueExpect.andStubReturn(SERIALIZED_RECORD);
else
convertValueExpect.andReturn(SERIALIZED_RECORD);
}
private void awaitPolls(CountDownLatch latch) throws InterruptedException {
latch.await(1000, TimeUnit.MILLISECONDS);
}
@SuppressWarnings("unchecked")
private void expectOffsetFlush(boolean succeed) throws Exception {
EasyMock.expect(offsetWriter.beginFlush()).andReturn(true);
Future<Void> flushFuture = PowerMock.createMock(Future.class);
EasyMock.expect(offsetWriter.doFlush(EasyMock.anyObject(Callback.class))).andReturn(flushFuture);
// Should throw for failure
IExpectationSetters<Void> futureGetExpect = EasyMock.expect(
flushFuture.get(EasyMock.anyLong(), EasyMock.anyObject(TimeUnit.class)));
if (succeed) {
futureGetExpect.andReturn(null);
} else {
futureGetExpect.andThrow(new TimeoutException());
offsetWriter.cancelFlush();
PowerMock.expectLastCall();
}
}
}
|
|
package net.mcft.copy.betterstorage.tile.crate;
import java.util.HashSet;
import java.util.Set;
import net.mcft.copy.betterstorage.BetterStorage;
import net.mcft.copy.betterstorage.api.crate.ICrateWatcher;
import net.mcft.copy.betterstorage.config.GlobalConfig;
import net.mcft.copy.betterstorage.inventory.InventoryCrateBlockView;
import net.mcft.copy.betterstorage.misc.ItemIdentifier;
import net.mcft.copy.betterstorage.misc.Region;
import net.mcft.copy.betterstorage.utils.StackUtils;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.nbt.NBTTagList;
import net.minecraftforge.common.util.Constants.NBT;
/** Holds data for a single crate pile, a multi-block
* structure made from individual crate blocks */
public class CratePileData {
private static final int maxCratePileSize = 8192;
public final CratePileCollection collection;
public final int id;
private CrateItems contents = new CrateItems();
private int numCrates = 0;
private boolean destroyed = false;
private boolean dirty = false;
private CratePileMap map;
private Set<ICrateWatcher> watchers = new HashSet<ICrateWatcher>();
/** An inventory interface built for machines accessing the crate pile. */
public final InventoryCrateBlockView blockView = new InventoryCrateBlockView(this);
/** Returns the items in this crate pile. */
public CrateItems getContents() { return contents; }
/** Returns the number of crates attached. */
public int getNumCrates() { return numCrates; }
/** Returns the maximum number of slots. */
public int getCapacity() { return numCrates * TileEntityCrate.slotsPerCrate; }
/** Returns the number of unique items. */
public int getUniqueItems() { return contents.getUniqueItems(); }
/** Returns the number of slots occupied. */
public int getOccupiedSlots() { return contents.getTotalStacks(); }
/** Returns the number of slots free. Negative if there's any overflow. */
public int getFreeSlots() { return getCapacity() - getOccupiedSlots(); }
/** Returns the region / bounds this crate pile takes up. */
public Region getRegion() { return map.region; }
public int getCenterX() { return (map.region.minX + map.region.maxX) / 2; }
public int getCenterY() { return (map.region.minY + map.region.maxY) / 2; }
public int getCenterZ() { return (map.region.minZ + map.region.maxZ) / 2; }
public CratePileData(CratePileCollection collection, int id, int numCrates) {
this.collection = collection;
this.id = id;
this.numCrates = numCrates;
}
// Saving related
/** Returns if the crate pile is marked as dirty. */
public boolean isDirty() { return dirty; }
/** Marks the crate pile as dirty. */
public void markDirty() {
if (isDirty() || destroyed) return;
dirty = true;
if (BetterStorage.globalConfig.getBoolean(GlobalConfig.crateDebugMessages))
BetterStorage.log.info(String.format("[CRATE DEBUG] Marked crate pile at [%s,%s,%s] as dirty.",
getCenterX(), getCenterY(), getCenterZ()));
}
/** Saves the crate pile to disk if it's been marked as dirty. */
public void save() {
if (!isDirty()) return;
collection.save(this);
dirty = false;
if (BetterStorage.globalConfig.getBoolean(GlobalConfig.crateDebugMessages))
BetterStorage.log.info(String.format("[CRATE DEBUG] Saved crate pile at [%s,%s,%s].",
getCenterX(), getCenterY(), getCenterZ()));
}
/** Removes this (empty) crate pile from the collection. */
public void remove() {
destroyed = true;
collection.removeCratePile(this);
if (BetterStorage.globalConfig.getBoolean(GlobalConfig.crateDebugMessages))
BetterStorage.log.info(String.format("[CRATE DEBUG] Removed empty crate pile at [%s,%s,%s].",
getCenterX(), getCenterY(), getCenterZ()));
}
// CrateMap related functions
/** Returns if the crate can be added to the crate pile. */
public boolean canAdd(TileEntityCrate crate) {
return ((map != null) && (numCrates < maxCratePileSize) &&
(map.region.contains(crate) || canExpand(crate)) &&
(map.get(crate.xCoord, crate.yCoord - 1, crate.zCoord) || (crate.yCoord == map.region.minY)));
}
/** Returns if the crate can expand the crate pile. */
private boolean canExpand(TileEntityCrate crate) {
int volume = map.region.volume();
// Can't expand if there's not enough crates in the bounding box.
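		// Worked example: for a 3x3x3 bounding box (volume 27) the pile needs at
		// least min((int)(27 * 0.8), 27 - 5) = min(21, 22) = 21 crates before it
		// may expand past the current region.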
if (numCrates < Math.min((int)(volume * 0.8), volume - 5)) return false;
if (crate.xCoord < map.region.minX || crate.xCoord > map.region.maxX) {
int maxDiff = ((map.region.height() == 1) ? 1 : 3);
if (map.region.width() >= maxDiff + Math.min(map.region.height(), map.region.depth()))
return false;
} else if (crate.zCoord < map.region.minZ || crate.zCoord > map.region.maxZ) {
int maxDiff = ((map.region.width() == 1) ? 1 : 3);
if (map.region.height() >= maxDiff + Math.min(map.region.width(), map.region.depth()))
return false;
} else if (crate.yCoord < map.region.minY || crate.yCoord > map.region.maxY) {
int maxDiff = ((map.region.width() == 1 || map.region.height() == 1) ? 1 : 4);
if (map.region.depth() >= maxDiff + Math.min(map.region.width(), map.region.height()))
return false;
}
return true;
}
public void trimMap() {
if (map != null)
map.trim();
}
// Adding and removing crates
/** Adds a crate to the crate pile, increasing the number
* of crates and adding it to the crate pile map. */
public void addCrate(TileEntityCrate crate) {
if (numCrates == 0)
map = new CratePileMap(crate);
map.add(crate);
numCrates++;
markDirty();
}
/** Removes a crate from the crate pile, decreasing the number
* of crates and removing it from the crate pile map. */
public void removeCrate(TileEntityCrate crate) {
if (--numCrates > 0) {
if (map != null)
map.remove(crate);
markDirty();
} else remove();
}
/** Returns if there's a crate from the crate pile at that position. */
public boolean hasCrate(int x, int y, int z) {
return map.get(x, y, z);
}
// Adding items
/** Tries to add a stack to the contents. <br>
* Returns what could not be added, null if there was no overflow. */
public ItemStack addItems(ItemStack stack) {
if (stack == null) return null;
ItemStack overflow = null;
int space = getSpaceForItem(stack);
if (space > 0) {
if (space < stack.stackSize)
overflow = stack.splitStack(stack.stackSize - space);
ItemIdentifier item = new ItemIdentifier(stack);
getContents().set(item, getContents().get(item) + stack.stackSize);
for (ICrateWatcher watcher : watchers)
watcher.onCrateItemsModified(stack);
} else overflow = stack;
markDirty();
return overflow;
}
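// Usage sketch (illustrative only; "pileData" is a placeholder variable):
//   ItemStack overflow = pileData.addItems(new ItemStack(Blocks.cobblestone, 64));
//   if (overflow != null) {
//       // Not everything fit; drop the remainder or hand it back to the player.
//   }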
// Removing items
/** Removes and returns a specific amount of items. <br>
* Returns less than the requested amount when there's
* not enough, or null if there's none at all. */
public ItemStack removeItems(ItemIdentifier item, int amount) {
int currentAmount = getContents().get(item);
amount = Math.min(amount, currentAmount);
if (amount <= 0) return null;
getContents().set(item, currentAmount - amount);
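// Watchers receive a stack with a negative size,
// so a removal can be told apart from an addition.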
ItemStack removedStack = item.createStack(-amount);
for (ICrateWatcher watcher : watchers)
watcher.onCrateItemsModified(removedStack);
markDirty();
return item.createStack(amount);
}
/** Removes and returns items matching the given stack,
* up to that stack's size. <br>
* Returns less than the requested amount when there's
* not enough, or null if there's none at all. */
public ItemStack removeItems(ItemStack stack) {
return removeItems(new ItemIdentifier(stack), stack.stackSize);
}
// Checking space
/** Returns how much space there is left for a specific item. */
public int getSpaceForItem(ItemIdentifier item) {
if (item == null) return 0;
int amount = getContents().get(item);
ItemStack testStack = item.createStack(amount);
int maxStackSize = testStack.getMaxStackSize();
int space = getFreeSlots() * maxStackSize;
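// Also count the space remaining in this item's last, partially filled stack.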
if (amount > 0)
space += (StackUtils.calcNumStacks(testStack) * maxStackSize) - testStack.stackSize;
return space;
}
/** Returns how much space there is left for a specific item. */
public int getSpaceForItem(ItemStack item) {
if (item == null) return 0;
return getSpaceForItem(new ItemIdentifier(item));
}
// Crate watcher related functions
/** Adds a crate watcher to the watchers list, so it
* gets informed about any added or removed items. */
public void addWatcher(ICrateWatcher watcher) {
watchers.add(watcher);
}
/** Removes a crate watcher from the watchers list. */
public void removeWatcher(ICrateWatcher watcher) {
watchers.remove(watcher);
}
// NBT related functions
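/** Writes this crate pile's contents and map to an NBT compound for saving.
* Stacks use a vanilla-like format, but "Count" is stored as an integer so
* a single entry can hold more than a regular stack's worth of items. */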
public NBTTagCompound toCompound() {
NBTTagCompound compound = new NBTTagCompound();
compound.setShort("numCrates", (short)getNumCrates());
NBTTagList stacks = new NBTTagList();
for (ItemStack stack : getContents().getItems()) {
NBTTagCompound stackCompound = new NBTTagCompound();
stackCompound.setShort("id", (short)Item.getIdFromItem(stack.getItem()));
stackCompound.setInteger("Count", stack.stackSize);
stackCompound.setShort("Damage", (short)StackUtils.getRealItemDamage(stack));
if (stack.hasTagCompound())
stackCompound.setTag("tag", stack.getTagCompound());
stacks.appendTag(stackCompound);
}
compound.setTag("stacks", stacks);
if (map != null)
compound.setTag("map", map.toCompound());
return compound;
}
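/** Recreates a crate pile from an NBT compound previously written by toCompound(). */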
public static CratePileData fromCompound(CratePileCollection collection, int crateId, NBTTagCompound compound) {
int numCrates = compound.getShort("numCrates");
CratePileData pileData = new CratePileData(collection, crateId, numCrates);
NBTTagList stacks = compound.getTagList("stacks", NBT.TAG_COMPOUND);
for (int j = 0; j < stacks.tagCount(); j++) {
NBTTagCompound stackCompound = stacks.getCompoundTagAt(j);
Item item = Item.getItemById(stackCompound.getShort("id"));
int count = stackCompound.getInteger("Count");
int damage = stackCompound.getShort("Damage");
ItemStack stack = new ItemStack(item, count, damage);
if (stackCompound.hasKey("tag"))
stack.stackTagCompound = stackCompound.getCompoundTag("tag");
if (stack.getItem() != null)
pileData.getContents().set(new ItemIdentifier(stack), stack.stackSize);
}
if (compound.hasKey("map"))
pileData.map = CratePileMap.fromCompound(compound.getCompoundTag("map"));
return pileData;
}
}