gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/*
 * Copyright 2012-2021 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.boot.actuate.endpoint.invoker.cache;

import java.security.Principal;
import java.time.Duration;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;

import org.junit.jupiter.api.Test;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

import org.springframework.boot.actuate.endpoint.ApiVersion;
import org.springframework.boot.actuate.endpoint.InvocationContext;
import org.springframework.boot.actuate.endpoint.OperationArgumentResolver;
import org.springframework.boot.actuate.endpoint.SecurityContext;
import org.springframework.boot.actuate.endpoint.invoke.OperationInvoker;

import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException;
import static org.mockito.BDDMockito.given;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;

/**
 * Tests for {@link CachingOperationInvoker}.
 *
 * @author Stephane Nicoll
 * @author Christoph Dreis
 * @author Phillip Webb
 */
class CachingOperationInvokerTests {

    // A TTL long enough that the cache never expires during a test run.
    private static final long CACHE_TTL = Duration.ofHours(1).toMillis();

    @Test
    void createInstanceWithTtlSetToZero() {
        assertThatIllegalArgumentException()
                .isThrownBy(() -> new CachingOperationInvoker(mock(OperationInvoker.class), 0))
                .withMessageContaining("TimeToLive");
    }

    @Test
    void cacheInTtlRangeWithNoParameter() {
        assertCacheIsUsed(Collections.emptyMap());
    }

    @Test
    void cacheInTtlWithPrincipal() {
        assertCacheIsUsed(Collections.emptyMap(), mock(Principal.class));
    }

    @Test
    void cacheInTtlWithNullParameters() {
        // All-null parameter values should still be considered cacheable.
        Map<String, Object> parameters = new HashMap<>();
        parameters.put("first", null);
        parameters.put("second", null);
        assertCacheIsUsed(parameters);
    }

    @Test
    void cacheInTtlWithMonoResponse() {
        MonoOperationInvoker.invocations = new AtomicInteger();
        MonoOperationInvoker target = new MonoOperationInvoker();
        InvocationContext context = new InvocationContext(mock(SecurityContext.class), Collections.emptyMap());
        CachingOperationInvoker invoker = new CachingOperationInvoker(target, CACHE_TTL);
        Object response = ((Mono<?>) invoker.invoke(context)).block();
        Object cachedResponse = ((Mono<?>) invoker.invoke(context)).block();
        assertThat(MonoOperationInvoker.invocations).hasValue(1);
        assertThat(response).isSameAs(cachedResponse);
    }

    @Test
    void cacheInTtlWithFluxResponse() {
        FluxOperationInvoker.invocations = new AtomicInteger();
        FluxOperationInvoker target = new FluxOperationInvoker();
        InvocationContext context = new InvocationContext(mock(SecurityContext.class), Collections.emptyMap());
        CachingOperationInvoker invoker = new CachingOperationInvoker(target, CACHE_TTL);
        Object response = ((Flux<?>) invoker.invoke(context)).blockLast();
        Object cachedResponse = ((Flux<?>) invoker.invoke(context)).blockLast();
        assertThat(FluxOperationInvoker.invocations).hasValue(1);
        assertThat(response).isSameAs(cachedResponse);
    }

    private void assertCacheIsUsed(Map<String, Object> parameters) {
        assertCacheIsUsed(parameters, null);
    }

    // Invokes twice with identical context and verifies the delegate ran only once.
    private void assertCacheIsUsed(Map<String, Object> parameters, Principal principal) {
        OperationInvoker target = mock(OperationInvoker.class);
        Object expected = new Object();
        SecurityContext securityContext = mock(SecurityContext.class);
        if (principal != null) {
            given(securityContext.getPrincipal()).willReturn(principal);
        }
        InvocationContext context = new InvocationContext(securityContext, parameters);
        given(target.invoke(context)).willReturn(expected);
        CachingOperationInvoker invoker = new CachingOperationInvoker(target, CACHE_TTL);
        Object response = invoker.invoke(context);
        assertThat(response).isSameAs(expected);
        verify(target, times(1)).invoke(context);
        Object cachedResponse = invoker.invoke(context);
        assertThat(cachedResponse).isSameAs(response);
        verifyNoMoreInteractions(target);
    }

    @Test
    void targetAlwaysInvokedWithParameters() {
        // Non-null parameter values make the invocation non-cacheable.
        OperationInvoker target = mock(OperationInvoker.class);
        Map<String, Object> parameters = new HashMap<>();
        parameters.put("test", "value");
        parameters.put("something", null);
        InvocationContext context = new InvocationContext(mock(SecurityContext.class), parameters);
        given(target.invoke(context)).willReturn(new Object());
        CachingOperationInvoker invoker = new CachingOperationInvoker(target, CACHE_TTL);
        invoker.invoke(context);
        invoker.invoke(context);
        invoker.invoke(context);
        verify(target, times(3)).invoke(context);
    }

    @Test
    void targetAlwaysInvokedWithDifferentPrincipals() {
        OperationInvoker target = mock(OperationInvoker.class);
        Map<String, Object> parameters = new HashMap<>();
        SecurityContext securityContext = mock(SecurityContext.class);
        // A different principal on each call must defeat the cache.
        given(securityContext.getPrincipal()).willReturn(mock(Principal.class), mock(Principal.class),
                mock(Principal.class));
        InvocationContext context = new InvocationContext(securityContext, parameters);
        Object result1 = new Object();
        Object result2 = new Object();
        Object result3 = new Object();
        given(target.invoke(context)).willReturn(result1, result2, result3);
        CachingOperationInvoker invoker = new CachingOperationInvoker(target, CACHE_TTL);
        assertThat(invoker.invoke(context)).isEqualTo(result1);
        assertThat(invoker.invoke(context)).isEqualTo(result2);
        assertThat(invoker.invoke(context)).isEqualTo(result3);
        verify(target, times(3)).invoke(context);
    }

    @Test
    void targetInvokedWhenCalledWithAndWithoutPrincipal() {
        OperationInvoker target = mock(OperationInvoker.class);
        Map<String, Object> parameters = new HashMap<>();
        SecurityContext anonymous = mock(SecurityContext.class);
        SecurityContext authenticated = mock(SecurityContext.class);
        given(authenticated.getPrincipal()).willReturn(mock(Principal.class));
        InvocationContext anonymousContext = new InvocationContext(anonymous, parameters);
        Object anonymousResult = new Object();
        given(target.invoke(anonymousContext)).willReturn(anonymousResult);
        InvocationContext authenticatedContext = new InvocationContext(authenticated, parameters);
        Object authenticatedResult = new Object();
        given(target.invoke(authenticatedContext)).willReturn(authenticatedResult);
        CachingOperationInvoker invoker = new CachingOperationInvoker(target, CACHE_TTL);
        // Anonymous and authenticated results are cached independently of each other.
        assertThat(invoker.invoke(anonymousContext)).isEqualTo(anonymousResult);
        assertThat(invoker.invoke(authenticatedContext)).isEqualTo(authenticatedResult);
        assertThat(invoker.invoke(anonymousContext)).isEqualTo(anonymousResult);
        assertThat(invoker.invoke(authenticatedContext)).isEqualTo(authenticatedResult);
        verify(target, times(1)).invoke(anonymousContext);
        verify(target, times(1)).invoke(authenticatedContext);
    }

    @Test
    void targetInvokedWhenCacheExpires() throws InterruptedException {
        OperationInvoker target = mock(OperationInvoker.class);
        Map<String, Object> parameters = new HashMap<>();
        InvocationContext context = new InvocationContext(mock(SecurityContext.class), parameters);
        given(target.invoke(context)).willReturn(new Object());
        CachingOperationInvoker invoker = new CachingOperationInvoker(target, 50L);
        invoker.invoke(context);
        // Wait past the 50ms TTL before invoking again.
        long expired = System.currentTimeMillis() + 50;
        while (System.currentTimeMillis() < expired) {
            Thread.sleep(10);
        }
        invoker.invoke(context);
        verify(target, times(2)).invoke(context);
    }

    @Test
    void targetInvokedWithDifferentApiVersion() {
        OperationInvoker target = mock(OperationInvoker.class);
        Object expectedV2 = new Object();
        Object expectedV3 = new Object();
        InvocationContext contextV2 = new InvocationContext(mock(SecurityContext.class), Collections.emptyMap(),
                new ApiVersionArgumentResolver(ApiVersion.V2));
        InvocationContext contextV3 = new InvocationContext(mock(SecurityContext.class), Collections.emptyMap(),
                new ApiVersionArgumentResolver(ApiVersion.V3));
        given(target.invoke(contextV2)).willReturn(expectedV2);
        given(target.invoke(contextV3)).willReturn(expectedV3);
        CachingOperationInvoker invoker = new CachingOperationInvoker(target, CACHE_TTL);
        Object response = invoker.invoke(contextV2);
        assertThat(response).isSameAs(expectedV2);
        verify(target, times(1)).invoke(contextV2);
        Object cachedResponse = invoker.invoke(contextV3);
        assertThat(cachedResponse).isNotSameAs(response);
        verify(target, times(1)).invoke(contextV3);
    }

    // Counts invocations; the Mono body runs lazily on subscription.
    private static class MonoOperationInvoker implements OperationInvoker {

        static AtomicInteger invocations = new AtomicInteger();

        @Override
        public Mono<String> invoke(InvocationContext context) {
            return Mono.fromCallable(() -> {
                invocations.incrementAndGet();
                return "test";
            });
        }

    }

    // Counts invocations; hide() prevents operator fusion from skipping doFirst.
    private static class FluxOperationInvoker implements OperationInvoker {

        static AtomicInteger invocations = new AtomicInteger();

        @Override
        public Flux<String> invoke(InvocationContext context) {
            return Flux.just("spring", "boot").hide().doFirst(invocations::incrementAndGet);
        }

    }

    // Resolves a fixed ApiVersion for the invocation context.
    private static final class ApiVersionArgumentResolver implements OperationArgumentResolver {

        private final ApiVersion apiVersion;

        private ApiVersionArgumentResolver(ApiVersion apiVersion) {
            this.apiVersion = apiVersion;
        }

        @SuppressWarnings("unchecked")
        @Override
        public <T> T resolve(Class<T> type) {
            return (T) this.apiVersion;
        }

        @Override
        public boolean canResolve(Class<?> type) {
            return ApiVersion.class.equals(type);
        }

    }

}
/**
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.deephacks.confit.internal.hbase;

import org.deephacks.confit.internal.hbase.BytesUtils.ReferenceList;
import org.deephacks.confit.internal.hbase.HBeanKeyValue.HBeanReader;
import org.deephacks.confit.internal.hbase.HBeanKeyValue.HBeanWriter;
import org.junit.Test;

import java.lang.reflect.Array;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Random;

import static org.junit.Assert.*;
import static org.junit.matchers.JUnitMatchers.hasItems;

/**
 * Round-trip test for {@link HBeanWriter}/{@link HBeanReader}: writes randomly
 * generated single values and collections of every supported type, then reads
 * them back and verifies each one.
 *
 * NOTE(review): this class uses {@code RandomStringUtils} (commons-lang) which
 * is not imported in the visible portion of the file — presumably imported or
 * wildcard-resolved elsewhere; verify against the full source.
 */
public class HBeanKeyValueTest {

    // Single shared RNG. The previous code built a new Random() at every call
    // site, which is wasteful and can produce correlated seeds in tight loops.
    private static final Random RANDOM = new Random();

    // Upper bound on the number of elements in each random collection.
    private int randomFieldsNum = 100;

    @Test
    @SuppressWarnings("unchecked")
    public void test_random_write_read() throws Exception {
        for (int i = 0; i < 1000; i++) {
            ArrayList<Integer> ids = new ArrayList<>();
            HBeanWriter writer = new HBeanWriter();

            // single values, one per supported type
            int stringId = RANDOM.nextInt();
            ids.add(stringId);
            String stringValue = RandomStringUtils.randomAlphanumeric(255);
            writer.putValue(stringId, stringValue);

            int booleanId = RANDOM.nextInt();
            ids.add(booleanId);
            boolean booleanValue = random(2) == 0;
            writer.putValue(booleanId, booleanValue);

            int longId = RANDOM.nextInt();
            ids.add(longId);
            long longValue = RANDOM.nextLong();
            writer.putValue(longId, longValue);

            int intId = RANDOM.nextInt();
            ids.add(intId);
            int intValue = RANDOM.nextInt();
            writer.putValue(intId, intValue);

            int shortId = RANDOM.nextInt();
            ids.add(shortId);
            short shortValue = (short) (RANDOM.nextInt() % Short.MAX_VALUE);
            writer.putValue(shortId, shortValue);

            int byteId = RANDOM.nextInt();
            ids.add(byteId);
            byte byteValue = (byte) (RANDOM.nextInt() % Byte.MAX_VALUE);
            writer.putValue(byteId, byteValue);

            int floatId = RANDOM.nextInt();
            ids.add(floatId);
            float floatValue = RANDOM.nextFloat();
            writer.putValue(floatId, floatValue);

            int doubleId = RANDOM.nextInt();
            ids.add(doubleId);
            double doubleValue = RANDOM.nextDouble();
            writer.putValue(doubleId, doubleValue);

            // reference list
            int referencesId = RANDOM.nextInt();
            ids.add(referencesId);
            Collection<String> instances = randomStrings();
            ReferenceList list = new ReferenceList(referencesId);
            list.getInstances().addAll(instances);
            writer.putValue(referencesId, list);

            // collections, one per supported type
            int stringsId = RANDOM.nextInt();
            ids.add(stringsId);
            Collection<String> strings = randomStrings();
            writer.putValues(stringsId, strings, String.class);

            int booleansId = RANDOM.nextInt();
            ids.add(booleansId);
            Collection<Boolean> booleans = randomBooleans();
            writer.putValues(booleansId, booleans, Boolean.class);

            int longsId = RANDOM.nextInt();
            ids.add(longsId);
            Collection<Long> longs = randomLongs();
            writer.putValues(longsId, longs, Long.class);

            int intsId = RANDOM.nextInt();
            ids.add(intsId);
            Collection<Integer> integers = randomInts();
            writer.putValues(intsId, integers, Integer.class);

            int shortsId = RANDOM.nextInt();
            ids.add(shortsId);
            Collection<Short> shorts = randomShorts();
            writer.putValues(shortsId, shorts, Short.class);

            int bytesId = RANDOM.nextInt();
            ids.add(bytesId);
            Collection<Byte> bytes = randomBytes();
            writer.putValues(bytesId, bytes, Byte.class);

            int floatsId = RANDOM.nextInt();
            ids.add(floatsId);
            Collection<Float> floats = randomFloats();
            writer.putValues(floatsId, floats, Float.class);

            int doublesId = RANDOM.nextInt();
            ids.add(doublesId);
            Collection<Double> doubles = randomDoubles();
            writer.putValues(doublesId, doubles, Double.class);

            // serialize and read everything back
            byte[] bean = writer.write();
            HBeanReader reader = new HBeanReader(bean);
            Integer[] idsArray = convert(reader.getIds(), Integer.class);
            assertThat(ids, hasItems(idsArray));
            assertEquals(booleanValue, reader.getValue(booleanId));
            assertEquals(longValue, reader.getValue(longId));
            assertEquals(intValue, reader.getValue(intId));
            assertEquals(shortValue, reader.getValue(shortId));
            assertEquals(byteValue, reader.getValue(byteId));
            assertEquals(doubleValue, reader.getValue(doubleId));
            assertEquals(floatValue, reader.getValue(floatId));
            ReferenceList referenceList = (ReferenceList) reader.getValue(referencesId);
            List<String> instanceList = referenceList.getInstances();
            assertThat(instances, hasItems((String[]) instanceList.toArray(new String[instanceList.size()])));
            Collection<String> collection = (Collection<String>) reader.getValue(stringsId);
            assertThat(strings, hasItems(collection.toArray(new String[collection.size()])));
            Boolean[] boolArray = ((Collection<Boolean>) reader.getValue(booleansId)).toArray(new Boolean[0]);
            assertThat(booleans, hasItems(boolArray));
            Long[] longArray = ((Collection<Long>) reader.getValue(longsId)).toArray(new Long[0]);
            assertThat(longs, hasItems(longArray));
            Integer[] intArray = ((Collection<Integer>) reader.getValue(intsId)).toArray(new Integer[0]);
            assertThat(integers, hasItems(intArray));
            Short[] shortArray = ((Collection<Short>) reader.getValue(shortsId)).toArray(new Short[0]);
            assertThat(shorts, hasItems(shortArray));
            Byte[] byteArray = ((Collection<Byte>) reader.getValue(bytesId)).toArray(new Byte[0]);
            assertThat(bytes, hasItems(byteArray));
            Double[] doubleArray = ((Collection<Double>) reader.getValue(doublesId)).toArray(new Double[0]);
            assertThat(doubles, hasItems(doubleArray));
            Float[] floatArray = ((Collection<Float>) reader.getValue(floatsId)).toArray(new Float[0]);
            assertThat(floats, hasItems(floatArray));
        }
    }

    /**
     * Copies a (possibly primitive) array into a boxed array of the given
     * wrapper class using reflection.
     */
    @SuppressWarnings("unchecked")
    public static <T> T[] convert(final Object array, Class<T> wrapperClass) {
        final int arrayLength = Array.getLength(array);
        final T[] result = (T[]) Array.newInstance(wrapperClass, arrayLength);
        for (int i = 0; i < arrayLength; i++) {
            Array.set(result, i, Array.get(array, i));
        }
        return result;
    }

    public Collection<String> randomStrings() {
        Collection<String> values = new ArrayList<>();
        for (int i = 0; i < random(randomFieldsNum); i++) {
            values.add(RandomStringUtils.randomAlphabetic(random(255)));
        }
        return values;
    }

    public Collection<Boolean> randomBooleans() {
        Collection<Boolean> values = new ArrayList<>();
        boolean[] possible = new boolean[] { true, false };
        for (int i = 0; i < random(randomFieldsNum); i++) {
            values.add(possible[random(2)]);
        }
        return values;
    }

    public Collection<Long> randomLongs() {
        Collection<Long> values = new ArrayList<>();
        for (int i = 0; i < random(randomFieldsNum); i++) {
            values.add(random(Long.MAX_VALUE));
        }
        return values;
    }

    public Collection<Integer> randomInts() {
        Collection<Integer> values = new ArrayList<>();
        for (int i = 0; i < random(randomFieldsNum); i++) {
            values.add(random(Integer.MAX_VALUE));
        }
        return values;
    }

    public Collection<Short> randomShorts() {
        Collection<Short> values = new ArrayList<>();
        for (int i = 0; i < random(randomFieldsNum); i++) {
            values.add((short) random(Integer.MAX_VALUE));
        }
        return values;
    }

    public Collection<Byte> randomBytes() {
        Collection<Byte> values = new ArrayList<>();
        for (int i = 0; i < random(randomFieldsNum); i++) {
            values.add((byte) random(Integer.MAX_VALUE));
        }
        return values;
    }

    public Collection<Float> randomFloats() {
        Collection<Float> values = new ArrayList<>();
        for (int i = 0; i < random(randomFieldsNum); i++) {
            values.add(RANDOM.nextFloat());
        }
        return values;
    }

    public Collection<Double> randomDoubles() {
        Collection<Double> values = new ArrayList<>();
        for (int i = 0; i < random(randomFieldsNum); i++) {
            values.add(RANDOM.nextDouble());
        }
        return values;
    }

    /**
     * Returns a uniform value in [0, max).
     *
     * BUG FIX: the previous implementation used Math.abs(nextInt()) % max,
     * which returns a NEGATIVE value when nextInt() yields Integer.MIN_VALUE
     * (Math.abs(Integer.MIN_VALUE) == Integer.MIN_VALUE). nextInt(max) is
     * both correct and uniform.
     */
    public int random(int max) {
        return RANDOM.nextInt(max);
    }

    /**
     * Returns a non-negative value in [0, max).
     *
     * BUG FIX: Math.floorMod avoids the Math.abs(Long.MIN_VALUE) overflow of
     * the previous Math.abs(nextLong()) % max implementation.
     */
    public long random(long max) {
        return Math.floorMod(RANDOM.nextLong(), max);
    }
}
/*
 * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.glacier.model;

import java.io.Serializable;

import com.amazonaws.AmazonWebServiceRequest;

/**
 * Container for the parameters to the
 * {@link com.amazonaws.services.glacier.AmazonGlacier#uploadMultipartPart(UploadMultipartPartRequest)
 * UploadMultipartPart operation}.
 * <p>
 * Uploads one part of an archive. Parts may be uploaded in any order, in
 * parallel, up to 10,000 parts per multipart upload. Amazon Glacier rejects a
 * part when the SHA256 tree hash does not match the uploaded data, when a
 * non-final part does not match the size declared in the corresponding
 * InitiateMultipartUpload request, or when the byte range does not align with
 * the declared part size. The operation is idempotent: re-uploading the same
 * part overwrites the previously uploaded data.
 * </p>
 * <p>
 * See <a href="http://docs.aws.amazon.com/amazonglacier/latest/dev/uploading-archive-mpu.html">
 * Uploading Large Archives in Parts (Multipart Upload)</a> and
 * <a href="http://docs.aws.amazon.com/amazonglacier/latest/dev/api-upload-part.html">Upload
 * Part</a> in the <i>Amazon Glacier Developer Guide</i>.
 * </p>
 *
 * @see com.amazonaws.services.glacier.AmazonGlacier#uploadMultipartPart(UploadMultipartPartRequest)
 */
public class UploadMultipartPartRequest extends AmazonWebServiceRequest implements Serializable, Cloneable {

    /**
     * The AWS account ID of the vault owner, or a single '<code>-</code>'
     * (hyphen) to use the account associated with the signing credentials.
     * When an account ID is used it must not contain hyphens.
     */
    private String accountId;

    /** The name of the vault. */
    private String vaultName;

    /** The upload ID of the multipart upload. */
    private String uploadId;

    /** The SHA256 tree hash of the data being uploaded. */
    private String checksum;

    /**
     * The byte range of the assembled archive covered by this part, in RFC
     * 2616 format (for example <code>Content-Range:bytes 0-4194303/*</code>).
     */
    private String range;

    /**
     * The data to upload. The stream must support mark/reset so the signature
     * can be computed before the payload is read.
     */
    private java.io.InputStream body;

    /**
     * Default constructor; populate the object afterwards with the setter or
     * fluent with... methods.
     */
    public UploadMultipartPartRequest() {
    }

    /**
     * Constructs a request with every field except the account ID.
     *
     * @param vaultName the name of the vault
     * @param uploadId the upload ID of the multipart upload
     * @param checksum the SHA256 tree hash of the data being uploaded
     * @param range the byte range of this part in RFC 2616 format
     * @param body the data to upload
     */
    public UploadMultipartPartRequest(String vaultName, String uploadId, String checksum, String range,
            java.io.InputStream body) {
        setVaultName(vaultName);
        setUploadId(uploadId);
        setChecksum(checksum);
        setRange(range);
        setBody(body);
    }

    /**
     * Constructs a fully populated request.
     *
     * @param accountId the AWS account ID of the vault owner, or '-'
     * @param vaultName the name of the vault
     * @param uploadId the upload ID of the multipart upload
     * @param checksum the SHA256 tree hash of the data being uploaded
     * @param range the byte range of this part in RFC 2616 format
     * @param body the data to upload
     */
    public UploadMultipartPartRequest(String accountId, String vaultName, String uploadId, String checksum,
            String range, java.io.InputStream body) {
        setAccountId(accountId);
        setVaultName(vaultName);
        setUploadId(uploadId);
        setChecksum(checksum);
        setRange(range);
        setBody(body);
    }

    /**
     * @return the AWS account ID of the vault owner, or '-'
     */
    public String getAccountId() {
        return accountId;
    }

    /**
     * @param accountId the AWS account ID of the vault owner, or '-'
     */
    public void setAccountId(String accountId) {
        this.accountId = accountId;
    }

    /**
     * Fluent variant of {@link #setAccountId(String)}.
     *
     * @param accountId the AWS account ID of the vault owner, or '-'
     * @return this request, for call chaining
     */
    public UploadMultipartPartRequest withAccountId(String accountId) {
        this.accountId = accountId;
        return this;
    }

    /**
     * @return the name of the vault
     */
    public String getVaultName() {
        return vaultName;
    }

    /**
     * @param vaultName the name of the vault
     */
    public void setVaultName(String vaultName) {
        this.vaultName = vaultName;
    }

    /**
     * Fluent variant of {@link #setVaultName(String)}.
     *
     * @param vaultName the name of the vault
     * @return this request, for call chaining
     */
    public UploadMultipartPartRequest withVaultName(String vaultName) {
        this.vaultName = vaultName;
        return this;
    }

    /**
     * @return the upload ID of the multipart upload
     */
    public String getUploadId() {
        return uploadId;
    }

    /**
     * @param uploadId the upload ID of the multipart upload
     */
    public void setUploadId(String uploadId) {
        this.uploadId = uploadId;
    }

    /**
     * Fluent variant of {@link #setUploadId(String)}.
     *
     * @param uploadId the upload ID of the multipart upload
     * @return this request, for call chaining
     */
    public UploadMultipartPartRequest withUploadId(String uploadId) {
        this.uploadId = uploadId;
        return this;
    }

    /**
     * @return the SHA256 tree hash of the data being uploaded
     */
    public String getChecksum() {
        return checksum;
    }

    /**
     * @param checksum the SHA256 tree hash of the data being uploaded
     */
    public void setChecksum(String checksum) {
        this.checksum = checksum;
    }

    /**
     * Fluent variant of {@link #setChecksum(String)}.
     *
     * @param checksum the SHA256 tree hash of the data being uploaded
     * @return this request, for call chaining
     */
    public UploadMultipartPartRequest withChecksum(String checksum) {
        this.checksum = checksum;
        return this;
    }

    /**
     * @return the byte range of this part in RFC 2616 format
     */
    public String getRange() {
        return range;
    }

    /**
     * @param range the byte range of this part in RFC 2616 format
     */
    public void setRange(String range) {
        this.range = range;
    }

    /**
     * Fluent variant of {@link #setRange(String)}.
     *
     * @param range the byte range of this part in RFC 2616 format
     * @return this request, for call chaining
     */
    public UploadMultipartPartRequest withRange(String range) {
        this.range = range;
        return this;
    }

    /**
     * @return the data to upload; must support mark/reset for signing
     */
    public java.io.InputStream getBody() {
        return body;
    }

    /**
     * @param body the data to upload; must support mark/reset for signing
     */
    public void setBody(java.io.InputStream body) {
        this.body = body;
    }

    /**
     * Fluent variant of {@link #setBody(java.io.InputStream)}.
     *
     * @param body the data to upload; must support mark/reset for signing
     * @return this request, for call chaining
     */
    public UploadMultipartPartRequest withBody(java.io.InputStream body) {
        this.body = body;
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return a string representation of this object
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getAccountId() != null)
            sb.append("AccountId: " + getAccountId() + ",");
        if (getVaultName() != null)
            sb.append("VaultName: " + getVaultName() + ",");
        if (getUploadId() != null)
            sb.append("UploadId: " + getUploadId() + ",");
        if (getChecksum() != null)
            sb.append("Checksum: " + getChecksum() + ",");
        if (getRange() != null)
            sb.append("Range: " + getRange() + ",");
        if (getBody() != null)
            sb.append("Body: " + getBody() );
        sb.append("}");
        return sb.toString();
    }

    @Override
    public int hashCode() {
        // Objects.hash performs the same 31-based fold (seed 1, null -> 0) as
        // the hand-written accumulation it replaces, so values are unchanged.
        return java.util.Objects.hash(getAccountId(), getVaultName(), getUploadId(), getChecksum(), getRange(),
                getBody());
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (!(obj instanceof UploadMultipartPartRequest))
            return false;
        UploadMultipartPartRequest other = (UploadMultipartPartRequest) obj;
        return java.util.Objects.equals(getAccountId(), other.getAccountId())
                && java.util.Objects.equals(getVaultName(), other.getVaultName())
                && java.util.Objects.equals(getUploadId(), other.getUploadId())
                && java.util.Objects.equals(getChecksum(), other.getChecksum())
                && java.util.Objects.equals(getRange(), other.getRange())
                && java.util.Objects.equals(getBody(), other.getBody());
    }

    @Override
    public UploadMultipartPartRequest clone() {
        return (UploadMultipartPartRequest) super.clone();
    }
}
/*
 * Copyright 2012-2013 inBloom, Inc. and its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.slc.sli.sif.agent;

import java.io.File;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;

import openadk.library.ADK;
import openadk.library.ADKException;
import openadk.library.ADKFlags;
import openadk.library.ElementDef;
import openadk.library.SubscriptionOptions;
import openadk.library.Zone;
import openadk.library.common.CommonDTD;
import openadk.library.datamodel.DatamodelDTD;
import openadk.library.hrfin.HrfinDTD;
import openadk.library.student.StudentDTD;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;

import org.slc.sli.sif.subscriber.SifSubscriber;

/**
 * Manages a SIFAgent and its SifSubscriber: initializes the ADK, starts the
 * agent on a delay after webapp startup, subscribes the configured zone to
 * the configured SIF data types, and unregisters the agent on shutdown.
 */
public class AgentManager {

    private SifAgent agent;

    @Autowired
    private SifSubscriber subscriber;

    // Directory for ADK logging; combined with adkLogFile to form the
    // "adk.log.file" system property consumed by ADK.initialize().
    @Value("${log.path}")
    private String logPath;

    @Value("${sli.sif-agent.adk.logFile}")
    private String adkLogFile;

    private String subscriberZoneName;

    // Entries are of the form "<DtdName>.<ELEMENT>", e.g. "StudentDTD.STUDENTPERSONAL".
    private List<String> subscribeTypeList;

    private List<ElementDef> subscribeList;

    // Fixed: previously obtained for SifAgent.class, which mislabeled every
    // log line emitted by this class.
    private static final Logger LOG = LoggerFactory.getLogger(AgentManager.class);

    // Delay before connecting to the zone, so the rest of the webapp is up first.
    private static final long SETUP_DELAY_SECONDS = 5;

    /**
     * Schedules the delayed setup of the agent. This is delayed so that the zone
     * connect will only happen after this (and every other) webapp has been
     * completely initialized.
     *
     * @throws Exception if the setup task cannot be scheduled
     */
    @PostConstruct
    public void postConstruct() throws Exception {
        final ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(1);
        Runnable run = new Runnable() {
            @Override
            public void run() {
                try {
                    setup();
                } catch (Exception e) {
                    // Log before rethrowing: an exception thrown from a scheduled
                    // task is captured in its (unread) Future and would otherwise
                    // vanish silently.
                    LOG.error("Failed to set up the SIF agent", e);
                    throw new RuntimeException(e);
                }
            }
        };
        scheduler.schedule(run, SETUP_DELAY_SECONDS, TimeUnit.SECONDS);
        // Allow the already-scheduled task to run, then release the thread
        // (previously the executor was never shut down, leaking its thread).
        scheduler.shutdown();
    }

    /**
     * Initializes the ADK, the agent, and the zone subscription.
     *
     * @throws Exception if ADK initialization, agent startup, or zone
     *         subscription fails
     */
    public void setup() throws Exception {
        // set the adk.log.file property, which is used in ADK.initialize()
        System.setProperty("adk.log.file", logPath + File.separator + adkLogFile);
        ADK.initialize();
        ADK.debug = ADK.DBG_ALL;
        agent.startAgent();
        subscribeToZone();
    }

    /**
     * Unregisters the agent from the zone.
     *
     * @throws ADKException if the agent fails to shut down cleanly
     */
    @PreDestroy
    public void cleanup() throws ADKException {
        agent.shutdown(ADKFlags.PROV_NONE);
    }

    /**
     * Creates a subscriber and adds it to the configured zone. Each entry in
     * {@code subscribeTypeList} ("Dtd.ELEMENT") is resolved against the element
     * mappings of the four supported DTDs.
     *
     * @throws ADKException if the zone subscription fails
     */
    private void subscribeToZone() throws ADKException {
        Map<String, Map<String, ElementDef>> dtdMap = new HashMap<String, Map<String, ElementDef>>();

        Map<String, ElementDef> studentDtdMap = new HashMap<String, ElementDef>();
        new StudentDTD().addElementMappings(studentDtdMap);
        dtdMap.put("StudentDTD", studentDtdMap);

        Map<String, ElementDef> datamodelDtdMap = new HashMap<String, ElementDef>();
        new DatamodelDTD().addElementMappings(datamodelDtdMap);
        dtdMap.put("DatamodelDTD", datamodelDtdMap);

        Map<String, ElementDef> commonDtdMap = new HashMap<String, ElementDef>();
        new CommonDTD().addElementMappings(commonDtdMap);
        dtdMap.put("CommonDTD", commonDtdMap);

        Map<String, ElementDef> hrfinDtdMap = new HashMap<String, ElementDef>();
        new HrfinDTD().addElementMappings(hrfinDtdMap);
        dtdMap.put("HrfinDTD", hrfinDtdMap);

        Zone zone = agent.getZoneFactory().getZone(subscriberZoneName);
        for (String dataTypeString : subscribeTypeList) {
            String dtdType = dataTypeString.split("\\.", 2)[0];
            String subType = dataTypeString.split("\\.", 2)[1];
            ElementDef dataTypeDef = dtdMap.get(dtdType).get(subType);
            zone.setSubscriber(subscriber, dataTypeDef, new SubscriptionOptions());
            LOG.info("Subscribed zone " + subscriberZoneName + " to SIF ADK datatype " + dataTypeString);
        }
    }

    public void setSubscribeTypeList(List<String> subscribeTypeList) {
        this.subscribeTypeList = subscribeTypeList;
    }

    public List<String> getSubscribeTypeList() {
        return this.subscribeTypeList;
    }

    public void setSubscriberZoneName(String subscriberZoneName) {
        this.subscriberZoneName = subscriberZoneName;
    }

    public String getSubscriberZoneName() {
        return this.subscriberZoneName;
    }

    public void setAgent(SifAgent agent) {
        this.agent = agent;
    }

    public SifAgent getAgent() {
        return this.agent;
    }

    public String getLogPath() {
        return logPath;
    }

    public void setLogPath(String logPath) {
        this.logPath = logPath;
    }

    public String getAdkLogFile() {
        return adkLogFile;
    }

    public void setAdkLogFile(String adkLogFile) {
        this.adkLogFile = adkLogFile;
    }

    public List<ElementDef> getSubscribeList() {
        return subscribeList;
    }

    public void setSubscribeList(List<ElementDef> subscribeList) {
        this.subscribeList = subscribeList;
    }
}
/** * */ package com.fusioncharts.exporter.beans; import java.util.HashMap; import java.util.Iterator; import com.fusioncharts.exporter.FusionChartsExportHelper; import com.fusioncharts.exporter.error.LOGMESSAGE; /** * Contains all the information required during the export process like chart * metadata, chart image data and export parameters * * @author Infosoft Global (P) Ltd. * */ public class ExportBean { private ChartMetadata metadata; private String stream; private HashMap<String, Object> exportParameters = null; /** * Initializes the default values for the export parameters */ public ExportBean() { exportParameters = new HashMap<String, Object>(); // Default Values exportParameters.put(ExportParameterNames.EXPORTFILENAME.toString(), "FusionCharts"); exportParameters.put(ExportParameterNames.EXPORTACTION.toString(), "download"); exportParameters.put( ExportParameterNames.EXPORTTARGETWINDOW.toString(), "_self"); exportParameters.put(ExportParameterNames.EXPORTFORMAT.toString(), "PDF"); } public ExportBean(String stream, ChartMetadata metadata, HashMap<String, Object> exportParameters) { super(); this.stream = stream; this.metadata = metadata; this.exportParameters = exportParameters; } /** * Adds a parameter and value to the existing exportParameters map. * * @param exportParameters * the exportParameters to set */ public void addExportParameter(String parameterName, Object value) { exportParameters.put(parameterName.toLowerCase(), value); } /** * Adds all parameters and values from the given HashMap to the existing * exportParameters map. 
* * @param exportParameters * the exportParameters to set */ public void addExportParametersFromMap( HashMap<String, String> moreParameters) { exportParameters.putAll(moreParameters); } /** * @return the exportParameters */ public HashMap<String, Object> getExportParameters() { return new HashMap<String, Object>(exportParameters); } /** * @return the exportParameter Value */ public Object getExportParameterValue(String key) { return exportParameters.get(key); } /** * @return the metadata */ public ChartMetadata getMetadata() { return metadata; } /** * Returns the metadata as a querystring * * @param filePath * - path of the file on the server. * @param isError * - whether error is present or not. * @param isHTML * - whether to generate in html format or not. * @return - String containing the metadata to be shown. */ public String getMetadataAsQueryString(String filePath, boolean isError, boolean isHTML) { String queryParams = ""; if (isError) { queryParams += (isHTML ? "<BR>" : "&") + "width=0"; queryParams += (isHTML ? "<BR>" : "&") + "height=0"; } else { queryParams += (isHTML ? "<BR>" : "&") + "width=" + metadata.getWidth(); queryParams += (isHTML ? "<BR>" : "&") + "height=" + metadata.getHeight(); } // queryParams+="&bgColor="+metadata.getBgColor(); queryParams += (isHTML ? "<BR>" : "&") + "DOMId=" + metadata.getDOMId(); if (filePath != null) { queryParams += (isHTML ? "<BR>" : "&") + "fileName=" + filePath; } return queryParams; } /** * Returns the metadata as a querystring * * @return - String containing the metadata to be shown. 
*/ public String getParametersAndMetadataAsQueryString() { String queryParams = ""; queryParams += "?width=" + metadata.getWidth(); queryParams += "&height=" + metadata.getHeight(); queryParams += "&bgcolor=" + metadata.getBgColor(); Iterator<String> iter = exportParameters.keySet().iterator(); String key; String value; while (iter.hasNext()) { key = iter.next(); value = (String) exportParameters.get(key); queryParams += "&" + key + "=" + value; } return queryParams; } /** * @return the stream */ public String getStream() { return stream; } /** * Whether the response is going to be html or plain text * * @return */ public boolean isHTMLResponse() { boolean isHTML = false; String exportAction = (String) getExportParameterValue(ExportParameterNames.EXPORTACTION .toString()); if (exportAction != null && exportAction.equals("download")) isHTML = true; return isHTML; } /** * @param exportParameters * the exportParameters to set */ public void setExportParameters(HashMap<String, Object> exportParameters) { this.exportParameters = exportParameters; } /** * @param metadata * the metadata to set */ public void setMetadata(ChartMetadata metadata) { this.metadata = metadata; } /** * @param stream * the stream to set */ public void setStream(String stream) { this.stream = stream; } /** * Validates the ExportBean to check if all the required values are present. 
* */ public LogMessageSetVO validate() { LogMessageSetVO errorSetVO = new LogMessageSetVO(); if (getMetadata().getWidth() == -1 || getMetadata().getHeight() == -1 || getMetadata().getWidth() == 0 || getMetadata().getHeight() == 0) { // If Width/Height parameter is not sent, the ChartMetadata will // have width/height as -1 // Raise Error E101 - Width/Height not found errorSetVO.addError(LOGMESSAGE.E101); } if (getMetadata().getBgColor() == null) { // Background color not available errorSetVO.addWarning(LOGMESSAGE.W513); } if (getStream() == null) { // If image data not available // Raise Error E100 errorSetVO.addError(LOGMESSAGE.E100); } if (exportParameters == null || exportParameters.isEmpty()) { // export data does not contain parameters errorSetVO.addWarning(LOGMESSAGE.W102); } // Export format should exist in the supported handlerAssociationsMap else { String exportFormat = (String) getExportParameterValue(ExportParameterNames.EXPORTFORMAT .toString()); boolean exportFormatSupported = FusionChartsExportHelper .getHandlerAssociationsMap().containsKey( exportFormat.toUpperCase()); if (!exportFormatSupported) { errorSetVO.addError(LOGMESSAGE.E517); } } return errorSetVO; } }
/*
 * Copyright 2011-2017 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.lambdaworks.redis.protocol;

import java.net.ConnectException;
import java.net.SocketAddress;
import java.util.Set;
import java.util.concurrent.CancellationException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.function.Supplier;

import com.lambdaworks.redis.ClientOptions;
import com.lambdaworks.redis.RedisChannelInitializer;
import com.lambdaworks.redis.RedisCommandTimeoutException;
import com.lambdaworks.redis.internal.LettuceAssert;
import com.lambdaworks.redis.internal.LettuceSets;

import io.netty.bootstrap.Bootstrap;
import io.netty.channel.Channel;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelPipeline;
import io.netty.channel.ChannelPromise;
import io.netty.util.Timeout;
import io.netty.util.Timer;
import io.netty.util.internal.logging.InternalLogger;
import io.netty.util.internal.logging.InternalLoggerFactory;

/**
 * Performs a single reconnection attempt: connects via the {@link Bootstrap},
 * waits for the pipeline's {@link RedisChannelInitializer} to finish channel
 * initialization, and exposes the combined outcome as one {@link ChannelFuture}
 * guarded by a timer-based timeout.
 *
 * @author Mark Paluch
 */
class ReconnectionHandler {

    private static final InternalLogger logger = InternalLoggerFactory.getInstance(ReconnectionHandler.class);

    // Throwable types treated as "execution" failures in reconnect(): these are
    // propagated directly without triggering command-reset / reconnect-suspend
    // handling. See isExecutionException(Throwable).
    private static final Set<Class<?>> EXECUTION_EXCEPTION_TYPES = LettuceSets.unmodifiableSet(TimeoutException.class,
            CancellationException.class, RedisCommandTimeoutException.class, ConnectException.class);

    // Resolved on every attempt so address changes (e.g. DNS) are picked up.
    private final Supplier<SocketAddress> socketAddressSupplier;
    private final Bootstrap bootstrap;
    private final ClientOptions clientOptions;
    private final Timer timer;
    // Executor used to run the timeout action off the timer thread.
    private final ExecutorService reconnectWorkers;

    private TimeUnit timeoutUnit = TimeUnit.SECONDS;
    private long timeout = 60;

    // Most recent init future; cancelled by prepareClose(). volatile because it
    // is written by reconnect() and read from other threads.
    private volatile ChannelFuture currentFuture;
    private volatile boolean reconnectSuspended;

    ReconnectionHandler(ClientOptions clientOptions, Bootstrap bootstrap, Supplier<SocketAddress> socketAddressSupplier,
            Timer timer, ExecutorService reconnectWorkers) {

        LettuceAssert.notNull(socketAddressSupplier, "SocketAddressSupplier must not be null");
        LettuceAssert.notNull(bootstrap, "Bootstrap must not be null");
        LettuceAssert.notNull(clientOptions, "ClientOptions must not be null");
        LettuceAssert.notNull(timer, "Timer must not be null");
        LettuceAssert.notNull(reconnectWorkers, "ExecutorService must not be null");

        this.socketAddressSupplier = socketAddressSupplier;
        this.bootstrap = bootstrap;
        this.clientOptions = clientOptions;
        this.timer = timer;
        this.reconnectWorkers = reconnectWorkers;
    }

    /**
     * Initiate reconnect and return a {@link ChannelFuture} for synchronization. The resulting future either succeeds or fails.
     * It can be {@link ChannelFuture#cancel(boolean) canceled} to interrupt reconnection and channel initialization. A failed
     * {@link ChannelFuture} will close the channel.
     *
     * @return reconnect {@link ChannelFuture}.
     */
    protected ChannelFuture reconnect() {

        SocketAddress remoteAddress = socketAddressSupplier.get();

        logger.debug("Reconnecting to Redis at {}", remoteAddress);

        // Two-stage handshake: connectFuture tracks the TCP connect,
        // initFuture additionally tracks channel initialization. initFuture is
        // what callers observe.
        ChannelFuture connectFuture = bootstrap.connect(remoteAddress);
        ChannelPromise initFuture = connectFuture.channel().newPromise();

        // If initialization fails (or is cancelled), abort the connect and
        // close the channel.
        initFuture.addListener((ChannelFuture it) -> {

            if (it.cause() != null) {

                connectFuture.cancel(true);
                close(it.channel());
            }
        });

        connectFuture.addListener((ChannelFuture it) -> {

            if (it.cause() != null) {
                initFuture.tryFailure(it.cause());
                return;
            }

            ChannelPipeline pipeline = it.channel().pipeline();
            RedisChannelInitializer channelInitializer = pipeline.get(RedisChannelInitializer.class);
            CommandHandler<?, ?> commandHandler = pipeline.get(CommandHandler.class);

            // Both handlers are required for a usable connection; their
            // absence indicates a mis-assembled pipeline.
            if (channelInitializer == null) {
                initFuture.tryFailure(new IllegalStateException(
                        "Reconnection attempt without a RedisChannelInitializer in the channel pipeline"));
                return;
            }

            if (commandHandler == null) {
                initFuture.tryFailure(new IllegalStateException(
                        "Reconnection attempt without a CommandHandler in the channel pipeline"));
                return;
            }

            channelInitializer.channelInitialized().whenComplete(
                    (state, throwable) -> {

                        if (throwable != null) {

                            // "Execution" exceptions (timeouts, cancellation,
                            // connect failures) are propagated as-is.
                            if (isExecutionException(throwable)) {
                                initFuture.tryFailure(throwable);
                                return;
                            }

                            if (clientOptions.isCancelCommandsOnReconnectFailure()) {
                                commandHandler.reset();
                            }

                            if (clientOptions.isSuspendReconnectOnProtocolFailure()) {

                                logger.error("Disabling autoReconnect due to initialization failure", throwable);
                                setReconnectSuspended(true);
                            }

                            initFuture.tryFailure(throwable);

                            return;
                        }

                        // NOTE(review): debug-enabled check gates which message
                        // is logged, but both branches log at INFO — presumably
                        // intentional (extra channel detail only when debug is
                        // on); confirm before changing.
                        if (logger.isDebugEnabled()) {
                            logger.info("Reconnected to {}, Channel {}", remoteAddress,
                                    ChannelLogDescriptor.logDescriptor(it.channel()));
                        } else {
                            logger.info("Reconnected to {}", remoteAddress);
                        }

                        initFuture.trySuccess();
                    });
        });

        // Fail the attempt if it exceeds the configured timeout. The action is
        // handed to reconnectWorkers so it does not run on the timer thread,
        // unless the workers are already shut down.
        Runnable timeoutAction = () -> {
            initFuture.tryFailure(new TimeoutException(String.format("Reconnection attempt exceeded timeout of %d %s ",
                    timeout, timeoutUnit)));
        };

        Timeout timeoutHandle = timer.newTimeout(it -> {

            if (connectFuture.isDone() && initFuture.isDone()) {
                return;
            }

            if (reconnectWorkers.isShutdown()) {
                timeoutAction.run();
                return;
            }

            reconnectWorkers.submit(timeoutAction);
        }, this.timeout, timeoutUnit);

        initFuture.addListener(it -> timeoutHandle.cancel());

        return this.currentFuture = initFuture;
    }

    private void close(Channel channel) {
        if (channel != null) {
            channel.close();
        }
    }

    public boolean isReconnectSuspended() {
        return reconnectSuspended;
    }

    public void setReconnectSuspended(boolean reconnectSuspended) {
        this.reconnectSuspended = reconnectSuspended;
    }

    public TimeUnit getTimeoutUnit() {
        return timeoutUnit;
    }

    public void setTimeoutUnit(TimeUnit timeoutUnit) {
        this.timeoutUnit = timeoutUnit;
    }

    public long getTimeout() {
        return timeout;
    }

    public void setTimeout(long timeout) {
        this.timeout = timeout;
    }

    /**
     * Cancels an in-flight reconnection attempt, if any, prior to closing the
     * handler's owner.
     */
    public void prepareClose() {

        ChannelFuture currentFuture = this.currentFuture;
        if (currentFuture != null && !currentFuture.isDone()) {
            currentFuture.cancel(true);
        }
    }

    /**
     * @param throwable
     * @return {@literal true} if {@code throwable} is an execution {@link Exception}.
     */
    public static boolean isExecutionException(Throwable throwable) {

        for (Class<?> type : EXECUTION_EXCEPTION_TYPES) {
            if (type.isAssignableFrom(throwable.getClass())) {
                return true;
            }
        }

        return false;
    }

    ClientOptions getClientOptions() {
        return clientOptions;
    }
}
/**
 * Copyright 2011-2019 Asakusa Framework Team.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.asakusafw.dmdl.windgate.jdbc.driver;

import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;

import java.math.BigDecimal;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.Statement;
import java.text.MessageFormat;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.function.Function;
import java.util.stream.Collectors;

import org.apache.hadoop.io.Text;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;

import com.asakusafw.dmdl.java.emitter.driver.ObjectDriver;
import com.asakusafw.dmdl.windgate.common.driver.GeneratorTesterRoot;
import com.asakusafw.runtime.value.Date;
import com.asakusafw.runtime.value.DateTime;
import com.asakusafw.windgate.core.vocabulary.DataModelJdbcSupport;
import com.asakusafw.windgate.core.vocabulary.DataModelJdbcSupport.DataModelPreparedStatement;
import com.asakusafw.windgate.core.vocabulary.DataModelJdbcSupport.DataModelResultSet;

/**
 * Test for {@link JdbcSupportEmitter}.
 */
public class JdbcSupportEmitterTest extends GeneratorTesterRoot {

    /**
     * Test database.
     */
    @Rule
    public H2Resource h2 = new H2Resource("testing");

    /**
     * Initializes the test.
     * @throws Exception if some errors were occurred
     */
    @Before
    public void setUp() throws Exception {
        emitDrivers.add(new JdbcSupportEmitter());
        emitDrivers.add(new ObjectDriver());
    }

    /**
     * A simple case: a single-column model round-trips through the generated
     * JDBC support class.
     * @throws Exception if failed
     */
    @Test
    public void simple() throws Exception {
        ModelLoader loaded = generateJava("simple");
        ModelWrapper model = loaded.newModel("Simple");
        DataModelJdbcSupport<?> support = (DataModelJdbcSupport<?>) loaded.newObject("jdbc", "SimpleJdbcSupport");
        assertThat(support.getSupportedType(), is((Object) model.unwrap().getClass()));

        // Column-name validation: exact single "VALUE" only.
        assertThat(support.isSupported(list("VALUE")), is(true));
        assertThat(support.isSupported(list("VALUE", "VALUE")), is(false));
        assertThat(support.isSupported(JdbcSupportEmitterTest.list()), is(false));
        assertThat(support.isSupported(list("INVALID")), is(false));
        assertThat(support.isSupported(list("VALUE", "INVALID")), is(false));

        assertThat(support.getColumnMap().keySet(), hasSize(1));
        assertThat(support.getColumnMap(), hasEntry("VALUE", "value"));

        DataModelJdbcSupport<Object> unsafe = unsafe(support);
        h2.executeFile("simple.sql");
        try (Connection conn = h2.open();
                PreparedStatement ps = conn.prepareStatement("INSERT INTO SIMPLE (VALUE) VALUES (?)")) {
            DataModelPreparedStatement<Object> p = unsafe.createPreparedStatementSupport(ps, list("VALUE"));
            model.set("value", new Text("Hello, world!"));
            p.setParameters(model.unwrap());
            ps.executeUpdate();

            // Read the row back through the generated ResultSet support.
            // (Previously ps was closed explicitly despite try-with-resources,
            // and the Statement/ResultSet were never closed.)
            try (Statement s = conn.createStatement();
                    ResultSet rs = s.executeQuery("SELECT VALUE FROM SIMPLE")) {
                DataModelResultSet<Object> r = unsafe.createResultSetSupport(rs, list("VALUE"));
                assertThat(r.next(model.unwrap()), is(true));
                assertThat(model.get("value"), is((Object) new Text("Hello, world!")));
                assertThat(r.next(model.unwrap()), is(false));
            }
        }
    }

    /**
     * All types: every supported column type round-trips, including all-null
     * and partially-set records.
     * @throws Exception if failed
     */
    @Test
    public void types() throws Exception {
        ModelLoader loaded = generateJava("types");
        ModelWrapper model = loaded.newModel("Types");
        DataModelJdbcSupport<?> support = (DataModelJdbcSupport<?>) loaded.newObject("jdbc", "TypesJdbcSupport");
        assertThat(support.getSupportedType(), is((Object) model.unwrap().getClass()));

        List<String> list = list(new String[] {
                "C_INT",
                "C_TEXT",
                "C_BOOLEAN",
                "C_BYTE",
                "C_SHORT",
                "C_LONG",
                "C_FLOAT",
                "C_DOUBLE",
                "C_DECIMAL",
                "C_DATE",
                "C_DATETIME",
        });
        assertThat(support.isSupported(list), is(true));
        assertThat(support.getColumnMap(), is(list.stream().collect(Collectors.toMap(
                Function.identity(), String::toLowerCase))));

        DataModelJdbcSupport<Object> unsafe = unsafe(support);
        h2.executeFile("types.sql");
        try (Connection conn = h2.open();
                PreparedStatement ps = conn.prepareStatement(MessageFormat.format(
                        "INSERT INTO TYPES ({0}) VALUES ({1})",
                        join(list),
                        join(Collections.nCopies(list.size(), "?"))))) {
            DataModelPreparedStatement<Object> p = unsafe.createPreparedStatementSupport(ps, list);

            // set nulls
            ModelWrapper nulls = loaded.newModel("Types");
            p.setParameters(nulls.unwrap());
            ps.executeUpdate();

            // text only
            ModelWrapper text = loaded.newModel("Types");
            text.set("c_text", new Text("Hello, world!"));
            p.setParameters(text.unwrap());
            ps.executeUpdate();

            // all types
            ModelWrapper all = loaded.newModel("Types");
            all.set("c_int", 100);
            all.set("c_text", new Text("Hello, DMDL world!"));
            all.set("c_boolean", true);
            all.set("c_byte", (byte) 64);
            all.set("c_short", (short) 1023);
            all.set("c_long", 100000L);
            all.set("c_float", 1.5f);
            // NOTE(review): c_double is set from a float literal (2.5f is
            // exactly representable, so the round-trip still matches) — verify
            // whether 2.5d was intended.
            all.set("c_double", 2.5f);
            all.set("c_decimal", new BigDecimal("3.1415"));
            all.set("c_date", new Date(2011, 9, 1));
            all.set("c_datetime", new DateTime(2011, 12, 31, 23, 59, 59));
            p.setParameters(all.unwrap());
            ps.executeUpdate();

            // Read all three rows back in insertion order and compare against
            // the originals. (Previously ps was closed explicitly despite
            // try-with-resources, and the Statement/ResultSet were never
            // closed.)
            try (Statement s = conn.createStatement();
                    ResultSet rs = s.executeQuery(MessageFormat.format(
                            "SELECT {0} FROM TYPES ORDER BY ORDINAL",
                            join(list)))) {
                DataModelResultSet<Object> r = unsafe.createResultSetSupport(rs, list);
                ModelWrapper buffer = loaded.newModel("Types");

                assertThat(r.next(buffer.unwrap()), is(true));
                assertThat(buffer.unwrap(), is(nulls.unwrap()));

                assertThat(r.next(buffer.unwrap()), is(true));
                assertThat(buffer.unwrap(), is(text.unwrap()));

                assertThat(r.next(buffer.unwrap()), is(true));
                assertThat(buffer.unwrap(), is(all.unwrap()));

                assertThat(r.next(buffer.unwrap()), is(false));
            }
        }
    }

    /**
     * Compile with no attributes: no JDBC support class should be emitted.
     * @throws Exception if failed
     */
    @Test
    public void no_attributes() throws Exception {
        ModelLoader loaded = generateJava("no_attributes");
        assertThat(loaded.exists("jdbc", "NoAttributesJdbcSupport"), is(false));
    }

    /**
     * Joins column names with {@code ", "}. Unlike the previous hand-rolled
     * loop, this also handles an empty list without throwing.
     */
    private String join(List<String> list) {
        return String.join(", ", list);
    }

    @SuppressWarnings("unchecked")
    private DataModelJdbcSupport<Object> unsafe(DataModelJdbcSupport<?> support) {
        return (DataModelJdbcSupport<Object>) support;
    }

    @SafeVarargs
    private static <T> List<T> list(T... values) {
        return Arrays.asList(values);
    }
}
// @@@ START COPYRIGHT @@@
//
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.
//
// @@@ END COPYRIGHT @@@

/* -*-java-*-
 * Filename : Messages.java
 */

package org.apache.trafodion.jdbc.t2;

import java.sql.*;
import java.util.MissingResourceException;
import java.util.Locale;
import java.util.ResourceBundle;
import java.util.PropertyResourceBundle;
import java.text.MessageFormat;

/**
 * Builds localized {@link SQLException} and {@link SQLWarning} instances for
 * the type-2 JDBC driver from the {@code SQLMXT2Messages} resource bundle.
 * Each message id resolves three bundle keys: {@code <id>_msg} (a
 * {@link MessageFormat} pattern), {@code <id>_sqlstate} and
 * {@code <id>_sqlcode}. Exceptions carry negative SQLCODE values, warnings
 * positive ones. When the bundle (or a key) is missing, a generic message
 * listing the id and arguments is produced instead.
 */
class Messages {

    /**
     * Creates a localized SQLException for the given message id.
     *
     * @param msgLocale        locale to resolve the bundle with; {@code null}
     *                         means the JVM default locale
     * @param messageId        bundle key prefix identifying the message
     * @param messageArguments arguments substituted into the message pattern;
     *                         may be {@code null} or empty
     * @return a SQLException with localized text, SQLSTATE and a negative
     *         SQLCODE ({@code "HY000"}/{@code -1} if the bundle is missing)
     */
    static SQLException createSQLException(Locale msgLocale, String messageId,
            Object[] messageArguments) {
        if (JdbcDebugCfg.entryActive)
            debug[methodId_createSQLException].methodEntry();
        try {
            Locale currentLocale = (msgLocale == null) ? Locale.getDefault() : msgLocale;
            int sqlcode;
            try {
                // R321: property file renamed to SQLMXT2Messages_en.properties
                PropertyResourceBundle messageBundle = (PropertyResourceBundle) ResourceBundle
                        .getBundle("SQLMXT2Messages", currentLocale);
                MessageFormat formatter = new MessageFormat("");
                formatter.setLocale(currentLocale);
                formatter.applyPattern(messageBundle.getString(messageId + "_msg"));
                String message = formatter.format(messageArguments);
                String sqlState = messageBundle.getString(messageId + "_sqlstate");
                String sqlcodeStr = messageBundle.getString(messageId + "_sqlcode");
                if (sqlcodeStr != null) {
                    try {
                        // Exceptions report the SQLCODE as a negative value.
                        sqlcode = -Integer.parseInt(sqlcodeStr);
                    } catch (NumberFormatException e1) {
                        sqlcode = -1;
                    }
                } else {
                    sqlcode = -1;
                }
                return new SQLException(message, sqlState, sqlcode);
            } catch (MissingResourceException e) {
                // Bundle or key not found: fall back to a generic message.
                return new SQLException(
                        fallbackMessage(messageId, messageArguments), "HY000", -1);
            }
        } finally {
            if (JdbcDebugCfg.entryActive)
                debug[methodId_createSQLException].methodExit();
        }
    }

    /**
     * Creates a localized SQLWarning for the given message id.
     * Mirrors {@link #createSQLException} but keeps the SQLCODE positive and
     * falls back to SQLSTATE {@code "01000"}/SQLCODE {@code 1}.
     */
    static SQLWarning createSQLWarning(Locale msgLocale, String messageId,
            Object[] messageArguments) {
        if (JdbcDebugCfg.entryActive)
            debug[methodId_createSQLWarning].methodEntry();
        try {
            Locale currentLocale = (msgLocale == null) ? Locale.getDefault() : msgLocale;
            int sqlcode;
            try {
                // R321: property file renamed to SQLMXT2Messages_en.properties
                PropertyResourceBundle messageBundle = (PropertyResourceBundle) ResourceBundle
                        .getBundle("SQLMXT2Messages", currentLocale);
                MessageFormat formatter = new MessageFormat("");
                formatter.setLocale(currentLocale);
                formatter.applyPattern(messageBundle.getString(messageId + "_msg"));
                String message = formatter.format(messageArguments);
                String sqlState = messageBundle.getString(messageId + "_sqlstate");
                String sqlcodeStr = messageBundle.getString(messageId + "_sqlcode");
                if (sqlcodeStr != null) {
                    try {
                        // Warnings report the SQLCODE as a positive value.
                        sqlcode = Integer.parseInt(sqlcodeStr);
                    } catch (NumberFormatException e1) {
                        sqlcode = 1;
                    }
                } else {
                    sqlcode = 1;
                }
                return new SQLWarning(message, sqlState, sqlcode);
            } catch (MissingResourceException e) {
                // Bundle or key not found: fall back to a generic message.
                return new SQLWarning(
                        fallbackMessage(messageId, messageArguments), "01000", 1);
            }
        } finally {
            if (JdbcDebugCfg.entryActive)
                debug[methodId_createSQLWarning].methodExit();
        }
    }

    /**
     * Builds the plain-text message used when the resource bundle (or the
     * message id) cannot be found, by concatenating the id and the arguments.
     * <p>
     * BUG FIX: the previous inline {@code while (true)} loop threw
     * {@code ArrayIndexOutOfBoundsException} for an empty (non-null) argument
     * array and {@code NullPointerException} for null argument elements; it
     * was also duplicated in both factory methods.
     */
    private static String fallbackMessage(String messageId, Object[] messageArguments) {
        StringBuilder message = new StringBuilder("The message id: ").append(messageId);
        if (messageArguments != null && messageArguments.length > 0) {
            message.append(" With parameters: ");
            for (int i = 0; i < messageArguments.length; i++) {
                if (i > 0)
                    message.append(",");
                message.append(messageArguments[i]); // null-safe: appends "null"
            }
        }
        return message.toString();
    }

    /**
     * Throws a SQLException stating that the named feature is unsupported.
     */
    static void throwUnsupportedFeatureException(Locale locale, String s)
            throws SQLException {
        if (JdbcDebugCfg.entryActive)
            debug[methodId_throwUnsupportedFeatureException].methodEntry();
        try {
            Object[] messageArguments = new Object[1];
            messageArguments[0] = s;
            throw Messages.createSQLException(locale, "unsupported_feature",
                    messageArguments);
        } finally {
            if (JdbcDebugCfg.entryActive)
                debug[methodId_throwUnsupportedFeatureException].methodExit();
        }
    }

    /**
     * Throws a SQLException stating that the named method is deprecated.
     */
    static void throwDeprecatedMethodException(Locale locale, String s)
            throws SQLException {
        if (JdbcDebugCfg.entryActive)
            debug[methodId_throwDeprecatedMethodException].methodEntry();
        try {
            Object[] messageArguments = new Object[1];
            messageArguments[0] = s;
            throw Messages.createSQLException(locale, "deprecated_method",
                    messageArguments);
        } finally {
            if (JdbcDebugCfg.entryActive)
                debug[methodId_throwDeprecatedMethodException].methodExit();
        }
    }

    // Per-method slots in the debug trace table (now final — they are
    // constants and were never reassigned).
    private static final int methodId_createSQLException = 0;
    private static final int methodId_createSQLWarning = 1;
    private static final int methodId_throwUnsupportedFeatureException = 2;
    private static final int methodId_throwDeprecatedMethodException = 3;
    private static final int totalMethodIds = 4;
    // Only populated when entry tracing is enabled; guarded by
    // JdbcDebugCfg.entryActive at every use site.
    private static JdbcDebug[] debug;

    static {
        String className = "Messages";
        if (JdbcDebugCfg.entryActive) {
            debug = new JdbcDebug[totalMethodIds];
            debug[methodId_createSQLException] = new JdbcDebug(className, "createSQLException");
            debug[methodId_createSQLWarning] = new JdbcDebug(className, "createSQLWarning");
            debug[methodId_throwUnsupportedFeatureException] = new JdbcDebug(className, "throwUnsupportedFeatureException");
            debug[methodId_throwDeprecatedMethodException] = new JdbcDebug(className, "throwDeprecatedMethodException");
        }
    }
}
/* * Copyright 2015 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.atomix.copycat.server.state; import io.atomix.catalyst.serializer.Serializer; import io.atomix.catalyst.util.Assert; import io.atomix.catalyst.util.concurrent.Scheduled; import io.atomix.catalyst.util.concurrent.ThreadContext; import io.atomix.copycat.client.Command; import io.atomix.copycat.client.Operation; import io.atomix.copycat.client.error.ApplicationException; import io.atomix.copycat.server.Commit; import io.atomix.copycat.server.StateMachineExecutor; import org.slf4j.Logger; import java.time.Duration; import java.time.Instant; import java.util.*; import java.util.concurrent.CompletableFuture; import java.util.concurrent.Executor; import java.util.function.Consumer; import java.util.function.Function; import java.util.function.Supplier; /** * Raft server state machine executor. 
* * @author <a href="http://github.com/kuujo>Jordan Halterman</a> */ class ServerStateMachineExecutor implements StateMachineExecutor { private final ThreadContext executor; private final ServerStateMachineContext context; private final List<ServerScheduledTask> tasks = new ArrayList<>(); private final List<ServerScheduledTask> complete = new ArrayList<>(); private final Map<Class, Function> operations = new HashMap<>(); private Function allOperation; private long timestamp; ServerStateMachineExecutor(ServerStateMachineContext context, ThreadContext executor) { this.executor = executor; this.context = context; } /** * Returns the executor timestamp. */ long timestamp() { return timestamp; } @Override public ServerStateMachineContext context() { return context; } @Override public Logger logger() { return executor.logger(); } @Override public Serializer serializer() { return executor.serializer(); } @Override public Executor executor() { return executor.executor(); } /** * Executes the given command commit on the state machine. */ <T extends Operation<U>, U> CompletableFuture<U> executeCommand(Commit<T> commit, Command.ConsistencyLevel consistency) { CompletableFuture<U> future = new CompletableFuture<>(); executor.executor().execute(() -> { context.update(commit.index(), commit.time(), consistency); executeOperation(commit, future); }); return future; } /** * Executes the given query commit on the state machine. */ <T extends Operation<U>, U> CompletableFuture<U> executeQuery(Commit<T> commit) { CompletableFuture<U> future = new CompletableFuture<>(); executor.executor().execute(() -> { context.update(commit.index(), commit.time(), null); executeOperation(commit, future); }); return future; } /** * Executes an operation. */ @SuppressWarnings("unchecked") private <T extends Operation<U>, U> void executeOperation(Commit commit, CompletableFuture<U> future) { // Get the function registered for the operation. 
If no function is registered, attempt to // use a global function if available. Function function = operations.get(commit.type()); if (function == null) { // If no operation function was found for the class, try to find an operation function // registered with a parent class. for (Map.Entry<Class, Function> entry : operations.entrySet()) { if (entry.getKey().isAssignableFrom(commit.type())) { function = entry.getValue(); break; } } // If a parent operation function was found, store the function for future reference. if (function != null) { operations.put(commit.type(), function); } } // If no operation function was found, use the all operation and store it as the permanent operation. if (function == null) { function = allOperation; if (function != null) { operations.put(commit.type(), function); } } if (function == null) { future.completeExceptionally(new IllegalStateException("unknown state machine operation: " + commit.type())); } else { // Execute the operation. If the operation return value is a Future, await the result, // otherwise immediately complete the execution future. try { future.complete((U) function.apply(commit)); } catch (Exception e) { future.completeExceptionally(new ApplicationException("An application error occurred", e)); } } } /** * Executes scheduled callbacks based on the provided time. */ void tick(long timestamp) { this.timestamp = Math.max(this.timestamp, timestamp); // Only create an iterator if there are actually tasks scheduled. if (!tasks.isEmpty()) { // Iterate through scheduled tasks until we reach a task that has not met its scheduled time. // The tasks list is sorted by time on insertion. 
Iterator<ServerScheduledTask> iterator = tasks.iterator(); while (iterator.hasNext()) { ServerScheduledTask task = iterator.next(); if (task.complete(this.timestamp)) { executor.executor().execute(() -> { context.update(context.version(), Instant.ofEpochMilli(task.time), Command.ConsistencyLevel.SEQUENTIAL); task.execute(); }); complete.add(task); iterator.remove(); } else { break; } } // Iterate through tasks that were completed and reschedule them. for (ServerScheduledTask task : complete) { task.reschedule(); } complete.clear(); } } @Override public CompletableFuture<Void> execute(Runnable callback) { return executor.execute(callback); } @Override public <T> CompletableFuture<T> execute(Supplier<T> callback) { return executor.execute(callback); } @Override public Scheduled schedule(Duration delay, Runnable callback) { Assert.state(context.consistency() != null, "callbacks can only be scheduled during command execution"); return new ServerScheduledTask(callback, delay.toMillis()).schedule(); } @Override public Scheduled schedule(Duration initialDelay, Duration interval, Runnable callback) { Assert.state(context.consistency() != null, "callbacks can only be scheduled during command execution"); return new ServerScheduledTask(callback, initialDelay.toMillis(), interval.toMillis()).schedule(); } @Override public StateMachineExecutor register(Function<Commit<? 
extends Operation<?>>, ?> callback) { allOperation = Assert.notNull(callback, "callback"); return this; } @Override public <T extends Operation<Void>> StateMachineExecutor register(Class<T> type, Consumer<Commit<T>> callback) { Assert.notNull(type, "type"); Assert.notNull(callback, "callback"); operations.put(type, (Function<Commit<T>, Void>) commit -> { callback.accept(commit); return null; }); return this; } @Override public <T extends Operation<U>, U> StateMachineExecutor register(Class<T> type, Function<Commit<T>, U> callback) { Assert.notNull(type, "type"); Assert.notNull(callback, "callback"); operations.put(type, callback); return this; } @Override public void close() { executor.close(); } /** * Scheduled task. */ private class ServerScheduledTask implements Scheduled { private final long delay; private final long interval; private final Runnable callback; private long time; private ServerScheduledTask(Runnable callback, long delay) { this(callback, delay, 0); } private ServerScheduledTask(Runnable callback, long delay, long interval) { this.delay = delay; this.interval = interval; this.callback = callback; this.time = context.clock().instant().toEpochMilli() + delay; } /** * Schedules the task. */ private Scheduled schedule() { // Perform binary search to insert the task at the appropriate position in the tasks list. if (tasks.isEmpty()) { tasks.add(this); } else { int l = 0; int u = tasks.size() - 1; int i; while (true) { i = (u + l) / 2; long t = tasks.get(i).time; if (t == time) { tasks.add(i, this); return this; } else if (t < time) { l = i + 1; if (l > u) { tasks.add(i + 1, this); return this; } } else { u = i - 1; if (l > u) { tasks.add(i, this); return this; } } } } return this; } /** * Reschedules the task. */ private void reschedule() { if (interval > 0) { time = timestamp + delay; schedule(); } } /** * Returns a boolean value indicating whether the task delay has been met. 
*/ private boolean complete(long timestamp) { return timestamp > time; } /** * Executes the task. */ private synchronized void execute() { callback.run(); } @Override public synchronized void cancel() { tasks.remove(this); } } }
package com.sap.mlt.xliff12.impl.element.structural; import java.text.MessageFormat; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.List; import org.w3c.dom.Element; import com.sap.mlt.xliff12.api.attribute.Approved; import com.sap.mlt.xliff12.api.attribute.Id; import com.sap.mlt.xliff12.api.attribute.MimeType; import com.sap.mlt.xliff12.api.attribute.NonXliffAttribute; import com.sap.mlt.xliff12.api.attribute.PhaseName; import com.sap.mlt.xliff12.api.attribute.Reformat; import com.sap.mlt.xliff12.api.attribute.ResName; import com.sap.mlt.xliff12.api.attribute.ResType; import com.sap.mlt.xliff12.api.attribute.Translate; import com.sap.mlt.xliff12.api.attribute.Ts; import com.sap.mlt.xliff12.api.base.Attribute; import com.sap.mlt.xliff12.api.base.Node; import com.sap.mlt.xliff12.api.element.header.Note; import com.sap.mlt.xliff12.api.element.namedgroup.ContextGroup; import com.sap.mlt.xliff12.api.element.namedgroup.CountGroup; import com.sap.mlt.xliff12.api.element.namedgroup.PropGroup; import com.sap.mlt.xliff12.api.element.nonxliff.NonXliffElement; import com.sap.mlt.xliff12.api.element.structural.BinSource; import com.sap.mlt.xliff12.api.element.structural.BinTarget; import com.sap.mlt.xliff12.api.element.structural.BinUnit; import com.sap.mlt.xliff12.api.element.structural.Source; import com.sap.mlt.xliff12.api.element.structural.TransUnit; import com.sap.mlt.xliff12.api.exception.ConstraintViolationException; import com.sap.mlt.xliff12.impl.attribute.ReformatYesNoImpl; import com.sap.mlt.xliff12.impl.attribute.TranslateImpl; import com.sap.mlt.xliff12.impl.base.ElementImpl; import com.sap.mlt.xliff12.impl.base.XliffElementImpl; import com.sap.mlt.xliff12.impl.element.header.NoteImpl; import com.sap.mlt.xliff12.impl.element.namedgroup.ContextGroupImpl; import com.sap.mlt.xliff12.impl.element.namedgroup.CountGroupImpl; import com.sap.mlt.xliff12.impl.element.namedgroup.PropGroupImpl; import 
com.sap.mlt.xliff12.impl.element.nonxliff.NonXliffElementImpl;
import com.sap.mlt.xliff12.impl.util.Assert;
import com.sap.mlt.xliff12.impl.util.NodeIterator;

/**
 * Implementation of the XLIFF 1.2 {@code <bin-unit>} element: a container for
 * binary data with exactly one mandatory {@code <bin-source>} child, an
 * optional {@code <bin-target>}, context elements (context/count/prop groups,
 * notes, trans-units) and arbitrary non-XLIFF extension elements.
 */
@SuppressWarnings("deprecation")
public class BinUnitImpl extends XliffElementImpl implements BinUnit {

    /**
     * Creates a new bin-unit with the mandatory attributes and child.
     */
    public BinUnitImpl(Id id, MimeType mimeType, BinSource binSource) {
        super(NAME);
        context = new ArrayList<Context>();
        nonXliffElements = new ArrayList<NonXliffElement>();
        setId(id);
        setMimeType(mimeType);
        setBinSource(binSource);
    }

    /**
     * Creates a bin-unit from a DOM element, validating attributes and
     * children.
     *
     * @throws ConstraintViolationException if the element violates the
     *         XLIFF 1.2 content model
     */
    public BinUnitImpl(org.w3c.dom.Element element)
            throws ConstraintViolationException {
        super(NAME, element);
    }

    private BinSource binSource;

    private BinTarget binTarget;

    private ArrayList<Context> context;

    private ArrayList<NonXliffElement> nonXliffElements;

    @Override
    protected void assertAttributesValid(Element element)
            throws ConstraintViolationException {
        // id and mime-type are mandatory; the remaining XLIFF attributes are
        // optional, non-XLIFF attributes are permitted.
        Assert.xliffAttrAvailable(element, Id.NAME, MimeType.NAME);
        Assert.xliffAttrRestricted(element, true, false, false, Id.NAME,
                MimeType.NAME, Approved.NAME, Translate.NAME, Reformat.NAME,
                Ts.NAME, PhaseName.NAME, ResType.NAME, ResName.NAME);
    }

    @Override
    protected void cloneChildrenFrom(ElementImpl elem) {
        BinUnitImpl source = (BinUnitImpl) elem;
        binSource = (BinSource) source.binSource.clone();
        attach(binSource);
        if (source.binTarget != null) {
            binTarget = (BinTarget) source.binTarget.clone();
            attach(binTarget);
        }
        context = new ArrayList<Context>();
        for (Context ctx : source.context) {
            context.add((Context) ctx.clone());
        }
        attach(context);
        nonXliffElements = new ArrayList<NonXliffElement>();
        for (NonXliffElement nonXliffElement : source.nonXliffElements) {
            nonXliffElements.add((NonXliffElement) nonXliffElement.clone());
        }
        attach(nonXliffElements);
    }

    @Override
    public List<? extends Node> getChildren() {
        ArrayList<Node> ret = new ArrayList<Node>();
        ret.add(binSource);
        if (binTarget != null) {
            ret.add(binTarget);
        }
        ret.addAll(context);
        ret.addAll(nonXliffElements);
        return ret;
    }

    @Override
    protected void processChildren(
            List<? extends org.w3c.dom.Node> elementsAndText)
            throws ConstraintViolationException {
        context = new ArrayList<Context>();
        nonXliffElements = new ArrayList<NonXliffElement>();
        NodeIterator iter = new NodeIterator(elementsAndText, true);
        while (iter.nextIsXliffElement()) {
            org.w3c.dom.Element element = iter.getXliffElement(BinSource.NAME,
                    BinTarget.NAME, ContextGroup.NAME, CountGroup.NAME,
                    PropGroup.NAME, Note.NAME, TransUnit.NAME);
            String name = element.getLocalName();
            if (BinSource.NAME.equals(name)) {
                if (binSource != null) {
                    String msg = MessageFormat
                            .format(
                                    "Only one <{0}> element is allowed per <{1}> element",
                                    BinSource.NAME, NAME);
                    throw new ConstraintViolationException(msg);
                }
                binSource = new BinSourceImpl(element);
            } else if (BinTarget.NAME.equals(name)) {
                if (binTarget != null) {
                    String msg = MessageFormat
                            .format(
                                    "Only one <{0}> element is allowed per <{1}> element",
                                    BinTarget.NAME, NAME);
                    throw new ConstraintViolationException(msg);
                }
                binTarget = new BinTargetImpl(element);
            } else if (ContextGroup.NAME.equals(name)) {
                context.add(new ContextGroupImpl(element));
            } else if (CountGroup.NAME.equals(name)) {
                context.add(new CountGroupImpl(element));
            } else if (PropGroup.NAME.equals(name)) {
                context.add(new PropGroupImpl(element));
            } else if (Note.NAME.equals(name)) {
                context.add(new NoteImpl(element));
            } else if (TransUnit.NAME.equals(name)) {
                context.add(new TransUnitImpl(element));
            }
        }
        // Everything after the recognized XLIFF children is kept as opaque
        // non-XLIFF extension content.
        while (iter.hasNext()) {
            nonXliffElements.add(new NonXliffElementImpl(iter
                    .getNonXliffElement()));
        }
        if (binSource == null) {
            // BUG FIX: the error message previously named <source>
            // (Source.NAME); a <bin-unit> requires a <bin-source> child.
            String msg = MessageFormat.format(
                    "A <{0}> element must contain a <{1}> element", NAME,
                    BinSource.NAME);
            throw new ConstraintViolationException(msg);
        }
        attach(binSource);
        attach(binTarget);
        attach(context);
        attach(nonXliffElements);
    }

    public Approved getApproved() {
        return (Approved) getXliffAttribute(Approved.NAME);
    }

    public BinSource getBinSource() {
        return binSource;
    }

    public BinTarget getBinTarget() {
        return binTarget;
    }

    public List<? extends Context> getContext() {
        return Collections.unmodifiableList(context);
    }

    public Id getId() {
        return (Id) getXliffAttribute(Id.NAME);
    }

    public MimeType getMimeType() {
        return (MimeType) getXliffAttribute(MimeType.NAME);
    }

    public Collection<NonXliffAttribute> getNonXliffAttributes() {
        return super.getNonXliffAttributes();
    }

    public List<NonXliffElement> getNonXliffElements() {
        return Collections.unmodifiableList(nonXliffElements);
    }

    public PhaseName getPhaseName() {
        return (PhaseName) getXliffAttribute(PhaseName.NAME);
    }

    public Reformat getReformat() {
        return (Reformat) getXliffAttribute(Reformat.NAME);
    }

    public ResName getResName() {
        return (ResName) getXliffAttribute(ResName.NAME);
    }

    public ResType getResType() {
        return (ResType) getXliffAttribute(ResType.NAME);
    }

    public Translate getTranslate() {
        return (Translate) getXliffAttribute(Translate.NAME);
    }

    /**
     * @deprecated
     */
    public com.sap.mlt.xliff12.api.attribute.Ts getTs() {
        return (com.sap.mlt.xliff12.api.attribute.Ts) getXliffAttribute(com.sap.mlt.xliff12.api.attribute.Ts.NAME);
    }

    public void setApproved(Approved approved) {
        // null clears the optional attribute
        if (approved == null) {
            clearXliffAttribute(Approved.NAME);
        } else {
            setAttribute(approved);
        }
    }

    public void setBinSource(BinSource binSource) {
        Assert.notNull(binSource, "binSource");
        Assert.isInstance(binSource, "binSource", BinSourceImpl.class);
        assertNotAttached(binSource);
        detach(this.binSource);
        this.binSource = binSource;
        attach(this.binSource);
    }

    public void setBinTarget(BinTarget binTarget) {
        // binTarget may be null (the child is optional)
        Assert.isInstance(binTarget, "binTarget", BinTargetImpl.class);
        assertNotAttached(binTarget);
        detach(this.binTarget);
        this.binTarget = binTarget;
        attach(this.binTarget);
    }

    public void setContext(List<? extends Context> context) {
        Assert.notNull(context, "context");
        Assert.areInstances(context, "context", TransUnitImpl.class,
                ContextGroupImpl.class, CountGroupImpl.class, NoteImpl.class,
                PropGroupImpl.class);
        assertNotAttached(context);
        detach(this.context);
        this.context = new ArrayList<Context>(context);
        attach(this.context);
    }

    public void setId(Id id) {
        Assert.notNull(id, "id");
        setAttribute(id);
    }

    public void setMimeType(MimeType mimeType) {
        Assert.notNull(mimeType, "mimeType");
        setAttribute(mimeType);
    }

    public void setNonXliffAttributes(
            Collection<NonXliffAttribute> nonXliffAttributes) {
        Assert.notNull(nonXliffAttributes, "nonXliffAttributes");
        // Replace wholesale: clear the previous set, then apply the new one.
        Collection<NonXliffAttribute> toDelete = getNonXliffAttributes();
        for (NonXliffAttribute attr : toDelete) {
            clearAttribute(attr.getNamespaceUri(), attr.getName());
        }
        for (NonXliffAttribute attr : nonXliffAttributes) {
            setAttribute(attr);
        }
    }

    public void setNonXliffElements(List<NonXliffElement> nonXliffElements) {
        Assert.notNull(nonXliffElements, "nonXliffElements");
        Assert.areInstances(nonXliffElements, "nonXliffElements",
                NonXliffElementImpl.class);
        assertNotAttached(nonXliffElements);
        detach(this.nonXliffElements);
        this.nonXliffElements = new ArrayList<NonXliffElement>(nonXliffElements);
        attach(this.nonXliffElements);
    }

    public void setPhaseName(PhaseName phaseName) {
        if (phaseName == null) {
            clearXliffAttribute(PhaseName.NAME);
        } else {
            setAttribute(phaseName);
        }
    }

    public void setReformat(Reformat reformat) {
        // null resets to the XLIFF default (reformat="yes")
        if (reformat == null) {
            setAttribute(new ReformatYesNoImpl(true));
        } else {
            setAttribute(reformat);
        }
    }

    public void setResName(ResName resName) {
        if (resName == null) {
            clearXliffAttribute(ResName.NAME);
        } else {
            setAttribute(resName);
        }
    }

    public void setResType(ResType resType) {
        if (resType == null) {
            clearXliffAttribute(ResType.NAME);
        } else {
            setAttribute(resType);
        }
    }

    public void setTranslate(Translate translate) {
        // null resets to the XLIFF default (translate="yes")
        if (translate == null) {
            setAttribute(new TranslateImpl(Translate.Value.YES));
        } else {
            setAttribute(translate);
        }
    }

    /**
     * @deprecated
     */
    public void setTs(com.sap.mlt.xliff12.api.attribute.Ts ts) {
        if (ts == null) {
            clearXliffAttribute(com.sap.mlt.xliff12.api.attribute.Ts.NAME);
        } else {
            setAttribute(ts);
        }
    }

    @Override
    protected Collection<? extends Attribute> getDefaultAttributes() {
        // XLIFF 1.2 defaults for <bin-unit>: translate="yes", reformat="yes".
        ArrayList<Attribute> defaults = new ArrayList<Attribute>();
        defaults.add(new TranslateImpl(Translate.Value.YES));
        defaults.add(new ReformatYesNoImpl(true));
        return defaults;
    }
}
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.openapi.editor.ex.util; import com.intellij.openapi.diagnostic.Attachment; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.editor.Document; import com.intellij.openapi.editor.RangeMarker; import com.intellij.openapi.editor.colors.EditorColorsScheme; import com.intellij.openapi.editor.colors.TextAttributesKey; import com.intellij.openapi.editor.event.DocumentEvent; import com.intellij.openapi.editor.highlighter.HighlighterClient; import com.intellij.openapi.editor.highlighter.HighlighterIterator; import com.intellij.openapi.editor.impl.DocumentImpl; import com.intellij.openapi.editor.markup.TextAttributes; import com.intellij.openapi.fileEditor.FileDocumentManager; import com.intellij.openapi.fileTypes.SyntaxHighlighter; import com.intellij.openapi.fileTypes.SyntaxHighlighterBase; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.Comparing; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.psi.tree.IElementType; import com.intellij.util.ArrayUtil; import com.intellij.util.SmartList; import com.intellij.util.containers.FactoryMap; import com.intellij.util.text.MergingCharSequence; import it.unimi.dsi.fastutil.ints.Int2IntMap; import it.unimi.dsi.fastutil.ints.Int2IntOpenHashMap; import it.unimi.dsi.fastutil.ints.IntArrayList; import it.unimi.dsi.fastutil.ints.IntList; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.*; public class LayeredLexerEditorHighlighter extends LexerEditorHighlighter { private static final Logger LOG = Logger.getInstance(LayeredLexerEditorHighlighter.class); private final Map<IElementType, LayerDescriptor> myTokensToLayer = new HashMap<>(); public LayeredLexerEditorHighlighter(@NotNull SyntaxHighlighter highlighter, 
@NotNull EditorColorsScheme scheme) { super(highlighter, scheme); } @NotNull @Override protected SegmentArrayWithData createSegments() { return new MappingSegments(createStorage()); } public synchronized void registerLayer(@NotNull IElementType tokenType, @NotNull LayerDescriptor layerHighlighter) { myTokensToLayer.put(tokenType, layerHighlighter); getSegments().removeAll(); } protected synchronized void unregisterLayer(@NotNull IElementType tokenType) { final LayerDescriptor layer = myTokensToLayer.remove(tokenType); if (layer != null) { getSegments().myLayerBuffers.remove(layer); getSegments().removeAll(); } } @NotNull @Override public MappingSegments getSegments() { return (MappingSegments)super.getSegments(); } private final class LightMapper { final Mapper mapper; final StringBuilder text = new StringBuilder(); final IntList lengths = new IntArrayList(); final List<IElementType> tokenTypes = new ArrayList<>(); final Int2IntMap index2Global = new Int2IntOpenHashMap(); private final String mySeparator; final int insertOffset; LightMapper(@NotNull Mapper mapper, int insertOffset) { this.mapper = mapper; mySeparator = mapper.mySeparator; this.insertOffset = insertOffset; } void addToken(@NotNull CharSequence tokenText, @NotNull IElementType tokenType, int globalIndex) { index2Global.put(tokenTypes.size(), globalIndex); text.append(mySeparator).append(tokenText); lengths.add(tokenText.length()); tokenTypes.add(tokenType); } void finish() { assert insertOffset >= 0; final DocumentImpl document = mapper.doc; document.insertString(insertOffset, text); int start = insertOffset; for (int i = 0; i < tokenTypes.size(); i++) { IElementType type = tokenTypes.get(i); final int len = lengths.getInt(i); start += mySeparator.length(); final int globalIndex = index2Global.get(i); MappedRange[] ranges = getSegments().myRanges; checkNull(type, ranges[globalIndex]); ranges[globalIndex] = new MappedRange(mapper, document.createRangeMarker(start, start + len), type); start += len; } 
} private void checkNull(@NotNull IElementType type, @Nullable MappedRange range) { if (range != null) { Document mainDocument = getDocument(); VirtualFile file = mainDocument == null ? null : FileDocumentManager.getInstance().getFile(mainDocument); LOG.error("Expected null range on " + type + ", found " + range + "; highlighter=" + getSyntaxHighlighter(), new Attachment(file != null ? file.getName() : "editorText.txt", myText.toString())); } } } @Override public void setText(@NotNull final CharSequence text) { if (updateLayers()) { resetText(text); } else { super.setText(text); } } @NotNull @Override protected TokenProcessor createTokenProcessor(int startIndex, SegmentArrayWithData segments, CharSequence text) { MappingSegments mappingSegments = (MappingSegments)segments; return new TokenProcessor() { final Map<Mapper, LightMapper> docTexts = FactoryMap.create(key -> { MappedRange predecessor = key.findPredecessor(startIndex, mappingSegments); return new LightMapper(key, predecessor != null ? 
predecessor.range.getEndOffset() : 0); }); @Override public void addToken(int tokenIndex, int startOffset, int endOffset, int data, @NotNull IElementType tokenType) { mappingSegments.setElementLight(tokenIndex, startOffset, endOffset, data); Mapper mapper = mappingSegments.getMappingDocument(tokenType); if (mapper != null) { docTexts.get(mapper).addToken(text.subSequence(startOffset, endOffset), tokenType, tokenIndex); } } @Override public void finish() { docTexts.keySet().forEach(mapper -> mappingSegments.freezeHighlighter(mapper)); for (final LightMapper mapper : docTexts.values()) { mapper.finish(); } } }; } protected boolean updateLayers() { return false; } protected boolean updateLayers(@NotNull DocumentEvent e) { return updateLayers(); } @SuppressWarnings("NonSynchronizedMethodOverridesSynchronizedMethod") @Override public void documentChanged(@NotNull DocumentEvent e) { // do NOT synchronize before updateLayers due to deadlock with PsiLock boolean changed = updateLayers(e); //noinspection SynchronizeOnThis synchronized (this) { if (changed) { super.setText(e.getDocument().getImmutableCharSequence()); } else { super.documentChanged(e); } } } @NotNull @Override public HighlighterIterator createIterator(int startOffset) { //noinspection SynchronizeOnThis synchronized (this) { return new LayeredHighlighterIteratorImpl(startOffset); } } @NotNull public HighlighterIterator createBaseIterator(int startOffset) { return super.createIterator(startOffset); } private final class MappingSegments extends SegmentArrayWithData { private MappedRange[] myRanges = new MappedRange[INITIAL_SIZE]; private final Map<LayerDescriptor, Mapper> myLayerBuffers = new HashMap<>(); private @Nullable Set<LazyLexerEditorHighlighter> myFreezedHighlighters; private MappingSegments(DataStorage o) { super(o); } @Nullable Mapper getMappingDocument(@NotNull IElementType token) { final LayerDescriptor descriptor = myTokensToLayer.get(token); if (descriptor == null) return null; Mapper mapper = 
myLayerBuffers.get(descriptor); if (mapper == null) { mapper = new Mapper(descriptor); myLayerBuffers.put(descriptor, mapper); } return mapper; } @Override public void removeAll() { if (mySegmentCount != 0) { Arrays.fill(myRanges, null); } myLayerBuffers.clear(); super.removeAll(); } @Override public void replace(int startIndex, int endIndex, @NotNull SegmentArrayWithData newData) { withFreezedHighlighters(() -> super.replace(startIndex, endIndex, newData)); } @Override public void setElementAt(int i, int startOffset, int endOffset, int data) { setElementLight(i, startOffset, endOffset, data); final MappedRange range = myRanges[i]; if (range != null) { freezeHighlighter(range.mapper); range.mapper.removeMapping(range); myRanges[i] = null; } updateMappingForToken(i); } private void setElementLight(final int i, final int startOffset, final int endOffset, final int data) { super.setElementAt(i, startOffset, endOffset, data); myRanges = LayeredLexerEditorHighlighter.reallocateArray(myRanges, i + 1); } @Override public void remove(int startIndex, int endIndex) { Map<Mapper, Integer> mins = FactoryMap.create(key -> Integer.MAX_VALUE); Map<Mapper, Integer> maxs = FactoryMap.create(key -> 0); for (int i = startIndex; i < endIndex; i++) { final MappedRange range = myRanges[i]; if (range != null && range.range.isValid()) { mins.put(range.mapper, Math.min(mins.get(range.mapper).intValue(), range.range.getStartOffset())); maxs.put(range.mapper, Math.max(maxs.get(range.mapper).intValue(), range.range.getEndOffset())); } myRanges[i] = null; } for (final Map.Entry<Mapper, Integer> entry : maxs.entrySet()) { Mapper mapper = entry.getKey(); freezeHighlighter(mapper); mapper.doc.deleteString(mins.get(mapper).intValue() - mapper.mySeparator.length(), entry.getValue().intValue()); } removeRange(myRanges, startIndex, endIndex); super.remove(startIndex, endIndex); } @Override protected void replace(int startOffset, @NotNull SegmentArrayWithData data, int len) { 
super.replace(startOffset, data, len); for (int i = startOffset; i < startOffset + len; i++) { updateMappingForToken(i); } } private MappedRange @NotNull [] insert(MappedRange @NotNull [] array, MappedRange @NotNull [] insertArray, int startIndex, int insertLength) { MappedRange[] newArray = LayeredLexerEditorHighlighter.reallocateArray(array, mySegmentCount + insertLength); if (startIndex < mySegmentCount) { System.arraycopy(newArray, startIndex, newArray, startIndex + insertLength, mySegmentCount - startIndex); } System.arraycopy(insertArray, 0, newArray, startIndex, insertLength); return newArray; } private <T> void removeRange(T @NotNull [] array, int startIndex, int endIndex) { if (endIndex < mySegmentCount) { System.arraycopy(array, endIndex, array, startIndex, mySegmentCount - endIndex); } Arrays.fill(array, mySegmentCount - (endIndex - startIndex), mySegmentCount, null); } @Override public void insert(@NotNull SegmentArrayWithData segmentArray, final int startIndex) { synchronized (LayeredLexerEditorHighlighter.this) { super.insert(segmentArray, startIndex); final int newCount = segmentArray.getSegmentCount(); final MappedRange[] newRanges = new MappedRange[newCount]; myRanges = insert(myRanges, newRanges, startIndex, newCount); int endIndex = startIndex + segmentArray.getSegmentCount(); TokenProcessor processor = createTokenProcessor(startIndex, getSegments(), myText); for (int i = startIndex; i < endIndex; i++) { final int data = getSegmentData(i); final IElementType token = getSegments().unpackTokenFromData(data); processor.addToken(i, getSegmentStart(i), getSegmentEnd(i), data, token); } processor.finish(); } } private void updateMappingForToken(final int i) { final int data = getSegmentData(i); final IElementType token = getSegments().unpackTokenFromData(data); final Mapper mapper = getMappingDocument(token); final MappedRange oldMapping = myRanges[i]; if (mapper != null) { freezeHighlighter(mapper); if (oldMapping != null) { if (oldMapping.mapper == 
mapper && oldMapping.outerToken == token) { mapper.updateMapping(i, oldMapping); } else { oldMapping.mapper.removeMapping(oldMapping); myRanges[i] = mapper.insertMapping(i, token); } } else { myRanges[i] = mapper.insertMapping(i, token); } } else { if (oldMapping != null) { freezeHighlighter(oldMapping.mapper); oldMapping.mapper.removeMapping(oldMapping); myRanges[i] = null; } } } private void withFreezedHighlighters(@NotNull Runnable action) { if (myFreezedHighlighters != null) { action.run(); return; } myFreezedHighlighters = new HashSet<>(); try { action.run(); } finally { myFreezedHighlighters.forEach(highlighter -> { try { highlighter.finishUpdate(); } catch (IllegalStateException e) { LOG.error(e.getMessage() + "\nLayer highlighter: " + highlighter.getSyntaxHighlighter().toString() + "\nTop level highlighter: " + LayeredLexerEditorHighlighter.this.getSyntaxHighlighter().toString(), e, new Attachment("layerTextAfterChange.txt", highlighter.myText.toString()), new Attachment("editorTextAfterChange.txt", myText.toString())); } }); myFreezedHighlighters = null; } } private void freezeHighlighter(@NotNull Mapper mapper) { if (myFreezedHighlighters != null && myFreezedHighlighters.add(mapper.highlighter)) { mapper.highlighter.beginUpdate(); } } } private final class Mapper implements HighlighterClient { private final DocumentImpl doc; private final LazyLexerEditorHighlighter highlighter; private final String mySeparator; private final Map<IElementType, TextAttributes> myAttributesMap = new HashMap<>(); private final Map<IElementType, TextAttributesKey[]> myKeysMap = new HashMap<>(); @NotNull private final SyntaxHighlighter mySyntaxHighlighter; private final TextAttributesKey myBackground; private Mapper(@NotNull LayerDescriptor descriptor) { doc = new DocumentImpl("", true); mySyntaxHighlighter = descriptor.getLayerHighlighter(); myBackground = descriptor.getBackgroundKey(); highlighter = new LazyLexerEditorHighlighter(mySyntaxHighlighter, getScheme()); mySeparator 
= descriptor.getTokenSeparator(); highlighter.setEditor(this); doc.addDocumentListener(highlighter); } @NotNull public TextAttributes getAttributes(IElementType tokenType) { TextAttributes attrs = myAttributesMap.get(tokenType); if (attrs == null) { TextAttributesKey[] keys = getAttributesKeys(tokenType); attrs = convertAttributes(keys); myAttributesMap.put(tokenType, attrs); } return attrs; } private TextAttributesKey @NotNull [] getAttributesKeys(IElementType tokenType) { return myKeysMap.computeIfAbsent(tokenType, type -> { return SyntaxHighlighterBase.pack(myBackground, mySyntaxHighlighter.getTokenHighlights(type)); }); } @NotNull public HighlighterIterator createIterator(@NotNull MappedRange mapper, int shift) { final int rangeStart = mapper.range.getStartOffset(); final int rangeEnd = mapper.range.getEndOffset(); return new LimitedRangeHighlighterIterator(highlighter.createIterator(rangeStart + shift), rangeStart, rangeEnd); } @Override public Project getProject() { return getClient().getProject(); } @Override public void repaint(int start, int end) { // TODO: map ranges to outer document } @Override public Document getDocument() { return LayeredLexerEditorHighlighter.this.getDocument(); } void resetCachedTextAttributes() { // after color scheme was changed we need to reset cached attributes myAttributesMap.clear(); } void updateMapping(final int tokenIndex, @NotNull MappedRange oldMapping) { CharSequence tokenText = getTokenText(tokenIndex); final int start = oldMapping.range.getStartOffset(); final int end = oldMapping.range.getEndOffset(); if (Comparing.equal(doc.getCharsSequence().subSequence(start, end), tokenText)) return; doc.replaceString(start, end, tokenText); final int newEnd = start + tokenText.length(); if (oldMapping.range.getStartOffset() != start || oldMapping.range.getEndOffset() != newEnd) { assert oldMapping.range.getDocument() == doc; oldMapping.range.dispose(); oldMapping.range = doc.createRangeMarker(start, newEnd); } } @NotNull private 
MappedRange insertMapping(int tokenIndex, @NotNull IElementType outerToken) { CharSequence tokenText = getTokenText(tokenIndex); final int length = tokenText.length(); MappedRange predecessor = findPredecessor(tokenIndex, getSegments()); int insertOffset = predecessor != null ? predecessor.range.getEndOffset() : 0; doc.insertString(insertOffset, new MergingCharSequence(mySeparator, tokenText)); insertOffset += mySeparator.length(); RangeMarker marker = doc.createRangeMarker(insertOffset, insertOffset + length); return new MappedRange(this, marker, outerToken); } @NotNull private CharSequence getTokenText(final int tokenIndex) { return myText.subSequence(getSegments().getSegmentStart(tokenIndex), getSegments().getSegmentEnd(tokenIndex)); } @Nullable MappedRange findPredecessor(int token, MappingSegments segments) { token--; while (token >= 0) { MappedRange mappedRange = segments.myRanges[token]; if (mappedRange != null && mappedRange.mapper == this) return mappedRange; token--; } return null; } private void removeMapping(@NotNull MappedRange mapping) { RangeMarker rangeMarker = mapping.range; if (rangeMarker.isValid()) { final int start = rangeMarker.getStartOffset(); final int end = rangeMarker.getEndOffset(); assert doc == rangeMarker.getDocument(); doc.deleteString(start - mySeparator.length(), end); rangeMarker.dispose(); } } } private static class MappedRange { private RangeMarker range; private final Mapper mapper; private final IElementType outerToken; MappedRange(@NotNull Mapper mapper, @NotNull RangeMarker range, @NotNull IElementType outerToken) { this.mapper = mapper; this.range = range; this.outerToken = outerToken; assert mapper.doc == range.getDocument(); } @Override @NonNls public String toString() { return "MappedRange{range=" + range + ", outerToken=" + outerToken + '}'; } } @Override public void setColorScheme(@NotNull EditorColorsScheme scheme) { super.setColorScheme(scheme); for (MappedRange mapping : getSegments().myRanges) { final Mapper mapper 
= mapping == null ? null : mapping.mapper; if (mapper != null) { mapper.resetCachedTextAttributes(); } } } @Override protected boolean hasAdditionalData(int segmentIndex) { return getSegments().myRanges[segmentIndex] != null; } private final class LayeredHighlighterIteratorImpl implements LayeredHighlighterIterator { private final HighlighterIterator myBaseIterator; private HighlighterIterator myLayerIterator; private int myLayerStartOffset; private Mapper myCurrentMapper; private LayeredHighlighterIteratorImpl(int offset) { myBaseIterator = createBaseIterator(offset); if (!myBaseIterator.atEnd()) { int shift = offset - myBaseIterator.getStart(); initLayer(shift); } } private void initLayer(final int shiftInToken) { if (myBaseIterator.atEnd()) { myLayerIterator = null; myCurrentMapper = null; return; } MappedRange mapping = getSegments().myRanges[((HighlighterIteratorImpl)myBaseIterator).currentIndex()]; if (mapping != null) { myCurrentMapper = mapping.mapper; myLayerIterator = myCurrentMapper.createIterator(mapping, shiftInToken); myLayerStartOffset = myBaseIterator.getStart() - mapping.range.getStartOffset(); } else { myCurrentMapper = null; myLayerIterator = null; } } @Override public TextAttributes getTextAttributes() { if (myCurrentMapper != null) { return myCurrentMapper.getAttributes(getTokenType()); } return myBaseIterator.getTextAttributes(); } @Override public TextAttributesKey @NotNull [] getTextAttributesKeys() { if (myCurrentMapper != null) { return myCurrentMapper.getAttributesKeys(getTokenType()); } return myBaseIterator.getTextAttributesKeys(); } @Override @NotNull public SyntaxHighlighter getActiveSyntaxHighlighter() { if (myCurrentMapper != null) { return myCurrentMapper.mySyntaxHighlighter; } return getSyntaxHighlighter(); } @Override public int getStart() { if (myLayerIterator != null) { return myLayerIterator.getStart() + myLayerStartOffset; } return myBaseIterator.getStart(); } @Override public int getEnd() { if (myLayerIterator != null) { 
return myLayerIterator.getEnd() + myLayerStartOffset; } return myBaseIterator.getEnd(); } @Override public IElementType getTokenType() { return myLayerIterator != null ? myLayerIterator.getTokenType() : myBaseIterator.getTokenType(); } @Override public void advance() { if (myLayerIterator != null) { myLayerIterator.advance(); if (!myLayerIterator.atEnd()) return; } myBaseIterator.advance(); initLayer(0); } @Override public void retreat() { if (myLayerIterator != null) { myLayerIterator.retreat(); if (!myLayerIterator.atEnd()) return; } myBaseIterator.retreat(); initLayer(myBaseIterator.atEnd() ? 0 : myBaseIterator.getEnd() - myBaseIterator.getStart() - 1); } @Override public boolean atEnd() { return myBaseIterator.atEnd(); } @Override public Document getDocument() { return myBaseIterator.getDocument(); } } private static MappedRange @NotNull [] reallocateArray(MappedRange @NotNull [] array, int index) { if (index < array.length) return array; return ArrayUtil.realloc(array, SegmentArray.calcCapacity(array.length, index), MappedRange[]::new); } /** * The layered lexer editor highlighter can issue high volume of small document changes to it's layers' highlighters. * Some of the changes might cause full re-lexing within the layer highlighter leading to huge * performance issues. LazyLexerEditorHighlighter caches and merges all of the document updates and applies * them in a batch heavily improving performance in large documents. 
*/ private static class LazyLexerEditorHighlighter extends LexerEditorHighlighter { private boolean inUpdate = false; private List<DocumentUpdate> updates; LazyLexerEditorHighlighter(@NotNull SyntaxHighlighter highlighter, @NotNull EditorColorsScheme scheme) { super(highlighter, scheme); } public void beginUpdate() { inUpdate = true; updates = new SmartList<>(); } public void finishUpdate() { inUpdate = false; if (updates.isEmpty()) { updates = null; return; } sortUpdates(); mergeUpdates(); final Document document = updates.get(0).document; final int documentSize = document.getTextLength(); int processedOffset = -1; for (DocumentUpdate event : updates) { if (event.offset + event.newLength < processedOffset) { continue; } processedOffset = super.incrementalUpdate(event.offset, event.oldLength, event.newLength, document); if (processedOffset >= documentSize) { break; } } updates = null; } @Override int incrementalUpdate(int eventOffset, int eventOldLength, int eventNewLength, Document document) { if (inUpdate) { if (!mergeUpdate(updates, eventOffset, eventOldLength, eventNewLength)) { updates.add(new DocumentUpdate(eventOffset, eventOldLength, eventNewLength, document)); } return -1; } return super.incrementalUpdate(eventOffset, eventOldLength, eventNewLength, document); } private static boolean mergeUpdate(List<DocumentUpdate> updates, int eventOffset, int eventOldLength, int eventNewLength) { if (updates.isEmpty()) return false; final int MERGE_MARGIN = 5; DocumentUpdate a = updates.get(updates.size() - 1); if (eventOffset < a.offset) { // a not sorted update, ignore as it will be merged after sorting all of the updates return false; } if (a.offset == eventOffset) { if (a.newLength > eventOldLength) { a.newLength += eventNewLength - eventOldLength; } else { a.oldLength += eventOldLength - a.newLength; a.newLength = eventNewLength; } } else if (a.offset + a.newLength > eventOffset) { if (a.offset + a.newLength < eventOffset + eventOldLength) { a.oldLength = 
eventOldLength + eventOffset - (a.offset + a.oldLength); } a.newLength += eventNewLength - eventOldLength; } else if (a.offset + a.newLength + MERGE_MARGIN >= eventOffset) { int offsetDiff = eventOffset - a.offset - a.newLength; a.oldLength += offsetDiff + eventOldLength; a.newLength += offsetDiff + eventNewLength; } else { return false; } return true; } private void mergeUpdates() { List<DocumentUpdate> result = new ArrayList<>(updates.size()); result.add(updates.get(0)); for (int i = 1; i < updates.size(); i++) { DocumentUpdate b = updates.get(i); if (!mergeUpdate(result, b.offset, b.oldLength, b.newLength)) { result.add(b); } } updates = result; } private void sortUpdates() { int sortedFrom = updates.size(); // We need to sort updates using bubble sort, because each swap requires offset update. // The updates are merged and mostly sorted, so the complexity is expected to be close to O(3*n). while (sortedFrom != 0) { int lastSortedIndex = 0; for (int i = 1; i < sortedFrom; i++) { DocumentUpdate a = updates.get(i - 1); DocumentUpdate b = updates.get(i); if (a.offset > b.offset) { if (a.offset < b.offset + b.oldLength) { int delta = b.offset + b.oldLength - a.offset; a.offset = b.offset; a.oldLength -= Math.min(0, delta); a.newLength -= Math.min(0, delta); } else { a.offset += b.newLength - b.oldLength; } updates.set(i - 1, b); updates.set(i, a); lastSortedIndex = i; } } sortedFrom = lastSortedIndex; } } private static class DocumentUpdate { int offset; int oldLength; int newLength; final Document document; DocumentUpdate(int offset, int oldLength, int newLength, Document document) { this.offset = offset; this.oldLength = oldLength; this.newLength = newLength; this.document = document; } @Override public String toString() { return "update at " + offset + ": " + oldLength + " => " + newLength; } } } @Override @NonNls public String toString() { return myText.toString(); } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.ml.dataset.impl.cache; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.UUID; import java.util.concurrent.locks.LockSupport; import org.apache.ignite.Ignite; import org.apache.ignite.IgniteAtomicLong; import org.apache.ignite.IgniteCache; import org.apache.ignite.IgniteLock; import org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunction; import org.apache.ignite.configuration.CacheConfiguration; import org.apache.ignite.internal.IgniteKernal; import org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion; import org.apache.ignite.internal.processors.cache.IgniteCacheProxy; import org.apache.ignite.internal.processors.cache.distributed.dht.GridDhtCacheAdapter; import org.apache.ignite.internal.processors.cache.distributed.dht.GridDhtLocalPartition; import org.apache.ignite.internal.processors.cache.distributed.dht.GridDhtPartitionTopology; import org.apache.ignite.internal.util.IgniteUtils; import org.apache.ignite.internal.util.typedef.G; import org.apache.ignite.lang.IgnitePredicate; import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest; /** * Tests for 
{@link CacheBasedDataset}.
 */
public class CacheBasedDatasetTest extends GridCommonAbstractTest {
    /** Number of nodes in grid. */
    private static final int NODE_COUNT = 4;

    /** Ignite instance. */
    private Ignite ignite;

    /** {@inheritDoc} */
    @Override protected void beforeTestsStarted() throws Exception {
        for (int i = 1; i <= NODE_COUNT; i++)
            startGrid(i);
    }

    /** {@inheritDoc} */
    @Override protected void afterTestsStopped() {
        stopAllGrids();
    }

    /** {@inheritDoc} */
    @Override protected void beforeTest() throws Exception {
        /* Grid instance. */
        ignite = grid(NODE_COUNT);
        ignite.configuration().setPeerClassLoadingEnabled(true);
        IgniteUtils.setCurrentIgniteName(ignite.configuration().getIgniteInstanceName());
    }

    /**
     * Tests that partitions of the upstream cache and the partition {@code context} cache are reserved during
     * {@code compute} calls on the dataset. Reservation means that partitions won't be unloaded from the node
     * before the computation is completed.
     */
    public void testPartitionExchangeDuringComputeCall() {
        int partitions = 4;

        // NOTE: previously hard-coded as 4; use the local variable so the two stay in sync.
        IgniteCache<Integer, String> upstreamCache = generateTestData(partitions, 0);

        CacheBasedDatasetBuilder<Integer, String> builder = new CacheBasedDatasetBuilder<>(ignite, upstreamCache);

        CacheBasedDataset<Integer, String, Long, AutoCloseable> dataset = builder.build(
            (upstream, upstreamSize) -> upstreamSize,
            (upstream, upstreamSize, ctx) -> null
        );

        assertPartitionsReservedDuringComputation(partitions, upstreamCache, dataset, false);
    }

    /**
     * Tests that partitions of the upstream cache and the partition {@code context} cache are reserved during
     * {@code computeWithCtx} calls on the dataset. Reservation means that partitions won't be unloaded from the
     * node before the computation is completed.
     */
    public void testPartitionExchangeDuringComputeWithCtxCall() {
        int partitions = 4;

        IgniteCache<Integer, String> upstreamCache = generateTestData(partitions, 0);

        CacheBasedDatasetBuilder<Integer, String> builder = new CacheBasedDatasetBuilder<>(ignite, upstreamCache);

        CacheBasedDataset<Integer, String, Long, AutoCloseable> dataset = builder.build(
            (upstream, upstreamSize) -> upstreamSize,
            (upstream, upstreamSize, ctx) -> null
        );

        assertPartitionsReservedDuringComputation(partitions, upstreamCache, dataset, true);
    }

    /**
     * Shared scaffolding for the partition-exchange tests: starts a blocked computation on every partition,
     * asserts partitions are reserved while the computation is in flight and released afterwards.
     *
     * @param partitions Number of partitions in the upstream cache.
     * @param upstreamCache Upstream cache.
     * @param dataset Dataset under test.
     * @param withCtx {@code true} to drive the computation through {@code computeWithCtx}, {@code false} for
     *      {@code compute}.
     */
    private void assertPartitionsReservedDuringComputation(int partitions,
        IgniteCache<Integer, String> upstreamCache,
        CacheBasedDataset<Integer, String, Long, AutoCloseable> dataset,
        boolean withCtx) {
        assertTrue("Before computation all partitions should not be reserved",
            areAllPartitionsNotReserved(upstreamCache.getName(), dataset.getDatasetCache().getName()));

        UUID numOfStartedComputationsId = UUID.randomUUID();
        IgniteAtomicLong numOfStartedComputations = ignite.atomicLong(numOfStartedComputationsId.toString(), 0, true);

        UUID computationsLockId = UUID.randomUUID();
        IgniteLock computationsLock = ignite.reentrantLock(computationsLockId.toString(), false, true, true);

        // lock computations lock to stop computations in the middle
        computationsLock.lock();

        try {
            // Body executed by every per-partition computation: record that it started, then park on the
            // shared lock until the test releases it.
            Runnable computationBody = () -> {
                // track number of started computations
                ignite.atomicLong(numOfStartedComputationsId.toString(), 0, false).incrementAndGet();
                ignite.reentrantLock(computationsLockId.toString(), false, true, false).lock();
                ignite.reentrantLock(computationsLockId.toString(), false, true, false).unlock();
            };

            new Thread(() -> {
                if (withCtx)
                    dataset.computeWithCtx((ctx, data, partIndex) -> computationBody.run());
                else
                    dataset.compute((data, partIndex) -> computationBody.run());
            }).start();

            // wait all computations to start; park briefly instead of burning a core in a tight spin
            while (numOfStartedComputations.get() < partitions)
                LockSupport.parkNanos(1_000_000);

            assertTrue("During computation all partitions should be reserved",
                areAllPartitionsReserved(upstreamCache.getName(), dataset.getDatasetCache().getName()));
        }
        finally {
            computationsLock.unlock();
        }

        assertTrue("All partitions should be released",
            areAllPartitionsNotReserved(upstreamCache.getName(), dataset.getDatasetCache().getName()));
    }

    /**
     * Checks that all partitions of all specified caches are not reserved.
     *
     * @param cacheNames Cache names to be checked.
     * @return {@code true} if all partitions are not reserved, otherwise {@code false}.
     */
    private boolean areAllPartitionsNotReserved(String... cacheNames) {
        return checkAllPartitions(partition -> partition.reservations() == 0, cacheNames);
    }

    /**
     * Checks that all partitions of all specified caches are reserved.
     *
     * @param cacheNames Cache names to be checked.
     * @return {@code true} if all partitions are reserved, otherwise {@code false}.
     */
    private boolean areAllPartitionsReserved(String... cacheNames) {
        return checkAllPartitions(partition -> partition.reservations() != 0, cacheNames);
    }

    /**
     * Checks that all partitions of all specified caches satisfy the given predicate, retrying for up to
     * 30 seconds because partition state settles asynchronously.
     *
     * @param pred Predicate.
     * @param cacheNames Cache names.
     * @return {@code true} if all partitions satisfy the given predicate.
     */
    private boolean checkAllPartitions(IgnitePredicate<GridDhtLocalPartition> pred, String... cacheNames) {
        boolean flag = false;
        long checkingStartTs = System.currentTimeMillis();

        while (!flag && (System.currentTimeMillis() - checkingStartTs) < 30_000) {
            LockSupport.parkNanos(200 * 1000 * 1000);

            flag = true;

            for (String cacheName : cacheNames) {
                IgniteClusterPartitionsState state = IgniteClusterPartitionsState.getCurrentState(cacheName);

                for (IgniteInstancePartitionsState instanceState : state.instances.values())
                    for (GridDhtLocalPartition partition : instanceState.parts)
                        if (partition != null)
                            flag &= pred.apply(partition);
            }
        }

        return flag;
    }

    /**
     * Aggregated data about cache partitions in Ignite cluster.
     */
    private static class IgniteClusterPartitionsState {
        /** */
        private final String cacheName;

        /** */
        private final Map<UUID, IgniteInstancePartitionsState> instances;

        /** Captures the current partition state of the given cache on every running grid. */
        static IgniteClusterPartitionsState getCurrentState(String cacheName) {
            Map<UUID, IgniteInstancePartitionsState> instances = new HashMap<>();

            for (Ignite ignite : G.allGrids()) {
                IgniteKernal igniteKernal = (IgniteKernal)ignite;
                IgniteCacheProxy<?, ?> cache = igniteKernal.context().cache().jcache(cacheName);

                GridDhtCacheAdapter<?, ?> dht = dht(cache);

                GridDhtPartitionTopology top = dht.topology();

                AffinityTopologyVersion topVer = dht.context().shared().exchange().readyAffinityVersion();

                List<GridDhtLocalPartition> parts = new ArrayList<>();

                for (int p = 0; p < cache.context().config().getAffinity().partitions(); p++) {
                    GridDhtLocalPartition part = top.localPartition(p, AffinityTopologyVersion.NONE, false);

                    parts.add(part);
                }

                instances.put(ignite.cluster().localNode().id(), new IgniteInstancePartitionsState(topVer, parts));
            }

            return new IgniteClusterPartitionsState(cacheName, instances);
        }

        /** */
        IgniteClusterPartitionsState(String cacheName, Map<UUID, IgniteInstancePartitionsState> instances) {
            this.cacheName = cacheName;
            this.instances = instances;
        }

        /** Renders a human-readable table of per-node partition state (used in assertion diagnostics). */
        @Override public String toString() {
            StringBuilder builder = new StringBuilder();

            builder.append("Cache ").append(cacheName).append(" is in following state:").append("\n");

            for (Map.Entry<UUID, IgniteInstancePartitionsState> e : instances.entrySet()) {
                UUID instanceId = e.getKey();
                IgniteInstancePartitionsState instanceState = e.getValue();

                builder.append("\n\t")
                    .append("Node ")
                    .append(instanceId)
                    .append(" with topology version [")
                    .append(instanceState.topVer.topologyVersion())
                    .append(", ")
                    .append(instanceState.topVer.minorTopologyVersion())
                    .append("] contains following partitions:")
                    .append("\n\n");

                builder.append("\t\t---------------------------------------------------------------------------------");
                builder.append("--------------------\n");
                builder.append("\t\t| ID | STATE | RELOAD | RESERVATIONS | SHOULD BE RENTING | PRIMARY |");
                builder.append(" DATA STORE SIZE |\n");
                builder.append("\t\t---------------------------------------------------------------------------------");
                builder.append("--------------------\n");

                for (GridDhtLocalPartition partition : instanceState.parts)
                    if (partition != null) {
                        builder.append("\t\t")
                            .append(String.format("| %3d |", partition.id()))
                            .append(String.format(" %7s |", partition.state()))
                            .append(String.format(" %7s |", partition.reload()))
                            .append(String.format(" %13s |", partition.reservations()))
                            .append(String.format(" %18s |", partition.shouldBeRenting()))
                            .append(String.format(" %8s |", partition.primary(instanceState.topVer)))
                            .append(String.format(" %16d |", partition.dataStore().fullSize()))
                            .append("\n");

                        builder.append("\t\t-------------------------------------------------------------------------");
                        builder.append("----------------------------\n");
                    }
            }

            return builder.toString();
        }
    }

    /**
     * Aggregated data about cache partitions in Ignite instance.
     */
    private static class IgniteInstancePartitionsState {
        /** */
        private final AffinityTopologyVersion topVer;

        /** */
        private final List<GridDhtLocalPartition> parts;

        /** */
        IgniteInstancePartitionsState(AffinityTopologyVersion topVer, List<GridDhtLocalPartition> parts) {
            this.topVer = topVer;
            this.parts = parts;
        }

        /** */
        public AffinityTopologyVersion getTopVer() {
            return topVer;
        }

        /** */
        public List<GridDhtLocalPartition> getParts() {
            return parts;
        }
    }

    /**
     * Generates Ignite Cache with data for tests.
     *
     * @param partitions Number of partitions in the cache's affinity function.
     * @param backups Number of backups.
     * @return Ignite Cache with data for tests.
     */
    private IgniteCache<Integer, String> generateTestData(int partitions, int backups) {
        CacheConfiguration<Integer, String> cacheConfiguration = new CacheConfiguration<>();

        cacheConfiguration.setName(UUID.randomUUID().toString());
        cacheConfiguration.setAffinity(new RendezvousAffinityFunction(false, partitions));
        cacheConfiguration.setBackups(backups);

        IgniteCache<Integer, String> cache = ignite.createCache(cacheConfiguration);

        for (int i = 0; i < 1000; i++)
            cache.put(i, "TEST" + i);

        return cache;
    }
}
package com.desarrollodroide.twopanels; import android.app.Activity; import android.app.Fragment; import android.content.res.Configuration; import android.os.Bundle; import android.util.DisplayMetrics; import android.util.Log; import android.util.TypedValue; import android.widget.LinearLayout; public abstract class TwoPanelsBaseActivity extends Activity implements LeftFragment.OnSliderLeftListener, RightFragment.OnSliderRightListener { protected Fragment mRightFragment = new Fragment(); protected Fragment mLeftFragment = new Fragment(); private TwoPaneLayout mRootPanel = null; private Boolean mIsLeftShowing = true;; private Boolean mIsRightShowing = true; // Default weight of fragments protected float mLeftWeight = 0.30f; protected float mRightWeight = 0.70f; // Default orientation private int mFragmentsOrientation = LinearLayout.HORIZONTAL; @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.two_panels_activity_main); if (savedInstanceState != null) { // Restore value of members from saved state mIsLeftShowing = savedInstanceState.getBoolean("isLeftShowing"); mIsRightShowing = savedInstanceState.getBoolean("isRightShowing"); } mRootPanel = (TwoPaneLayout) findViewById(R.id.root); mRootPanel.setOrientation(mFragmentsOrientation); if (mFragmentsOrientation == LinearLayout.VERTICAL) { mRootPanel.setHeightsFromWeight(mLeftWeight, mRightWeight); } else { mRootPanel.setWidthsFromWeight(mLeftWeight, mRightWeight); } } protected void switchSliderVisitility() { if (mIsLeftShowing && mIsRightShowing) { mRootPanel.changeSliderVisitility(); } } protected void setSliderVisitility (Boolean visibility){ mRootPanel.setSliderVisitility(visibility); } protected void setBaseOrientation(int orientation) { if (orientation == LinearLayout.HORIZONTAL) { if (mFragmentsOrientation == LinearLayout.VERTICAL) { changeOrientation(LinearLayout.HORIZONTAL); } } else if (orientation == LinearLayout.VERTICAL) { if 
(mFragmentsOrientation == LinearLayout.HORIZONTAL) { changeOrientation(LinearLayout.VERTICAL); } } } public void setSlidersDrawables(int verticalDrawable, int horizontalDrawable) { mRootPanel.setSlidersDrawables(verticalDrawable, horizontalDrawable); } @Override public void onConfigurationChanged(Configuration newConfig) { super.onConfigurationChanged(newConfig); updateButtonSliderOrientation(); mRootPanel.updateWidgetsOnOrientationChange(mIsLeftShowing, mIsRightShowing, true); } @Override protected void onSaveInstanceState(Bundle outState) { // Saving state for rotations outState.putBoolean("isLeftShowing", mIsLeftShowing); outState.putBoolean("isRightShowing", mIsRightShowing); super.onSaveInstanceState(outState); } @Override public void slideFragmentsToLeft() { if (mIsLeftShowing && mIsRightShowing) { mRootPanel.hideLeft(); mIsLeftShowing = false; mIsRightShowing = true; } else if (mIsLeftShowing) { mRootPanel.showRight(); mIsRightShowing = true; mIsLeftShowing = true; } } @Override public void slideFragmentsToRight() { if (mIsRightShowing && mIsLeftShowing) { mRootPanel.hideRight(); mIsRightShowing = false; mIsLeftShowing = true; } else if (mIsRightShowing) { mRootPanel.showLeft(); mIsRightShowing = true; mIsLeftShowing = true; } } public void hideLeft() { if (mIsLeftShowing) { mRootPanel.hideLeftNoAnimate(); mIsLeftShowing = false; mIsRightShowing = true; } } public void hideRight() { if (mIsRightShowing) { mRootPanel.hideRightNoAnimate(); mIsLeftShowing = true; mIsRightShowing = false; } } public void showTwoFragments() { mRootPanel.showTwoPanels(); mIsLeftShowing = true; mIsRightShowing = true; } // Called when finish onCreate() @Override public void onWindowFocusChanged(boolean hasFocus) { mRightFragment = getFragmentManager().findFragmentById(R.id.right); mLeftFragment = getFragmentManager().findFragmentById(R.id.left); } private void changeOrientation(int orientation) { mFragmentsOrientation = orientation; mRootPanel.setOrientation(orientation); 
mRootPanel.setParamsValues(); updateButtonSliderOrientation(); mRootPanel.updateWidgetsOnOrientationChange(mIsLeftShowing, mIsRightShowing, false); } public int getStatusBarHeight() { int result = 0; int resourceId = getResources().getIdentifier("status_bar_height", "dimen", "android"); if (resourceId > 0) { result = getResources().getDimensionPixelSize(resourceId); } return result; } public int getScreenHeight() { DisplayMetrics displaymetrics = new DisplayMetrics(); getWindowManager().getDefaultDisplay().getMetrics(displaymetrics); return displaymetrics.heightPixels; } public int getScreenWidth() { DisplayMetrics displaymetrics = new DisplayMetrics(); getWindowManager().getDefaultDisplay().getMetrics(displaymetrics); return displaymetrics.widthPixels; } public int getActionBarHeight() { TypedValue tv = new TypedValue(); if (getTheme().resolveAttribute(android.R.attr.actionBarSize, tv, true)) { return TypedValue.complexToDimensionPixelSize(tv.data, getResources().getDisplayMetrics()); } else { return 0; } } public void updateButtonSliderOrientation() { // Manage errors if fragments not extends from RightFragment and // LeftFragment try { if (getFragmentsOrientation() == LinearLayout.VERTICAL) { ((LeftFragment) mLeftFragment).updateSlideLeftButtonOrientationVertical(); ((RightFragment) mRightFragment).updateSlideRightButtonOrientationVertical(); } else { ((LeftFragment) mLeftFragment).updateSlideLeftButtonOrientationHorizontal(); ((RightFragment) mRightFragment).updateSlideRightButtonOrientationHorizontal(); } } catch (Exception e) { Log.v("Error in updateButtonSliderOrientation", e.toString()); } } public Fragment getmRightFragment() { return mRightFragment; } public Fragment getmLeftFragment() { return mLeftFragment; } public float getmLeftWeight() { return mLeftWeight; } public float getmRightWeight() { return mRightWeight; } public int getFragmentsOrientation() { return mFragmentsOrientation; } protected void setSliderSize(int size) { 
mRootPanel.setmSliderBarConst(size); } }
package com.sibilantsolutions.grison.rx.event.xform; import java.net.InetSocketAddress; import java.net.SocketAddress; import java.util.Optional; import org.reactivestreams.Subscriber; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.base.VerifyException; import com.sibilantsolutions.grison.driver.foscam.entity.AlarmNotifyTextEntity; import com.sibilantsolutions.grison.driver.foscam.entity.AudioDataTextEntity; import com.sibilantsolutions.grison.driver.foscam.entity.AudioStartRespTextEntity; import com.sibilantsolutions.grison.driver.foscam.entity.LoginRespTextEntity; import com.sibilantsolutions.grison.driver.foscam.entity.Unk02TextEntity; import com.sibilantsolutions.grison.driver.foscam.entity.VerifyRespTextEntity; import com.sibilantsolutions.grison.driver.foscam.entity.VideoDataTextEntity; import com.sibilantsolutions.grison.driver.foscam.entity.VideoStartRespTextEntity; import com.sibilantsolutions.grison.driver.foscam.type.FosInt32; import com.sibilantsolutions.grison.rx.State; import com.sibilantsolutions.grison.rx.event.action.AbstractAction; import com.sibilantsolutions.grison.rx.event.action.AudioStartAction; import com.sibilantsolutions.grison.rx.event.action.AudioVideoConnectAction; import com.sibilantsolutions.grison.rx.event.action.AudioVideoLoginAction; import com.sibilantsolutions.grison.rx.event.action.LoginAction; import com.sibilantsolutions.grison.rx.event.action.VerifyAction; import com.sibilantsolutions.grison.rx.event.action.VideoStartAction; import com.sibilantsolutions.grison.rx.event.result.AbstractResult; import com.sibilantsolutions.grison.rx.event.result.AudioStartSendResult; import com.sibilantsolutions.grison.rx.event.result.AudioVideoConnectResult; import com.sibilantsolutions.grison.rx.event.result.AudioVideoLoginSendResult; import com.sibilantsolutions.grison.rx.event.result.AudioVideoReceiveResult; import com.sibilantsolutions.grison.rx.event.result.LoginSendResult; import 
com.sibilantsolutions.grison.rx.event.result.OperationConnectResult;
import com.sibilantsolutions.grison.rx.event.result.OperationReceiveResult;
import com.sibilantsolutions.grison.rx.event.result.VerifySendResult;
import com.sibilantsolutions.grison.rx.event.result.VideoStartSendResult;

import io.reactivex.rxjava3.functions.BiFunction;

/**
 * Given a State and an AbstractResult, determine and return the new State.
 * <p>
 * This is the reducer of the camera-handshake state machine. As a side effect of
 * some results it also fires the next protocol action (login, verify, video/audio
 * start, audio/video connect/login) into {@link #dynamicActions}.
 */
public class StateAndResultToStateBiFunction implements BiFunction<State, AbstractResult, State> {

    private static final Logger LOG = LoggerFactory.getLogger(StateAndResultToStateBiFunction.class);

    // Sink into which follow-up protocol actions are fired while reducing results.
    private final Subscriber<AbstractAction> dynamicActions;
    private final String username;
    private final String password;

    /**
     * @param dynamicActions Subscriber that receives follow-up actions fired during reduction.
     * @param username       Camera account username, sent with the VerifyAction.
     * @param password       Camera account password, sent with the VerifyAction.
     */
    public StateAndResultToStateBiFunction(Subscriber<AbstractAction> dynamicActions, String username, String password) {
        this.dynamicActions = dynamicActions;
        this.username = username;
        this.password = password;
    }

    /**
     * Reduces the current state and a result into the next state, firing follow-up
     * actions where the protocol requires them.
     *
     * @throws IllegalArgumentException if the result (or received text) is not one
     *                                  this reducer knows how to handle.
     */
    @Override
    public State apply(State state, AbstractResult result) {
        // --- operation (control) connection lifecycle ---
        if (result == OperationConnectResult.IN_FLIGHT) {
            return State.operationConnectInFlight(state);
        }
        if (result instanceof OperationConnectResult) {
            OperationConnectResult cr = (OperationConnectResult) result;
            if (cr.channel != null) {
                // Connected: immediately start the login handshake on the new channel.
                dynamicActions.onNext(new LoginAction(cr.channel));
                return State.operationConnected(cr.channel, state);
            } else {
                return State.fail(new RuntimeException(cr.failureCause), state);
            }
        }
        if (result instanceof LoginSendResult) {
            LoginSendResult lsr = (LoginSendResult) result;
            if (lsr == LoginSendResult.IN_FLIGHT) {
                return State.loginSending(state);
            } else if (lsr == LoginSendResult.SENT) {
                return State.loginSent(state);
            } else {
                // NOTE(review): here failureCause is passed to State.fail directly, while the
                // connect branches wrap it in a RuntimeException — confirm intended asymmetry.
                return State.fail(lsr.failureCause, state);
            }
        }
        if (result instanceof VerifySendResult) {
            VerifySendResult vsr = (VerifySendResult) result;
            if (vsr == VerifySendResult.IN_FLIGHT) {
                return State.verifySending(state);
            } else if (vsr == VerifySendResult.SENT) {
                return State.verifySent(state);
            } else {
                return State.fail(vsr.failureCause, state);
            }
        }
        if (result instanceof VideoStartSendResult) {
            VideoStartSendResult vsr = (VideoStartSendResult) result;
            if (vsr == VideoStartSendResult.IN_FLIGHT) {
                return State.videoStartSending(state);
            } else if (vsr == VideoStartSendResult.SENT) {
                return State.videoStartSent(state);
            } else {
                return State.fail(vsr.failureCause, state);
            }
        }
        if (result instanceof AudioStartSendResult) {
            AudioStartSendResult asr = (AudioStartSendResult) result;
            if (asr == AudioStartSendResult.IN_FLIGHT) {
                return State.audioStartSending(state);
            } else if (asr == AudioStartSendResult.SENT) {
                return State.audioStartSent(state);
            } else {
                return State.fail(asr.failureCause, state);
            }
        }
        // --- texts received on the operation connection ---
        if (result instanceof OperationReceiveResult) {
            OperationReceiveResult r = (OperationReceiveResult) result;
            if (r.text() instanceof LoginRespTextEntity) {
                // Login response -> answer with the credential verify request.
                final State state1 = State.loginRespText((LoginRespTextEntity) r.text(), state);
                dynamicActions.onNext(new VerifyAction(state1.operationChannel, username, password));
                return state1;
            }
            if (r.text() instanceof VerifyRespTextEntity) {
                return State.verifyRespText((VerifyRespTextEntity) r.text(), state);
            }
            if (r.text() instanceof Unk02TextEntity) {
                // Unk02 -> kick off both media streams.
                final State state1 = State.unk02((Unk02TextEntity) r.text(), state);
                dynamicActions.onNext(new VideoStartAction(state1.operationChannel));
                dynamicActions.onNext(new AudioStartAction(state1.operationChannel));
                return state1;
            }
            if (r.text() instanceof VideoStartRespTextEntity) {
                final State state1 = State.videoStartResp((VideoStartRespTextEntity) r.text(), state);
                return maybeConnect(state1.videoStartRespText.dataConnectionId(), state1, dynamicActions);
            }
            if (r.text() instanceof AudioStartRespTextEntity) {
                final State state1 = State.audioStartResp((AudioStartRespTextEntity) r.text(), state);
                return maybeConnect(state1.audioStartRespText.dataConnectionId(), state1, dynamicActions);
            }
            if (r.text() instanceof AlarmNotifyTextEntity) {
                return State.alarmNotify((AlarmNotifyTextEntity) r.text(), state);
            }
            throw new IllegalArgumentException("Unexpected handshake state=" + state + ", result=" + result);
        }
        // --- audio/video (data) connection lifecycle ---
        if (result instanceof AudioVideoConnectResult) {
            AudioVideoConnectResult r = (AudioVideoConnectResult) result;
            LOG.info("AudioVideoConnectResult={}.", r);
            if (r == AudioVideoConnectResult.IN_FLIGHT) {
                return State.audioVideoConnectInFlight(state);
            }
            final State state1;
            if (r.channel != null) {
                state1 = State.audioVideoConnected(r.channel, state);
                // A dataConnectionId must already have been recorded by a start-resp;
                // its absence at this point is a programming error (VerifyException).
                Optional<FosInt32> o = Optional.ofNullable(state1.dataConnectionId);
                final FosInt32 dataConnectionId = o.orElseThrow(VerifyException::new);
                final AudioVideoLoginAction audioVideoLoginAction = new AudioVideoLoginAction(state1.audioVideoChannel, dataConnectionId);
                dynamicActions.onNext(audioVideoLoginAction);
            } else {
                state1 = State.fail(r.failureCause, state);
            }
            return state1;
        }
        if (result instanceof AudioVideoLoginSendResult) {
            AudioVideoLoginSendResult r = (AudioVideoLoginSendResult) result;
            LOG.info("AudioVideoLoginSendResult={}.", r);
            if (r == AudioVideoLoginSendResult.IN_FLIGHT) {
                return State.audioVideoLoginSending(state);
            } else if (r == AudioVideoLoginSendResult.SENT) {
                return State.audioVideoLoginSent(state);
            } else {
                return State.fail(new RuntimeException(r.failureCause), state);
            }
        }
        // --- texts received on the audio/video connection ---
        if (result instanceof AudioVideoReceiveResult) {
            AudioVideoReceiveResult r = (AudioVideoReceiveResult) result;
            if (r.text() instanceof VideoDataTextEntity) {
                VideoDataTextEntity t = (VideoDataTextEntity) r.text();
                return State.videoDataText(t, state);
            }
            if (r.text() instanceof AudioDataTextEntity) {
                AudioDataTextEntity t = (AudioDataTextEntity) r.text();
                return State.audioDataText(t, state);
            }
            throw new IllegalArgumentException("Unexpected result=" + result + " with state=" + state);
        }
        throw new IllegalArgumentException("Unexpected result=" + result + " with state=" + state);
    }

    /**
     * Fire the AudioVideoConnectAction if a dataConnectionId is present. If not, that means that
     * the audio/video connection is already established.
     *
     * @param dataConnectionId Optional dataConnectionId that came with videoStartRespText or
     *                         audioStartRespText, if any.
     * @param state1           State to return after the action has (maybe) been fired.
     * @param dynamicActions   Subscriber to which to fire the action.
     * @return The new state (or a failed state if the remote address is not an InetSocketAddress).
     */
    private static State maybeConnect(Optional<FosInt32> dataConnectionId, State state1, Subscriber<AbstractAction> dynamicActions) {
        return dataConnectionId
                .map(ignored -> {
                    // Connect the data channel to the same host/port as the operation channel.
                    final SocketAddress socketAddress = state1.operationChannel.remoteAddress();
                    if (socketAddress instanceof InetSocketAddress) {
                        InetSocketAddress inetSocketAddress = (InetSocketAddress) socketAddress;
                        dynamicActions.onNext(new AudioVideoConnectAction(inetSocketAddress.getHostString(), inetSocketAddress.getPort()));
                        return state1;
                    } else {
                        return State.fail(new RuntimeException("Expected " + InetSocketAddress.class.getSimpleName()
                                + " but got=" + socketAddress), state1);
                    }
                })
                .orElse(state1);
    }

}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.search.builder; import com.carrotsearch.hppc.ObjectFloatHashMap; import com.carrotsearch.hppc.cursors.ObjectCursor; import org.elasticsearch.action.support.ToXContentToBytes; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.script.Script; import org.elasticsearch.search.aggregations.AbstractAggregationBuilder; import org.elasticsearch.search.fetch.innerhits.InnerHitsBuilder; import 
org.elasticsearch.search.fetch.source.FetchSourceContext;
import org.elasticsearch.search.highlight.HighlightBuilder;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.rescore.RescoreBuilder;
import org.elasticsearch.search.sort.SortBuilder;
import org.elasticsearch.search.sort.SortBuilders;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.search.suggest.SuggestBuilder;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Objects;

/**
 * A search source builder allowing to easily build search source. Simple
 * construction using
 * {@link org.elasticsearch.search.builder.SearchSourceBuilder#searchSource()}.
 *
 * @see org.elasticsearch.action.search.SearchRequest#source(SearchSourceBuilder)
 */
public final class SearchSourceBuilder extends ToXContentToBytes implements Writeable<SearchSourceBuilder> {

    // ParseField constants: the JSON keys of the search-source body.
    public static final ParseField FROM_FIELD = new ParseField("from");
    public static final ParseField SIZE_FIELD = new ParseField("size");
    public static final ParseField TIMEOUT_FIELD = new ParseField("timeout");
    public static final ParseField TERMINATE_AFTER_FIELD = new ParseField("terminate_after");
    public static final ParseField QUERY_FIELD = new ParseField("query");
    public static final ParseField POST_FILTER_FIELD = new ParseField("post_filter");
    public static final ParseField MIN_SCORE_FIELD = new ParseField("min_score");
    public static final ParseField VERSION_FIELD = new ParseField("version");
    public static final ParseField EXPLAIN_FIELD = new ParseField("explain");
    public static final ParseField _SOURCE_FIELD = new ParseField("_source");
    public static final ParseField FIELDS_FIELD = new ParseField("fields");
    public static final ParseField FIELDDATA_FIELDS_FIELD = new ParseField("fielddata_fields");
    public static final ParseField SCRIPT_FIELDS_FIELD = new ParseField("script_fields");
    public static final ParseField SCRIPT_FIELD = new ParseField("script");
    public static final ParseField IGNORE_FAILURE_FIELD = new ParseField("ignore_failure");
    public static final ParseField SORT_FIELD = new ParseField("sort");
    public static final ParseField TRACK_SCORES_FIELD = new ParseField("track_scores");
    public static final ParseField INDICES_BOOST_FIELD = new ParseField("indices_boost");
    // "aggregations" with deprecated alias "aggs".
    public static final ParseField AGGREGATIONS_FIELD = new ParseField("aggregations", "aggs");
    public static final ParseField HIGHLIGHT_FIELD = new ParseField("highlight");
    public static final ParseField INNER_HITS_FIELD = new ParseField("inner_hits");
    public static final ParseField SUGGEST_FIELD = new ParseField("suggest");
    public static final ParseField RESCORE_FIELD = new ParseField("rescore");
    public static final ParseField STATS_FIELD = new ParseField("stats");
    public static final ParseField EXT_FIELD = new ParseField("ext");

    // Stateless prototype used only to dispatch readFrom/fromXContent.
    private static final SearchSourceBuilder PROTOTYPE = new SearchSourceBuilder();

    /** Reads a new builder from the given stream via the prototype. */
    public static SearchSourceBuilder readSearchSourceFrom(StreamInput in) throws IOException {
        return PROTOTYPE.readFrom(in);
    }

    /** Parses a new builder from XContent via the prototype. */
    public static SearchSourceBuilder parseSearchSource(XContentParser parser, QueryParseContext context) throws IOException {
        return PROTOTYPE.fromXContent(parser, context);
    }

    /**
     * A static factory method to construct a new search source.
     */
    public static SearchSourceBuilder searchSource() {
        return new SearchSourceBuilder();
    }

    /**
     * A static factory method to construct new search highlights.
     */
    public static HighlightBuilder highlight() {
        return new HighlightBuilder();
    }

    // Builder state. null / -1 means "not set"; several sections (sorts,
    // aggregations, highlight, suggest, inner hits, rescore, ext) are stored
    // pre-serialized as XContent bytes rather than as builder objects.
    private QueryBuilder<?> queryBuilder;
    private QueryBuilder<?> postQueryBuilder;
    private int from = -1;
    private int size = -1;
    private Boolean explain;
    private Boolean version;
    private List<BytesReference> sorts;
    private boolean trackScores = false;
    private Float minScore;
    private long timeoutInMillis = -1;
    private int terminateAfter = SearchContext.DEFAULT_TERMINATE_AFTER;
    private List<String> fieldNames;
    private List<String> fieldDataFields;
    private List<ScriptField> scriptFields;
    private FetchSourceContext fetchSourceContext;
    private List<BytesReference> aggregations;
    private BytesReference highlightBuilder;
    private BytesReference suggestBuilder;
    private BytesReference innerHitsBuilder;
    private List<BytesReference> rescoreBuilders;
    private ObjectFloatHashMap<String> indexBoost = null;
    private List<String> stats;
    private BytesReference ext = null;

    /**
     * Constructs a new search source builder.
     */
    public SearchSourceBuilder() {
    }

    /**
     * Sets the search query for this request.
     *
     * @see org.elasticsearch.index.query.QueryBuilders
     */
    public SearchSourceBuilder query(QueryBuilder<?> query) {
        this.queryBuilder = query;
        return this;
    }

    /**
     * Gets the query for this request
     */
    public QueryBuilder<?> query() {
        return queryBuilder;
    }

    /**
     * Sets a filter that will be executed after the query has been executed and
     * only has affect on the search hits (not aggregations). This filter is
     * always executed as last filtering mechanism.
     */
    public SearchSourceBuilder postFilter(QueryBuilder<?> postFilter) {
        this.postQueryBuilder = postFilter;
        return this;
    }

    /**
     * Gets the post filter for this request
     */
    public QueryBuilder<?> postFilter() {
        return postQueryBuilder;
    }

    /**
     * From index to start the search from. Defaults to <tt>0</tt>.
     */
    public SearchSourceBuilder from(int from) {
        this.from = from;
        return this;
    }

    /**
     * Gets the from index to start the search from.
**/ public int from() { return from; } /** * The number of search hits to return. Defaults to <tt>10</tt>. */ public SearchSourceBuilder size(int size) { this.size = size; return this; } /** * Gets the number of search hits to return. */ public int size() { return size; } /** * Sets the minimum score below which docs will be filtered out. */ public SearchSourceBuilder minScore(float minScore) { this.minScore = minScore; return this; } /** * Gets the minimum score below which docs will be filtered out. */ public Float minScore() { return minScore; } /** * Should each {@link org.elasticsearch.search.SearchHit} be returned with * an explanation of the hit (ranking). */ public SearchSourceBuilder explain(Boolean explain) { this.explain = explain; return this; } /** * Indicates whether each search hit will be returned with an explanation of * the hit (ranking) */ public Boolean explain() { return explain; } /** * Should each {@link org.elasticsearch.search.SearchHit} be returned with a * version associated with it. */ public SearchSourceBuilder version(Boolean version) { this.version = version; return this; } /** * Indicates whether the document's version will be included in the search * hits. */ public Boolean version() { return version; } /** * An optional timeout to control how long search is allowed to take. */ public SearchSourceBuilder timeout(TimeValue timeout) { this.timeoutInMillis = timeout.millis(); return this; } /** * Gets the timeout to control how long search is allowed to take. */ public long timeoutInMillis() { return timeoutInMillis; } /** * An optional terminate_after to terminate the search after collecting * <code>terminateAfter</code> documents */ public SearchSourceBuilder terminateAfter(int terminateAfter) { if (terminateAfter < 0) { throw new IllegalArgumentException("terminateAfter must be > 0"); } this.terminateAfter = terminateAfter; return this; } /** * Gets the number of documents to terminate after collecting. 
     */
    public int terminateAfter() {
        return terminateAfter;
    }

    /**
     * Adds a sort against the given field name and the sort ordering.
     *
     * @param name
     *            The name of the field
     * @param order
     *            The sort ordering
     */
    public SearchSourceBuilder sort(String name, SortOrder order) {
        return sort(SortBuilders.fieldSort(name).order(order));
    }

    /**
     * Add a sort against the given field name.
     *
     * @param name
     *            The name of the field to sort by
     */
    public SearchSourceBuilder sort(String name) {
        return sort(SortBuilders.fieldSort(name));
    }

    /**
     * Adds a sort builder.
     */
    public SearchSourceBuilder sort(SortBuilder sort) {
        try {
            if (sorts == null) {
                sorts = new ArrayList<>();
            }
            // Sorts are stored pre-rendered as XContent bytes, wrapped in an object.
            XContentBuilder builder = XContentFactory.jsonBuilder();
            builder.startObject();
            sort.toXContent(builder, EMPTY_PARAMS);
            builder.endObject();
            sorts.add(builder.bytes());
            return this;
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Gets the bytes representing the sort builders for this request.
     */
    public List<BytesReference> sorts() {
        return sorts;
    }

    /**
     * Applies when sorting, and controls if scores will be tracked as well.
     * Defaults to <tt>false</tt>.
     */
    public SearchSourceBuilder trackScores(boolean trackScores) {
        this.trackScores = trackScores;
        return this;
    }

    /**
     * Indicates whether scores will be tracked for this request.
     */
    public boolean trackScores() {
        return trackScores;
    }

    /**
     * Add an aggregation to perform as part of the search.
     */
    public SearchSourceBuilder aggregation(AbstractAggregationBuilder aggregation) {
        try {
            if (aggregations == null) {
                aggregations = new ArrayList<>();
            }
            XContentBuilder builder = XContentFactory.jsonBuilder();
            builder.startObject();
            aggregation.toXContent(builder, EMPTY_PARAMS);
            builder.endObject();
            aggregations.add(builder.bytes());
            return this;
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Gets the bytes representing the aggregation builders for this request.
     */
    public List<BytesReference> aggregations() {
        return aggregations;
    }

    /**
     * Adds highlight to perform as part of the search.
     */
    public SearchSourceBuilder highlighter(HighlightBuilder highlightBuilder) {
        try {
            XContentBuilder builder = XContentFactory.jsonBuilder();
            builder.startObject();
            highlightBuilder.innerXContent(builder, EMPTY_PARAMS);
            builder.endObject();
            this.highlightBuilder = builder.bytes();
            return this;
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Gets the bytes representing the hightlighter builder for this request.
     */
    public BytesReference highlighter() {
        return highlightBuilder;
    }

    /** Sets the inner-hits definition, stored as XContent bytes. */
    public SearchSourceBuilder innerHits(InnerHitsBuilder innerHitsBuilder) {
        try {
            XContentBuilder builder = XContentFactory.jsonBuilder();
            builder.startObject();
            innerHitsBuilder.innerXContent(builder, EMPTY_PARAMS);
            builder.endObject();
            this.innerHitsBuilder = builder.bytes();
            return this;
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Gets the bytes representing the inner hits builder for this request.
     */
    public BytesReference innerHits() {
        return innerHitsBuilder;
    }

    /** Sets the suggester definition, stored as XContent bytes. */
    public SearchSourceBuilder suggest(SuggestBuilder suggestBuilder) {
        try {
            XContentBuilder builder = XContentFactory.jsonBuilder();
            // Unlike the other setters, no explicit startObject()/endObject()
            // wrapper is added around the suggest content here.
            suggestBuilder.toXContent(builder, EMPTY_PARAMS);
            this.suggestBuilder = builder.bytes();
            return this;
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Gets the bytes representing the suggester builder for this request.
     */
    public BytesReference suggest() {
        return suggestBuilder;
    }

    /** Appends a rescorer, stored as XContent bytes. */
    public SearchSourceBuilder addRescorer(RescoreBuilder rescoreBuilder) {
        try {
            if (rescoreBuilders == null) {
                rescoreBuilders = new ArrayList<>();
            }
            XContentBuilder builder = XContentFactory.jsonBuilder();
            builder.startObject();
            rescoreBuilder.toXContent(builder, EMPTY_PARAMS);
            builder.endObject();
            rescoreBuilders.add(builder.bytes());
            return this;
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    /** Removes all rescorers from this request. */
    public SearchSourceBuilder clearRescorers() {
        rescoreBuilders = null;
        return this;
    }

    /**
     * Gets the bytes representing the rescore builders for this request.
     */
    public List<BytesReference> rescores() {
        return rescoreBuilders;
    }

    /**
     * Indicates whether the response should contain the stored _source for
     * every hit
     */
    public SearchSourceBuilder fetchSource(boolean fetch) {
        // Reuse an existing context (keeping its include/exclude patterns) if present.
        if (this.fetchSourceContext == null) {
            this.fetchSourceContext = new FetchSourceContext(fetch);
        } else {
            this.fetchSourceContext.fetchSource(fetch);
        }
        return this;
    }

    /**
     * Indicate that _source should be returned with every hit, with an
     * "include" and/or "exclude" set which can include simple wildcard
     * elements.
     *
     * @param include
     *            An optional include (optionally wildcarded) pattern to filter
     *            the returned _source
     * @param exclude
     *            An optional exclude (optionally wildcarded) pattern to filter
     *            the returned _source
     */
    public SearchSourceBuilder fetchSource(@Nullable String include, @Nullable String exclude) {
        return fetchSource(include == null ? Strings.EMPTY_ARRAY : new String[] { include },
                exclude == null ? Strings.EMPTY_ARRAY : new String[] { exclude });
    }

    /**
     * Indicate that _source should be returned with every hit, with an
     * "include" and/or "exclude" set which can include simple wildcard
     * elements.
     *
     * @param includes
     *            An optional list of include (optionally wildcarded) pattern to
     *            filter the returned _source
     * @param excludes
     *            An optional list of exclude (optionally wildcarded) pattern to
     *            filter the returned _source
     */
    public SearchSourceBuilder fetchSource(@Nullable String[] includes, @Nullable String[] excludes) {
        fetchSourceContext = new FetchSourceContext(includes, excludes);
        return this;
    }

    /**
     * Indicate how the _source should be fetched.
     */
    public SearchSourceBuilder fetchSource(@Nullable FetchSourceContext fetchSourceContext) {
        this.fetchSourceContext = fetchSourceContext;
        return this;
    }

    /**
     * Gets the {@link FetchSourceContext} which defines how the _source should
     * be fetched.
     */
    public FetchSourceContext fetchSource() {
        return fetchSourceContext;
    }

    /**
     * Adds a field to load and return (note, it must be stored) as part of the
     * search request. If none are specified, the source of the document will be
     * return.
     */
    public SearchSourceBuilder field(String name) {
        if (fieldNames == null) {
            fieldNames = new ArrayList<>();
        }
        fieldNames.add(name);
        return this;
    }

    /**
     * Sets the fields to load and return as part of the search request. If none
     * are specified, the source of the document will be returned.
     */
    public SearchSourceBuilder fields(List<String> fields) {
        this.fieldNames = fields;
        return this;
    }

    /**
     * Sets no fields to be loaded, resulting in only id and type to be returned
     * per field.
     */
    public SearchSourceBuilder noFields() {
        this.fieldNames = Collections.emptyList();
        return this;
    }

    /**
     * Gets the fields to load and return as part of the search request.
     */
    public List<String> fields() {
        return fieldNames;
    }

    /**
     * Adds a field to load from the field data cache and return as part of the
     * search request.
     */
    public SearchSourceBuilder fieldDataField(String name) {
        if (fieldDataFields == null) {
            fieldDataFields = new ArrayList<>();
        }
        fieldDataFields.add(name);
        return this;
    }

    /**
     * Gets the field-data fields.
     */
    public List<String> fieldDataFields() {
        return fieldDataFields;
    }

    /**
     * Adds a script field under the given name with the provided script.
     *
     * @param name
     *            The name of the field
     * @param script
     *            The script
     */
    public SearchSourceBuilder scriptField(String name, Script script) {
        scriptField(name, script, false);
        return this;
    }

    /**
     * Adds a script field under the given name with the provided script.
     *
     * @param name
     *            The name of the field
     * @param script
     *            The script
     * @param ignoreFailure
     *            Whether failures of this script field should be ignored
     */
    public SearchSourceBuilder scriptField(String name, Script script, boolean ignoreFailure) {
        if (scriptFields == null) {
            scriptFields = new ArrayList<>();
        }
        scriptFields.add(new ScriptField(name, script, ignoreFailure));
        return this;
    }

    /**
     * Gets the script fields.
     */
    public List<ScriptField> scriptFields() {
        return scriptFields;
    }

    /**
     * Sets the boost a specific index will receive when the query is executeed
     * against it.
     *
     * @param index
     *            The index to apply the boost against
     * @param indexBoost
     *            The boost to apply to the index
     */
    public SearchSourceBuilder indexBoost(String index, float indexBoost) {
        if (this.indexBoost == null) {
            this.indexBoost = new ObjectFloatHashMap<>();
        }
        this.indexBoost.put(index, indexBoost);
        return this;
    }

    /**
     * Gets the boost a specific indices will receive when the query is
     * executeed against them.
     */
    public ObjectFloatHashMap<String> indexBoost() {
        return indexBoost;
    }

    /**
     * The stats groups this request will be aggregated under.
     */
    public SearchSourceBuilder stats(List<String> statsGroups) {
        this.stats = statsGroups;
        return this;
    }

    /**
     * The stats groups this request will be aggregated under.
     */
    public List<String> stats() {
        return stats;
    }

    /** Sets an opaque "ext" section for this request, stored as XContent bytes. */
    public SearchSourceBuilder ext(XContentBuilder ext) {
        this.ext = ext.bytes();
        return this;
    }

    /** Gets the bytes of the "ext" section. */
    public BytesReference ext() {
        return ext;
    }

    /**
     * Parses a search-source definition from XContent into a fresh builder.
     * Scalar keys (from, size, timeout, ...) are read directly; structured
     * sections are handled per token type below.
     */
    public SearchSourceBuilder fromXContent(XContentParser parser, QueryParseContext context) throws IOException {
        SearchSourceBuilder builder = new SearchSourceBuilder();
        XContentParser.Token token = parser.currentToken();
        String currentFieldName = null;
        // Advance onto the opening object if the caller has not positioned the parser yet.
        if (token != XContentParser.Token.START_OBJECT && (token = parser.nextToken()) != XContentParser.Token.START_OBJECT) {
            throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.START_OBJECT +
                    "] but found [" + token + "]", parser.getTokenLocation());
        }
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                currentFieldName = parser.currentName();
            } else if (token.isValue()) {
                // Simple scalar keys.
                if (context.parseFieldMatcher().match(currentFieldName, FROM_FIELD)) {
                    builder.from = parser.intValue();
                } else if (context.parseFieldMatcher().match(currentFieldName, SIZE_FIELD)) {
                    builder.size = parser.intValue();
                } else if (context.parseFieldMatcher().match(currentFieldName, TIMEOUT_FIELD)) {
                    builder.timeoutInMillis = parser.longValue();
                } else if (context.parseFieldMatcher().match(currentFieldName, TERMINATE_AFTER_FIELD)) {
                    builder.terminateAfter = parser.intValue();
                } else if (context.parseFieldMatcher().match(currentFieldName, MIN_SCORE_FIELD)) {
                    builder.minScore = parser.floatValue();
                } else if (context.parseFieldMatcher().match(currentFieldName, VERSION_FIELD)) {
                    builder.version = parser.booleanValue();
                } else if (context.parseFieldMatcher().match(currentFieldName, EXPLAIN_FIELD)) {
                    builder.explain = parser.booleanValue();
                } else if (context.parseFieldMatcher().match(currentFieldName, TRACK_SCORES_FIELD)) {
                    builder.trackScores = parser.booleanValue();
                } else if (context.parseFieldMatcher().match(currentFieldName, _SOURCE_FIELD)) {
                    FetchSourceContext
// ---- SearchSourceBuilder (Elasticsearch search request body) — fragment ----
// NOTE(review): this view opens part-way through the XContent parsing method; the method
// signature, the while-loop header over parser tokens, and the declaration of the local
// variable assigned on the first line below are above this chunk. We are inside the
// token.isValue() branch, handling "_source" given as a scalar value.
                        fetchSourceContext = FetchSourceContext.parse(parser, context);
                        builder.fetchSourceContext = fetchSourceContext;
                    } else if (context.parseFieldMatcher().match(currentFieldName, FIELDS_FIELD)) {
                        // "fields" given as a single scalar value rather than an array
                        List<String> fieldNames = new ArrayList<>();
                        fieldNames.add(parser.text());
                        builder.fieldNames = fieldNames;
                    } else if (context.parseFieldMatcher().match(currentFieldName, SORT_FIELD)) {
                        // "sort" given as a single field name
                        builder.sort(parser.text());
                    } else {
                        throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName + "].",
                                parser.getTokenLocation());
                    }
                } else if (token == XContentParser.Token.START_OBJECT) {
                    // Object-valued top-level keys of the search source.
                    if (context.parseFieldMatcher().match(currentFieldName, QUERY_FIELD)) {
                        builder.queryBuilder = context.parseInnerQueryBuilder();
                    } else if (context.parseFieldMatcher().match(currentFieldName, POST_FILTER_FIELD)) {
                        builder.postQueryBuilder = context.parseInnerQueryBuilder();
                    } else if (context.parseFieldMatcher().match(currentFieldName, _SOURCE_FIELD)) {
                        FetchSourceContext fetchSourceContext = FetchSourceContext.parse(parser, context);
                        builder.fetchSourceContext = fetchSourceContext;
                    } else if (context.parseFieldMatcher().match(currentFieldName, SCRIPT_FIELDS_FIELD)) {
                        // "script_fields": { "<name>": { "script": ..., "ignore_failure": ... }, ... }
                        List<ScriptField> scriptFields = new ArrayList<>();
                        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                            String scriptFieldName = parser.currentName();
                            token = parser.nextToken();
                            if (token == XContentParser.Token.START_OBJECT) {
                                Script script = null;
                                boolean ignoreFailure = false;
                                while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                                    if (token == XContentParser.Token.FIELD_NAME) {
                                        currentFieldName = parser.currentName();
                                    } else if (token.isValue()) {
                                        // "script" given as a plain value (inline script string)
                                        if (context.parseFieldMatcher().match(currentFieldName, SCRIPT_FIELD)) {
                                            script = Script.parse(parser, context.parseFieldMatcher());
                                        } else if (context.parseFieldMatcher().match(currentFieldName, IGNORE_FAILURE_FIELD)) {
                                            ignoreFailure = parser.booleanValue();
                                        } else {
                                            throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName + "].",
                                                    parser.getTokenLocation());
                                        }
                                    } else if (token == XContentParser.Token.START_OBJECT) {
                                        // "script" given in full object form
                                        if (context.parseFieldMatcher().match(currentFieldName, SCRIPT_FIELD)) {
                                            script = Script.parse(parser, context.parseFieldMatcher());
                                        } else {
                                            throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName + "].",
                                                    parser.getTokenLocation());
                                        }
                                    } else {
                                        throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName + "].",
                                                parser.getTokenLocation());
                                    }
                                }
                                scriptFields.add(new ScriptField(scriptFieldName, script, ignoreFailure));
                            } else {
                                throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.START_OBJECT + "] in ["
                                        + currentFieldName + "] but found [" + token + "]", parser.getTokenLocation());
                            }
                        }
                        builder.scriptFields = scriptFields;
                    } else if (context.parseFieldMatcher().match(currentFieldName, INDICES_BOOST_FIELD)) {
                        // "indices_boost": { "<index>": <float>, ... }
                        ObjectFloatHashMap<String> indexBoost = new ObjectFloatHashMap<String>();
                        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                            if (token == XContentParser.Token.FIELD_NAME) {
                                currentFieldName = parser.currentName();
                            } else if (token.isValue()) {
                                indexBoost.put(currentFieldName, parser.floatValue());
                            } else {
                                throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName + "].",
                                        parser.getTokenLocation());
                            }
                        }
                        builder.indexBoost = indexBoost;
                    } else if (context.parseFieldMatcher().match(currentFieldName, AGGREGATIONS_FIELD)) {
                        // Aggregations are stored as raw bytes (one serialized object per aggregation,
                        // re-wrapped in { "<name>": {...} } form) and re-parsed downstream.
                        List<BytesReference> aggregations = new ArrayList<>();
                        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                            currentFieldName = parser.currentName();
                            token = parser.nextToken();
                            if (token == XContentParser.Token.START_OBJECT) {
                                XContentBuilder xContentBuilder = XContentFactory.contentBuilder(parser.contentType());
                                xContentBuilder.startObject();
                                xContentBuilder.field(currentFieldName);
                                xContentBuilder.copyCurrentStructure(parser);
                                xContentBuilder.endObject();
                                aggregations.add(xContentBuilder.bytes());
                            } else {
                                throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName + "].",
                                        parser.getTokenLocation());
                            }
                        }
                        builder.aggregations = aggregations;
                    } else if (context.parseFieldMatcher().match(currentFieldName, HIGHLIGHT_FIELD)) {
                        // The following sections are kept as opaque bytes and parsed lazily elsewhere.
                        XContentBuilder xContentBuilder = XContentFactory.contentBuilder(parser.contentType()).copyCurrentStructure(parser);
                        builder.highlightBuilder = xContentBuilder.bytes();
                    } else if (context.parseFieldMatcher().match(currentFieldName, INNER_HITS_FIELD)) {
                        XContentBuilder xContentBuilder = XContentFactory.contentBuilder(parser.contentType()).copyCurrentStructure(parser);
                        builder.innerHitsBuilder = xContentBuilder.bytes();
                    } else if (context.parseFieldMatcher().match(currentFieldName, SUGGEST_FIELD)) {
                        XContentBuilder xContentBuilder = XContentFactory.contentBuilder(parser.contentType());
                        xContentBuilder.copyCurrentStructure(parser);
                        builder.suggestBuilder = xContentBuilder.bytes();
                    } else if (context.parseFieldMatcher().match(currentFieldName, SORT_FIELD)) {
                        // single sort given in object form
                        List<BytesReference> sorts = new ArrayList<>();
                        XContentBuilder xContentBuilder = XContentFactory.contentBuilder(parser.contentType()).copyCurrentStructure(parser);
                        sorts.add(xContentBuilder.bytes());
                        builder.sorts = sorts;
                    } else if (context.parseFieldMatcher().match(currentFieldName, EXT_FIELD)) {
                        XContentBuilder xContentBuilder = XContentFactory.contentBuilder(parser.contentType()).copyCurrentStructure(parser);
                        builder.ext = xContentBuilder.bytes();
                    } else {
                        throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName + "].",
                                parser.getTokenLocation());
                    }
                } else if (token == XContentParser.Token.START_ARRAY) {
                    // Array-valued top-level keys of the search source.
                    if (context.parseFieldMatcher().match(currentFieldName, FIELDS_FIELD)) {
                        List<String> fieldNames = new ArrayList<>();
                        while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                            if (token == XContentParser.Token.VALUE_STRING) {
                                fieldNames.add(parser.text());
                            } else {
                                throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.VALUE_STRING + "] in ["
                                        + currentFieldName + "] but found [" + token + "]", parser.getTokenLocation());
                            }
                        }
                        builder.fieldNames = fieldNames;
                    } else if (context.parseFieldMatcher().match(currentFieldName, FIELDDATA_FIELDS_FIELD)) {
                        List<String> fieldDataFields = new ArrayList<>();
                        while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                            if (token == XContentParser.Token.VALUE_STRING) {
                                fieldDataFields.add(parser.text());
                            } else {
                                throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.VALUE_STRING + "] in ["
                                        + currentFieldName + "] but found [" + token + "]", parser.getTokenLocation());
                            }
                        }
                        builder.fieldDataFields = fieldDataFields;
                    } else if (context.parseFieldMatcher().match(currentFieldName, SORT_FIELD)) {
                        // each array element is kept as raw bytes
                        List<BytesReference> sorts = new ArrayList<>();
                        while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                            XContentBuilder xContentBuilder = XContentFactory.contentBuilder(parser.contentType()).copyCurrentStructure(parser);
                            sorts.add(xContentBuilder.bytes());
                        }
                        builder.sorts = sorts;
                    } else if (context.parseFieldMatcher().match(currentFieldName, RESCORE_FIELD)) {
                        List<BytesReference> rescoreBuilders = new ArrayList<>();
                        while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                            XContentBuilder xContentBuilder = XContentFactory.contentBuilder(parser.contentType()).copyCurrentStructure(parser);
                            rescoreBuilders.add(xContentBuilder.bytes());
                        }
                        builder.rescoreBuilders = rescoreBuilders;
                    } else if (context.parseFieldMatcher().match(currentFieldName, STATS_FIELD)) {
                        List<String> stats = new ArrayList<>();
                        while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                            if (token == XContentParser.Token.VALUE_STRING) {
                                stats.add(parser.text());
                            } else {
                                throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.VALUE_STRING + "] in ["
                                        + currentFieldName + "] but found [" + token + "]", parser.getTokenLocation());
                            }
                        }
                        builder.stats = stats;
                    } else if (context.parseFieldMatcher().match(currentFieldName, _SOURCE_FIELD)) {
                        // "_source" given in array form (include/exclude lists)
                        FetchSourceContext fetchSourceContext = FetchSourceContext.parse(parser, context);
                        builder.fetchSourceContext = fetchSourceContext;
                    } else {
                        throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName + "].",
                                parser.getTokenLocation());
                    }
                } else {
                    throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName + "].",
                            parser.getTokenLocation());
                }
            }
            return builder;
        }

        /**
         * Renders this search source as a complete XContent object
         * (wraps {@link #innerToXContent} in startObject/endObject).
         */
        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject();
            innerToXContent(builder, params);
            builder.endObject();
            return builder;
        }

        /**
         * Writes the fields of this search source into {@code builder} without the
         * surrounding object markers. Only fields that were explicitly set are emitted
         * (sentinels: -1 for from/size/timeout, null for object-typed fields).
         */
        public void innerToXContent(XContentBuilder builder, Params params) throws IOException {
            if (from != -1) {
                builder.field(FROM_FIELD.getPreferredName(), from);
            }
            if (size != -1) {
                builder.field(SIZE_FIELD.getPreferredName(), size);
            }
            if (timeoutInMillis != -1) {
                builder.field(TIMEOUT_FIELD.getPreferredName(), timeoutInMillis);
            }
            if (terminateAfter != SearchContext.DEFAULT_TERMINATE_AFTER) {
                builder.field(TERMINATE_AFTER_FIELD.getPreferredName(), terminateAfter);
            }
            if (queryBuilder != null) {
                builder.field(QUERY_FIELD.getPreferredName(), queryBuilder);
            }
            if (postQueryBuilder != null) {
                builder.field(POST_FILTER_FIELD.getPreferredName(), postQueryBuilder);
            }
            if (minScore != null) {
                builder.field(MIN_SCORE_FIELD.getPreferredName(), minScore);
            }
            if (version != null) {
                builder.field(VERSION_FIELD.getPreferredName(), version);
            }
            if (explain != null) {
                builder.field(EXPLAIN_FIELD.getPreferredName(), explain);
            }
            if (fetchSourceContext != null) {
                builder.field(_SOURCE_FIELD.getPreferredName(), fetchSourceContext);
            }
            if (fieldNames != null) {
                // a single field is emitted as a scalar, several as an array
                // (mirrors the two accepted input shapes in the parser above)
                if (fieldNames.size() == 1) {
                    builder.field(FIELDS_FIELD.getPreferredName(), fieldNames.get(0));
                } else {
                    builder.startArray(FIELDS_FIELD.getPreferredName());
                    for (String fieldName : fieldNames) {
                        builder.value(fieldName);
                    }
                    builder.endArray();
                }
            }
            if (fieldDataFields != null) {
                builder.startArray(FIELDDATA_FIELDS_FIELD.getPreferredName());
                for (String fieldDataField : fieldDataFields) {
                    builder.value(fieldDataField);
                }
                builder.endArray();
            }
            if (scriptFields != null) {
                builder.startObject(SCRIPT_FIELDS_FIELD.getPreferredName());
                for (ScriptField scriptField : scriptFields) {
                    scriptField.toXContent(builder, params);
                }
                builder.endObject();
            }
            if (sorts != null) {
                // sorts were stored as raw bytes; re-parse each and copy it through
                builder.startArray(SORT_FIELD.getPreferredName());
                for (BytesReference sort : sorts) {
                    XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(sort);
                    parser.nextToken();
                    builder.copyCurrentStructure(parser);
                }
                builder.endArray();
            }
            if (trackScores) {
                builder.field(TRACK_SCORES_FIELD.getPreferredName(), true);
            }
            if (indexBoost != null) {
                builder.startObject(INDICES_BOOST_FIELD.getPreferredName());
                assert !indexBoost.containsKey(null);
                // iterate the hash map's backing arrays directly; null keys are empty slots
                final Object[] keys = indexBoost.keys;
                final float[] values = indexBoost.values;
                for (int i = 0; i < keys.length; i++) {
                    if (keys[i] != null) {
                        builder.field((String) keys[i], values[i]);
                    }
                }
                builder.endObject();
            }
            if (aggregations != null) {
                builder.field(AGGREGATIONS_FIELD.getPreferredName());
                builder.startObject();
                for (BytesReference aggregation : aggregations) {
                    XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(aggregation);
                    // two nextToken() calls: skip the wrapping START_OBJECT added at parse
                    // time so only the inner "<name>": {...} entry is copied
                    parser.nextToken();
                    parser.nextToken();
                    builder.copyCurrentStructure(parser);
                }
                builder.endObject();
            }
            if (highlightBuilder != null) {
                builder.field(HIGHLIGHT_FIELD.getPreferredName());
                XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(highlightBuilder);
                parser.nextToken();
                builder.copyCurrentStructure(parser);
            }
            if (innerHitsBuilder != null) {
                builder.field(INNER_HITS_FIELD.getPreferredName());
                XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(innerHitsBuilder);
                parser.nextToken();
                builder.copyCurrentStructure(parser);
            }
            if (suggestBuilder != null) {
                builder.field(SUGGEST_FIELD.getPreferredName());
                XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(suggestBuilder);
                parser.nextToken();
                builder.copyCurrentStructure(parser);
            }
            if (rescoreBuilders != null) {
                builder.startArray(RESCORE_FIELD.getPreferredName());
                for (BytesReference rescoreBuilder : rescoreBuilders) {
                    XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(rescoreBuilder);
                    parser.nextToken();
                    builder.copyCurrentStructure(parser);
                }
                builder.endArray();
            }
            if (stats != null) {
                builder.field(STATS_FIELD.getPreferredName(), stats);
            }
            if (ext != null) {
                builder.field(EXT_FIELD.getPreferredName());
                XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(ext);
                parser.nextToken();
                builder.copyCurrentStructure(parser);
            }
        }

        /**
         * A named script field: field name plus the script that computes it, and
         * whether script failures should be ignored. Immutable; serializable on the
         * transport layer via the PROTOTYPE readFrom pattern.
         */
        public static class ScriptField implements Writeable<ScriptField>, ToXContent {

            // Prototype used only as a readFrom() dispatcher; its own fields are null.
            public static final ScriptField PROTOTYPE = new ScriptField(null, null);

            private final boolean ignoreFailure;
            private final String fieldName;
            private final Script script;

            private ScriptField(String fieldName, Script script) {
                this(fieldName, script, false);
            }

            private ScriptField(String fieldName, Script script, boolean ignoreFailure) {
                this.fieldName = fieldName;
                this.script = script;
                this.ignoreFailure = ignoreFailure;
            }

            public String fieldName() {
                return fieldName;
            }

            public Script script() {
                return script;
            }

            public boolean ignoreFailure() {
                return ignoreFailure;
            }

            @Override
            public ScriptField readFrom(StreamInput in) throws IOException {
                // order: fieldName, script, ignoreFailure — must mirror writeTo below
                return new ScriptField(in.readString(), Script.readScript(in), in.readBoolean());
            }

            @Override
            public void writeTo(StreamOutput out) throws IOException {
                out.writeString(fieldName);
                script.writeTo(out);
                out.writeBoolean(ignoreFailure);
            }

            @Override
            public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
                builder.startObject(fieldName);
                builder.field(SCRIPT_FIELD.getPreferredName(), script);
                builder.field(IGNORE_FAILURE_FIELD.getPreferredName(), ignoreFailure);
                builder.endObject();
                return builder;
            }

            @Override
            public int hashCode() {
                return Objects.hash(fieldName, script, ignoreFailure);
            }

            @Override
            public boolean equals(Object obj) {
                if (obj == null) {
                    return false;
                }
                if (getClass() != obj.getClass()) {
                    return false;
                }
                ScriptField other = (ScriptField) obj;
                return Objects.equals(fieldName, other.fieldName)
                        && Objects.equals(script, other.script)
                        && Objects.equals(ignoreFailure, other.ignoreFailure);
            }
        }

        /**
         * Deserializes a SearchSourceBuilder from the transport stream.
         * Field order here MUST stay in lock-step with {@link #writeTo(StreamOutput)};
         * any divergence silently corrupts the wire format. Optional sections are
         * guarded by a preceding boolean flag.
         */
        @Override
        public SearchSourceBuilder readFrom(StreamInput in) throws IOException {
            SearchSourceBuilder builder = new SearchSourceBuilder();
            if (in.readBoolean()) {
                int size = in.readVInt();
                List<BytesReference> aggregations = new ArrayList<>(size);
                for (int i = 0; i < size; i++) {
                    aggregations.add(in.readBytesReference());
                }
                builder.aggregations = aggregations;
            }
            builder.explain = in.readOptionalBoolean();
            builder.fetchSourceContext = FetchSourceContext.optionalReadFromStream(in);
            boolean hasFieldDataFields = in.readBoolean();
            if (hasFieldDataFields) {
                int size = in.readVInt();
                List<String> fieldDataFields = new ArrayList<>(size);
                for (int i = 0; i < size; i++) {
                    fieldDataFields.add(in.readString());
                }
                builder.fieldDataFields = fieldDataFields;
            }
            boolean hasFieldNames = in.readBoolean();
            if (hasFieldNames) {
                int size = in.readVInt();
                List<String> fieldNames = new ArrayList<>(size);
                for (int i = 0; i < size; i++) {
                    fieldNames.add(in.readString());
                }
                builder.fieldNames = fieldNames;
            }
            builder.from = in.readVInt();
            if (in.readBoolean()) {
                builder.highlightBuilder = in.readBytesReference();
            }
            boolean hasIndexBoost = in.readBoolean();
            if (hasIndexBoost) {
                int size = in.readVInt();
                ObjectFloatHashMap<String> indexBoost = new ObjectFloatHashMap<String>(size);
                for (int i = 0; i < size; i++) {
                    indexBoost.put(in.readString(), in.readFloat());
                }
                builder.indexBoost = indexBoost;
            }
            if (in.readBoolean()) {
                builder.innerHitsBuilder = in.readBytesReference();
            }
            if (in.readBoolean()) {
                builder.minScore = in.readFloat();
            }
            if (in.readBoolean()) {
                builder.postQueryBuilder = in.readQuery();
            }
            if (in.readBoolean()) {
                builder.queryBuilder = in.readQuery();
            }
            if (in.readBoolean()) {
                int size = in.readVInt();
                List<BytesReference> rescoreBuilders = new ArrayList<>();
                for (int i = 0; i < size; i++) {
                    rescoreBuilders.add(in.readBytesReference());
                }
                builder.rescoreBuilders = rescoreBuilders;
            }
            if (in.readBoolean()) {
                int size = in.readVInt();
                List<ScriptField> scriptFields = new ArrayList<>(size);
                for (int i = 0; i < size; i++) {
                    scriptFields.add(ScriptField.PROTOTYPE.readFrom(in));
                }
                builder.scriptFields = scriptFields;
            }
            builder.size = in.readVInt();
            if (in.readBoolean()) {
                int size = in.readVInt();
                List<BytesReference> sorts = new ArrayList<>();
                for (int i = 0; i < size; i++) {
                    sorts.add(in.readBytesReference());
                }
                builder.sorts = sorts;
            }
            if (in.readBoolean()) {
                int size = in.readVInt();
                List<String> stats = new ArrayList<>();
                for (int i = 0; i < size; i++) {
                    stats.add(in.readString());
                }
                builder.stats = stats;
            }
            if (in.readBoolean()) {
                builder.suggestBuilder = in.readBytesReference();
            }
            builder.terminateAfter = in.readVInt();
            builder.timeoutInMillis = in.readLong();
            builder.trackScores = in.readBoolean();
            builder.version = in.readOptionalBoolean();
            if (in.readBoolean()) {
                builder.ext = in.readBytesReference();
            }
            return builder;
        }

        /**
         * Serializes this SearchSourceBuilder to the transport stream.
         * Field order here MUST stay in lock-step with {@link #readFrom(StreamInput)}.
         */
        @Override
        public void writeTo(StreamOutput out) throws IOException {
            boolean hasAggregations = aggregations != null;
            out.writeBoolean(hasAggregations);
            if (hasAggregations) {
                out.writeVInt(aggregations.size());
                for (BytesReference aggregation : aggregations) {
                    out.writeBytesReference(aggregation);
                }
            }
            out.writeOptionalBoolean(explain);
            FetchSourceContext.optionalWriteToStream(fetchSourceContext, out);
            boolean hasFieldDataFields = fieldDataFields != null;
            out.writeBoolean(hasFieldDataFields);
            if (hasFieldDataFields) {
                out.writeVInt(fieldDataFields.size());
                for (String field : fieldDataFields) {
                    out.writeString(field);
                }
            }
            boolean hasFieldNames = fieldNames != null;
            out.writeBoolean(hasFieldNames);
            if (hasFieldNames) {
                out.writeVInt(fieldNames.size());
                for (String field : fieldNames) {
                    out.writeString(field);
                }
            }
            out.writeVInt(from);
            boolean hasHighlightBuilder = highlightBuilder != null;
            out.writeBoolean(hasHighlightBuilder);
            if (hasHighlightBuilder) {
                out.writeBytesReference(highlightBuilder);
            }
            boolean hasIndexBoost = indexBoost != null;
            out.writeBoolean(hasIndexBoost);
            if (hasIndexBoost) {
                out.writeVInt(indexBoost.size());
                for (ObjectCursor<String> key : indexBoost.keys()) {
                    out.writeString(key.value);
                    out.writeFloat(indexBoost.get(key.value));
                }
            }
            boolean hasInnerHitsBuilder = innerHitsBuilder != null;
            out.writeBoolean(hasInnerHitsBuilder);
            if (hasInnerHitsBuilder) {
                out.writeBytesReference(innerHitsBuilder);
            }
            boolean hasMinScore = minScore != null;
            out.writeBoolean(hasMinScore);
            if (hasMinScore) {
                out.writeFloat(minScore);
            }
            boolean hasPostQuery = postQueryBuilder != null;
            out.writeBoolean(hasPostQuery);
            if (hasPostQuery) {
                out.writeQuery(postQueryBuilder);
            }
            boolean hasQuery = queryBuilder != null;
            out.writeBoolean(hasQuery);
            if (hasQuery) {
                out.writeQuery(queryBuilder);
            }
            boolean hasRescoreBuilders = rescoreBuilders != null;
            out.writeBoolean(hasRescoreBuilders);
            if (hasRescoreBuilders) {
                out.writeVInt(rescoreBuilders.size());
                for (BytesReference rescoreBuilder : rescoreBuilders) {
                    out.writeBytesReference(rescoreBuilder);
                }
            }
            boolean hasScriptFields = scriptFields != null;
            out.writeBoolean(hasScriptFields);
            if (hasScriptFields) {
                out.writeVInt(scriptFields.size());
                for (ScriptField scriptField : scriptFields) {
                    scriptField.writeTo(out);
                }
            }
            out.writeVInt(size);
            boolean hasSorts = sorts != null;
            out.writeBoolean(hasSorts);
            if (hasSorts) {
                out.writeVInt(sorts.size());
                for (BytesReference sort : sorts) {
                    out.writeBytesReference(sort);
                }
            }
            boolean hasStats = stats != null;
            out.writeBoolean(hasStats);
            if (hasStats) {
                out.writeVInt(stats.size());
                for (String stat : stats) {
                    out.writeString(stat);
                }
            }
            boolean hasSuggestBuilder = suggestBuilder != null;
            out.writeBoolean(hasSuggestBuilder);
            if (hasSuggestBuilder) {
                out.writeBytesReference(suggestBuilder);
            }
            out.writeVInt(terminateAfter);
            out.writeLong(timeoutInMillis);
            out.writeBoolean(trackScores);
            out.writeOptionalBoolean(version);
            boolean hasExt = ext != null;
            out.writeBoolean(hasExt);
            if (hasExt) {
                out.writeBytesReference(ext);
            }
        }

        @Override
        public int hashCode() {
            // NOTE(review): 'ext' is intentionally-or-accidentally absent here and in
            // equals() below although it is serialized — confirm against upstream.
            return Objects.hash(aggregations, explain, fetchSourceContext, fieldDataFields, fieldNames, from, highlightBuilder, indexBoost,
                    innerHitsBuilder, minScore, postQueryBuilder, queryBuilder, rescoreBuilders, scriptFields, size, sorts, stats, suggestBuilder,
                    terminateAfter, timeoutInMillis, trackScores, version);
        }

        @Override
        public boolean equals(Object obj) {
            if (obj == null) {
                return false;
            }
            if (obj.getClass() != getClass()) {
                return false;
            }
            SearchSourceBuilder other = (SearchSourceBuilder) obj;
            return Objects.equals(aggregations, other.aggregations)
                    && Objects.equals(explain, other.explain)
                    && Objects.equals(fetchSourceContext, other.fetchSourceContext)
                    && Objects.equals(fieldDataFields, other.fieldDataFields)
                    && Objects.equals(fieldNames, other.fieldNames)
                    && Objects.equals(from, other.from)
                    && Objects.equals(highlightBuilder, other.highlightBuilder)
                    && Objects.equals(indexBoost, other.indexBoost)
                    && Objects.equals(innerHitsBuilder, other.innerHitsBuilder)
                    && Objects.equals(minScore, other.minScore)
                    && Objects.equals(postQueryBuilder, other.postQueryBuilder)
                    && Objects.equals(queryBuilder, other.queryBuilder)
                    && Objects.equals(rescoreBuilders, other.rescoreBuilders)
                    && Objects.equals(scriptFields, other.scriptFields)
                    && Objects.equals(size, other.size)
                    && Objects.equals(sorts, other.sorts)
                    && Objects.equals(stats, other.stats)
                    && Objects.equals(suggestBuilder, other.suggestBuilder)
                    && Objects.equals(terminateAfter, other.terminateAfter)
                    && Objects.equals(timeoutInMillis, other.timeoutInMillis)
                    && Objects.equals(trackScores, other.trackScores)
                    && Objects.equals(version, other.version);
        }
    }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.phoenix.iterate; import static com.google.common.base.Preconditions.checkArgument; import static org.apache.phoenix.monitoring.GlobalClientMetrics.GLOBAL_FAILED_QUERY_COUNTER; import java.sql.SQLException; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.concurrent.Callable; import java.util.concurrent.ExecutorService; import java.util.concurrent.Future; import org.apache.phoenix.compile.QueryPlan; import org.apache.phoenix.compile.StatementContext; import org.apache.phoenix.query.ConnectionQueryServices; import org.apache.phoenix.schema.tuple.Tuple; import org.apache.phoenix.util.ServerUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Throwables; /** * ResultIterator that keeps track of the number of records fetched by each {@link PeekingResultIterator} making sure it * asks for records from each iterator in a round-robin fashion. When the iterators have fetched the scan cache size of * records, it submits the iterators to the thread pool to help parallelize the I/O needed to fetch the next batch of * records. 
This iterator assumes that the PeekingResultIterators that it manages are not nested i.e. they directly
 * manage the underlying scanners. This kind of ResultIterator should only be used when one doesn't care about the order
 * in which records are returned.
 */
public class RoundRobinResultIterator implements ResultIterator {

    private static final Logger LOGGER = LoggerFactory.getLogger(RoundRobinResultIterator.class);

    // Number of records an iterator may serve before it is considered "cache
    // exhausted" (scanner cache size - 1, see getThreshold()).
    private final int threshold;

    // How many of the currently open iterators have hit the threshold.
    private int numScannersCacheExhausted = 0;
    private ResultIterators resultIterators;

    // Iterators currently being served round-robin; pruned as scanners run dry.
    private List<RoundRobinIterator> openIterators = new ArrayList<>();

    // Position of the next iterator to serve from, always kept in [0, openIterators.size()).
    private int index;
    private boolean closed;
    private final QueryPlan plan;

    // For testing purposes
    private int numParallelFetches;

    public RoundRobinResultIterator(ResultIterators iterators, QueryPlan plan) {
        this.resultIterators = iterators;
        this.plan = plan;
        this.threshold = getThreshold();
    }

    public RoundRobinResultIterator(List<PeekingResultIterator> iterators, QueryPlan plan) {
        this.resultIterators = null;
        this.plan = plan;
        this.threshold = getThreshold();
        initOpenIterators(wrapToRoundRobinIterators(iterators));
    }

    /**
     * Factory that avoids the round-robin machinery for the trivial cases:
     * no iterators (empty result) and a single iterator (returned as-is).
     */
    public static ResultIterator newIterator(final List<PeekingResultIterator> iterators, QueryPlan plan) {
        if (iterators.isEmpty()) {
            return EMPTY_ITERATOR;
        }
        if (iterators.size() == 1) {
            return iterators.get(0);
        }
        return new RoundRobinResultIterator(iterators, plan);
    }

    /**
     * Returns the next tuple, taking one record at a time from each open iterator
     * in round-robin order. Iterators that hit the threshold are skipped until the
     * whole set is exhausted (at which point getIterators() refills in parallel);
     * iterators whose scanner runs dry are closed and removed.
     */
    @Override
    public Tuple next() throws SQLException {
        List<RoundRobinIterator> iterators;
        int size;
        while ((size = (iterators = getIterators()).size()) > 0) {
            // re-normalize index: the list may have shrunk since last call
            index = index % size;
            RoundRobinIterator itr = iterators.get(index);
            if (itr.getNumRecordsRead() < threshold) {
                Tuple tuple;
                if ((tuple = itr.peek()) != null) {
                    tuple = itr.next();
                    if (itr.getNumRecordsRead() == threshold) {
                        numScannersCacheExhausted++;
                    }
                    index = (index + 1) % size;
                    return tuple;
                } else {
                    // The underlying scanner is exhausted. Close the iterator and un-track it.
                    itr.close();
                    iterators.remove(index);
                    if (iterators.size() == 0) {
                        close();
                    }
                }
            } else {
                index = (index + 1) % size;
            }
        }
        return null;
    }

    /**
     * Idempotent close. Closes the iterator source first, then every open
     * iterator, chaining any secondary failures via setNextException so the
     * first error is the one thrown.
     */
    @Override
    public void close() throws SQLException {
        if (closed) {
            return;
        }
        closed = true;
        SQLException toThrow = null;
        try {
            if (resultIterators != null) {
                resultIterators.close();
            }
        } catch (Exception e) {
            toThrow = ServerUtil.parseServerException(e);
        } finally {
            try {
                if (openIterators.size() > 0) {
                    for (RoundRobinIterator itr : openIterators) {
                        try {
                            itr.close();
                        } catch (Exception e) {
                            if (toThrow == null) {
                                toThrow = ServerUtil.parseServerException(e);
                            } else {
                                toThrow.setNextException(ServerUtil.parseServerException(e));
                            }
                        }
                    }
                }
            } finally {
                if (toThrow != null) {
                    throw toThrow;
                }
            }
        }
    }

    @Override
    public void explain(List<String> planSteps) {
        if (resultIterators != null) {
            resultIterators.explain(planSteps);
        }
    }

    @VisibleForTesting
    int getNumberOfParallelFetches() {
        return numParallelFetches;
    }

    @VisibleForTesting
    QueryPlan getQueryPlan() {
        return plan;
    }

    /**
     * Returns the live set of round-robin iterators, lazily initializing it on
     * first use and refilling all scanner caches in parallel once every open
     * iterator has hit the threshold.
     */
    private List<RoundRobinIterator> getIterators() throws SQLException {
        if (closed) {
            return Collections.emptyList();
        }
        if (openIterators.size() > 0 && openIterators.size() == numScannersCacheExhausted) {
            /*
             * All the scanners have exhausted their cache. Submit the scanners back to the pool so that they can fetch
             * the next batch of records in parallel.
             */
            initOpenIterators(fetchNextBatch());
        } else if (openIterators.size() == 0 && resultIterators != null) {
            List<PeekingResultIterator> iterators = resultIterators.getIterators();
            initOpenIterators(wrapToRoundRobinIterators(iterators));
        }
        return openIterators;
    }

    private List<RoundRobinIterator> wrapToRoundRobinIterators(List<PeekingResultIterator> iterators) {
        List<RoundRobinIterator> roundRobinItrs = new ArrayList<>(iterators.size());
        for (PeekingResultIterator itr : iterators) {
            roundRobinItrs.add(new RoundRobinIterator(itr, null));
        }
        return roundRobinItrs;
    }

    // Resets the round-robin state for a fresh batch of iterators.
    private void initOpenIterators(List<RoundRobinIterator> iterators) {
        openIterators.clear();
        openIterators.addAll(iterators);
        index = 0;
        numScannersCacheExhausted = 0;
    }

    private int getThreshold() {
        int cacheSize = getScannerCacheSize();
        checkArgument(cacheSize > 1, "RoundRobinResultIterator doesn't work when cache size is less than or equal to 1");
        return cacheSize - 1;
    }

    private int getScannerCacheSize() {
        try {
            return plan.getContext().getStatement().getFetchSize();
        } catch (Throwable e) {
            // rethrows unconditionally (wrapped if checked), so the fallthrough below never runs
            Throwables.propagate(e);
        }
        return -1; // unreachable
    }

    /**
     * Submits one next() call per open iterator to the connection's executor so
     * each scanner refills its cache in parallel, then rebuilds the iterator list
     * with the pre-fetched tuple buffered. Iterators whose scanner came back empty
     * are closed and dropped. On any failure this iterator is closed and the
     * global failed-query counter is incremented.
     */
    private List<RoundRobinIterator> fetchNextBatch() throws SQLException {
        int numExpectedIterators = openIterators.size();
        List<Future<Tuple>> futures = new ArrayList<>(numExpectedIterators);
        List<RoundRobinIterator> results = new ArrayList<>();
        // Randomize the order in which we will be hitting region servers to try not overload particular region servers.
        Collections.shuffle(openIterators);
        boolean success = false;
        SQLException toThrow = null;
        try {
            StatementContext context = plan.getContext();
            final ConnectionQueryServices services = context.getConnection().getQueryServices();
            ExecutorService executor = services.getExecutor();
            numParallelFetches++;
            if (LOGGER.isDebugEnabled()) {
                LOGGER.debug("Performing parallel fetch for " + openIterators.size() + " iterators. ");
            }
            for (final RoundRobinIterator itr : openIterators) {
                Future<Tuple> future = executor.submit(new Callable<Tuple>() {
                    @Override
                    public Tuple call() throws Exception {
                        // Read the next record to refill the scanner's cache.
                        return itr.next();
                    }
                });
                futures.add(future);
            }
            // futures are in the same (shuffled) order as openIterators, so index i
            // pairs each result with its source iterator
            int i = 0;
            for (Future<Tuple> future : futures) {
                Tuple tuple = future.get();
                if (tuple != null) {
                    results.add(new RoundRobinIterator(openIterators.get(i).delegate, tuple));
                } else {
                    // Underlying scanner is exhausted. So close it.
                    openIterators.get(i).close();
                }
                i++;
            }
            success = true;
            return results;
        } catch (SQLException e) {
            toThrow = e;
        } catch (Exception e) {
            toThrow = ServerUtil.parseServerException(e);
        } finally {
            try {
                if (!success) {
                    try {
                        close();
                    } catch (Exception e) {
                        if (toThrow == null) {
                            toThrow = ServerUtil.parseServerException(e);
                        } else {
                            toThrow.setNextException(ServerUtil.parseServerException(e));
                        }
                    }
                }
            } finally {
                if (toThrow != null) {
                    GLOBAL_FAILED_QUERY_COUNTER.increment();
                    throw toThrow;
                }
            }
        }
        return null; // Not reachable
    }

    /**
     * Inner class that delegates to {@link PeekingResultIterator} keeping track of the number of records it has read.
     * Also keeps track of the tuple the {@link PeekingResultIterator} read in the previous next() call before it ran
     * out of underlying scanner cache (the tuple pre-fetched by fetchNextBatch()).
     */
    // NOTE(review): this class uses no state of the enclosing instance and could be
    // declared 'static' to drop the hidden outer reference — confirm and change upstream.
    private class RoundRobinIterator implements PeekingResultIterator {

        private PeekingResultIterator delegate;
        // Tuple pre-fetched during the parallel batch refill; served before
        // touching the delegate again.
        private Tuple tuple;
        private int numRecordsRead;

        private RoundRobinIterator(PeekingResultIterator itr, Tuple tuple) {
            this.delegate = itr;
            this.tuple = tuple;
            this.numRecordsRead = 0;
        }

        @Override
        public void close() throws SQLException {
            delegate.close();
        }

        @Override
        public Tuple next() throws SQLException {
            if (tuple != null) {
                // serve the buffered pre-fetched tuple first; note it does not
                // bump numRecordsRead (it was read by the previous batch refill)
                Tuple t = tuple;
                tuple = null;
                return t;
            }
            numRecordsRead++;
            return delegate.next();
        }

        @Override
        public void explain(List<String> planSteps) {
            delegate.explain(planSteps);
        }

        @Override
        public Tuple peek() throws SQLException {
            if (tuple != null) {
                return tuple;
            }
            return delegate.peek();
        }

        public int getNumRecordsRead() {
            return numRecordsRead;
        }
    }
}
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.sql.planner; import com.facebook.presto.spi.predicate.Domain; import com.facebook.presto.sql.planner.assertions.BasePlanTest; import com.facebook.presto.sql.planner.optimizations.AddLocalExchanges; import com.facebook.presto.sql.planner.plan.AggregationNode; import com.facebook.presto.sql.planner.plan.ApplyNode; import com.facebook.presto.sql.planner.plan.DistinctLimitNode; import com.facebook.presto.sql.planner.plan.EnforceSingleRowNode; import com.facebook.presto.sql.planner.plan.IndexJoinNode; import com.facebook.presto.sql.planner.plan.JoinNode; import com.facebook.presto.sql.planner.plan.LateralJoinNode; import com.facebook.presto.sql.planner.plan.PlanNode; import com.facebook.presto.sql.planner.plan.SemiJoinNode; import com.facebook.presto.sql.planner.plan.ValuesNode; import com.facebook.presto.tests.QueryTemplate; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import org.testng.annotations.Test; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.function.Predicate; import static com.facebook.presto.spi.predicate.Domain.singleValue; import static com.facebook.presto.spi.type.VarcharType.createVarcharType; import static com.facebook.presto.sql.planner.assertions.PlanMatchPattern.aggregation; import static com.facebook.presto.sql.planner.assertions.PlanMatchPattern.any; import static 
com.facebook.presto.sql.planner.assertions.PlanMatchPattern.anyTree; import static com.facebook.presto.sql.planner.assertions.PlanMatchPattern.apply; import static com.facebook.presto.sql.planner.assertions.PlanMatchPattern.constrainedTableScan; import static com.facebook.presto.sql.planner.assertions.PlanMatchPattern.equiJoinClause; import static com.facebook.presto.sql.planner.assertions.PlanMatchPattern.expression; import static com.facebook.presto.sql.planner.assertions.PlanMatchPattern.filter; import static com.facebook.presto.sql.planner.assertions.PlanMatchPattern.functionCall; import static com.facebook.presto.sql.planner.assertions.PlanMatchPattern.join; import static com.facebook.presto.sql.planner.assertions.PlanMatchPattern.lateral; import static com.facebook.presto.sql.planner.assertions.PlanMatchPattern.node; import static com.facebook.presto.sql.planner.assertions.PlanMatchPattern.project; import static com.facebook.presto.sql.planner.assertions.PlanMatchPattern.semiJoin; import static com.facebook.presto.sql.planner.assertions.PlanMatchPattern.strictTableScan; import static com.facebook.presto.sql.planner.assertions.PlanMatchPattern.tableScan; import static com.facebook.presto.sql.planner.optimizations.PlanNodeSearcher.searchFrom; import static com.facebook.presto.sql.planner.optimizations.Predicates.isInstanceOfAny; import static com.facebook.presto.sql.planner.plan.JoinNode.Type.INNER; import static com.facebook.presto.sql.planner.plan.JoinNode.Type.LEFT; import static com.facebook.presto.tests.QueryTemplate.queryTemplate; import static io.airlift.slice.Slices.utf8Slice; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertFalse; public class TestLogicalPlanner extends BasePlanTest { @Test public void testDistinctLimitOverInequalityJoin() throws Exception { assertPlan("SELECT DISTINCT o.orderkey FROM orders o JOIN lineitem l ON o.orderkey < l.orderkey LIMIT 1", anyTree( node(DistinctLimitNode.class, anyTree( 
// NOTE(review): this span opens inside testDistinctLimitOverInequalityJoin's plan
// assertion; the assertPlan(...) call, anyTree/node wrappers and the method header
// are above this chunk.
                                join(INNER,
                                        ImmutableList.of(),
                                        Optional.of("O_ORDERKEY < L_ORDERKEY"),
                                        tableScan("orders", ImmutableMap.of("O_ORDERKEY", "orderkey")),
                                        any(tableScan("lineitem", ImmutableMap.of("L_ORDERKEY", "orderkey"))))
                                        .withExactOutputs(ImmutableList.of("O_ORDERKEY"))))));
    }

    // Plain equi-join between two tables plans as a single INNER JoinNode.
    @Test
    public void testJoin() {
        assertPlan("SELECT o.orderkey FROM orders o, lineitem l WHERE l.orderkey = o.orderkey",
                anyTree(
                        join(INNER, ImmutableList.of(equiJoinClause("ORDERS_OK", "LINEITEM_OK")),
                                any(
                                        tableScan("orders", ImmutableMap.of("ORDERS_OK", "orderkey"))),
                                anyTree(
                                        tableScan("lineitem", ImmutableMap.of("LINEITEM_OK", "orderkey"))))));
    }

    // Ordering by the join key must not change the join shape.
    @Test
    public void testJoinWithOrderBySameKey() {
        assertPlan("SELECT o.orderkey FROM orders o, lineitem l WHERE l.orderkey = o.orderkey ORDER BY l.orderkey ASC, o.orderkey ASC",
                anyTree(
                        join(INNER, ImmutableList.of(equiJoinClause("ORDERS_OK", "LINEITEM_OK")),
                                any(
                                        tableScan("orders", ImmutableMap.of("ORDERS_OK", "orderkey"))),
                                anyTree(
                                        tableScan("lineitem", ImmutableMap.of("LINEITEM_OK", "orderkey"))))));
    }

    @Test
    public void testUncorrelatedSubqueries() {
        // scalar subquery: planned as a join against an EnforceSingleRowNode
        assertPlan("SELECT * FROM orders WHERE orderkey = (SELECT orderkey FROM lineitem ORDER BY orderkey LIMIT 1)",
                anyTree(
                        join(INNER, ImmutableList.of(equiJoinClause("X", "Y")),
                                project(
                                        tableScan("orders", ImmutableMap.of("X", "orderkey"))),
                                project(
                                        node(EnforceSingleRowNode.class,
                                                anyTree(
                                                        tableScan("lineitem", ImmutableMap.of("Y", "orderkey")))))));
        // IN subquery: planned as a semi-join producing marker symbol S
        assertPlan("SELECT * FROM orders WHERE orderkey IN (SELECT orderkey FROM lineitem WHERE linenumber % 4 = 0)",
                anyTree(
                        filter("S",
                                project(
                                        semiJoin("X", "Y", "S",
                                                anyTree(
                                                        tableScan("orders", ImmutableMap.of("X", "orderkey"))),
                                                anyTree(
                                                        tableScan("lineitem", ImmutableMap.of("Y", "orderkey"))))))));
        // NOT IN subquery: same semi-join with the marker negated in the filter
        assertPlan("SELECT * FROM orders WHERE orderkey NOT IN (SELECT orderkey FROM lineitem WHERE linenumber < 0)",
                anyTree(
                        filter("NOT S",
                                project(
                                        semiJoin("X", "Y", "S",
                                                anyTree(
                                                        tableScan("orders", ImmutableMap.of("X", "orderkey"))),
                                                anyTree(
                                                        tableScan("lineitem", ImmutableMap.of("Y", "orderkey"))))))));
    }

    // The WHERE predicate on nation.name is expected to be pushed through the
    // equi-join condition onto both sides' table scans as a constraint.
    @Test
    public void testPushDownJoinConditionConjunctsToInnerSideBasedOnInheritedPredicate() {
        Map<String, Domain> tableScanConstraint = ImmutableMap.<String, Domain>builder()
                .put("name", singleValue(createVarcharType(25), utf8Slice("blah")))
                .build();
        assertPlan(
                "SELECT nationkey FROM nation LEFT OUTER JOIN region " +
                        "ON nation.regionkey = region.regionkey and nation.name = region.name WHERE nation.name = 'blah'",
                anyTree(
                        join(LEFT,
                                ImmutableList.of(equiJoinClause("NATION_NAME", "REGION_NAME"), equiJoinClause("NATION_REGIONKEY", "REGION_REGIONKEY")),
                                anyTree(
                                        constrainedTableScan("nation", tableScanConstraint, ImmutableMap.of(
                                                "NATION_NAME", "name",
                                                "NATION_REGIONKEY", "regionkey"))),
                                anyTree(
                                        constrainedTableScan("region", tableScanConstraint, ImmutableMap.of(
                                                "REGION_NAME", "name",
                                                "REGION_REGIONKEY", "regionkey"))))));
    }

    @Test
    public void testSameScalarSubqueryIsAppliedOnlyOnce() {
        // three subqueries with two duplicates (coerced to two different types), only two scalar joins should be in plan
        assertEquals(
                countOfMatchingNodes(
                        plan("SELECT * FROM orders WHERE CAST(orderkey AS INTEGER) = (SELECT 1) AND custkey = (SELECT 2) AND CAST(custkey as REAL) != (SELECT 1)"),
                        EnforceSingleRowNode.class::isInstance),
                2);
        // same query used for left, right and complex join condition
        assertEquals(
                countOfMatchingNodes(
                        plan("SELECT * FROM orders o1 JOIN orders o2 ON o1.orderkey = (SELECT 1) AND o2.orderkey = (SELECT 1) AND o1.orderkey + o2.orderkey = (SELECT 1)"),
                        EnforceSingleRowNode.class::isInstance),
                1);
    }

    @Test
    public void testSameInSubqueryIsAppliedOnlyOnce() {
        // same IN query used for left, right and complex condition
        assertEquals(
                countOfMatchingNodes(
                        plan("SELECT * FROM orders o1 JOIN orders o2 ON o1.orderkey IN (SELECT 1) AND (o1.orderkey IN (SELECT 1) OR o1.orderkey IN (SELECT 1))"),
                        SemiJoinNode.class::isInstance),
                1);
        // one subquery used for "1 IN (SELECT 1)", one subquery used for "2 IN (SELECT 1)"
        assertEquals(
                countOfMatchingNodes(
                        plan("SELECT 1 IN (SELECT 1), 2 IN (SELECT 1) WHERE 1 IN (SELECT 1)"),
                        SemiJoinNode.class::isInstance),
                2);
    }

    // Duplicate quantified (ALL) subqueries should be planned once; the rewrite uses an
    // AggregationNode per distinct subquery, so its count is the dedup signal here.
    @Test
    public void testSameQualifiedSubqueryIsAppliedOnlyOnce()
    {
        // same ALL query used for left, right and complex condition
        assertEquals(
                countOfMatchingNodes(
                        plan("SELECT * FROM orders o1 JOIN orders o2 ON o1.orderkey <= ALL(SELECT 1) AND (o1.orderkey <= ALL(SELECT 1) OR o1.orderkey <= ALL(SELECT 1))"),
                        AggregationNode.class::isInstance),
                1);

        // one subquery used for "1 <= ALL(SELECT 1)", one subquery used for "2 <= ALL(SELECT 1)"
        assertEquals(
                countOfMatchingNodes(
                        plan("SELECT 1 <= ALL(SELECT 1), 2 <= ALL(SELECT 1) WHERE 1 <= ALL(SELECT 1)"),
                        AggregationNode.class::isInstance),
                2);
    }

    // Counts plan nodes satisfying the predicate anywhere in the plan tree.
    private static int countOfMatchingNodes(Plan plan, Predicate<PlanNode> predicate)
    {
        return searchFrom(plan.getRoot()).where(predicate).count();
    }

    @Test
    public void testRemoveUnreferencedScalarInputApplyNodes()
    {
        assertPlanContainsNoApplyOrAnyJoin("SELECT (SELECT 1)");
    }

    // Subqueries whose results are never referenced should be pruned entirely,
    // leaving no Apply or join nodes of any kind in the optimized plan.
    @Test
    public void testSubqueryPruning()
    {
        List<QueryTemplate.Parameter> subqueries = QueryTemplate.parameter("subquery").of(
                "orderkey IN (SELECT orderkey FROM lineitem WHERE orderkey % 2 = 0)",
                "EXISTS(SELECT orderkey FROM lineitem WHERE orderkey % 2 = 0)",
                "0 = (SELECT orderkey FROM lineitem WHERE orderkey % 2 = 0)");

        queryTemplate("SELECT COUNT(*) FROM (SELECT %subquery% FROM orders)")
                .replaceAll(subqueries)
                .forEach(this::assertPlanContainsNoApplyOrAnyJoin);

        // TODO enable when pruning apply nodes works for this kind of query
        // assertPlanContainsNoApplyOrAnyJoin("SELECT * FROM orders WHERE true OR " + subquery);
    }

    // Only the single referenced output column should survive above the join.
    @Test
    public void testJoinOutputPruning()
    {
        assertPlan("SELECT nationkey FROM nation JOIN region ON nation.regionkey = region.regionkey",
                anyTree(
                        join(INNER, ImmutableList.of(equiJoinClause("REGIONKEY_LEFT", "REGIONKEY_RIGHT")),
                                anyTree(
                                        tableScan("nation", ImmutableMap.of("REGIONKEY_LEFT", "regionkey", "NATIONKEY", "nationkey"))),
                                anyTree(
                                        tableScan("region",
                                                ImmutableMap.of("REGIONKEY_RIGHT", "regionkey"))))
                )
                        .withNumberOfOutputColumns(1)
                        .withOutputs(ImmutableList.of("NATIONKEY"))
        );
    }

    // Asserts the optimized plan contains no Apply node and no join node of any flavor.
    private void assertPlanContainsNoApplyOrAnyJoin(String sql)
    {
        assertFalse(
                searchFrom(plan(sql, LogicalPlanner.Stage.OPTIMIZED).getRoot())
                        .where(isInstanceOfAny(ApplyNode.class, JoinNode.class, IndexJoinNode.class, SemiJoinNode.class, LateralJoinNode.class))
                        .matches(),
                "Unexpected node for query: " + sql);
    }

    // Correlated scalar subquery becomes a lateral join over an EnforceSingleRowNode.
    // AddLocalExchanges is excluded so the logical shape is matched without exchange noise.
    @Test
    public void testCorrelatedSubqueries()
    {
        assertPlanWithOptimizerFiltering(
                "SELECT orderkey FROM orders WHERE 3 = (SELECT orderkey)",
                LogicalPlanner.Stage.OPTIMIZED,
                anyTree(
                        filter("BIGINT '3' = X",
                                lateral(
                                        ImmutableList.of("X"),
                                        tableScan("orders", ImmutableMap.of("X", "orderkey")),
                                        node(EnforceSingleRowNode.class,
                                                project(
                                                        node(ValuesNode.class)
                                                ))))),
                planOptimizer -> !(planOptimizer instanceof AddLocalExchanges));
    }

    /**
     * Handling of correlated IN pulls up everything possible to the generated outer join condition.
     * This test ensures uncorrelated conditions are pushed back down.
     */
    @Test
    public void testCorrelatedInUncorrelatedFiltersPushDown()
    {
        assertPlan(
                "SELECT orderkey, comment IN (SELECT clerk FROM orders s WHERE s.orderkey = o.orderkey AND s.orderkey < 7) FROM lineitem o",
                anyTree(
                        node(JoinNode.class,
                                anyTree(tableScan("lineitem")),
                                anyTree(
                                        filter("orderkey < BIGINT '7'", // pushed down
                                                tableScan("orders", ImmutableMap.of("orderkey", "orderkey"))
                                        )
                                )
                        )
                )
        );
    }

    /**
     * Handling of correlated in predicate involves group by over all symbols from source. Once aggregation is added to the plan,
     * it prevents pruning of the unreferenced symbols. However, the aggregation's result doesn't actually depended on those symbols
     * and this test makes sure the symbols are pruned first.
     */
    @Test
    public void testSymbolsPrunedInCorrelatedInPredicateSource()
    {
        assertPlan(
                "SELECT orderkey, comment IN (SELECT clerk FROM orders s WHERE s.orderkey = o.orderkey AND s.orderkey < 7) FROM lineitem o",
                anyTree(
                        node(JoinNode.class,
                                // strictTableScan: exactly these two columns and nothing else
                                anyTree(strictTableScan("lineitem", ImmutableMap.of(
                                        "orderkey", "orderkey",
                                        "comment", "comment"))),
                                anyTree(tableScan("orders"))
                        )
                )
        );
    }

    // A correlated IN whose inner query itself contains a correlated scalar subquery:
    // expect an Apply node for the IN plus a lateral join for the nested subquery.
    @Test
    public void testDoubleNestedCorrelatedSubqueries()
    {
        assertPlanWithOptimizerFiltering(
                "SELECT orderkey FROM orders o " +
                        "WHERE 3 IN (SELECT o.custkey FROM lineitem l WHERE (SELECT l.orderkey = o.orderkey))",
                LogicalPlanner.Stage.OPTIMIZED,
                anyTree(
                        filter("OUTER_FILTER",
                                apply(ImmutableList.of("C", "O"),
                                        ImmutableMap.of("OUTER_FILTER", expression("THREE IN (C)")),
                                        project(ImmutableMap.of("THREE", expression("BIGINT '3'")),
                                                tableScan("orders", ImmutableMap.of(
                                                        "O", "orderkey",
                                                        "C", "custkey"))),
                                        anyTree(
                                                lateral(
                                                        ImmutableList.of("L"),
                                                        tableScan("lineitem", ImmutableMap.of("L", "orderkey")),
                                                        node(EnforceSingleRowNode.class,
                                                                project(
                                                                        node(ValuesNode.class)
                                                                ))))))),
                planOptimizer -> !(planOptimizer instanceof AddLocalExchanges));
    }

    // EXISTS over a correlated filter is rewritten to a LEFT join followed by a
    // partial/final count aggregation and a count > 0 filter.
    @Test
    public void testCorrelatedScalarAggregationRewriteToLeftOuterJoin()
    {
        assertPlan(
                "SELECT orderkey FROM orders WHERE EXISTS(SELECT 1 WHERE orderkey = 3)", // EXISTS maps to count(*) > 0
                anyTree(
                        filter("FINAL_COUNT > BIGINT '0'",
                                any(
                                        aggregation(ImmutableMap.of("FINAL_COUNT", functionCall("count", ImmutableList.of("PARTIAL_COUNT"))),
                                                any(
                                                        aggregation(ImmutableMap.of("PARTIAL_COUNT", functionCall("count", ImmutableList.of("NON_NULL"))),
                                                                any(
                                                                        join(LEFT, ImmutableList.of(), Optional.of("BIGINT '3' = ORDERKEY"),
                                                                                any(
                                                                                        tableScan("orders", ImmutableMap.of("ORDERKEY", "orderkey"))),
                                                                                project(ImmutableMap.of("NON_NULL", expression("true")),
                                                                                        node(ValuesNode.class))))))))))));
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.tajo.engine.planner;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Joiner;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.ContentSummary;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.tajo.algebra.*;
import org.apache.tajo.algebra.CreateTable.ColumnDefinition;
import org.apache.tajo.catalog.*;
import org.apache.tajo.catalog.partition.PartitionMethodDesc;
import org.apache.tajo.catalog.proto.CatalogProtos;
import org.apache.tajo.common.TajoDataTypes;
import org.apache.tajo.datum.NullDatum;
import org.apache.tajo.engine.eval.*;
import org.apache.tajo.engine.exception.VerifyException;
import org.apache.tajo.engine.planner.LogicalPlan.QueryBlock;
import org.apache.tajo.engine.planner.logical.*;
import org.apache.tajo.engine.planner.rewrite.ProjectionPushDownRule;
import org.apache.tajo.engine.utils.SchemaUtil;
import org.apache.tajo.util.TUtil;

import java.util.*;

import static org.apache.tajo.algebra.CreateTable.PartitionType;
import static org.apache.tajo.engine.planner.ExprNormalizer.ExprNormalizedResult;
import static org.apache.tajo.engine.planner.LogicalPlan.BlockType;
import static org.apache.tajo.engine.planner.LogicalPlanPreprocessor.PreprocessContext;

/**
 * This class creates a logical plan from a nested tajo algebra expression ({@link org.apache.tajo.algebra})
 */
public class LogicalPlanner extends BaseAlgebraVisitor<LogicalPlanner.PlanContext, LogicalNode> {
  private static Log LOG = LogFactory.getLog(LogicalPlanner.class);
  private final CatalogService catalog;
  private final LogicalPlanPreprocessor preprocessor;
  private final ExprAnnotator exprAnnotator;
  private final ExprNormalizer normalizer;

  public LogicalPlanner(CatalogService catalog) {
    this.catalog = catalog;
    this.exprAnnotator = new ExprAnnotator(catalog);
    this.preprocessor = new LogicalPlanPreprocessor(catalog, exprAnnotator);
    this.normalizer = new ExprNormalizer();
  }

  /**
   * Per-visit state threaded through the visitor: the plan under construction
   * and the query block currently being planned.
   */
  public class PlanContext {
    // the plan being built
    LogicalPlan plan;

    // transient data for each query block
    QueryBlock queryBlock;

    // when true, raw (un-referenced) targets are also recorded for debugging/unit tests
    boolean debugOrUnitTests;

    public PlanContext(LogicalPlan plan, QueryBlock block, boolean debugOrUnitTests) {
      this.plan = plan;
      this.queryBlock = block;
      this.debugOrUnitTests = debugOrUnitTests;
    }

    // Copy constructor that switches to a different query block but keeps the same plan/flags.
    public PlanContext(PlanContext context, QueryBlock block) {
      this.plan = context.plan;
      this.queryBlock = block;
      this.debugOrUnitTests = context.debugOrUnitTests;
    }

    public String toString() {
      return "block=" + queryBlock.getName() + ", relNum=" + queryBlock.getRelations().size() + ", "
          + queryBlock.namedExprsMgr.toString();
    }
  }

  /**
   * This generates a logical plan.
   *
   * @param expr A relational algebraic expression for a query.
   * @return A logical plan
   */
  public LogicalPlan createPlan(Expr expr) throws PlanningException {
    return createPlan(expr, false);
  }

  /**
   * Same as {@link #createPlan(Expr)} but with an explicit debug flag; when {@code debug}
   * is true, raw targets are retained in the blocks for inspection by unit tests.
   */
  @VisibleForTesting
  public LogicalPlan createPlan(Expr expr, boolean debug) throws PlanningException {
    LogicalPlan plan = new LogicalPlan(this);

    // Preprocess pass: registers blocks/relations before the main visit.
    QueryBlock rootBlock = plan.newAndGetBlock(LogicalPlan.ROOT_BLOCK);
    PreprocessContext preProcessorCtx = new PreprocessContext(plan, rootBlock);
    preprocessor.visit(preProcessorCtx, new Stack<Expr>(), expr);

    // Main pass: build the logical node tree.
    PlanContext context = new PlanContext(plan, plan.getRootBlock(), debug);
    LogicalNode topMostNode = this.visit(context, new Stack<Expr>(), expr);

    // Add Root Node
    LogicalRootNode root = plan.createNode(LogicalRootNode.class);
    root.setInSchema(topMostNode.getOutSchema());
    root.setChild(topMostNode);
    root.setOutSchema(topMostNode.getOutSchema());
    plan.getRootBlock().setRoot(root);

    return plan;
  }

  public ExprAnnotator getExprAnnotator() {
    return this.exprAnnotator;
  }

  /** Visitor pre-hook: records the expression about to be planned in the current block. */
  public void preHook(PlanContext context, Stack<Expr> stack, Expr expr) throws PlanningException {
    context.queryBlock.updateCurrentNode(expr);
  }

  /**
   * Visitor post-hook: registers the freshly built node in the block and maintains
   * the block's root/current-node bookkeeping.
   */
  public LogicalNode postHook(PlanContext context, Stack<Expr> stack, Expr expr, LogicalNode current)
      throws PlanningException {

    // Some generated logical nodes (e.g., implicit aggregation) without exprs will pass NULL as a expr parameter.
    // We should skip them.
    if (expr != null) {
      // A relation list including a single ScanNode will return a ScanNode instance that already passed postHook.
      // So, it skips the already-visited ScanNode instance.
      if (expr.getType() == OpType.RelationList && current.getType() == NodeType.SCAN) {
        return current;
      }
    }

    QueryBlock queryBlock = context.queryBlock;
    queryBlock.updateLatestNode(current);

    // if this node is the topmost
    if (stack.size() == 0) {
      queryBlock.setRoot(current);
    }

    if (!stack.empty()) {
      queryBlock.updateCurrentNode(stack.peek());
    }
    return current;
  }

  /*===============================================================================================
    Data Manipulation Language (DML) SECTION
   ===============================================================================================*/

  /*===============================================================================================
    PROJECTION SECTION
   ===============================================================================================*/

  /**
   * Plans a projection (SELECT list): pre-registers targets, visits the child,
   * inserts implicit group-by / distinct operators when required, and verifies
   * that every projected field is resolvable.
   */
  @Override
  public LogicalNode visitProjection(PlanContext context, Stack<Expr> stack, Projection projection)
      throws PlanningException {

    LogicalPlan plan = context.plan;
    QueryBlock block = context.queryBlock;

    // If a non-from statement is given
    if (!projection.hasChild()) {
      return buildPlanForNoneFromStatement(context, stack, projection);
    }

    String [] referenceNames;
    // in prephase, insert all target list into NamedExprManagers.
    // Then it gets reference names, each of which points an expression in target list.
    referenceNames = doProjectionPrephase(context, projection);

    ////////////////////////////////////////////////////////
    // Visit and Build Child Plan
    ////////////////////////////////////////////////////////
    stack.push(projection);
    LogicalNode child = visit(context, stack, projection.getChild());

    // check if it is implicit aggregation. If so, it inserts group-by node to its child.
    if (block.isAggregationRequired()) {
      child = insertGroupbyNode(context, child, stack);
    }
    stack.pop();
    ////////////////////////////////////////////////////////

    ProjectionNode projectionNode;
    Target [] targets;
    targets = buildTargets(plan, block, referenceNames);

    // Set ProjectionNode
    projectionNode = context.queryBlock.getNodeFromExpr(projection);
    projectionNode.setInSchema(child.getOutSchema());
    projectionNode.setTargets(targets);
    projectionNode.setChild(child);

    if (projection.isDistinct() && block.hasNode(NodeType.GROUP_BY)) {
      throw new VerifyException("Cannot support grouping and distinct at the same time yet");
    } else {
      if (projection.isDistinct()) {
        // SELECT DISTINCT without GROUP BY: duplicate elimination via an inserted group-by.
        insertDistinctOperator(context, projectionNode, child, stack);
      }
    }

    // It's for debugging and unit tests purpose.
    // It sets raw targets, all of them are raw expressions instead of references.
    if (context.debugOrUnitTests) {
      setRawTargets(context, targets, referenceNames, projection);
    }

    verifyProjectedFields(block, projectionNode);
    return projectionNode;
  }

  /**
   * Re-annotates the projection's expressions as raw (unreferenced) targets and stores
   * them on the block. Used only when {@code debugOrUnitTests} is set.
   */
  private void setRawTargets(PlanContext context, Target[] targets, String[] referenceNames,
                             Projection projection) throws PlanningException {
    LogicalPlan plan = context.plan;
    QueryBlock block = context.queryBlock;

    // It's for debugging or unit tests.
    Target [] rawTargets = new Target[projection.getNamedExprs().length];
    for (int i = 0; i < projection.getNamedExprs().length; i++) {
      NamedExpr namedExpr = projection.getNamedExprs()[i];
      EvalNode evalNode = exprAnnotator.createEvalNode(plan, block, namedExpr.getExpr());
      rawTargets[i] = new Target(evalNode, referenceNames[i]);
    }
    // it's for debugging or unit testing
    block.setRawTargets(rawTargets);
  }

  /**
   * Implements SELECT DISTINCT by splicing a duplicate-removal GroupbyNode (grouping on
   * all output columns) between the projection and its child.
   */
  private void insertDistinctOperator(PlanContext context, ProjectionNode projectionNode, LogicalNode child,
                                      Stack<Expr> stack) throws PlanningException {
    LogicalPlan plan = context.plan;
    QueryBlock block = context.queryBlock;

    Schema outSchema = projectionNode.getOutSchema();
    GroupbyNode dupRemoval = context.plan.createNode(GroupbyNode.class);
    dupRemoval.setChild(child);
    dupRemoval.setInSchema(projectionNode.getInSchema());
    dupRemoval.setTargets(PlannerUtil.schemaToTargets(outSchema));
    dupRemoval.setGroupingColumns(outSchema.toArray());

    // this node was generated here (not by the preprocessor), so register it manually
    block.registerNode(dupRemoval);
    postHook(context, stack, null, dupRemoval);

    projectionNode.setChild(dupRemoval);
    projectionNode.setInSchema(dupRemoval.getOutSchema());
  }

  /**
   * Normalizes every target of the projection and registers the results with the
   * block's NamedExprsManager, returning one reference name per target.
   */
  private String [] doProjectionPrephase(PlanContext context, Projection projection) throws PlanningException {
    QueryBlock block = context.queryBlock;

    int finalTargetNum = projection.size();
    String [] referenceNames = new String[finalTargetNum];
    ExprNormalizedResult [] normalizedExprList = new ExprNormalizedResult[finalTargetNum];
    NamedExpr namedExpr;
    for (int i = 0; i < finalTargetNum; i++) {
      namedExpr = projection.getNamedExprs()[i];

      if (PlannerUtil.existsAggregationFunction(namedExpr)) {
        // an aggregation function in the SELECT list forces an (implicit) group-by
        block.setAggregationRequire();
      }

      // dissect an expression into multiple parts (at most dissected into three parts)
      normalizedExprList[i] = normalizer.normalize(context, namedExpr.getExpr());
    }

    // Note: Why separate normalization and add(Named)Expr?
    //
    // ExprNormalizer internally makes use of the named exprs in NamedExprsManager.
    // If we don't separate normalization work and addExprWithName, addExprWithName will find named exprs evaluated
    // the same logical node. It will cause impossible evaluation in physical executors.
    for (int i = 0; i < finalTargetNum; i++) {
      namedExpr = projection.getNamedExprs()[i];
      // Get all projecting references
      if (namedExpr.hasAlias()) {
        NamedExpr aliasedExpr = new NamedExpr(normalizedExprList[i].baseExpr, namedExpr.getAlias());
        referenceNames[i] = block.namedExprsMgr.addNamedExpr(aliasedExpr);
      } else {
        referenceNames[i] = block.namedExprsMgr.addExpr(normalizedExprList[i].baseExpr);
      }

      // Add sub-expressions (i.e., aggregation part and scalar part) from dissected parts.
      block.namedExprsMgr.addNamedExprArray(normalizedExprList[i].aggExprs);
      block.namedExprsMgr.addNamedExprArray(normalizedExprList[i].scalarExprs);
    }

    return referenceNames;
  }

  /**
   * It builds non-from statement (only expressions) like '<code>SELECT 1+3 as plus</code>'.
   */
  private EvalExprNode buildPlanForNoneFromStatement(PlanContext context, Stack<Expr> stack, Projection projection)
      throws PlanningException {
    LogicalPlan plan = context.plan;
    QueryBlock block = context.queryBlock;

    int finalTargetNum = projection.getNamedExprs().length;
    Target [] targets = new Target[finalTargetNum];

    for (int i = 0; i < targets.length; i++) {
      NamedExpr namedExpr = projection.getNamedExprs()[i];
      EvalNode evalNode = exprAnnotator.createEvalNode(plan, block, namedExpr.getExpr());
      if (namedExpr.hasAlias()) {
        targets[i] = new Target(evalNode, namedExpr.getAlias());
      } else {
        // no alias given: synthesize a unique column name for the expression
        targets[i] = new Target(evalNode, context.plan.generateUniqueColumnName(namedExpr.getExpr()));
      }
    }
    EvalExprNode evalExprNode = context.queryBlock.getNodeFromExpr(projection);
    evalExprNode.setTargets(targets);
    evalExprNode.setOutSchema(PlannerUtil.targetToSchema(targets));

    // it's for debugging or unit testing
    block.setRawTargets(targets);

    return evalExprNode;
  }

  /**
   * Resolves each reference name to a Target: reuses already-evaluated targets and
   * annotates (and marks as evaluated) those not evaluated yet.
   */
  private Target [] buildTargets(LogicalPlan plan, QueryBlock block, String[] referenceNames)
      throws PlanningException {
    Target [] targets = new Target[referenceNames.length];

    for (int i = 0; i < referenceNames.length; i++) {
      if (block.namedExprsMgr.isEvaluated(referenceNames[i])) {
        targets[i] = block.namedExprsMgr.getTarget(referenceNames[i]);
      } else {
        NamedExpr namedExpr = block.namedExprsMgr.getNamedExpr(referenceNames[i]);
        EvalNode evalNode = exprAnnotator.createEvalNode(plan, block, namedExpr.getExpr());
        block.namedExprsMgr.markAsEvaluated(referenceNames[i], evalNode);
        targets[i] = new Target(evalNode, referenceNames[i]);
      }
    }
    return targets;
  }

  /**
   * Verifies that every column referenced by the projectable node's targets exists in
   * its input schema (or, for relations, the table schema). Throws PlanningException
   * with a node-specific message otherwise.
   */
  public static void verifyProjectedFields(QueryBlock block, Projectable projectable) throws PlanningException {
    if (projectable instanceof ProjectionNode && block.hasNode(NodeType.GROUP_BY)) {
      for (Target target : projectable.getTargets()) {
        Set<Column> columns = EvalTreeUtil.findUniqueColumns(target.getEvalTree());
        for (Column c : columns) {
          if (!projectable.getInSchema().contains(c)) {
            throw new PlanningException(c.getQualifiedName()
                + " must appear in the GROUP BY clause or be used in an aggregate function at node ("
                + projectable.getPID() + ")");
          }
        }
      }
    } else if (projectable instanceof GroupbyNode) {
      GroupbyNode groupbyNode = (GroupbyNode) projectable;
      // grouping columns must come from the input schema
      for (Column c : groupbyNode.getGroupingColumns()) {
        if (!projectable.getInSchema().contains(c)) {
          throw new PlanningException(String.format("Cannot get the field \"%s\" at node (%d)",
              c, projectable.getPID()));
        }
      }
      // aggregation function arguments must also come from the input schema
      if (groupbyNode.hasAggFunctions()) {
        for (AggregationFunctionCallEval f : groupbyNode.getAggFunctions()) {
          Set<Column> columns = EvalTreeUtil.findUniqueColumns(f);
          for (Column c : columns) {
            if (!projectable.getInSchema().contains(c)) {
              throw new PlanningException(String.format("Cannot get the field \"%s\" at node (%d)",
                  c, projectable.getPID()));
            }
          }
        }
      }
    } else if (projectable instanceof RelationNode) {
      RelationNode relationNode = (RelationNode) projectable;
      for (Target target : projectable.getTargets()) {
        Set<Column> columns =
            EvalTreeUtil.findUniqueColumns(target.getEvalTree());
        for (Column c : columns) {
          // for a base relation, validate against the table schema rather than the input schema
          if (!relationNode.getTableSchema().contains(c)) {
            throw new PlanningException(String.format("Cannot get the field \"%s\" at node (%d)",
                c, projectable.getPID()));
          }
        }
      }
    } else {
      for (Target target : projectable.getTargets()) {
        Set<Column> columns = EvalTreeUtil.findUniqueColumns(target.getEvalTree());
        for (Column c : columns) {
          if (!projectable.getInSchema().contains(c)) {
            throw new PlanningException(String.format("Cannot get the field \"%s\" at node (%d)",
                c, projectable.getPID()));
          }
        }
      }
    }
  }

  /**
   * Insert a group-by operator before a sort or a projection operator.
   * It is used only when a group-by clause is not given.
   */
  private LogicalNode insertGroupbyNode(PlanContext context, LogicalNode child, Stack<Expr> stack)
      throws PlanningException {

    LogicalPlan plan = context.plan;
    QueryBlock block = context.queryBlock;
    GroupbyNode groupbyNode = context.plan.createNode(GroupbyNode.class);
    groupbyNode.setChild(child);
    groupbyNode.setInSchema(child.getOutSchema());

    // implicit aggregation has no grouping keys (whole input is one group)
    groupbyNode.setGroupingColumns(new Column[] {});

    Set<String> aggEvalNames = new LinkedHashSet<String>();
    Set<AggregationFunctionCallEval> aggEvals = new LinkedHashSet<AggregationFunctionCallEval>();
    boolean includeDistinctFunction = false;
    for (Iterator<NamedExpr> it = block.namedExprsMgr.getIteratorForUnevaluatedExprs(); it.hasNext();) {
      NamedExpr rawTarget = it.next();
      try {
        // NOTE(review): plain assignment here, while visitGroupBy accumulates with |= —
        // a distinct flag from an earlier iteration can be overwritten; confirm intended.
        includeDistinctFunction = PlannerUtil.existsDistinctAggregationFunction(rawTarget.getExpr());
        EvalNode evalNode = exprAnnotator.createEvalNode(context.plan, context.queryBlock,
            rawTarget.getExpr());
        if (evalNode.getType() == EvalType.AGG_FUNCTION) {
          aggEvalNames.add(rawTarget.getAlias());
          aggEvals.add((AggregationFunctionCallEval) evalNode);
          block.namedExprsMgr.markAsEvaluated(rawTarget.getAlias(), evalNode);
        }
      } catch (VerifyException ve) {
        // expression not evaluable at this node yet; deliberately skipped here
      }
    }
    groupbyNode.setDistinct(includeDistinctFunction);
    groupbyNode.setAggFunctions(aggEvals.toArray(new AggregationFunctionCallEval[aggEvals.size()]));
    Target [] targets = ProjectionPushDownRule.buildGroupByTarget(groupbyNode, null,
        aggEvalNames.toArray(new String[aggEvalNames.size()]));
    groupbyNode.setTargets(targets);

    // this inserted group-by node doesn't pass through preprocessor. So manually added.
    block.registerNode(groupbyNode);
    postHook(context, stack, null, groupbyNode);
    return groupbyNode;
  }

  /*===============================================================================================
    SORT SECTION
   ===============================================================================================*/

  /**
   * Plans a LIMIT clause. A literal fetch count is annotated directly; otherwise the
   * expression is normalized and resolved through the NamedExprsManager after the
   * child has been planned.
   */
  @Override
  public LimitNode visitLimit(PlanContext context, Stack<Expr> stack, Limit limit) throws PlanningException {
    QueryBlock block = context.queryBlock;

    EvalNode firstFetNum;
    LogicalNode child;
    if (limit.getFetchFirstNum().getType() == OpType.Literal) {
      firstFetNum = exprAnnotator.createEvalNode(context.plan, block, limit.getFetchFirstNum());

      ////////////////////////////////////////////////////////
      // Visit and Build Child Plan
      ////////////////////////////////////////////////////////
      stack.push(limit);
      child = visit(context, stack, limit.getChild());
      stack.pop();
      ////////////////////////////////////////////////////////
    } else {
      ExprNormalizedResult normalizedResult = normalizer.normalize(context, limit.getFetchFirstNum());
      String referName = block.namedExprsMgr.addExpr(normalizedResult.baseExpr);
      block.namedExprsMgr.addNamedExprArray(normalizedResult.aggExprs);
      block.namedExprsMgr.addNamedExprArray(normalizedResult.scalarExprs);

      ////////////////////////////////////////////////////////
      // Visit and Build Child Plan
      ////////////////////////////////////////////////////////
      stack.push(limit);
      child = visit(context, stack, limit.getChild());
      stack.pop();
      ////////////////////////////////////////////////////////

      if (block.namedExprsMgr.isEvaluated(referName)) {
        firstFetNum = block.namedExprsMgr.getTarget(referName).getEvalTree();
      } else {
        NamedExpr namedExpr = block.namedExprsMgr.getNamedExpr(referName);
        firstFetNum = exprAnnotator.createEvalNode(context.plan, block, namedExpr.getExpr());
        block.namedExprsMgr.markAsEvaluated(referName, firstFetNum);
      }
    }
    LimitNode limitNode = block.getNodeFromExpr(limit);
    limitNode.setChild(child);
    limitNode.setInSchema(child.getOutSchema());
    limitNode.setOutSchema(child.getOutSchema());

    // the fetch count is evaluated eagerly at planning time (constant expression)
    limitNode.setFetchFirst(firstFetNum.eval(null, null).asInt8());

    return limitNode;
  }

  /**
   * Plans an ORDER BY clause: normalizes and registers each sort key, visits the child
   * (inserting an implicit group-by if required), then builds annotated sort specs.
   */
  @Override
  public SortNode visitSort(PlanContext context, Stack<Expr> stack, Sort sort) throws PlanningException {
    QueryBlock block = context.queryBlock;

    int sortKeyNum = sort.getSortSpecs().length;
    Sort.SortSpec[] sortSpecs = sort.getSortSpecs();
    String [] referNames = new String[sortKeyNum];

    ExprNormalizedResult [] normalizedExprList = new ExprNormalizedResult[sortKeyNum];
    for (int i = 0; i < sortKeyNum; i++) {
      normalizedExprList[i] = normalizer.normalize(context, sortSpecs[i].getKey());
    }
    for (int i = 0; i < sortKeyNum; i++) {
      referNames[i] = block.namedExprsMgr.addExpr(normalizedExprList[i].baseExpr);
      block.namedExprsMgr.addNamedExprArray(normalizedExprList[i].aggExprs);
      block.namedExprsMgr.addNamedExprArray(normalizedExprList[i].scalarExprs);
    }

    ////////////////////////////////////////////////////////
    // Visit and Build Child Plan
    ////////////////////////////////////////////////////////
    stack.push(sort);
    LogicalNode child = visit(context, stack, sort.getChild());
    if (block.isAggregationRequired()) {
      child = insertGroupbyNode(context, child, stack);
    }
    stack.pop();
    ////////////////////////////////////////////////////////

    SortNode sortNode = block.getNodeFromExpr(sort);
    sortNode.setChild(child);
    sortNode.setInSchema(child.getOutSchema());
    sortNode.setOutSchema(child.getOutSchema());

    // Building sort keys
    Column column;
    SortSpec [] annotatedSortSpecs = new SortSpec[sortKeyNum];
    for (int i = 0; i < sortKeyNum; i++) {
      if (block.namedExprsMgr.isEvaluated(referNames[i])) {
        column = block.namedExprsMgr.getTarget(referNames[i]).getNamedColumn();
      } else {
        // every sort key must have been evaluated somewhere below this node
        throw new IllegalStateException("Unexpected State: " + TUtil.arrayToString(sortSpecs));
      }
      annotatedSortSpecs[i] = new SortSpec(column, sortSpecs[i].isAscending(), sortSpecs[i].isNullFirst());
    }

    sortNode.setSortSpecs(annotatedSortSpecs);
    return sortNode;
  }

  /*===============================================================================================
    GROUP BY SECTION
   ===============================================================================================*/

  /**
   * Plans a HAVING clause: normalizes the qualifier, visits the child, then resolves
   * the condition (reusing an already-evaluated target when available).
   */
  @Override
  public LogicalNode visitHaving(PlanContext context, Stack<Expr> stack, Having expr) throws PlanningException {
    QueryBlock block = context.queryBlock;

    ExprNormalizedResult normalizedResult = normalizer.normalize(context, expr.getQual());
    String referName = block.namedExprsMgr.addExpr(normalizedResult.baseExpr);
    block.namedExprsMgr.addNamedExprArray(normalizedResult.aggExprs);
    block.namedExprsMgr.addNamedExprArray(normalizedResult.scalarExprs);

    ////////////////////////////////////////////////////////
    // Visit and Build Child Plan
    ////////////////////////////////////////////////////////
    stack.push(expr);
    LogicalNode child = visit(context, stack, expr.getChild());
    stack.pop();
    ////////////////////////////////////////////////////////

    HavingNode having = new HavingNode(context.plan.newPID());
    having.setChild(child);
    having.setInSchema(child.getOutSchema());
    having.setOutSchema(child.getOutSchema());

    EvalNode havingCondition;
    if (block.namedExprsMgr.isEvaluated(referName)) {
      havingCondition = block.namedExprsMgr.getTarget(referName).getEvalTree();
    } else {
      NamedExpr namedExpr = block.namedExprsMgr.getNamedExpr(referName);
      havingCondition = exprAnnotator.createEvalNode(context.plan, block, namedExpr.getExpr());
      block.namedExprsMgr.markAsEvaluated(referName, havingCondition);
    }

    // set having condition
    having.setQual(havingCondition);

    return having;
  }

  /**
   * Plans a GROUP BY clause: normalizes grouping keys, visits the child, resolves
   * grouping columns and aggregation functions, and assembles the output targets.
   */
  @Override
  public LogicalNode visitGroupBy(PlanContext context, Stack<Expr> stack,
Aggregation aggregation) throws PlanningException { // Initialization Phase: LogicalPlan plan = context.plan; QueryBlock block = context.queryBlock; // Normalize grouping keys and add normalized grouping keys to NamedExprManager int groupingKeyNum = aggregation.getGroupSet()[0].getGroupingSets().length; ExprNormalizedResult [] normalizedResults = new ExprNormalizedResult[groupingKeyNum]; for (int i = 0; i < groupingKeyNum; i++) { Expr groupingKey = aggregation.getGroupSet()[0].getGroupingSets()[i]; normalizedResults[i] = normalizer.normalize(context, groupingKey); } String [] groupingKeyRefNames = new String[groupingKeyNum]; for (int i = 0; i < groupingKeyNum; i++) { groupingKeyRefNames[i] = block.namedExprsMgr.addExpr(normalizedResults[i].baseExpr); block.namedExprsMgr.addNamedExprArray(normalizedResults[i].aggExprs); block.namedExprsMgr.addNamedExprArray(normalizedResults[i].scalarExprs); } //////////////////////////////////////////////////////// // Visit and Build Child Plan //////////////////////////////////////////////////////// stack.push(aggregation); LogicalNode child = visit(context, stack, aggregation.getChild()); stack.pop(); //////////////////////////////////////////////////////// GroupbyNode groupingNode = context.queryBlock.getNodeFromExpr(aggregation); groupingNode.setChild(child); groupingNode.setInSchema(child.getOutSchema()); // Set grouping sets Column [] groupingColumns = new Column[aggregation.getGroupSet()[0].getGroupingSets().length]; for (int i = 0; i < groupingColumns.length; i++) { if (block.namedExprsMgr.isEvaluated(groupingKeyRefNames[i])) { groupingColumns[i] = block.namedExprsMgr.getTarget(groupingKeyRefNames[i]).getNamedColumn(); } else { throw new PlanningException("Each grouping column expression must be a scalar expression."); } } groupingNode.setGroupingColumns(groupingColumns); //////////////////////////////////////////////////////// // Visit and Build Child Plan //////////////////////////////////////////////////////// // create 
EvalNodes and check if each EvalNode can be evaluated here. List<String> aggEvalNames = TUtil.newList(); List<AggregationFunctionCallEval> aggEvalNodes = TUtil.newList(); boolean includeDistinctFunction = false; for (Iterator<NamedExpr> iterator = block.namedExprsMgr.getIteratorForUnevaluatedExprs(); iterator.hasNext();) { NamedExpr namedExpr = iterator.next(); try { includeDistinctFunction |= PlannerUtil.existsDistinctAggregationFunction(namedExpr.getExpr()); EvalNode evalNode = exprAnnotator.createEvalNode(context.plan, context.queryBlock, namedExpr.getExpr()); if (evalNode.getType() == EvalType.AGG_FUNCTION) { block.namedExprsMgr.markAsEvaluated(namedExpr.getAlias(), evalNode); aggEvalNames.add(namedExpr.getAlias()); aggEvalNodes.add((AggregationFunctionCallEval) evalNode); } } catch (VerifyException ve) { } } // if there is at least one distinct aggregation function groupingNode.setDistinct(includeDistinctFunction); groupingNode.setAggFunctions(aggEvalNodes.toArray(new AggregationFunctionCallEval[aggEvalNodes.size()])); Target [] targets = new Target[groupingKeyNum + aggEvalNames.size()]; // In target, grouping columns will be followed by aggregation evals. // // col1, col2, col3, sum(..), agv(..) 
//                       ^^^^^^^^^^^^^^^    ^^^^^^^^^^^^^^^^^^
//                       grouping keys      aggregation evals

// Build grouping keys: each grouping column becomes a plain field reference target.
for (int i = 0; i < groupingKeyNum; i++) {
  Target target = new Target(new FieldEval(groupingNode.getGroupingColumns()[i]));
  targets[i] = target;
}

// Aggregation evals are appended immediately after the grouping keys.
for (int i = 0, targetIdx = groupingKeyNum; i < aggEvalNodes.size(); i++, targetIdx++) {
  targets[targetIdx] = block.namedExprsMgr.getTarget(aggEvalNames.get(i));
}
groupingNode.setTargets(targets);
block.unsetAggregationRequire();

verifyProjectedFields(block, groupingNode);
return groupingNode;
}

// Marker for the empty cuboid (grand total: no grouping columns).
public static final Column[] ALL = Lists.newArrayList().toArray(new Column[0]);

/**
 * Generates every cuboid (subset of grouping columns) for a CUBE operation.
 * For n input columns this produces 2^n subsets, including the empty one (ALL).
 *
 * @param columns the grouping columns of the cube
 * @return a list of 2^n column arrays, one per cuboid
 */
public static List<Column[]> generateCuboids(Column[] columns) {
  int numCuboids = (int) Math.pow(2, columns.length);
  int maxBits = columns.length;

  List<Column[]> cube = Lists.newArrayList();
  List<Column> cuboidCols;

  cube.add(ALL);
  // Each cuboidId is a bitmask selecting which input columns belong to that cuboid.
  for (int cuboidId = 1; cuboidId < numCuboids; cuboidId++) {
    cuboidCols = Lists.newArrayList();
    for (int j = 0; j < maxBits; j++) {
      int bit = 1 << j;
      if ((cuboidId & bit) == bit) {
        cuboidCols.add(columns[j]);
      }
    }
    cube.add(cuboidCols.toArray(new Column[cuboidCols.size()]));
  }
  return cube;
}

/**
 * Builds a SelectionNode for a WHERE clause. The condition must be a pure scalar
 * predicate; aggregation functions in the qual are rejected.
 */
@Override
public SelectionNode visitFilter(PlanContext context, Stack<Expr> stack, Selection selection)
    throws PlanningException {
  QueryBlock block = context.queryBlock;

  ExprNormalizedResult normalizedResult = normalizer.normalize(context, selection.getQual());
  block.namedExprsMgr.addExpr(normalizedResult.baseExpr);
  if (normalizedResult.aggExprs.size() > 0 || normalizedResult.scalarExprs.size() > 0) {
    throw new VerifyException("Filter condition cannot include aggregation function");
  }

  ////////////////////////////////////////////////////////
  // Visit and Build Child Plan
  ////////////////////////////////////////////////////////
  stack.push(selection);
  LogicalNode child = visit(context, stack, selection.getChild());
  stack.pop();
  ////////////////////////////////////////////////////////

  SelectionNode selectionNode = context.queryBlock.getNodeFromExpr(selection);
  selectionNode.setChild(child);
  selectionNode.setInSchema(child.getOutSchema());
  selectionNode.setOutSchema(child.getOutSchema());

  // Create EvalNode for a search condition; constant sub-expressions are folded
  // before the qual is attached.
  EvalNode searchCondition = exprAnnotator.createEvalNode(context.plan, block, selection.getQual());
  EvalNode simplified = AlgebraicUtil.eliminateConstantExprs(searchCondition);
  // set selection condition
  selectionNode.setQual(simplified);

  return selectionNode;
}

/*===============================================================================================
  JOIN SECTION
===============================================================================================*/

/**
 * Builds a JoinNode. Handles both natural joins (schema merged by common column
 * names, equi-join qual synthesized) and explicit join quals; a join qual may not
 * contain aggregation functions.
 */
@Override
public LogicalNode visitJoin(PlanContext context, Stack<Expr> stack, Join join)
    throws PlanningException {
  // Phase 1: Init
  LogicalPlan plan = context.plan;
  QueryBlock block = context.queryBlock;

  if (join.hasQual()) {
    ExprNormalizedResult normalizedResult = normalizer.normalize(context, join.getQual());
    block.namedExprsMgr.addExpr(normalizedResult.baseExpr);
    if (normalizedResult.aggExprs.size() > 0 || normalizedResult.scalarExprs.size() > 0) {
      throw new VerifyException("Filter condition cannot include aggregation function");
    }
  }

  ////////////////////////////////////////////////////////
  // Visit and Build Child Plan
  ////////////////////////////////////////////////////////
  stack.push(join);
  LogicalNode left = visit(context, stack, join.getLeft());
  LogicalNode right = visit(context, stack, join.getRight());
  stack.pop();
  ////////////////////////////////////////////////////////

  JoinNode joinNode = context.queryBlock.getNodeFromExpr(join);
  joinNode.setJoinType(join.getJoinType());
  joinNode.setLeftChild(left);
  joinNode.setRightChild(right);

  // Set a merged input schema
  Schema merged;
  if (join.isNatural()) {
    merged = getNaturalJoinSchema(left, right);
  } else {
    merged = SchemaUtil.merge(left.getOutSchema(), right.getOutSchema());
  }
  joinNode.setInSchema(merged);

  // Create EvalNode for a search condition.
  EvalNode joinCondition = null;
  if (join.hasQual()) {
    EvalNode evalNode = exprAnnotator.createEvalNode(context.plan, block, join.getQual());
    joinCondition = AlgebraicUtil.eliminateConstantExprs(evalNode);
  }

  // Pull down expressions from upper nodes that can already be evaluated at this join.
  List<String> newlyEvaluatedExprs = getNewlyEvaluatedExprsForJoin(plan, block, joinNode, stack);
  List<Target> targets = TUtil.newList(PlannerUtil.schemaToTargets(merged));

  for (String newAddedExpr : newlyEvaluatedExprs) {
    targets.add(block.namedExprsMgr.getTarget(newAddedExpr, true));
  }
  joinNode.setTargets(targets.toArray(new Target[targets.size()]));

  // Determine join conditions
  if (join.isNatural()) {
    // if natural join, it should have the equi-join conditions by common column names
    EvalNode njCond = getNaturalJoinCondition(joinNode);
    joinNode.setJoinQual(njCond);
  } else if (join.hasQual()) {
    // otherwise, the given join conditions are set
    joinNode.setJoinQual(joinCondition);
  }

  return joinNode;
}

/**
 * Finds unevaluated named expressions that can be evaluated at the given join node,
 * marks them as evaluated, and returns their reference names.
 * The join is considered topmost when its parent expression is not another join.
 */
private List<String> getNewlyEvaluatedExprsForJoin(LogicalPlan plan, QueryBlock block, JoinNode joinNode,
                                                   Stack<Expr> stack) {
  EvalNode evalNode;
  List<String> newlyEvaluatedExprs = TUtil.newList();
  for (Iterator<NamedExpr> it = block.namedExprsMgr.getIteratorForUnevaluatedExprs(); it.hasNext();) {
    NamedExpr namedExpr = it.next();
    try {
      evalNode = exprAnnotator.createEvalNode(plan, block, namedExpr.getExpr());
      if (LogicalPlanner.checkIfBeEvaluatedAtJoin(block, evalNode, joinNode,
          stack.peek().getType() != OpType.Join)) {
        block.namedExprsMgr.markAsEvaluated(namedExpr.getAlias(), evalNode);
        newlyEvaluatedExprs.add(namedExpr.getAlias());
      }
    } catch (VerifyException ve) {
      // not evaluable at this node yet; a node higher in the plan will pick it up
    } catch (PlanningException e) {
      e.printStackTrace();
    }
  }
  return newlyEvaluatedExprs;
}

/**
 * Builds the output schema of a natural join: the common (join) columns first,
 * followed by the remaining columns of the left and then the right child.
 */
private static Schema getNaturalJoinSchema(LogicalNode left, LogicalNode right) {
  Schema joinSchema = new Schema();
  Schema commons = SchemaUtil.getNaturalJoinColumns(left.getOutSchema(), right.getOutSchema());
  joinSchema.addColumns(commons);
  for (Column c : left.getOutSchema().getColumns()) {
    if
(!joinSchema.contains(c.getQualifiedName())) {
      joinSchema.addColumn(c);
    }
  }
  for (Column c : right.getOutSchema().getColumns()) {
    if (!joinSchema.contains(c.getQualifiedName())) {
      joinSchema.addColumn(c);
    }
  }
  return joinSchema;
}

/**
 * Synthesizes the equi-join condition of a natural join: one EQUAL predicate per
 * common column name, AND-ed together. Returns null when there is no common column
 * — NOTE(review): callers appear to assume at least one common column; confirm.
 */
private static EvalNode getNaturalJoinCondition(JoinNode joinNode) {
  Schema leftSchema = joinNode.getLeftChild().getInSchema();
  Schema rightSchema = joinNode.getRightChild().getInSchema();
  Schema commons = SchemaUtil.getNaturalJoinColumns(leftSchema, rightSchema);

  EvalNode njQual = null;
  EvalNode equiQual;

  Column leftJoinKey;
  Column rightJoinKey;

  for (Column common : commons.getColumns()) {
    leftJoinKey = leftSchema.getColumn(common.getQualifiedName());
    rightJoinKey = rightSchema.getColumn(common.getQualifiedName());
    equiQual = new BinaryEval(EvalType.EQUAL,
        new FieldEval(leftJoinKey), new FieldEval(rightJoinKey));
    if (njQual == null) {
      njQual = equiQual;
    } else {
      njQual = new BinaryEval(EvalType.AND, njQual, equiQual);
    }
  }

  return njQual;
}

/**
 * Creates a CROSS JoinNode for two relations listed without an explicit join.
 * Expressions without distinct aggregation functions that become evaluable on the
 * merged schema are marked as evaluated and added to the join's targets.
 */
private LogicalNode createCartesianProduct(PlanContext context, LogicalNode left, LogicalNode right)
    throws PlanningException {
  LogicalPlan plan = context.plan;
  QueryBlock block = context.queryBlock;

  Schema merged = SchemaUtil.merge(left.getOutSchema(), right.getOutSchema());

  JoinNode join = plan.createNode(JoinNode.class);
  join.init(JoinType.CROSS, left, right);
  join.setInSchema(merged);

  EvalNode evalNode;
  List<String> newlyEvaluatedExprs = TUtil.newList();
  for (Iterator<NamedExpr> it = block.namedExprsMgr.getIteratorForUnevaluatedExprs(); it.hasNext();) {
    NamedExpr namedExpr = it.next();
    try {
      evalNode = exprAnnotator.createEvalNode(plan, block, namedExpr.getExpr());
      if (EvalTreeUtil.findDistinctAggFunction(evalNode).size() == 0) {
        block.namedExprsMgr.markAsEvaluated(namedExpr.getAlias(), evalNode);
        newlyEvaluatedExprs.add(namedExpr.getAlias());
      }
    } catch (VerifyException ve) {
      // not evaluable here; deferred to an upper node
    }
  }

  List<Target> targets = TUtil.newList(PlannerUtil.schemaToTargets(merged));
  for (String newAddedExpr : newlyEvaluatedExprs) {
    targets.add(block.namedExprsMgr.getTarget(newAddedExpr, true));
  }
  join.setTargets(targets.toArray(new Target[targets.size()]));
  return join;
}

/**
 * Builds a left-deep tree of cartesian products for a comma-separated FROM list.
 * A single relation is returned as-is; the final node is registered to the block.
 */
@Override
public LogicalNode visitRelationList(PlanContext context, Stack<Expr> stack, RelationList relations)
    throws PlanningException {

  LogicalNode current = visit(context, stack, relations.getRelations()[0]);

  LogicalNode left;
  LogicalNode right;
  if (relations.size() > 1) {
    for (int i = 1; i < relations.size(); i++) {
      left = current;
      right = visit(context, stack, relations.getRelations()[i]);
      current = createCartesianProduct(context, left, right);
    }
  }
  context.queryBlock.registerNode(current);

  return current;
}

/**
 * Builds a ScanNode for a base relation and greedily evaluates at the scan any
 * unevaluated expression (select list, where, order-by) whose inputs it can supply.
 */
@Override
public ScanNode visitRelation(PlanContext context, Stack<Expr> stack, Relation expr)
    throws PlanningException {
  QueryBlock block = context.queryBlock;

  ScanNode scanNode = block.getNodeFromExpr(expr);
  updatePhysicalInfo(scanNode.getTableDesc());

  // Find expressions which can be evaluated at this relation node.
  // Except for column references, additional expressions used in select list, where clause,
  // order-by clauses can be evaluated here. Their reference names are kept in
  // newlyEvaluatedExprsReferences.
  Set<String> newlyEvaluatedExprsReferences = new LinkedHashSet<String>();
  for (Iterator<NamedExpr> iterator = block.namedExprsMgr.getIteratorForUnevaluatedExprs(); iterator.hasNext();) {
    NamedExpr rawTarget = iterator.next();
    try {
      EvalNode evalNode = exprAnnotator.createEvalNode(context.plan, context.queryBlock,
          rawTarget.getExpr());
      if (checkIfBeEvaluatedAtRelation(block, evalNode, scanNode)) {
        block.namedExprsMgr.markAsEvaluated(rawTarget.getAlias(), evalNode);
        newlyEvaluatedExprsReferences.add(rawTarget.getAlias()); // newly added expr
      }
    } catch (VerifyException ve) {
      // not evaluable at the scan; deferred to an upper node
    }
  }

  // Assume that each unique expr is evaluated once.
LinkedHashSet<Target> targets = createFieldTargetsFromRelation(block, scanNode,
      newlyEvaluatedExprsReferences);

  // The fact that some expr is included in newlyEvaluatedExprsReferences means that it is
  // already evaluated. So, we get a raw expression and then create a target.
  for (String reference : newlyEvaluatedExprsReferences) {
    NamedExpr refrrer = block.namedExprsMgr.getNamedExpr(reference);
    EvalNode evalNode = exprAnnotator.createEvalNode(context.plan, block, refrrer.getExpr());
    targets.add(new Target(evalNode, reference));
  }

  scanNode.setTargets(targets.toArray(new Target[targets.size()]));

  verifyProjectedFields(block, scanNode);
  return scanNode;
}

/**
 * Creates one field-reference target per column of the relation's table schema.
 * If a column is aliased, the alias becomes the target name and its reference is
 * removed from newlyEvaluatedRefNames (mutated in place) because the field target
 * already covers it.
 */
private static LinkedHashSet<Target> createFieldTargetsFromRelation(QueryBlock block,
                                                                    RelationNode relationNode,
                                                                    Set<String> newlyEvaluatedRefNames) {
  LinkedHashSet<Target> targets = Sets.newLinkedHashSet();
  for (Column column : relationNode.getTableSchema().getColumns()) {
    String aliasName = block.namedExprsMgr.checkAndGetIfAliasedColumn(column.getQualifiedName());
    if (aliasName != null) {
      targets.add(new Target(new FieldEval(column), aliasName));
      newlyEvaluatedRefNames.remove(aliasName);
    } else {
      targets.add(new Target(new FieldEval(column)));
    }
  }
  return targets;
}

/**
 * Refreshes the table's byte-size statistics from the file system.
 * Best-effort: any failure is logged and ignored so planning can proceed.
 */
private void updatePhysicalInfo(TableDesc desc) {
  if (desc.getPath() != null) {
    try {
      FileSystem fs = desc.getPath().getFileSystem(new Configuration());
      FileStatus status = fs.getFileStatus(desc.getPath());
      if (desc.getStats() != null && (status.isDirectory() || status.isFile())) {
        ContentSummary summary = fs.getContentSummary(desc.getPath());
        if (summary != null) {
          long volume = summary.getLength();
          desc.getStats().setNumBytes(volume);
        }
      }
    } catch (Throwable t) {
      // stats refresh is advisory only; never fail planning because of it
      LOG.warn(t);
    }
  }
}

/**
 * Builds a TableSubQueryNode for a derived table (sub-query in FROM), planning the
 * child block first and then connecting it to the current block.
 */
public TableSubQueryNode visitTableSubQuery(PlanContext context, Stack<Expr> stack,
                                            TablePrimarySubQuery expr) throws PlanningException {
  QueryBlock block = context.queryBlock;

  QueryBlock childBlock = context.plan.getBlock(context.plan.getBlockNameByExpr(expr.getSubQuery()));
  PlanContext newContext = new PlanContext(context, childBlock);
  LogicalNode child = visit(newContext, new Stack<Expr>(), expr.getSubQuery());
  TableSubQueryNode subQueryNode = context.queryBlock.getNodeFromExpr(expr);
  context.plan.connectBlocks(childBlock, context.queryBlock, BlockType.TableSubQuery);
  subQueryNode.setSubQuery(child);

  // Add additional expressions required in upper nodes.
  Set<String> newlyEvaluatedExprs = TUtil.newHashSet();
  for (NamedExpr rawTarget : block.namedExprsMgr.getAllNamedExprs()) {
    try {
      EvalNode evalNode = exprAnnotator.createEvalNode(context.plan, context.queryBlock,
          rawTarget.getExpr());
      if (checkIfBeEvaluatedAtRelation(block, evalNode, subQueryNode)) {
        block.namedExprsMgr.markAsEvaluated(rawTarget.getAlias(), evalNode);
        newlyEvaluatedExprs.add(rawTarget.getAlias()); // newly added expr
      }
    } catch (VerifyException ve) {
      // not evaluable at the sub-query node; deferred to an upper node
    }
  }

  // Assume that each unique expr is evaluated once.
  LinkedHashSet<Target> targets = createFieldTargetsFromRelation(block, subQueryNode,
      newlyEvaluatedExprs);

  for (String newAddedExpr : newlyEvaluatedExprs) {
    targets.add(block.namedExprsMgr.getTarget(newAddedExpr, true));
  }

  subQueryNode.setTargets(targets.toArray(new Target[targets.size()]));

  return subQueryNode;
}

/*===============================================================================================
  SET OPERATION SECTION
===============================================================================================*/

/** UNION delegates to the common set-operation planner. */
@Override
public LogicalNode visitUnion(PlanContext context, Stack<Expr> stack, SetOperation setOperation)
    throws PlanningException {
  return buildSetPlan(context, stack, setOperation);
}

/** EXCEPT delegates to the common set-operation planner. */
@Override
public LogicalNode visitExcept(PlanContext context, Stack<Expr> stack, SetOperation setOperation)
    throws PlanningException {
  return buildSetPlan(context, stack, setOperation);
}

/** INTERSECT delegates to the common set-operation planner. */
@Override
public LogicalNode visitIntersect(PlanContext context, Stack<Expr> stack, SetOperation setOperation)
    throws PlanningException {
  return buildSetPlan(context, stack, setOperation);
}

/**
 * Common planner for UNION / EXCEPT / INTERSECT: plans each side in its own query
 * block, connects both blocks to the current one, and wires a BinaryNode.
 */
private LogicalNode buildSetPlan(PlanContext context, Stack<Expr> stack, SetOperation setOperation)
    throws PlanningException {

  // 1. Init Phase
  LogicalPlan plan = context.plan;
  QueryBlock block = context.queryBlock;

  ////////////////////////////////////////////////////////
  // Visit and Build Left Child Plan
  ////////////////////////////////////////////////////////
  QueryBlock leftBlock = context.plan.getBlockByExpr(setOperation.getLeft());
  PlanContext leftContext = new PlanContext(context, leftBlock);
  stack.push(setOperation);
  LogicalNode leftChild = visit(leftContext, new Stack<Expr>(), setOperation.getLeft());
  stack.pop();
  // Connect left child and current blocks
  context.plan.connectBlocks(leftContext.queryBlock, context.queryBlock, BlockType.TableSubQuery);

  ////////////////////////////////////////////////////////
  // Visit and Build Right Child Plan
  ////////////////////////////////////////////////////////
  QueryBlock rightBlock = context.plan.getBlockByExpr(setOperation.getRight());
  PlanContext rightContext = new PlanContext(context, rightBlock);
  stack.push(setOperation);
  LogicalNode rightChild = visit(rightContext, new Stack<Expr>(), setOperation.getRight());
  stack.pop();
  // Connect right child and current blocks
  context.plan.connectBlocks(rightContext.queryBlock, context.queryBlock, BlockType.TableSubQuery);

  // NOTE(review): all three branches are identical; the if/else chain only serves to
  // reject unexpected operation types.
  BinaryNode setOp;
  if (setOperation.getType() == OpType.Union) {
    setOp = block.getNodeFromExpr(setOperation);
  } else if (setOperation.getType() == OpType.Except) {
    setOp = block.getNodeFromExpr(setOperation);
  } else if (setOperation.getType() == OpType.Intersect) {
    setOp = block.getNodeFromExpr(setOperation);
  } else {
    throw new VerifyException("Invalid Type: " + setOperation.getType());
  }
  setOp.setLeftChild(leftChild);
  setOp.setRightChild(rightChild);

  // A union statement can be derived from two query blocks.
// For one union statement between both relations, we can ensure that each corresponding data
  // domain of both relations is the same. However, if necessary, the schema of the left query
  // block will be used as a base schema.
  Target [] leftStrippedTargets = PlannerUtil.stripTarget(
      PlannerUtil.schemaToTargets(leftBlock.getRoot().getOutSchema()));

  setOp.setInSchema(leftChild.getOutSchema());
  Schema outSchema = PlannerUtil.targetToSchema(leftStrippedTargets);
  setOp.setOutSchema(outSchema);

  return setOp;
}

/*===============================================================================================
  INSERT SECTION
===============================================================================================*/

/**
 * Builds an InsertNode, dispatching on whether the statement targets a table name
 * (INSERT INTO TABLE ...) or a location (INSERT INTO LOCATION ...).
 */
public LogicalNode visitInsert(PlanContext context, Stack<Expr> stack, Insert expr)
    throws PlanningException {
  stack.push(expr);
  LogicalNode subQuery = super.visitInsert(context, stack, expr);
  stack.pop();

  InsertNode insertNode = context.queryBlock.getNodeFromExpr(expr);
  insertNode.setOverwrite(expr.isOverwrite());
  insertNode.setSubQuery(subQuery);

  if (expr.hasTableName()) { // INSERT (OVERWRITE) INTO TABLE ...
    return buildInsertIntoTablePlan(context, insertNode, expr);
  } else if (expr.hasLocation()) { // INSERT (OVERWRITE) INTO LOCATION ...
    return buildInsertIntoLocationPlan(context, insertNode, expr);
  } else {
    throw new IllegalStateException("Invalid Query");
  }
}

/**
 * Builds an InsertNode with a target table.
 *
 * ex) INSERT OVERWRITE INTO TABLE ...
 * <br />
 *
 * We use the following terms: target table, target column
 * <pre>
 *   INSERT INTO    TB_NAME        (col1, col2)          SELECT    c1, c2
 *                  ^^^^^^^        ^^^^^^^^^^^^                    ^^^^^^^^^^^^
 *               target table   target columns (or schema)  projected columns (or schema)
 * </pre>
 */
private InsertNode buildInsertIntoTablePlan(PlanContext context, InsertNode insertNode,
                                            Insert expr) throws PlanningException {
  // Get and set a target table
  TableDesc desc = catalog.getTableDesc(expr.getTableName());
  insertNode.setTargetTable(desc);

  //
  // When we use 'INSERT (OVERWRITE) INTO TABLE' statements, there are two cases.
  //
  // First, when a user specifies target columns:
  // INSERT (OVERWRITE)? INTO table_name (col1 type, col2 type) SELECT ...
  //
  // Second, when a user does not specify target columns:
  // INSERT (OVERWRITE)? INTO table_name SELECT ...
  //
  // In the former case, the target columns' schema and the corresponding projected columns'
  // schema must be equivalent or implicitly castable.
  //
  // In the latter case, the target table's schema and the projected columns'
  // schema of the select clause can differ. In this case,
  // we use only a sequence of the leading columns of the target table's schema
  // as target columns.
  //
  // For example, consider a target table and an 'insert into' query given as follows:
  //
  // CREATE TABLE TB1                  (col1 int,  col2 int, col3 long);
  //                                      ||          ||
  // INSERT OVERWRITE INTO TB1 SELECT  order_key,  part_num               FROM ...
  //
  // In this example, only col1 and col2 are used as target columns.

  if (expr.hasTargetColumns()) { // when a user specified target columns
    if (expr.getTargetColumns().length > insertNode.getChild().getOutSchema().size()) {
      throw new PlanningException("Target columns and projected columns are mismatched to each other");
    }
    // See PreLogicalPlanVerifier.visitInsert.
    // It guarantees the equivalence between the numbers of target and projected columns.
    ScanNode scanNode = context.plan.createNode(ScanNode.class);
    scanNode.init(desc);
    context.queryBlock.addRelation(scanNode);
    String [] targets = expr.getTargetColumns();
    Schema targetColumns = new Schema();
    for (int i = 0; i < targets.length; i++) {
      Column targetColumn = context.plan.resolveColumn(context.queryBlock,
          new ColumnReferenceExpr(targets[i]));
      targetColumns.addColumn(targetColumn);
    }
    insertNode.setTargetSchema(targetColumns);
    insertNode.setOutSchema(targetColumns);
    buildProjectedInsert(insertNode);

  } else { // when a user did not specify target columns
    // The output schema of the select clause determines the target columns.
    Schema tableSchema = desc.getLogicalSchema();
    Schema projectedSchema = insertNode.getChild().getOutSchema();

    Schema targetColumns = new Schema();
    for (int i = 0; i < projectedSchema.size(); i++) {
      targetColumns.addColumn(tableSchema.getColumn(i));
    }
    insertNode.setTargetSchema(targetColumns);
    buildProjectedInsert(insertNode);
  }

  if (desc.hasPartition()) {
    insertNode.setPartitionMethod(desc.getPartitionMethod());
  }
  return insertNode;
}

/**
 * Aligns the child projection with the target table schema by padding non-target
 * columns with NULL constants.
 */
private void buildProjectedInsert(InsertNode insertNode) {
  Schema tableSchema = insertNode.getTableSchema();
  Schema targetColumns = insertNode.getTargetSchema();

  ProjectionNode projectionNode = insertNode.getChild();

  // Modifying projected columns by adding NULL constants.
  // This is because the table appender does not support partial target columns to be written.
List<Target> targets = TUtil.newList();
  // j walks the projected targets; columns absent from targetColumns receive NULL constants.
  for (int i = 0, j = 0; i < tableSchema.size(); i++) {
    Column column = tableSchema.getColumn(i);
    if(targetColumns.contains(column) && j < projectionNode.getTargets().length) {
      targets.add(projectionNode.getTargets()[j++]);
    } else {
      targets.add(new Target(new ConstEval(NullDatum.get()), column.getSimpleName()));
    }
  }
  projectionNode.setTargets(targets.toArray(new Target[targets.size()]));

  insertNode.setInSchema(projectionNode.getOutSchema());
  insertNode.setOutSchema(projectionNode.getOutSchema());
  insertNode.setProjectedSchema(PlannerUtil.targetToSchema(targets));
}

/**
 * Builds an InsertNode with a location.
 *
 * ex) INSERT OVERWRITE INTO LOCATION 'hdfs://....' ..
 */
private InsertNode buildInsertIntoLocationPlan(PlanContext context, InsertNode insertNode,
                                               Insert expr) {
  // INSERT (OVERWRITE)? INTO LOCATION path (USING file_type (param_clause)?)? query_expression
  Schema childSchema = insertNode.getChild().getOutSchema();
  insertNode.setInSchema(childSchema);
  insertNode.setOutSchema(childSchema);
  insertNode.setTableSchema(childSchema);
  insertNode.setTargetLocation(new Path(expr.getLocation()));

  if (expr.hasStorageType()) {
    insertNode.setStorageType(CatalogUtil.getStoreType(expr.getStorageType()));
  }
  if (expr.hasParams()) {
    Options options = new Options();
    options.putAll(expr.getParams());
    insertNode.setOptions(options);
  }
  return insertNode;
}

/*===============================================================================================
  Data Definition Language (DDL) SECTION
===============================================================================================*/

/**
 * Builds a CreateTableNode, covering CREATE TABLE ... AS SELECT (with or without
 * explicit table elements and column partitions) as well as empty-table creation.
 */
@Override
public LogicalNode visitCreateTable(PlanContext context, Stack<Expr> stack, CreateTable expr)
    throws PlanningException {

  CreateTableNode createTableNode = context.queryBlock.getNodeFromExpr(expr);

  // Set a table name to be created.
  createTableNode.setTableName(expr.getTableName());

  if (expr.hasStorageType()) { // If storage type (using clause) is specified
    createTableNode.setStorageType(CatalogUtil.getStoreType(expr.getStorageType()));
  } else { // otherwise, default type
    createTableNode.setStorageType(CatalogProtos.StoreType.CSV);
  }

  // Set default table properties from the WITH clause, if any.
  if (expr.hasParams()) {
    Options options = new Options();
    options.putAll(expr.getParams());
    createTableNode.setOptions(options);
  }

  if (expr.hasPartition()) {
    // Only column partitioning is supported.
    if (expr.getPartitionMethod().getPartitionType().equals(PartitionType.COLUMN)) {
      createTableNode.setPartitionMethod(
          getPartitionMethod(context, expr.getTableName(), expr.getPartitionMethod()));
    } else {
      throw new PlanningException(String.format("Not supported PartitonType: %s",
          expr.getPartitionMethod().getPartitionType()));
    }
  }

  if (expr.hasSubQuery()) { // CREATE TABLE .. AS SELECT
    stack.add(expr);
    LogicalNode subQuery = visit(context, stack, expr.getSubQuery());
    stack.pop();
    createTableNode.setChild(subQuery);
    createTableNode.setInSchema(subQuery.getOutSchema());

    // If the table schema is defined
    // ex) CREATE TABLE tbl(col1 type, col2 type) AS SELECT ...
    if (expr.hasTableElements()) {
      createTableNode.setOutSchema(convertTableElementsSchema(expr.getTableElements()));
      createTableNode.setTableSchema(convertTableElementsSchema(expr.getTableElements()));
    } else {
      // if no table definition, the select clause's output schema will be used.
      // ex) CREATE TABLE tbl AS SELECT ...
      if (expr.hasPartition()) {
        PartitionMethodDesc partitionMethod = createTableNode.getPartitionMethod();

        Schema queryOutputSchema = subQuery.getOutSchema();
        Schema partitionExpressionSchema = partitionMethod.getExpressionSchema();
        if (partitionMethod.getPartitionType() == CatalogProtos.PartitionType.COLUMN &&
            queryOutputSchema.size() < partitionExpressionSchema.size()) {
          throw new VerifyException("Partition columns cannot be more than table columns.");
        }
        // Partition columns are the trailing columns; the table schema keeps the rest.
        Schema tableSchema = new Schema();
        for (int i = 0; i < queryOutputSchema.size() - partitionExpressionSchema.size(); i++) {
          tableSchema.addColumn(queryOutputSchema.getColumn(i));
        }
        createTableNode.setOutSchema(tableSchema);
        createTableNode.setTableSchema(tableSchema);
      } else {
        createTableNode.setOutSchema(subQuery.getOutSchema());
        createTableNode.setTableSchema(subQuery.getOutSchema());
      }
    }

    return createTableNode;

  } else { // if CREATE AN EMPTY TABLE
    Schema tableSchema = convertColumnsToSchema(expr.getTableElements());
    createTableNode.setTableSchema(tableSchema);

    if (expr.isExternal()) {
      createTableNode.setExternal(true);
    }

    if (expr.hasLocation()) {
      createTableNode.setPath(new Path(expr.getLocation()));
    }

    return createTableNode;
  }
}

/**
 * Converts a parsed partition-method expression into a PartitionMethodDesc.
 * Only COLUMN partitioning is supported; other types are rejected.
 */
private PartitionMethodDesc getPartitionMethod(PlanContext context,
                                               String tableName,
                                               CreateTable.PartitionMethodDescExpr expr)
    throws PlanningException {
  PartitionMethodDesc partitionMethodDesc = new PartitionMethodDesc();
  partitionMethodDesc.setTableId(tableName);

  if(expr.getPartitionType() == PartitionType.COLUMN) {
    CreateTable.ColumnPartition partition = (CreateTable.ColumnPartition) expr;
    String partitionExpression = Joiner.on(',').join(partition.getColumns());

    partitionMethodDesc.setPartitionType(CatalogProtos.PartitionType.COLUMN);
    partitionMethodDesc.setExpression(partitionExpression);
    partitionMethodDesc.setExpressionSchema(convertColumnsToSchema(partition.getColumns()));
  } else {
    throw new PlanningException(String.format("Not supported PartitonType: %s",
expr.getPartitionType()));
  }
  return partitionMethodDesc;
}

/**
 * Transforms table definition elements to a schema.
 *
 * @param elements to be transformed
 * @return schema transformed from table definition elements
 */
private Schema convertColumnsToSchema(CreateTable.ColumnDefinition[] elements) {
  Schema schema = new Schema();

  for (CreateTable.ColumnDefinition columnDefinition: elements) {
    schema.addColumn(convertColumn(columnDefinition));
  }

  return schema;
}

/**
 * Transforms table definition elements to a schema.
 * NOTE(review): this duplicates convertColumnsToSchema; consider consolidating.
 *
 * @param elements to be transformed
 * @return schema transformed from table definition elements
 */
private Schema convertTableElementsSchema(CreateTable.ColumnDefinition[] elements) {
  Schema schema = new Schema();

  for (CreateTable.ColumnDefinition columnDefinition: elements) {
    schema.addColumn(convertColumn(columnDefinition));
  }

  return schema;
}

/** Converts a single parsed column definition into a catalog Column. */
private Column convertColumn(ColumnDefinition columnDefinition) {
  return new Column(columnDefinition.getColumnName(), convertDataType(columnDefinition));
}

/**
 * Converts a parsed data-type expression into a protobuf DataType, carrying the
 * length/precision through when present.
 */
static TajoDataTypes.DataType convertDataType(DataTypeExpr dataType) {
  TajoDataTypes.Type type = TajoDataTypes.Type.valueOf(dataType.getTypeName());

  TajoDataTypes.DataType.Builder builder = TajoDataTypes.DataType.newBuilder();
  builder.setType(type);
  if (dataType.hasLengthOrPrecision()) {
    builder.setLength(dataType.getLengthOrPrecision());
  }
  return builder.build();
}

/** Builds a DropTableNode for DROP TABLE (optionally PURGE). */
@Override
public LogicalNode visitDropTable(PlanContext context, Stack<Expr> stack, DropTable dropTable) {
  DropTableNode dropTableNode = context.queryBlock.getNodeFromExpr(dropTable);
  dropTableNode.init(dropTable.getTableName(), dropTable.isPurge());
  return dropTableNode;
}

/*===============================================================================================
  Util SECTION
===============================================================================================*/

/**
 * Returns true if every column referenced by evalNode is available in the
 * group-by node's input schema (i.e. the expression can be evaluated there).
 */
public static boolean checkIfBeEvaluatedAtGroupBy(EvalNode evalNode, GroupbyNode node) {
  Set<Column> columnRefs = EvalTreeUtil.findUniqueColumns(evalNode);

  if (columnRefs.size() > 0 && !node.getInSchema().containsAll(columnRefs)) {
    return false;
  }

  return true;
}

/**
 * Returns true if evalNode can be evaluated at the given join node: all referenced
 * columns are in the join's input schema, no distinct aggregation is involved, and
 * the case-when/outer-join restriction (see below) is satisfied.
 */
public static boolean checkIfBeEvaluatedAtJoin(QueryBlock block, EvalNode evalNode, JoinNode node,
                                               boolean isTopMostJoin) {
  Set<Column> columnRefs = EvalTreeUtil.findUniqueColumns(evalNode);

  if (EvalTreeUtil.findDistinctAggFunction(evalNode).size() > 0) {
    return false;
  }

  if (columnRefs.size() > 0 && !node.getInSchema().containsAll(columnRefs)) {
    return false;
  }

  // When a 'case-when' is used with outer join, the case-when expression must be evaluated
  // at the topmost join operator.
  // TODO - It's also valid that case-when is evaluated at the topmost outer operator.
  //        But, how can we know there is no further outer join operator after this node?
  if (!checkIfCaseWhenWithOuterJoinBeEvaluated(block, evalNode, isTopMostJoin)) {
    return false;
  }

  return true;
}

/**
 * Rejects evaluation of a CASE WHEN expression below the topmost join when the
 * block contains a left or right outer join (the outer join may introduce nulls
 * that change the case-when result).
 */
private static boolean checkIfCaseWhenWithOuterJoinBeEvaluated(QueryBlock block, EvalNode evalNode,
                                                               boolean isTopMostJoin) {
  if (block.containsJoinType(JoinType.LEFT_OUTER) || block.containsJoinType(JoinType.RIGHT_OUTER)) {
    Collection<CaseWhenEval> caseWhenEvals = EvalTreeUtil.findEvalsByType(evalNode, EvalType.CASE);
    if (caseWhenEvals.size() > 0 && !isTopMostJoin) {
      return false;
    }
  }
  return true;
}

/**
 * It checks if evalNode can be evaluated at this {@link RelationNode}.
 */
public static boolean checkIfBeEvaluatedAtRelation(QueryBlock block, EvalNode evalNode,
                                                   RelationNode node) {
  Set<Column> columnRefs = EvalTreeUtil.findUniqueColumns(evalNode);

  // aggregation functions cannot be evaluated in scan node
  if (EvalTreeUtil.findDistinctAggFunction(evalNode).size() > 0) {
    return false;
  }

  if (columnRefs.size() > 0 && !node.getTableSchema().containsAll(columnRefs)) {
    return false;
  }

  // Why? - When a {case when} is used with outer join, case when must be evaluated
  // at the topmost outer join.
  if (block.containsJoinType(JoinType.LEFT_OUTER) || block.containsJoinType(JoinType.RIGHT_OUTER)) {
    Collection<CaseWhenEval> found = EvalTreeUtil.findEvalsByType(evalNode, EvalType.CASE);
    if (found.size() > 0) {
      return false;
    }
  }

  return true;
}

/**
 * Returns true if every column referenced by evalNode is available in the given
 * node's input schema.
 */
public static boolean checkIfBeEvaluatedAtThis(EvalNode evalNode, LogicalNode node) {
  Set<Column> columnRefs = EvalTreeUtil.findUniqueColumns(evalNode);
  if (columnRefs.size() > 0 && !node.getInSchema().containsAll(columnRefs)) {
    return false;
  }
  return true;
}
}
/* Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.apache.batik.dom.svg; import org.apache.batik.parser.DefaultPathHandler; import org.apache.batik.parser.ParseException; import org.apache.batik.parser.PathParser; import org.w3c.dom.DOMException; import org.w3c.dom.svg.SVGException; import org.w3c.dom.svg.SVGPathSeg; import org.w3c.dom.svg.SVGPathSegArcAbs; import org.w3c.dom.svg.SVGPathSegArcRel; import org.w3c.dom.svg.SVGPathSegClosePath; import org.w3c.dom.svg.SVGPathSegCurvetoCubicAbs; import org.w3c.dom.svg.SVGPathSegCurvetoCubicRel; import org.w3c.dom.svg.SVGPathSegCurvetoCubicSmoothAbs; import org.w3c.dom.svg.SVGPathSegCurvetoCubicSmoothRel; import org.w3c.dom.svg.SVGPathSegCurvetoQuadraticAbs; import org.w3c.dom.svg.SVGPathSegCurvetoQuadraticRel; import org.w3c.dom.svg.SVGPathSegCurvetoQuadraticSmoothAbs; import org.w3c.dom.svg.SVGPathSegCurvetoQuadraticSmoothRel; import org.w3c.dom.svg.SVGPathSegLinetoAbs; import org.w3c.dom.svg.SVGPathSegLinetoHorizontalAbs; import org.w3c.dom.svg.SVGPathSegLinetoHorizontalRel; import org.w3c.dom.svg.SVGPathSegLinetoRel; import org.w3c.dom.svg.SVGPathSegLinetoVerticalAbs; import org.w3c.dom.svg.SVGPathSegLinetoVerticalRel; import org.w3c.dom.svg.SVGPathSegList; import org.w3c.dom.svg.SVGPathSegMovetoAbs; import 
org.w3c.dom.svg.SVGPathSegMovetoRel;

/**
 * This class is the implementation of
 * <code>SVGPathSegList</code>.
 *
 * <p>It stores each path segment as an {@link SVGPathSegItem}, can rebuild the
 * list by parsing a path-data ('d') string with Batik's {@link PathParser},
 * and serializes items back to path-data letters and coordinates.</p>
 *
 * @author [email protected]
 * @version $Id: AbstractSVGPathSegList.java 476924 2006-11-19 21:13:26Z dvholten $
 */
public abstract class AbstractSVGPathSegList
    extends AbstractSVGList
    implements SVGPathSegList,
               SVGPathSegConstants {

    /**
     * Separator for a point list.
     */
    public static final String SVG_PATHSEG_LIST_SEPARATOR =" ";

    /**
     * Creates a new SVGPathSegList.
     */
    protected AbstractSVGPathSegList() {
        super();
    }

    /**
     * Return the separator between segments in the list.
     */
    protected String getItemSeparator(){
        return SVG_PATHSEG_LIST_SEPARATOR;
    }

    /**
     * Create an SVGException when the checkItemType fails.
     *
     * @return SVGException
     */
    protected abstract SVGException createSVGException(short type,
                                                      String key,
                                                      Object[] args);

    /**
     * Clears the list and sets it to contain the single given segment.
     */
    public SVGPathSeg initialize ( SVGPathSeg newItem )
        throws DOMException, SVGException {
        return (SVGPathSeg)initializeImpl(newItem);
    }

    /**
     * Returns the segment at the given index.
     */
    public SVGPathSeg getItem ( int index )
        throws DOMException {
        return (SVGPathSeg)getItemImpl(index);
    }

    /**
     * Inserts the given segment before the given index.
     */
    public SVGPathSeg insertItemBefore ( SVGPathSeg newItem, int index )
        throws DOMException, SVGException {
        return (SVGPathSeg)insertItemBeforeImpl(newItem,index);
    }

    /**
     * Replaces the segment at the given index with the given segment.
     */
    public SVGPathSeg replaceItem ( SVGPathSeg newItem, int index )
        throws DOMException, SVGException {
        return (SVGPathSeg)replaceItemImpl(newItem,index);
    }

    /**
     * Removes and returns the segment at the given index.
     */
    public SVGPathSeg removeItem ( int index )
        throws DOMException {
        return (SVGPathSeg)removeItemImpl(index);
    }

    /**
     * Appends the given segment to the end of the list.
     */
    public SVGPathSeg appendItem ( SVGPathSeg newItem )
        throws DOMException, SVGException {
        return (SVGPathSeg) appendItemImpl(newItem);
    }

    /**
     * Wraps the given {@link SVGPathSeg} in the matching {@link SVGPathSegItem}.
     */
    protected SVGItem createSVGItem(Object newItem){
        SVGPathSeg pathSeg = (SVGPathSeg)newItem;
        return createPathSegItem(pathSeg);
    }

    /**
     * Parse the 'd' attribute.
     *
     * @param value 'd' attribute value
     * @param handler : list handler
     */
    protected void doParse(String value, ListHandler handler)
        throws ParseException{
        PathParser pathParser = new PathParser();
        // The builder translates parser callbacks into list items fed to 'handler'.
        PathSegListBuilder builder = new PathSegListBuilder(handler);
        pathParser.setPathHandler(builder);
        pathParser.parse(value);
    }

    /**
     * Check if the item is an SVGPathSeg.
     */
    protected void checkItemType(Object newItem){
        if ( !( newItem instanceof SVGPathSeg ) ){
            // NOTE(review): the created exception is not thrown here; this mirrors
            // the shipped behavior — confirm whether createSVGException is expected
            // to throw as a side effect before changing it.
            createSVGException(SVGException.SVG_WRONG_TYPE_ERR,
                               "expected SVGPathSeg",
                               null);
        }
    }

    /**
     * create an SVGItem representing this SVGPathSeg.
     * Dispatches on the segment type to the matching item subclass.
     */
    protected SVGPathSegItem createPathSegItem(SVGPathSeg pathSeg){
        SVGPathSegItem pathSegItem = null;
        short type = pathSeg.getPathSegType();
        switch(type){
        case SVGPathSeg.PATHSEG_ARC_ABS:
        case SVGPathSeg.PATHSEG_ARC_REL:
            pathSegItem = new SVGPathSegArcItem(pathSeg);
            break;
        case SVGPathSeg.PATHSEG_CLOSEPATH:
            pathSegItem = new SVGPathSegItem(pathSeg);
            break;
        case SVGPathSeg.PATHSEG_CURVETO_CUBIC_ABS:
        case SVGPathSeg.PATHSEG_CURVETO_CUBIC_REL:
            pathSegItem = new SVGPathSegCurvetoCubicItem(pathSeg);
            break;
        case SVGPathSeg.PATHSEG_CURVETO_CUBIC_SMOOTH_ABS:
        case SVGPathSeg.PATHSEG_CURVETO_CUBIC_SMOOTH_REL:
            pathSegItem = new SVGPathSegCurvetoCubicSmoothItem(pathSeg);
            break;
        case SVGPathSeg.PATHSEG_CURVETO_QUADRATIC_ABS:
        case SVGPathSeg.PATHSEG_CURVETO_QUADRATIC_REL:
            pathSegItem = new SVGPathSegCurvetoQuadraticItem(pathSeg);
            break;
        case SVGPathSeg.PATHSEG_CURVETO_QUADRATIC_SMOOTH_ABS:
        case SVGPathSeg.PATHSEG_CURVETO_QUADRATIC_SMOOTH_REL:
            pathSegItem = new SVGPathSegCurvetoQuadraticSmoothItem(pathSeg);
            break;
        case SVGPathSeg.PATHSEG_LINETO_ABS:
        case SVGPathSeg.PATHSEG_LINETO_REL:
        case SVGPathSeg.PATHSEG_MOVETO_ABS:
        case SVGPathSeg.PATHSEG_MOVETO_REL:
            // moveto and lineto share one item class: both carry only (x, y).
            pathSegItem = new SVGPathSegMovetoLinetoItem(pathSeg);
            break;
        case SVGPathSeg.PATHSEG_LINETO_HORIZONTAL_REL:
        case SVGPathSeg.PATHSEG_LINETO_HORIZONTAL_ABS:
            pathSegItem = new SVGPathSegLinetoHorizontalItem(pathSeg);
            break;
        case SVGPathSeg.PATHSEG_LINETO_VERTICAL_REL:
        case SVGPathSeg.PATHSEG_LINETO_VERTICAL_ABS:
            pathSegItem = new SVGPathSegLinetoVerticalItem(pathSeg);
            break;
        default:
            // Unknown segment types yield null; callers receive no item.
        }
        return pathSegItem;
    }

    /**
     * Internal representation of the item SVGPathSeg.
     * Base item: holds every field any segment kind may need; subclasses use
     * only the fields relevant to their type. Also directly represents a
     * closepath segment.
     */
    protected class SVGPathSegItem
        extends AbstractSVGItem
        implements SVGPathSeg,
                   SVGPathSegClosePath {

        protected short type;          // one of the SVGPathSeg.PATHSEG_* codes
        protected String letter;       // path-data command letter for this type

        protected float x;
        protected float y;
        protected float x1;
        protected float y1;
        protected float x2;
        protected float y2;
        protected float r1;
        protected float r2;
        protected float angle;
        protected boolean largeArcFlag;
        protected boolean sweepFlag;

        protected SVGPathSegItem(){}

        public SVGPathSegItem(short type,String letter){
            this.type = type;
            this.letter = letter;
        }

        public SVGPathSegItem(SVGPathSeg pathSeg){
            type = pathSeg.getPathSegType();
            switch(type){
            case SVGPathSeg.PATHSEG_CLOSEPATH:
                letter = PATHSEG_CLOSEPATH_LETTER;
                break;
            default:
            }
        }

        /** A closepath serializes as just its letter. */
        protected String getStringValue(){
            return letter;
        }

        public short getPathSegType() {
            return type;
        }

        public String getPathSegTypeAsLetter(){
            return letter;
        }
    }

    /** Item for moveto/lineto segments (absolute or relative): letter + (x, y). */
    public class SVGPathSegMovetoLinetoItem
        extends SVGPathSegItem
        implements SVGPathSegMovetoAbs,
                   SVGPathSegMovetoRel,
                   SVGPathSegLinetoAbs,
                   SVGPathSegLinetoRel {

        public SVGPathSegMovetoLinetoItem(short type, String letter,
                                          float x, float y){
            super(type,letter);
            this.x = x;
            this.y = y;
        }

        public SVGPathSegMovetoLinetoItem(SVGPathSeg pathSeg){
            type = pathSeg.getPathSegType();
            switch(type){
            case SVGPathSeg.PATHSEG_LINETO_REL:
                letter = PATHSEG_LINETO_REL_LETTER;
                x = ((SVGPathSegLinetoRel)pathSeg).getX();
                y = ((SVGPathSegLinetoRel)pathSeg).getY();
                break;
            case SVGPathSeg.PATHSEG_LINETO_ABS:
                letter = PATHSEG_LINETO_ABS_LETTER;
                x = ((SVGPathSegLinetoAbs)pathSeg).getX();
                y = ((SVGPathSegLinetoAbs)pathSeg).getY();
                break;
            case SVGPathSeg.PATHSEG_MOVETO_REL:
                letter = PATHSEG_MOVETO_REL_LETTER;
                x = ((SVGPathSegMovetoRel)pathSeg).getX();
                y = ((SVGPathSegMovetoRel)pathSeg).getY();
                break;
            case SVGPathSeg.PATHSEG_MOVETO_ABS:
                letter = PATHSEG_MOVETO_ABS_LETTER;
                x = ((SVGPathSegMovetoAbs)pathSeg).getX();
                y = ((SVGPathSegMovetoAbs)pathSeg).getY();
                break;
            default:
            }
        }

        public float getX(){
            return x;
        }
        public float getY(){
            return y;
        }
        // Setters refresh the owning attribute so the DOM stays in sync.
        public void setX(float x){
            this.x = x;
            resetAttribute();
        }
        public void setY(float y){
            this.y = y;
            resetAttribute();
        }

        protected String getStringValue(){
            return letter + ' ' + Float.toString(x) + ' ' + Float.toString(y);
        }
    }

    /** Item for cubic Bezier curveto segments: two control points + endpoint. */
    public class SVGPathSegCurvetoCubicItem
        extends SVGPathSegItem
        implements SVGPathSegCurvetoCubicAbs,
                   SVGPathSegCurvetoCubicRel {

        public SVGPathSegCurvetoCubicItem(short type,String letter,
                                          float x1,float y1,float x2,
                                          float y2, float x, float y){
            super(type,letter);
            this.x = x;
            this.y = y;
            this.x1 = x1;
            this.y1 = y1;
            this.x2 = x2;
            this.y2 = y2;
        }

        public SVGPathSegCurvetoCubicItem(SVGPathSeg pathSeg){
            this.type = pathSeg.getPathSegType();
            switch(type){
            case SVGPathSeg.PATHSEG_CURVETO_CUBIC_ABS:
                letter = PATHSEG_CURVETO_CUBIC_ABS_LETTER;
                x = ((SVGPathSegCurvetoCubicAbs)pathSeg).getX();
                y = ((SVGPathSegCurvetoCubicAbs)pathSeg).getY();
                x1 = ((SVGPathSegCurvetoCubicAbs)pathSeg).getX1();
                y1 = ((SVGPathSegCurvetoCubicAbs)pathSeg).getY1();
                x2 = ((SVGPathSegCurvetoCubicAbs)pathSeg).getX2();
                y2 = ((SVGPathSegCurvetoCubicAbs)pathSeg).getY2();
                break;
            case SVGPathSeg.PATHSEG_CURVETO_CUBIC_REL:
                letter = PATHSEG_CURVETO_CUBIC_REL_LETTER;
                x = ((SVGPathSegCurvetoCubicRel)pathSeg).getX();
                y = ((SVGPathSegCurvetoCubicRel)pathSeg).getY();
                x1 = ((SVGPathSegCurvetoCubicRel)pathSeg).getX1();
                y1 = ((SVGPathSegCurvetoCubicRel)pathSeg).getY1();
                x2 = ((SVGPathSegCurvetoCubicRel)pathSeg).getX2();
                y2 = ((SVGPathSegCurvetoCubicRel)pathSeg).getY2();
                break;
            default:
            }
        }

        public float getX(){
            return x;
        }
        public float getY(){
            return y;
        }
        public void setX(float x){
            this.x = x;
            resetAttribute();
        }
        public void setY(float y){
            this.y = y;
            resetAttribute();
        }
        public float getX1(){
            return x1;
        }
        public float getY1(){
            return y1;
        }
        public void setX1(float x1){
            this.x1 = x1;
            resetAttribute();
        }
        public void setY1(float y1){
            this.y1 = y1;
            resetAttribute();
        }
        public float getX2(){
            return x2;
        }
        public float getY2(){
            return y2;
        }
        public void setX2(float x2){
            this.x2 = x2;
            resetAttribute();
        }
        public void setY2(float y2){
            this.y2 = y2;
            resetAttribute();
        }

        protected String getStringValue(){
            return letter + ' ' + Float.toString(x1) + ' ' + Float.toString(y1)
                + ' ' + Float.toString(x2) + ' ' + Float.toString(y2) + ' '
                + Float.toString(x) + ' ' + Float.toString(y);
        }
    }

    /** Item for quadratic Bezier curveto segments: one control point + endpoint. */
    public class SVGPathSegCurvetoQuadraticItem
        extends SVGPathSegItem
        implements SVGPathSegCurvetoQuadraticAbs,
                   SVGPathSegCurvetoQuadraticRel {

        public SVGPathSegCurvetoQuadraticItem(short type,String letter,
                                              float x1,float y1,float x,
                                              float y ){
            super(type,letter);
            this.x = x;
            this.y = y;
            this.x1 = x1;
            this.y1 = y1;
        }

        public SVGPathSegCurvetoQuadraticItem(SVGPathSeg pathSeg){
            this.type = pathSeg.getPathSegType();
            switch(type){
            case SVGPathSeg.PATHSEG_CURVETO_QUADRATIC_ABS:
                letter = PATHSEG_CURVETO_QUADRATIC_ABS_LETTER;
                x = ((SVGPathSegCurvetoQuadraticAbs)pathSeg).getX();
                y = ((SVGPathSegCurvetoQuadraticAbs)pathSeg).getY();
                x1 = ((SVGPathSegCurvetoQuadraticAbs)pathSeg).getX1();
                y1= ((SVGPathSegCurvetoQuadraticAbs)pathSeg).getY1();
                break;
            case SVGPathSeg.PATHSEG_CURVETO_QUADRATIC_REL:
                letter = PATHSEG_CURVETO_QUADRATIC_REL_LETTER;
                x = ((SVGPathSegCurvetoQuadraticRel)pathSeg).getX();
                y = ((SVGPathSegCurvetoQuadraticRel)pathSeg).getY();
                x1 = ((SVGPathSegCurvetoQuadraticRel)pathSeg).getX1();
                y1= ((SVGPathSegCurvetoQuadraticRel)pathSeg).getY1();
                break;
            default:
            }
        }

        public float getX(){
            return x;
        }
        public float getY(){
            return y;
        }
        public void setX(float x){
            this.x = x;
            resetAttribute();
        }
        public void setY(float y){
            this.y = y;
            resetAttribute();
        }
        public float getX1(){
            return x1;
        }
        public float getY1(){
            return y1;
        }
        public void setX1(float x1){
            this.x1 = x1;
            resetAttribute();
        }
        public void setY1(float y1){
            this.y1 = y1;
            resetAttribute();
        }

        protected String getStringValue(){
            return letter + ' ' + Float.toString(x1) + ' ' + Float.toString(y1)
                + ' ' + Float.toString(x) + ' ' + Float.toString(y);
        }
    }

    /** Item for elliptical arc segments: radii, rotation, flags, endpoint. */
    public class SVGPathSegArcItem
        extends SVGPathSegItem
        implements SVGPathSegArcAbs,
                   SVGPathSegArcRel {

        public SVGPathSegArcItem(short type,String letter,
                                 float r1,float r2,float angle,
                                 boolean largeArcFlag, boolean sweepFlag,
                                 float x, float y ){
            super(type,letter);
            this.x = x;
            this.y = y;
            this.r1 = r1;
            this.r2 = r2;
            this.angle = angle;
            this.largeArcFlag = largeArcFlag;
            this.sweepFlag = sweepFlag;
        }

        public SVGPathSegArcItem(SVGPathSeg pathSeg){
            type = pathSeg.getPathSegType();
            switch(type){
            case SVGPathSeg.PATHSEG_ARC_ABS:
                letter = PATHSEG_ARC_ABS_LETTER;
                x = ((SVGPathSegArcAbs)pathSeg).getX();
                y = ((SVGPathSegArcAbs)pathSeg).getY();
                r1 = ((SVGPathSegArcAbs)pathSeg).getR1();
                r2 = ((SVGPathSegArcAbs)pathSeg).getR2();
                angle = ((SVGPathSegArcAbs)pathSeg).getAngle();
                largeArcFlag = ((SVGPathSegArcAbs)pathSeg).getLargeArcFlag();
                sweepFlag = ((SVGPathSegArcAbs)pathSeg).getSweepFlag();
                break;
            case SVGPathSeg.PATHSEG_ARC_REL:
                letter = PATHSEG_ARC_REL_LETTER;
                x = ((SVGPathSegArcRel)pathSeg).getX();
                y = ((SVGPathSegArcRel)pathSeg).getY();
                r1 = ((SVGPathSegArcRel)pathSeg).getR1();
                r2 = ((SVGPathSegArcRel)pathSeg).getR2();
                angle = ((SVGPathSegArcRel)pathSeg).getAngle();
                largeArcFlag = ((SVGPathSegArcRel)pathSeg).getLargeArcFlag();
                sweepFlag = ((SVGPathSegArcRel)pathSeg).getSweepFlag();
                break;
            default:
            }
        }

        public float getX(){
            return x;
        }
        public float getY(){
            return y;
        }
        public void setX(float x){
            this.x = x;
            resetAttribute();
        }
        public void setY(float y){
            this.y = y;
            resetAttribute();
        }
        public float getR1(){
            return r1;
        }
        public float getR2(){
            return r2;
        }
        public void setR1(float r1){
            this.r1 = r1;
            resetAttribute();
        }
        public void setR2(float r2){
            this.r2 = r2;
            resetAttribute();
        }
        public float getAngle(){
            return angle;
        }
        public void setAngle(float angle){
            this.angle = angle;
            resetAttribute();
        }
        public boolean getSweepFlag(){
            return sweepFlag;
        }
        public void setSweepFlag(boolean sweepFlag){
            this.sweepFlag = sweepFlag;
            resetAttribute();
        }
        public boolean getLargeArcFlag(){
            return largeArcFlag;
        }
        public void setLargeArcFlag(boolean largeArcFlag){
            this.largeArcFlag = largeArcFlag;
            resetAttribute();
        }

        protected String getStringValue(){
            // Boolean flags serialize as "1"/"0" per the SVG path grammar.
            return letter + ' ' + Float.toString(r1) + ' ' + Float.toString(r2)
                + ' ' + Float.toString(angle) + ' ' + ((largeArcFlag?"1":"0"))
                + ' ' + ((sweepFlag?"1":"0")) + (' ') + Float.toString(x) + ' '
                + Float.toString(y);
        }
    }

    /** Item for horizontal lineto segments: letter + x only. */
    public class SVGPathSegLinetoHorizontalItem
        extends SVGPathSegItem
        implements SVGPathSegLinetoHorizontalAbs,
                   SVGPathSegLinetoHorizontalRel {

        public SVGPathSegLinetoHorizontalItem(short type, String letter,
                                              float value){
            super(type,letter);
            this.x = value;
        }

        public SVGPathSegLinetoHorizontalItem(SVGPathSeg pathSeg){
            this.type = pathSeg.getPathSegType();
            switch(type){
            case SVGPathSeg.PATHSEG_LINETO_HORIZONTAL_ABS:
                letter = PATHSEG_LINETO_HORIZONTAL_ABS_LETTER;
                x = ((SVGPathSegLinetoHorizontalAbs)pathSeg).getX();
                break;
            case SVGPathSeg.PATHSEG_LINETO_HORIZONTAL_REL:
                letter = PATHSEG_LINETO_HORIZONTAL_REL_LETTER;
                x = ((SVGPathSegLinetoHorizontalRel)pathSeg).getX();
                break;
            default:
            }
        }

        public float getX(){
            return x;
        }
        public void setX(float x){
            this.x = x;
            resetAttribute();
        }

        protected String getStringValue(){
            return letter + ' ' + Float.toString(x);
        }
    }

    /** Item for vertical lineto segments: letter + y only. */
    public class SVGPathSegLinetoVerticalItem
        extends SVGPathSegItem
        implements SVGPathSegLinetoVerticalAbs,
                   SVGPathSegLinetoVerticalRel {

        public SVGPathSegLinetoVerticalItem(short type, String letter,
                                            float value){
            super(type,letter);
            this.y = value;
        }

        public SVGPathSegLinetoVerticalItem(SVGPathSeg pathSeg){
            type = pathSeg.getPathSegType();
            switch(type){
            case SVGPathSeg.PATHSEG_LINETO_VERTICAL_ABS:
                letter = PATHSEG_LINETO_VERTICAL_ABS_LETTER;
                y = ((SVGPathSegLinetoVerticalAbs)pathSeg).getY();
                break;
            case SVGPathSeg.PATHSEG_LINETO_VERTICAL_REL:
                letter = PATHSEG_LINETO_VERTICAL_REL_LETTER;
                y = ((SVGPathSegLinetoVerticalRel)pathSeg).getY();
                break;
            default:
            }
        }

        public float getY(){
            return y;
        }
        public void setY(float y){
            this.y = y;
            resetAttribute();
        }

        protected String getStringValue(){
            return letter + ' ' + Float.toString(y);
        }
    }

    /** Item for smooth cubic curveto segments: second control point + endpoint. */
    public class SVGPathSegCurvetoCubicSmoothItem
        extends SVGPathSegItem
        implements SVGPathSegCurvetoCubicSmoothAbs,
                   SVGPathSegCurvetoCubicSmoothRel {

        public SVGPathSegCurvetoCubicSmoothItem(short type,String letter,
                                                float x2,float y2,float x,
                                                float y ){
            super(type,letter);
            this.x = x;
            this.y = y;
            this.x2 = x2;
            this.y2 = y2;
        }

        public SVGPathSegCurvetoCubicSmoothItem(SVGPathSeg pathSeg){
            type = pathSeg.getPathSegType();
            switch(type){
            case SVGPathSeg.PATHSEG_CURVETO_CUBIC_SMOOTH_ABS:
                letter = PATHSEG_CURVETO_CUBIC_SMOOTH_ABS_LETTER;
                x = ((SVGPathSegCurvetoCubicSmoothAbs)pathSeg).getX();
                y = ((SVGPathSegCurvetoCubicSmoothAbs)pathSeg).getY();
                x2 = ((SVGPathSegCurvetoCubicSmoothAbs)pathSeg).getX2();
                y2 = ((SVGPathSegCurvetoCubicSmoothAbs)pathSeg).getY2();
                break;
            case SVGPathSeg.PATHSEG_CURVETO_CUBIC_SMOOTH_REL:
                letter = PATHSEG_CURVETO_CUBIC_SMOOTH_REL_LETTER;
                x = ((SVGPathSegCurvetoCubicSmoothRel)pathSeg).getX();
                y = ((SVGPathSegCurvetoCubicSmoothRel)pathSeg).getY();
                x2 = ((SVGPathSegCurvetoCubicSmoothRel)pathSeg).getX2();
                y2 = ((SVGPathSegCurvetoCubicSmoothRel)pathSeg).getY2();
                break;
            default:
            }
        }

        public float getX(){
            return x;
        }
        public float getY(){
            return y;
        }
        public void setX(float x){
            this.x = x;
            resetAttribute();
        }
        public void setY(float y){
            this.y = y;
            resetAttribute();
        }
        public float getX2(){
            return x2;
        }
        public float getY2(){
            return y2;
        }
        public void setX2(float x2){
            this.x2 = x2;
            resetAttribute();
        }
        public void setY2(float y2){
            this.y2 = y2;
            resetAttribute();
        }

        protected String getStringValue(){
            return letter + ' ' + Float.toString(x2) + ' ' + Float.toString(y2)
                + ' ' + Float.toString(x) + ' ' + Float.toString(y);
        }
    }

    /** Item for smooth quadratic curveto segments: endpoint only. */
    public class SVGPathSegCurvetoQuadraticSmoothItem
        extends SVGPathSegItem
        implements SVGPathSegCurvetoQuadraticSmoothAbs ,
                   SVGPathSegCurvetoQuadraticSmoothRel {

        public SVGPathSegCurvetoQuadraticSmoothItem(short type, String letter,
                                                    float x, float y){
            super(type,letter);
            this.x = x;
            this.y = y;
        }

        public SVGPathSegCurvetoQuadraticSmoothItem(SVGPathSeg pathSeg){
            type = pathSeg.getPathSegType();
            switch(type){
            case SVGPathSeg.PATHSEG_CURVETO_QUADRATIC_SMOOTH_ABS:
                letter = PATHSEG_CURVETO_QUADRATIC_SMOOTH_ABS_LETTER;
                x = ((SVGPathSegCurvetoQuadraticSmoothAbs)pathSeg).getX();
                y = ((SVGPathSegCurvetoQuadraticSmoothAbs)pathSeg).getY();
                break;
            case SVGPathSeg.PATHSEG_CURVETO_QUADRATIC_SMOOTH_REL:
                letter = PATHSEG_CURVETO_QUADRATIC_SMOOTH_REL_LETTER;
                x = ((SVGPathSegCurvetoQuadraticSmoothRel)pathSeg).getX();
                y = ((SVGPathSegCurvetoQuadraticSmoothRel)pathSeg).getY();
                break;
            default:
            }
        }

        public float getX(){
            return x;
        }
        public float getY(){
            return y;
        }
        public void setX(float x){
            this.x = x;
            resetAttribute();
        }
        public void setY(float y){
            this.y = y;
            resetAttribute();
        }

        protected String getStringValue(){
            return letter + ' ' + Float.toString(x) + ' ' + Float.toString(y);
        }
    }

    /**
     * Adapter from Batik's {@link DefaultPathHandler} parser callbacks to the
     * {@link ListHandler} that accumulates the segment items.
     */
    protected class PathSegListBuilder extends DefaultPathHandler {

        protected ListHandler listHandler;

        public PathSegListBuilder(ListHandler listHandler){
            this.listHandler = listHandler;
        }

        /**
         * Implements {@link org.apache.batik.parser.PathHandler#startPath()}.
         */
        public void startPath() throws ParseException {
            listHandler.startList();
        }

        /**
         * Implements {@link org.apache.batik.parser.PathHandler#endPath()}.
         */
        public void endPath() throws ParseException {
            listHandler.endList();
        }

        /**
         * Implements {@link org.apache.batik.parser.PathHandler#movetoRel(float,float)}.
         */
        public void movetoRel(float x, float y) throws ParseException {
            listHandler.item(new SVGPathSegMovetoLinetoItem
                (SVGPathSeg.PATHSEG_MOVETO_REL,PATHSEG_MOVETO_REL_LETTER,
                 x,y));
        }

        /**
         * Implements {@link org.apache.batik.parser.PathHandler#movetoAbs(float,float)}.
         */
        public void movetoAbs(float x, float y) throws ParseException {
            listHandler.item(new SVGPathSegMovetoLinetoItem
                (SVGPathSeg.PATHSEG_MOVETO_ABS,PATHSEG_MOVETO_ABS_LETTER,
                 x,y));
        }

        /**
         * Implements {@link org.apache.batik.parser.PathHandler#closePath()}.
         */
        public void closePath() throws ParseException {
            listHandler.item(new SVGPathSegItem
                (SVGPathSeg.PATHSEG_CLOSEPATH,PATHSEG_CLOSEPATH_LETTER));
        }

        /**
         * Implements {@link org.apache.batik.parser.PathHandler#linetoRel(float,float)}.
         */
        public void linetoRel(float x, float y) throws ParseException {
            listHandler.item(new SVGPathSegMovetoLinetoItem
                (SVGPathSeg.PATHSEG_LINETO_REL,PATHSEG_LINETO_REL_LETTER,
                 x,y));
        }

        /**
         * Implements {@link org.apache.batik.parser.PathHandler#linetoAbs(float,float)}.
         */
        public void linetoAbs(float x, float y) throws ParseException {
            listHandler.item(new SVGPathSegMovetoLinetoItem
                (SVGPathSeg.PATHSEG_LINETO_ABS,PATHSEG_LINETO_ABS_LETTER,
                 x,y));
        }

        /**
         * Implements {@link org.apache.batik.parser.PathHandler#linetoHorizontalRel(float)}.
         */
        public void linetoHorizontalRel(float x) throws ParseException {
            listHandler.item(new SVGPathSegLinetoHorizontalItem
                (SVGPathSeg.PATHSEG_LINETO_HORIZONTAL_REL,PATHSEG_LINETO_HORIZONTAL_REL_LETTER,
                 x));
        }

        /**
         * Implements {@link org.apache.batik.parser.PathHandler#linetoHorizontalAbs(float)}.
         */
        public void linetoHorizontalAbs(float x) throws ParseException {
            listHandler.item(new SVGPathSegLinetoHorizontalItem
                (SVGPathSeg.PATHSEG_LINETO_HORIZONTAL_ABS,PATHSEG_LINETO_HORIZONTAL_ABS_LETTER,
                 x));
        }

        /**
         * Implements {@link org.apache.batik.parser.PathHandler#linetoVerticalRel(float)}.
         */
        public void linetoVerticalRel(float y) throws ParseException {
            listHandler.item(new SVGPathSegLinetoVerticalItem
                (SVGPathSeg.PATHSEG_LINETO_VERTICAL_REL,PATHSEG_LINETO_VERTICAL_REL_LETTER,
                 y));
        }

        /**
         * Implements {@link org.apache.batik.parser.PathHandler#linetoVerticalAbs(float)}.
         */
        public void linetoVerticalAbs(float y) throws ParseException {
            listHandler.item(new SVGPathSegLinetoVerticalItem
                (SVGPathSeg.PATHSEG_LINETO_VERTICAL_ABS,PATHSEG_LINETO_VERTICAL_ABS_LETTER,
                 y));
        }

        /**
         * Implements {@link
         * org.apache.batik.parser.PathHandler#curvetoCubicRel(float,float,float,float,float,float)}.
         */
        public void curvetoCubicRel(float x1, float y1,
                                    float x2, float y2,
                                    float x, float y) throws ParseException {
            listHandler.item(new SVGPathSegCurvetoCubicItem
                (SVGPathSeg.PATHSEG_CURVETO_CUBIC_REL,PATHSEG_CURVETO_CUBIC_REL_LETTER,
                 x1,y1,x2,y2,x,y));
        }

        /**
         * Implements {@link
         * org.apache.batik.parser.PathHandler#curvetoCubicAbs(float,float,float,float,float,float)}.
         */
        public void curvetoCubicAbs(float x1, float y1,
                                    float x2, float y2,
                                    float x, float y) throws ParseException {
            listHandler.item(new SVGPathSegCurvetoCubicItem
                (SVGPathSeg.PATHSEG_CURVETO_CUBIC_ABS,PATHSEG_CURVETO_CUBIC_ABS_LETTER,
                 x1,y1,x2,y2,x,y));
        }

        /**
         * Implements {@link
         * org.apache.batik.parser.PathHandler#curvetoCubicSmoothRel(float,float,float,float)}.
         */
        public void curvetoCubicSmoothRel(float x2, float y2,
                                          float x, float y) throws ParseException {
            listHandler.item(new SVGPathSegCurvetoCubicSmoothItem
                (SVGPathSeg.PATHSEG_CURVETO_CUBIC_SMOOTH_REL,PATHSEG_CURVETO_CUBIC_SMOOTH_REL_LETTER,
                 x2,y2,x,y));
        }

        /**
         * Implements {@link
         * org.apache.batik.parser.PathHandler#curvetoCubicSmoothAbs(float,float,float,float)}.
         */
        public void curvetoCubicSmoothAbs(float x2, float y2,
                                          float x, float y) throws ParseException {
            listHandler.item(new SVGPathSegCurvetoCubicSmoothItem
                (SVGPathSeg.PATHSEG_CURVETO_CUBIC_SMOOTH_ABS,PATHSEG_CURVETO_CUBIC_SMOOTH_ABS_LETTER,
                 x2,y2,x,y));
        }

        /**
         * Implements {@link
         * org.apache.batik.parser.PathHandler#curvetoQuadraticRel(float,float,float,float)}.
         */
        public void curvetoQuadraticRel(float x1, float y1,
                                        float x, float y) throws ParseException {
            listHandler.item(new SVGPathSegCurvetoQuadraticItem
                (SVGPathSeg.PATHSEG_CURVETO_QUADRATIC_REL,PATHSEG_CURVETO_QUADRATIC_REL_LETTER,
                 x1,y1,x,y));
        }

        /**
         * Implements {@link
         * org.apache.batik.parser.PathHandler#curvetoQuadraticAbs(float,float,float,float)}.
         */
        public void curvetoQuadraticAbs(float x1, float y1,
                                        float x, float y) throws ParseException {
            listHandler.item(new SVGPathSegCurvetoQuadraticItem
                (SVGPathSeg.PATHSEG_CURVETO_QUADRATIC_ABS,PATHSEG_CURVETO_QUADRATIC_ABS_LETTER,
                 x1,y1,x,y));
        }

        /**
         * Implements {@link org.apache.batik.parser.PathHandler#curvetoQuadraticSmoothRel(float,float)}.
         */
        public void curvetoQuadraticSmoothRel(float x, float y) throws ParseException {
            listHandler.item(new SVGPathSegCurvetoQuadraticSmoothItem
                (SVGPathSeg.PATHSEG_CURVETO_QUADRATIC_SMOOTH_REL,PATHSEG_CURVETO_QUADRATIC_SMOOTH_REL_LETTER,
                 x,y));
        }

        /**
         * Implements {@link org.apache.batik.parser.PathHandler#curvetoQuadraticSmoothAbs(float,float)}.
         */
        public void curvetoQuadraticSmoothAbs(float x, float y) throws ParseException {
            listHandler.item(new SVGPathSegCurvetoQuadraticSmoothItem
                (SVGPathSeg.PATHSEG_CURVETO_QUADRATIC_SMOOTH_ABS,PATHSEG_CURVETO_QUADRATIC_SMOOTH_ABS_LETTER,
                 x,y));
        }

        /**
         * Implements {@link
         * org.apache.batik.parser.PathHandler#arcRel(float,float,float,boolean,boolean,float,float)}.
         */
        public void arcRel(float rx, float ry,
                           float xAxisRotation,
                           boolean largeArcFlag, boolean sweepFlag,
                           float x, float y) throws ParseException {
            listHandler.item(new SVGPathSegArcItem
                (SVGPathSeg.PATHSEG_ARC_REL,PATHSEG_ARC_REL_LETTER,
                 rx,ry,xAxisRotation,largeArcFlag,sweepFlag,x,y));
        }

        /**
         * Implements {@link
         * org.apache.batik.parser.PathHandler#arcAbs(float,float,float,boolean,boolean,float,float)}.
         */
        public void arcAbs(float rx, float ry,
                           float xAxisRotation,
                           boolean largeArcFlag, boolean sweepFlag,
                           float x, float y) throws ParseException {
            listHandler.item(new SVGPathSegArcItem
                (SVGPathSeg.PATHSEG_ARC_ABS,PATHSEG_ARC_ABS_LETTER,
                 rx,ry,xAxisRotation,largeArcFlag,sweepFlag,x,y));
        }
    }
}
package se.ericthelin.fractions;

import org.junit.Test;

import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.not;
import static org.junit.Assert.*;

/**
 * Unit tests for {@code Fraction}: construction from text, string
 * representation (simplification and sign normalization), equality,
 * hash codes, and addition.
 */
public class FractionTest {

    // ---- Construction: rejected inputs -------------------------------------

    @Test(expected = NullPointerException.class)
    public void rejectsNullText() {
        Fraction.of(null);
    }

    @Test
    public void rejectsTextWithLetters() {
        // Given
        String textWithLetters = "foo 7/5 bar";

        try {
            // When
            Fraction.of(textWithLetters);

            // Then
            fail("Nothing thrown");
        } catch (InvalidFractionFormatException e) {
            // The exception reports the offending text verbatim.
            assertThat(e.getText(), is(textWithLetters));
        }
    }

    @Test
    public void rejectsTextWithMultipleSlashes() {
        // Given
        String textWithMultipleSlashes = "7/5/3";

        try {
            // When
            Fraction.of(textWithMultipleSlashes);

            // Then
            fail("Nothing thrown");
        } catch (InvalidFractionFormatException e) {
            assertThat(e.getText(), is(textWithMultipleSlashes));
        }
    }

    @Test
    public void rejectsTextWithDecimalNumerator() {
        // Given
        String textWithDecimalNumerator = "7.0/5";

        try {
            // When
            Fraction.of(textWithDecimalNumerator);

            // Then
            fail("Nothing thrown");
        } catch (InvalidFractionFormatException e) {
            assertThat(e.getText(), is(textWithDecimalNumerator));
        }
    }

    @Test
    public void rejectsTextWithDecimalDenominator() {
        // Given
        String textWithDecimalDenominator = "7/5.0";

        try {
            // When
            Fraction.of(textWithDecimalDenominator);

            // Then
            fail("Nothing thrown");
        } catch (InvalidFractionFormatException e) {
            assertThat(e.getText(), is(textWithDecimalDenominator));
        }
    }

    @Test
    public void rejectsTextWithZeroDenominator() {
        // Given
        String textWithZeroDenominator = "7/0";

        try {
            // When
            Fraction.of(textWithZeroDenominator);

            // Then
            fail("Nothing thrown");
        } catch (ZeroDenominatorException e) {
            // The exception carries the numerator of the invalid fraction.
            assertThat(e.getNumerator(), is(7));
        }
    }

    @Test
    public void rejectsTextWithZeroNumeratorAndDenominator() {
        // Given
        String textWithZeroNumeratorAndDenominator = "0/0";

        try {
            // When
            Fraction.of(textWithZeroNumeratorAndDenominator);

            // Then
            fail("Nothing thrown");
        } catch (ZeroDenominatorException e) {
            // 0/0 is still a zero-denominator error, not zero.
            assertThat(e.getNumerator(), is(0));
        }
    }

    // ---- Construction: accepted inputs -------------------------------------

    @Test
    public void acceptsTextWithJustDigits() {
        Fraction.of("7");
    }

    @Test
    public void acceptsNegativeInteger() {
        Fraction.of("-7");
    }

    // ---- String representation ---------------------------------------------

    @Test
    public void hasMeaningfulStringRepresentation() {
        assertThat(Fraction.of("7/5").toString(), is("7/5"));
    }

    @Test
    public void simplifiesStringRepresentationWhenDenominatorIsOne() {
        assertThat(Fraction.of("7/1").toString(), is("7"));
    }

    @Test
    public void hidesDenominatorOfWholeNumber() {
        assertThat(Fraction.of("7").toString(), is("7"));
    }

    @Test
    public void includesMinusSignOfNegativeOfWholeNumberInStringRepresentation() {
        assertThat(Fraction.of("-7").toString(), is("-7"));
    }

    @Test
    public void simplifiesStringRepresentationOfZeroFraction() {
        assertThat(Fraction.of("0/5").toString(), is("0"));
    }

    @Test
    public void usesGreatestCommonDivisorToSimplifyRepresentation() {
        assertThat(Fraction.of("4/6").toString(), is("2/3"));
    }

    @Test
    public void beginsStringRepresentationOfNegativeFractionHavingNegativeNumeratorWithMinusSign() {
        assertThat(Fraction.of("-1/3").toString(), is("-1/3"));
    }

    @Test
    public void beginsStringRepresentationOfNegativeFractionHavingNegativeDenominatorWithMinusSign() {
        // The sign is normalized onto the numerator.
        assertThat(Fraction.of("1/-3").toString(), is("-1/3"));
    }

    @Test
    public void removesDoubleMinusSignsFromStringRepresentation() {
        assertThat(Fraction.of("-1/-3").toString(), is("1/3"));
    }

    // ---- Equality -----------------------------------------------------------

    @Test
    public void isEqualToSelf() {
        // Given
        Fraction instance = Fraction.of("7/5");

        // Then
        assertTrue(instance.equals(instance));
    }

    @Test
    public void isEqualToFractionWithSimilarValues() {
        assertTrue(Fraction.of("7/5").equals(Fraction.of("7/5")));
    }

    @Test
    public void isEqualToZeroWhenNumeratorIsZero() {
        assertTrue(Fraction.of("0/5").equals(Fraction.of("0")));
    }

    @Test
    public void isEqualToFractionWithEqualNumericValue() {
        // Equality is by reduced numeric value, not literal text.
        assertTrue(Fraction.of("4/6").equals(Fraction.of("2/3")));
    }

    @Test
    public void isEqualToFractionWithNegatedValues() {
        assertTrue(Fraction.of("1/3").equals(Fraction.of("-1/-3")));
    }

    @Test
    public void isEqualToOtherNegativeFractionHavingNegatedValues() {
        assertTrue(Fraction.of("-1/3").equals(Fraction.of("1/-3")));
    }

    @Test
    public void doestNotCareAboutPlacementOfSignForEquality() {
        assertTrue(Fraction.of("1/-3").equals(Fraction.of("-1/3")));
    }

    @Test
    public void isNotEqualToNull() {
        assertFalse(Fraction.of("7/5").equals(null));
    }

    @Test
    public void isNotEqualToDifferentType() {
        assertFalse(Fraction.of("7/5").equals("7/5"));
    }

    @Test
    public void isNotEqualToFractionWithSimilarNumeratorButDifferentDenominator() {
        assertFalse(Fraction.of("7/5").equals(Fraction.of("7/4")));
    }

    @Test
    public void isNotEqualToFractionWithSimilarDenominatorButDifferentNumerator() {
        assertFalse(Fraction.of("7/5").equals(Fraction.of("8/5")));
    }

    // ---- Hash codes ----------------------------------------------------------

    @Test
    public void hasSameHashCodeAsFractionWithSimilarValues() {
        assertThat(Fraction.of("7/5").hashCode(),
                   is(Fraction.of("7/5").hashCode()));
    }

    @Test
    public void hasHashCodeDifferentFromThatOfFractionWithDifferentNumerator() {
        assertThat(Fraction.of("7/5").hashCode(),
                   not(is(Fraction.of("8/5").hashCode())));
    }

    @Test
    public void hasHashCodeDifferentFromThatOfFractionWithDifferentDenominator() {
        assertThat(Fraction.of("7/5").hashCode(),
                   not(is(Fraction.of("7/4").hashCode())));
    }

    // ---- Addition ------------------------------------------------------------

    @Test
    public void canAddZeroToZero() {
        assertThat(Fraction.ZERO.plus(Fraction.ZERO), is(Fraction.ZERO));
    }

    @Test
    public void canAddZeroToWholeNumber() {
        assertThat(Fraction.of("7").plus(Fraction.ZERO), is(Fraction.of("7")));
    }

    @Test
    public void canAddWholeNumbers() {
        assertThat(Fraction.of("7").plus(Fraction.of("5")), is(Fraction.of("12")));
    }

    @Test
    public void canAddWholeNumberToFraction() {
        assertThat(Fraction.of("4/3").plus(Fraction.of("5")),
                   is(Fraction.of("19/3")));
    }

    @Test
    public void canAddFractionToWholeNumber() {
        assertThat(Fraction.of("5").plus(Fraction.of("4/3")),
                   is(Fraction.of("19/3")));
    }

    @Test
    public void canAddFractionToFraction() {
        assertThat(Fraction.of("7/3").plus(Fraction.of("4/5")),
                   is(Fraction.of("47/15")));
    }

    @Test
    public void reducesSumUsingGreatestCommonDivisor() {
        assertThat(Fraction.of("3/8").plus(Fraction.of("1/8")),
                   is(Fraction.of("1/2")));
    }

    @Test
    public void canAddFractionToNegativeFraction() {
        assertThat(Fraction.of("-1/2").plus(Fraction.of("1/3")),
                   is(Fraction.of("-1/6")));
    }

    @Test
    public void canAddNegativeFractionToFraction() {
        assertThat(Fraction.of("1/2").plus(Fraction.of("-1/3")),
                   is(Fraction.of("1/6")));
    }

    @Test
    public void canAddNegativeFractions() {
        assertThat(Fraction.of("-1/2").plus(Fraction.of("-1/3")),
                   is(Fraction.of("-5/6")));
    }
}
package org.maltparserx.core.syntaxgraph;

import java.util.Iterator;
import java.util.NoSuchElementException;
import java.util.Set;
import java.util.SortedMap;
import java.util.SortedSet;
import java.util.TreeMap;
import java.util.TreeSet;

import org.maltparserx.core.exception.MaltChainedException;
import org.maltparserx.core.pool.ObjectPoolList;
import org.maltparserx.core.symbol.SymbolTableHandler;
import org.maltparserx.core.syntaxgraph.edge.Edge;
import org.maltparserx.core.syntaxgraph.edge.GraphEdge;
import org.maltparserx.core.syntaxgraph.node.ComparableNode;
import org.maltparserx.core.syntaxgraph.node.DependencyNode;
import org.maltparserx.core.syntaxgraph.node.Node;
import org.maltparserx.core.syntaxgraph.node.NonTerminal;
import org.maltparserx.core.syntaxgraph.node.NonTerminalNode;
import org.maltparserx.core.syntaxgraph.node.PhraseStructureNode;
import org.maltparserx.core.syntaxgraph.node.Root;
import org.maltparserx.core.syntaxgraph.node.TokenNode;

/**
 * A sentence graph extended with phrase-structure information: non-terminal nodes,
 * phrase-structure edges and secondary edges. Terminal nodes are the token nodes
 * managed by the {@link Sentence} superclass. Edge and non-terminal objects are
 * recycled through object pools to avoid per-parse allocation.
 *
 * @author Johan Hall
 */
public class PhraseStructureGraph extends Sentence implements PhraseStructure {
    /** Pool that recycles edge objects between parses. */
    protected final ObjectPoolList<Edge> edgePool;
    /** All phrase-structure and secondary edges currently in the graph (sorted). */
    protected final SortedSet<Edge> graphEdges;
    /** Non-terminal nodes keyed by their index, in ascending index order. */
    protected final SortedMap<Integer, NonTerminal> nonTerminalNodes;
    /** Pool that recycles non-terminal node objects between parses. */
    protected final ObjectPoolList<NonTerminal> nonTerminalPool;
    /** The root node of the phrase structure. */
    protected final Root root;

    public PhraseStructureGraph(SymbolTableHandler symbolTables) throws MaltChainedException {
        super(symbolTables);
        root = new Root();
        root.setBelongsToGraph(this);
        graphEdges = new TreeSet<Edge>();
        edgePool = new ObjectPoolList<Edge>() {
            protected Edge create() {
                return new GraphEdge();
            }

            public void resetObject(Edge o) throws MaltChainedException {
                o.clear();
            }
        };
        nonTerminalNodes = new TreeMap<Integer, NonTerminal>();
        nonTerminalPool = new ObjectPoolList<NonTerminal>() {
            protected NonTerminal create() throws MaltChainedException {
                return new NonTerminal();
            }

            public void resetObject(NonTerminal o) throws MaltChainedException {
                o.clear();
            }
        };
    }

    /** Adds a terminal (token) node with the next free index. */
    public PhraseStructureNode addTerminalNode() throws MaltChainedException {
        return addTokenNode();
    }

    /** Adds a terminal (token) node with the given index. */
    public PhraseStructureNode addTerminalNode(int index) throws MaltChainedException {
        return addTokenNode(index);
    }

    /** Returns the terminal node with the given index, or null if absent. */
    public PhraseStructureNode getTerminalNode(int index) {
        return getTokenNode(index);
    }

    /** Returns the number of terminal nodes. */
    public int nTerminalNode() {
        return nTokenNode();
    }

    /**
     * Adds (or replaces) a non-terminal node with the given index.
     * The node is taken from the pool and registered with this graph.
     */
    public PhraseStructureNode addNonTerminalNode(int index) throws MaltChainedException {
        NonTerminal node = nonTerminalPool.checkOut();
        node.setIndex(index);
        node.setBelongsToGraph(this);
        nonTerminalNodes.put(index, node);
        return node;
    }

    /** Adds a non-terminal node with the next free index (starting at 1). */
    public PhraseStructureNode addNonTerminalNode() throws MaltChainedException {
        int index = getHighestNonTerminalIndex();
        if (index > 0) {
            return addNonTerminalNode(index + 1);
        }
        return addNonTerminalNode(1);
    }

    /** Returns the non-terminal node with the given index, or null if absent. */
    public PhraseStructureNode getNonTerminalNode(int index) throws MaltChainedException {
        return nonTerminalNodes.get(index);
    }

    /** Returns the highest non-terminal index in use, or 0 if there are none. */
    public int getHighestNonTerminalIndex() {
        try {
            return nonTerminalNodes.lastKey();
        } catch (NoSuchElementException e) {
            // lastKey() throws on an empty map; 0 signals "no non-terminals yet".
            return 0;
        }
    }

    /** Returns a snapshot of all non-terminal indices, sorted ascending. */
    public Set<Integer> getNonTerminalIndices() {
        return new TreeSet<Integer>(nonTerminalNodes.keySet());
    }

    /** Returns true if the graph has at least one non-terminal node. */
    public boolean hasNonTerminals() {
        return !nonTerminalNodes.isEmpty();
    }

    /** Returns the number of non-terminal nodes. */
    public int nNonTerminals() {
        return nonTerminalNodes.size();
    }

    /** Returns the phrase-structure root node. */
    public PhraseStructureNode getPhraseStructureRoot() {
        return root;
    }

    /**
     * Adds a phrase-structure edge from {@code parent} (must be a non-terminal)
     * to {@code child} (must not be the root).
     *
     * @throws MaltChainedException if either node is null or of the wrong type
     */
    public Edge addPhraseStructureEdge(PhraseStructureNode parent, PhraseStructureNode child) throws MaltChainedException {
        if (parent == null || child == null) {
            throw new MaltChainedException("Parent or child node is missing.");
        } else if (parent instanceof NonTerminalNode && !child.isRoot()) {
            Edge e = edgePool.checkOut();
            e.setBelongsToGraph(this);
            e.setEdge((Node) parent, (Node) child, Edge.PHRASE_STRUCTURE_EDGE);
            graphEdges.add(e);
            return e;
        } else {
            throw new MaltChainedException("Parent or child node is not of correct node type.");
        }
    }

    /**
     * Removes every phrase-structure edge between {@code parent} and {@code child}
     * and returns the removed edge objects to the pool.
     *
     * <p>Bug fix: the previous implementation removed matching edges from
     * {@code graphEdges} while iterating it with a for-each loop, which makes the
     * fail-fast {@link TreeSet} iterator throw
     * {@link java.util.ConcurrentModificationException}. It also called
     * {@code e.clear()} <em>before</em> {@code graphEdges.remove(e)}; clearing may
     * alter the fields the sorted set compares on, so the removal could miss the
     * element. The edge is now removed through the iterator first and cleared
     * afterwards.
     *
     * @throws MaltChainedException if either node is null or of the wrong type
     */
    public void removePhraseStructureEdge(PhraseStructureNode parent, PhraseStructureNode child) throws MaltChainedException {
        if (parent == null || child == null) {
            throw new MaltChainedException("Parent or child node is missing.");
        } else if (parent instanceof NonTerminalNode && !child.isRoot()) {
            Iterator<Edge> ie = graphEdges.iterator();
            while (ie.hasNext()) {
                Edge e = ie.next();
                if (e.getSource() == parent && e.getTarget() == child) {
                    ie.remove(); // remove before clear(): clear() may change the edge's sort key
                    e.clear();
                    if (e instanceof GraphEdge) {
                        edgePool.checkIn(e);
                    }
                }
            }
        } else {
            throw new SyntaxGraphException("Head node is not a root node or a terminal node.");
        }
    }

    /**
     * Adds a secondary edge from {@code source} to {@code target}. Returns null
     * (silently) when {@code target} is the root.
     */
    public Edge addSecondaryEdge(ComparableNode source, ComparableNode target) throws MaltChainedException {
        if (source == null || target == null) {
            throw new SyntaxGraphException("Head or dependent node is missing.");
        } else if (!target.isRoot()) {
            Edge e = edgePool.checkOut();
            e.setBelongsToGraph(this);
            e.setEdge((Node) source, (Node) target, Edge.SECONDARY_EDGE);
            graphEdges.add(e);
            return e;
        }
        return null;
    }

    /** Removes any secondary edge from {@code source} to {@code target}. */
    public void removeSecondaryEdge(ComparableNode source, ComparableNode target) throws MaltChainedException {
        if (source == null || target == null) {
            throw new SyntaxGraphException("Head or dependent node is missing.");
        } else if (!target.isRoot()) {
            Iterator<Edge> ie = ((Node) target).getIncomingEdgeIterator();
            while (ie.hasNext()) {
                Edge e = ie.next();
                if (e.getSource() == source) {
                    ie.remove();
                    graphEdges.remove(e);
                    edgePool.checkIn(e);
                }
            }
        }
    }

    /** Returns the number of edges in the graph. */
    public int nEdges() {
        return graphEdges.size();
    }

    /** Returns the live (unmodifiable-by-convention) edge set of the graph. */
    public SortedSet<Edge> getEdges() {
        return graphEdges;
    }

    /** Returns true if every non-terminal node is continuous. */
    public boolean isContinuous() {
        for (NonTerminal node : nonTerminalNodes.values()) {
            if (!node.isContinuous()) {
                return false;
            }
        }
        return true;
    }

    /**
     * Returns true if every non-terminal node is continuous, ignoring terminals
     * attached directly to the root.
     */
    public boolean isContinuousExcludeTerminalsAttachToRoot() {
        for (NonTerminal node : nonTerminalNodes.values()) {
            if (!node.isContinuousExcludeTerminalsAttachToRoot()) {
                return false;
            }
        }
        return true;
    }

    /** Resets the graph to its empty state, returning pooled objects. */
    public void clear() throws MaltChainedException {
        edgePool.checkInAll();
        graphEdges.clear();
        root.clear();
        root.setBelongsToGraph(this);
        nonTerminalPool.checkInAll();
        nonTerminalNodes.clear();
        super.clear();
    }

    /** Renders one terminal node (plus its head edge, if any) as a line of text. */
    public String toStringTerminalNode(TokenNode node) {
        final StringBuilder sb = new StringBuilder();
        final DependencyNode depnode = node;
        sb.append(node.toString().trim());
        if (depnode.hasHead()) {
            sb.append('\t');
            try {
                sb.append(depnode.getHead().getIndex());
                sb.append('\t');
                sb.append(depnode.getHeadEdge().toString());
            } catch (MaltChainedException e) {
                System.err.println(e);
            }
        }
        sb.append('\n');
        return sb.toString();
    }

    /** Renders one non-terminal node and its outgoing edges as text. */
    public String toStringNonTerminalNode(NonTerminalNode node) {
        final StringBuilder sb = new StringBuilder();
        sb.append(node.toString().trim());
        sb.append('\n');
        Iterator<Edge> ie = ((Node) node).getOutgoingEdgeIterator();
        while (ie.hasNext()) {
            Edge e = ie.next();
            // Prefix targets with T (terminal) or N (non-terminal) plus index.
            if (e.getTarget() instanceof TokenNode) {
                sb.append(" T");
                sb.append(e.getTarget().getIndex());
            }
            if (e.getTarget() instanceof NonTerminalNode) {
                sb.append(" N");
                sb.append(e.getTarget().getIndex());
            }
            sb.append('\t');
            sb.append(e.toString());
            sb.append('\n');
        }
        return sb.toString();
    }

    public String toString() {
        final StringBuilder sb = new StringBuilder();
        for (int index : terminalNodes.keySet()) {
            sb.append(toStringTerminalNode(terminalNodes.get(index)));
        }
        sb.append('\n');
        sb.append(toStringNonTerminalNode((NonTerminalNode) getPhraseStructureRoot()));
        for (int index : nonTerminalNodes.keySet()) {
            sb.append(toStringNonTerminalNode(nonTerminalNodes.get(index)));
        }
        return sb.toString();
    }
}
// Copyright 2014 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.rules.python; import com.google.common.base.Ascii; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableMap; import com.google.devtools.build.lib.analysis.config.FragmentOptions; import com.google.devtools.common.options.Converter; import com.google.devtools.common.options.Option; import com.google.devtools.common.options.OptionDefinition; import com.google.devtools.common.options.OptionDocumentationCategory; import com.google.devtools.common.options.OptionEffectTag; import com.google.devtools.common.options.OptionMetadataTag; import com.google.devtools.common.options.OptionsParser; import com.google.devtools.common.options.OptionsParsingException; import com.google.devtools.common.options.TriState; import java.util.Map; /** * Python-related command-line options. * * <p>Due to the migration of the Python version API (see #6583) and the default Python version (see * (see #6647), the Python major version mode ({@code PY2} vs {@code PY3}) is a function of multiple * flags. See {@link #getPythonVersion} for more details. */ public class PythonOptions extends FragmentOptions { /** Converter for options that take ({@code PY2} or {@code PY3}). */ // We don't use EnumConverter because we want to disallow non-target PythonVersion values. 
public static class TargetPythonVersionConverter implements Converter<PythonVersion> { @Override public PythonVersion convert(String input) throws OptionsParsingException { try { // Although in rule attributes the enum values are case sensitive, the convention from // EnumConverter is that the options parser is case insensitive. input = Ascii.toUpperCase(input); return PythonVersion.parseTargetValue(input); } catch (IllegalArgumentException ex) { throw new OptionsParsingException( "Not a valid Python major version, should be PY2 or PY3", ex); } } @Override public String getTypeDescription() { return "PY2 or PY3"; } } @Option( name = "build_python_zip", defaultValue = "auto", documentationCategory = OptionDocumentationCategory.OUTPUT_PARAMETERS, effectTags = {OptionEffectTag.AFFECTS_OUTPUTS}, help = "Build python executable zip; on on Windows, off on other platforms") public TriState buildPythonZip; /** * Deprecated machinery for setting the Python version; will be removed soon. * * <p>Not GraveyardOptions'd because we'll delete this alongside other soon-to-be-removed options * in this file. */ @Option( name = "incompatible_remove_old_python_version_api", defaultValue = "true", documentationCategory = OptionDocumentationCategory.UNDOCUMENTED, effectTags = {OptionEffectTag.LOADING_AND_ANALYSIS}, metadataTags = {OptionMetadataTag.INCOMPATIBLE_CHANGE}, help = "No-op, will be removed soon.") public boolean incompatibleRemoveOldPythonVersionApi; /** * Deprecated machinery for setting the Python version; will be removed soon. * * <p>Not GraveyardOptions'd because we'll delete this alongside other soon-to-be-removed options * in this file. 
*/ @Option( name = "incompatible_allow_python_version_transitions", defaultValue = "true", documentationCategory = OptionDocumentationCategory.UNDOCUMENTED, effectTags = {OptionEffectTag.LOADING_AND_ANALYSIS}, metadataTags = {OptionMetadataTag.INCOMPATIBLE_CHANGE}, help = "No-op, will be removed soon.") public boolean incompatibleAllowPythonVersionTransitions; /** * Native rule logic should call {@link #getDefaultPythonVersion} instead of accessing this option * directly. */ @Option( name = "incompatible_py3_is_default", defaultValue = "true", documentationCategory = OptionDocumentationCategory.GENERIC_INPUTS, effectTags = { OptionEffectTag.LOADING_AND_ANALYSIS, OptionEffectTag.AFFECTS_OUTPUTS // because of "-py2"/"-py3" output root }, metadataTags = {OptionMetadataTag.INCOMPATIBLE_CHANGE}, help = "If true, `py_binary` and `py_test` targets that do not set their `python_version` (or " + "`default_python_version`) attribute will default to PY3 rather than to PY2. If " + "you set this flag it is also recommended to set " + "`--incompatible_py2_outputs_are_suffixed`.") public boolean incompatiblePy3IsDefault; @Option( name = "incompatible_py2_outputs_are_suffixed", defaultValue = "true", documentationCategory = OptionDocumentationCategory.GENERIC_INPUTS, effectTags = {OptionEffectTag.AFFECTS_OUTPUTS}, metadataTags = {OptionMetadataTag.INCOMPATIBLE_CHANGE}, help = "If true, targets built in the Python 2 configuration will appear under an output root " + "that includes the suffix '-py2', while targets built for Python 3 will appear " + "in a root with no Python-related suffix. This means that the `bazel-bin` " + "convenience symlink will point to Python 3 targets rather than Python 2. " + "If you enable this option it is also recommended to enable " + "`--incompatible_py3_is_default`.") public boolean incompatiblePy2OutputsAreSuffixed; /** * This field should be either null (unset), {@code PY2}, or {@code PY3}. 
Other {@code * PythonVersion} values do not represent distinct Python versions and are not allowed. * * <p>Native rule logic should call {@link #getPythonVersion} / {@link #setPythonVersion} instead * of accessing this option directly. BUILD/.bzl code should {@code select()} on {@code <tools * repo>//tools/python:python_version} rather than on this option directly. */ @Option( name = "python_version", defaultValue = "null", converter = TargetPythonVersionConverter.class, documentationCategory = OptionDocumentationCategory.GENERIC_INPUTS, effectTags = { OptionEffectTag.LOADING_AND_ANALYSIS, OptionEffectTag.AFFECTS_OUTPUTS // because of "-py2"/"-py3" output root }, help = "The Python major version mode, either `PY2` or `PY3`. Note that this is overridden by " + "`py_binary` and `py_test` targets (even if they don't explicitly specify a " + "version) so there is usually not much reason to supply this flag.") public PythonVersion pythonVersion; private static final OptionDefinition PYTHON_VERSION_DEFINITION = OptionsParser.getOptionDefinitionByName(PythonOptions.class, "python_version"); /** * Deprecated machinery for setting the Python version; will be removed soon. * * <p>Not in GraveyardOptions because we still want to prohibit users from select()ing on it. */ @Option( name = "force_python", defaultValue = "null", converter = TargetPythonVersionConverter.class, documentationCategory = OptionDocumentationCategory.UNDOCUMENTED, effectTags = {OptionEffectTag.LOADING_AND_ANALYSIS, OptionEffectTag.AFFECTS_OUTPUTS}, help = "No-op, will be removed soon.") public PythonVersion forcePython; private static final OptionDefinition FORCE_PYTHON_DEFINITION = OptionsParser.getOptionDefinitionByName(PythonOptions.class, "force_python"); /** * This field should be either null (unset), {@code PY2}, or {@code PY3}. Other {@code * PythonVersion} values do not represent distinct Python versions and are not allowed. * * <p>Null means to use the default ({@link #getDefaultPythonVersion}). 
* * <p>This option is only read by {@link #getHost}. It should not be read by other native code or * by {@code select()}s in user code. */ @Option( name = "host_force_python", defaultValue = "null", converter = TargetPythonVersionConverter.class, documentationCategory = OptionDocumentationCategory.OUTPUT_PARAMETERS, effectTags = {OptionEffectTag.LOADING_AND_ANALYSIS, OptionEffectTag.AFFECTS_OUTPUTS}, help = "Overrides the Python version for the host configuration. Can be \"PY2\" or \"PY3\".") public PythonVersion hostForcePython; private static final OptionDefinition HOST_FORCE_PYTHON_DEFINITION = OptionsParser.getOptionDefinitionByName(PythonOptions.class, "host_force_python"); @Option( name = "incompatible_disallow_legacy_py_provider", defaultValue = "true", documentationCategory = OptionDocumentationCategory.STARLARK_SEMANTICS, effectTags = {OptionEffectTag.LOADING_AND_ANALYSIS}, metadataTags = {OptionMetadataTag.INCOMPATIBLE_CHANGE}, help = "If set to true, native Python rules will neither produce nor consume the legacy \"py\" " + "provider. Use PyInfo instead. Under this flag, passing the legacy provider to a " + "Python target will be an error.") public boolean incompatibleDisallowLegacyPyProvider; // TODO(b/153369373): Delete this flag. 
@Option( name = "incompatible_use_python_toolchains", defaultValue = "true", documentationCategory = OptionDocumentationCategory.GENERIC_INPUTS, effectTags = {OptionEffectTag.LOADING_AND_ANALYSIS}, metadataTags = {OptionMetadataTag.INCOMPATIBLE_CHANGE}, help = "If set to true, executable native Python rules will use the Python runtime specified by " + "the Python toolchain, rather than the runtime given by legacy flags like " + "--python_top.") public boolean incompatibleUsePythonToolchains; @Option( name = "experimental_build_transitive_python_runfiles", defaultValue = "true", documentationCategory = OptionDocumentationCategory.UNDOCUMENTED, effectTags = {OptionEffectTag.LOADING_AND_ANALYSIS, OptionEffectTag.AFFECTS_OUTPUTS}, help = "Build the runfiles trees of py_binary targets that appear in the transitive " + "data runfiles of another binary.") public boolean buildTransitiveRunfilesTrees; @Option( name = "incompatible_default_to_explicit_init_py", defaultValue = "false", documentationCategory = OptionDocumentationCategory.GENERIC_INPUTS, effectTags = {OptionEffectTag.AFFECTS_OUTPUTS}, metadataTags = {OptionMetadataTag.INCOMPATIBLE_CHANGE}, help = "This flag changes the default behavior so that __init__.py files are no longer " + "automatically created in the runfiles of Python targets. Precisely, when a " + "py_binary or py_test target has legacy_create_init set to \"auto\" (the default), " + "it is treated as false if and only if this flag is set. See " + "https://github.com/bazelbuild/bazel/issues/10076.") public boolean incompatibleDefaultToExplicitInitPy; @Override public Map<OptionDefinition, SelectRestriction> getSelectRestrictions() { // TODO(brandjon): Instead of referencing the python_version target, whose path depends on the // tools repo name, reference a standalone documentation page instead. 
ImmutableMap.Builder<OptionDefinition, SelectRestriction> restrictions = ImmutableMap.builder(); restrictions.put( PYTHON_VERSION_DEFINITION, new SelectRestriction( /*visibleWithinToolsPackage=*/ true, "Use @bazel_tools//python/tools:python_version instead.")); restrictions.put( FORCE_PYTHON_DEFINITION, new SelectRestriction( /*visibleWithinToolsPackage=*/ true, "Use @bazel_tools//python/tools:python_version instead.")); restrictions.put( HOST_FORCE_PYTHON_DEFINITION, new SelectRestriction( /*visibleWithinToolsPackage=*/ false, "Use @bazel_tools//python/tools:python_version instead.")); return restrictions.build(); } /** * Returns the Python major version ({@code PY2} or {@code PY3}) that targets that do not specify * a version should be built for. */ public PythonVersion getDefaultPythonVersion() { return incompatiblePy3IsDefault ? PythonVersion.PY3 : PythonVersion.PY2; } /** * Returns the Python major version ({@code PY2} or {@code PY3}) that targets should be built for. * * <p>The version is taken as the value of {@code --python_version} if not null, otherwise {@link * #getDefaultPythonVersion}. */ public PythonVersion getPythonVersion() { if (pythonVersion != null) { return pythonVersion; } else { return getDefaultPythonVersion(); } } /** * Returns whether a Python version transition to {@code version} is not a no-op. * * @throws IllegalArgumentException if {@code version} is not {@code PY2} or {@code PY3} */ public boolean canTransitionPythonVersion(PythonVersion version) { Preconditions.checkArgument(version.isTargetValue()); return !version.equals(getPythonVersion()); } /** * Sets the Python version to {@code version}. * * <p>Since this is a mutation, it should only be called on a newly constructed instance. * * @throws IllegalArgumentException if {@code version} is not {@code PY2} or {@code PY3} */ // TODO(brandjon): Consider removing this mutator now that the various flags and semantics it // used to consider are gone. 
We'd revert to just setting the public option field directly. public void setPythonVersion(PythonVersion version) { Preconditions.checkArgument(version.isTargetValue()); this.pythonVersion = version; } @Override public FragmentOptions getHost() { PythonOptions hostPythonOptions = (PythonOptions) getDefault(); PythonVersion hostVersion = (hostForcePython != null) ? hostForcePython : getDefaultPythonVersion(); hostPythonOptions.setPythonVersion(hostVersion); hostPythonOptions.incompatiblePy3IsDefault = incompatiblePy3IsDefault; hostPythonOptions.incompatiblePy2OutputsAreSuffixed = incompatiblePy2OutputsAreSuffixed; hostPythonOptions.buildPythonZip = buildPythonZip; hostPythonOptions.incompatibleDisallowLegacyPyProvider = incompatibleDisallowLegacyPyProvider; hostPythonOptions.incompatibleUsePythonToolchains = incompatibleUsePythonToolchains; // Save host options in case of a further exec->host transition. hostPythonOptions.hostForcePython = hostForcePython; return hostPythonOptions; } @Override public FragmentOptions getNormalized() { // We want to ensure that options with "null" physical default values are normalized, to avoid // #7808. PythonOptions newOptions = (PythonOptions) clone(); newOptions.setPythonVersion(newOptions.getPythonVersion()); return newOptions; } }
package com.syntax.code;

import java.util.ArrayList;

import javax.swing.text.AttributeSet;

/**
 * The content of StyledTextBody is synchronized with SyntaxTextArea.
 * Do not use {@link TextBody#insertText(int,String) insertText(int,String)}
 * or {@link TextBody#removeText(int,int) removeText(int,int)} to update text in SyntaxTextArea.
 * In order to avoid recursive function calls, use
 * {@link #insertStyledText(int, String, AttributeSet) insertStyledText(int, String, AttributeSet)} and
 * {@link #removeStyledText(int, int) removeStyledText(int, int)} to modify text showing in the SyntaxTextArea.
 *
 * <p>Internally this class keeps an undo/redo history: {@code commands} is the ordered
 * list of executed {@link Command}s and {@code nowState} is the index of the last
 * command currently in effect (-1 means none).
 *
 * @see com.syntax.ui.SyntaxTextArea
 */
public class StyledTextBody extends TextBody {
    // Listener notified of every styled insertion/removal; may be null.
    private StyledChangeListener callback;
    // Index into `commands` of the last executed command; -1 when history is empty
    // or everything has been reversed.
    private int nowState;
    // Undo/redo history. Entries after `nowState` are commands that were reversed
    // and can be re-applied by forward().
    private ArrayList<Command> commands;
    // While true, each edit is wrapped in a CommandCollection so consecutive edits
    // merge into a single undoable unit.
    private boolean forcedMerge;

    /**
     * Construct empty StyledTextBody
     */
    public StyledTextBody() {
        this(null);
    }

    /**
     * Construct empty StyledTextBody and set {@link StyledChangeListener StyledChangeListener}
     *
     * @param callback style change call back
     */
    public StyledTextBody(StyledChangeListener callback) {
        super();
        this.callback = callback;
        commands = new ArrayList<>();
        nowState = -1;
        forcedMerge = false;
    }

    /**
     * Make the following editions containing {@link #removeStyledText(int,int) removement}
     * and {@link #insertStyledText(int,String,AttributeSet) insertion} merge into one edition.
     * The merged edition is reversed as a whole by a single CTRL + Z shortcut.
     * Use {@link #finishForcedMerge() finishForcedMerge()}
     * to make the following editions be normal or be a separator between two merged editions.
     */
    public synchronized void startForcedMerge() {
        forcedMerge = true;
    }

    /**
     * Stop the forced merge mode. This can be a separator between two forced merged editions or just
     * change the mode.
     *
     * @see #startForcedMerge()
     */
    public synchronized void finishForcedMerge() {
        forcedMerge = false;
        // Deactivate the trailing collection so the next CommandCollection will not
        // merge into it (see CommandCollection.combine, which requires isActive()).
        if(commands.size() != 0) {
            Command command = commands.get(commands.size() - 1);
            if(command instanceof CommandCollection)
                ((CommandCollection)command).setActive(false);
        }
    }

    /**
     * Insert text to StyledTextBody. The content of StyledTextBody is synchronized with SyntaxTextArea.
     *
     * @param start the index where the text was inserted. If the location
     * is the head of paragraph, offset should be 0. If the location is in the end
     * of paragraph, offset should be the length of paragraph
     * @param text the specified string which is inserted to paragraph
     * @param attributeSet the attribute of text
     *
     * @see com.syntax.ui.SyntaxTextArea
     */
    public synchronized void insertStyledText(int start, String text, AttributeSet attributeSet) {
        StyledInsertCommand command = new StyledInsertCommand(start, text, attributeSet);
        // Multi-character insertions (e.g. paste) are sealed immediately so later
        // single keystrokes do not merge into them.
        if(text.length() > 1)
            command.finish();
        if(forcedMerge) {
            CommandCollection commandCollection = new CommandCollection(command);
            nextState(commandCollection);
        } else
            nextState(command);
    }

    /**
     * Remove text on SyntaxTextArea. The content of StyledTextBody is synchronized with SyntaxTextArea.
     *
     * @param start the index where the text was removed. If the first character
     * of text is removed from the head of paragraph, offset should be 0
     * @param length the length of string which is removed from paragraph
     *
     * @see com.syntax.ui.SyntaxTextArea
     */
    public synchronized void removeStyledText(int start, int length) {
        // Capture the removed substring so the removal can be reversed later.
        StyledRemoveCommand command = new StyledRemoveCommand(start, getText().substring(start, start + length));
        if(length > 1)
            command.finish();
        if(forcedMerge) {
            CommandCollection commandCollection = new CommandCollection(command);
            nextState(commandCollection);
        } else
            nextState(command);
    }

    /**
     * Register StyledChangeListener to StyledTextBody
     *
     * @param callback specified callback
     *
     * @see StyledChangeListener
     */
    public void setStyledChangeListener(StyledChangeListener callback) {
        this.callback = callback;
    }

    /**
     * Implement {@link CommandModule#nextState(Command) nextState(Command)}.
     * Update state by a {@link Command Command}.
     * If the number of commands in this command module is not zero before this
     * command is added, this command will try to {@link Command#combine(Command) combine}
     * the last command which has been executed. If this command module has gone through some
     * {@link #reverse() reverse} operations, some commands in the command module may be reversed
     * to an un-executed state. Any command added to the command module causes commands in
     * un-executed state to be deleted from the command list.
     *
     * @param command the command which updates this state
     */
    @Override
    protected synchronized void nextState(Command command) {
        // Drop any redo tail: commands after nowState were reversed and are now stale.
        for(int i = commands.size() - 1; i > nowState; i--)
            commands.remove(i);
        // Combine command
        if(commands.size() >= 1) {
            Command combinedCommand = command.combine(commands.get(commands.size() - 1));
            if(combinedCommand != null) {
                // Merge succeeded: replace the last command in place; nowState is
                // unchanged because the history length did not grow.
                commands.remove(commands.size() - 1);
                commands.add(combinedCommand);
            } else {
                nowState++;
                commands.add(command);
            }
        } else {
            nowState++;
            commands.add(command);
        }
        // Execute only the incoming command — the previous one (if merged) has
        // already had its effect applied.
        command.execute();
    }

    /**
     * Implement {@link CommandModule#reverse() reverse()}.
     * Move the current state to the previous state by the command in the command list.
     * If there is no previous command, the state will not be changed.
     *
     * @return false if there is no previous state
     */
    @Override
    public boolean reverse() {
        if(nowState != -1) {
            nowState--;
            commands.get(nowState + 1).reverseExecution();
            return true;
        } else
            return false;
    }

    /**
     * Implement {@link CommandModule#forward() forward()}.
     * Move the current state to the next state by the command in the command list.
     * If there is no next command, the state will not be changed.
     *
     * @return false if there is no next state
     */
    @Override
    public boolean forward() {
        if(nowState + 1 < commands.size()) {
            nowState++;
            commands.get(nowState).execute();
            return true;
        } else
            return false;
    }

    /**
     * Insertion command that also notifies the style listener.
     * Consecutive single-character insertions merge into one command so that a
     * whole typed run is undone in a single step.
     */
    private class StyledInsertCommand extends InsertCommand {
        private int start;
        private String changeStr;
        private AttributeSet attributeSet;

        public StyledInsertCommand(int start,String changeStr, AttributeSet attributeSet) {
            super(start, changeStr);
            this.start = start;
            this.changeStr = changeStr;
            this.attributeSet = attributeSet;
        }

        @Override
        public void execute() {
            super.execute();
            if(callback != null)
                callback.changeStyledText(start, changeStr, attributeSet);
        }

        @Override
        public void reverseExecution() {
            super.reverseExecution();
            if(callback != null)
                callback.removeStyledText(start, changeStr);
        }

        @Override
        public Command combine(Command command) {
            // Merge only when both commands are still combineable, this insertion is
            // not a newline, and it continues directly after the previous insertion.
            if(isCombineable()) {
                if(!getChangeStr().equals("\n"))
                    if(command instanceof StyledInsertCommand) {
                        StyledInsertCommand sLastCommand = (StyledInsertCommand)command;
                        if(sLastCommand.isCombineable())
                            if(getStart() == sLastCommand.getStart() + sLastCommand.getChangeStr().length()) {
                                // NOTE(review): the merged command carries a null AttributeSet —
                                // re-applying it via forward() loses styling; confirm intended.
                                StyledInsertCommand newCommand = new StyledInsertCommand(
                                        sLastCommand.getStart(),
                                        sLastCommand.getChangeStr() + getChangeStr(),
                                        null);
                                return newCommand;
                            }
                    }
            }
            return null;
        }
    }

    /**
     * Removal command that also notifies the style listener.
     * Consecutive backward deletions merge into one command.
     */
    private class StyledRemoveCommand extends RemoveCommand {
        private int start;
        private String changeStr;

        public StyledRemoveCommand(int start,String changeStr) {
            super(start, changeStr);
            this.start = start;
            this.changeStr = changeStr;
        }

        @Override
        public void execute() {
            super.execute();
            if(callback != null)
                callback.removeStyledText(start , changeStr);
        }

        @Override
        public void reverseExecution() {
            super.reverseExecution();
            if(callback != null)
                callback.changeStyledText(start, changeStr, null);
        }

        @Override
        public Command combine(Command command) {
            // Merge when this removal ends exactly where the previous removal started
            // (the pattern produced by repeated Backspace).
            if(isCombineable())
                if(command instanceof StyledRemoveCommand) {
                    StyledRemoveCommand sLastCommand = (StyledRemoveCommand)command;
                    if(sLastCommand.isCombineable())
                        if(getStart() + getChangeStr().length() == sLastCommand.getStart()) {
                            StyledRemoveCommand newCommand = new StyledRemoveCommand(
                                    getStart(),
                                    getChangeStr() + sLastCommand.getChangeStr());
                            // A merged removal containing a newline is sealed so it
                            // stops absorbing further deletions.
                            if(getChangeStr().equals("\n"))
                                newCommand.finish();
                            return newCommand;
                        }
                }
            return null;
        }
    }

    /**
     * A group of commands executed and reversed as a single unit; used by the
     * forced-merge mode. While {@code active} is true, a following collection may
     * merge into this one via {@link #combine(Command)}.
     */
    private class CommandCollection implements Command {
        private ArrayList<Command> commands;
        private boolean active;

        public CommandCollection() {
            commands = new ArrayList<>();
            active = true;
        }

        public CommandCollection(Command command) {
            commands = new ArrayList<>();
            commands.add(command);
            active = true;
        }

        public void add(Command command) {
            commands.add(command);
        }

        public boolean isActive() {
            return active;
        }

        public void setActive(boolean active) {
            this.active = active;
        }

        @Override
        public void execute() {
            for(Command command: commands)
                command.execute();
        }

        @Override
        public void reverseExecution() {
            // Reverse in the opposite order of execution.
            for(int i = commands.size() - 1; i >= 0; i--)
                commands.get(i).reverseExecution();
        }

        public ArrayList<Command> getCommands() {
            return commands;
        }

        @Override
        public Command combine(Command command) {
            // Merge only with another *active* collection; the previous collection's
            // commands come first so execution order is preserved.
            CommandCollection newCommand = new CommandCollection();
            if(command instanceof CommandCollection) {
                CommandCollection commandCollection = (CommandCollection)command;
                if(commandCollection.isActive()) {
                    for(Command cmd: commandCollection.getCommands())
                        newCommand.add(cmd);
                    for(Command cmd: getCommands())
                        newCommand.add(cmd);
                    return newCommand;
                } else
                    return null;
            } else
                return null;
        }
    }

    /**
     * Listen for style changing in StyledTextBody
     *
     * @see StyledTextBody
     */
    public static interface StyledChangeListener {
        /**
         * Listening to text attribute changing in StyledTextBody
         *
         * @param start the start position of specified text and points out the first
         * character of text
         * @param text the text which is inserted to the text body
         * @param attributeSet the attribute of text
         */
        public void changeStyledText(int start, String text, AttributeSet attributeSet);

        /**
         * Listening to the request of changing the attribute of text when removal happens on
         * the text body
         *
         * @param start the start position of specified text and points out the first
         * character of text
         * @param text the text which is removed from the text body
         */
        public void removeStyledText(int start, String text);
    }
}
/* * ==================================================================== * * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ==================================================================== * * This software consists of voluntary contributions made by many * individuals on behalf of the Apache Software Foundation. For more * information on the Apache Software Foundation, please see * <http://www.apache.org/>. 
* */ package org.apache.http.impl.client.integration; import java.io.IOException; import java.net.InetSocketAddress; import java.util.ArrayList; import java.util.List; import org.apache.http.Header; import org.apache.http.HttpException; import org.apache.http.HttpHost; import org.apache.http.HttpRequest; import org.apache.http.HttpResponse; import org.apache.http.HttpStatus; import org.apache.http.ProtocolException; import org.apache.http.ProtocolVersion; import org.apache.http.client.CircularRedirectException; import org.apache.http.client.ClientProtocolException; import org.apache.http.client.CookieStore; import org.apache.http.client.RedirectException; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPost; import org.apache.http.client.params.ClientPNames; import org.apache.http.client.protocol.ClientContext; import org.apache.http.cookie.SM; import org.apache.http.entity.StringEntity; import org.apache.http.impl.client.BasicCookieStore; import org.apache.http.impl.client.HttpClientBuilder; import org.apache.http.impl.cookie.BasicClientCookie; import org.apache.http.message.BasicHeader; import org.apache.http.protocol.BasicHttpContext; import org.apache.http.protocol.ExecutionContext; import org.apache.http.protocol.HTTP; import org.apache.http.protocol.HttpContext; import org.apache.http.protocol.HttpRequestHandler; import org.apache.http.util.EntityUtils; import org.junit.Assert; import org.junit.Before; import org.junit.Test; /** * Redirection test cases. 
 */
public class TestRedirects extends IntegrationTestBase {

    // Start the embedded test server and create a fresh default client before each test.
    @Before
    public void setUp() throws Exception {
        startServer();
        this.httpclient = new HttpClientBuilder().build();
    }

    /**
     * Handler that answers "/oldlocation/" with a redirect (302 by default, or the
     * status code given at construction) whose Location is an absolute URL
     * "http://host:port/newlocation/", and serves a small entity at "/newlocation/".
     */
    private static class BasicRedirectService implements HttpRequestHandler {

        private int statuscode = HttpStatus.SC_MOVED_TEMPORARILY;
        private String host = null;
        private int port;

        public BasicRedirectService(final String host, int port, int statuscode) {
            super();
            this.host = host;
            this.port = port;
            // Non-positive status keeps the default 302 (moved temporarily).
            if (statuscode > 0) {
                this.statuscode = statuscode;
            }
        }

        public BasicRedirectService(final String host, int port) {
            this(host, port, -1);
        }

        public void handle(
                final HttpRequest request,
                final HttpResponse response,
                final HttpContext context) throws HttpException, IOException {
            ProtocolVersion ver = request.getRequestLine().getProtocolVersion();
            String uri = request.getRequestLine().getUri();
            if (uri.equals("/oldlocation/")) {
                response.setStatusLine(ver, this.statuscode);
                response.addHeader(new BasicHeader("Location",
                        "http://" + this.host + ":" + this.port + "/newlocation/"));
                response.addHeader(new BasicHeader("Connection", "close"));
            } else if (uri.equals("/newlocation/")) {
                response.setStatusLine(ver, HttpStatus.SC_OK);
                StringEntity entity = new StringEntity("Successful redirect");
                response.setEntity(entity);
            } else {
                response.setStatusLine(ver, HttpStatus.SC_NOT_FOUND);
            }
        }

    }

    /**
     * Handler that bounces requests back and forth between "/circular-oldlocation"
     * and "/circular-location2" forever, to exercise circular-redirect detection.
     */
    private static class CircularRedirectService implements HttpRequestHandler {

        public CircularRedirectService() {
            super();
        }

        public void handle(
                final HttpRequest request,
                final HttpResponse response,
                final HttpContext context) throws HttpException, IOException {
            ProtocolVersion ver = request.getRequestLine().getProtocolVersion();
            String uri = request.getRequestLine().getUri();
            if (uri.startsWith("/circular-oldlocation")) {
                response.setStatusLine(ver, HttpStatus.SC_MOVED_TEMPORARILY);
                response.addHeader(new BasicHeader("Location", "/circular-location2"));
            } else if (uri.startsWith("/circular-location2")) {
                response.setStatusLine(ver, HttpStatus.SC_MOVED_TEMPORARILY);
                response.addHeader(new BasicHeader("Location", "/circular-oldlocation"));
            } else {
                response.setStatusLine(ver, HttpStatus.SC_NOT_FOUND);
            }
        }

    }

    /**
     * Handler whose redirect Location is a host-relative path ("/relativelocation/"),
     * which violates strict HTTP but is tolerated when relative redirects are allowed.
     */
    private static class RelativeRedirectService implements HttpRequestHandler {

        public RelativeRedirectService() {
            super();
        }

        public void handle(
                final HttpRequest request,
                final HttpResponse response,
                final HttpContext context) throws HttpException, IOException {
            ProtocolVersion ver = request.getRequestLine().getProtocolVersion();
            String uri = request.getRequestLine().getUri();
            if (uri.equals("/oldlocation/")) {
                response.setStatusLine(ver, HttpStatus.SC_MOVED_TEMPORARILY);
                response.addHeader(new BasicHeader("Location", "/relativelocation/"));
            } else if (uri.equals("/relativelocation/")) {
                response.setStatusLine(ver, HttpStatus.SC_OK);
                StringEntity entity = new StringEntity("Successful redirect");
                response.setEntity(entity);
            } else {
                response.setStatusLine(ver, HttpStatus.SC_NOT_FOUND);
            }
        }

    }

    /**
     * Handler whose redirect Location is relative to the request path itself
     * ("relativelocation", no leading slash), resolved against "/test/".
     */
    private static class RelativeRedirectService2 implements HttpRequestHandler {

        public RelativeRedirectService2() {
            super();
        }

        public void handle(
                final HttpRequest request,
                final HttpResponse response,
                final HttpContext context) throws HttpException, IOException {
            ProtocolVersion ver = request.getRequestLine().getProtocolVersion();
            String uri = request.getRequestLine().getUri();
            if (uri.equals("/test/oldlocation")) {
                response.setStatusLine(ver, HttpStatus.SC_MOVED_TEMPORARILY);
                response.addHeader(new BasicHeader("Location", "relativelocation"));
            } else if (uri.equals("/test/relativelocation")) {
                response.setStatusLine(ver, HttpStatus.SC_OK);
                StringEntity entity = new StringEntity("Successful redirect");
                response.setEntity(entity);
            } else {
                response.setStatusLine(ver, HttpStatus.SC_NOT_FOUND);
            }
        }

    }

    /**
     * Handler that redirects "/oldlocation/" to an arbitrary (possibly malformed)
     * Location value supplied at construction.
     */
    private static class BogusRedirectService implements HttpRequestHandler {

        private String url;

        public BogusRedirectService(String redirectUrl) {
            super();
            this.url = redirectUrl;
        }

        public void handle(
                final HttpRequest request,
                final HttpResponse response,
                final HttpContext context) throws HttpException, IOException {
            ProtocolVersion ver = request.getRequestLine().getProtocolVersion();
            String uri = request.getRequestLine().getUri();
            if (uri.equals("/oldlocation/")) {
                response.setStatusLine(ver, HttpStatus.SC_MOVED_TEMPORARILY);
                response.addHeader(new BasicHeader("Location", url));
            } else if (uri.equals("/relativelocation/")) {
                response.setStatusLine(ver, HttpStatus.SC_OK);
                StringEntity entity = new StringEntity("Successful redirect");
                response.setEntity(entity);
            } else {
                response.setStatusLine(ver, HttpStatus.SC_NOT_FOUND);
            }
        }

    }

    // 300 Multiple Choices is not followed automatically: the response is returned
    // as-is and the request URI stays at the old location.
    @Test
    public void testBasicRedirect300() throws Exception {
        InetSocketAddress address = this.localServer.getServiceAddress();
        int port = address.getPort();
        String host = address.getHostName();
        this.localServer.register("*",
                new BasicRedirectService(host, port, HttpStatus.SC_MULTIPLE_CHOICES));

        HttpContext context = new BasicHttpContext();

        HttpGet httpget = new HttpGet("/oldlocation/");

        HttpResponse response = this.httpclient.execute(getServerHttp(), httpget, context);
        EntityUtils.consume(response.getEntity());

        HttpRequest reqWrapper = (HttpRequest) context.getAttribute(
                ExecutionContext.HTTP_REQUEST);

        Assert.assertEquals(HttpStatus.SC_MULTIPLE_CHOICES, response.getStatusLine().getStatusCode());
        Assert.assertEquals("/oldlocation/", reqWrapper.getRequestLine().getUri());
    }

    // 301 Moved Permanently is followed; the final request targets /newlocation/
    // on the same host and port.
    @Test
    public void testBasicRedirect301() throws Exception {
        InetSocketAddress address = this.localServer.getServiceAddress();
        int port = address.getPort();
        String host = address.getHostName();
        this.localServer.register("*",
                new BasicRedirectService(host, port, HttpStatus.SC_MOVED_PERMANENTLY));

        HttpContext context = new BasicHttpContext();

        HttpGet httpget = new HttpGet("/oldlocation/");

        HttpResponse response = this.httpclient.execute(getServerHttp(), httpget, context);
        EntityUtils.consume(response.getEntity());

        HttpRequest reqWrapper = (HttpRequest) context.getAttribute(
                ExecutionContext.HTTP_REQUEST);
        HttpHost targetHost = (HttpHost) context.getAttribute(
                ExecutionContext.HTTP_TARGET_HOST);

        Assert.assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode());
        Assert.assertEquals("/newlocation/", reqWrapper.getRequestLine().getUri());
        Assert.assertEquals(host, targetHost.getHostName());
        Assert.assertEquals(port, targetHost.getPort());
    }

    // 302 Moved Temporarily is followed for GET requests.
    @Test
    public void testBasicRedirect302() throws Exception {
        InetSocketAddress address = this.localServer.getServiceAddress();
        int port = address.getPort();
        String host = address.getHostName();
        this.localServer.register("*",
                new BasicRedirectService(host, port, HttpStatus.SC_MOVED_TEMPORARILY));

        HttpContext context = new BasicHttpContext();

        HttpGet httpget = new HttpGet("/oldlocation/");

        HttpResponse response = this.httpclient.execute(getServerHttp(), httpget, context);
        EntityUtils.consume(response.getEntity());

        HttpRequest reqWrapper = (HttpRequest) context.getAttribute(
                ExecutionContext.HTTP_REQUEST);
        HttpHost targetHost = (HttpHost) context.getAttribute(
                ExecutionContext.HTTP_TARGET_HOST);

        Assert.assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode());
        Assert.assertEquals("/newlocation/", reqWrapper.getRequestLine().getUri());
        Assert.assertEquals(host, targetHost.getHostName());
        Assert.assertEquals(port, targetHost.getPort());
    }

    // A 302 without a Location header cannot be followed; the response is
    // returned to the caller unchanged.
    @Test
    public void testBasicRedirect302NoLocation() throws Exception {
        InetSocketAddress address = this.localServer.getServiceAddress();
        int port = address.getPort();
        String host = address.getHostName();
        this.localServer.register("*", new HttpRequestHandler() {

            public void handle(
                    final HttpRequest request,
                    final HttpResponse response,
                    final HttpContext context) throws HttpException, IOException {
                response.setStatusCode(HttpStatus.SC_MOVED_TEMPORARILY);
            }

        });

        HttpContext context = new BasicHttpContext();

        HttpGet httpget = new HttpGet("/oldlocation/");

        HttpResponse response = this.httpclient.execute(getServerHttp(), httpget, context);
        EntityUtils.consume(response.getEntity());

        HttpRequest reqWrapper = (HttpRequest) context.getAttribute(
                ExecutionContext.HTTP_REQUEST);
        HttpHost targetHost = (HttpHost) context.getAttribute(
                ExecutionContext.HTTP_TARGET_HOST);

        Assert.assertEquals(HttpStatus.SC_MOVED_TEMPORARILY, response.getStatusLine().getStatusCode());
        Assert.assertEquals("/oldlocation/", reqWrapper.getRequestLine().getUri());
        Assert.assertEquals(host, targetHost.getHostName());
        Assert.assertEquals(port, targetHost.getPort());
    }

    // 303 See Other is followed.
    @Test
    public void testBasicRedirect303() throws Exception {
        InetSocketAddress address = this.localServer.getServiceAddress();
        int port = address.getPort();
        String host = address.getHostName();
        this.localServer.register("*",
                new BasicRedirectService(host, port, HttpStatus.SC_SEE_OTHER));

        HttpContext context = new BasicHttpContext();

        HttpGet httpget = new HttpGet("/oldlocation/");

        HttpResponse response = this.httpclient.execute(getServerHttp(), httpget, context);
        EntityUtils.consume(response.getEntity());

        HttpRequest reqWrapper = (HttpRequest) context.getAttribute(
                ExecutionContext.HTTP_REQUEST);
        HttpHost targetHost = (HttpHost) context.getAttribute(
                ExecutionContext.HTTP_TARGET_HOST);

        Assert.assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode());
        Assert.assertEquals("/newlocation/", reqWrapper.getRequestLine().getUri());
        Assert.assertEquals(host, targetHost.getHostName());
        Assert.assertEquals(port, targetHost.getPort());
    }

    // 304 Not Modified is not a redirect; it must be returned as-is.
    @Test
    public void testBasicRedirect304() throws Exception {
        InetSocketAddress address = this.localServer.getServiceAddress();
        int port = address.getPort();
        String host = address.getHostName();
        this.localServer.register("*",
                new BasicRedirectService(host, port, HttpStatus.SC_NOT_MODIFIED));

        HttpContext context = new BasicHttpContext();

        HttpGet httpget = new HttpGet("/oldlocation/");

        HttpResponse response = this.httpclient.execute(getServerHttp(), httpget, context);
        EntityUtils.consume(response.getEntity());

        HttpRequest reqWrapper = (HttpRequest) context.getAttribute(
                ExecutionContext.HTTP_REQUEST);

        Assert.assertEquals(HttpStatus.SC_NOT_MODIFIED, response.getStatusLine().getStatusCode());
        Assert.assertEquals("/oldlocation/", reqWrapper.getRequestLine().getUri());
    }

    // 305 Use Proxy is not followed automatically.
    @Test
    public void testBasicRedirect305() throws Exception {
        InetSocketAddress address = this.localServer.getServiceAddress();
        int port = address.getPort();
        String host = address.getHostName();
        this.localServer.register("*",
                new BasicRedirectService(host, port, HttpStatus.SC_USE_PROXY));

        HttpContext context = new BasicHttpContext();

        HttpGet httpget = new HttpGet("/oldlocation/");

        HttpResponse response = this.httpclient.execute(getServerHttp(), httpget, context);
        EntityUtils.consume(response.getEntity());

        HttpRequest reqWrapper = (HttpRequest) context.getAttribute(
                ExecutionContext.HTTP_REQUEST);

        Assert.assertEquals(HttpStatus.SC_USE_PROXY, response.getStatusLine().getStatusCode());
        Assert.assertEquals("/oldlocation/", reqWrapper.getRequestLine().getUri());
    }

    // 307 Temporary Redirect is followed.
    @Test
    public void testBasicRedirect307() throws Exception {
        InetSocketAddress address = this.localServer.getServiceAddress();
        int port = address.getPort();
        String host = address.getHostName();
        this.localServer.register("*",
                new BasicRedirectService(host, port, HttpStatus.SC_TEMPORARY_REDIRECT));

        HttpContext context = new BasicHttpContext();

        HttpGet httpget = new HttpGet("/oldlocation/");

        HttpResponse response = this.httpclient.execute(getServerHttp(), httpget, context);
        EntityUtils.consume(response.getEntity());

        HttpRequest reqWrapper = (HttpRequest) context.getAttribute(
                ExecutionContext.HTTP_REQUEST);
        HttpHost targetHost = (HttpHost) context.getAttribute(
                ExecutionContext.HTTP_TARGET_HOST);

        Assert.assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode());
        Assert.assertEquals("/newlocation/", reqWrapper.getRequestLine().getUri());
        Assert.assertEquals(host, targetHost.getHostName());
        Assert.assertEquals(port, targetHost.getPort());
    }

    // With circular redirects allowed, the MAX_REDIRECTS limit must still trip,
    // surfacing as a RedirectException wrapped in ClientProtocolException.
    @Test(expected=ClientProtocolException.class)
    public void testMaxRedirectCheck() throws Exception {
        this.localServer.register("*", new CircularRedirectService());

        this.httpclient.getParams().setBooleanParameter(ClientPNames.ALLOW_CIRCULAR_REDIRECTS, true);
        this.httpclient.getParams().setIntParameter(ClientPNames.MAX_REDIRECTS, 5);

        HttpGet httpget = new HttpGet("/circular-oldlocation/");
        try {
            this.httpclient.execute(getServerHttp(), httpget);
        } catch (ClientProtocolException e) {
            Assert.assertTrue(e.getCause() instanceof RedirectException);
            throw e;
        }
    }

    // With circular redirects disallowed, revisiting a location raises a
    // CircularRedirectException wrapped in ClientProtocolException.
    @Test(expected=ClientProtocolException.class)
    public void testCircularRedirect() throws Exception {
        this.localServer.register("*", new CircularRedirectService());

        this.httpclient.getParams().setBooleanParameter(ClientPNames.ALLOW_CIRCULAR_REDIRECTS, false);

        HttpGet httpget = new HttpGet("/circular-oldlocation/");
        try {
            this.httpclient.execute(getServerHttp(), httpget);
        } catch (ClientProtocolException e) {
            Assert.assertTrue(e.getCause() instanceof CircularRedirectException);
            throw e;
        }
    }

    // A POST receiving a plain 302 is not redirected by default: the 302 is
    // returned and the method stays POST at the original URI.
    @Test
    public void testPostNoRedirect() throws Exception {
        InetSocketAddress address = this.localServer.getServiceAddress();
        int port = address.getPort();
        String host = address.getHostName();
        this.localServer.register("*", new BasicRedirectService(host, port));

        HttpContext context = new BasicHttpContext();

        HttpPost httppost = new HttpPost("/oldlocation/");
        httppost.setEntity(new StringEntity("stuff"));

        HttpResponse response = this.httpclient.execute(getServerHttp(), httppost, context);
        EntityUtils.consume(response.getEntity());

        HttpRequest reqWrapper = (HttpRequest) context.getAttribute(
                ExecutionContext.HTTP_REQUEST);

        Assert.assertEquals(HttpStatus.SC_MOVED_TEMPORARILY, response.getStatusLine().getStatusCode());
        Assert.assertEquals("/oldlocation/", reqWrapper.getRequestLine().getUri());
        Assert.assertEquals("POST", reqWrapper.getRequestLine().getMethod());
    }

    // A POST receiving 303 See Other is redirected and converted to GET.
    @Test
    public void testPostRedirectSeeOther() throws Exception {
        InetSocketAddress address = this.localServer.getServiceAddress();
        int port = address.getPort();
        String host = address.getHostName();
        this.localServer.register("*", new BasicRedirectService(host, port,
                HttpStatus.SC_SEE_OTHER));

        HttpContext context = new BasicHttpContext();

        HttpPost httppost = new HttpPost("/oldlocation/");
        httppost.setEntity(new StringEntity("stuff"));

        HttpResponse response = this.httpclient.execute(getServerHttp(), httppost, context);
        EntityUtils.consume(response.getEntity());

        HttpRequest reqWrapper = (HttpRequest) context.getAttribute(
                ExecutionContext.HTTP_REQUEST);

        Assert.assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode());
        Assert.assertEquals("/newlocation/", reqWrapper.getRequestLine().getUri());
        Assert.assertEquals("GET", reqWrapper.getRequestLine().getMethod());
    }

    // Host-relative Location values are followed when REJECT_RELATIVE_REDIRECT
    // is disabled.
    @Test
    public void testRelativeRedirect() throws Exception {
        InetSocketAddress address = this.localServer.getServiceAddress();
        int port = address.getPort();
        String host = address.getHostName();
        this.localServer.register("*", new RelativeRedirectService());

        HttpContext context = new BasicHttpContext();

        this.httpclient.getParams().setBooleanParameter(
                ClientPNames.REJECT_RELATIVE_REDIRECT, false);
        HttpGet httpget = new HttpGet("/oldlocation/");

        HttpResponse response = this.httpclient.execute(getServerHttp(), httpget, context);
        EntityUtils.consume(response.getEntity());

        HttpRequest reqWrapper = (HttpRequest) context.getAttribute(
                ExecutionContext.HTTP_REQUEST);
        HttpHost targetHost = (HttpHost) context.getAttribute(
                ExecutionContext.HTTP_TARGET_HOST);

        Assert.assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode());
        Assert.assertEquals("/relativelocation/", reqWrapper.getRequestLine().getUri());
        Assert.assertEquals(host, targetHost.getHostName());
        Assert.assertEquals(port, targetHost.getPort());
    }

    // Path-relative Location values (no leading slash) are resolved against the
    // request path when relative redirects are allowed.
    @Test
    public void testRelativeRedirect2() throws Exception {
        InetSocketAddress address = this.localServer.getServiceAddress();
        int port = address.getPort();
        String host = address.getHostName();
        this.localServer.register("*", new RelativeRedirectService2());

        HttpContext context = new BasicHttpContext();

        this.httpclient.getParams().setBooleanParameter(
                ClientPNames.REJECT_RELATIVE_REDIRECT, false);
        HttpGet httpget = new HttpGet("/test/oldlocation");

        HttpResponse response = this.httpclient.execute(getServerHttp(), httpget, context);
        EntityUtils.consume(response.getEntity());

        HttpRequest reqWrapper = (HttpRequest) context.getAttribute(
                ExecutionContext.HTTP_REQUEST);
        HttpHost targetHost = (HttpHost) context.getAttribute(
                ExecutionContext.HTTP_TARGET_HOST);

        Assert.assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode());
        Assert.assertEquals("/test/relativelocation", reqWrapper.getRequestLine().getUri());
        Assert.assertEquals(host, targetHost.getHostName());
        Assert.assertEquals(port, targetHost.getPort());
    }

    // With REJECT_RELATIVE_REDIRECT enabled, a relative Location is a protocol
    // violation surfacing as ProtocolException.
    @Test(expected=ClientProtocolException.class)
    public void testRejectRelativeRedirect() throws Exception {
        this.localServer.register("*", new RelativeRedirectService());

        this.httpclient.getParams().setBooleanParameter(
                ClientPNames.REJECT_RELATIVE_REDIRECT, true);
        HttpGet httpget = new HttpGet("/oldlocation/");
        try {
            this.httpclient.execute(getServerHttp(), httpget);
        } catch (ClientProtocolException e) {
            Assert.assertTrue(e.getCause() instanceof ProtocolException);
            throw e;
        }
    }

    // A Location with an unsupported scheme must be rejected.
    @Test(expected=ClientProtocolException.class)
    public void testRejectBogusRedirectLocation() throws Exception {
        this.localServer.register("*", new BogusRedirectService("xxx://bogus"));

        HttpGet httpget = new HttpGet("/oldlocation/");

        this.httpclient.execute(getServerHttp(), httpget);
    }

    // A syntactically invalid Location (unencoded spaces) must be rejected with
    // a ProtocolException cause.
    @Test(expected=ClientProtocolException.class)
    public void testRejectInvalidRedirectLocation() throws Exception {
        InetSocketAddress address = this.localServer.getServiceAddress();
        int port = address.getPort();
        String host = address.getHostName();
        this.localServer.register("*",
                new BogusRedirectService("http://"+ host +":"+ port +"/newlocation/?p=I have spaces"));

        HttpGet httpget = new HttpGet("/oldlocation/");
        try {
            this.httpclient.execute(getServerHttp(), httpget);
        } catch (ClientProtocolException e) {
            Assert.assertTrue(e.getCause() instanceof ProtocolException);
            throw e;
        }
    }

    // A matching cookie must be sent exactly once on the redirected request.
    @Test
    public void testRedirectWithCookie() throws Exception {
        InetSocketAddress address = this.localServer.getServiceAddress();
        int port = address.getPort();
        String host = address.getHostName();
        this.localServer.register("*", new BasicRedirectService(host, port));

        CookieStore cookieStore = new BasicCookieStore();

        BasicClientCookie cookie = new BasicClientCookie("name", "value");
        cookie.setDomain(host);
        cookie.setPath("/");

        cookieStore.addCookie(cookie);

        HttpContext context = new BasicHttpContext();
        context.setAttribute(ClientContext.COOKIE_STORE, cookieStore);
        HttpGet httpget = new HttpGet("/oldlocation/");

        HttpResponse response = this.httpclient.execute(getServerHttp(), httpget, context);
        EntityUtils.consume(response.getEntity());

        HttpRequest reqWrapper = (HttpRequest) context.getAttribute(
                ExecutionContext.HTTP_REQUEST);

        Assert.assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode());
        Assert.assertEquals("/newlocation/", reqWrapper.getRequestLine().getUri());

        Header[] headers = reqWrapper.getHeaders(SM.COOKIE);
        Assert.assertEquals("There can only be one (cookie)", 1, headers.length);
    }

    // Default headers configured on the client must also be applied to the
    // redirected request.
    @Test
    public void testDefaultHeadersRedirect() throws Exception {
        InetSocketAddress address = this.localServer.getServiceAddress();
        int port = address.getPort();
        String host = address.getHostName();
        this.localServer.register("*", new BasicRedirectService(host, port));

        HttpContext context = new BasicHttpContext();

        List<Header> defaultHeaders = new ArrayList<Header>(1);
        defaultHeaders.add(new BasicHeader(HTTP.USER_AGENT, "my-test-client"));

        this.httpclient.getParams().setParameter(ClientPNames.DEFAULT_HEADERS, defaultHeaders);

        HttpGet httpget = new HttpGet("/oldlocation/");

        HttpResponse response = this.httpclient.execute(getServerHttp(), httpget, context);
        EntityUtils.consume(response.getEntity());

        HttpRequest reqWrapper = (HttpRequest) context.getAttribute(
                ExecutionContext.HTTP_REQUEST);

        Assert.assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode());
        Assert.assertEquals("/newlocation/", reqWrapper.getRequestLine().getUri());

        Header header = reqWrapper.getFirstHeader(HTTP.USER_AGENT);
        Assert.assertEquals("my-test-client", header.getValue());
    }

}
// Copyright 2000-2021 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.ui.components.panels;

import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.JBInsets;
import com.intellij.util.ui.JBValue;
import org.jetbrains.annotations.NotNull;

import javax.swing.*;
import java.awt.*;
import java.util.ArrayList;
import java.util.List;

/**
 * This class is intended to lay out added components horizontally.
 * It allows to add them into the LEFT, CENTER, or RIGHT group, which are aligned separately.
 * Every group can contain any amount of components. The specified gap is added between components,
 * and the double gap is added between groups of components. The gap will be scaled automatically.
 * <p><b>NB!: this class must be modified together with the {@code VerticalLayout} class accordingly</b></p>
 *
 * @see VerticalLayout
 */
public final class HorizontalLayout implements LayoutManager2 {
  /** Vertical "alignment" that stretches every component to the container height. */
  public static final int FILL = -1;
  public static final String LEFT = "LEFT";
  public static final String RIGHT = "RIGHT";
  public static final String CENTER = "CENTER";

  // One list per group; components within a group keep their insertion order.
  private final ArrayList<Component> myLeft = new ArrayList<>();
  private final ArrayList<Component> myRight = new ArrayList<>();
  private final ArrayList<Component> myCenter = new ArrayList<>();
  private final int myAlignment;
  private final JBValue myGap;

  /**
   * Creates a layout with the specified gap.
   * All components will have preferred widths,
   * but their heights will be set according to the container.
   * The gap will be scaled automatically.
   *
   * @param gap horizontal gap between components, without DPI scaling
   */
  public HorizontalLayout(int gap) {
    this(gap, FILL);
  }

  /**
   * Creates a layout with the specified gap and vertical alignment.
   * All components will have preferred sizes.
   * The gap will be scaled automatically.
   *
   * @param gap       horizontal gap between components, without DPI scaling
   * @param alignment vertical alignment for components
   * @see SwingConstants#TOP
   * @see SwingConstants#BOTTOM
   * @see SwingConstants#CENTER
   */
  public HorizontalLayout(int gap, int alignment) {
    // Negative gaps are clamped to zero before DPI scaling.
    this(new JBValue.Float(Math.max(0, gap)), alignment);
  }

  /**
   * Creates a layout with the specified (pre-scaled) gap and vertical alignment.
   *
   * @param gap       horizontal gap between components
   * @param alignment one of {@link #FILL}, {@link SwingConstants#TOP},
   *                  {@link SwingConstants#BOTTOM}, {@link SwingConstants#CENTER}
   * @throws IllegalArgumentException if the alignment is not supported
   */
  public HorizontalLayout(@NotNull JBValue gap, int alignment) {
    myGap = gap;
    switch (alignment) {
      case FILL:
      case SwingConstants.TOP:
      case SwingConstants.BOTTOM:
      case SwingConstants.CENTER:
        myAlignment = alignment;
        break;
      default:
        throw new IllegalArgumentException("unsupported alignment: " + alignment);
    }
  }

  @Override
  public void addLayoutComponent(Component component, Object constraints) {
    if ((constraints == null) || (constraints instanceof String)) {
      addLayoutComponent((String)constraints, component);
    }
    else {
      throw new IllegalArgumentException("unsupported constraints: " + constraints);
    }
  }

  @Override
  public Dimension maximumLayoutSize(Container target) {
    return new Dimension(Integer.MAX_VALUE, Integer.MAX_VALUE);
  }

  @Override
  public float getLayoutAlignmentX(Container target) {
    return .5f;
  }

  @Override
  public float getLayoutAlignmentY(Container target) {
    return .5f;
  }

  @Override
  public void invalidateLayout(Container target) {
  }

  @Override
  public void addLayoutComponent(String name, Component component) {
    synchronized (component.getTreeLock()) {
      if (name == null || LEFT.equalsIgnoreCase(name)) {
        myLeft.add(component);
      }
      else if (CENTER.equalsIgnoreCase(name)) {
        myCenter.add(component);
      }
      else if (RIGHT.equalsIgnoreCase(name)) {
        myRight.add(component);
      }
      else {
        throw new IllegalArgumentException("unsupported name: " + name);
      }
    }
  }

  @Override
  public void removeLayoutComponent(Component component) {
    // Take the tree lock for consistency with addLayoutComponent (and with
    // java.awt.BorderLayout): the group lists must not be mutated while
    // layoutContainer is traversing them.
    synchronized (component.getTreeLock()) {
      myLeft.remove(component);
      myRight.remove(component);
      myCenter.remove(component);
    }
  }

  @Override
  public Dimension preferredLayoutSize(Container container) {
    return getPreferredSize(container, true);
  }

  @Override
  public Dimension minimumLayoutSize(Container container) {
    return getPreferredSize(container, false);
  }

  @Override
  public void layoutContainer(Container container) {
    int gap = myGap.get();
    synchronized (container.getTreeLock()) {
      Dimension left = getPreferredSize(myLeft);
      Dimension right = getPreferredSize(myRight);
      Dimension center = getPreferredSize(myCenter);

      Insets insets = container.getInsets();
      int width = container.getWidth() - insets.left - insets.right;
      int height = container.getHeight() - insets.top - insets.bottom;

      // Lay out the left group first; leftX becomes the x just past it plus one
      // extra gap (layout() already added one), i.e. a double gap between groups.
      int leftX = 0;
      if (left != null) {
        leftX = gap + layout(myLeft, 0, height, insets);
      }
      int rightX = width;
      if (right != null) {
        rightX -= right.width;
      }
      if (rightX < leftX) {
        rightX = leftX;
      }
      if (center != null) {
        // Center the middle group in the container, then push it left/right so it
        // never overlaps the side groups (double gap on each side).
        int centerX = (width - center.width) / 2;
        if (centerX > leftX) {
          int centerRightX = centerX + center.width + gap + gap;
          if (centerRightX > rightX) {
            centerX = rightX - center.width - gap - gap;
          }
        }
        if (centerX < leftX) {
          centerX = leftX;
        }
        centerX = gap + layout(myCenter, centerX, height, insets);
        if (rightX < centerX) {
          rightX = centerX;
        }
      }
      if (right != null) {
        layout(myRight, rightX, height, insets);
      }
    }
  }

  /**
   * Positions the visible components of {@code list} left to right starting at {@code x},
   * applying the configured vertical alignment within {@code height}.
   *
   * @return the x coordinate just past the last component plus one gap
   */
  private int layout(List<? extends Component> list, int x, int height, Insets insets) {
    int gap = myGap.get();
    for (Component component : list) {
      if (component.isVisible()) {
        Dimension size = component.getPreferredSize();
        int y = 0;
        if (myAlignment == FILL) {
          size.height = height;
        }
        else if (myAlignment != SwingConstants.TOP) {
          y = height - size.height;
          if (myAlignment == SwingConstants.CENTER) {
            y /= 2;
          }
        }
        component.setBounds(x + insets.left, y + insets.top, size.width, size.height);
        x += size.width + gap;
      }
    }
    return x;
  }

  /**
   * Accumulates {@code size} into {@code result}: widths are summed (plus gap),
   * height is the maximum. Either argument may be {@code null} (empty group).
   */
  private static Dimension join(Dimension result, int gap, Dimension size) {
    if (size == null) {
      return result;
    }
    if (result == null) {
      return new Dimension(size);
    }
    result.width += gap + size.width;
    if (result.height < size.height) {
      result.height = size.height;
    }
    return result;
  }

  /** Combined preferred size of the visible components in one group, or {@code null} if none. */
  private Dimension getPreferredSize(List<? extends Component> list) {
    int gap = myGap.get();
    Dimension result = null;
    for (Component component : list) {
      if (component.isVisible()) {
        result = join(result, gap, component.getPreferredSize());
      }
    }
    return result;
  }

  /**
   * Preferred size of the whole container: groups joined with a double gap.
   * When {@code aligned} and a center group exists, extra width is reserved so the
   * center group can stay centered despite unequal left/right groups.
   */
  private Dimension getPreferredSize(Container container, boolean aligned) {
    int gap2 = 2 * myGap.get();
    synchronized (container.getTreeLock()) {
      Dimension left = getPreferredSize(myLeft);
      Dimension right = getPreferredSize(myRight);
      Dimension center = getPreferredSize(myCenter);
      Dimension result = join(join(join(null, gap2, left), gap2, center), gap2, right);
      if (result == null) {
        result = new Dimension();
      }
      else if (aligned && center != null) {
        int leftWidth = left == null ? 0 : left.width;
        int rightWidth = right == null ? 0 : right.width;
        result.width += Math.abs(leftWidth - rightWidth);
      }
      JBInsets.addTo(result, container.getInsets());
      return result;
    }
  }

  /** All managed components, in left, center, right order. */
  @NotNull
  public List<? extends Component> getComponents() {
    return ContainerUtil.concat(myLeft, myCenter, myRight);
  }
}
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.type; import com.facebook.presto.operator.scalar.AbstractTestFunctions; import com.facebook.presto.operator.scalar.TestingRowConstructor; import com.facebook.presto.spi.ErrorCode; import com.facebook.presto.spi.PrestoException; import com.facebook.presto.spi.block.Block; import com.facebook.presto.spi.block.BlockBuilderStatus; import com.facebook.presto.spi.type.SqlTimestamp; import com.facebook.presto.sql.analyzer.SemanticErrorCode; import com.facebook.presto.sql.analyzer.SemanticException; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import com.google.common.primitives.Longs; import io.airlift.slice.DynamicSliceOutput; import org.testng.annotations.Test; import static com.facebook.presto.SessionTestUtils.TEST_SESSION; import static com.facebook.presto.block.BlockSerdeUtil.writeBlock; import static com.facebook.presto.spi.StandardErrorCode.FUNCTION_NOT_FOUND; import static com.facebook.presto.spi.StandardErrorCode.INVALID_CAST_ARGUMENT; import static com.facebook.presto.spi.StandardErrorCode.INVALID_FUNCTION_ARGUMENT; import static com.facebook.presto.spi.StandardErrorCode.NOT_SUPPORTED; import static com.facebook.presto.spi.type.BigintType.BIGINT; import static com.facebook.presto.spi.type.BooleanType.BOOLEAN; import static com.facebook.presto.spi.type.DoubleType.DOUBLE; import static com.facebook.presto.spi.type.TimestampType.TIMESTAMP; import static 
com.facebook.presto.spi.type.VarcharType.VARCHAR; import static com.facebook.presto.type.ArrayType.toStackRepresentation; import static com.facebook.presto.type.JsonType.JSON; import static com.facebook.presto.type.UnknownType.UNKNOWN; import static java.lang.Double.NEGATIVE_INFINITY; import static java.lang.Double.NaN; import static java.lang.Double.POSITIVE_INFINITY; import static java.util.Arrays.asList; import static java.util.Collections.singletonList; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertTrue; import static org.testng.Assert.fail; public class TestArrayOperators extends AbstractTestFunctions { public TestArrayOperators() { registerScalar(TestingRowConstructor.class); } @Test public void testStackRepresentation() throws Exception { Block actualBlock = toStackRepresentation(ImmutableList.of( toStackRepresentation(ImmutableList.of(1L, 2L), BIGINT), toStackRepresentation(ImmutableList.of(3L), BIGINT)), new ArrayType(BIGINT)); DynamicSliceOutput actualSliceOutput = new DynamicSliceOutput(100); writeBlock(actualSliceOutput, actualBlock); Block expectedBlock = new ArrayType(BIGINT) .createBlockBuilder(new BlockBuilderStatus(), 3) .writeObject(BIGINT.createBlockBuilder(new BlockBuilderStatus(), 2).writeLong(1).closeEntry().writeLong(2).closeEntry().build()) .closeEntry() .writeObject(BIGINT.createBlockBuilder(new BlockBuilderStatus(), 1).writeLong(3).closeEntry().build()) .closeEntry() .build(); DynamicSliceOutput expectedSliceOutput = new DynamicSliceOutput(100); writeBlock(expectedSliceOutput, expectedBlock); assertEquals(actualSliceOutput.slice(), expectedSliceOutput.slice()); } @Test public void testArrayElements() throws Exception { assertFunction("CAST(ARRAY [1, 2, 3] AS ARRAY<BIGINT>)", new ArrayType(BIGINT), ImmutableList.of(1L, 2L, 3L)); assertFunction("CAST(ARRAY [1, null, 3] AS ARRAY<BIGINT>)", new ArrayType(BIGINT), asList(1L, null, 3L)); assertFunction("CAST(ARRAY [1, 2, 3] AS ARRAY<DOUBLE>)", new 
ArrayType(DOUBLE), ImmutableList.of(1.0, 2.0, 3.0));
        assertFunction("CAST(ARRAY [1, null, 3] AS ARRAY<DOUBLE>)", new ArrayType(DOUBLE), asList(1.0, null, 3.0));
        assertFunction("CAST(ARRAY ['1', '2'] AS ARRAY<VARCHAR>)", new ArrayType(VARCHAR), ImmutableList.of("1", "2"));
        assertFunction("CAST(ARRAY ['1', '2'] AS ARRAY<DOUBLE>)", new ArrayType(DOUBLE), ImmutableList.of(1.0, 2.0));
        assertFunction("CAST(ARRAY [true, false] AS ARRAY<BOOLEAN>)", new ArrayType(BOOLEAN), ImmutableList.of(true, false));
        assertFunction("CAST(ARRAY [true, false] AS ARRAY<VARCHAR>)", new ArrayType(VARCHAR), ImmutableList.of("true", "false"));
        assertFunction("CAST(ARRAY [1, 0] AS ARRAY<BOOLEAN>)", new ArrayType(BOOLEAN), ImmutableList.of(true, false));
        assertFunction("CAST(ARRAY [ARRAY[1], ARRAY[2, 3]] AS ARRAY<ARRAY<DOUBLE>>)", new ArrayType(new ArrayType(DOUBLE)), asList(asList(1.0), asList(2.0, 3.0)));
        // no cast exists from BIGINT to TIMESTAMP, so these resolve to no function at all
        assertInvalidFunction("CAST(ARRAY [1, null, 3] AS ARRAY<TIMESTAMP>)", FUNCTION_NOT_FOUND);
        assertInvalidFunction("CAST(ARRAY [1, null, 3] AS ARRAY<ARRAY<TIMESTAMP>>)", FUNCTION_NOT_FOUND);
        assertInvalidFunction("CAST(ARRAY ['puppies', 'kittens'] AS ARRAY<BIGINT>)", INVALID_CAST_ARGUMENT);
    }

    // ARRAY -> JSON cast: element rendering, null handling, numeric coercion, nesting
    @Test
    public void testArrayToJson()
            throws Exception
    {
        assertFunction("CAST(ARRAY [1, 2, 3] AS JSON)", JSON, "[1,2,3]");
        assertFunction("CAST(ARRAY [1, NULL, 3] AS JSON)", JSON, "[1,null,3]");
        assertFunction("CAST(ARRAY [1, 2.0, 3] AS JSON)", JSON, "[1.0,2.0,3.0]");
        assertFunction("CAST(ARRAY [1.0, 2.5, 3.0] AS JSON)", JSON, "[1.0,2.5,3.0]");
        assertFunction("CAST(ARRAY ['puppies', 'kittens'] AS JSON)", JSON, "[\"puppies\",\"kittens\"]");
        assertFunction("CAST(ARRAY [TRUE, FALSE] AS JSON)", JSON, "[true,false]");
        assertFunction("CAST(ARRAY [from_unixtime(1)] AS JSON)", JSON, "[\"" + sqlTimestamp(1000) + "\"]");
        assertFunction("CAST(ARRAY [ARRAY [1], ARRAY [2, 3]] AS JSON)", JSON, "[[1],[2,3]]");
    }

    // JSON -> ARRAY cast: typed extraction, JSON 'null' mapping to SQL NULL, invalid casts
    @Test
    public void testJsonToArray()
            throws Exception
    {
        assertFunction("CAST(CAST('[1, 2, 3]' AS JSON) AS ARRAY<BIGINT>)", new ArrayType(BIGINT), ImmutableList.of(1L, 2L, 3L));
        assertFunction("CAST(CAST('[1, null, 3]' AS JSON) AS ARRAY<BIGINT>)", new ArrayType(BIGINT), asList(1L, null, 3L));
        assertFunction("CAST(CAST('[1, 2.0, 3]' AS JSON) AS ARRAY<DOUBLE>)", new ArrayType(DOUBLE), ImmutableList.of(1.0, 2.0, 3.0));
        assertFunction("CAST(CAST('[1.0, 2.5, 3.0]' AS JSON) AS ARRAY<DOUBLE>)", new ArrayType(DOUBLE), ImmutableList.of(1.0, 2.5, 3.0));
        assertFunction("CAST(CAST('[\"puppies\", \"kittens\"]' AS JSON) AS ARRAY<VARCHAR>)", new ArrayType(VARCHAR), ImmutableList.of("puppies", "kittens"));
        assertFunction("CAST(CAST('[true, false]' AS JSON) AS ARRAY<BOOLEAN>)", new ArrayType(BOOLEAN), ImmutableList.of(true, false));
        assertFunction("CAST(CAST('[[1], [null]]' AS JSON) AS ARRAY<ARRAY<BIGINT>>)", new ArrayType(new ArrayType(BIGINT)), asList(asList(1L), asList((Long) null)));
        // top-level JSON null becomes SQL NULL
        assertFunction("CAST(CAST('null' AS JSON) AS ARRAY<BIGINT>)", new ArrayType(BIGINT), null);
        // ARRAY<JSON> keeps each element as its raw JSON text
        assertFunction("CAST(CAST('[5, [1, 2, 3], \"e\", {\"a\": \"b\"}, null, \"null\", [null]]' AS JSON) AS ARRAY<JSON>)",
                new ArrayType(JSON),
                ImmutableList.of("5", "[1,2,3]", "\"e\"", "{\"a\":\"b\"}", "null", "\"null\"", "[null]"));
        assertInvalidCast("CAST(CAST('[1, null, 3]' AS JSON) AS ARRAY<TIMESTAMP>)");
        assertInvalidCast("CAST(CAST('[1, null, 3]' AS JSON) AS ARRAY<ARRAY<TIMESTAMP>>)");
        assertInvalidCast("CAST(CAST('[1, 2, 3]' AS JSON) AS ARRAY<BOOLEAN>)");
        assertInvalidCast("CAST(CAST('[\"puppies\", \"kittens\"]' AS JSON) AS ARRAY<BIGINT>)");
    }

    // ARRAY literal construction: empty, NULLs, type coercion across elements, nesting, special doubles
    @Test
    public void testConstructor()
            throws Exception
    {
        assertFunction("ARRAY []", new ArrayType(UNKNOWN), ImmutableList.of());
        assertFunction("ARRAY [NULL]", new ArrayType(UNKNOWN), Lists.newArrayList((Object) null));
        assertFunction("ARRAY [1, 2, 3]", new ArrayType(BIGINT), ImmutableList.of(1L, 2L, 3L));
        assertFunction("ARRAY [1, NULL, 3]", new ArrayType(BIGINT), Lists.newArrayList(1L, null, 3L));
        assertFunction("ARRAY [NULL, 2, 3]", new ArrayType(BIGINT),
Lists.newArrayList(null, 2L, 3L));
        assertFunction("ARRAY [1, 2.0, 3]", new ArrayType(DOUBLE), ImmutableList.of(1.0, 2.0, 3.0));
        assertFunction("ARRAY [ARRAY[1, 2], ARRAY[3]]", new ArrayType(new ArrayType(BIGINT)), ImmutableList.of(ImmutableList.of(1L, 2L), ImmutableList.of(3L)));
        assertFunction("ARRAY [ARRAY[1, 2], NULL, ARRAY[3]]", new ArrayType(new ArrayType(BIGINT)), Lists.newArrayList(ImmutableList.of(1L, 2L), null, ImmutableList.of(3L)));
        assertFunction("ARRAY [1.0, 2.5, 3.0]", new ArrayType(DOUBLE), ImmutableList.of(1.0, 2.5, 3.0));
        assertFunction("ARRAY [1, 2.5, 3]", new ArrayType(DOUBLE), ImmutableList.of(1.0, 2.5, 3.0));
        assertFunction("ARRAY ['puppies', 'kittens']", new ArrayType(VARCHAR), ImmutableList.of("puppies", "kittens"));
        assertFunction("ARRAY [TRUE, FALSE]", new ArrayType(BOOLEAN), ImmutableList.of(true, false));
        assertFunction("ARRAY [from_unixtime(1), from_unixtime(100)]", new ArrayType(TIMESTAMP), ImmutableList.of(
                sqlTimestamp(1000), sqlTimestamp(100_000)));
        // NaN and infinities are valid DOUBLE array elements
        assertFunction("ARRAY [sqrt(-1)]", new ArrayType(DOUBLE), ImmutableList.of(NaN));
        assertFunction("ARRAY [pow(infinity(), 2)]", new ArrayType(DOUBLE), ImmutableList.of(POSITIVE_INFINITY));
        assertFunction("ARRAY [pow(-infinity(), 1)]", new ArrayType(DOUBLE), ImmutableList.of(NEGATIVE_INFINITY));
        assertFunction("ARRAY [ARRAY [], NULL]", new ArrayType(new ArrayType(UNKNOWN)), asList(ImmutableList.of(), null));
    }

    // array || array concatenation and the equivalent concat() function
    @Test
    public void testArrayToArrayConcat()
            throws Exception
    {
        assertFunction("ARRAY [1, NULL] || ARRAY [3]", new ArrayType(BIGINT), Lists.newArrayList(1L, null, 3L));
        assertFunction("ARRAY [1, 2] || ARRAY[3, 4]", new ArrayType(BIGINT), ImmutableList.of(1L, 2L, 3L, 4L));
        assertFunction("ARRAY [NULL] || ARRAY[NULL]", new ArrayType(UNKNOWN), Lists.newArrayList(null, null));
        assertFunction("ARRAY ['puppies'] || ARRAY ['kittens']", new ArrayType(VARCHAR), ImmutableList.of("puppies", "kittens"));
        assertFunction("ARRAY [TRUE] || ARRAY [FALSE]", new ArrayType(BOOLEAN), ImmutableList.of(true, false));
        assertFunction("concat(ARRAY [1] , ARRAY[2,3])", new ArrayType(BIGINT), ImmutableList.of(1L, 2L, 3L));
        assertFunction("ARRAY [from_unixtime(1)] || ARRAY[from_unixtime(100)]", new ArrayType(TIMESTAMP), ImmutableList.of(
                sqlTimestamp(1000), sqlTimestamp(100_000)));
        assertFunction("ARRAY [ARRAY[ARRAY[1]]] || ARRAY [ARRAY[ARRAY[2]]]",
                new ArrayType(new ArrayType(new ArrayType(BIGINT))),
                asList(singletonList(Longs.asList(1)), singletonList(Longs.asList(2))));
        assertFunction("ARRAY [] || ARRAY []", new ArrayType(UNKNOWN), ImmutableList.of());
        // || is left-associative and chains across more than two operands
        assertFunction("ARRAY [TRUE] || ARRAY [FALSE] || ARRAY [TRUE]", new ArrayType(BOOLEAN), ImmutableList.of(true, false, true));
        assertFunction("ARRAY [1] || ARRAY [2] || ARRAY [3] || ARRAY [4]", new ArrayType(BIGINT), ImmutableList.of(1L, 2L, 3L, 4L));
        assertFunction("ARRAY [1] || ARRAY [2.0] || ARRAY [3] || ARRAY [4.0]", new ArrayType(DOUBLE), ImmutableList.of(1.0, 2.0, 3.0, 4.0));
        assertFunction("ARRAY [ARRAY [1], ARRAY [2, 8]] || ARRAY [ARRAY [3, 6], ARRAY [4]]",
                new ArrayType(new ArrayType(BIGINT)),
                ImmutableList.of(ImmutableList.of(1L), ImmutableList.of(2L, 8L), ImmutableList.of(3L, 6L), ImmutableList.of(4L)));
        // mismatched element types cannot be concatenated
        assertInvalidFunction("ARRAY [ARRAY[1]] || ARRAY[ARRAY[true], ARRAY[false]]", FUNCTION_NOT_FOUND.toErrorCode());
        try {
            assertFunction("ARRAY [ARRAY[1]] || ARRAY[NULL]", new ArrayType(new ArrayType(BIGINT)), null);
            fail("arrays must be of the same type");
        }
        catch (RuntimeException e) {
            // Expected
        }
    }

    // element || array (and array || element) concatenation, on both sides and various types
    @Test
    public void testElementArrayConcat()
            throws Exception
    {
        assertFunction("CAST (ARRAY [DATE '2001-08-22'] || DATE '2001-08-23' AS JSON)", JSON, "[\"2001-08-22\",\"2001-08-23\"]");
        assertFunction("CAST (DATE '2001-08-23' || ARRAY [DATE '2001-08-22'] AS JSON)", JSON, "[\"2001-08-23\",\"2001-08-22\"]");
        assertFunction("1 || ARRAY [2]", new ArrayType(BIGINT), Lists.newArrayList(1L, 2L));
        assertFunction("ARRAY [2] || 1", new ArrayType(BIGINT), Lists.newArrayList(2L, 1L));
        assertFunction("TRUE || ARRAY [FALSE]", new
ArrayType(BOOLEAN), Lists.newArrayList(true, false));
        assertFunction("ARRAY [FALSE] || TRUE", new ArrayType(BOOLEAN), Lists.newArrayList(false, true));
        assertFunction("1.0 || ARRAY [2.0]", new ArrayType(DOUBLE), Lists.newArrayList(1.0, 2.0));
        assertFunction("ARRAY [2.0] || 1.0", new ArrayType(DOUBLE), Lists.newArrayList(2.0, 1.0));
        assertFunction("'puppies' || ARRAY ['kittens']", new ArrayType(VARCHAR), Lists.newArrayList("puppies", "kittens"));
        assertFunction("ARRAY ['kittens'] || 'puppies'", new ArrayType(VARCHAR), Lists.newArrayList("kittens", "puppies"));
        assertFunction("ARRAY [from_unixtime(1)] || from_unixtime(100)", new ArrayType(TIMESTAMP), ImmutableList.of(
                sqlTimestamp(1000), sqlTimestamp(100_000)));
        assertFunction("from_unixtime(100) || ARRAY [from_unixtime(1)]", new ArrayType(TIMESTAMP), ImmutableList.of(
                sqlTimestamp(100_000), sqlTimestamp(1000)));
        // an array element concatenated with an array-of-arrays
        assertFunction("ARRAY [2, 8] || ARRAY[ARRAY[3, 6], ARRAY[4]]", new ArrayType(new ArrayType(BIGINT)), ImmutableList.of(ImmutableList.of(2L, 8L), ImmutableList.of(3L, 6L), ImmutableList.of(4L)));
        assertFunction("ARRAY [ARRAY [1], ARRAY [2, 8]] || ARRAY [3, 6]", new ArrayType(new ArrayType(BIGINT)), ImmutableList.of(ImmutableList.of(1L), ImmutableList.of(2L, 8L), ImmutableList.of(3L, 6L)));
    }

    // CONTAINS(array, element): hits, misses, NULL propagation, nested-array elements
    @Test
    public void testArrayContains()
            throws Exception
    {
        assertFunction("CONTAINS(ARRAY [1, 2, 3], 2)", BOOLEAN, true);
        assertFunction("CONTAINS(ARRAY [1, 2, 3], 5)", BOOLEAN, false);
        assertFunction("CONTAINS(ARRAY [1, NULL, 3], 1)", BOOLEAN, true);
        // element not found and a NULL present: result is NULL, not false
        assertFunction("CONTAINS(ARRAY [NULL, 2, 3], 1)", BOOLEAN, null);
        assertFunction("CONTAINS(ARRAY [1, 2.0, 3], 3.0)", BOOLEAN, true);
        assertFunction("CONTAINS(ARRAY [1.0, 2.5, 3.0], 2.2)", BOOLEAN, false);
        assertFunction("CONTAINS(ARRAY ['puppies', 'kittens'], 'kittens')", BOOLEAN, true);
        assertFunction("CONTAINS(ARRAY ['puppies', 'kittens'], 'lizards')", BOOLEAN, false);
        assertFunction("CONTAINS(ARRAY [TRUE, FALSE], TRUE)", BOOLEAN, true);
        assertFunction("CONTAINS(ARRAY [FALSE], TRUE)", BOOLEAN, false);
        assertFunction("CONTAINS(ARRAY [ARRAY [1, 2], ARRAY [3, 4]], ARRAY [3, 4])", BOOLEAN, true);
        assertFunction("CONTAINS(ARRAY [ARRAY [1, 2], ARRAY [3, 4]], ARRAY [3])", BOOLEAN, false);
    }

    // ARRAY_JOIN: separators, optional null replacement, null skipping, unsupported element types
    @Test
    public void testArrayJoin()
            throws Exception
    {
        // NULLs are skipped when no replacement string is given
        assertFunction("array_join(ARRAY[1, NULL, 2], ',')", VARCHAR, "1,2");
        assertFunction("ARRAY_JOIN(ARRAY [1, 2, 3], ';', 'N/A')", VARCHAR, "1;2;3");
        assertFunction("ARRAY_JOIN(ARRAY [1, 2, null], ';', 'N/A')", VARCHAR, "1;2;N/A");
        assertFunction("ARRAY_JOIN(ARRAY [1, 2, 3], 'x')", VARCHAR, "1x2x3");
        assertFunction("ARRAY_JOIN(ARRAY [null], '=')", VARCHAR, "");
        assertFunction("ARRAY_JOIN(ARRAY [null,null], '=')", VARCHAR, "");
        assertFunction("ARRAY_JOIN(ARRAY [], 'S')", VARCHAR, "");
        assertFunction("ARRAY_JOIN(ARRAY [''], '', '')", VARCHAR, "");
        assertFunction("ARRAY_JOIN(ARRAY [1, 2, 3, null, 5], ',', '*')", VARCHAR, "1,2,3,*,5");
        assertFunction("ARRAY_JOIN(ARRAY ['a', 'b', 'c', null, null, 'd'], '-', 'N/A')", VARCHAR, "a-b-c-N/A-N/A-d");
        assertFunction("ARRAY_JOIN(ARRAY ['a', 'b', 'c', null, null, 'd'], '-')", VARCHAR, "a-b-c-d");
        assertFunction("ARRAY_JOIN(ARRAY [null, null, null, null], 'X')", VARCHAR, "");
        assertFunction("ARRAY_JOIN(ARRAY [true, false], 'XX')", VARCHAR, "trueXXfalse");
        assertFunction("ARRAY_JOIN(ARRAY [sqrt(-1), infinity()], ',')", VARCHAR, "NaN,Infinity");
        assertFunction("ARRAY_JOIN(ARRAY [from_unixtime(1), from_unixtime(10)], '|')", VARCHAR, sqlTimestamp(1000).toString() + "|" + sqlTimestamp(10_000).toString());
        assertFunction("ARRAY_JOIN(ARRAY [null, from_unixtime(10)], '|')", VARCHAR, sqlTimestamp(10_000).toString());
        assertFunction("ARRAY_JOIN(ARRAY [null, from_unixtime(10)], '|', 'XYZ')", VARCHAR, "XYZ|" + sqlTimestamp(10_000).toString());
        // non-scalar element types are rejected
        assertInvalidFunction("ARRAY_JOIN(ARRAY [ARRAY [1], ARRAY [2]], '-')", INVALID_FUNCTION_ARGUMENT);
        assertInvalidFunction("ARRAY_JOIN(ARRAY [MAP(ARRAY [1], ARRAY [2])], '-')", INVALID_FUNCTION_ARGUMENT);
assertInvalidFunction("ARRAY_JOIN(ARRAY [test_row(1, 2)], '-')", INVALID_FUNCTION_ARGUMENT);
    }

    // CARDINALITY: element count regardless of element type or NULLs
    @Test
    public void testCardinality()
            throws Exception
    {
        assertFunction("CARDINALITY(ARRAY [])", BIGINT, 0);
        assertFunction("CARDINALITY(ARRAY [NULL])", BIGINT, 1);
        assertFunction("CARDINALITY(ARRAY [1, 2, 3])", BIGINT, 3);
        assertFunction("CARDINALITY(ARRAY [1, NULL, 3])", BIGINT, 3);
        assertFunction("CARDINALITY(ARRAY [1, 2.0, 3])", BIGINT, 3);
        assertFunction("CARDINALITY(ARRAY [ARRAY[1, 2], ARRAY[3]])", BIGINT, 2);
        assertFunction("CARDINALITY(ARRAY [1.0, 2.5, 3.0])", BIGINT, 3);
        assertFunction("CARDINALITY(ARRAY ['puppies', 'kittens'])", BIGINT, 2);
        assertFunction("CARDINALITY(ARRAY [TRUE, FALSE])", BIGINT, 2);
    }

    // ARRAY_MIN: any NULL element yields NULL; otherwise the smallest element
    @Test
    public void testArrayMin()
            throws Exception
    {
        assertFunction("ARRAY_MIN(ARRAY [])", UNKNOWN, null);
        assertFunction("ARRAY_MIN(ARRAY [NULL])", UNKNOWN, null);
        assertFunction("ARRAY_MIN(ARRAY [NULL, NULL, NULL])", UNKNOWN, null);
        assertFunction("ARRAY_MIN(ARRAY [NULL, 2, 3])", BIGINT, null);
        assertFunction("ARRAY_MIN(ARRAY [1.0, NULL, 3])", DOUBLE, null);
        assertFunction("ARRAY_MIN(ARRAY ['1', '2', NULL])", VARCHAR, null);
        assertFunction("ARRAY_MIN(ARRAY [3, 2, 1])", BIGINT, 1);
        assertFunction("ARRAY_MIN(ARRAY [1, 2, 3])", BIGINT, 1);
        assertFunction("ARRAY_MIN(ARRAY [1, 2.0, 3])", DOUBLE, 1.0);
        assertFunction("ARRAY_MIN(ARRAY [ARRAY[1, 2], ARRAY[3]])", new ArrayType(BIGINT), ImmutableList.of(1L, 2L));
        assertFunction("ARRAY_MIN(ARRAY [1.0, 2.5, 3.0])", DOUBLE, 1.0);
        assertFunction("ARRAY_MIN(ARRAY ['puppies', 'kittens'])", VARCHAR, "kittens");
        assertFunction("ARRAY_MIN(ARRAY [TRUE, FALSE])", BOOLEAN, false);
        assertFunction("ARRAY_MIN(ARRAY [NULL, FALSE])", BOOLEAN, null);
    }

    // ARRAY_MAX: mirror of ARRAY_MIN — any NULL element yields NULL; otherwise the largest
    @Test
    public void testArrayMax()
            throws Exception
    {
        assertFunction("ARRAY_MAX(ARRAY [])", UNKNOWN, null);
        assertFunction("ARRAY_MAX(ARRAY [NULL])", UNKNOWN, null);
        assertFunction("ARRAY_MAX(ARRAY [NULL, NULL, NULL])", UNKNOWN, null);
        assertFunction("ARRAY_MAX(ARRAY [NULL, 2, 3])", BIGINT,
null);
        assertFunction("ARRAY_MAX(ARRAY [1.0, NULL, 3])", DOUBLE, null);
        assertFunction("ARRAY_MAX(ARRAY ['1', '2', NULL])", VARCHAR, null);
        assertFunction("ARRAY_MAX(ARRAY [3, 2, 1])", BIGINT, 3);
        assertFunction("ARRAY_MAX(ARRAY [1, 2, 3])", BIGINT, 3);
        assertFunction("ARRAY_MAX(ARRAY [1, 2.0, 3])", DOUBLE, 3.0);
        assertFunction("ARRAY_MAX(ARRAY [ARRAY[1, 2], ARRAY[3]])", new ArrayType(BIGINT), ImmutableList.of(3L));
        assertFunction("ARRAY_MAX(ARRAY [1.0, 2.5, 3.0])", DOUBLE, 3.0);
        assertFunction("ARRAY_MAX(ARRAY ['puppies', 'kittens'])", VARCHAR, "puppies");
        assertFunction("ARRAY_MAX(ARRAY [TRUE, FALSE])", BOOLEAN, true);
        assertFunction("ARRAY_MAX(ARRAY [NULL, FALSE])", BOOLEAN, null);
    }

    // ARRAY_POSITION: 1-based index of the first match, 0 when absent; NULL elements never match
    @Test
    public void testArrayPosition()
            throws Exception
    {
        assertFunction("ARRAY_POSITION(ARRAY [10, 20, 30, 40], 30)", BIGINT, 3);
        assertFunction("ARRAY_POSITION(cast(cast('[]' as json) as array<bigint>), 30)", BIGINT, 0);
        assertFunction("ARRAY_POSITION(ARRAY [cast(NULL as bigint)], 30)", BIGINT, 0);
        assertFunction("ARRAY_POSITION(ARRAY [cast(NULL as bigint), NULL, NULL], 30)", BIGINT, 0);
        assertFunction("ARRAY_POSITION(ARRAY [NULL, NULL, 30, NULL], 30)", BIGINT, 3);
        assertFunction("ARRAY_POSITION(ARRAY [1.1, 2.1, 3.1, 4.1], 3.1)", BIGINT, 3);
        assertFunction("ARRAY_POSITION(ARRAY [false, false, true, true], true)", BIGINT, 3);
        assertFunction("ARRAY_POSITION(ARRAY ['10', '20', '30', '40'], '30')", BIGINT, 3);
        assertFunction("ARRAY_POSITION(ARRAY [DATE '2000-01-01', DATE '2000-01-02', DATE '2000-01-03', DATE '2000-01-04'], DATE '2000-01-03')", BIGINT, 3);
        assertFunction("ARRAY_POSITION(ARRAY [ARRAY [1, 11], ARRAY [2, 12], ARRAY [3, 13], ARRAY [4, 14]], ARRAY [3, 13])", BIGINT, 3);
    }

    // array[index] subscript operator: bounds checking, NULL handling, chained subscripts
    @Test
    public void testSubscript()
            throws Exception
    {
        String outOfBounds = "Array subscript out of bounds";
        String negativeIndex = "Array subscript is negative";
        String indexIsZero = "SQL array indices start at 1";
        assertInvalidFunction("ARRAY [][1]", outOfBounds);
        assertInvalidFunction("ARRAY [null][-1]",
negativeIndex); assertInvalidFunction("ARRAY [1, 2, 3][0]", indexIsZero); assertInvalidFunction("ARRAY [1, 2, 3][-1]", negativeIndex); assertInvalidFunction("ARRAY [1, 2, 3][4]", outOfBounds); try { assertFunction("ARRAY [1, 2, 3][1.1]", BIGINT, null); fail("Access to array with double subscript should fail"); } catch (SemanticException e) { assertTrue(e.getCode() == SemanticErrorCode.TYPE_MISMATCH); } assertFunction("ARRAY[NULL][1]", UNKNOWN, null); assertFunction("ARRAY[NULL, NULL, NULL][3]", UNKNOWN, null); assertFunction("1 + ARRAY [2, 1, 3][2]", BIGINT, 2); assertFunction("ARRAY [2, 1, 3][2]", BIGINT, 1); assertFunction("ARRAY [2, NULL, 3][2]", BIGINT, null); assertFunction("ARRAY [1.0, 2.5, 3.5][3]", DOUBLE, 3.5); assertFunction("ARRAY [ARRAY[1, 2], ARRAY[3]][2]", new ArrayType(BIGINT), ImmutableList.of(3L)); assertFunction("ARRAY [ARRAY[1, 2], NULL, ARRAY[3]][2]", new ArrayType(BIGINT), null); assertFunction("ARRAY [ARRAY[1, 2], ARRAY[3]][2][1]", BIGINT, 3); assertFunction("ARRAY ['puppies', 'kittens'][2]", VARCHAR, "kittens"); assertFunction("ARRAY ['puppies', 'kittens', NULL][3]", VARCHAR, null); assertFunction("ARRAY [TRUE, FALSE][2]", BOOLEAN, false); assertFunction("ARRAY [from_unixtime(1), from_unixtime(100)][1]", TIMESTAMP, sqlTimestamp(1000)); assertFunction("ARRAY [infinity()][1]", DOUBLE, POSITIVE_INFINITY); assertFunction("ARRAY [-infinity()][1]", DOUBLE, NEGATIVE_INFINITY); assertFunction("ARRAY [sqrt(-1)][1]", DOUBLE, NaN); } @Test public void testElementAt() throws Exception { String outOfBounds = "Array subscript out of bounds"; assertInvalidFunction("ELEMENT_AT(ARRAY [], -1)", outOfBounds); assertInvalidFunction("ELEMENT_AT(ARRAY [], 0)", "SQL array indices start at 1"); assertInvalidFunction("ELEMENT_AT(ARRAY [], 1)", outOfBounds); assertInvalidFunction("ELEMENT_AT(ARRAY [1, 2, 3], 0)", "SQL array indices start at 1"); assertInvalidFunction("ELEMENT_AT(ARRAY [1, 2, 3], 4)", outOfBounds); assertInvalidFunction("ELEMENT_AT(ARRAY [1, 2, 3], 
-4)", outOfBounds); assertFunction("ELEMENT_AT(ARRAY [NULL], 1)", UNKNOWN, null); assertFunction("ELEMENT_AT(ARRAY [NULL], -1)", UNKNOWN, null); assertFunction("ELEMENT_AT(ARRAY [NULL, NULL, NULL], 3)", UNKNOWN, null); assertFunction("ELEMENT_AT(ARRAY [NULL, NULL, NULL], -1)", UNKNOWN, null); assertFunction("1 + ELEMENT_AT(ARRAY [2, 1, 3], 2)", BIGINT, 2); assertFunction("1 + ELEMENT_AT(ARRAY [2, 1, 3], -2)", BIGINT, 2); assertFunction("ELEMENT_AT(ARRAY [2, 1, 3], 2)", BIGINT, 1); assertFunction("ELEMENT_AT(ARRAY [2, 1, 3], -2)", BIGINT, 1); assertFunction("ELEMENT_AT(ARRAY [2, NULL, 3], 2)", BIGINT, null); assertFunction("ELEMENT_AT(ARRAY [2, NULL, 3], -2)", BIGINT, null); assertFunction("ELEMENT_AT(ARRAY [1.0, 2.5, 3.5], 3)", DOUBLE, 3.5); assertFunction("ELEMENT_AT(ARRAY [1.0, 2.5, 3.5], -1)", DOUBLE, 3.5); assertFunction("ELEMENT_AT(ARRAY [ARRAY [1, 2], ARRAY [3]], 2)", new ArrayType(BIGINT), ImmutableList.of(3L)); assertFunction("ELEMENT_AT(ARRAY [ARRAY [1, 2], ARRAY [3]], -1)", new ArrayType(BIGINT), ImmutableList.of(3L)); assertFunction("ELEMENT_AT(ARRAY [ARRAY [1, 2], NULL, ARRAY [3]], 2)", new ArrayType(BIGINT), null); assertFunction("ELEMENT_AT(ARRAY [ARRAY [1, 2], NULL, ARRAY [3]], -2)", new ArrayType(BIGINT), null); assertFunction("ELEMENT_AT(ELEMENT_AT(ARRAY [ARRAY[1, 2], ARRAY [3]], 2) , 1)", BIGINT, 3); assertFunction("ELEMENT_AT(ELEMENT_AT(ARRAY [ARRAY[1, 2], ARRAY [3]], -1) , 1)", BIGINT, 3); assertFunction("ELEMENT_AT(ELEMENT_AT(ARRAY [ARRAY[1, 2], ARRAY [3]], 2) , -1)", BIGINT, 3); assertFunction("ELEMENT_AT(ELEMENT_AT(ARRAY [ARRAY[1, 2], ARRAY [3]], -1) , -1)", BIGINT, 3); assertFunction("ELEMENT_AT(ARRAY ['puppies', 'kittens'], 2)", VARCHAR, "kittens"); assertFunction("ELEMENT_AT(ARRAY ['puppies', 'kittens'], -1)", VARCHAR, "kittens"); assertFunction("ELEMENT_AT(ARRAY ['puppies', 'kittens', NULL], 3)", VARCHAR, null); assertFunction("ELEMENT_AT(ARRAY ['puppies', 'kittens', NULL], -1)", VARCHAR, null); assertFunction("ELEMENT_AT(ARRAY [TRUE, 
FALSE], 2)", BOOLEAN, false); assertFunction("ELEMENT_AT(ARRAY [TRUE, FALSE], -1)", BOOLEAN, false); assertFunction("ELEMENT_AT(ARRAY [from_unixtime(1), from_unixtime(100)], 1)", TIMESTAMP, sqlTimestamp(1000)); assertFunction("ELEMENT_AT(ARRAY [from_unixtime(1), from_unixtime(100)], -2)", TIMESTAMP, sqlTimestamp(1000)); assertFunction("ELEMENT_AT(ARRAY [infinity()], 1)", DOUBLE, POSITIVE_INFINITY); assertFunction("ELEMENT_AT(ARRAY [infinity()], -1)", DOUBLE, POSITIVE_INFINITY); assertFunction("ELEMENT_AT(ARRAY [-infinity()], 1)", DOUBLE, NEGATIVE_INFINITY); assertFunction("ELEMENT_AT(ARRAY [-infinity()], -1)", DOUBLE, NEGATIVE_INFINITY); assertFunction("ELEMENT_AT(ARRAY [sqrt(-1)], 1)", DOUBLE, NaN); assertFunction("ELEMENT_AT(ARRAY [sqrt(-1)], -1)", DOUBLE, NaN); } @Test public void testSort() throws Exception { assertFunction("ARRAY_SORT(ARRAY[2, 3, 4, 1])", new ArrayType(BIGINT), ImmutableList.of(1L, 2L, 3L, 4L)); assertFunction("ARRAY_SORT(ARRAY['z', 'f', 's', 'd', 'g'])", new ArrayType(VARCHAR), ImmutableList.of("d", "f", "g", "s", "z")); assertFunction("ARRAY_SORT(ARRAY[TRUE, FALSE])", new ArrayType(BOOLEAN), ImmutableList.of(false, true)); assertFunction("ARRAY_SORT(ARRAY[22.1, 11.1, 1.1, 44.1])", new ArrayType(DOUBLE), ImmutableList.of(1.1, 11.1, 22.1, 44.1)); assertFunction("ARRAY_SORT(ARRAY [from_unixtime(100), from_unixtime(1), from_unixtime(200)])", new ArrayType(TIMESTAMP), ImmutableList.of(sqlTimestamp(1000), sqlTimestamp(100 * 1000), sqlTimestamp(200 * 1000))); assertFunction("ARRAY_SORT(ARRAY [ARRAY [1], ARRAY [2]])", new ArrayType(new ArrayType(BIGINT)), ImmutableList.of(ImmutableList.of(1L), ImmutableList.of(2L))); assertInvalidFunction("ARRAY_SORT(ARRAY[color('red'), color('blue')])", FUNCTION_NOT_FOUND.toErrorCode()); } @Test void testDistinct() throws Exception { assertFunction("ARRAY_DISTINCT(ARRAY [])", new ArrayType(UNKNOWN), ImmutableList.of()); // Order matters here. Result should be stable. 
assertFunction("ARRAY_DISTINCT(ARRAY [2, 3, 4, 3, 1, 2, 3])", new ArrayType(BIGINT), ImmutableList.of(2L, 3L, 4L, 1L));
        assertFunction("ARRAY_DISTINCT(ARRAY [2.2, 3.3, 4.4, 3.3, 1, 2.2, 3.3])", new ArrayType(DOUBLE), ImmutableList.of(2.2, 3.3, 4.4, 1.0));
        assertFunction("ARRAY_DISTINCT(ARRAY [TRUE, TRUE, TRUE])", new ArrayType(BOOLEAN), ImmutableList.of(true));
        assertFunction("ARRAY_DISTINCT(ARRAY [TRUE, FALSE, FALSE, TRUE])", new ArrayType(BOOLEAN), ImmutableList.of(true, false));
        assertFunction("ARRAY_DISTINCT(ARRAY [from_unixtime(100), from_unixtime(1), from_unixtime(100)])", new ArrayType(TIMESTAMP), ImmutableList.of(sqlTimestamp(100 * 1000), sqlTimestamp(1000)));
        assertFunction("ARRAY_DISTINCT(ARRAY ['2', '3', '2'])", new ArrayType(VARCHAR), ImmutableList.of("2", "3"));
        assertFunction("ARRAY_DISTINCT(ARRAY ['BB', 'CCC', 'BB'])", new ArrayType(VARCHAR), ImmutableList.of("BB", "CCC"));
        assertFunction(
                "ARRAY_DISTINCT(ARRAY [ARRAY [1], ARRAY [1, 2], ARRAY [1, 2, 3], ARRAY [1, 2]])",
                new ArrayType(new ArrayType(BIGINT)),
                ImmutableList.of(ImmutableList.of(1L), ImmutableList.of(1L, 2L), ImmutableList.of(1L, 2L, 3L)));
        // a NULL element is kept once, in its first position
        assertFunction("ARRAY_DISTINCT(ARRAY [NULL, 2.2, 3.3, 4.4, 3.3, 1, 2.2, 3.3])", new ArrayType(DOUBLE), asList(null, 2.2, 3.3, 4.4, 1.0));
        assertFunction("ARRAY_DISTINCT(ARRAY [2, 3, NULL, 4, 3, 1, 2, 3])", new ArrayType(BIGINT), asList(2L, 3L, null, 4L, 1L));
        assertFunction("ARRAY_DISTINCT(ARRAY ['BB', 'CCC', 'BB', NULL])", new ArrayType(VARCHAR), asList("BB", "CCC", null));
        assertFunction("ARRAY_DISTINCT(ARRAY [NULL])", new ArrayType(UNKNOWN), asList((Object) null));
        assertFunction("ARRAY_DISTINCT(ARRAY [NULL, NULL])", new ArrayType(UNKNOWN), asList((Object) null));
        assertFunction("ARRAY_DISTINCT(ARRAY [NULL, NULL, NULL])", new ArrayType(UNKNOWN), asList((Object) null));
    }

    // SLICE(array, start, length): 1-based start, negative start counts from the end
    @Test
    public void testSlice()
            throws Exception
    {
        assertFunction("SLICE(ARRAY [1, 2, 3, 4, 5], 1, 4)", new ArrayType(BIGINT), ImmutableList.of(1L, 2L, 3L, 4L));
assertFunction("SLICE(ARRAY [1, 2], 1, 4)", new ArrayType(BIGINT), ImmutableList.of(1L, 2L)); assertFunction("SLICE(ARRAY [1, 2, 3, 4, 5], 3, 2)", new ArrayType(BIGINT), ImmutableList.of(3L, 4L)); assertFunction("SLICE(ARRAY ['1', '2', '3', '4'], 2, 1)", new ArrayType(VARCHAR), ImmutableList.of("2")); assertFunction("SLICE(ARRAY [1, 2, 3, 4], 3, 3)", new ArrayType(BIGINT), ImmutableList.of(3L, 4L)); assertFunction("SLICE(ARRAY [1, 2, 3, 4], -3, 3)", new ArrayType(BIGINT), ImmutableList.of(2L, 3L, 4L)); assertFunction("SLICE(ARRAY [1, 2, 3, 4], -3, 5)", new ArrayType(BIGINT), ImmutableList.of(2L, 3L, 4L)); assertFunction("SLICE(ARRAY [1, 2, 3, 4], 1, 0)", new ArrayType(BIGINT), ImmutableList.of()); assertFunction("SLICE(ARRAY [1, 2, 3, 4], -2, 0)", new ArrayType(BIGINT), ImmutableList.of()); assertFunction("SLICE(ARRAY [ARRAY [1], ARRAY [2, 3], ARRAY [4, 5, 6]], 1, 2)", new ArrayType(new ArrayType(BIGINT)), ImmutableList.of(ImmutableList.of(1L), ImmutableList.of(2L, 3L))); assertInvalidFunction("SLICE(ARRAY [1, 2, 3, 4], 1, -1)", INVALID_FUNCTION_ARGUMENT); assertInvalidFunction("SLICE(ARRAY [1, 2, 3, 4], 0, 1)", INVALID_FUNCTION_ARGUMENT); } @Test public void testArrayIntersect() throws Exception { assertFunction("ARRAY_INTERSECT(ARRAY [12], ARRAY [10])", new ArrayType(BIGINT), ImmutableList.of()); assertFunction("ARRAY_INTERSECT(ARRAY ['foo', 'bar', 'baz'], ARRAY ['foo', 'test', 'bar'])", new ArrayType(VARCHAR), ImmutableList.of("bar", "foo")); assertFunction("ARRAY_INTERSECT(ARRAY [NULL], ARRAY [NULL, NULL])", new ArrayType(UNKNOWN), asList((Object) null)); assertFunction("ARRAY_INTERSECT(ARRAY ['abc', NULL, 'xyz', NULL], ARRAY [NULL, 'abc', NULL, NULL])", new ArrayType(VARCHAR), asList(null, "abc")); assertFunction("ARRAY_INTERSECT(ARRAY [1, 5], ARRAY [1])", new ArrayType(BIGINT), ImmutableList.of(1L)); assertFunction("ARRAY_INTERSECT(ARRAY [1, 1, 2, 4], ARRAY [1, 1, 4, 4])", new ArrayType(BIGINT), ImmutableList.of(1L, 4L)); assertFunction("ARRAY_INTERSECT(ARRAY 
[2, 8], ARRAY [8, 3])", new ArrayType(BIGINT), ImmutableList.of(8L)); assertFunction("ARRAY_INTERSECT(ARRAY [IF (RAND() < 1.0, 7, 1) , 2], ARRAY [7])", new ArrayType(BIGINT), ImmutableList.of(7L)); assertFunction("ARRAY_INTERSECT(ARRAY [1, 5], ARRAY [1.0])", new ArrayType(DOUBLE), ImmutableList.of(1.0)); assertFunction("ARRAY_INTERSECT(ARRAY [8.3, 1.6, 4.1, 5.2], ARRAY [4.0, 5.2, 8.3, 9.7, 3.5])", new ArrayType(DOUBLE), ImmutableList.of(5.2, 8.3)); assertFunction("ARRAY_INTERSECT(ARRAY [5.1, 7, 3.0, 4.8, 10], ARRAY [6.5, 10.0, 1.9, 5.1, 3.9, 4.8])", new ArrayType(DOUBLE), ImmutableList.of(4.8, 5.1, 10.0)); assertFunction("ARRAY_INTERSECT(ARRAY [ARRAY [4, 5], ARRAY [6, 7]], ARRAY [ARRAY [4, 5], ARRAY [6, 8]])", new ArrayType(new ArrayType(BIGINT)), ImmutableList.of(ImmutableList.of(4L, 5L))); } @Test public void testComparison() throws Exception { assertFunction("ARRAY [1, 2, 3] = ARRAY [1, 2, 3]", BOOLEAN, true); assertFunction("ARRAY [1, 2, 3] != ARRAY [1, 2, 3]", BOOLEAN, false); assertFunction("ARRAY [TRUE, FALSE] = ARRAY [TRUE, FALSE]", BOOLEAN, true); assertFunction("ARRAY [TRUE, FALSE] != ARRAY [TRUE, FALSE]", BOOLEAN, false); assertFunction("ARRAY [1.1, 2.2, 3.3, 4.4] = ARRAY [1.1, 2.2, 3.3, 4.4]", BOOLEAN, true); assertFunction("ARRAY [1.1, 2.2, 3.3, 4.4] != ARRAY [1.1, 2.2, 3.3, 4.4]", BOOLEAN, false); assertFunction("ARRAY ['puppies', 'kittens'] = ARRAY ['puppies', 'kittens']", BOOLEAN, true); assertFunction("ARRAY ['puppies', 'kittens'] != ARRAY ['puppies', 'kittens']", BOOLEAN, false); assertFunction("ARRAY [TIME '01:02:03.456 America/Los_Angeles', TIME '10:20:30.456 America/Los_Angeles'] = ARRAY [TIME '01:02:03.456 America/Los_Angeles', TIME '10:20:30.456 America/Los_Angeles']", BOOLEAN, true); assertFunction("ARRAY [TIME '01:02:03.456 America/Los_Angeles', TIME '10:20:30.456 America/Los_Angeles'] != ARRAY [TIME '01:02:03.456 America/Los_Angeles', TIME '10:20:30.456 America/Los_Angeles']", BOOLEAN, false); assertFunction("ARRAY [timestamp '2012-10-31 
08:00 UTC'] = ARRAY [timestamp '2012-10-31 01:00 America/Los_Angeles']", BOOLEAN, true); assertFunction("ARRAY [timestamp '2012-10-31 08:00 UTC'] != ARRAY [timestamp '2012-10-31 01:00 America/Los_Angeles']", BOOLEAN, false); assertFunction("ARRAY [ARRAY [1, 2], ARRAY [3, 4, 5]] = ARRAY [ARRAY [1, 2], ARRAY [3, 4, 5]]", BOOLEAN, true); assertFunction("ARRAY [ARRAY [1, 2], ARRAY [3, 4, 5]] != ARRAY [ARRAY [1, 2], ARRAY [3, 4, 5]]", BOOLEAN, false); assertFunction("ARRAY [10, 20, 30] != ARRAY [5]", BOOLEAN, true); assertFunction("ARRAY [10, 20, 30] = ARRAY [5]", BOOLEAN, false); assertFunction("ARRAY [1, 2, 3] != ARRAY [3, 2, 1]", BOOLEAN, true); assertFunction("ARRAY [1, 2, 3] = ARRAY [3, 2, 1]", BOOLEAN, false); assertFunction("ARRAY [TRUE, FALSE, TRUE] != ARRAY [TRUE]", BOOLEAN, true); assertFunction("ARRAY [TRUE, FALSE, TRUE] = ARRAY [TRUE]", BOOLEAN, false); assertFunction("ARRAY [TRUE, FALSE] != ARRAY [FALSE, FALSE]", BOOLEAN, true); assertFunction("ARRAY [TRUE, FALSE] = ARRAY [FALSE, FALSE]", BOOLEAN, false); assertFunction("ARRAY [1.1, 2.2, 3.3, 4.4] != ARRAY [1.1, 2.2]", BOOLEAN, true); assertFunction("ARRAY [1.1, 2.2, 3.3, 4.4] = ARRAY [1.1, 2.2]", BOOLEAN, false); assertFunction("ARRAY [1.1, 2.2, 3.3] != ARRAY [11.1, 22.1, 1.1, 44.1]", BOOLEAN, true); assertFunction("ARRAY [1.1, 2.2, 3.3] = ARRAY [11.1, 22.1, 1.1, 44.1]", BOOLEAN, false); assertFunction("ARRAY ['puppies', 'kittens', 'lizards'] != ARRAY ['puppies', 'kittens']", BOOLEAN, true); assertFunction("ARRAY ['puppies', 'kittens', 'lizards'] = ARRAY ['puppies', 'kittens']", BOOLEAN, false); assertFunction("ARRAY ['puppies', 'kittens'] != ARRAY ['z', 'f', 's', 'd', 'g']", BOOLEAN, true); assertFunction("ARRAY ['puppies', 'kittens'] = ARRAY ['z', 'f', 's', 'd', 'g']", BOOLEAN, false); assertFunction("ARRAY [TIME '01:02:03.456 America/Los_Angeles', TIME '10:20:30.456 America/Los_Angeles', TIME '10:20:30.456 America/Los_Angeles'] != ARRAY [TIME '01:02:03.456 America/Los_Angeles', TIME '10:20:30.456 
America/Los_Angeles']", BOOLEAN, true); assertFunction("ARRAY [TIME '01:02:03.456 America/Los_Angeles', TIME '10:20:30.456 America/Los_Angeles', TIME '10:20:30.456 America/Los_Angeles'] = ARRAY [TIME '01:02:03.456 America/Los_Angeles', TIME '10:20:30.456 America/Los_Angeles']", BOOLEAN, false); assertFunction("ARRAY [TIME '01:02:03.456 America/Los_Angeles', TIME '10:20:30.456 America/Los_Angeles'] != ARRAY [TIME '04:05:06.456 America/Los_Angeles', TIME '10:20:30.456 America/Los_Angeles']", BOOLEAN, true); assertFunction("ARRAY [TIME '01:02:03.456 America/Los_Angeles', TIME '10:20:30.456 America/Los_Angeles'] = ARRAY [TIME '04:05:06.456 America/Los_Angeles', TIME '10:20:30.456 America/Los_Angeles']", BOOLEAN, false); assertFunction("ARRAY [ARRAY [1, 2], ARRAY [3, 4, 5]] != ARRAY [ARRAY [1, 2], ARRAY [3, 4, 5, 6]]", BOOLEAN, true); assertFunction("ARRAY [ARRAY [1, 2], ARRAY [3, 4, 5]] = ARRAY [ARRAY [1, 2], ARRAY [3, 4, 5, 6]]", BOOLEAN, false); assertFunction("ARRAY [ARRAY [1, 2], ARRAY [3, 4, 5]] != ARRAY [ARRAY [1, 2, 3], ARRAY [4, 5]]", BOOLEAN, true); assertFunction("ARRAY [ARRAY [1, 2], ARRAY [3, 4, 5]] = ARRAY [ARRAY [1, 2, 3], ARRAY [4, 5]]", BOOLEAN, false); assertFunction("ARRAY [10, 20, 30] < ARRAY [10, 20, 40, 50]", BOOLEAN, true); assertFunction("ARRAY [10, 20, 30] >= ARRAY [10, 20, 40, 50]", BOOLEAN, false); assertFunction("ARRAY [10, 20, 30] < ARRAY [10, 40]", BOOLEAN, true); assertFunction("ARRAY [10, 20, 30] >= ARRAY [10, 40]", BOOLEAN, false); assertFunction("ARRAY [10, 20] < ARRAY [10, 20, 30]", BOOLEAN, true); assertFunction("ARRAY [10, 20] >= ARRAY [10, 20, 30]", BOOLEAN, false); assertFunction("ARRAY [TRUE, FALSE] < ARRAY [TRUE, TRUE, TRUE]", BOOLEAN, true); assertFunction("ARRAY [TRUE, FALSE] >= ARRAY [TRUE, TRUE, TRUE]", BOOLEAN, false); assertFunction("ARRAY [TRUE, FALSE, FALSE] < ARRAY [TRUE, TRUE]", BOOLEAN, true); assertFunction("ARRAY [TRUE, FALSE, FALSE] >= ARRAY [TRUE, TRUE]", BOOLEAN, false); assertFunction("ARRAY [TRUE, FALSE] < ARRAY 
[TRUE, FALSE, FALSE]", BOOLEAN, true); assertFunction("ARRAY [TRUE, FALSE] >= ARRAY [TRUE, FALSE, FALSE]", BOOLEAN, false); assertFunction("ARRAY [1.1, 2.2, 3.3, 4.4] < ARRAY [1.1, 2.2, 4.4, 4.4]", BOOLEAN, true); assertFunction("ARRAY [1.1, 2.2, 3.3, 4.4] >= ARRAY [1.1, 2.2, 4.4, 4.4]", BOOLEAN, false); assertFunction("ARRAY [1.1, 2.2, 3.3, 4.4] < ARRAY [1.1, 2.2, 5.5]", BOOLEAN, true); assertFunction("ARRAY [1.1, 2.2, 3.3, 4.4] >= ARRAY [1.1, 2.2, 5.5]", BOOLEAN, false); assertFunction("ARRAY [1.1, 2.2] < ARRAY [1.1, 2.2, 5.5]", BOOLEAN, true); assertFunction("ARRAY [1.1, 2.2] >= ARRAY [1.1, 2.2, 5.5]", BOOLEAN, false); assertFunction("ARRAY ['puppies', 'kittens', 'lizards'] < ARRAY ['puppies', 'lizards', 'lizards']", BOOLEAN, true); assertFunction("ARRAY ['puppies', 'kittens', 'lizards'] >= ARRAY ['puppies', 'lizards', 'lizards']", BOOLEAN, false); assertFunction("ARRAY ['puppies', 'kittens', 'lizards'] < ARRAY ['puppies', 'lizards']", BOOLEAN, true); assertFunction("ARRAY ['puppies', 'kittens', 'lizards'] >= ARRAY ['puppies', 'lizards']", BOOLEAN, false); assertFunction("ARRAY ['puppies', 'kittens'] < ARRAY ['puppies', 'kittens', 'lizards']", BOOLEAN, true); assertFunction("ARRAY ['puppies', 'kittens'] >= ARRAY ['puppies', 'kittens', 'lizards']", BOOLEAN, false); assertFunction("ARRAY [TIME '01:02:03.456 America/Los_Angeles', TIME '10:20:30.456 America/Los_Angeles'] < ARRAY [TIME '04:05:06.456 America/Los_Angeles', TIME '10:20:30.456 America/Los_Angeles']", BOOLEAN, true); assertFunction("ARRAY [TIME '01:02:03.456 America/Los_Angeles', TIME '10:20:30.456 America/Los_Angeles'] >= ARRAY [TIME '04:05:06.456 America/Los_Angeles', TIME '10:20:30.456 America/Los_Angeles']", BOOLEAN, false); assertFunction("ARRAY [TIME '01:02:03.456 America/Los_Angeles', TIME '10:20:30.456 America/Los_Angeles', TIME '10:20:30.456 America/Los_Angeles'] < ARRAY [TIME '04:05:06.456 America/Los_Angeles', TIME '10:20:30.456 America/Los_Angeles']", BOOLEAN, true); assertFunction("ARRAY 
[TIME '01:02:03.456 America/Los_Angeles', TIME '10:20:30.456 America/Los_Angeles', TIME '10:20:30.456 America/Los_Angeles'] >= ARRAY [TIME '04:05:06.456 America/Los_Angeles', TIME '10:20:30.456 America/Los_Angeles']", BOOLEAN, false); assertFunction("ARRAY [TIME '01:02:03.456 America/Los_Angeles', TIME '10:20:30.456 America/Los_Angeles'] < ARRAY [TIME '01:02:03.456 America/Los_Angeles', TIME '10:20:30.456 America/Los_Angeles', TIME '10:20:30.456 America/Los_Angeles']", BOOLEAN, true); assertFunction("ARRAY [TIME '01:02:03.456 America/Los_Angeles', TIME '10:20:30.456 America/Los_Angeles'] >= ARRAY [TIME '01:02:03.456 America/Los_Angeles', TIME '10:20:30.456 America/Los_Angeles', TIME '10:20:30.456 America/Los_Angeles']", BOOLEAN, false); assertFunction("ARRAY [ARRAY [1, 2], ARRAY [3, 4, 5]] < ARRAY [ARRAY [1, 2], ARRAY [3, 4, 5, 6]]", BOOLEAN, true); assertFunction("ARRAY [ARRAY [1, 2], ARRAY [3, 4, 5]] >= ARRAY [ARRAY [1, 2], ARRAY [3, 4, 5, 6]]", BOOLEAN, false); assertFunction("ARRAY [ARRAY [1, 2], ARRAY [3, 4, 5]] < ARRAY [ARRAY [1, 2], ARRAY [3, 5, 6]]", BOOLEAN, true); assertFunction("ARRAY [ARRAY [1, 2], ARRAY [3, 4, 5]] >= ARRAY [ARRAY [1, 2], ARRAY [3, 5, 6]]", BOOLEAN, false); assertFunction("ARRAY [10, 20, 30] > ARRAY [10, 20, 20]", BOOLEAN, true); assertFunction("ARRAY [10, 20, 30] <= ARRAY [10, 20, 20]", BOOLEAN, false); assertFunction("ARRAY [10, 20, 30] > ARRAY [10, 20]", BOOLEAN, true); assertFunction("ARRAY [10, 20, 30] <= ARRAY [10, 20]", BOOLEAN, false); assertFunction("ARRAY [TRUE, TRUE, TRUE] > ARRAY [TRUE, TRUE, FALSE]", BOOLEAN, true); assertFunction("ARRAY [TRUE, TRUE, TRUE] <= ARRAY [TRUE, TRUE, FALSE]", BOOLEAN, false); assertFunction("ARRAY [TRUE, TRUE, FALSE] > ARRAY [TRUE, TRUE]", BOOLEAN, true); assertFunction("ARRAY [TRUE, TRUE, FALSE] <= ARRAY [TRUE, TRUE]", BOOLEAN, false); assertFunction("ARRAY [1.1, 2.2, 3.3, 4.4] > ARRAY [1.1, 2.2, 2.2, 4.4]", BOOLEAN, true); assertFunction("ARRAY [1.1, 2.2, 3.3, 4.4] <= ARRAY [1.1, 2.2, 2.2, 
4.4]", BOOLEAN, false); assertFunction("ARRAY [1.1, 2.2, 3.3, 4.4] > ARRAY [1.1, 2.2, 3.3]", BOOLEAN, true); assertFunction("ARRAY [1.1, 2.2, 3.3, 4.4] <= ARRAY [1.1, 2.2, 3.3]", BOOLEAN, false); assertFunction("ARRAY ['puppies', 'kittens', 'lizards'] > ARRAY ['puppies', 'kittens', 'kittens']", BOOLEAN, true); assertFunction("ARRAY ['puppies', 'kittens', 'lizards'] <= ARRAY ['puppies', 'kittens', 'kittens']", BOOLEAN, false); assertFunction("ARRAY ['puppies', 'kittens', 'lizards'] > ARRAY ['puppies', 'kittens']", BOOLEAN, true); assertFunction("ARRAY ['puppies', 'kittens', 'lizards'] <= ARRAY ['puppies', 'kittens']", BOOLEAN, false); assertFunction("ARRAY [TIME '01:02:03.456 America/Los_Angeles', TIME '10:20:30.456 America/Los_Angeles', TIME '10:20:30.456 America/Los_Angeles'] > ARRAY [TIME '01:02:03.456 America/Los_Angeles', TIME '10:20:30.456 America/Los_Angeles']", BOOLEAN, true); assertFunction("ARRAY [TIME '01:02:03.456 America/Los_Angeles', TIME '10:20:30.456 America/Los_Angeles', TIME '10:20:30.456 America/Los_Angeles'] <= ARRAY [TIME '01:02:03.456 America/Los_Angeles', TIME '10:20:30.456 America/Los_Angeles']", BOOLEAN, false); assertFunction("ARRAY [TIME '01:02:03.456 America/Los_Angeles', TIME '10:20:30.456 America/Los_Angeles'] > ARRAY [TIME '01:02:03.456 America/Los_Angeles', TIME '10:20:20.456 America/Los_Angeles']", BOOLEAN, true); assertFunction("ARRAY [TIME '01:02:03.456 America/Los_Angeles', TIME '10:20:30.456 America/Los_Angeles'] <= ARRAY [TIME '01:02:03.456 America/Los_Angeles', TIME '10:20:20.456 America/Los_Angeles']", BOOLEAN, false); assertFunction("ARRAY [ARRAY [1, 2], ARRAY [3, 4, 5]] > ARRAY [ARRAY [1, 2], ARRAY [3, 4]]", BOOLEAN, true); assertFunction("ARRAY [ARRAY [1, 2], ARRAY [3, 4, 5]] <= ARRAY [ARRAY [1, 2], ARRAY [3, 4]]", BOOLEAN, false); assertFunction("ARRAY [ARRAY [1, 2], ARRAY [3, 4, 5]] > ARRAY [ARRAY [1, 2], ARRAY [3, 3, 4]]", BOOLEAN, true); assertFunction("ARRAY [ARRAY [1, 2], ARRAY [3, 4, 5]] <= ARRAY [ARRAY [1, 2], ARRAY 
[3, 3, 4]]", BOOLEAN, false); assertFunction("ARRAY [10, 20, 30] <= ARRAY [50]", BOOLEAN, true); assertFunction("ARRAY [10, 20, 30] > ARRAY [50]", BOOLEAN, false); assertFunction("ARRAY [10, 20, 30] <= ARRAY [10, 20, 30]", BOOLEAN, true); assertFunction("ARRAY [10, 20, 30] > ARRAY [10, 20, 30]", BOOLEAN, false); assertFunction("ARRAY [TRUE, FALSE] <= ARRAY [TRUE, FALSE, true]", BOOLEAN, true); assertFunction("ARRAY [TRUE, FALSE] > ARRAY [TRUE, FALSE, true]", BOOLEAN, false); assertFunction("ARRAY [TRUE, FALSE] <= ARRAY [TRUE, FALSE]", BOOLEAN, true); assertFunction("ARRAY [TRUE, FALSE] > ARRAY [TRUE, FALSE]", BOOLEAN, false); assertFunction("ARRAY [1.1, 2.2, 3.3, 4.4] <= ARRAY [2.2, 5.5]", BOOLEAN, true); assertFunction("ARRAY [1.1, 2.2, 3.3, 4.4] > ARRAY [2.2, 5.5]", BOOLEAN, false); assertFunction("ARRAY [1.1, 2.2, 3.3, 4.4] <= ARRAY [1.1, 2.2, 3.3, 4.4]", BOOLEAN, true); assertFunction("ARRAY [1.1, 2.2, 3.3, 4.4] > ARRAY [1.1, 2.2, 3.3, 4.4]", BOOLEAN, false); assertFunction("ARRAY ['puppies', 'kittens', 'lizards'] <= ARRAY ['puppies', 'lizards']", BOOLEAN, true); assertFunction("ARRAY ['puppies', 'kittens', 'lizards'] > ARRAY ['puppies', 'lizards']", BOOLEAN, false); assertFunction("ARRAY ['puppies', 'kittens'] <= ARRAY ['puppies', 'kittens']", BOOLEAN, true); assertFunction("ARRAY ['puppies', 'kittens'] > ARRAY ['puppies', 'kittens']", BOOLEAN, false); assertFunction("ARRAY [TIME '01:02:03.456 America/Los_Angeles', TIME '10:20:30.456 America/Los_Angeles'] <= ARRAY [TIME '04:05:06.456 America/Los_Angeles', TIME '10:20:30.456 America/Los_Angeles']", BOOLEAN, true); assertFunction("ARRAY [TIME '01:02:03.456 America/Los_Angeles', TIME '10:20:30.456 America/Los_Angeles'] > ARRAY [TIME '04:05:06.456 America/Los_Angeles', TIME '10:20:30.456 America/Los_Angeles']", BOOLEAN, false); assertFunction("ARRAY [TIME '01:02:03.456 America/Los_Angeles', TIME '10:20:30.456 America/Los_Angeles'] <= ARRAY [TIME '01:02:03.456 America/Los_Angeles', TIME '10:20:30.456 
America/Los_Angeles']", BOOLEAN, true); assertFunction("ARRAY [TIME '01:02:03.456 America/Los_Angeles', TIME '10:20:30.456 America/Los_Angeles'] > ARRAY [TIME '01:02:03.456 America/Los_Angeles', TIME '10:20:30.456 America/Los_Angeles']", BOOLEAN, false); assertFunction("ARRAY [ARRAY [1, 2], ARRAY [3, 4, 5]] <= ARRAY [ARRAY [1, 2], ARRAY [3, 4, 5]]", BOOLEAN, true); assertFunction("ARRAY [ARRAY [1, 2], ARRAY [3, 4, 5]] > ARRAY [ARRAY [1, 2], ARRAY [3, 4, 5]]", BOOLEAN, false); assertFunction("ARRAY [ARRAY [1, 2], ARRAY [3, 4, 5]] <= ARRAY [ARRAY [1, 2], ARRAY [3, 5, 6]]", BOOLEAN, true); assertFunction("ARRAY [ARRAY [1, 2], ARRAY [3, 4, 5]] > ARRAY [ARRAY [1, 2], ARRAY [3, 5, 6]]", BOOLEAN, false); assertFunction("ARRAY [10, 20, 30] >= ARRAY [10, 20, 30]", BOOLEAN, true); assertFunction("ARRAY [10, 20, 30] < ARRAY [10, 20, 30]", BOOLEAN, false); assertFunction("ARRAY [TRUE, FALSE, TRUE] >= ARRAY [TRUE, FALSE, TRUE]", BOOLEAN, true); assertFunction("ARRAY [TRUE, FALSE, TRUE] < ARRAY [TRUE, FALSE, TRUE]", BOOLEAN, false); assertFunction("ARRAY [TRUE, FALSE, TRUE] >= ARRAY [TRUE]", BOOLEAN, true); assertFunction("ARRAY [TRUE, FALSE, TRUE] < ARRAY [TRUE]", BOOLEAN, false); assertFunction("ARRAY [1.1, 2.2, 3.3, 4.4] >= ARRAY [1.1, 2.2]", BOOLEAN, true); assertFunction("ARRAY [1.1, 2.2, 3.3, 4.4] < ARRAY [1.1, 2.2]", BOOLEAN, false); assertFunction("ARRAY [1.1, 2.2, 3.3, 4.4] >= ARRAY [1.1, 2.2, 3.3, 4.4]", BOOLEAN, true); assertFunction("ARRAY [1.1, 2.2, 3.3, 4.4] < ARRAY [1.1, 2.2, 3.3, 4.4]", BOOLEAN, false); assertFunction("ARRAY ['puppies', 'kittens', 'lizards'] >= ARRAY ['puppies', 'kittens', 'kittens']", BOOLEAN, true); assertFunction("ARRAY ['puppies', 'kittens', 'lizards'] < ARRAY ['puppies', 'kittens', 'kittens']", BOOLEAN, false); assertFunction("ARRAY ['puppies', 'kittens'] >= ARRAY ['puppies', 'kittens']", BOOLEAN, true); assertFunction("ARRAY ['puppies', 'kittens'] < ARRAY ['puppies', 'kittens']", BOOLEAN, false); assertFunction("ARRAY [TIME '01:02:03.456 
America/Los_Angeles', TIME '10:20:30.456 America/Los_Angeles', TIME '10:20:30.456 America/Los_Angeles'] >= ARRAY [TIME '01:02:03.456 America/Los_Angeles', TIME '10:20:30.456 America/Los_Angeles']", BOOLEAN, true); assertFunction("ARRAY [TIME '01:02:03.456 America/Los_Angeles', TIME '10:20:30.456 America/Los_Angeles', TIME '10:20:30.456 America/Los_Angeles'] < ARRAY [TIME '01:02:03.456 America/Los_Angeles', TIME '10:20:30.456 America/Los_Angeles']", BOOLEAN, false); assertFunction("ARRAY [ARRAY [1, 2], ARRAY [3, 4, 5]] >= ARRAY [ARRAY [1, 2], ARRAY [3, 4]]", BOOLEAN, true); assertFunction("ARRAY [ARRAY [1, 2], ARRAY [3, 4, 5]] < ARRAY [ARRAY [1, 2], ARRAY [3, 4]]", BOOLEAN, false); assertFunction("ARRAY [ARRAY [1, 2], ARRAY [3, 4, 5]] >= ARRAY [ARRAY [1, 2], ARRAY [3, 4, 5]]", BOOLEAN, true); assertFunction("ARRAY [ARRAY [1, 2], ARRAY [3, 4, 5]] < ARRAY [ARRAY [1, 2], ARRAY [3, 4, 5]]", BOOLEAN, false); assertInvalidFunction("ARRAY [1, NULL] = ARRAY [1, 2]", NOT_SUPPORTED.toErrorCode()); } @Test public void testArrayRemove() throws Exception { assertFunction("ARRAY_REMOVE(ARRAY ['foo', 'bar', 'baz'], 'foo')", new ArrayType(VARCHAR), ImmutableList.of("bar", "baz")); assertFunction("ARRAY_REMOVE(ARRAY ['foo', 'bar', 'baz'], 'bar')", new ArrayType(VARCHAR), ImmutableList.of("foo", "baz")); assertFunction("ARRAY_REMOVE(ARRAY ['foo', 'bar', 'baz'], 'baz')", new ArrayType(VARCHAR), ImmutableList.of("foo", "bar")); assertFunction("ARRAY_REMOVE(ARRAY ['foo', 'bar', 'baz'], 'zzz')", new ArrayType(VARCHAR), ImmutableList.of("foo", "bar", "baz")); assertFunction("ARRAY_REMOVE(ARRAY ['foo', 'foo', 'foo'], 'foo')", new ArrayType(VARCHAR), ImmutableList.of()); assertFunction("ARRAY_REMOVE(ARRAY [NULL, 'bar', 'baz'], 'foo')", new ArrayType(VARCHAR), asList(null, "bar", "baz")); assertFunction("ARRAY_REMOVE(ARRAY ['foo', 'bar', NULL], 'foo')", new ArrayType(VARCHAR), asList("bar", null)); assertFunction("ARRAY_REMOVE(ARRAY [1, 2, 3], 1)", new ArrayType(BIGINT), ImmutableList.of(2L, 
3L)); assertFunction("ARRAY_REMOVE(ARRAY [1, 2, 3], 2)", new ArrayType(BIGINT), ImmutableList.of(1L, 3L)); assertFunction("ARRAY_REMOVE(ARRAY [1, 2, 3], 3)", new ArrayType(BIGINT), ImmutableList.of(1L, 2L)); assertFunction("ARRAY_REMOVE(ARRAY [1, 2, 3], 4)", new ArrayType(BIGINT), ImmutableList.of(1L, 2L, 3L)); assertFunction("ARRAY_REMOVE(ARRAY [1, 1, 1], 1)", new ArrayType(BIGINT), ImmutableList.of()); assertFunction("ARRAY_REMOVE(ARRAY [NULL, 2, 3], 1)", new ArrayType(BIGINT), asList(null, 2L, 3L)); assertFunction("ARRAY_REMOVE(ARRAY [1, NULL, 3], 1)", new ArrayType(BIGINT), asList(null, 3L)); assertFunction("ARRAY_REMOVE(ARRAY [-1.23, 3.14], 3.14)", new ArrayType(DOUBLE), ImmutableList.of(-1.23)); assertFunction("ARRAY_REMOVE(ARRAY [3.14], 0.0)", new ArrayType(DOUBLE), ImmutableList.of(3.14)); assertFunction("ARRAY_REMOVE(ARRAY [sqrt(-1), 3.14], 3.14)", new ArrayType(DOUBLE), ImmutableList.of(NaN)); assertFunction("ARRAY_REMOVE(ARRAY [-1.23, sqrt(-1)], nan())", new ArrayType(DOUBLE), ImmutableList.of(-1.23, NaN)); assertFunction("ARRAY_REMOVE(ARRAY [-1.23, nan()], nan())", new ArrayType(DOUBLE), ImmutableList.of(-1.23, NaN)); assertFunction("ARRAY_REMOVE(ARRAY [-1.23, infinity()], -1.23)", new ArrayType(DOUBLE), ImmutableList.of(POSITIVE_INFINITY)); assertFunction("ARRAY_REMOVE(ARRAY [infinity(), 3.14], infinity())", new ArrayType(DOUBLE), ImmutableList.of(3.14)); assertFunction("ARRAY_REMOVE(ARRAY [-1.23, NULL, 3.14], 3.14)", new ArrayType(DOUBLE), asList(-1.23, null)); assertFunction("ARRAY_REMOVE(ARRAY [TRUE, FALSE, TRUE], TRUE)", new ArrayType(BOOLEAN), ImmutableList.of(false)); assertFunction("ARRAY_REMOVE(ARRAY [TRUE, FALSE, TRUE], FALSE)", new ArrayType(BOOLEAN), ImmutableList.of(true, true)); assertFunction("ARRAY_REMOVE(ARRAY [NULL, FALSE, TRUE], TRUE)", new ArrayType(BOOLEAN), asList(null, false)); assertFunction("ARRAY_REMOVE(ARRAY [ARRAY ['foo'], ARRAY ['bar'], ARRAY ['baz']], ARRAY ['bar'])", new ArrayType(new ArrayType(VARCHAR)), 
ImmutableList.of(ImmutableList.of("foo"), ImmutableList.of("baz")));
    }

    /**
     * Asserts that evaluating {@code projection} raises a {@link PrestoException}
     * carrying exactly {@code errorCode}.
     *
     * <p>The expected type is passed as UNKNOWN with a {@code null} expected value
     * because the projection is never supposed to produce a result; if it does,
     * {@code fail} reports which projection unexpectedly succeeded.
     *
     * @param projection SQL expression expected to fail analysis/evaluation
     * @param errorCode  the error code the thrown PrestoException must carry
     */
    public void assertInvalidFunction(String projection, ErrorCode errorCode)
    {
        try {
            assertFunction(projection, UNKNOWN, null);
            // Reached only if the projection evaluated without throwing.
            fail("Expected error " + errorCode + " from " + projection);
        }
        // Only PrestoException is caught: any other exception type should fail
        // the test with its own stack trace rather than be asserted on.
        catch (PrestoException e) {
            assertEquals(e.getErrorCode(), errorCode);
        }
    }

    /**
     * Builds a {@link SqlTimestamp} for the given epoch millis using the test
     * session's time zone (TEST_SESSION is defined elsewhere in this test class's
     * imports — presumably the shared test session; verify against the base class).
     */
    private SqlTimestamp sqlTimestamp(long millisUtc)
    {
        return new SqlTimestamp(millisUtc, TEST_SESSION.getTimeZoneKey());
    }
}
package com.linkedin.thirdeye.impl.storage;

import com.linkedin.thirdeye.api.DimensionKey;
import com.linkedin.thirdeye.api.MetricSpec;
import com.linkedin.thirdeye.api.MetricTimeSeries;
import com.linkedin.thirdeye.api.StarTreeConfig;
import com.linkedin.thirdeye.api.StarTreeConstants;
import com.linkedin.thirdeye.impl.NumberUtils;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.input.CountingInputStream;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

import java.io.File;
import java.io.FileInputStream;
import java.io.FilenameFilter;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;

/**
 * Static helpers for the on-disk storage layout: appending to dimension /
 * metric stores, composing and parsing data directory names
 * ({@code data_<schedule>_<minTime>_<maxTime|LATEST>[_<treeId>]}), locating
 * the most recent data directory, and reading serialized index files.
 */
public class StorageUtils
{
  /** Time for file watchers to fire up.
   * If a lower granularity folder was modified within this time, do not delete it.
   * (Made final: this is a constant and was never reassigned.) */
  private static final long QUIESCENCE_TIME = 300000;

  /**
   * Adds a dimension combination to the end of a dimension store.
   *
   * <p>Each dimension value is translated to its integer id via the dictionary
   * and written in the order the dimensions appear in the config.
   *
   * <p>NOTE(review): if the dictionary has no entry for a value,
   * {@code getValueId} presumably returns null and the unboxing below throws
   * NullPointerException — confirm that callers always populate the dictionary
   * first.
   */
  public static void addToDimensionStore(StarTreeConfig config,
                                         ByteBuffer buffer,
                                         DimensionKey dimensionKey,
                                         DimensionDictionary dictionary)
  {
    for (int i = 0; i < config.getDimensions().size(); i++)
    {
      String dimensionName = config.getDimensions().get(i).getName();
      String dimensionValue = dimensionKey.getDimensionValues()[i];
      Integer valueId = dictionary.getValueId(dimensionName, dimensionValue);
      buffer.putInt(valueId);
    }
  }

  /**
   * Adds a metric time series to the end of a metric store.
   *
   * <p>Times are written in ascending order; each time is followed by one value
   * per configured metric. Any failure (e.g. buffer overflow) is rethrown as
   * IllegalStateException with the buffer state for diagnosis.
   */
  public static void addToMetricStore(StarTreeConfig config,
                                      ByteBuffer buffer,
                                      MetricTimeSeries timeSeries)
  {
    try
    {
      List<Long> times = new ArrayList<Long>(timeSeries.getTimeWindowSet());
      Collections.sort(times);
      for (Long time : times)
      {
        buffer.putLong(time);
        for (MetricSpec metricSpec : config.getMetrics())
        {
          Number value = timeSeries.get(time, metricSpec.getName());
          NumberUtils.addToBuffer(buffer, value, metricSpec.getType());
        }
      }
    }
    catch (Exception e)
    {
      throw new IllegalStateException("Buffer: " + buffer, e);
    }
  }

  /**
   * Builds a full data directory name:
   * {@code data_<schedule>_<minTime>_<maxTime|LATEST>_<treeId>}.
   * A null maxTime means the segment is still open ("LATEST").
   */
  public static String getDataDirName(String treeId,
                                      String schedule,
                                      DateTime minTime,
                                      DateTime maxTime)
  {
    return StarTreeConstants.DATA_DIR_PREFIX
        + "_" + schedule
        + "_" + StarTreeConstants.DATE_TIME_FORMATTER.print(minTime)
        + "_" + (maxTime == null ? "LATEST" : StarTreeConstants.DATE_TIME_FORMATTER.print(maxTime))
        + "_" + treeId;
  }

  /** @return the bare data directory prefix (no schedule or times). */
  public static String getDataDirPrefix()
  {
    return StarTreeConstants.DATA_DIR_PREFIX;
  }

  /**
   * @return the data directory prefix including schedule and time range
   *         (everything in {@link #getDataDirName} except the tree id).
   */
  public static String getDataDirPrefix(String schedule,
                                        DateTime minTime,
                                        DateTime maxTime)
  {
    return StarTreeConstants.DATA_DIR_PREFIX
        + "_" + schedule
        + "_" + StarTreeConstants.DATE_TIME_FORMATTER.print(minTime)
        + "_" + (maxTime == null ? "LATEST" : StarTreeConstants.DATE_TIME_FORMATTER.print(maxTime));
  }

  /** @return the data directory prefix including only the schedule. */
  public static String getDataDirPrefix(String schedule)
  {
    return StarTreeConstants.DATA_DIR_PREFIX + "_" + schedule;
  }

  /** Extracts the schedule token (index 1) from a data directory name. */
  public static String getSchedule(String dataDir)
  {
    return dataDir.split("_")[1];
  }

  /** Parses the minTime token (index 2) from a data directory name. */
  public static DateTime getMinTime(String dataDir)
  {
    return StarTreeConstants.DATE_TIME_FORMATTER.parseDateTime(dataDir.split("_")[2]);
  }

  /**
   * Parses the maxTime token (index 3) from a data directory name.
   * NOTE(review): throws if the token is the literal "LATEST" — callers are
   * presumably expected to check for open segments first; confirm.
   */
  public static DateTime getMaxTime(String dataDir)
  {
    return StarTreeConstants.DATE_TIME_FORMATTER.parseDateTime(dataDir.split("_")[3]);
  }

  /**
   * @return true if the path belongs to {@code lowerDir} and has been quiet
   *         (unmodified) for longer than {@link #QUIESCENCE_TIME}.
   */
  public static boolean isExpirable(File pathname, String lowerDir)
  {
    return pathname.getName().startsWith(lowerDir)
        && ((System.currentTimeMillis() - pathname.lastModified()) > QUIESCENCE_TIME);
  }

  /**
   * Renames every file directly under {@code dir} to
   * {@code <schedule>_<minTime>_<maxTime>_<originalName>}.
   */
  public static void prefixFilesWithTime(File dir,
                                         String schedule,
                                         DateTime minTime,
                                         DateTime maxTime) throws IOException
  {
    File[] files = dir.listFiles();
    if (files != null)
    {
      // Hoisted out of the loop: the formatted components are loop-invariant.
      String minTimeComponent = StarTreeConstants.DATE_TIME_FORMATTER.print(minTime);
      String maxTimeComponent = StarTreeConstants.DATE_TIME_FORMATTER.print(maxTime);
      for (File file : files)
      {
        File renamed = new File(file.getParent(),
            schedule + "_" + minTimeComponent + "_" + maxTimeComponent + "_" + file.getName());
        FileUtils.moveFile(file, renamed);
      }
    }
  }

  /**
   * Finds the data directory with the greatest maxTime under a collection dir.
   *
   * @return the latest data directory, or null if the directory cannot be
   *         listed or contains no data directories (previously an empty
   *         listing threw ArrayIndexOutOfBoundsException).
   */
  public static File findLatestDataDir(File collectionDir)
  {
    File[] dataDirs = collectionDir.listFiles(new FilenameFilter()
    {
      @Override
      public boolean accept(File dir, String name)
      {
        return name.startsWith(StorageUtils.getDataDirPrefix());
      }
    });

    if (dataDirs == null || dataDirs.length == 0)
    {
      return null;
    }

    Arrays.sort(dataDirs, new Comparator<File>()
    {
      @Override
      public int compare(File f1, File f2)
      {
        String[] f1Tokens = f1.getName().split("_");
        String[] f2Tokens = f2.getName().split("_");

        // Open segments ("LATEST") sort first, as before — but two open
        // segments now compare equal instead of both returning -1, which
        // violated the Comparator contract and could make Arrays.sort throw.
        boolean f1Latest = "LATEST".equals(f1Tokens[3]);
        boolean f2Latest = "LATEST".equals(f2Tokens[3]);
        if (f1Latest && f2Latest)
        {
          return 0;
        }
        else if (f1Latest)
        {
          return -1;
        }
        else if (f2Latest)
        {
          return 1;
        }

        DateTime f1MaxTime = StarTreeConstants.DATE_TIME_FORMATTER.parseDateTime(f1Tokens[3]);
        DateTime f2MaxTime = StarTreeConstants.DATE_TIME_FORMATTER.parseDateTime(f2Tokens[3]);
        // compareTo instead of (int)(millis - millis): the long subtraction
        // could overflow the int cast and invert the ordering.
        return f1MaxTime.compareTo(f2MaxTime);
      }
    });

    return dataDirs[dataDirs.length - 1];
  }

  /**
   * Moves the tree, metadata, and config files plus the contents of the
   * dimension and metric stores from one data directory to another.
   * Singleton files are only moved if the destination does not already have
   * them; store files are always moved.
   */
  public static void moveAllFiles(File srcDataDir, File dstDataDir) throws IOException
  {
    // Tree
    File srcTreeFile = new File(srcDataDir, StarTreeConstants.TREE_FILE_NAME);
    File dstTreeFile = new File(dstDataDir, StarTreeConstants.TREE_FILE_NAME);
    if (!dstTreeFile.exists())
    {
      FileUtils.moveFile(srcTreeFile, dstTreeFile);
    }

    // Metadata
    File srcMetadataFile = new File(srcDataDir, StarTreeConstants.METADATA_FILE_NAME);
    File dstMetadataFile = new File(dstDataDir, StarTreeConstants.METADATA_FILE_NAME);
    if (!dstMetadataFile.exists())
    {
      FileUtils.moveFile(srcMetadataFile, dstMetadataFile);
    }

    // Config
    File srcConfigFile = new File(srcDataDir, StarTreeConstants.CONFIG_FILE_NAME);
    File dstConfigFile = new File(dstDataDir, StarTreeConstants.CONFIG_FILE_NAME);
    if (!dstConfigFile.exists())
    {
      FileUtils.moveFile(srcConfigFile, dstConfigFile);
    }

    // Dimensions
    File[] dimensionFiles = new File(srcDataDir, StarTreeConstants.DIMENSION_STORE).listFiles();
    File dstDimensionStore = new File(dstDataDir, StarTreeConstants.DIMENSION_STORE);
    if (dimensionFiles != null)
    {
      for (File file : dimensionFiles)
      {
        FileUtils.moveFile(file, new File(dstDimensionStore, file.getName()));
      }
    }

    // Metrics
    File[] metricFiles = new File(srcDataDir, StarTreeConstants.METRIC_STORE).listFiles();
    File dstMetricStore = new File(dstDataDir, StarTreeConstants.METRIC_STORE);
    if (metricFiles != null)
    {
      for (File file : metricFiles)
      {
        FileUtils.moveFile(file, new File(dstMetricStore, file.getName()));
      }
    }
  }

  /** Reads a serialized metric index file into typed entries. */
  public static List<MetricIndexEntry> readMetricIndex(File indexFile) throws IOException
  {
    List<Object> objects = readObjectFile(indexFile);
    List<MetricIndexEntry> indexEntries = new ArrayList<MetricIndexEntry>(objects.size());
    for (Object o : objects)
    {
      indexEntries.add((MetricIndexEntry) o);
    }
    return indexEntries;
  }

  /** Reads a serialized dimension index file into typed entries. */
  public static List<DimensionIndexEntry> readDimensionIndex(File indexFile) throws IOException
  {
    List<Object> objects = readObjectFile(indexFile);
    List<DimensionIndexEntry> indexEntries = new ArrayList<DimensionIndexEntry>(objects.size());
    for (Object o : objects)
    {
      indexEntries.add((DimensionIndexEntry) o);
    }
    return indexEntries;
  }

  /**
   * Deserializes every object in a file, using a counting stream to stop at
   * EOF (ObjectInputStream has no clean end-of-stream probe).
   *
   * <p>The stream is opened before the try so that the ObjectInputStream
   * header read — which can throw — no longer leaks the FileInputStream;
   * closing the counting stream in the finally closes the underlying file.
   */
  private static List<Object> readObjectFile(File objectFile) throws IOException
  {
    long fileLength = objectFile.length();
    List<Object> objects = new ArrayList<Object>();

    CountingInputStream cis = new CountingInputStream(new FileInputStream(objectFile));
    try
    {
      ObjectInputStream ois = new ObjectInputStream(cis);
      while (cis.getByteCount() < fileLength)
      {
        objects.add(ois.readObject());
      }
    }
    catch (ClassNotFoundException e)
    {
      throw new IOException(e);
    }
    finally
    {
      cis.close();
    }

    return objects;
  }

  /**
   * Polls the file's lastModified timestamp every {@code sleepMillis}.
   *
   * @return true as soon as one full sleep interval passes with no
   *         modification; false if the file kept changing until
   *         {@code timeoutMillis} elapsed.
   */
  public static boolean waitForModifications(File file, long sleepMillis, long timeoutMillis)
      throws InterruptedException
  {
    long startTimeMillis = System.currentTimeMillis();
    long lastModified = file.lastModified();
    do
    {
      Thread.sleep(sleepMillis);
      long currentLastModified = file.lastModified();
      if (lastModified == currentLastModified)
      {
        return true;
      }
      lastModified = currentLastModified;
    }
    while (System.currentTimeMillis() - startTimeMillis < timeoutMillis);
    return false;
  }
}
package com.morihacky.android.rxjava.fragments;

import android.content.Context;
import android.os.Bundle;
import android.os.Handler;
import android.os.Looper;
import android.support.annotation.Nullable;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.ListView;
import com.morihacky.android.rxjava.R;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.concurrent.TimeUnit;
import butterknife.Bind;
import butterknife.ButterKnife;
import butterknife.OnClick;
import rx.Observable;
import rx.functions.Action0;
import rx.functions.Action1;
import rx.functions.Func1;
import rx.subscriptions.CompositeSubscription;
import timber.log.Timber;

/**
 * Demonstrates two polling strategies with RxJava 1.x:
 * <ul>
 *   <li>fixed-interval polling via {@code Observable.interval} + {@code take}</li>
 *   <li>increasingly delayed polling via {@code repeatWhen} + {@code timer}</li>
 * </ul>
 * Results are appended to an on-screen log list (newest entry first).
 */
public class PollingFragment extends BaseFragment {

  // Fire immediately, then every second, for at most 8 polls.
  private static final int INITIAL_DELAY = 0;
  private static final int POLLING_INTERVAL = 1000;
  private static final int POLL_COUNT = 8;

  @Bind(R.id.list_threading_log) ListView _logsList;

  private LogAdapter _adapter;
  // Backing log store; entries are inserted at index 0 so newest shows on top.
  private List<String> _logs;
  private CompositeSubscription _subscriptions;
  // Incremented once per simulated network call; doubles as the "result".
  private int _counter = 0;

  @Override
  public void onCreate(@Nullable Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    _subscriptions = new CompositeSubscription();
  }

  @Override
  public void onActivityCreated(@Nullable Bundle savedInstanceState) {
    super.onActivityCreated(savedInstanceState);
    _setupLogger();
  }

  @Override
  public View onCreateView(LayoutInflater inflater,
      @Nullable ViewGroup container,
      @Nullable Bundle savedInstanceState) {
    View layout = inflater.inflate(R.layout.fragment_polling, container, false);
    ButterKnife.bind(this, layout);
    return layout;
  }

  @Override
  public void onDestroy() {
    super.onDestroy();
    // Unsubscribing here stops any in-flight polling when the fragment dies.
    _subscriptions.unsubscribe();
    ButterKnife.unbind(this);
  }

  /**
   * Simple fixed-interval polling: interval emits a heartbeat every
   * POLLING_INTERVAL ms, each heartbeat runs the (blocking) fake network call,
   * and take(pollCount) caps the number of polls.
   */
  @OnClick(R.id.btn_start_simple_polling)
  public void onStartSimplePollingClicked() {
    final int pollCount = POLL_COUNT;

    _subscriptions.add(//
        Observable.interval(INITIAL_DELAY, POLLING_INTERVAL, TimeUnit.MILLISECONDS)
            .map(new Func1<Long, String>() {
              @Override
              public String call(Long heartBeat) {
                return _doNetworkCallAndGetStringResult(heartBeat);
              }
            }).take(pollCount)
            .doOnSubscribe(new Action0() {
              @Override
              public void call() {
                _log(String.format("Start simple polling - %s", _counter));
              }
            })
            .subscribe(new Action1<String>() {
              @Override
              public void call(String taskName) {
                _log(String.format(Locale.US,
                    "Executing polled task [%s] now time : [xx:%02d]",
                    taskName,
                    _getSecondHand()));
              }
            })
    );
  }

  /**
   * Increasingly delayed polling: the source emits once, then repeatWhen
   * re-subscribes after a delay that grows with each repetition (see
   * {@link RepeatWithDelay}).
   */
  @OnClick(R.id.btn_start_increasingly_delayed_polling)
  public void onStartIncreasinglyDelayedPolling() {
    _setupLogger();

    final int pollingInterval = POLLING_INTERVAL;
    final int pollCount = POLL_COUNT;

    _log(String.format(Locale.US,
        "Start increasingly delayed polling now time: [xx:%02d]",
        _getSecondHand()));

    _subscriptions.add(//
        Observable.just(1)
            .repeatWhen(new RepeatWithDelay(pollCount, pollingInterval))
            .subscribe(new Action1<Object>() {
              @Override
              public void call(Object o) {
                _log(String.format(Locale.US,
                    "Executing polled task now time : [xx:%02d]",
                    _getSecondHand()));
              }
            }, new Action1<Throwable>() {
              @Override
              public void call(Throwable e) {
                Timber.d(e, "arrrr. Error");
              }
            })
    );
  }

  // -----------------------------------------------------------------------------------
  // CAUTION:
  // --------------------------------------
  // THIS notificationHandler class HAS NO BUSINESS BEING non-static
  // I ONLY did this cause i wanted access to the `_log` method from inside here
  // for the purpose of demonstration. In the real world, make it static and LET IT BE!!
  // It's 12am in the morning and i feel lazy dammit !!!

  //public static class RepeatWithDelay
  public class RepeatWithDelay
      implements Func1<Observable<? extends Void>, Observable<?>> {

    private final int _repeatLimit;
    private final int _pollingInterval;
    // Mutable per-subscription state: which repetition we're on (starts at 1).
    private int _repeatCount = 1;

    RepeatWithDelay(int repeatLimit, int pollingInterval) {
      _pollingInterval = pollingInterval;
      _repeatLimit = repeatLimit;
    }

    // this is a notificationhandler, all we care about is
    // the emission "type" not emission "content"
    // only onNext triggers a re-subscription
    @Override
    public Observable<?> call(Observable<? extends Void> inputObservable) {

      // it is critical to use inputObservable in the chain for the result
      // ignoring it and doing your own thing will break the sequence

      return inputObservable.flatMap(new Func1<Void, Observable<?>>() {
        @Override
        public Observable<?> call(Void blah) {

          if (_repeatCount >= _repeatLimit) {
            // terminate the sequence cause we reached the limit
            _log("Completing sequence");
            return Observable.empty();
          }

          // since we don't get an input
          // we store state in this handler to tell us the point of time we're firing
          _repeatCount++;

          // Delay grows linearly with the repeat count, so each poll waits
          // longer than the last before re-subscribing.
          return Observable.timer(_repeatCount * _pollingInterval,
              TimeUnit.MILLISECONDS);
        }
      });
    }
  }

  // -----------------------------------------------------------------------------------
  // Method that help wiring up the example (irrelevant to RxJava)

  /**
   * Simulates a slow network call by sleeping, then returns the incremented
   * counter as the "result". Attempt 4 is deliberately extra slow to show the
   * polling logic tolerating a long-running task.
   */
  private String _doNetworkCallAndGetStringResult(long attempt) {
    try {
      if (attempt == 4) {
        // randomly make one event super long so we test that the repeat logic waits
        // and accounts for this.
        Thread.sleep(9000);
      } else {
        Thread.sleep(3000);
      }
    } catch (InterruptedException e) {
      Timber.d("Operation was interrupted");
    }
    _counter++;

    return String.valueOf(_counter);
  }

  /** @return the seconds component (0-59) of the current wall-clock time. */
  private int _getSecondHand() {
    long millis = System.currentTimeMillis();
    return (int) (TimeUnit.MILLISECONDS.toSeconds(millis)
        - TimeUnit.MINUTES.toSeconds(TimeUnit.MILLISECONDS.toMinutes(millis)));
  }

  /**
   * Prepends a log line (tagged with the calling thread) and refreshes the
   * adapter — on the main thread if necessary, since ListView adapters may
   * only be touched there.
   */
  private void _log(String logMsg) {
    if (_isCurrentlyOnMainThread()) {
      _logs.add(0, logMsg + " (main thread) ");
      _adapter.clear();
      _adapter.addAll(_logs);
    } else {
      _logs.add(0, logMsg + " (NOT main thread) ");

      // You can only do below stuff on main thread.
      new Handler(Looper.getMainLooper()).post(new Runnable() {
        @Override
        public void run() {
          _adapter.clear();
          _adapter.addAll(_logs);
        }
      });
    }
  }

  /** Resets the log list, adapter, and poll counter to a clean state. */
  private void _setupLogger() {
    _logs = new ArrayList<>();
    _adapter = new LogAdapter(getActivity(), new ArrayList<String>());
    _logsList.setAdapter(_adapter);
    _counter = 0;
  }

  private boolean _isCurrentlyOnMainThread() {
    return Looper.myLooper() == Looper.getMainLooper();
  }

  /** Trivial ArrayAdapter binding each log string to the item_log row layout. */
  private class LogAdapter extends ArrayAdapter<String> {

    public LogAdapter(Context context, List<String> logs) {
      super(context, R.layout.item_log, R.id.item_log, logs);
    }
  }
}
/** * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for * license information. * * Code generated by Microsoft (R) AutoRest Code Generator. */ package com.microsoft.azure.management.network.implementation; import com.microsoft.azure.management.resources.fluentcore.collection.InnerSupportsGet; import com.microsoft.azure.management.resources.fluentcore.collection.InnerSupportsDelete; import com.microsoft.azure.management.resources.fluentcore.collection.InnerSupportsListing; import retrofit2.Retrofit; import com.google.common.reflect.TypeToken; import com.microsoft.azure.AzureServiceFuture; import com.microsoft.azure.CloudException; import com.microsoft.azure.ListOperationCallback; import com.microsoft.azure.Page; import com.microsoft.azure.PagedList; import com.microsoft.rest.ServiceCallback; import com.microsoft.rest.ServiceFuture; import com.microsoft.rest.ServiceResponse; import com.microsoft.rest.Validator; import java.io.IOException; import java.util.List; import okhttp3.ResponseBody; import retrofit2.http.Body; import retrofit2.http.GET; import retrofit2.http.Header; import retrofit2.http.Headers; import retrofit2.http.HTTP; import retrofit2.http.Path; import retrofit2.http.PUT; import retrofit2.http.Query; import retrofit2.http.Url; import retrofit2.Response; import rx.functions.Func1; import rx.Observable; /** * An instance of this class provides access to all the operations defined * in RouteTables. */ public class RouteTablesInner implements InnerSupportsGet<RouteTableInner>, InnerSupportsDelete<Void>, InnerSupportsListing<RouteTableInner> { /** The Retrofit service to perform REST calls. */ private RouteTablesService service; /** The service client containing this operation class. */ private NetworkManagementClientImpl client; /** * Initializes an instance of RouteTablesInner. * * @param retrofit the Retrofit instance built from a Retrofit Builder. 
     * @param client the instance of the service client containing this operation class.
     */
    public RouteTablesInner(Retrofit retrofit, NetworkManagementClientImpl client) {
        // Retrofit builds the proxy implementing the annotated REST interface below.
        this.service = retrofit.create(RouteTablesService.class);
        this.client = client;
    }

    /**
     * The interface defining all the services for RouteTables to be
     * used by Retrofit to perform actual REST calls.
     */
    interface RouteTablesService {
        // Long-running delete; polled to completion by AzureClient (see deleteWithServiceResponseAsync).
        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.network.RouteTables delete" })
        @HTTP(path = "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeTables/{routeTableName}", method = "DELETE", hasBody = true)
        Observable<Response<ResponseBody>> delete(@Path("resourceGroupName") String resourceGroupName, @Path("routeTableName") String routeTableName, @Path("subscriptionId") String subscriptionId, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

        // Single DELETE request that only starts the server-side operation.
        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.network.RouteTables beginDelete" })
        @HTTP(path = "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeTables/{routeTableName}", method = "DELETE", hasBody = true)
        Observable<Response<ResponseBody>> beginDelete(@Path("resourceGroupName") String resourceGroupName, @Path("routeTableName") String routeTableName, @Path("subscriptionId") String subscriptionId, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.network.RouteTables getByResourceGroup" })
        @GET("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeTables/{routeTableName}")
        Observable<Response<ResponseBody>> getByResourceGroup(@Path("resourceGroupName") String resourceGroupName, @Path("routeTableName") String routeTableName, @Path("subscriptionId") String subscriptionId, @Query("api-version") String apiVersion, @Query("$expand") String expand, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.network.RouteTables createOrUpdate" })
        @PUT("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeTables/{routeTableName}")
        Observable<Response<ResponseBody>> createOrUpdate(@Path("resourceGroupName") String resourceGroupName, @Path("routeTableName") String routeTableName, @Path("subscriptionId") String subscriptionId, @Body RouteTableInner parameters, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.network.RouteTables beginCreateOrUpdate" })
        @PUT("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeTables/{routeTableName}")
        Observable<Response<ResponseBody>> beginCreateOrUpdate(@Path("resourceGroupName") String resourceGroupName, @Path("routeTableName") String routeTableName, @Path("subscriptionId") String subscriptionId, @Body RouteTableInner parameters, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.network.RouteTables listByResourceGroup" })
        @GET("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeTables")
        Observable<Response<ResponseBody>> listByResourceGroup(@Path("resourceGroupName") String resourceGroupName, @Path("subscriptionId") String subscriptionId, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.network.RouteTables list" })
        @GET("subscriptions/{subscriptionId}/providers/Microsoft.Network/routeTables")
        Observable<Response<ResponseBody>> list(@Path("subscriptionId") String subscriptionId, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

        // Paging continuations: the request is issued directly against the nextLink URL, hence @Url and no path.
        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.network.RouteTables listByResourceGroupNext" })
        @GET
        Observable<Response<ResponseBody>> listByResourceGroupNext(@Url String nextUrl, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.network.RouteTables listNext" })
        @GET
        Observable<Response<ResponseBody>> listNext(@Url String nextUrl, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);
    }

    /**
     * Deletes the specified route table.
     *
     * @param resourceGroupName The name of the resource group.
     * @param routeTableName The name of the route table.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     */
    public void delete(String resourceGroupName, String routeTableName) {
        // last() blocks until the long-running operation's terminal poll result arrives.
        deleteWithServiceResponseAsync(resourceGroupName, routeTableName).toBlocking().last().body();
    }

    /**
     * Deletes the specified route table.
     *
     * @param resourceGroupName The name of the resource group.
     * @param routeTableName The name of the route table.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<Void> deleteAsync(String resourceGroupName, String routeTableName, final ServiceCallback<Void> serviceCallback) {
        return ServiceFuture.fromResponse(deleteWithServiceResponseAsync(resourceGroupName, routeTableName), serviceCallback);
    }

    /**
     * Deletes the specified route table.
     *
     * @param resourceGroupName The name of the resource group.
     * @param routeTableName The name of the route table.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable for the request
     */
    public Observable<Void> deleteAsync(String resourceGroupName, String routeTableName) {
        return deleteWithServiceResponseAsync(resourceGroupName, routeTableName).map(new Func1<ServiceResponse<Void>, Void>() {
            @Override
            public Void call(ServiceResponse<Void> response) {
                return response.body();
            }
        });
    }

    /**
     * Deletes the specified route table.
     *
     * @param resourceGroupName The name of the resource group.
     * @param routeTableName The name of the route table.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable for the request
     */
    public Observable<ServiceResponse<Void>> deleteWithServiceResponseAsync(String resourceGroupName, String routeTableName) {
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (routeTableName == null) {
            throw new IllegalArgumentException("Parameter routeTableName is required and cannot be null.");
        }
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        final String apiVersion = "2017-08-01";
        Observable<Response<ResponseBody>> observable = service.delete(resourceGroupName, routeTableName, this.client.subscriptionId(), apiVersion, this.client.acceptLanguage(), this.client.userAgent());
        // Long-running operation: AzureClient polls the DELETE until it reaches a terminal state.
        return client.getAzureClient().getPostOrDeleteResultAsync(observable, new TypeToken<Void>() { }.getType());
    }

    /**
     * Deletes the specified route table.
     *
     * @param resourceGroupName The name of the resource group.
     * @param routeTableName The name of the route table.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     */
    public void beginDelete(String resourceGroupName, String routeTableName) {
        // Only issues the initial DELETE; does not wait for the server-side operation to finish.
        beginDeleteWithServiceResponseAsync(resourceGroupName, routeTableName).toBlocking().single().body();
    }

    /**
     * Deletes the specified route table.
     *
     * @param resourceGroupName The name of the resource group.
     * @param routeTableName The name of the route table.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<Void> beginDeleteAsync(String resourceGroupName, String routeTableName, final ServiceCallback<Void> serviceCallback) {
        return ServiceFuture.fromResponse(beginDeleteWithServiceResponseAsync(resourceGroupName, routeTableName), serviceCallback);
    }

    /**
     * Deletes the specified route table.
     *
     * @param resourceGroupName The name of the resource group.
     * @param routeTableName The name of the route table.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable for the request
     */
    public Observable<Void> beginDeleteAsync(String resourceGroupName, String routeTableName) {
        return beginDeleteWithServiceResponseAsync(resourceGroupName, routeTableName).map(new Func1<ServiceResponse<Void>, Void>() {
            @Override
            public Void call(ServiceResponse<Void> response) {
                return response.body();
            }
        });
    }

    /**
     * Deletes the specified route table.
     *
     * @param resourceGroupName The name of the resource group.
     * @param routeTableName The name of the route table.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable for the request
     */
    public Observable<ServiceResponse<Void>> beginDeleteWithServiceResponseAsync(String resourceGroupName, String routeTableName) {
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (routeTableName == null) {
            throw new IllegalArgumentException("Parameter routeTableName is required and cannot be null.");
        }
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        final String apiVersion = "2017-08-01";
        return service.beginDelete(resourceGroupName, routeTableName, this.client.subscriptionId(), apiVersion, this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Void>>>() {
                @Override
                public Observable<ServiceResponse<Void>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<Void> clientResponse = beginDeleteDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }

    // Maps the raw HTTP response onto a typed ServiceResponse; 200/202/204 are success, everything else a CloudException.
    private ServiceResponse<Void> beginDeleteDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<Void, CloudException>newInstance(this.client.serializerAdapter())
                .register(204, new TypeToken<Void>() { }.getType())
                .register(200, new TypeToken<Void>() { }.getType())
                .register(202, new TypeToken<Void>() { }.getType())
                .registerError(CloudException.class)
                .build(response);
    }

    /**
     * Gets the specified route table.
     *
     * @param resourceGroupName The name of the resource group.
     * @param routeTableName The name of the route table.
* @throws IllegalArgumentException thrown if parameters fail the validation * @throws CloudException thrown if the request is rejected by server * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @return the RouteTableInner object if successful. */ public RouteTableInner getByResourceGroup(String resourceGroupName, String routeTableName) { return getByResourceGroupWithServiceResponseAsync(resourceGroupName, routeTableName).toBlocking().single().body(); } /** * Gets the specified route table. * * @param resourceGroupName The name of the resource group. * @param routeTableName The name of the route table. * @param serviceCallback the async ServiceCallback to handle successful and failed responses. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the {@link ServiceFuture} object */ public ServiceFuture<RouteTableInner> getByResourceGroupAsync(String resourceGroupName, String routeTableName, final ServiceCallback<RouteTableInner> serviceCallback) { return ServiceFuture.fromResponse(getByResourceGroupWithServiceResponseAsync(resourceGroupName, routeTableName), serviceCallback); } /** * Gets the specified route table. * * @param resourceGroupName The name of the resource group. * @param routeTableName The name of the route table. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the observable to the RouteTableInner object */ public Observable<RouteTableInner> getByResourceGroupAsync(String resourceGroupName, String routeTableName) { return getByResourceGroupWithServiceResponseAsync(resourceGroupName, routeTableName).map(new Func1<ServiceResponse<RouteTableInner>, RouteTableInner>() { @Override public RouteTableInner call(ServiceResponse<RouteTableInner> response) { return response.body(); } }); } /** * Gets the specified route table. * * @param resourceGroupName The name of the resource group. * @param routeTableName The name of the route table. 
* @throws IllegalArgumentException thrown if parameters fail the validation * @return the observable to the RouteTableInner object */ public Observable<ServiceResponse<RouteTableInner>> getByResourceGroupWithServiceResponseAsync(String resourceGroupName, String routeTableName) { if (resourceGroupName == null) { throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."); } if (routeTableName == null) { throw new IllegalArgumentException("Parameter routeTableName is required and cannot be null."); } if (this.client.subscriptionId() == null) { throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null."); } final String apiVersion = "2017-08-01"; final String expand = null; return service.getByResourceGroup(resourceGroupName, routeTableName, this.client.subscriptionId(), apiVersion, expand, this.client.acceptLanguage(), this.client.userAgent()) .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<RouteTableInner>>>() { @Override public Observable<ServiceResponse<RouteTableInner>> call(Response<ResponseBody> response) { try { ServiceResponse<RouteTableInner> clientResponse = getByResourceGroupDelegate(response); return Observable.just(clientResponse); } catch (Throwable t) { return Observable.error(t); } } }); } /** * Gets the specified route table. * * @param resourceGroupName The name of the resource group. * @param routeTableName The name of the route table. * @param expand Expands referenced resources. * @throws IllegalArgumentException thrown if parameters fail the validation * @throws CloudException thrown if the request is rejected by server * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @return the RouteTableInner object if successful. 
*/ public RouteTableInner getByResourceGroup(String resourceGroupName, String routeTableName, String expand) { return getByResourceGroupWithServiceResponseAsync(resourceGroupName, routeTableName, expand).toBlocking().single().body(); } /** * Gets the specified route table. * * @param resourceGroupName The name of the resource group. * @param routeTableName The name of the route table. * @param expand Expands referenced resources. * @param serviceCallback the async ServiceCallback to handle successful and failed responses. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the {@link ServiceFuture} object */ public ServiceFuture<RouteTableInner> getByResourceGroupAsync(String resourceGroupName, String routeTableName, String expand, final ServiceCallback<RouteTableInner> serviceCallback) { return ServiceFuture.fromResponse(getByResourceGroupWithServiceResponseAsync(resourceGroupName, routeTableName, expand), serviceCallback); } /** * Gets the specified route table. * * @param resourceGroupName The name of the resource group. * @param routeTableName The name of the route table. * @param expand Expands referenced resources. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the observable to the RouteTableInner object */ public Observable<RouteTableInner> getByResourceGroupAsync(String resourceGroupName, String routeTableName, String expand) { return getByResourceGroupWithServiceResponseAsync(resourceGroupName, routeTableName, expand).map(new Func1<ServiceResponse<RouteTableInner>, RouteTableInner>() { @Override public RouteTableInner call(ServiceResponse<RouteTableInner> response) { return response.body(); } }); } /** * Gets the specified route table. * * @param resourceGroupName The name of the resource group. * @param routeTableName The name of the route table. * @param expand Expands referenced resources. 
* @throws IllegalArgumentException thrown if parameters fail the validation * @return the observable to the RouteTableInner object */ public Observable<ServiceResponse<RouteTableInner>> getByResourceGroupWithServiceResponseAsync(String resourceGroupName, String routeTableName, String expand) { if (resourceGroupName == null) { throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."); } if (routeTableName == null) { throw new IllegalArgumentException("Parameter routeTableName is required and cannot be null."); } if (this.client.subscriptionId() == null) { throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null."); } final String apiVersion = "2017-08-01"; return service.getByResourceGroup(resourceGroupName, routeTableName, this.client.subscriptionId(), apiVersion, expand, this.client.acceptLanguage(), this.client.userAgent()) .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<RouteTableInner>>>() { @Override public Observable<ServiceResponse<RouteTableInner>> call(Response<ResponseBody> response) { try { ServiceResponse<RouteTableInner> clientResponse = getByResourceGroupDelegate(response); return Observable.just(clientResponse); } catch (Throwable t) { return Observable.error(t); } } }); } private ServiceResponse<RouteTableInner> getByResourceGroupDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException { return this.client.restClient().responseBuilderFactory().<RouteTableInner, CloudException>newInstance(this.client.serializerAdapter()) .register(200, new TypeToken<RouteTableInner>() { }.getType()) .registerError(CloudException.class) .build(response); } /** * Create or updates a route table in a specified resource group. * * @param resourceGroupName The name of the resource group. * @param routeTableName The name of the route table. 
     * @param parameters Parameters supplied to the create or update route table operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the RouteTableInner object if successful.
     */
    public RouteTableInner createOrUpdate(String resourceGroupName, String routeTableName, RouteTableInner parameters) {
        // last() blocks until the long-running PUT reaches a terminal poll result.
        return createOrUpdateWithServiceResponseAsync(resourceGroupName, routeTableName, parameters).toBlocking().last().body();
    }

    /**
     * Create or updates a route table in a specified resource group.
     *
     * @param resourceGroupName The name of the resource group.
     * @param routeTableName The name of the route table.
     * @param parameters Parameters supplied to the create or update route table operation.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<RouteTableInner> createOrUpdateAsync(String resourceGroupName, String routeTableName, RouteTableInner parameters, final ServiceCallback<RouteTableInner> serviceCallback) {
        return ServiceFuture.fromResponse(createOrUpdateWithServiceResponseAsync(resourceGroupName, routeTableName, parameters), serviceCallback);
    }

    /**
     * Create or updates a route table in a specified resource group.
     *
     * @param resourceGroupName The name of the resource group.
     * @param routeTableName The name of the route table.
     * @param parameters Parameters supplied to the create or update route table operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable for the request
     */
    public Observable<RouteTableInner> createOrUpdateAsync(String resourceGroupName, String routeTableName, RouteTableInner parameters) {
        return createOrUpdateWithServiceResponseAsync(resourceGroupName, routeTableName, parameters).map(new Func1<ServiceResponse<RouteTableInner>, RouteTableInner>() {
            @Override
            public RouteTableInner call(ServiceResponse<RouteTableInner> response) {
                return response.body();
            }
        });
    }

    /**
     * Create or updates a route table in a specified resource group.
     *
     * @param resourceGroupName The name of the resource group.
     * @param routeTableName The name of the route table.
     * @param parameters Parameters supplied to the create or update route table operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable for the request
     */
    public Observable<ServiceResponse<RouteTableInner>> createOrUpdateWithServiceResponseAsync(String resourceGroupName, String routeTableName, RouteTableInner parameters) {
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (routeTableName == null) {
            throw new IllegalArgumentException("Parameter routeTableName is required and cannot be null.");
        }
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (parameters == null) {
            throw new IllegalArgumentException("Parameter parameters is required and cannot be null.");
        }
        // Client-side structural validation of the request payload before it is sent.
        Validator.validate(parameters);
        final String apiVersion = "2017-08-01";
        Observable<Response<ResponseBody>> observable = service.createOrUpdate(resourceGroupName, routeTableName, this.client.subscriptionId(), parameters, apiVersion, this.client.acceptLanguage(), this.client.userAgent());
        // Long-running operation: AzureClient polls the PUT until it reaches a terminal state.
        return client.getAzureClient().getPutOrPatchResultAsync(observable, new TypeToken<RouteTableInner>() { }.getType());
    }

    /**
     * Create or updates a route table in a specified resource group.
     *
     * @param resourceGroupName The name of the resource group.
     * @param routeTableName The name of the route table.
     * @param parameters Parameters supplied to the create or update route table operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the RouteTableInner object if successful.
     */
    public RouteTableInner beginCreateOrUpdate(String resourceGroupName, String routeTableName, RouteTableInner parameters) {
        // Only issues the initial PUT; does not wait for provisioning to complete.
        return beginCreateOrUpdateWithServiceResponseAsync(resourceGroupName, routeTableName, parameters).toBlocking().single().body();
    }

    /**
     * Create or updates a route table in a specified resource group.
     *
     * @param resourceGroupName The name of the resource group.
     * @param routeTableName The name of the route table.
     * @param parameters Parameters supplied to the create or update route table operation.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<RouteTableInner> beginCreateOrUpdateAsync(String resourceGroupName, String routeTableName, RouteTableInner parameters, final ServiceCallback<RouteTableInner> serviceCallback) {
        return ServiceFuture.fromResponse(beginCreateOrUpdateWithServiceResponseAsync(resourceGroupName, routeTableName, parameters), serviceCallback);
    }

    /**
     * Create or updates a route table in a specified resource group.
     *
     * @param resourceGroupName The name of the resource group.
     * @param routeTableName The name of the route table.
     * @param parameters Parameters supplied to the create or update route table operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the RouteTableInner object
     */
    public Observable<RouteTableInner> beginCreateOrUpdateAsync(String resourceGroupName, String routeTableName, RouteTableInner parameters) {
        return beginCreateOrUpdateWithServiceResponseAsync(resourceGroupName, routeTableName, parameters).map(new Func1<ServiceResponse<RouteTableInner>, RouteTableInner>() {
            @Override
            public RouteTableInner call(ServiceResponse<RouteTableInner> response) {
                return response.body();
            }
        });
    }

    /**
     * Create or updates a route table in a specified resource group.
     *
     * @param resourceGroupName The name of the resource group.
     * @param routeTableName The name of the route table.
     * @param parameters Parameters supplied to the create or update route table operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the RouteTableInner object
     */
    public Observable<ServiceResponse<RouteTableInner>> beginCreateOrUpdateWithServiceResponseAsync(String resourceGroupName, String routeTableName, RouteTableInner parameters) {
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (routeTableName == null) {
            throw new IllegalArgumentException("Parameter routeTableName is required and cannot be null.");
        }
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (parameters == null) {
            throw new IllegalArgumentException("Parameter parameters is required and cannot be null.");
        }
        // Client-side structural validation of the request payload before it is sent.
        Validator.validate(parameters);
        final String apiVersion = "2017-08-01";
        return service.beginCreateOrUpdate(resourceGroupName, routeTableName, this.client.subscriptionId(), parameters, apiVersion, this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<RouteTableInner>>>() {
                @Override
                public Observable<ServiceResponse<RouteTableInner>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<RouteTableInner> clientResponse = beginCreateOrUpdateDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }

    // 200 (updated) and 201 (created) both deserialize to RouteTableInner; other statuses raise CloudException.
    private ServiceResponse<RouteTableInner> beginCreateOrUpdateDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<RouteTableInner, CloudException>newInstance(this.client.serializerAdapter())
                .register(200, new TypeToken<RouteTableInner>() { }.getType())
                .register(201, new TypeToken<RouteTableInner>() { }.getType())
                .registerError(CloudException.class)
                .build(response);
    }

    /**
     * Gets all route tables in a resource group.
     *
     * @param resourceGroupName The name of the resource group.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the PagedList&lt;RouteTableInner&gt; object if successful.
     */
    public PagedList<RouteTableInner> listByResourceGroup(final String resourceGroupName) {
        ServiceResponse<Page<RouteTableInner>> response = listByResourceGroupSinglePageAsync(resourceGroupName).toBlocking().single();
        // Subsequent pages are fetched lazily as the PagedList is iterated.
        return new PagedList<RouteTableInner>(response.body()) {
            @Override
            public Page<RouteTableInner> nextPage(String nextPageLink) {
                return listByResourceGroupNextSinglePageAsync(nextPageLink).toBlocking().single().body();
            }
        };
    }

    /**
     * Gets all route tables in a resource group.
     *
     * @param resourceGroupName The name of the resource group.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<List<RouteTableInner>> listByResourceGroupAsync(final String resourceGroupName, final ListOperationCallback<RouteTableInner> serviceCallback) {
        return AzureServiceFuture.fromPageResponse(
            listByResourceGroupSinglePageAsync(resourceGroupName),
            new Func1<String, Observable<ServiceResponse<Page<RouteTableInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<RouteTableInner>>> call(String nextPageLink) {
                    return listByResourceGroupNextSinglePageAsync(nextPageLink);
                }
            },
            serviceCallback);
    }

    /**
     * Gets all route tables in a resource group.
     *
     * @param resourceGroupName The name of the resource group.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;RouteTableInner&gt; object
     */
    public Observable<Page<RouteTableInner>> listByResourceGroupAsync(final String resourceGroupName) {
        return listByResourceGroupWithServiceResponseAsync(resourceGroupName)
            .map(new Func1<ServiceResponse<Page<RouteTableInner>>, Page<RouteTableInner>>() {
                @Override
                public Page<RouteTableInner> call(ServiceResponse<Page<RouteTableInner>> response) {
                    return response.body();
                }
            });
    }

    /**
     * Gets all route tables in a resource group.
     *
     * @param resourceGroupName The name of the resource group.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;RouteTableInner&gt; object
     */
    public Observable<ServiceResponse<Page<RouteTableInner>>> listByResourceGroupWithServiceResponseAsync(final String resourceGroupName) {
        return listByResourceGroupSinglePageAsync(resourceGroupName)
            .concatMap(new Func1<ServiceResponse<Page<RouteTableInner>>, Observable<ServiceResponse<Page<RouteTableInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<RouteTableInner>>> call(ServiceResponse<Page<RouteTableInner>> page) {
                    String nextPageLink = page.body().nextPageLink();
                    // A null nextLink means this is the last page; otherwise recurse for the rest.
                    if (nextPageLink == null) {
                        return Observable.just(page);
                    }
                    return Observable.just(page).concatWith(listByResourceGroupNextWithServiceResponseAsync(nextPageLink));
                }
            });
    }

    /**
     * Gets all route tables in a resource group.
     *
     * @param resourceGroupName The name of the resource group.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the PagedList&lt;RouteTableInner&gt; object wrapped in {@link ServiceResponse} if successful.
     */
    public Observable<ServiceResponse<Page<RouteTableInner>>> listByResourceGroupSinglePageAsync(final String resourceGroupName) {
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        final String apiVersion = "2017-08-01";
        return service.listByResourceGroup(resourceGroupName, this.client.subscriptionId(), apiVersion, this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<RouteTableInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<RouteTableInner>>> call(Response<ResponseBody> response) {
                    try {
                        // Re-wrap the concrete PageImpl as the Page interface expected by callers.
                        ServiceResponse<PageImpl<RouteTableInner>> result = listByResourceGroupDelegate(response);
                        return Observable.just(new ServiceResponse<Page<RouteTableInner>>(result.body(), result.response()));
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }

    // Deserializes a 200 response into a page of route tables; other statuses raise CloudException.
    private ServiceResponse<PageImpl<RouteTableInner>> listByResourceGroupDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<PageImpl<RouteTableInner>, CloudException>newInstance(this.client.serializerAdapter())
                .register(200, new TypeToken<PageImpl<RouteTableInner>>() { }.getType())
                .registerError(CloudException.class)
                .build(response);
    }

    /**
     * Gets all route tables in a subscription.
     *
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the PagedList&lt;RouteTableInner&gt; object if successful.
 */
public PagedList<RouteTableInner> list() {
    ServiceResponse<Page<RouteTableInner>> response = listSinglePageAsync().toBlocking().single();
    // Lazy pager: each nextPage() call blocks on a fetch of the following page.
    return new PagedList<RouteTableInner>(response.body()) {
        @Override
        public Page<RouteTableInner> nextPage(String nextPageLink) {
            return listNextSinglePageAsync(nextPageLink).toBlocking().single().body();
        }
    };
}

/**
 * Gets all route tables in a subscription.
 *
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the {@link ServiceFuture} object
 */
public ServiceFuture<List<RouteTableInner>> listAsync(final ListOperationCallback<RouteTableInner> serviceCallback) {
    return AzureServiceFuture.fromPageResponse(
        listSinglePageAsync(),
        // Invoked by the framework for each follow-up page link.
        new Func1<String, Observable<ServiceResponse<Page<RouteTableInner>>>>() {
            @Override
            public Observable<ServiceResponse<Page<RouteTableInner>>> call(String nextPageLink) {
                return listNextSinglePageAsync(nextPageLink);
            }
        },
        serviceCallback);
}

/**
 * Gets all route tables in a subscription.
 *
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the PagedList&lt;RouteTableInner&gt; object
 */
public Observable<Page<RouteTableInner>> listAsync() {
    // Strip the ServiceResponse wrapper, emitting only page bodies.
    return listWithServiceResponseAsync()
        .map(new Func1<ServiceResponse<Page<RouteTableInner>>, Page<RouteTableInner>>() {
            @Override
            public Page<RouteTableInner> call(ServiceResponse<Page<RouteTableInner>> response) {
                return response.body();
            }
        });
}

/**
 * Gets all route tables in a subscription.
 *
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the PagedList&lt;RouteTableInner&gt; object
 */
public Observable<ServiceResponse<Page<RouteTableInner>>> listWithServiceResponseAsync() {
    return listSinglePageAsync()
        // Emit the current page, then recursively chain any remaining pages.
        .concatMap(new Func1<ServiceResponse<Page<RouteTableInner>>, Observable<ServiceResponse<Page<RouteTableInner>>>>() {
            @Override
            public Observable<ServiceResponse<Page<RouteTableInner>>> call(ServiceResponse<Page<RouteTableInner>> page) {
                String nextPageLink = page.body().nextPageLink();
                if (nextPageLink == null) {
                    return Observable.just(page);
                }
                return Observable.just(page).concatWith(listNextWithServiceResponseAsync(nextPageLink));
            }
        });
}

/**
 * Gets all route tables in a subscription.
 *
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the PagedList&lt;RouteTableInner&gt; object wrapped in {@link ServiceResponse} if successful.
 */
public Observable<ServiceResponse<Page<RouteTableInner>>> listSinglePageAsync() {
    if (this.client.subscriptionId() == null) {
        throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
    }
    // API version is fixed by the generated client for this service release.
    final String apiVersion = "2017-08-01";
    return service.list(this.client.subscriptionId(), apiVersion, this.client.acceptLanguage(), this.client.userAgent())
        .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<RouteTableInner>>>>() {
            @Override
            public Observable<ServiceResponse<Page<RouteTableInner>>> call(Response<ResponseBody> response) {
                try {
                    ServiceResponse<PageImpl<RouteTableInner>> result = listDelegate(response);
                    return Observable.just(new ServiceResponse<Page<RouteTableInner>>(result.body(), result.response()));
                } catch (Throwable t) {
                    // Deserialization/HTTP errors are surfaced through the observable.
                    return Observable.error(t);
                }
            }
        });
}

// Maps the raw HTTP response: 200 deserializes to a page, anything else raises CloudException.
private ServiceResponse<PageImpl<RouteTableInner>> listDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
    return this.client.restClient().responseBuilderFactory().<PageImpl<RouteTableInner>, CloudException>newInstance(this.client.serializerAdapter())
        .register(200, new TypeToken<PageImpl<RouteTableInner>>() { }.getType())
        .registerError(CloudException.class)
        .build(response);
}

/**
 * Gets all route tables in a resource group.
 *
 * @param nextPageLink The NextLink from the previous successful call to List operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @throws CloudException thrown if the request is rejected by server
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
 * @return the PagedList&lt;RouteTableInner&gt; object if successful.
 */
public PagedList<RouteTableInner> listByResourceGroupNext(final String nextPageLink) {
    ServiceResponse<Page<RouteTableInner>> response = listByResourceGroupNextSinglePageAsync(nextPageLink).toBlocking().single();
    return new PagedList<RouteTableInner>(response.body()) {
        @Override
        public Page<RouteTableInner> nextPage(String nextPageLink) {
            return listByResourceGroupNextSinglePageAsync(nextPageLink).toBlocking().single().body();
        }
    };
}

/**
 * Gets all route tables in a resource group.
 *
 * @param nextPageLink The NextLink from the previous successful call to List operation.
 * @param serviceFuture the ServiceFuture object tracking the Retrofit calls
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the {@link ServiceFuture} object
 */
public ServiceFuture<List<RouteTableInner>> listByResourceGroupNextAsync(final String nextPageLink, final ServiceFuture<List<RouteTableInner>> serviceFuture, final ListOperationCallback<RouteTableInner> serviceCallback) {
    // NOTE(review): serviceFuture is part of the generated continuation signature but is
    // not consumed here; fromPageResponse builds its own future — confirm against AzureServiceFuture.
    return AzureServiceFuture.fromPageResponse(
        listByResourceGroupNextSinglePageAsync(nextPageLink),
        new Func1<String, Observable<ServiceResponse<Page<RouteTableInner>>>>() {
            @Override
            public Observable<ServiceResponse<Page<RouteTableInner>>> call(String nextPageLink) {
                return listByResourceGroupNextSinglePageAsync(nextPageLink);
            }
        },
        serviceCallback);
}

/**
 * Gets all route tables in a resource group.
 *
 * @param nextPageLink The NextLink from the previous successful call to List operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the PagedList&lt;RouteTableInner&gt; object
 */
public Observable<Page<RouteTableInner>> listByResourceGroupNextAsync(final String nextPageLink) {
    // Strip the ServiceResponse wrapper, emitting only page bodies.
    return listByResourceGroupNextWithServiceResponseAsync(nextPageLink)
        .map(new Func1<ServiceResponse<Page<RouteTableInner>>, Page<RouteTableInner>>() {
            @Override
            public Page<RouteTableInner> call(ServiceResponse<Page<RouteTableInner>> response) {
                return response.body();
            }
        });
}

/**
 * Gets all route tables in a resource group.
 *
 * @param nextPageLink The NextLink from the previous successful call to List operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the PagedList&lt;RouteTableInner&gt; object
 */
public Observable<ServiceResponse<Page<RouteTableInner>>> listByResourceGroupNextWithServiceResponseAsync(final String nextPageLink) {
    return listByResourceGroupNextSinglePageAsync(nextPageLink)
        // Emit the current page, then recursively chain any remaining pages.
        .concatMap(new Func1<ServiceResponse<Page<RouteTableInner>>, Observable<ServiceResponse<Page<RouteTableInner>>>>() {
            @Override
            public Observable<ServiceResponse<Page<RouteTableInner>>> call(ServiceResponse<Page<RouteTableInner>> page) {
                String nextPageLink = page.body().nextPageLink();
                if (nextPageLink == null) {
                    return Observable.just(page);
                }
                return Observable.just(page).concatWith(listByResourceGroupNextWithServiceResponseAsync(nextPageLink));
            }
        });
}

/**
 * Gets all route tables in a resource group.
 *
 * @param nextPageLink The NextLink from the previous successful call to List operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the PagedList&lt;RouteTableInner&gt; object wrapped in {@link ServiceResponse} if successful.
 */
public Observable<ServiceResponse<Page<RouteTableInner>>> listByResourceGroupNextSinglePageAsync(final String nextPageLink) {
    if (nextPageLink == null) {
        throw new IllegalArgumentException("Parameter nextPageLink is required and cannot be null.");
    }
    String nextUrl = String.format("%s", nextPageLink);
    return service.listByResourceGroupNext(nextUrl, this.client.acceptLanguage(), this.client.userAgent())
        .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<RouteTableInner>>>>() {
            @Override
            public Observable<ServiceResponse<Page<RouteTableInner>>> call(Response<ResponseBody> response) {
                try {
                    ServiceResponse<PageImpl<RouteTableInner>> result = listByResourceGroupNextDelegate(response);
                    return Observable.just(new ServiceResponse<Page<RouteTableInner>>(result.body(), result.response()));
                } catch (Throwable t) {
                    // Deserialization/HTTP errors are surfaced through the observable.
                    return Observable.error(t);
                }
            }
        });
}

// Maps the raw HTTP response: 200 deserializes to a page, anything else raises CloudException.
private ServiceResponse<PageImpl<RouteTableInner>> listByResourceGroupNextDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
    return this.client.restClient().responseBuilderFactory().<PageImpl<RouteTableInner>, CloudException>newInstance(this.client.serializerAdapter())
        .register(200, new TypeToken<PageImpl<RouteTableInner>>() { }.getType())
        .registerError(CloudException.class)
        .build(response);
}

/**
 * Gets all route tables in a subscription.
 *
 * @param nextPageLink The NextLink from the previous successful call to List operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @throws CloudException thrown if the request is rejected by server
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
 * @return the PagedList&lt;RouteTableInner&gt; object if successful.
 */
public PagedList<RouteTableInner> listNext(final String nextPageLink) {
    ServiceResponse<Page<RouteTableInner>> response = listNextSinglePageAsync(nextPageLink).toBlocking().single();
    // Lazy pager: each nextPage() call blocks on a fetch of the following page.
    return new PagedList<RouteTableInner>(response.body()) {
        @Override
        public Page<RouteTableInner> nextPage(String nextPageLink) {
            return listNextSinglePageAsync(nextPageLink).toBlocking().single().body();
        }
    };
}

/**
 * Gets all route tables in a subscription.
 *
 * @param nextPageLink The NextLink from the previous successful call to List operation.
 * @param serviceFuture the ServiceFuture object tracking the Retrofit calls
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the {@link ServiceFuture} object
 */
public ServiceFuture<List<RouteTableInner>> listNextAsync(final String nextPageLink, final ServiceFuture<List<RouteTableInner>> serviceFuture, final ListOperationCallback<RouteTableInner> serviceCallback) {
    // NOTE(review): serviceFuture is part of the generated continuation signature but is
    // not consumed here; fromPageResponse builds its own future — confirm against AzureServiceFuture.
    return AzureServiceFuture.fromPageResponse(
        listNextSinglePageAsync(nextPageLink),
        new Func1<String, Observable<ServiceResponse<Page<RouteTableInner>>>>() {
            @Override
            public Observable<ServiceResponse<Page<RouteTableInner>>> call(String nextPageLink) {
                return listNextSinglePageAsync(nextPageLink);
            }
        },
        serviceCallback);
}

/**
 * Gets all route tables in a subscription.
 *
 * @param nextPageLink The NextLink from the previous successful call to List operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the PagedList&lt;RouteTableInner&gt; object
 */
public Observable<Page<RouteTableInner>> listNextAsync(final String nextPageLink) {
    // Strip the ServiceResponse wrapper, emitting only page bodies.
    return listNextWithServiceResponseAsync(nextPageLink)
        .map(new Func1<ServiceResponse<Page<RouteTableInner>>, Page<RouteTableInner>>() {
            @Override
            public Page<RouteTableInner> call(ServiceResponse<Page<RouteTableInner>> response) {
                return response.body();
            }
        });
}

/**
 * Gets all route tables in a subscription.
 *
 * @param nextPageLink The NextLink from the previous successful call to List operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the PagedList&lt;RouteTableInner&gt; object
 */
public Observable<ServiceResponse<Page<RouteTableInner>>> listNextWithServiceResponseAsync(final String nextPageLink) {
    return listNextSinglePageAsync(nextPageLink)
        // Emit the current page, then recursively chain any remaining pages.
        .concatMap(new Func1<ServiceResponse<Page<RouteTableInner>>, Observable<ServiceResponse<Page<RouteTableInner>>>>() {
            @Override
            public Observable<ServiceResponse<Page<RouteTableInner>>> call(ServiceResponse<Page<RouteTableInner>> page) {
                String nextPageLink = page.body().nextPageLink();
                if (nextPageLink == null) {
                    return Observable.just(page);
                }
                return Observable.just(page).concatWith(listNextWithServiceResponseAsync(nextPageLink));
            }
        });
}

/**
 * Gets all route tables in a subscription.
 *
 * @param nextPageLink The NextLink from the previous successful call to List operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the PagedList&lt;RouteTableInner&gt; object wrapped in {@link ServiceResponse} if successful.
 */
public Observable<ServiceResponse<Page<RouteTableInner>>> listNextSinglePageAsync(final String nextPageLink) {
    if (nextPageLink == null) {
        throw new IllegalArgumentException("Parameter nextPageLink is required and cannot be null.");
    }
    String nextUrl = String.format("%s", nextPageLink);
    return service.listNext(nextUrl, this.client.acceptLanguage(), this.client.userAgent())
        .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<RouteTableInner>>>>() {
            @Override
            public Observable<ServiceResponse<Page<RouteTableInner>>> call(Response<ResponseBody> response) {
                try {
                    ServiceResponse<PageImpl<RouteTableInner>> result = listNextDelegate(response);
                    return Observable.just(new ServiceResponse<Page<RouteTableInner>>(result.body(), result.response()));
                } catch (Throwable t) {
                    // Deserialization/HTTP errors are surfaced through the observable.
                    return Observable.error(t);
                }
            }
        });
}

// Maps the raw HTTP response: 200 deserializes to a page, anything else raises CloudException.
private ServiceResponse<PageImpl<RouteTableInner>> listNextDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
    return this.client.restClient().responseBuilderFactory().<PageImpl<RouteTableInner>, CloudException>newInstance(this.client.serializerAdapter())
        .register(200, new TypeToken<PageImpl<RouteTableInner>>() { }.getType())
        .registerError(CloudException.class)
        .build(response);
}

}
//===================================================================== // //File: $RCSfile: RenameTest.java,v $ //Version: $Revision: 1.11 $ //Modified: $Date: 2013/01/10 23:12:55 $ // //(c) Copyright 2004-2014 by Mentor Graphics Corp. All rights reserved. // //===================================================================== // Licensed under the Apache License, Version 2.0 (the "License"); you may not // use this file except in compliance with the License. You may obtain a copy // of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, WITHOUT // WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the // License for the specific language governing permissions and limitations under // the License. //===================================================================== package com.mentor.nucleus.bp.io.mdl.test.pkgcm; import org.eclipse.core.resources.IFile; import org.eclipse.core.resources.IFolder; import org.eclipse.core.resources.ResourcesPlugin; import org.eclipse.core.runtime.CoreException; import org.eclipse.core.runtime.IPath; import org.eclipse.core.runtime.NullProgressMonitor; import org.eclipse.core.runtime.Path; import org.eclipse.swt.widgets.Display; import org.eclipse.swt.widgets.TreeItem; import org.eclipse.ui.PartInitException; import org.eclipse.ui.PlatformUI; import org.eclipse.ui.ide.IDE; import com.mentor.nucleus.bp.core.Ooaofooa; import com.mentor.nucleus.bp.core.common.NonRootModelElement; import com.mentor.nucleus.bp.core.common.PersistableModelComponent; import com.mentor.nucleus.bp.core.common.PersistenceManager; import com.mentor.nucleus.bp.test.common.CanvasEditorUtils; import com.mentor.nucleus.bp.test.common.ExplorerUtil; import com.mentor.nucleus.bp.ui.graphics.editor.GraphicalEditor; public abstract class RenameTest extends PkgCMBaseTest { // array of all opened 
editors whose title can be effected by rename // of component under test protected Object[] openEditors; protected IPath compPath; protected NonRootModelElement meBeingTested; protected PersistableModelComponent pmcBeingTested; protected GraphicalEditor baseEditor = null; protected String compType; protected String oldCompName; protected int focusedEditor; protected String newName; protected int childrenCount; protected int oldProxyRefs; protected int expectedEditorCount; protected int editorToTest; public RenameTest(String project, String name) { super(project, name); } protected void initTest(String compType1, String compName, int focusedEditor1, boolean forceNotFocus, int expectedEditorCount) throws Exception { PersistableModelComponent component = getComponent(compType1, compName); focusedEditor = focusedEditor1; editorToTest = focusedEditor; assertNotNull("Could not find component; " + compType1 + ":" + compName, component); compType = compType1; if (!component.isLoaded()) component.load(new NullProgressMonitor()); meBeingTested = component.getRootModelElement(); pmcBeingTested = component; newName = component.getName() + "_n"; //save old name oldCompName=pmcBeingTested.getName(); compPath = component.getFullPath(); childrenCount = pmcBeingTested.getChildrenCount(); oldProxyRefs = getProxyRefrences(component); boolean setFocus = false; Display d = Display.getCurrent(); PlatformUI.getWorkbench().getActiveWorkbenchWindow().getActivePage() .closeAllEditors(false); if (focusedEditor==EditorTestUtilities.EDITOR_TYPE_NONE) { openEditors=new Object[0]; } else if(focusedEditor==EditorTestUtilities.EDITOR_TYPE_CANVAS) { // open diagram editor that represent rootMe as shape if(throughRN){ openEditors = openEditors(pmcBeingTested, expectedEditorCount); } else{ openEditors=EditorTestUtilities.openEditorsForRename(meBeingTested); } baseEditor= CanvasEditorUtils.openEditorWithShapeOf(meBeingTested); if(baseEditor==null) { setFocus=true; } else { m_wp.activate(baseEditor); } 
} else { // test asserted due to following statement are N/A, so can be // eliminated assertTrue("Editor not supported for " + meBeingTested.getClass(), EditorTestUtilities.isEditorSupported(meBeingTested, focusedEditor)); if(throughRN){ openEditors = openEditors(pmcBeingTested, expectedEditorCount); } else{ openEditors=EditorTestUtilities.openEditorsForRename(meBeingTested); } setFocus = true; } if (forceNotFocus) { // forceNotFocus will be used when these is only one editor and we // wana test it in not focused state setFocus = false; baseEditor = null; // graphics need not to be tested while editor // is not in focuse // Open any editor and set focus to that editor try { IDE.openEditor(PlatformUI.getWorkbench() .getActiveWorkbenchWindow().getActivePage(), getProject().getFile(".project")); } catch (PartInitException e) { fail("Can't open editor for .project file :" + e.getMessage()); } } if (setFocus) setFocus(openEditors, focusedEditor); assertEquals("All editors did not open", expectedEditorCount, openEditors.length); while (d.readAndDispatch()) ; } /** * * @param compType * TODO * @param compName * @param focusedEditor * @param forceNotFocus * some components has only one editor, this parameter will be * used to put it in background by open some other editor * @param expectedEditorCount * TODO * @throws Exception * @throws CoreException */ protected void performRenameComponentThruME(String compType, String compName, int focusedEditor, boolean forceNotFocus, int expectedEditorCount) throws Exception { throughRN = false; initTest(compType, compName, focusedEditor, forceNotFocus, expectedEditorCount); if (!toRunTests()) return; // this pass is for setup workspace next pass will execute actual test TreeItem item = selectMEInModelExplorer(compPath); //init variables from restored component meBeingTested = (NonRootModelElement) item.getData(); pmcBeingTested = PersistenceManager.getComponent( meBeingTested); Display d = Display.getCurrent(); while 
(d.readAndDispatch()) ; doRenameThruMExplorer(newName); while (Display.getCurrent().readAndDispatch()) ; performRenameChecks(); } protected void performRenameComponentThruMEGenerics(String compType, String compName, int focusedEditor, boolean forceNotFocus, int expectedEditorCount) throws Exception { throughRN = false; initTest(compType, compName, focusedEditor, forceNotFocus, expectedEditorCount); if (!toRunTests()) return; // this pass is for setup workspace next pass will execute actual test TreeItem item = selectMEInModelExplorer(compPath); //init variables from restored component meBeingTested = (NonRootModelElement) item.getData(); pmcBeingTested = PersistenceManager.getComponent( meBeingTested); Display d = Display.getCurrent(); while (d.readAndDispatch()) ; doRenameThruMExplorer(newName); while (Display.getCurrent().readAndDispatch()) ; performRenameChecksGenerics(); } protected void performRenameComponentThruCE(String compType, String compName, int expectedEditorCount) throws Exception { throughRN = false; initTest(compType, compName, EditorTestUtilities.EDITOR_TYPE_CANVAS, false, expectedEditorCount); // this pass is for setup workspace next pass will execute actual test if (!toRunTests()) return; while (Display.getCurrent().readAndDispatch()) ; //init variables from restored component pmcBeingTested=PersistenceManager.findComponent(compPath); meBeingTested=pmcBeingTested.getRootModelElement(); assertNotNull("Canvas Editor with shape couldn't opened.", baseEditor); CanvasEditorUtils.getShape(meBeingTested, true); while (Display.getCurrent().readAndDispatch()) ; doRenameThruCanvasEditor(newName); while (Display.getCurrent().readAndDispatch()) ; performRenameChecks(); } protected void performRenameComponentThruCEGenerics(String compType, String compName, int expectedEditorCount) throws Exception { throughRN = false; initTest(compType, compName, EditorTestUtilities.EDITOR_TYPE_CANVAS, false, expectedEditorCount); // this pass is for setup workspace next pass 
will execute actual test if (!toRunTests()) return; while (Display.getCurrent().readAndDispatch()) ; //init variables from restored component pmcBeingTested=PersistenceManager.findComponent(compPath); meBeingTested=pmcBeingTested.getRootModelElement(); assertNotNull("Canvas Editor with shape couldn't opened.", baseEditor); CanvasEditorUtils.getShape(meBeingTested, true); while (Display.getCurrent().readAndDispatch()) ; doRenameThruCanvasEditor(newName); while (Display.getCurrent().readAndDispatch()) ; performRenameChecksGenerics(); } protected void performRenameComponentThruRN(String compType, String compName, int focusedEditor, boolean forceNotFocus, int expectedEditorCount) throws Exception { throughRN = true; initTest(compType, compName, focusedEditor, forceNotFocus, expectedEditorCount); if (!toRunTests()) return; // this pass is for setup workspace next pass will execute actual test //init variables from restored component pmcBeingTested=PersistenceManager.findComponent(compPath); meBeingTested=pmcBeingTested.getRootModelElement(); dispatchEvents(0); IFile file = pmcBeingTested.getFile(); doRenameThruResNav(pmcBeingTested, newName); dispatchEvents(0); if(pmcBeingTested.isRootComponent()){ project=ResourcesPlugin.getWorkspace().getRoot().getProject(newName); meBeingTested=pmcBeingTested.getRootModelElement(); assertTrue(meBeingTested.getFile().exists()); }else{ file = project.getFile(compPath.removeFirstSegments(1).removeLastSegments(1).append(newName+ "." 
+Ooaofooa.MODELS_EXT)); IFolder folder = project.getFolder(compPath.removeFirstSegments(1).removeLastSegments(2).append(newName)); assertTrue("Renamed file does not exist: ", file.exists()); assertFalse("Parent Folder of File also renamed: ", folder.exists()); } if(compType.equals("SystemModel")){ checkTreeItemExistance(meBeingTested, newName); }else{ checkTreeItemDeletion(meBeingTested); TreeItem item = ExplorerUtil.findItem(newName); assertNull(item); } if (baseEditor != null) validateOrGenerateResults(baseEditor, generateResult); EditorTestUtilities.checkAllEditorClosed(openEditors); //Rename Back senerio start try { if(pmcBeingTested.isRootComponent()) { pmcBeingTested.getFile().getProject().move(new Path("/"+oldCompName), false, null); }else{ IPath newPath = file.getFullPath().removeLastSegments(1).append( oldCompName + "." + Ooaofooa.MODELS_EXT); file.move(newPath, false, new NullProgressMonitor()); } } catch (CoreException e) { fail("Could not rename file"); } dispatchEvents(0); pmcBeingTested=PersistenceManager.findComponent(compPath); TreeItem item = selectMEInModelExplorer(pmcBeingTested.getFullPath()); assertNotNull(item); } protected void performRenameComponentThruRNGenerics(String compType, String compName, int focusedEditor, boolean forceNotFocus, int expectedEditorCount) throws Exception { throughRN = true; initTest(compType, compName, focusedEditor, forceNotFocus, expectedEditorCount); if (!toRunTests()) return; // this pass is for setup workspace next pass will execute actual test //init variables from restored component pmcBeingTested=PersistenceManager.findComponent(compPath); meBeingTested=pmcBeingTested.getRootModelElement(); dispatchEvents(0); IFile file = pmcBeingTested.getFile(); doRenameThruResNav(pmcBeingTested, newName); dispatchEvents(0); if(pmcBeingTested.isRootComponent()){ project=ResourcesPlugin.getWorkspace().getRoot().getProject(newName); meBeingTested=pmcBeingTested.getRootModelElement(); 
assertTrue(meBeingTested.getFile().exists()); }else{ file = project.getFile(compPath.removeFirstSegments(1).removeLastSegments(1).append(newName+ "." +Ooaofooa.MODELS_EXT)); IFolder folder = project.getFolder(compPath.removeFirstSegments(1).removeLastSegments(2).append(newName)); assertTrue("Renamed file does not exist: ", file.exists()); assertFalse("Parent Folder of File also renamed: ", folder.exists()); } if(compType.equals("SystemModel")){ checkTreeItemExistance(meBeingTested, newName); }else{ checkTreeItemDeletion(meBeingTested); TreeItem item = ExplorerUtil.findItem(newName); assertNull(item); } if (baseEditor != null) validateOrGenerateResultsGenerics(baseEditor, generateResult); EditorTestUtilities.checkAllEditorClosed(openEditors); //Rename Back senerio start try { if(pmcBeingTested.isRootComponent()) { pmcBeingTested.getFile().getProject().move(new Path("/"+oldCompName), false, null); }else{ IPath newPath = file.getFullPath().removeLastSegments(1).append( oldCompName + "." + Ooaofooa.MODELS_EXT); file.move(newPath, false, new NullProgressMonitor()); } } catch (CoreException e) { fail("Could not rename file"); } dispatchEvents(0); pmcBeingTested=PersistenceManager.findComponent(compPath); TreeItem item = selectMEInModelExplorer(pmcBeingTested.getFullPath()); assertNotNull(item); } private void performRenameChecks() { performRenameChecks(openEditors, baseEditor, pmcBeingTested, newName); assertEquals("Children component count differ after rename", childrenCount, pmcBeingTested.getChildrenCount()); assertEquals("Not all proxy refrences updated!", oldProxyRefs, getProxyRefrences(pmcBeingTested)); } private void performRenameChecksGenerics() { performRenameChecksGenerics(openEditors, baseEditor, pmcBeingTested, newName); assertEquals("Children component count differ after rename", childrenCount, pmcBeingTested.getChildrenCount()); assertEquals("Not all proxy refrences updated!", oldProxyRefs, getProxyRefrences(pmcBeingTested)); } }
/* * CsvResultsTable.java * * Copyright 2006-2015 James F. Bowring and www.Earth-Time.org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.earthtime.UPb_Redux.reports.excelReports; import java.awt.Frame; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.io.PrintWriter; import javax.swing.JOptionPane; import org.earthtime.UPb_Redux.filters.CSVFileFilter; import org.earthtime.UPb_Redux.utilities.BrowserControl; import org.earthtime.utilities.FileHelper; /** * * @author James F. 
Bowring */ public class CsvResultsTable { private static String sampleName; private static String[][] reportFractions; /** * * @param mySampleName * @param myReportFractions */ public static void produceCSVResultsTable ( String mySampleName, String[][] myReportFractions ) { sampleName = mySampleName; reportFractions = myReportFractions; File excelFile = FileHelper.AllPlatformSaveAs( new Frame(), "Save Report Table as '.csv' File: *.csv", null, ".csv", sampleName + "_ReportTable" + ".csv", new CSVFileFilter() ); if ( excelFile != null ) { writeCSVReport( excelFile ); try { loadCSVFileAfterCreation( excelFile ); } catch (IOException iOException) { } } } /** * * @param csvFile */ public static void writeCSVReport ( File csvFile ) { int firstDataRow = Integer.parseInt( reportFractions[0][0] ); PrintWriter outputWriter = null; try { outputWriter = new PrintWriter( new FileWriter( csvFile ) ); // Sample outputWriter.write( sampleName + "\n" ); // category titles String catName = "Fraction"; String savedCatName = catName; // starts after Fraction column = 2 for (int c = 2; c < reportFractions[0].length; c ++) { catName = reportFractions[0][c].trim(); if ( ! 
catName.equalsIgnoreCase( savedCatName ) ) { outputWriter.write( catName + "," ); savedCatName = catName; } else { outputWriter.write( "," ); } } outputWriter.write( "\n" ); // column titles for (int row = 1; row < 4; row ++) { for (int c = 2; c < reportFractions[0].length; c ++) { // footnote if ( row == 3 ) { outputWriter.write( replaceUnicodes(reportFractions[row][c]) + " " + reportFractions[5][c] + "," ); } else { outputWriter.write( replaceUnicodes(reportFractions[row][c]) + "," ); } } outputWriter.write( "\n" ); } String saveAliquotName = ""; // reportRowAdvance accounts for 6 rows of header info int reportRowAdvance = 6 - firstDataRow; // footnotes start with no data and advance as rows are added int footNoteStartRow = firstDataRow; for (int row = firstDataRow - 1; row < reportFractions.length; row ++) { // check whether fraction is included if ( reportFractions[row][0].equalsIgnoreCase( "TRUE" ) ) { footNoteStartRow ++; // for each aliquot if ( ! reportFractions[row][1].equalsIgnoreCase( saveAliquotName ) ) { saveAliquotName = reportFractions[row][1]; footNoteStartRow ++; outputWriter.write( reportFractions[row][1] + "\n" ); reportRowAdvance ++; } // fraction data for (int c = 2; c < reportFractions[0].length; c ++) { outputWriter.write( reportFractions[row][c] + "," ); } outputWriter.write( "\n" ); } else { reportRowAdvance --; } } // write out footnotes outputWriter.write( "\n" ); outputWriter.write( "\n" ); for (int i = 0; i < reportFractions[6].length; i ++) { if ( ! 
reportFractions[6][i].equals( "" ) ) { // strip out footnote letter String[] footNote = reportFractions[6][i].split( "&" ); String footNoteLine = // " " // + footNote[0] // + " " // + footNote[1] + "\n"; outputWriter.write( replaceUnicodes(footNoteLine) ); footNoteStartRow ++; } } outputWriter.write( "\n" ); outputWriter.write( "\n" ); outputWriter.flush(); outputWriter.close(); } catch (IOException iOException) { } // try { // BrowserControl.displayURL( csvFile.getCanonicalPath() ); // } catch (IOException ex) { // } } private static String replaceUnicodes(String text){ String retVal = text; retVal = retVal.replace( "\u00B1", "+/-" ); retVal = retVal.replace( "\u03C3", "sigma" ); retVal = retVal.replace( "\u03c1", "rho" ); retVal = retVal.replace( "\u03BB", "lambda" ); retVal = retVal.replace( ",", " and " ); return retVal; } private static void loadCSVFileAfterCreation ( File csvFile ) throws IOException { JOptionPane jopt = new JOptionPane(); int userChoice = jopt.showConfirmDialog(// null,// "'.csv' file successfully generated."// + " Do you want to open the file?",// "Open a '.csv' file?",// JOptionPane.YES_NO_OPTION ); if ( userChoice == JOptionPane.YES_OPTION ) { BrowserControl.displayURL( csvFile.getCanonicalPath() ); } } /** * @param aSampleName the sampleName to set */ public static void setSampleName ( String aSampleName ) { sampleName = aSampleName; } /** * @param aReportFractions the reportFractions to set */ public static void setReportFractions ( String[][] aReportFractions ) { reportFractions = aReportFractions; } }
/* Copyright (c) 2008-2015, Avian Contributors Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. There is NO WARRANTY for this software. See license.txt for details. */ package avian; import java.util.Map; import java.util.Map.Entry; import java.util.AbstractSet; import java.util.Collection; import java.util.Iterator; import java.util.Collections; public class Data { public static int nextPowerOfTwo(int n) { int r = 1; while (r < n) r <<= 1; return r; } public static <V> boolean equal(V a, V b) { return a == null ? b == null : a.equals(b); } public static <T> T[] toArray(Collection collection, T[] array) { Class c = array.getClass().getComponentType(); if (array.length < collection.size()) { array = (T[]) java.lang.reflect.Array.newInstance(c, collection.size()); } int i = 0; for (Object o: collection) { if (c.isInstance(o)) { array[i++] = (T) o; } else { throw new ArrayStoreException(); } } return array; } public static String toString(Collection c) { StringBuilder sb = new StringBuilder(); sb.append("["); for (Iterator it = c.iterator(); it.hasNext();) { sb.append(it.next()); if (it.hasNext()) { sb.append(","); } } sb.append("]"); return sb.toString(); } public static String toString(Map m) { StringBuilder sb = new StringBuilder(); sb.append("{"); for (Iterator<Entry> it = m.entrySet().iterator(); it.hasNext();) { Entry e = it.next(); sb.append(e.getKey()) .append("=") .append(e.getValue()); if (it.hasNext()) { sb.append(","); } } sb.append("}"); return sb.toString(); } public interface EntryMap<K,V> { public int size(); public Entry<K,V> find(Object key); public Entry<K,V> remove(Object key); public void clear(); public Iterator<Entry<K,V>> iterator(); } public static class EntrySet<K, V> extends AbstractSet<Entry<K, V>> { private final EntryMap<K, V> map; public EntrySet(EntryMap<K, V> map) { this.map = 
map; } public int size() { return map.size(); } public boolean isEmpty() { return map.size() == 0; } public boolean contains(Object o) { return (o instanceof Entry<?,?>) && map.find(((Entry<?,?>)o).getKey()) != null; } public boolean add(Entry<K, V> e) { throw new UnsupportedOperationException(); } public boolean remove(Object o) { return (o instanceof Entry<?,?>) && map.remove(((Entry<?,?>) o).getKey()) != null; } public boolean remove(Entry<K, V> e) { return map.remove(e.getKey()) != null; } public Object[] toArray() { return toArray(new Object[size()]); } public <T> T[] toArray(T[] array) { return Data.toArray(this, array); } public void clear() { map.clear(); } public Iterator<Entry<K, V>> iterator() { return map.iterator(); } } public static class KeySet<K> extends AbstractSet<K> { private final EntryMap<K, ?> map; public KeySet(EntryMap<K, ?> map) { this.map = map; } public int size() { return map.size(); } public boolean isEmpty() { return map.size() == 0; } public boolean contains(Object key) { return map.find(key) != null; } public boolean add(K key) { throw new UnsupportedOperationException(); } public boolean remove(Object key) { return map.remove(key) != null; } public Object[] toArray() { return toArray(new Object[size()]); } public <T> T[] toArray(T[] array) { return Data.toArray(this, array); } public void clear() { map.clear(); } public Iterator<K> iterator() { return new KeyIterator(map.iterator()); } } public static class Values<K, V> implements Collection<V> { private final EntryMap<K, V> map; public Values(EntryMap<K, V> map) { this.map = map; } public int size() { return map.size(); } public boolean isEmpty() { return map.size() == 0; } public boolean contains(Object value) { for (Iterator<Entry<K, V>> it = map.iterator(); it.hasNext();) { if (equal(it.next().getValue(), value)) { return true; } } return false; } public boolean containsAll(Collection<?> c) { if (c == null) { throw new NullPointerException("collection is null"); } for 
(Iterator<?> it = c.iterator(); it.hasNext();) { if (! contains(it.next())) { return false; } } return true; } public boolean add(V value) { throw new UnsupportedOperationException(); } public boolean addAll(Collection<? extends V> collection) { throw new UnsupportedOperationException(); } public boolean remove(Object value) { for (Iterator<Entry<K, V>> it = map.iterator(); it.hasNext();) { if (equal(it.next().getValue(), value)) { it.remove(); return true; } } return false; } public boolean removeAll(Collection<?> c) { boolean changed = false; for (Iterator<Entry<K, V>> it = map.iterator(); it.hasNext();) { if (c.contains(it.next().getValue())) { it.remove(); changed = true; } } return changed; } public Object[] toArray() { return toArray(new Object[size()]); } public <T> T[] toArray(T[] array) { return Data.toArray(this, array); } public void clear() { map.clear(); } public Iterator<V> iterator() { return new ValueIterator(map.iterator()); } } public static class KeyIterator<K, V> implements Iterator<K> { private final Iterator<Entry<K, V>> it; public KeyIterator(Iterator<Entry<K, V>> it) { this.it = it; } public K next() { return it.next().getKey(); } public boolean hasNext() { return it.hasNext(); } public void remove() { it.remove(); } } public static class ValueIterator<K, V> implements Iterator<V> { private final Iterator<Entry<K, V>> it; public ValueIterator(Iterator<Entry<K, V>> it) { this.it = it; } public V next() { return it.next().getValue(); } public boolean hasNext() { return it.hasNext(); } public void remove() { it.remove(); } } }
package org.github.irengrig.fossil4idea.actions;

import com.intellij.openapi.actionSystem.AnAction;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.actionSystem.PlatformDataKeys;
import com.intellij.openapi.fileChooser.FileChooserDescriptor;
import com.intellij.openapi.fileChooser.FileChooserFactory;
import com.intellij.openapi.fileChooser.FileSaverDescriptor;
import com.intellij.openapi.fileChooser.FileSaverDialog;
import com.intellij.openapi.progress.PerformInBackgroundOption;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.progress.Task;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.DialogBuilder;
import com.intellij.openapi.ui.MessageType;
import com.intellij.openapi.ui.TextFieldWithBrowseButton;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.vcs.CheckoutProvider;
import com.intellij.openapi.vcs.VcsException;
import com.intellij.openapi.vcs.ui.VcsBalloonProblemNotifier;
import com.intellij.openapi.vcs.update.RefreshVFsSynchronously;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.VirtualFileWrapper;
import com.intellij.util.Consumer;
import org.github.irengrig.fossil4idea.FossilVcs;
import org.github.irengrig.fossil4idea.checkout.CheckoutUtil;
import org.jetbrains.annotations.NotNull;

import javax.swing.*;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.File;

/**
 * IDE action that clones a remote Fossil repository into a local repository file
 * and then opens (checks out) that repository into a local folder.
 *
 * Flow: a small dialog collects the remote URL, local repository file and checkout
 * folder; on OK the clone + checkout run as a background task and results are
 * reported via balloon notifications over the version-control view.
 */
public class CloneAndOpenAction extends AnAction {

  @Override
  public void actionPerformed(AnActionEvent anActionEvent) {
    // The action needs a project context; silently no-op when invoked without one.
    final Project project = PlatformDataKeys.PROJECT.getData(anActionEvent.getDataContext());
    if (project == null) return;
    executeMe(project, null);
  }

  /**
   * Shows the clone dialog and, on OK, performs clone + checkout in the background.
   *
   * @param project  current project, used for progress UI and notifications
   * @param listener optional checkout listener to notify once the checkout folder
   *                 exists; may be null (e.g. when triggered from the action menu)
   */
  public static void executeMe(final Project project, final CheckoutProvider.Listener listener) {
    final UIWorker uiWorker = new UIWorker();
    uiWorker.showDialog(project, new Runnable() {
      @Override
      public void run() {
        // Invoked only after the user confirmed the dialog with OK.
        ProgressManager.getInstance().run(new Task.Backgroundable(project, "Clone Fossil Repository", false,
            PerformInBackgroundOption.ALWAYS_BACKGROUND) {
          @Override
          public void run(@NotNull ProgressIndicator progressIndicator) {
            try {
              // Step 1: clone the remote repository into the chosen local repo file.
              progressIndicator.setText("Cloning Fossil Repository...");
              final String localRepoFile = uiWorker.getLocalRepoFile();
              new CheckoutUtil(project).cloneRepo(uiWorker.getUrl(), localRepoFile);
              VcsBalloonProblemNotifier.showOverVersionControlView(project,
                  "Fossil clone successful: " + localRepoFile, MessageType.INFO);
              // Respect cancellation between the two long-running steps.
              progressIndicator.checkCanceled();
              // Step 2: open (check out) the cloned repository into the target folder.
              progressIndicator.setText("Opening Fossil Repository...");
              final String checkoutPath = uiWorker.getLocalPath();
              final File target = new File(checkoutPath);
              new CheckoutUtil(project).checkout(new File(localRepoFile), target, null);
              VcsBalloonProblemNotifier.showOverVersionControlView(project,
                  "Fossil repository successfully opened: " + checkoutPath, MessageType.INFO);
              notifyListenerIfNeeded(target, listener);
            } catch (VcsException e) {
              // Both steps report failure the same way; the balloon carries the cause message.
              VcsBalloonProblemNotifier.showOverVersionControlView(project,
                  "Fossil clone and open failed: " + e.getMessage(), MessageType.ERROR);
            }
          }
        });
      }
    });
  }

  /**
   * Refreshes the checked-out directory in the VFS and then notifies the listener on
   * the EDT. Falls back to notifying immediately when the VFS cannot see the folder.
   */
  private static void notifyListenerIfNeeded(final File target, final CheckoutProvider.Listener listener) {
    if (listener != null) {
      final LocalFileSystem lfs = LocalFileSystem.getInstance();
      final VirtualFile vf = lfs.refreshAndFindFileByIoFile(target);
      if (vf != null) {
        // Asynchronous recursive refresh; notify only after it completes, on the EDT.
        vf.refresh(true, true, new Runnable() {
          public void run() {
            SwingUtilities.invokeLater(new Runnable() {
              @Override
              public void run() {
                notifyListener(listener, target);
              }
            });
          }
        });
      } else {
        notifyListener(listener, target);
      }
    }
  }

  /** Fires the standard checkout-completed callbacks for the Fossil VCS. */
  private static void notifyListener(CheckoutProvider.Listener listener, File target) {
    listener.directoryCheckedOut(target, FossilVcs.getVcsKey());
    listener.checkoutCompleted();
  }

  /**
   * Builds and shows the "Clone and Open" dialog and exposes the three user inputs:
   * remote URL, local repository file and local checkout folder.
   */
  private static class UIWorker {
    private TextFieldWithBrowseButton myLocalRepoFile;
    private TextFieldWithBrowseButton myLocalPath;
    private JTextField myUrlField;

    /**
     * Shows the modal dialog; runs callback only when the user presses OK.
     * The window is hidden before the callback so the UI does not block the flow.
     */
    public void showDialog(final Project project, final Runnable callback) {
      final DialogBuilder builder = new DialogBuilder(project);
      builder.setCenterPanel(createPanel(project, new Consumer<Boolean>() {
        @Override
        public void consume(Boolean aBoolean) {
          builder.setOkActionEnabled(aBoolean);
        }
      }));
      builder.addOkAction();
      builder.addCancelAction();
      builder.setDimensionServiceKey(getClass().getName());
      builder.setTitle("Clone and Open Fossil Repository");
      builder.setOkOperation(new Runnable() {
        @Override
        public void run() {
          builder.getWindow().setVisible(false);
          callback.run();
        }
      });
      builder.setPreferredFocusComponent(myUrlField);
      builder.show();
    }

    /** Checkout folder as typed by the user. */
    public String getLocalPath() {
      return myLocalPath.getText();
    }

    /** Remote repository URL as typed by the user. */
    public String getUrl() {
      return myUrlField.getText();
    }

    /** Local repository file as typed/chosen by the user. */
    public String getLocalRepoFile() {
      return myLocalRepoFile.getText();
    }

    /**
     * Builds the three-row form (URL / repository file / checkout folder).
     * NOTE(review): enableConsumer is never invoked here, so OK-button enabling is
     * never driven by input validation — presumably intended but unfinished; verify.
     */
    private JComponent createPanel(final Project project, final Consumer<Boolean> enableConsumer) {
      final JPanel main = new JPanel(new GridBagLayout());
      main.setMinimumSize(new Dimension(150, 50));
      final GridBagConstraints gbc = new GridBagConstraints();
      gbc.gridx = 0;
      gbc.gridy = 0;
      gbc.insets = new Insets(2,2,2,2);
      gbc.anchor = GridBagConstraints.NORTHWEST;

      // Row 1: remote URL (plain text field).
      main.add(new JLabel("Remote URL: "), gbc);
      myUrlField = new JTextField(50);
      gbc.gridx ++;
      gbc.fill = GridBagConstraints.HORIZONTAL;
      gbc.weightx = 1;
      main.add(myUrlField, gbc);

      // Row 2: local repository file, chosen through a save-file dialog.
      gbc.gridx = 0;
      gbc.gridy ++;
      gbc.weightx = 0;
      gbc.fill = GridBagConstraints.NONE;
      main.add(new JLabel("Local Repository File: "), gbc);
      myLocalRepoFile = new TextFieldWithBrowseButton();
      myLocalRepoFile.addActionListener(new ActionListener() {
        @Override
        public void actionPerformed(ActionEvent e) {
          final FileSaverDialog dialog = FileChooserFactory.getInstance().createSaveFileDialog(
              new FileSaverDescriptor("Fossil Clone", "Select local file"), project);
          // Seed the chooser from whatever is already typed: split into directory + name.
          final String path = FileUtil.toSystemIndependentName(myLocalRepoFile.getText().trim());
          final int idx = path.lastIndexOf("/");
          VirtualFile baseDir = idx == -1 ? project.getBaseDir() :
              (LocalFileSystem.getInstance().refreshAndFindFileByIoFile(new File(path.substring(0, idx))));
          // Fall back to the project base dir when the typed directory does not exist.
          baseDir = baseDir == null ? project.getBaseDir() : baseDir;
          final String name = idx == -1 ? path : path.substring(idx + 1);
          final VirtualFileWrapper fileWrapper = dialog.save(baseDir, name);
          if (fileWrapper != null) {
            myLocalRepoFile.setText(fileWrapper.getFile().getPath());
          }
        }
      });
      gbc.weightx = 1;
      gbc.gridx ++;
      gbc.fill = GridBagConstraints.HORIZONTAL;
      main.add(myLocalRepoFile, gbc);

      // Row 3: checkout folder, chosen through a directory chooser.
      gbc.gridx = 0;
      gbc.gridy ++;
      gbc.fill = GridBagConstraints.NONE;
      main.add(new JLabel("Local Checkout Folder: "), gbc);
      myLocalPath = new TextFieldWithBrowseButton();
      myLocalPath.addBrowseFolderListener("Select Checkout Folder", null, project,
          new FileChooserDescriptor(false, true, false, false, false, false));
      gbc.weightx = 1;
      gbc.gridx ++;
      gbc.fill = GridBagConstraints.HORIZONTAL;
      main.add(myLocalPath, gbc);
      return main;
    }
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.jena.sparql;

import org.apache.jena.datatypes.xsd.XSDDatatype ;
import org.apache.jena.shared.PrefixMapping ;
import org.apache.jena.shared.impl.PrefixMappingImpl ;
import org.apache.jena.sparql.util.Symbol ;
import org.apache.jena.system.JenaSystem ;
import org.apache.jena.vocabulary.OWL ;
import org.apache.jena.vocabulary.RDF ;
import org.apache.jena.vocabulary.RDFS ;

/**
 * Internal constants - configuration is in class ARQ
 */
public class ARQConstants
{
    // Ensure Jena subsystem initialization has run before any constant is used.
    static { JenaSystem.init() ; }

    /** The prefix of XQuery/XPath functions and operators */
    public static final String fnPrefix = "http://www.w3.org/2005/xpath-functions#" ;

    /** RDF namespace prefix */
    public static final String rdfPrefix = RDF.getURI() ;

    /** RDFS namespace prefix */
    public static final String rdfsPrefix = RDFS.getURI() ;

    /** OWL namespace prefix */
    public static final String owlPrefix = OWL.getURI() ;

    /** XSD namespace prefix */
    public static final String xsdPrefix = XSDDatatype.XSD+"#" ;

    /** The prefix of SPARQL functions and operators */
    public static final String fnSparql = "http://www.w3.org/ns/sparql#" ;

    /** The namespace of the XML results format */
    public static final String srxPrefix = "http://www.w3.org/2005/sparql-results#" ;

    /** XML namespace */
    public static final String XML_NS = "http://www.w3.org/XML/1998/namespace" ;

    /** XML Schema namespace */
    public static final String XML_SCHEMA_NS = "http://www.w3.org/2001/XMLSchema#" ;

    /** URI scheme used to load Java classes as extension points (functions etc.) */
    public static final String javaClassURIScheme = "java:" ;

    /** The ARQ function library URI space */
    public static final String ARQFunctionLibraryURI = "http://jena.apache.org/ARQ/function#" ;

    /** The ARQ function library URI space - old Jena2 name
     * @deprecated Use #ARQFunctionLibraryURI
     */
    @Deprecated
    public static final String ARQFunctionLibraryURI_Jena2 = "http://jena.hpl.hp.com/ARQ/function#" ;

    /** The ARQ property function library URI space */
    public static final String ARQPropertyFunctionLibraryURI = "http://jena.apache.org/ARQ/property#" ;

    /** The ARQ property function library URI space - old Jena2 name
     * @deprecated Use #ARQPropertyFunctionLibraryURI
     */
    @Deprecated
    public static final String ARQPropertyFunctionLibraryURI_Jena2 = "http://jena.hpl.hp.com/ARQ/property#" ;

    /** The ARQ procedure library URI space */
    public static final String ARQProcedureLibraryURI = "http://jena.apache.org/ARQ/procedure#" ;

    /** The ARQ function library */
    public static final String ARQFunctionLibrary = javaClassURIScheme+"org.apache.jena.sparql.function.library." ;

    /** The ARQ property function library */
    public static final String ARQPropertyFunctionLibrary = javaClassURIScheme+"org.apache.jena.sparql.pfunction.library." ;

    /** The ARQ procedure library */
    public static final String ARQProcedureLibrary = javaClassURIScheme+"org.apache.jena.sparql.procedure.library." ;

    /** Common prefixes */
    protected static final PrefixMapping globalPrefixMap = new PrefixMappingImpl() ;
    static {
        //globalPrefixMap.setNsPrefixes(PrefixMapping.Standard) ;
        globalPrefixMap.setNsPrefix("rdf",  rdfPrefix) ;
        globalPrefixMap.setNsPrefix("rdfs", rdfsPrefix) ;
        globalPrefixMap.setNsPrefix("xsd",  xsdPrefix) ;
        globalPrefixMap.setNsPrefix("owl" , owlPrefix) ;
        globalPrefixMap.setNsPrefix("fn" ,  fnPrefix) ;
        globalPrefixMap.setNsPrefix("afn",  ARQFunctionLibraryURI) ;
        globalPrefixMap.setNsPrefix("apf",  ARQPropertyFunctionLibraryURI) ;
    }
    /** Shared, mutable prefix map of well-known prefixes (rdf, rdfs, xsd, owl, fn, afn, apf). */
    public static PrefixMapping getGlobalPrefixMap()
    {
        return globalPrefixMap ;
    }

    /* Variable names and allocated variables.
     * NB Must agree with the variable parsing rules in SSE
     * Allocated variables use names that are not legal in SPARQL.
     * Examples include the "?" variable initial character.
     *
     * We need to allocate so clashes never occur within scopes.
     * Distinguished (named variables) and non-distinguished (anon variables, bNodes)
     *
     * SSE also allows some convenience forms of exactly these strings:
     *
     * See: ParseHandlerPlain.emitVar
     *
     * Naming:
     *   Named (distinguished) allocated variables start "?."
     *   Non-Distinguished, allocated variables start "??"
     *   Quad transform hidden vars: "?*"
     *
     * Scopes and usages:
     *   Global:
     *      allocVarMarker "?.."
     *      VarAlloc.getVarAllocator
     *   Query: Expressions and aggregates
     *   Parser: Used in turning blank nodes into variables in query patterns
     *     Via LabelToNodeMap ("??")
     *   Algebra Generator:
     *      PathCompiler ("??P") : Non-distinguished variables.
     *
     *  SSE
     *      "?"  short hand for "some variable" using ?0, ?1, ?2 naming (legal SPARQL names)
     *      "??" short hand for "some new anon variable"
     *      "?." short hand for "some new named variable"
     *
     *  See also sysVarAllocNamed and sysVarAllocAnon for symbols to identify in a context.
     */

    /** Marker for generated variables for non-distinguished in query patterns (??a etc) */
    public static final String allocVarAnonMarker = "?" ;

    /** Marker for general temporary variables (not blank node variables) */
    public static final String allocVarMarker = "." ;

    // Secondary marker for globally allocated variables.
    private static final String globalVar = "." ;

    /** Marker for variables replacing blank nodes in SPARQL Update patterns */
    public static final String allocVarBNodeToVar = "~" ;

    /** Marker for variables renamed to make variables hidden by scope have globally unique names */
    public static final String allocVarScopeHiding = "/" ;

    /** Marker for variables renamed to make variables hidden because of quad transformation */
    public static final String allocVarQuad = "*g" ;

    // Spare primary marker.
    //private static final String executionVar = "@" ;

    // These strings are without the leading "?"
    // Put each constant here and not in the place the variable allocator created.
    // Always 0, 1, 2, 3 after these prefixes.

    public static final String allocGlobalVarMarker = allocVarMarker+globalVar ;    // VarAlloc
    public static final String allocPathVariables   = allocVarAnonMarker+"P" ;      // PathCompiler
    public static final String allocQueryVariables  = allocVarMarker ;              // Query
    public static final String allocParserAnonVars  = allocVarAnonMarker ;          // LabelToNodeMap

    // SSE
    public static final String allocSSEUnamedVars   = "_" ;                 // ParseHandlerPlain - SSE token "?" - legal SPARQL
    public static final String allocSSEAnonVars     = allocVarAnonMarker ;  // ParseHandlerPlain - SSE token "??"
    public static final String allocSSENamedVars    = allocVarMarker ;      // ParseHandlerPlain - SSE token "?."

    /** Marker for system symbols */
    public static final String systemVarNS = "http://jena.apache.org/ARQ/system#" ;

    /** Context key for the query for the current query execution
     *  (may be null if was not created from a query string )
     */
    public static final Symbol sysCurrentQuery = Symbol.create(systemVarNS+"query") ;

    /** Context key for the OpExecutor to be used */
    public static final Symbol sysOpExecutorFactory = Symbol.create(systemVarNS+"opExecutorFactory") ;

    /** Context key for the optimizer factory to be used */
    public static final Symbol sysOptimizerFactory = Symbol.create(systemVarNS+"optimizerFactory") ;

    /** Context key for the optimizer used in this execution */
    public static final Symbol sysOptimizer = Symbol.create(systemVarNS+"optimizer") ;

    /** Context key for the dataset for the current query execution. */
    public static final Symbol sysCurrentDataset = Symbol.create(systemVarNS+"dataset") ;

    /** Context key for the dataset description (if any).
     *  See the <a href="http://www.w3.org/TR/sparql11-protocol">SPARQL protocol</a>.
     *  <p>
     *  A dataset description specified outside the query should override a dataset description
     *  in query and also the implicit dataset of a service. The order is:
     *  <ol>
     *  <li>Dataset description from the protocol</li>
     *  <li>Dataset description from the query (FROM/FROM NAMED)</li>
     *  <li>Dataset of the service</li>
     *  </ol>
     *  Use in other situations should reflect this design.
     *  The value of this key in a Context must be an object of type DatasetDescription.
     */
    public static final Symbol sysDatasetDescription = Symbol.create(systemVarNS+"datasetDescription") ;

    /** Context key for the algebra expression of the query execution after optimization */
    public static final Symbol sysCurrentAlgebra = Symbol.create(systemVarNS+"algebra") ;

//    /** Context key for the algebra execution engine of the query execution */
//    public static final Symbol sysCurrentOpExec   = Symbol.create(systemVarNS+"opExec") ;

    /** Context key for the current time of query execution */
    public static final Symbol sysCurrentTime = Symbol.create(systemVarNS+"now") ;

    /** Context key for ARQ version */
    public static final Symbol sysVersionARQ = Symbol.create(systemVarNS+"version/ARQ") ;

    /** Context key for Jena version */
    public static final Symbol sysVersionJena = Symbol.create(systemVarNS+"version/Jena") ;

    /** Context key for the execution-scoped named variable generator */
    public static final Symbol sysVarAllocNamed = Symbol.create(systemVarNS+"namedVarAlloc") ;

    /** Context key for the execution-scoped bNode variable generator */
    public static final Symbol sysVarAllocAnon = Symbol.create(systemVarNS+"namedVarAnon") ;

    /** Graphs forming the default graph (List&lt;String&gt;) (Dynamic dataset) */
    public static final Symbol symDatasetDefaultGraphs = SystemARQ.allocSymbol("datasetDefaultGraphs") ;

    /** Graphs forming the named graphs (List&lt;String&gt;) (Dynamic dataset) */
    public static final Symbol symDatasetNamedGraphs = SystemARQ.allocSymbol("datasetNamedGraphs") ;

    /** Context key for making all SELECT queries have DISTINCT applied, whether stated or not */
    public static final Symbol autoDistinct = SystemARQ.allocSymbol("autoDistinct") ;

    // Context keys : some here, some in ARQ - sort out

    /** The property function registry key */
    public static final Symbol registryPropertyFunctions = SystemARQ.allocSymbol("registryPropertyFunctions") ;

    /** The describe handler registry key */
    public static final Symbol registryDescribeHandlers = SystemARQ.allocSymbol("registryDescribeHandlers") ;

    /** The function library registry key */
    public static final Symbol registryFunctions = SystemARQ.allocSymbol("registryFunctions") ;

    /** The procedure registry key */
    public static final Symbol registryProcedures = SystemARQ.allocSymbol("registryProcedures") ;

    /** The extension library registry key */
    public static final Symbol registryExtensions = SystemARQ.allocSymbol("registryExtensions") ;
}
/**
 * Copyright 2014 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package rx.exceptions;

import static org.junit.Assert.*;

import java.io.*;
import java.util.*;

import org.junit.*;

import rx.exceptions.CompositeException.CompositeExceptionCausalChain;

/**
 * Tests for {@link CompositeException}: flattening and de-duplication of nested
 * composites, absence of circular cause references, cause-chain stitching and
 * message formatting.
 */
public class CompositeExceptionTest {

    // A small pre-built cause chain: ex3 -> ex2 -> ex1.
    private final Throwable ex1 = new Throwable("Ex1");
    private final Throwable ex2 = new Throwable("Ex2", ex1);
    private final Throwable ex3 = new Throwable("Ex3", ex2);

    public CompositeExceptionTest() {
    }

    /** A fresh composite containing ex1, ex2 and ex3 (in that order). */
    private CompositeException getNewCompositeExceptionWithEx123() {
        List<Throwable> throwables = new ArrayList<Throwable>();
        throwables.add(ex1);
        throwables.add(ex2);
        throwables.add(ex3);
        return new CompositeException(throwables);
    }

    /**
     * Shared post-conditions used by most tests (previously duplicated in every
     * test method): print both stack traces so regressions are visible in the
     * build log, then assert the flattened exception count, the absence of
     * circular cause references, and that a root cause is reachable.
     *
     * @param cex           composite under test
     * @param expectedCount expected number of flattened, de-duplicated exceptions
     */
    private static void assertCompositeContract(CompositeException cex, int expectedCount) {
        System.err.println("----------------------------- print composite stacktrace");
        cex.printStackTrace();
        assertEquals(expectedCount, cex.getExceptions().size());
        assertNoCircularReferences(cex);
        assertNotNull(getRootCause(cex));
        System.err.println("----------------------------- print cause stacktrace");
        cex.getCause().printStackTrace();
    }

    @Test(timeout = 1000)
    public void testMultipleWithSameCause() {
        // Three siblings sharing one root cause must not create circular references.
        Throwable rootCause = new Throwable("RootCause");
        Throwable e1 = new Throwable("1", rootCause);
        Throwable e2 = new Throwable("2", rootCause);
        Throwable e3 = new Throwable("3", rootCause);
        CompositeException ce = new CompositeException(Arrays.asList(e1, e2, e3));
        assertCompositeContract(ce, 3);
    }

    @Test(timeout = 1000)
    public void testCompositeExceptionFromParentThenChild() {
        assertCompositeContract(new CompositeException(Arrays.asList(ex1, ex2)), 2);
    }

    @Test(timeout = 1000)
    public void testCompositeExceptionFromChildThenParent() {
        assertCompositeContract(new CompositeException(Arrays.asList(ex2, ex1)), 2);
    }

    @Test(timeout = 1000)
    public void testCompositeExceptionFromChildAndComposite() {
        // A nested composite is flattened and duplicates removed: ex1 appears once.
        assertCompositeContract(
                new CompositeException(Arrays.asList(ex1, getNewCompositeExceptionWithEx123())), 3);
    }

    @Test(timeout = 1000)
    public void testCompositeExceptionFromCompositeAndChild() {
        assertCompositeContract(
                new CompositeException(Arrays.asList(getNewCompositeExceptionWithEx123(), ex1)), 3);
    }

    @Test(timeout = 1000)
    public void testCompositeExceptionFromTwoDuplicateComposites() {
        List<Throwable> exs = new ArrayList<Throwable>();
        exs.add(getNewCompositeExceptionWithEx123());
        exs.add(getNewCompositeExceptionWithEx123());
        // Two composites with identical contents flatten to the three distinct exceptions.
        assertCompositeContract(new CompositeException(exs), 3);
    }

    /**
     * This hijacks the Throwable.printStackTrace() output and puts it in a string, where we can look for
     * "CIRCULAR REFERENCE" (a String added by Throwable.printEnclosedStackTrace)
     */
    private static void assertNoCircularReferences(Throwable ex) {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        PrintStream printStream = new PrintStream(baos);
        ex.printStackTrace(printStream);
        assertFalse(baos.toString().contains("CIRCULAR REFERENCE"));
    }

    /** Walks the cause chain to its end; null when ex has no cause at all. */
    private static Throwable getRootCause(Throwable ex) {
        Throwable root = ex.getCause();
        if (root == null) {
            return null;
        }
        while (root.getCause() != null) {
            root = root.getCause();
        }
        return root;
    }

    @Test
    public void testNullCollection() {
        // Must tolerate a null list: neither getCause() nor printing may throw.
        CompositeException composite = new CompositeException((List<Throwable>) null);
        composite.getCause();
        composite.printStackTrace();
    }

    @Test
    public void testNullElement() {
        // Must tolerate a null element: neither getCause() nor printing may throw.
        CompositeException composite = new CompositeException(Collections.singletonList((Throwable) null));
        composite.getCause();
        composite.printStackTrace();
    }

    @Test(timeout = 1000)
    public void testCompositeExceptionWithUnsupportedInitCause() {
        // An element whose initCause() throws must not break cause-chain stitching.
        Throwable t = new Throwable() {
            /** */
            private static final long serialVersionUID = -3282577447436848385L;

            @Override
            public synchronized Throwable initCause(Throwable cause) {
                throw new UnsupportedOperationException();
            }
        };
        assertCompositeContract(new CompositeException(Arrays.asList(t, ex1)), 2);
    }

    @Test(timeout = 1000)
    public void testCompositeExceptionWithNullInitCause() {
        // An element whose initCause() returns null must not break cause-chain stitching.
        Throwable t = new Throwable("ThrowableWithNullInitCause") {
            /** */
            private static final long serialVersionUID = -7984762607894527888L;

            @Override
            public synchronized Throwable initCause(Throwable cause) {
                return null;
            }
        };
        assertCompositeContract(new CompositeException(Arrays.asList(t, ex1)), 2);
    }

    @Test
    public void messageCollection() {
        CompositeException compositeException = new CompositeException(Arrays.asList(ex1, ex3));
        assertEquals("2 exceptions occurred. ", compositeException.getMessage());
    }

    @Test
    public void messageVarargs() {
        CompositeException compositeException = new CompositeException(ex1, ex2, ex3);
        assertEquals("3 exceptions occurred. ", compositeException.getMessage());
    }

    @Test
    public void complexCauses() {
        Throwable e1 = new Throwable("1");
        Throwable e2 = new Throwable("2");
        e1.initCause(e2);

        Throwable e3 = new Throwable("3");
        Throwable e4 = new Throwable("4");
        e3.initCause(e4);

        Throwable e5 = new Throwable("5");
        Throwable e6 = new Throwable("6");
        e5.initCause(e6);

        CompositeException compositeException = new CompositeException(e1, e3, e5);
        Assert.assertTrue(compositeException.getCause() instanceof CompositeExceptionCausalChain);

        List<Throwable> causeChain = new ArrayList<Throwable>();
        Throwable cause = compositeException.getCause().getCause();
        while (cause != null) {
            causeChain.add(cause);
            cause = cause.getCause();
        }
        // The original relations
        //
        // e1 -> e2
        // e3 -> e4
        // e5 -> e6
        //
        // will be set to
        //
        // e1 -> e2 -> e3 -> e4 -> e5 -> e6
        assertEquals(Arrays.asList(e1, e2, e3, e4, e5, e6), causeChain);
    }
}
package com.fasterxml.jackson.databind;

import com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.annotation.PropertyAccessor;
import com.fasterxml.jackson.core.Base64Variant;
import com.fasterxml.jackson.databind.cfg.BaseSettings;
import com.fasterxml.jackson.databind.cfg.HandlerInstantiator;
import com.fasterxml.jackson.databind.cfg.MapperConfigBase;
import com.fasterxml.jackson.databind.introspect.ClassIntrospector;
import com.fasterxml.jackson.databind.introspect.VisibilityChecker;
import com.fasterxml.jackson.databind.jsontype.SubtypeResolver;
import com.fasterxml.jackson.databind.jsontype.TypeResolverBuilder;
import com.fasterxml.jackson.databind.ser.FilterProvider;
import com.fasterxml.jackson.databind.type.ClassKey;
import com.fasterxml.jackson.databind.type.TypeFactory;
import java.io.Serializable;
import java.text.DateFormat;
import java.util.Locale;
import java.util.Map;
import java.util.TimeZone;

/**
 * Serialization-side configuration container for Jackson databind.
 *
 * <p>All {@code with(...)}/{@code without(...)} methods follow a copy-on-write
 * pattern: they return {@code this} when nothing changes and otherwise build a
 * new {@code SerializationConfig} via one of the private copy constructors, so
 * instances can be shared safely once constructed.
 *
 * <p>NOTE(review): this file is decompiler output (see the JD-Core footer at the
 * bottom); parameter names ({@code paramInt1}, ...) and some raw types are
 * artifacts of decompilation, not the upstream Jackson source.
 */
public final class SerializationConfig extends MapperConfigBase<SerializationFeature, SerializationConfig>
        implements Serializable {

    private static final long serialVersionUID = 8849092838541724233L;

    /** Provider of property filters; may be {@code null} when no filters are registered. */
    protected final FilterProvider _filterProvider;

    /** Bitmask of enabled {@link SerializationFeature}s. */
    protected final int _serFeatures;

    // Global property-inclusion override; null means "unset" and
    // getSerializationInclusion() falls back to Include.ALWAYS.
    // NOTE(review): not final, unlike the sibling fields — presumably a
    // decompiler artifact; confirm against the upstream source.
    protected JsonInclude.Include _serializationInclusion = null;

    // Copy constructor replacing both feature bitmasks:
    // paramInt1 = mapper features (passed to super), paramInt2 = serialization features.
    private SerializationConfig(SerializationConfig paramSerializationConfig, int paramInt1, int paramInt2) {
        super(paramSerializationConfig, paramInt1);
        this._serFeatures = paramInt2;
        this._serializationInclusion = paramSerializationConfig._serializationInclusion;
        this._filterProvider = paramSerializationConfig._filterProvider;
    }

    // Copy constructor replacing only the serialization-inclusion setting.
    private SerializationConfig(SerializationConfig paramSerializationConfig, JsonInclude.Include paramInclude) {
        super(paramSerializationConfig);
        this._serFeatures = paramSerializationConfig._serFeatures;
        this._serializationInclusion = paramInclude;
        this._filterProvider = paramSerializationConfig._filterProvider;
    }

    // Copy constructor replacing the shared BaseSettings (used by _withBase).
    private SerializationConfig(SerializationConfig paramSerializationConfig, BaseSettings paramBaseSettings) {
        super(paramSerializationConfig, paramBaseSettings);
        this._serFeatures = paramSerializationConfig._serFeatures;
        this._serializationInclusion = paramSerializationConfig._serializationInclusion;
        this._filterProvider = paramSerializationConfig._filterProvider;
    }

    // Copy constructor replacing the subtype resolver.
    private SerializationConfig(SerializationConfig paramSerializationConfig, SubtypeResolver paramSubtypeResolver) {
        super(paramSerializationConfig, paramSubtypeResolver);
        this._serFeatures = paramSerializationConfig._serFeatures;
        this._serializationInclusion = paramSerializationConfig._serializationInclusion;
        this._filterProvider = paramSerializationConfig._filterProvider;
    }

    // Copy constructor replacing the filter provider.
    private SerializationConfig(SerializationConfig paramSerializationConfig, FilterProvider paramFilterProvider) {
        super(paramSerializationConfig);
        this._serFeatures = paramSerializationConfig._serFeatures;
        this._serializationInclusion = paramSerializationConfig._serializationInclusion;
        this._filterProvider = paramFilterProvider;
    }

    // Copy constructor replacing the active serialization view class.
    private SerializationConfig(SerializationConfig paramSerializationConfig, Class<?> paramClass) {
        super(paramSerializationConfig, paramClass);
        this._serFeatures = paramSerializationConfig._serFeatures;
        this._serializationInclusion = paramSerializationConfig._serializationInclusion;
        this._filterProvider = paramSerializationConfig._filterProvider;
    }

    // Copy constructor replacing the root name used for root-level wrapping.
    private SerializationConfig(SerializationConfig paramSerializationConfig, String paramString) {
        super(paramSerializationConfig, paramString);
        this._serFeatures = paramSerializationConfig._serFeatures;
        this._serializationInclusion = paramSerializationConfig._serializationInclusion;
        this._filterProvider = paramSerializationConfig._filterProvider;
    }

    // Copy constructor replacing the mixin-annotation mapping.
    protected SerializationConfig(SerializationConfig paramSerializationConfig, Map<ClassKey, Class<?>> paramMap) {
        super(paramSerializationConfig, paramMap);
        this._serFeatures = paramSerializationConfig._serFeatures;
        this._serializationInclusion = paramSerializationConfig._serializationInclusion;
        this._filterProvider = paramSerializationConfig._filterProvider;
    }

    /**
     * Primary constructor: serialization features start at their declared
     * defaults and no filter provider is set.
     */
    public SerializationConfig(BaseSettings paramBaseSettings, SubtypeResolver paramSubtypeResolver, Map<ClassKey, Class<?>> paramMap) {
        super(paramBaseSettings, paramSubtypeResolver, paramMap);
        this._serFeatures = collectFeatureDefaults(SerializationFeature.class);
        this._filterProvider = null;
    }

    // Shared helper for all with(...) variants that only change BaseSettings:
    // returns this when the settings object is unchanged (identity check).
    private final SerializationConfig _withBase(BaseSettings paramBaseSettings) {
        if (this._base == paramBaseSettings)
            return this;
        return new SerializationConfig(this, paramBaseSettings);
    }

    /**
     * Returns the configured introspector, or a no-op one when annotation
     * processing is disabled via {@link MapperFeature#USE_ANNOTATIONS}.
     */
    public final AnnotationIntrospector getAnnotationIntrospector() {
        if (isEnabled(MapperFeature.USE_ANNOTATIONS))
            return super.getAnnotationIntrospector();
        return AnnotationIntrospector.nopInstance();
    }

    /**
     * Returns the default visibility checker, narrowed to NONE for each
     * accessor kind whose auto-detection MapperFeature is disabled.
     */
    public final VisibilityChecker<?> getDefaultVisibilityChecker() {
        VisibilityChecker localVisibilityChecker = super.getDefaultVisibilityChecker();
        if (!isEnabled(MapperFeature.AUTO_DETECT_GETTERS))
            localVisibilityChecker = localVisibilityChecker.withGetterVisibility(JsonAutoDetect.Visibility.NONE);
        if (!isEnabled(MapperFeature.AUTO_DETECT_IS_GETTERS))
            localVisibilityChecker = localVisibilityChecker.withIsGetterVisibility(JsonAutoDetect.Visibility.NONE);
        if (!isEnabled(MapperFeature.AUTO_DETECT_FIELDS))
            localVisibilityChecker = localVisibilityChecker.withFieldVisibility(JsonAutoDetect.Visibility.NONE);
        return localVisibilityChecker;
    }

    /** Returns the registered filter provider, possibly {@code null}. */
    public final FilterProvider getFilterProvider() {
        return this._filterProvider;
    }

    /** Returns the raw bitmask of enabled serialization features. */
    public final int getSerializationFeatures() {
        return this._serFeatures;
    }

    /** Returns the configured inclusion, defaulting to {@code ALWAYS} when unset. */
    public final JsonInclude.Include getSerializationInclusion() {
        if (this._serializationInclusion != null)
            return this._serializationInclusion;
        return JsonInclude.Include.ALWAYS;
    }

    /** Full introspection of the given type for serialization purposes. */
    public final <T extends BeanDescription> T introspect(JavaType paramJavaType) {
        return getClassIntrospector().forSerialization(this, paramJavaType, this);
    }

    /** Introspection limited to class annotations (including inherited ones). */
    public final BeanDescription introspectClassAnnotations(JavaType paramJavaType) {
        return getClassIntrospector().forClassAnnotations(this, paramJavaType, this);
    }

    /** Introspection limited to annotations declared directly on the class. */
    public final BeanDescription introspectDirectClassAnnotations(JavaType paramJavaType) {
        return getClassIntrospector().forDirectClassAnnotations(this, paramJavaType, this);
    }

    /** Checks the feature's bit in the serialization-feature bitmask. */
    public final boolean isEnabled(SerializationFeature paramSerializationFeature) {
        return (this._serFeatures & paramSerializationFeature.getMask()) != 0;
    }

    public final String toString() {
        return "[SerializationConfig: flags=0x" + Integer.toHexString(this._serFeatures) + "]";
    }

    /**
     * Root-level wrapping is on when a non-empty root name is set explicitly,
     * otherwise when {@link SerializationFeature#WRAP_ROOT_VALUE} is enabled.
     */
    public final boolean useRootWrapping() {
        if (this._rootName != null)
            return this._rootName.length() > 0;
        return isEnabled(SerializationFeature.WRAP_ROOT_VALUE);
    }

    public final SerializationConfig with(Base64Variant paramBase64Variant) {
        return _withBase(this._base.with(paramBase64Variant));
    }

    public final SerializationConfig with(AnnotationIntrospector paramAnnotationIntrospector) {
        return _withBase(this._base.withAnnotationIntrospector(paramAnnotationIntrospector));
    }

    public final SerializationConfig with(PropertyNamingStrategy paramPropertyNamingStrategy) {
        return _withBase(this._base.withPropertyNamingStrategy(paramPropertyNamingStrategy));
    }

    /** Returns a config with the given serialization feature enabled. */
    public final SerializationConfig with(SerializationFeature paramSerializationFeature) {
        int i = this._serFeatures | paramSerializationFeature.getMask();
        if (i == this._serFeatures)
            return this;
        return new SerializationConfig(this, this._mapperFeatures, i);
    }

    /** Returns a config with the given serialization features enabled. */
    public final SerializationConfig with(SerializationFeature paramSerializationFeature, SerializationFeature[] paramArrayOfSerializationFeature) {
        int i = this._serFeatures | paramSerializationFeature.getMask();
        int j = paramArrayOfSerializationFeature.length;
        for (int k = 0; k < j; k++)
            i |= paramArrayOfSerializationFeature[k].getMask();
        if (i == this._serFeatures)
            return this;
        return new SerializationConfig(this, this._mapperFeatures, i);
    }

    public final SerializationConfig with(HandlerInstantiator paramHandlerInstantiator) {
        return _withBase(this._base.withHandlerInstantiator(paramHandlerInstantiator));
    }

    public final SerializationConfig with(ClassIntrospector paramClassIntrospector) {
        return _withBase(this._base.withClassIntrospector(paramClassIntrospector));
    }

    public final SerializationConfig with(VisibilityChecker<?> paramVisibilityChecker) {
        return _withBase(this._base.withVisibilityChecker(paramVisibilityChecker));
    }

    public final SerializationConfig with(SubtypeResolver paramSubtypeResolver) {
        if (paramSubtypeResolver == this._subtypeResolver)
            return this;
        return new SerializationConfig(this, paramSubtypeResolver);
    }

    public final SerializationConfig with(TypeResolverBuilder<?> paramTypeResolverBuilder) {
        return _withBase(this._base.withTypeResolverBuilder(paramTypeResolverBuilder));
    }

    public final SerializationConfig with(TypeFactory paramTypeFactory) {
        return _withBase(this._base.withTypeFactory(paramTypeFactory));
    }

    /**
     * Sets the date format; a null format re-enables timestamp output, a
     * non-null one disables it (formatted dates take precedence).
     */
    public final SerializationConfig with(DateFormat paramDateFormat) {
        SerializationConfig localSerializationConfig = new SerializationConfig(this, this._base.withDateFormat(paramDateFormat));
        if (paramDateFormat == null)
            return localSerializationConfig.with(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS);
        return localSerializationConfig.without(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS);
    }

    public final SerializationConfig with(Locale paramLocale) {
        return _withBase(this._base.with(paramLocale));
    }

    public final SerializationConfig with(TimeZone paramTimeZone) {
        return _withBase(this._base.with(paramTimeZone));
    }

    /** Returns a config with the given mapper features enabled. */
    public final SerializationConfig with(MapperFeature[] paramArrayOfMapperFeature) {
        int i = this._mapperFeatures;
        int j = paramArrayOfMapperFeature.length;
        for (int k = 0; k < j; k++)
            i |= paramArrayOfMapperFeature[k].getMask();
        if (i == this._mapperFeatures)
            return this;
        return new SerializationConfig(this, i, this._serFeatures);
    }

    public final SerializationConfig withAppendedAnnotationIntrospector(AnnotationIntrospector paramAnnotationIntrospector) {
        return _withBase(this._base.withAppendedAnnotationIntrospector(paramAnnotationIntrospector));
    }

    /** Returns a config with all the given serialization features enabled. */
    public final SerializationConfig withFeatures(SerializationFeature[] paramArrayOfSerializationFeature) {
        int i = this._serFeatures;
        int j = paramArrayOfSerializationFeature.length;
        for (int k = 0; k < j; k++)
            i |= paramArrayOfSerializationFeature[k].getMask();
        if (i == this._serFeatures)
            return this;
        return new SerializationConfig(this, this._mapperFeatures, i);
    }

    public final SerializationConfig withFilters(FilterProvider paramFilterProvider) {
        if (paramFilterProvider == this._filterProvider)
            return this;
        return new SerializationConfig(this, paramFilterProvider);
    }

    public final SerializationConfig withInsertedAnnotationIntrospector(AnnotationIntrospector paramAnnotationIntrospector) {
        return _withBase(this._base.withInsertedAnnotationIntrospector(paramAnnotationIntrospector));
    }

    /** Sets the explicit root name; handles null vs. equals comparison for no-op detection. */
    public final SerializationConfig withRootName(String paramString) {
        if (paramString == null) {
            if (this._rootName == null)
                return this;
        } else if (paramString.equals(this._rootName))
            return this;
        return new SerializationConfig(this, paramString);
    }

    public final SerializationConfig withSerializationInclusion(JsonInclude.Include paramInclude) {
        if (this._serializationInclusion == paramInclude)
            return this;
        return new SerializationConfig(this, paramInclude);
    }

    public final SerializationConfig withView(Class<?> paramClass) {
        if (this._view == paramClass)
            return this;
        return new SerializationConfig(this, paramClass);
    }

    public final SerializationConfig withVisibility(PropertyAccessor paramPropertyAccessor, JsonAutoDetect.Visibility paramVisibility) {
        return _withBase(this._base.withVisibility(paramPropertyAccessor, paramVisibility));
    }

    /** Returns a config with the given serialization feature disabled. */
    public final SerializationConfig without(SerializationFeature paramSerializationFeature) {
        int i = this._serFeatures & (0xFFFFFFFF ^ paramSerializationFeature.getMask());
        if (i == this._serFeatures)
            return this;
        return new SerializationConfig(this, this._mapperFeatures, i);
    }

    /** Returns a config with the given serialization features disabled. */
    public final SerializationConfig without(SerializationFeature paramSerializationFeature, SerializationFeature[] paramArrayOfSerializationFeature) {
        int i = this._serFeatures & (0xFFFFFFFF ^ paramSerializationFeature.getMask());
        int j = paramArrayOfSerializationFeature.length;
        for (int k = 0; k < j; k++)
            i &= (0xFFFFFFFF ^ paramArrayOfSerializationFeature[k].getMask());
        if (i == this._serFeatures)
            return this;
        return new SerializationConfig(this, this._mapperFeatures, i);
    }

    /** Returns a config with the given mapper features disabled. */
    public final SerializationConfig without(MapperFeature[] paramArrayOfMapperFeature) {
        int i = this._mapperFeatures;
        int j = paramArrayOfMapperFeature.length;
        for (int k = 0; k < j; k++)
            i &= (0xFFFFFFFF ^ paramArrayOfMapperFeature[k].getMask());
        if (i == this._mapperFeatures)
            return this;
        return new SerializationConfig(this, i, this._serFeatures);
    }

    /** Returns a config with all the given serialization features disabled. */
    public final SerializationConfig withoutFeatures(SerializationFeature[] paramArrayOfSerializationFeature) {
        int i = this._serFeatures;
        int j = paramArrayOfSerializationFeature.length;
        for (int k = 0; k < j; k++)
            i &= (0xFFFFFFFF ^ paramArrayOfSerializationFeature[k].getMask());
        if (i == this._serFeatures)
            return this;
        return new SerializationConfig(this, this._mapperFeatures, i);
    }
}

/* Location: /Users/vikas/Documents/Mhacks_Real_app/classes-dex2jar.jar
 * Qualified Name: com.fasterxml.jackson.databind.SerializationConfig
 * JD-Core Version: 0.6.2
 */
/**
 * OLAT - Online Learning and Training<br>
 * http://www.olat.org
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); <br>
 * you may not use this file except in compliance with the License.<br>
 * You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing,<br>
 * software distributed under the License is distributed on an "AS IS" BASIS, <br>
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
 * See the License for the specific language governing permissions and <br>
 * limitations under the License.
 * <p>
 * Copyright (c) since 2004 at Multimedia- & E-Learning Services (MELS),<br>
 * University of Zurich, Switzerland.
 * <p>
 */
package org.olat.dispatcher;

import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.util.Locale;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.olat.basesecurity.AuthHelper;
import org.olat.core.CoreSpringFactory;
import org.olat.core.dispatcher.Dispatcher;
import org.olat.core.dispatcher.DispatcherAction;
import org.olat.core.gui.UserRequest;
import org.olat.core.gui.Windows;
import org.olat.core.gui.components.Window;
import org.olat.core.gui.control.ChiefController;
import org.olat.core.gui.render.StringOutput;
import org.olat.core.gui.render.URLBuilder;
import org.olat.core.id.Identity;
import org.olat.core.id.context.BusinessControl;
import org.olat.core.id.context.BusinessControlFactory;
import org.olat.core.logging.OLog;
import org.olat.core.logging.Tracing;
import org.olat.core.util.UserSession;
import org.olat.core.util.WebappHelper;
import org.olat.core.util.i18n.I18nManager;
import org.olat.core.util.i18n.I18nModule;
import org.olat.login.LoginModule;
import org.olat.restapi.security.RestSecurityBean;
import org.olat.restapi.security.RestSecurityHelper;

/**
 * Description:<br>
 * Entry point for Resource URL's which are a replacement for the jumpIn / Go Repo style URL's. The assumption is, that the URL here set up from a list of
 * BusinessControls containing a (type/resource)name and an (type/resource)id of type long.</br> e.g.
 * [RepoyEntry:12323123][CourseNode:2341231456][message:123123][blablup:555555] which is mapped to</br>
 * /RepoyEntry/12323123/CourseNode/2341231456/message/123123/blablup/555555/</p> This dispatcher does the reverse mapping and creation of a list of BusinessControls which
 * can be used to activate/spawn the Controller. The same mechanism is used for lucene search engine and the activation of search results.
 * <p>
 * This dispatcher supports also a simple single sign-on mechanism (SSO). If an URL contains the parameter X-OLAT-TOKEN, the RestSecurityBean will be used to look up the
 * associated user. You can use the REST API to create such a X-OLAT-TOKEN or replace the RestSecurityBean with your own implementation that creates the tokens. Please
 * refer to the REST API documentation on how to create the X-OLAT-TOKEN <br />
 * Example: [RepoyEntry:12323123][CourseNode:2341231456][message:123123][blablup:555555]?X-OLAT-TOKEN=xyz
 * <P>
 * TODO:pb:2009-06-02: (1) Check for Authenticated Session, otherwise send over login page (2) UZHDisparcher has a security check for use of SSL -> introduce also here or
 * maybe bring the check into webapphelper.
 * <P>
 * Initial Date: 24.04.2009 <br>
 *
 * @author patrickb
 */
public class RESTDispatcher implements Dispatcher {
    private static final OLog log = Tracing.createLoggerFor(RESTDispatcher.class);

    /**
     * Decodes the REST-style URL into a business path, optionally authenticates
     * the user via an X-OLAT-TOKEN SSO token or guest/invitation login, and
     * redirects to the authenticated dispatcher.
     */
    @Override
    public void execute(final HttpServletRequest request, final HttpServletResponse response, final String uriPrefix) {
        //
        // create a ContextEntries String which can be used to create a BusinessControl -> move to
        //
        final String origUri = request.getRequestURI();
        String restPart = origUri.substring(uriPrefix.length());
        try {
            // "UTF8" is a JDK alias for UTF-8, so this decodes as UTF-8
            restPart = URLDecoder.decode(restPart, "UTF8");
        } catch (final UnsupportedEncodingException e) {
            log.error("Unsupported encoding", e);
        }
        // URL must consist of key/value pairs, i.e. an even number of segments
        final String[] split = restPart.split("/");
        if (split.length % 2 != 0) {
            // assert(split.length % 2 == 0);
            // The URL is not a valid business path
            DispatcherAction.sendBadRequest(origUri, response);
            log.warn("URL is not valid: " + restPart);
            return;
        }
        // Re-assemble the pairs into the bracketed business-path notation,
        // un-escaping "~~" back to "/" in path= keys.
        String businessPath = "";
        for (int i = 0; i < split.length; i = i + 2) {
            String key = split[i];
            if (key != null && key.startsWith("path=")) {
                key = key.replace("~~", "/");
            }
            final String value = split[i + 1];
            businessPath += "[" + key + ":" + value + "]";
        }
        if (log.isDebug()) {
            log.debug("REQUEST URI: " + origUri);
            log.debug("REQUEST PREFIX " + restPart);
            log.debug("calc buspath " + businessPath);
        }
        // check if the businesspath is valid
        try {
            final BusinessControl bc = BusinessControlFactory.getInstance().createFromString(businessPath);
            if (!bc.hasContextEntry()) {
                // The URL is not a valid business path
                DispatcherAction.sendBadRequest(origUri, response);
                return;
            }
        } catch (final Exception e) {
            DispatcherAction.sendBadRequest(origUri, response);
            log.warn("Error with business path: " + origUri, e);
            return;
        }
        //
        // create the olat ureq and get an associated main window to spawn the "tab"
        //
        final UserSession usess = UserSession.getUserSession(request);
        UserRequest ureq = null;
        try {
            // upon creation URL is checked for
            ureq = new UserRequest(uriPrefix, request, response);
        } catch (final NumberFormatException nfe) {
            // MODE could not be decoded
            // typically if robots with wrong urls hit the system
            // or user have bookmarks
            // or authors copy-pasted links to the content.
            // showing redscreens for non valid URL is wrong instead
            // a 404 message must be shown -> e.g. robots correct their links.
            if (log.isDebug()) {
                log.debug("Bad Request " + request.getPathInfo());
            }
            DispatcherAction.sendBadRequest(request.getPathInfo(), response);
            return;
        }
        // XX:GUIInterna.setLoadPerformanceMode(ureq);
        // Do auto-authenticate if url contains a X-OLAT-TOKEN Single-Sign-On REST-Token
        final String xOlatToken = ureq.getParameter(RestSecurityHelper.SEC_TOKEN);
        if (xOlatToken != null) {
            // Lookup identity that is associated with this token
            final RestSecurityBean securityBean = (RestSecurityBean) CoreSpringFactory.getBean(RestSecurityBean.class);
            final Identity restIdentity = securityBean.getIdentity(xOlatToken);
            //
            if (log.isDebug()) {
                if (restIdentity == null) {
                    log.debug("Found SSO token " + RestSecurityHelper.SEC_TOKEN + " in url, but token is not bound to an identity");
                } else {
                    log.debug("Found SSO token " + RestSecurityHelper.SEC_TOKEN + " in url which is bound to identity::" + restIdentity.getName());
                }
            }
            //
            if (restIdentity != null) {
                // Test if the current OLAT session does already belong to this user.
                // The session could be an old session from another user or it could
                // belong to this user but miss the window object because so far it was
                // a head-less REST session. REST sessions initially have a small
                // timeout, however OLAT does set the standard session timeout on each
                // UserSession.getSession() request. This means, the normal session
                // timeout is set in the redirect request that will happen immediately
                // after the REST dispatcher finishes. No need to change it here.
                if (!usess.isAuthenticated() || !restIdentity.equalsByPersistableKey(usess.getIdentity())) {
                    // Re-authenticate user session for this user and start a fresh
                    // standard OLAT session
                    AuthHelper.doLogin(restIdentity, RestSecurityHelper.SEC_TOKEN, ureq);
                } else if (Windows.getWindows(usess).getAttribute("AUTHCHIEFCONTROLLER") == null) {
                    // Session is already available, but no main window (Head-less REST
                    // session). Only create the base chief controller and the window
                    AuthHelper.createAuthHome(ureq);
                }
            }
        }
        final boolean auth = usess.isAuthenticated();
        if (auth) {
            // Already authenticated (possibly via the SSO token above): stash the
            // business path and redirect into the authenticated dispatcher.
            usess.putEntryInNonClearedStore(AuthenticatedDispatcher.AUTHDISPATCHER_BUSINESSPATH, businessPath);
            final String url = getRedirectToURL(usess);
            DispatcherAction.redirectTo(response, url);
        } else {
            // prepare for redirect
            usess.putEntryInNonClearedStore(AuthenticatedDispatcher.AUTHDISPATCHER_BUSINESSPATH, businessPath);
            final String invitationAccess = ureq.getParameter(AuthenticatedDispatcher.INVITATION);
            if (invitationAccess != null && LoginModule.isInvitationEnabled()) {
                // try to log in as anonymous
                // use the language from the lang parameter if available, otherwise use the system default locale
                final Locale guestLoc = getLang(ureq);
                final int loginStatus = AuthHelper.doInvitationLogin(invitationAccess, ureq, guestLoc);
                if (loginStatus == AuthHelper.LOGIN_OK) {
                    // logged in as invited user, continue
                    final String url = getRedirectToURL(usess);
                    DispatcherAction.redirectTo(response, url);
                } else if (loginStatus == AuthHelper.LOGIN_NOTAVAILABLE) {
                    DispatcherAction.redirectToServiceNotAvailable(response);
                } else {
                    // error, redirect to login screen
                    DispatcherAction.redirectToDefaultDispatcher(response);
                }
            } else {
                final String guestAccess = ureq.getParameter(AuthenticatedDispatcher.GUEST);
                if (guestAccess == null || !LoginModule.isGuestLoginLinksEnabled()) {
                    DispatcherAction.redirectToDefaultDispatcher(response);
                    return;
                } else if (guestAccess.equals(AuthenticatedDispatcher.TRUE)) {
                    // try to log in as anonymous
                    // use the language from the lang parameter if available, otherwise use the system default locale
                    final Locale guestLoc = getLang(ureq);
                    final int loginStatus = AuthHelper.doAnonymousLogin(ureq, guestLoc);
                    if (loginStatus == AuthHelper.LOGIN_OK) {
                        // logged in as anonymous user, continue
                        final String url = getRedirectToURL(usess);
                        DispatcherAction.redirectTo(response, url);
                    } else if (loginStatus == AuthHelper.LOGIN_NOTAVAILABLE) {
                        DispatcherAction.redirectToServiceNotAvailable(response);
                    } else {
                        // error, redirect to login screen
                        DispatcherAction.redirectToDefaultDispatcher(response);
                    }
                }
                // NOTE(review): when guestAccess is present but not TRUE, the request
                // falls through without any redirect — presumably intentional
                // (response stays uncommitted); confirm against other dispatchers.
            }
        }
    }

    /**
     * Resolves the guest locale from the "lang" request parameter, falling back
     * to the system default locale when the parameter is absent.
     */
    private Locale getLang(final UserRequest ureq) {
        // try to log in as anonymous
        // use the language from the lang parameter if available, otherwise use the system default locale
        final String guestLang = ureq.getParameter("lang");
        Locale guestLoc;
        if (guestLang == null) {
            guestLoc = I18nModule.getDefaultLocale();
        } else {
            guestLoc = I18nManager.getInstance().getLocaleOrDefault(guestLang);
        }
        return guestLoc;
    }

    /**
     * Builds the redirect URL into the authenticated dispatcher for the main
     * window attached to this session.
     * NOTE(review): throws NPE if the "AUTHCHIEFCONTROLLER" window attribute is
     * missing — callers appear to guarantee an authenticated session with a
     * window beforehand; confirm.
     */
    private String getRedirectToURL(final UserSession usess) {
        final ChiefController cc = (ChiefController) Windows.getWindows(usess).getAttribute("AUTHCHIEFCONTROLLER");
        final Window w = cc.getWindow();
        final URLBuilder ubu = new URLBuilder("", w.getInstanceId(), String.valueOf(w.getTimestamp()), null);
        final StringOutput sout = new StringOutput(30);
        ubu.buildURI(sout, null, null);
        return WebappHelper.getServletContextPath() + DispatcherAction.PATH_AUTHENTICATED + sout.toString();
    }
}
/*
 * Copyright 2000-2017 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.codeInspection.offlineViewer;

import com.intellij.codeInsight.daemon.impl.CollectHighlightsUtil;
import com.intellij.codeInsight.daemon.impl.DaemonProgressIndicator;
import com.intellij.codeInsight.daemon.impl.analysis.HighlightingLevelManager;
import com.intellij.codeInsight.intention.IntentionAction;
import com.intellij.codeInspection.*;
import com.intellij.codeInspection.actions.RunInspectionAction;
import com.intellij.codeInspection.ex.GlobalInspectionToolWrapper;
import com.intellij.codeInspection.ex.InspectionToolWrapper;
import com.intellij.codeInspection.ex.LocalInspectionToolWrapper;
import com.intellij.codeInspection.ex.QuickFixWrapper;
import com.intellij.codeInspection.offline.OfflineProblemDescriptor;
import com.intellij.codeInspection.reference.RefElement;
import com.intellij.codeInspection.reference.RefEntity;
import com.intellij.codeInspection.reference.RefModule;
import com.intellij.codeInspection.ui.InspectionToolPresentation;
import com.intellij.lang.Language;
import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.psi.util.PsiUtilCore;
import com.intellij.util.ArrayUtil;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

/**
 * Resolves an {@link OfflineProblemDescriptor} (a problem loaded from exported
 * inspection results) back to a live {@link RefEntity} and
 * {@link CommonProblemDescriptor} in the current project, re-running the
 * inspection locally where possible.
 *
 * @author Dmitry Batkovich
 */
class OfflineDescriptorResolveResult {
  private static final Logger LOG = Logger.getInstance(OfflineDescriptorResolveResult.class);

  // Entity the offline problem resolved to; may be null when it no longer exists.
  private final RefEntity myResolvedEntity;
  // Live descriptor produced by re-running the tool; may be null when resolution failed.
  private final CommonProblemDescriptor myResolvedDescriptor;
  // volatile: excluded-state may be toggled from another thread than the reader's.
  private volatile boolean myExcluded;

  public OfflineDescriptorResolveResult(RefEntity resolvedEntity, CommonProblemDescriptor resolvedDescriptor) {
    myResolvedEntity = resolvedEntity;
    myResolvedDescriptor = resolvedDescriptor;
  }

  @Nullable
  public RefEntity getResolvedEntity() {
    return myResolvedEntity;
  }

  @Nullable
  public CommonProblemDescriptor getResolvedDescriptor() {
    return myResolvedDescriptor;
  }

  public boolean isExcluded() {
    return myExcluded;
  }

  public void setExcluded(boolean excluded) {
    myExcluded = excluded;
  }

  /**
   * Resolves the offline descriptor: looks up the referenced entity and
   * recreates the live problem descriptor inside a read action.
   */
  @NotNull
  static OfflineDescriptorResolveResult resolve(@NotNull OfflineProblemDescriptor descriptor,
                                                @NotNull InspectionToolWrapper wrapper,
                                                @NotNull InspectionToolPresentation presentation) {
    final RefEntity element = descriptor.getRefElement(presentation.getContext().getRefManager());
    final CommonProblemDescriptor resolvedDescriptor =
      ReadAction.compute(() -> createDescriptor(element, descriptor, wrapper, presentation));
    return new OfflineDescriptorResolveResult(element, resolvedDescriptor);
  }

  /**
   * Recreates a live descriptor for the offline one. Global tools are handled
   * via their shared local tool when available, otherwise via a synthetic
   * "rerun inspection" descriptor; local tools are re-run on the PSI element.
   * Returns null when the tool kind is unsupported or the element is gone.
   */
  @Nullable
  private static CommonProblemDescriptor createDescriptor(@Nullable RefEntity element,
                                                          @NotNull OfflineProblemDescriptor offlineDescriptor,
                                                          @NotNull InspectionToolWrapper toolWrapper,
                                                          @NotNull InspectionToolPresentation presentation) {
    if (toolWrapper instanceof GlobalInspectionToolWrapper) {
      // Prefer re-running the global tool's shared local counterpart, if any.
      final LocalInspectionToolWrapper localTool = ((GlobalInspectionToolWrapper)toolWrapper).getSharedLocalInspectionToolWrapper();
      if (localTool != null) {
        final CommonProblemDescriptor descriptor = createDescriptor(element, offlineDescriptor, localTool, presentation);
        if (descriptor != null) {
          return descriptor;
        }
      }
      // Fall back to a descriptor whose only fix re-runs the whole global inspection.
      return createRerunGlobalToolDescriptor((GlobalInspectionToolWrapper)toolWrapper, element, offlineDescriptor);
    }
    if (!(toolWrapper instanceof LocalInspectionToolWrapper)) return null;
    final InspectionManager inspectionManager = InspectionManager.getInstance(presentation.getContext().getProject());
    final OfflineProblemDescriptor offlineProblemDescriptor = offlineDescriptor;
    if (element instanceof RefElement) {
      final PsiElement psiElement = ((RefElement)element).getElement();
      if (psiElement != null) {
        // Run under a dedicated progress indicator; the local tool run is cancellable.
        ProblemDescriptor descriptor = ProgressManager.getInstance().runProcess(
          () -> runLocalTool(psiElement, inspectionManager, offlineProblemDescriptor, (LocalInspectionToolWrapper)toolWrapper),
          new DaemonProgressIndicator());
        if (descriptor != null) return descriptor;
      }
      return null;
    }
    // Non-PSI entity (e.g. module-level problem): build a plain descriptor from
    // the stored description, attaching quick fixes resolved from the hints.
    final List<String> hints = offlineProblemDescriptor.getHints();
    CommonProblemDescriptor descriptor = inspectionManager.createProblemDescriptor(offlineProblemDescriptor.getDescription(), (QuickFix)null);
    final QuickFix[] quickFixes = getFixes(descriptor, hints, presentation);
    if (quickFixes != null) {
      descriptor = inspectionManager.createProblemDescriptor(offlineProblemDescriptor.getDescription(), quickFixes);
    }
    return descriptor;
  }

  /**
   * Re-runs the local inspection over the element's text range and picks the
   * problem whose position matches the offline descriptor's problem index.
   * Returns null when the problem can no longer be reproduced.
   */
  private static ProblemDescriptor runLocalTool(@NotNull PsiElement psiElement,
                                                @NotNull InspectionManager inspectionManager,
                                                @NotNull OfflineProblemDescriptor offlineProblemDescriptor,
                                                @NotNull LocalInspectionToolWrapper toolWrapper) {
    PsiFile containingFile = psiElement.getContainingFile();
    final ProblemsHolder holder = new ProblemsHolder(inspectionManager, containingFile, false);
    final LocalInspectionTool localTool = toolWrapper.getTool();
    TextRange textRange = psiElement.getTextRange();
    LOG.assertTrue(textRange != null,
                   "text range muse be not null here; " +
                   "isValid = " + psiElement.isValid() + ", " +
                   "isPhysical = " + psiElement.isPhysical() + ", " +
                   "containingFile = " + containingFile.getName() + ", " +
                   "inspection = " + toolWrapper.getShortName());
    final int startOffset = textRange.getStartOffset();
    final int endOffset = textRange.getEndOffset();
    LocalInspectionToolSession session = new LocalInspectionToolSession(containingFile, startOffset, endOffset);
    final PsiElementVisitor visitor = localTool.buildVisitor(holder, false, session);
    localTool.inspectionStarted(session, false);
    final PsiElement[] elementsInRange = getElementsIntersectingRange(containingFile, startOffset, endOffset);
    for (PsiElement element : elementsInRange) {
      element.accept(visitor);
    }
    localTool.inspectionFinished(session, holder);
    if (holder.hasResults()) {
      final List<ProblemDescriptor> list = holder.getResults();
      // Match by ordinal: the offline index counts only problems attached to
      // this element (or any problem when the element is the whole file).
      final int idx = offlineProblemDescriptor.getProblemIndex();
      int curIdx = 0;
      for (ProblemDescriptor descriptor : list) {
        final PsiNamedElement member = localTool.getProblemElement(descriptor.getPsiElement());
        if (psiElement instanceof PsiFile || member != null && member.equals(psiElement)) {
          if (curIdx == idx) {
            return descriptor;
          }
          curIdx++;
        }
      }
    }
    return null;
  }

  /**
   * Collects the PSI elements of every inspectable language root that fall
   * within [startOffset, endOffset), preserving encounter order.
   */
  @NotNull
  private static PsiElement[] getElementsIntersectingRange(PsiFile file, final int startOffset, final int endOffset) {
    final FileViewProvider viewProvider = file.getViewProvider();
    final Set<PsiElement> result = new LinkedHashSet<>();
    for (Language language : viewProvider.getLanguages()) {
      final PsiFile psiRoot = viewProvider.getPsi(language);
      if (HighlightingLevelManager.getInstance(file.getProject()).shouldInspect(psiRoot)) {
        result.addAll(CollectHighlightsUtil.getElementsInRange(psiRoot, startOffset, endOffset, true));
      }
    }
    return PsiUtilCore.toPsiElementArray(result);
  }

  /**
   * Resolves quick fixes for the descriptor from the stored hint strings;
   * a null hint list means "single fix, no hint". Returns null when no fixes
   * could be resolved.
   */
  @Nullable
  private static LocalQuickFix[] getFixes(@NotNull CommonProblemDescriptor descriptor, List<String> hints, InspectionToolPresentation presentation) {
    final List<LocalQuickFix> fixes = new ArrayList<>(hints == null ? 1 : hints.size());
    if (hints == null) {
      addFix(descriptor, fixes, null, presentation);
    }
    else {
      for (String hint : hints) {
        addFix(descriptor, fixes, hint, presentation);
      }
    }
    return fixes.isEmpty() ? null : fixes.toArray(new LocalQuickFix[fixes.size()]);
  }

  // Adds the fix resolved from one hint; only QuickFixWrapper-backed intentions
  // carry an extractable LocalQuickFix, others are silently skipped.
  private static void addFix(@NotNull CommonProblemDescriptor descriptor, final List<LocalQuickFix> fixes, String hint, InspectionToolPresentation presentation) {
    final IntentionAction intentionAction = presentation.findQuickFixes(descriptor, hint);
    if (intentionAction instanceof QuickFixWrapper) {
      fixes.add(((QuickFixWrapper)intentionAction).getFix());
    }
  }

  /**
   * Builds a fallback descriptor for a global tool that could not be re-run
   * locally: its primary fix triggers a fresh run of the inspection. For
   * module-level entities, any hint-resolved fixes are appended as well.
   */
  private static CommonProblemDescriptor createRerunGlobalToolDescriptor(@NotNull GlobalInspectionToolWrapper wrapper,
                                                                         @Nullable RefEntity entity,
                                                                         OfflineProblemDescriptor offlineDescriptor) {
    QuickFix rerunFix = new QuickFix() {
      @Nls
      @NotNull
      @Override
      public String getFamilyName() {
        return "Rerun \'" + wrapper.getDisplayName() + "\' inspection";
      }

      @Override
      public void applyFix(@NotNull Project project, @NotNull CommonProblemDescriptor descriptor) {
        // Scope the rerun to the entity's file when it is still valid.
        VirtualFile file = null;
        if (entity != null && entity.isValid() && entity instanceof RefElement) {
          file = ((RefElement)entity).getPointer().getVirtualFile();
        }
        PsiFile psiFile = null;
        if (file != null) {
          psiFile = PsiManager.getInstance(project).findFile(file);
        }
        RunInspectionAction.runInspection(project, wrapper.getShortName(), file, null, psiFile);
      }

      @Override
      public boolean startInWriteAction() {
        return false;
      }
    };
    List<String> hints = offlineDescriptor.getHints();
    if (hints != null && entity instanceof RefModule) {
      List<QuickFix> fixes = hints.stream().map(hint -> wrapper.getTool().getQuickFix(hint)).filter(f -> f != null).collect(Collectors.toList());
      return new ModuleProblemDescriptorImpl(ArrayUtil.append(fixes.toArray(QuickFix.EMPTY_ARRAY), rerunFix), offlineDescriptor.getDescription(), ((RefModule)entity).getModule());
    }
    return new CommonProblemDescriptorImpl(new QuickFix[]{rerunFix}, offlineDescriptor.getDescription());
  }
}
package org.sagebionetworks.table.query.util;

import static org.sagebionetworks.repo.model.table.TableConstants.NULL_VALUE_KEYWORD;

import java.util.StringJoiner;

import org.apache.commons.lang3.StringUtils;
import org.sagebionetworks.repo.model.table.ColumnModel;
import org.sagebionetworks.repo.model.table.ColumnType;
import org.sagebionetworks.repo.model.table.FacetColumnRangeRequest;
import org.sagebionetworks.repo.model.table.FacetColumnRequest;
import org.sagebionetworks.repo.model.table.FacetColumnResult;
import org.sagebionetworks.repo.model.table.FacetColumnValuesRequest;
import org.sagebionetworks.repo.model.table.FacetType;
import org.sagebionetworks.util.ValidateArgument;

/**
 * A class representing requested facet columns that have been verified against its schema.
 * Each instance pairs a schema {@link ColumnModel} with an optional {@link FacetColumnRequest}
 * and precomputes the SQL search-condition fragment for that facet.
 * @author zdong
 *
 */
public class FacetRequestColumnModel {
	// Name of the column as declared in the schema.
	private String columnName;
	// Facet kind declared on the schema column (enumeration or range).
	private FacetType facetType;
	// The caller's facet filter for this column; null when no filter was requested.
	private FacetColumnRequest facetColumnRequest;
	// Precomputed SQL condition fragment; null when no filter applies.
	private String searchConditionString;
	// True when the schema column is a LIST-typed column (changes condition syntax to HAS).
	private boolean columnTypeIsList;

	/**
	 * Constructor.
	 * @param columnModel The original ColumnModel from which we derive the FacetRequestColumnModel
	 * @param facetColumnRequest The FacetColumnRequest describing the requested facet.
	 * @throws IllegalArgumentException if the request's column name does not match the model,
	 *         or if the request subtype does not match the model's declared FacetType
	 */
	public FacetRequestColumnModel(ColumnModel columnModel, FacetColumnRequest facetColumnRequest){
		ValidateArgument.required(columnModel, "columnModel");
		ValidateArgument.required(columnModel.getName(), "columnModel.name");
		ValidateArgument.required(columnModel.getFacetType(), "columnModel.facetType");
		ValidateArgument.required(columnModel.getColumnType(), "columnModel.columnType");
		ValidateArgument.requirement(facetColumnRequest == null || columnModel.getName().equals(facetColumnRequest.getColumnName()), "names of the columns must match");
		//checks to make sure that useless parameters are not passed in
		if(facetColumnRequest != null){
			if(FacetType.enumeration.equals(columnModel.getFacetType()) && !(facetColumnRequest instanceof FacetColumnValuesRequest)){
				throw new IllegalArgumentException("facetColumnRequest was not an instance of FacetColumnValuesRequest");
			}
			if(FacetType.range.equals(columnModel.getFacetType()) && !(facetColumnRequest instanceof FacetColumnRangeRequest)){
				throw new IllegalArgumentException("facetColumnRequest was not an instance of FacetColumnRangeRequest");
			}
		}
		this.columnName = columnModel.getName();
		this.facetType = columnModel.getFacetType();
		this.facetColumnRequest = facetColumnRequest;
		this.columnTypeIsList = ColumnTypeListMappings.isList(columnModel.getColumnType());
		// Precompute once; null request yields a null condition.
		this.searchConditionString = createFacetSearchConditionString(facetColumnRequest, this.columnTypeIsList);
	}

	public String getColumnName() {
		return this.columnName;
	}

	/**
	 * returns null if there were no filter requests associated with this column
	 * @return
	 */
	public FacetColumnRequest getFacetColumnRequest(){
		return this.facetColumnRequest;
	}

	/**
	 * returns null if no search conditions exist
	 * @return
	 */
	String getSearchConditionString(){
		return this.searchConditionString;
	}

	public FacetType getFacetType(){
		return this.facetType;
	}

	public boolean isColumnTypeIsList() {
		return columnTypeIsList;
	}

	/**
	 * Creates the search condition for a FacetColumnRequest.
	 * Dispatches on the concrete request subtype: value enumerations (single or
	 * list column) vs. numeric/date ranges.
	 * @param facetColumnRequest
	 * @return the search condition string, or null when no request was made
	 */
	static String createFacetSearchConditionString(FacetColumnRequest facetColumnRequest, boolean columnTypeIsList){
		if (facetColumnRequest == null){
			return null;
		}
		if (facetColumnRequest instanceof FacetColumnValuesRequest){
			if(columnTypeIsList){
				return createListColumnEnumerationSearchCondition((FacetColumnValuesRequest) facetColumnRequest);
			}else {
				return createSingleValueColumnEnumerationSearchCondition((FacetColumnValuesRequest) facetColumnRequest);
			}
		}else if (facetColumnRequest instanceof FacetColumnRangeRequest){
			return createRangeSearchCondition((FacetColumnRangeRequest) facetColumnRequest);
		}else{
			throw new IllegalArgumentException("Unexpected instance of FacetColumnRequest");
		}
	}

	/**
	 * Builds a parenthesized range condition: IS NULL when either bound is the
	 * null keyword, a one-sided <= / >= when only one bound is given, otherwise
	 * BETWEEN min AND max.
	 * Returns null when the request is null or both bounds are empty.
	 */
	static String createRangeSearchCondition(FacetColumnRangeRequest facetRange){
		if( facetRange == null || ( StringUtils.isEmpty( facetRange.getMin() ) && StringUtils.isEmpty( facetRange.getMax() ) ) ){
			return null;
		}
		String min = facetRange.getMin();
		String max = facetRange.getMax();
		StringBuilder builder = new StringBuilder("(");
		//at this point we know at least one value is not null and is not empty string
		builder.append(SqlElementUtils.wrapInDoubleQuotes(facetRange.getColumnName()));
		if (NULL_VALUE_KEYWORD.equals(min) || NULL_VALUE_KEYWORD.equals(max)){
			builder.append(" IS NULL");
		} else if(min == null){ //only max exists
			// NOTE(review): the guard above used isEmpty but these branches check
			// == null only — an empty-string (non-null) bound paired with a real
			// bound falls through to BETWEEN with an empty operand; confirm
			// upstream validation prevents that input.
			builder.append("<=");
			appendValueToStringBuilder(builder, max);
		}else if (max == null){ //only min exists
			builder.append(">=");
			appendValueToStringBuilder(builder, min);
		}else{
			builder.append(" BETWEEN ");
			appendValueToStringBuilder(builder, min);
			builder.append(" AND ");
			appendValueToStringBuilder(builder, max);
		}
		builder.append(")");
		return builder.toString();
	}

	/**
	 * Builds an OR-joined equality condition for a non-list column, e.g.
	 * ("col"='a' OR "col" IS NULL). The null keyword maps to IS NULL.
	 * Returns null when there are no requested values.
	 */
	static String createSingleValueColumnEnumerationSearchCondition(FacetColumnValuesRequest facetValues){
		if(facetValues == null || facetValues.getFacetValues() == null|| facetValues.getFacetValues().isEmpty()){
			return null;
		}
		StringBuilder builder = new StringBuilder("(");
		int initialSize = builder.length();
		for(String value : facetValues.getFacetValues()){
			// Length check doubles as "have we appended a term yet" flag.
			if(builder.length() > initialSize){
				builder.append(" OR ");
			}
			builder.append(SqlElementUtils.wrapInDoubleQuotes(facetValues.getColumnName()));
			if(value.equals(NULL_VALUE_KEYWORD)){
				builder.append(" IS NULL");
			}else{
				builder.append("=");
				appendValueToStringBuilder(builder, value);
			}
		}
		builder.append(")");
		return builder.toString();
	}

	/**
	 * Builds a HAS(...) membership condition for a LIST-typed column, e.g.
	 * ("col" HAS ('a','b')), optionally OR-ed with "col" IS NULL when the null
	 * keyword was among the requested values.
	 * Returns null when there are no requested values.
	 */
	static String createListColumnEnumerationSearchCondition(FacetColumnValuesRequest facetValues){
		if(facetValues == null || facetValues.getFacetValues() == null|| facetValues.getFacetValues().isEmpty()){
			return null;
		}
		String quotedColumnName = SqlElementUtils.wrapInDoubleQuotes(facetValues.getColumnName());
		StringJoiner hasClauseJoiner = new StringJoiner(",", quotedColumnName + " HAS (", ")");
		//initial size will be non-zero because we gave the constructor a prefix and suffix
		int joinerInitialSize = hasClauseJoiner.length();
		boolean includeColumnIsNullCondition = false;
		for(String value : facetValues.getFacetValues()){
			// values inside lists may not have the null keyword (e.g. "[null]" is not allowed)
			// so seeing the null keyword is treated as selecting for columns in which there is no list value.
			if(value.equals(NULL_VALUE_KEYWORD)){
				includeColumnIsNullCondition = true;
			}else {
				// Single quotes are escaped by doubling, per SQL string-literal rules.
				hasClauseJoiner.add("'" + value.replaceAll("'", "''")+"'");
			}
		}
		String searchCondition;
		if(includeColumnIsNullCondition){
			boolean noValuesAddedToJoiner = hasClauseJoiner.length() == joinerInitialSize;
			String isNullCondition = quotedColumnName + " IS NULL";
			if(noValuesAddedToJoiner){
				// Only the null keyword was requested: drop the empty HAS() clause.
				searchCondition = isNullCondition ;
			}else{
				searchCondition = hasClauseJoiner + " OR " + isNullCondition;
			}
		} else {
			searchCondition = hasClauseJoiner.toString();
		}
		return "(" + searchCondition + ")";
	}

	/**
	 * Appends a value to the string builder
	 * and places single quotes (') around it if the column type is String.
	 * Embedded single quotes are doubled to keep the literal well-formed.
	 */
	static void appendValueToStringBuilder(StringBuilder builder, String value){
		builder.append("'");
		builder.append(value.replaceAll("'", "''"));
		builder.append("'");
	}
}
package org.imaginea.botbot.common;

import java.io.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;

import junit.framework.Assert;

import org.imaginea.botbot.api.DefaultProperties;

/**
 * Scans a testcases folder for CSV test definitions and generates Java test
 * classes from text templates under the resources folder. Data-driven CSVs
 * (named *_datadriven.csv) get one generated class each; other CSVs are
 * grouped into one class per directory.
 */
public class TestCaseGenerator {
	// Unused at class scope as far as this file shows; map building happens in main(). TODO confirm.
	HashMap<String, ArrayList<String>> hm = new HashMap<String, ArrayList<String>>();
	// Root folder prefix for all generated/read paths; empty means the working directory.
	private static String baseFolder="";
	// Accumulates comma-separated fully-qualified names of generated data-driven classes.
	private String dataDrivenClasses = "";
	//private static String baseFolder=new File("runner").getPath()+File.separatorChar;

	/**
	 * Recursively walks {@code f}, collecting files of the given extension into
	 * {@code fileList} keyed by their directory name relative to the testcases folder.
	 * @param f directory (or file) to scan
	 * @param directoryName key for the current level; "" means derive it from f
	 * @param fileList output map: relative directory name -> file paths
	 * @param fileType extension to match (without the dot, e.g. "csv")
	 * @param absolute whether paths are collected as absolute or relative
	 */
	public void listDirectory(File f, String directoryName, HashMap<String, ArrayList<String>> fileList,final String fileType, final boolean absolute) {
		char pathSeparator=File.separatorChar;
		String testcaseFolder=getPath(new File(baseFolder+"testcases"+pathSeparator),absolute);
		File[] listOfFiles = f.listFiles();
		if (directoryName.equalsIgnoreCase("") && f.isDirectory()) {
			String path=getPath(f,absolute);
			directoryName=getDirectoryName(testcaseFolder,path);
		} else if (directoryName.equalsIgnoreCase("") && f.isFile()) {
			directoryName = getDirectoryName(testcaseFolder,getPath(new File(f.getParent()),absolute) );
		}
		// NOTE(review): listOfFiles is null when f does not exist or is not a
		// directory — that would NPE here; callers appear to pass directories only.
		for (int i = 0; i < listOfFiles.length; i++) {
			if (shouldBeFiltered(listOfFiles[i], fileType)) {
				if (fileList.containsKey(directoryName)) {
					List<String> flist=fileList.get(directoryName);
					String fPath=getPath(listOfFiles[i],absolute);
					fPath=filterPath(fPath, absolute);
					flist.add(fPath);
				} else {
					ArrayList<String> flist = new ArrayList<String>();
					String fPath=getPath(listOfFiles[i],absolute);
					fPath=filterPath(fPath, absolute);
					flist.add(fPath);
					fileList.put(directoryName, flist);
				}
			} else if (listOfFiles[i].isDirectory()) {
				// Recurse into subdirectories with their own relative key.
				listDirectory(listOfFiles[i], getDirectoryName(testcaseFolder,getPath(listOfFiles[i],absolute)), fileList,fileType,absolute);
			}
		}
	}

	/**
	 * Returns true only for regular, non-hidden files ending in "." + filetype.
	 * (The name reads inverted: true means "keep this file".)
	 */
	private boolean shouldBeFiltered(File f,String filetype){
		boolean isRequired=false;
		if(f.isDirectory()){
			return isRequired;
		}
		if(f.getName().startsWith(".")){
			return isRequired;
		}
		if(f.getName().endsWith("."+filetype)){
			return true;
		}
		return isRequired;
	}

	// Absolute or relative path string for a file, selected by flag.
	private String getPath(File f, boolean absolute){
		if(absolute){
			return f.getAbsolutePath();
		}else{
			return f.getPath();
		}
	}

	// Normalizes backslashes: escaped ("\\\\") for absolute paths (used inside
	// generated Java source), forward slashes for relative paths.
	private String filterPath(String fPath,boolean absolute){
		String path;
		if(absolute){
			path=fPath.replace("\\", "\\\\");
		}else{
			path=fPath.replace("\\", "/");
		}
		return path;
	}

	/**
	 * Computes the directory key of {@code testFolder} relative to {@code path}.
	 * On Windows the common leading segments are stripped component-by-component;
	 * elsewhere a split on the path prefix is used.
	 */
	private String getDirectoryName(String path,String testFolder){
		String directoryName = "";
		if (String.valueOf(File.separatorChar).contentEquals("\\")) {
			List<String> paths = new ArrayList<String>(Arrays.asList(path.split("\\\\")));
			List<String> testCaseFolders = new ArrayList<String>(Arrays
					.asList(testFolder.split("\\\\")));
			int pathsLength = paths.size();
			// Drop matching leading components from both lists.
			for (int i = 0; i < pathsLength && paths.size() != 0; i++) {
				if (!paths.get(0).contentEquals(testCaseFolders.get(0))) {
					break;
				}
				paths.remove(0);
				testCaseFolders.remove(0);
			}
			Iterator<String> it = testCaseFolders.iterator();
			while (it.hasNext()) {
				directoryName = directoryName + File.separatorChar + it.next();
			}
			// Strip the leading separator added by the loop above.
			directoryName = directoryName.substring(1);
		} else {
			directoryName = testFolder.split(path+File.separatorChar)[1];
		}
		return directoryName;
	}

	/**
	 * Generates one Java test class per entry in {@code hm} under src/test.
	 * Directories named "datadriven" (Robotium only) get one class per
	 * *_datadriven.csv file; all other directories get a single class listing
	 * every CSV as a test method.
	 * @param hm map of directory key -> CSV file paths (from listDirectory)
	 * @param template template file name under resources
	 */
	public void classGenerator(HashMap<String, ArrayList<String>> hm,String template) throws Exception {
		char pathSeparator= File.separatorChar;
		Iterator<String> it = hm.keySet().iterator();
		String testSource=baseFolder+("src:test:".replace(":", String.valueOf(pathSeparator)));
		while (it.hasNext()) {
			String classPath = it.next();
			String packagePath = classPath.toLowerCase();
			packagePath = replaceSpecialChar(packagePath);
			String javaClassFile = new File(packagePath).getName();
			String javaClassName = javaClassFile.substring(0, 1).toUpperCase()
					+ javaClassFile.substring(1);
			//checking if folder name is datadriven
			if (javaClassFile.equalsIgnoreCase("datadriven")&&template.contentEquals("RobotiumTemplate")) {
				try {
					// Parent package of the datadriven folder; a top-level folder
					// has no separator, which throws and yields the root package.
					packagePath = packagePath.substring(0, packagePath.lastIndexOf(pathSeparator));
				} catch (StringIndexOutOfBoundsException e) {
					System.out.println("Exception out of bounds");
					packagePath = "";
				}
				//creating package folder
				new File(testSource + packagePath + pathSeparator + javaClassFile).mkdirs();
				//For iterating through classes
				ArrayList<String> ar = hm.get(classPath);
				Iterator<String> dataDrivenIterator = ar.iterator();
				while (dataDrivenIterator.hasNext()) {
					String testFilePath = dataDrivenIterator.next();
					String fileName = new File(testFilePath).getName();
					fileName = fileName.substring(0, fileName.indexOf(".csv"));
					//Creating java classes for data driven test cases & ignoring other files
					if (fileName.endsWith("_datadriven")) {
						String dataDrivenJavaFileName = fileName.replace(
								"_datadriven", "");
						dataDrivenJavaFileName = dataDrivenJavaFileName
								.substring(0, 1).toUpperCase()
								+ dataDrivenJavaFileName.substring(1);
						Writer output = new BufferedWriter(new FileWriter(
								testSource + packagePath + pathSeparator
										+ "datadriven" + pathSeparator
										+ dataDrivenJavaFileName + ".java"));
						String packageString = packagePath + pathSeparator + "datadriven";
						dataDrivenClassWriter(dataDrivenJavaFileName, output,
								"RobotiumDataDrivenTemplate", testFilePath,
								packageString);
						// Record the generated class's fully-qualified name for
						// the datadriven.properties file written by main().
						dataDrivenClasses += "test."
								+ packageString.replace(pathSeparator + "", ".")
								+ "."+dataDrivenJavaFileName + ",";
						output.close();
					}
				}
			}
			//Java class generation for non-datadriven cases
			else {
				try{
					packagePath = packagePath
							.substring(0, packagePath.lastIndexOf(pathSeparator));}
				catch(StringIndexOutOfBoundsException e) {
					System.out.println("Exception out of bounds");
					packagePath="";
				}
				new File(testSource + packagePath).mkdirs();
				Writer output = new BufferedWriter(new FileWriter(testSource
						+ packagePath + pathSeparator + javaClassName + ".java"));
				ArrayList<String> ar = hm.get(classPath);
				classWriter(classPath, ar, output,template);
				output.close();
			}
		}
	}

	//Class writer for data driven csv
	/**
	 * Emits one data-driven test class: reads the template line by line and
	 * substitutes TestClassName, filePath and dataFile placeholders.
	 */
	private void dataDrivenClassWriter(String fileName, Writer output,
			String template,String testFilePath,String packagePath) {
		try{
			char pathSeparator=File.separatorChar;
			FileInputStream fstream;
			String resourcesPath=baseFolder+"resources"+pathSeparator;
			fstream = new FileInputStream(resourcesPath+template);
			DataInputStream in = new DataInputStream(fstream);
			BufferedReader br = new BufferedReader(new InputStreamReader(in));
			boolean flag = false; // NOTE(review): unused in this method
			String importString=packagePath.replace(pathSeparator+"", ".");
			String javaClass ="package test."+importString+";\n";
			String tempString="";
			while ((tempString = br.readLine()) != null) {
				if (tempString.contains("TestClassName")){
					javaClass+=tempString.replace("TestClassName", fileName)+"\n";
				} else if (tempString.contains("filePath")){
					javaClass+=tempString.replace("filePath", testFilePath)+"\n";
				} else if (tempString.contains("dataFile")){
					// foo_datadriven.csv drives foo_data.csv as its data source.
					String dataFile=testFilePath.substring(0,testFilePath.indexOf("_datadriven.csv"));
					javaClass+=tempString.replace("dataFile", dataFile+"_data.csv")+"\n";
				} else{
					javaClass+=tempString+"\n";
				}
			}
			output.write(javaClass);
		} catch(IOException e) {
			System.out.println("Exception "+e);
		}
	}

	/**
	 * Entry point: reads framework configuration, scans the configured
	 * testcases folder for CSVs, generates test classes and writes
	 * resources/datadriven.properties listing the data-driven classes.
	 */
	public static void main(String[] args) {
		TestCaseGenerator tc = new TestCaseGenerator();
		HashMap<String, ArrayList<String>> hm1 = new HashMap<String, ArrayList<String>>();
		DefaultProperties prop = DefaultProperties.getDefaultProperty();
		boolean absolute=false;
		boolean isRobotium=false;
		String template ="Template";
		char pathSeparator=File.separatorChar;
		File f =new File("");
		System.out.println(f.getAbsolutePath());
		String testFolderPath=baseFolder+"testcases"+pathSeparator;
		if(prop.getValueFromProperty("FRAMEWORK").equalsIgnoreCase("robotium")){
			isRobotium=true;
			template="RobotiumTemplate";
		}
		// Non-Robotium frameworks get absolute paths embedded in generated source.
		if(!isRobotium){
			absolute=true;
		}
		tc.listDirectory(new File(testFolderPath
				+ prop.getValueFromProperty("TESTCASE_FOLDER")), "",hm1, "csv", absolute);
		System.out.println(hm1);
		try {
			tc.classGenerator(hm1,template);
			//Creating a properties file specifying data driven classes
			Writer dataDrivenPropertiesWriter = new BufferedWriter(new FileWriter(baseFolder+"resources"+pathSeparator+"datadriven.properties"));
			dataDrivenPropertiesWriter.write("DATA_CLASSES="+tc.dataDrivenClasses);
			dataDrivenPropertiesWriter.close();
		} catch (Exception e) {
			e.printStackTrace();
		}
	}

	/**
	 * Emits one non-data-driven test class: the template's header (everything
	 * before "@Test" / "testName") becomes the class preamble, and the
	 * remainder is repeated once per CSV with testName/filePath substituted.
	 */
	private void classWriter(String classPath, ArrayList<String> ar,
			Writer output,String template) {
		boolean isRobotium=false;
		FileInputStream fstream;
		char pathSeparator=File.separatorChar;
		String resourcesPath=baseFolder+"resources"+pathSeparator;
		if(template.contentEquals("RobotiumTemplate")){
			isRobotium=true;
		}
		try {
			String tempString = "";
			String importString = classPath.replace(String.valueOf(pathSeparator), ".");
			String outputStringClass="";
			importString = replaceSpecialChar(importString);
			try{
				importString = importString.substring(0, importString.lastIndexOf("."));
			} catch(StringIndexOutOfBoundsException e) {
				// Top-level folder: no package segment above it.
				System.out.println("Exception out of bounds");
				outputStringClass = "package test;\n";
			}
			// NOTE(review): reference comparison with "" — works here only because
			// both sides are interned literals; equals()/isEmpty() would be safer.
			if(outputStringClass==""){
				outputStringClass = "package test."
						+ importString + ";\n";
			}
			String outputStringTestCase = "";
			String testCase = "";
			fstream = new FileInputStream(resourcesPath+template);
			DataInputStream in = new DataInputStream(fstream);
			BufferedReader br = new BufferedReader(new InputStreamReader(in));
			boolean flag = false;
			// Split the template into class preamble vs. per-test-method body.
			while ((tempString = br.readLine()) != null) {
				if (tempString.equalsIgnoreCase("@Test")
						|| (isRobotium && tempString.contains("testName")))
					flag = true;
				if (!flag)
					outputStringClass += tempString + "\n";
				else
					outputStringTestCase += tempString + "\n";
			}
			String packagePath = classPath.toLowerCase();
			String javaClassFile = new File(packagePath).getName();
			javaClassFile = replaceSpecialChar(javaClassFile);
			String javaClassName = javaClassFile.substring(0, 1).toUpperCase()
					+ javaClassFile.substring(1);
			outputStringClass = outputStringClass.replace("TestClassName", javaClassName);
			output.write(outputStringClass + "\n");
			Iterator<String> arrayIt = ar.iterator();
			while (arrayIt.hasNext()) {
				String testfile = arrayIt.next();
				String fileName = new File(testfile).getName();
				fileName = fileName.substring(0, fileName.indexOf(".csv"));
				fileName = replaceSpecialChar(fileName);
				if(isRobotium){
					// Robotium/JUnit3-style discovery requires a "test" prefix.
					fileName="test"+fileName.substring(0, 1).toUpperCase()
							+ fileName.substring(1);
				}
				testCase = outputStringTestCase.replace("testName", fileName);
				testCase = testCase.replace("filePath", testfile);
				output.write(testCase + "\n");
			}
			// Close the generated class body.
			output.write("}");
		} catch (FileNotFoundException e) {
			// TODO Auto-generated catch block
			Assert.fail(e.toString());
		} catch (IOException e) {
			// TODO Auto-generated catch block
			Assert.fail(e.toString());
		}
	}

	// Strips characters that are not valid in Java identifiers/package names.
	private String replaceSpecialChar(String fileName) {
		fileName=fileName.replace(" ", "");
		fileName=fileName.replace("-", "");
		fileName=fileName.replace("_", "");
		return fileName;
	}
}
package de.tu_darmstadt.elc.olw.api.misc.io;

import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Deque;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.Vector;
import java.util.zip.Adler32;
import java.util.zip.ZipEntry;
import java.util.zip.ZipException;
import java.util.zip.ZipFile;
import java.util.zip.ZipOutputStream;

import org.apache.commons.compress.archivers.zip.UnrecognizedExtraField;
import org.apache.commons.compress.archivers.zip.ZipArchiveEntry;
import org.apache.commons.compress.archivers.zip.ZipArchiveOutputStream;
import org.apache.commons.compress.archivers.zip.ZipLong;
import org.apache.commons.compress.archivers.zip.ZipShort;
import org.jdom.Document;
import org.jdom.Element;
import org.jdom.JDOMException;
import org.jdom.input.SAXBuilder;
import org.jdom.output.Format;
import org.jdom.output.XMLOutputter;

import de.tu_darmstadt.elc.olw.api.misc.execution.Executer;
import de.tu_darmstadt.elc.olw.api.misc.execution.ExecutionException;

/**
 * Static file utilities: name/extension helpers, zip compression and
 * extraction (including an fzip-compatible variant with Adler32 checksums in
 * a custom extra field), JDOM XML read/write, and media-duration probing via
 * an external ffmpeg binary.
 */
public class FileExtractor {
	// Copy-buffer size in bytes for all stream operations in this class.
	public static final int BUFFER = 2048;

	/**
	 * returns the extension of the given file
	 *
	 * @param fileName
	 * @return text after the last dot; the whole name if there is no dot
	 */
	public static String getFileExtension(String fileName) {
		int extPos = fileName.lastIndexOf(".");
		return fileName.substring(extPos + 1);
	}

	/**
	 * returns the name of the file without the extension
	 *
	 * @param fileName
	 * @return
	 */
	public static String getFileName(String fileName) {
		int extPos = fileName.lastIndexOf(".");
		// NOTE(review): throws StringIndexOutOfBoundsException when there is no
		// dot (extPos == -1); callers appear to pass names with extensions.
		return fileName.substring(0, extPos);
	}

	// Replaces every space with an underscore (filesystem-safe names).
	public static String removeSpace(String fileName) {
		return fileName.replace(' ', '_');
	}

	// Timestamp string in HHmmddMMyyyy form (hour-minute first, then date).
	public static String getDateTime() {
		DateFormat dateFormat = new SimpleDateFormat("HHmmddMMyyyy");
		Date date = new Date();
		return dateFormat.format(date);
	}

	/**
	 * compresses the given zip folder into the zip file
	 *
	 * @param destDir
	 *            input
	 * @param zipFile
	 *            output
	 * @throws IOException
	 */
	public static void zipFile(File destDir, File zipFile) throws IOException {
		// Entry names are made relative to the root folder via URI relativization.
		URI base = destDir.toURI();
		Deque<File> queue = new LinkedList<File>();
		queue.push(destDir);
		BufferedInputStream origin = null;
		FileOutputStream fos = new FileOutputStream(zipFile);
		ZipOutputStream out = new ZipOutputStream(new BufferedOutputStream(fos));
		// out.setMethod(ZipOutputStream.DEFLATED);
		byte data[] = new byte[BUFFER];
		// get a list of files from current directory
		while (!queue.isEmpty()) {
			File dirEntry = queue.pop();
			for (File fileEntry : dirEntry.listFiles()) {
				String name = base.relativize(fileEntry.toURI()).getPath();
				if (!fileEntry.isDirectory()) {
					System.out.println("Adding: " + fileEntry);
					FileInputStream fi = new FileInputStream(fileEntry);
					origin = new BufferedInputStream(fi, BUFFER);
					out.putNextEntry(new ZipEntry(name));
					int count;
					while ((count = origin.read(data, 0, BUFFER)) != -1) {
						out.write(data, 0, count);
					}
					origin.close();
				} else {
					// directory: record an explicit trailing-slash entry and recurse later
					queue.push(fileEntry);
					name = name.endsWith("/") ? name : name + "/";
					out.putNextEntry(new ZipEntry(name));
				}
			}
		}
		out.close();
	}

	/**
	 * decompresses the zip file
	 *
	 * @param zipFile
	 * @param destDir
	 * @throws ZipException
	 * @throws IOException
	 */
	public static void unzip(File zipFile, File destDir) throws ZipException,
			IOException {
		@SuppressWarnings("resource")
		ZipFile zFile = new ZipFile(zipFile);
		Enumeration<? extends ZipEntry> entries = zFile.entries();
		while (entries.hasMoreElements()) {
			ZipEntry entry = entries.nextElement();
			// Spaces in entry names become underscores on disk (matches removeSpace).
			File file = new File(destDir, entry.getName().replace(' ','_'));
			if (entry.isDirectory()) {
				file.mkdirs();
			} else {
				file.getParentFile().mkdirs();
				InputStream in = zFile.getInputStream(entry);
				try {
					// System.out.println("Decompress: " + entry.getName());
					// NOTE(review): out is never closed/flushed here (only the
					// @SuppressWarnings hides the warning) — data may be lost on
					// some platforms; verify before relying on this method.
					@SuppressWarnings("resource")
					OutputStream out = new FileOutputStream(file);
					int count;
					byte[] buffer = new byte[BUFFER];
					while ((count = in.read(buffer)) != -1) {
						out.write(buffer, 0, count);
					}
				} finally {
					in.close();
				}
			}
		}
	}

	/**
	 * Reads exactly {@code length} bytes from the stream, closes it, and
	 * returns the data. (Method name carries a historical typo — "Strean" —
	 * kept because it is public API.)
	 * @throws IOException if the stream ends before {@code length} bytes
	 */
	public static byte[] getBytesFromStrean(InputStream is, long length,
			String name) throws IOException {
		// Create the byte array to hold the data
		byte[] bytes = new byte[(int) length];
		// Read in the bytes
		int offset = 0;
		int numRead = 0;
		while (offset < bytes.length
				&& (numRead = is.read(bytes, offset, bytes.length - offset)) >= 0) {
			offset += numRead;
		}
		// Ensure all the bytes have been read in
		if (offset < bytes.length) {
			throw new IOException("Could not completely read from stream " + name);
		}
		// Close the input stream and return bytes
		is.close();
		return bytes;
	}

	/**
	 * zip the folder with adler32 checksum for fzip
	 *
	 * Re-packs the folder: first zips it to a temp file, then copies every
	 * entry into the final archive adding an extra field (header id 0xdada)
	 * containing the Adler32 checksum of the uncompressed bytes.
	 * NOTE(review): the intermediate temp.zip is not deleted afterwards.
	 * @param destDir
	 * @param zipFile
	 * @throws IOException
	 */
	@SuppressWarnings("unchecked")
	public static void zipWithChecksum(File destDir, File zipFile)
			throws IOException {
		File tempZip = new File(destDir.getParentFile(), "temp.zip");
		zipFile(destDir, tempZip);
		ZipFile zippedIn = new ZipFile(tempZip);
		ZipArchiveOutputStream zippedOut = new ZipArchiveOutputStream(zipFile);
		zippedOut.setMethod(ZipArchiveOutputStream.DEFLATED);
		Enumeration<ZipEntry> entries = (Enumeration<ZipEntry>) zippedIn
				.entries();
		Adler32 adlerChecksum = new Adler32();
		while (entries.hasMoreElements()) {
			ZipArchiveEntry entry = new ZipArchiveEntry(entries.nextElement());
			InputStream input = zippedIn.getInputStream(entry);
			byte[] content = getBytesFromStrean(input, entry.getSize(),
					entry.getName());
			adlerChecksum.reset();
			// checksum is calculated on the uncompressed bytes of the file in
			// question
			adlerChecksum.update(content);
			// adds the adler32 checksum to the zip file
			UnrecognizedExtraField adlerField = new UnrecognizedExtraField();
			adlerField.setHeaderId(new ZipShort(0xdada));
			adlerField.setLocalFileDataData(new ZipLong(adlerChecksum
					.getValue()).getBytes());
			entry.addExtraField(adlerField);
			zippedOut.putArchiveEntry(entry);
			zippedOut.write(content);
			zippedOut.closeArchiveEntry();
		}
		zippedIn.close();
		zippedOut.close();
		System.out.println("Converted " + tempZip + " to " + zipFile);
	}

	/**
	 * Parses the XML file and returns a detached clone of its root element
	 * (safe to attach to another document).
	 * @param file
	 * @return
	 * @throws IOException wrapping any JDOM parse failure
	 */
	public static Element importXMLFile(File file) throws IOException {
		FileInputStream inFile = new FileInputStream(file);
		BufferedInputStream buffer = new BufferedInputStream(inFile);
		SAXBuilder inputStream = new SAXBuilder();
		// inputStream.setIgnoringBoundaryWhitespace(true);
		try {
			Document doc = inputStream.build(buffer);
			Element root = doc.getRootElement();
			// Clone so the returned element is not tied to this document.
			Element copyRoot = (Element) root.clone();
			return copyRoot;
		} catch (JDOMException e) {
			throw new IOException(e);
		}
	}

	// Pretty-prints the document to the file; I/O errors are silently swallowed.
	public static void writeXML(File xmlFile, Document doc) {
		try {
			XMLOutputter serializer = new XMLOutputter();
			FileWriter writer = new FileWriter(xmlFile);
			Format format = serializer.getFormat();
			format = Format.getPrettyFormat();
			serializer.setFormat(format);
			serializer.output(doc, writer);
		} catch (IOException e) {
		}
	}

	// Same as writeXML but with an explicit encoding declaration in the output.
	public static void writeXMLwithEncoding(File xmlFile, Document doc,
			String encoding) {
		try {
			XMLOutputter serializer = new XMLOutputter();
			FileWriter writer = new FileWriter(xmlFile);
			Format format = serializer.getFormat();
			format = Format.getPrettyFormat();
			format.setEncoding(encoding);
			serializer.setFormat(format);
			serializer.output(doc, writer);
		} catch (IOException e) {
		}
	}

	/**
	 * searches the file in the given folder with the specified suffix
	 *
	 * NOTE(review): uses non-short-circuit '&', so a null folder still calls
	 * isDirectory() and would NPE; '&&' was almost certainly intended.
	 * @param folder
	 * @param suffix matched case-insensitively against the lowercased name
	 * @return first match, or null
	 */
	public static File findFileWithSuffix(File folder, String suffix) {
		if (folder != null & folder.isDirectory()) {
			File[] listFile = folder.listFiles();
			for (int i = 0; i < listFile.length; i++) {
				String fileName = listFile[i].getName().toLowerCase();
				if (fileName.endsWith(suffix))
					return listFile[i];
			}
		}
		return null;
	}

	/**
	 * Reads an EVQ file and maps each line's first space-separated token
	 * (the start time) to its 1-based line number.
	 */
	public static HashMap<String, Integer> loadStartTime(String EVQFileName)
			throws IOException {
		HashMap<String, Integer> startTimeTable = new HashMap<String, Integer>();
		FileReader fr = new FileReader(EVQFileName);
		@SuppressWarnings("resource")
		BufferedReader in = new BufferedReader(fr);
		int id = 1;
		String line;
		// read one line at a time
		while ((line = in.readLine()) != null) {
			String startTime = line.split(" ")[0]; // first element
			startTimeTable.put(startTime, id);
			id++;
		}
		return startTimeTable;
	}

	/**
	 * copy file
	 *
	 * @param src
	 * @param dst
	 * @throws IOException
	 */
	public static void copy(File src, File dst) throws IOException {
		InputStream in = new FileInputStream(src);
		OutputStream out = new FileOutputStream(dst);
		byte[] buf = new byte[1024];
		int len;
		while ((len = in.read(buf)) > 0) {
			out.write(buf, 0, len);
		}
		in.close();
		out.close();
	}

	/**
	 * Probes a media file's duration by running ffmpeg -i and scanning its
	 * output for the token following "Duration" up to the next comma.
	 * Returns "" when no Duration token is found.
	 * NOTE(review): depends on a hard-coded ffmpeg path and on ffmpeg's
	 * textual output format.
	 */
	public static String getDuration(File mediaFile) throws ExecutionException {
		String duration = "";
		String line = "/opt/olw/olw-ffmpeg/bin/ffmpeg -i "
				+ mediaFile.getAbsolutePath();
		ByteArrayInputStream stream = Executer.executeInfo(line);
		Vector<Integer> buffer = new Vector<Integer>();
		int letter = 0;
		// Slurp the entire process output into a character-code buffer.
		while ((letter = stream.read()) != -1)
			buffer.add(letter);
		for (int i = 0; i < buffer.size(); i++)
			if (buffer.get(i) == ((int) 'D')) {
				// Collect the word starting at 'D' up to the next space.
				String token = "";
				int j = i;
				while (buffer.get(j) != ((int) ' ')) {
					token += (char) buffer.get(j).intValue();
					j++;
				}
				if (token.contains("Duration")) {
					j++;
					// Everything up to the next comma is the duration value.
					while (buffer.get(j) != ((int) ',')) {
						duration += (char) buffer.get(j).intValue();
						j++;
					}
					return duration;
				}
			}
		return duration;
	}
}
/**********************************************************************************
 * $URL: https://source.sakaiproject.org/svn/portal/tags/sakai-10.1/portal-impl/impl/src/java/org/sakaiproject/portal/charon/site/DefaultSiteViewImpl.java $
 * $Id: DefaultSiteViewImpl.java 310500 2014-06-30 20:09:29Z [email protected] $
 ***********************************************************************************
 *
 * Copyright (c) 2003, 2004, 2005, 2006, 2007, 2008 The Sakai Foundation
 *
 * Licensed under the Educational Community License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.opensource.org/licenses/ECL-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 **********************************************************************************/

package org.sakaiproject.portal.charon.site;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Iterator;

import javax.servlet.http.HttpServletRequest;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.sakaiproject.authz.api.AuthzGroup;
import org.sakaiproject.authz.api.Role;
import org.sakaiproject.authz.cover.AuthzGroupService;
import org.sakaiproject.authz.cover.SecurityService;
import org.sakaiproject.component.api.ServerConfigurationService;
import org.sakaiproject.entity.api.ResourceProperties;
import org.sakaiproject.exception.IdUnusedException;
import org.sakaiproject.portal.api.Portal;
import org.sakaiproject.portal.api.SiteNeighbourhoodService;
import org.sakaiproject.site.api.Site;
import org.sakaiproject.site.api.SiteService;
import org.sakaiproject.site.api.ToolConfiguration;
import org.sakaiproject.site.api.SitePage;
import org.sakaiproject.tool.api.Session;
import org.sakaiproject.user.api.Preferences;
import org.sakaiproject.user.api.PreferencesService;
import org.sakaiproject.user.cover.UserDirectoryService;
import org.sakaiproject.util.Web;

/**
 * Default site-tab view for the Charon portal: decides which of the user's
 * sites appear as tabs vs. in the "more sites" drawer, locates the profile /
 * preferences / worksite-setup tool URLs in the user's My Workspace, and
 * exposes everything through the render context map.
 * @author ieb
 */
public class DefaultSiteViewImpl extends AbstractSiteViewImpl
{
	private static final Log LOG = LogFactory.getLog(DefaultSiteViewImpl.class);

	/**
	 * @param siteHelper
	 * @param request
	 * @param session
	 * @param currentSiteId
	 * @param siteService
	 * @param serverConfigurationService
	 * @param preferencesService
	 */
	public DefaultSiteViewImpl(PortalSiteHelperImpl siteHelper, SiteNeighbourhoodService siteNeighbourhoodService, HttpServletRequest request,
			Session session, String currentSiteId, SiteService siteService,
			ServerConfigurationService serverConfigurationService,
			PreferencesService preferencesService)
	{
		super(siteHelper, siteNeighbourhoodService, request, session, currentSiteId, siteService,
				serverConfigurationService, preferencesService);
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.sakaiproject.portal.api.SiteView#getRenderContextObject()
	 */
	public Object getRenderContextObject()
	{
		// Get the list of sites in the right order,
		// My WorkSpace will be the first in the list

		// if public workgroup/gateway site is not included, add to list
		boolean siteFound = false;
		for (int i = 0; i < mySites.size(); i++)
		{
			if (((Site) mySites.get(i)).getId().equals(currentSiteId))
			{
				siteFound = true;
			}
		}
		try
		{
			if (!siteFound)
			{
				mySites.add(siteService.getSite(currentSiteId));
			}
		}
		catch (IdUnusedException e)
		{
		} // ignore

		// Tab budget: default, overridden for anonymous users by the gateway
		// setting, and for logged-in users by their personal "tabs" preference.
		int tabsToDisplay = serverConfigurationService.getInt(Portal.CONFIG_DEFAULT_TABS, 5);

		boolean loggedIn = session.getUserId() != null;

		if (!loggedIn)
		{
			tabsToDisplay = serverConfigurationService.getInt(
					"gatewaySiteListDisplayCount", tabsToDisplay);
		}
		else
		{
			Preferences prefs = preferencesService
					.getPreferences(session.getUserId());
			ResourceProperties props = prefs.getProperties("sakai:portal:sitenav");
			try
			{
				tabsToDisplay = (int) props.getLongProperty("tabs");
			}
			catch (Exception any)
			{
			}
		}

		// we allow one site in the drawer - that is OK
		moreSites = new ArrayList<Site>();
		if (mySites.size() > tabsToDisplay)
		{
			// Check to see if the selected site is in the first
			// "tabsToDisplay" tabs
			boolean found = false;
			for (int i = 0; i < tabsToDisplay && i < mySites.size(); i++)
			{
				Site site = mySites.get(i);
				String effectiveId = siteHelper.getSiteEffectiveId(site);
				if (site.getId().equals(currentSiteId)
						|| effectiveId.equals(currentSiteId)) found = true;
			}

			// Save space for the current site
			if (!found) tabsToDisplay = tabsToDisplay - 1;
			if (tabsToDisplay < 2) tabsToDisplay = 2;

			// Create the list of "additional sites"- but do not
			// include the currently selected set in the list
			Site currentSelectedSite = null;

			int remove = mySites.size() - tabsToDisplay;
			for (int i = 0; i < remove; i++)
			{
				// We add the site the the drop-down
				// unless it it the current site in which case
				// we retain it for later
				// (index tabsToDisplay is re-read each pass because remove()
				// shifts the remaining overflow sites into that slot)
				Site site = mySites.get(tabsToDisplay);
				mySites.remove(tabsToDisplay);

				String effectiveId = siteHelper.getSiteEffectiveId(site);
				if (site.getId().equals(currentSiteId)
						|| effectiveId.equals(currentSiteId))
				{
					currentSelectedSite = site;
				}
				else
				{
					moreSites.add(site);
				}
			}

			// check to see if we need to re-add the current site
			if (currentSelectedSite != null)
			{
				mySites.add(currentSelectedSite);
			}
		}

		processMySites();

		String profileToolId = serverConfigurationService.getString("portal.profiletool","sakai.profile2");
		String preferencesToolId = serverConfigurationService.getString("portal.preferencestool","sakai.preferences");
		String worksiteToolId = serverConfigurationService.getString("portal.worksitetool","sakai.sitesetup");

		String profileToolUrl = null;
		String worksiteToolUrl = null;
		String prefsToolUrl = null;
		// Walk the user's My Workspace pages looking for the three well-known
		// tools and remember a direct page URL for each one found.
		if ( myWorkspaceSiteId != null )
		{
			for (Iterator iSi = mySites.iterator(); iSi.hasNext();)
			{
				Site s = (Site) iSi.next();
				if (myWorkspaceSiteId.equals(s.getId()) )
				{
					List pages = siteHelper.getPermittedPagesInOrder(s);
					for (Iterator iPg = pages.iterator(); iPg.hasNext();)
					{
						SitePage p = (SitePage) iPg.next();
						List<ToolConfiguration> pTools = p.getTools();
						Iterator iPt = pTools.iterator();
						while (iPt.hasNext())
						{
							ToolConfiguration placement = (ToolConfiguration) iPt.next();

							if ( profileToolId.equals(placement.getToolId()) )
							{
								profileToolUrl = Web.returnUrl(request, "/site/"
										+ Web.escapeUrl(siteHelper.getSiteEffectiveId(s)) + "/page/"
										+ Web.escapeUrl(p.getId()));
							}
							else if ( preferencesToolId.equals(placement.getToolId()) )
							{
								prefsToolUrl = Web.returnUrl(request, "/site/"
										+ Web.escapeUrl(siteHelper.getSiteEffectiveId(s)) + "/page/"
										+ Web.escapeUrl(p.getId()));
							}
							else if ( worksiteToolId.equals(placement.getToolId()) )
							{
								worksiteToolUrl = Web.returnUrl(request, "/site/"
										+ Web.escapeUrl(siteHelper.getSiteEffectiveId(s)) + "/page/"
										+ Web.escapeUrl(p.getId()));
							}
						}
					}
				}
			}
		}

		if ( profileToolUrl != null )
		{
			renderContextMap.put("profileToolUrl", profileToolUrl);
		}
		if ( prefsToolUrl != null )
		{
			renderContextMap.put("prefsToolUrl", prefsToolUrl);
		}
		if ( worksiteToolUrl != null )
		{
			renderContextMap.put("worksiteToolUrl", worksiteToolUrl);
		}

		if (serverConfigurationService.getBoolean("portal.use.tutorial", true))
		{
			renderContextMap.put("tutorial", true);
		}
		else
		{
			renderContextMap.put("tutorial", false);
		}

		// Convert the tab sites for the template engine.
		List<Map> l = siteHelper.convertSitesToMaps(request, mySites, prefix,
				currentSiteId, myWorkspaceSiteId,
				/* includeSummary */false, /* expandSite */false,
				/* resetTools */"true".equalsIgnoreCase(serverConfigurationService
						.getString(Portal.CONFIG_AUTO_RESET)),
				/* doPages */true, /* toolContextPath */null, loggedIn);

		renderContextMap.put("tabsSites", l);

		boolean displayActive = serverConfigurationService.getBoolean("portal.always.display.active_sites",false);
		//If we don't always want to display it anyway, check to see if we need to display it
		if (!displayActive)
		{
			displayActive=Boolean.valueOf(moreSites.size() > 0);
		}

		renderContextMap.put("tabsMoreSitesShow", displayActive);

		// more dropdown
		if (moreSites.size() > 0)
		{
			List<Map> m = siteHelper.convertSitesToMaps(request, moreSites, prefix,
					currentSiteId, myWorkspaceSiteId,
					/* includeSummary */false, /* expandSite */ false,
					/* resetTools */"true".equalsIgnoreCase(serverConfigurationService
							.getString(Portal.CONFIG_AUTO_RESET)),
					/* doPages */true, /* toolContextPath */null, loggedIn);

			renderContextMap.put("tabsMoreSites", m);
		}

		return renderContextMap;
	}

	/**
	 * Hook for subclasses to reorder/filter mySites after the tab/drawer split;
	 * intentionally a no-op here.
	 */
	protected void processMySites()
	{
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.sakaiproject.portal.api.SiteView#isEmpty()
	 */
	public boolean isEmpty()
	{
		return mySites.isEmpty();
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.sakaiproject.portal.api.SiteView#setPrefix(java.lang.String)
	 */
	public void setPrefix(String prefix)
	{
		this.prefix = prefix;
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.sakaiproject.portal.api.SiteView#setToolContextPath(java.lang.String)
	 */
	public void setToolContextPath(String toolContextPath)
	{
		this.toolContextPath = toolContextPath;
	}
}
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution

// Automatically generated by LOXI from template of_class.java
// Do not modify

package org.projectfloodlight.openflow.protocol.ver14;

import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Set;
import com.google.common.collect.ImmutableSet;
import java.util.List;
import com.google.common.collect.ImmutableList;
import org.jboss.netty.buffer.ChannelBuffer;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;

/**
 * Immutable value object for a single OpenFlow 1.4 flow-stats entry,
 * with nested builders, a wire reader/writer, and a Guava hashing funnel.
 * Generated code — regenerate via LoxiGen rather than editing by hand.
 */
class OFFlowStatsEntryVer14 implements OFFlowStatsEntry {
    private static final Logger logger = LoggerFactory.getLogger(OFFlowStatsEntryVer14.class);
    // version: 1.4
    final static byte WIRE_VERSION = 5;
    final static int MINIMUM_LENGTH = 56;

    // Per-field defaults used by the standalone Builder.
    private final static TableId DEFAULT_TABLE_ID = TableId.ALL;
    private final static long DEFAULT_DURATION_SEC = 0x0L;
    private final static long DEFAULT_DURATION_NSEC = 0x0L;
    private final static int DEFAULT_PRIORITY = 0x0;
    private final static int DEFAULT_IDLE_TIMEOUT = 0x0;
    private final static int DEFAULT_HARD_TIMEOUT = 0x0;
    private final static Set<OFFlowModFlags> DEFAULT_FLAGS = ImmutableSet.<OFFlowModFlags>of();
    private final static int DEFAULT_IMPORTANCE = 0x0;
    private final static U64 DEFAULT_COOKIE = U64.ZERO;
    private final static U64 DEFAULT_PACKET_COUNT = U64.ZERO;
    private final static U64 DEFAULT_BYTE_COUNT = U64.ZERO;
    private final static Match DEFAULT_MATCH = OFFactoryVer14.MATCH_WILDCARD_ALL;
    private final static List<OFInstruction> DEFAULT_INSTRUCTIONS = ImmutableList.<OFInstruction>of();

    // OF message fields
    private final TableId tableId;
    private final long durationSec;
    private final long durationNsec;
    private final int priority;
    private final int idleTimeout;
    private final int hardTimeout;
    private final Set<OFFlowModFlags> flags;
    private final int importance;
    private final U64 cookie;
    private final U64 packetCount;
    private final U64 byteCount;
    private final Match match;
    private final List<OFInstruction> instructions;

    // Immutable default instance
    final static OFFlowStatsEntryVer14 DEFAULT = new OFFlowStatsEntryVer14(
        DEFAULT_TABLE_ID, DEFAULT_DURATION_SEC, DEFAULT_DURATION_NSEC, DEFAULT_PRIORITY, DEFAULT_IDLE_TIMEOUT, DEFAULT_HARD_TIMEOUT, DEFAULT_FLAGS, DEFAULT_IMPORTANCE, DEFAULT_COOKIE, DEFAULT_PACKET_COUNT, DEFAULT_BYTE_COUNT, DEFAULT_MATCH, DEFAULT_INSTRUCTIONS
    );

    // package private constructor - used by readers, builders, and factory
    OFFlowStatsEntryVer14(TableId tableId, long durationSec, long durationNsec, int priority, int idleTimeout, int hardTimeout, Set<OFFlowModFlags> flags, int importance, U64 cookie, U64 packetCount, U64 byteCount, Match match, List<OFInstruction> instructions) {
        // Reference-typed fields are null-hostile; primitives need no check.
        if(tableId == null) {
            throw new NullPointerException("OFFlowStatsEntryVer14: property tableId cannot be null");
        }
        if(flags == null) {
            throw new NullPointerException("OFFlowStatsEntryVer14: property flags cannot be null");
        }
        if(cookie == null) {
            throw new NullPointerException("OFFlowStatsEntryVer14: property cookie cannot be null");
        }
        if(packetCount == null) {
            throw new NullPointerException("OFFlowStatsEntryVer14: property packetCount cannot be null");
        }
        if(byteCount == null) {
            throw new NullPointerException("OFFlowStatsEntryVer14: property byteCount cannot be null");
        }
        if(match == null) {
            throw new NullPointerException("OFFlowStatsEntryVer14: property match cannot be null");
        }
        if(instructions == null) {
            throw new NullPointerException("OFFlowStatsEntryVer14: property instructions cannot be null");
        }
        this.tableId = tableId;
        this.durationSec = durationSec;
        this.durationNsec = durationNsec;
        this.priority = priority;
        this.idleTimeout = idleTimeout;
        this.hardTimeout = hardTimeout;
        this.flags = flags;
        this.importance = importance;
        this.cookie = cookie;
        this.packetCount = packetCount;
        this.byteCount = byteCount;
        this.match = match;
        this.instructions = instructions;
    }

    // Accessors for OF message fields
    @Override
    public TableId getTableId() {
        return tableId;
    }

    @Override
    public long getDurationSec() {
        return durationSec;
    }

    @Override
    public long getDurationNsec() {
        return durationNsec;
    }

    @Override
    public int getPriority() {
        return priority;
    }

    @Override
    public int getIdleTimeout() {
        return idleTimeout;
    }

    @Override
    public int getHardTimeout() {
        return hardTimeout;
    }

    @Override
    public U64 getCookie() {
        return cookie;
    }

    @Override
    public U64 getPacketCount() {
        return packetCount;
    }

    @Override
    public U64 getByteCount() {
        return byteCount;
    }

    @Override
    public Match getMatch() {
        return match;
    }

    @Override
    public List<OFInstruction> getInstructions() {
        return instructions;
    }

    // "actions" only exists pre-1.1 wire versions; 1.4 carries instructions instead.
    @Override
    public List<OFAction> getActions()throws UnsupportedOperationException {
        throw new UnsupportedOperationException("Property actions not supported in version 1.4");
    }

    @Override
    public Set<OFFlowModFlags> getFlags() {
        return flags;
    }

    @Override
    public int getImportance() {
        return importance;
    }

    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_14;
    }

    public OFFlowStatsEntry.Builder createBuilder() {
        return new BuilderWithParent(this);
    }

    // Builder seeded from an existing message: unset properties fall back to the parent.
    static class BuilderWithParent implements OFFlowStatsEntry.Builder {
        final OFFlowStatsEntryVer14 parentMessage;

        // OF message fields
        private boolean tableIdSet;
        private TableId tableId;
        private boolean durationSecSet;
        private long durationSec;
        private boolean durationNsecSet;
        private long durationNsec;
        private boolean prioritySet;
        private int priority;
        private boolean idleTimeoutSet;
        private int idleTimeout;
        private boolean hardTimeoutSet;
        private int hardTimeout;
        private boolean flagsSet;
        private Set<OFFlowModFlags> flags;
        private boolean importanceSet;
        private int importance;
        private boolean cookieSet;
        private U64 cookie;
        private boolean packetCountSet;
        private U64 packetCount;
        private boolean byteCountSet;
        private U64 byteCount;
        private boolean matchSet;
        private Match match;
        private boolean instructionsSet;
        private List<OFInstruction> instructions;

        BuilderWithParent(OFFlowStatsEntryVer14 parentMessage) {
            this.parentMessage = parentMessage;
        }

        @Override
        public TableId getTableId() {
            return tableId;
        }

        @Override
        public OFFlowStatsEntry.Builder setTableId(TableId tableId) {
            this.tableId = tableId;
            this.tableIdSet = true;
            return this;
        }

        @Override
        public long getDurationSec() {
            return durationSec;
        }

        @Override
        public OFFlowStatsEntry.Builder setDurationSec(long durationSec) {
            this.durationSec = durationSec;
            this.durationSecSet = true;
            return this;
        }

        @Override
        public long getDurationNsec() {
            return durationNsec;
        }

        @Override
        public OFFlowStatsEntry.Builder setDurationNsec(long durationNsec) {
            this.durationNsec = durationNsec;
            this.durationNsecSet = true;
            return this;
        }

        @Override
        public int getPriority() {
            return priority;
        }

        @Override
        public OFFlowStatsEntry.Builder setPriority(int priority) {
            this.priority = priority;
            this.prioritySet = true;
            return this;
        }

        @Override
        public int getIdleTimeout() {
            return idleTimeout;
        }

        @Override
        public OFFlowStatsEntry.Builder setIdleTimeout(int idleTimeout) {
            this.idleTimeout = idleTimeout;
            this.idleTimeoutSet = true;
            return this;
        }

        @Override
        public int getHardTimeout() {
            return hardTimeout;
        }

        @Override
        public OFFlowStatsEntry.Builder setHardTimeout(int hardTimeout) {
            this.hardTimeout = hardTimeout;
            this.hardTimeoutSet = true;
            return this;
        }

        @Override
        public U64 getCookie() {
            return cookie;
        }

        @Override
        public OFFlowStatsEntry.Builder setCookie(U64 cookie) {
            this.cookie = cookie;
            this.cookieSet = true;
            return this;
        }

        @Override
        public U64 getPacketCount() {
            return packetCount;
        }

        @Override
        public OFFlowStatsEntry.Builder setPacketCount(U64 packetCount) {
            this.packetCount = packetCount;
            this.packetCountSet = true;
            return this;
        }

        @Override
        public U64 getByteCount() {
            return byteCount;
        }

        @Override
        public OFFlowStatsEntry.Builder setByteCount(U64 byteCount) {
            this.byteCount = byteCount;
            this.byteCountSet = true;
            return this;
        }

        @Override
        public Match getMatch() {
            return match;
        }

        @Override
        public OFFlowStatsEntry.Builder setMatch(Match match) {
            this.match = match;
            this.matchSet = true;
            return this;
        }

        @Override
        public List<OFInstruction> getInstructions() {
            return instructions;
        }

        @Override
        public OFFlowStatsEntry.Builder setInstructions(List<OFInstruction> instructions) {
            this.instructions = instructions;
            this.instructionsSet = true;
            return this;
        }

        @Override
        public List<OFAction> getActions()throws UnsupportedOperationException {
            throw new UnsupportedOperationException("Property actions not supported in version 1.4");
        }

        @Override
        public OFFlowStatsEntry.Builder setActions(List<OFAction> actions) throws UnsupportedOperationException {
            throw new UnsupportedOperationException("Property actions not supported in version 1.4");
        }

        @Override
        public Set<OFFlowModFlags> getFlags() {
            return flags;
        }

        @Override
        public OFFlowStatsEntry.Builder setFlags(Set<OFFlowModFlags> flags) {
            this.flags = flags;
            this.flagsSet = true;
            return this;
        }

        @Override
        public int getImportance() {
            return importance;
        }

        @Override
        public OFFlowStatsEntry.Builder setImportance(int importance) {
            this.importance = importance;
            this.importanceSet = true;
            return this;
        }

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_14;
        }

        @Override
        public OFFlowStatsEntry build() {
            // For each property: use the explicitly-set value, else the parent's.
            TableId tableId = this.tableIdSet ? this.tableId : parentMessage.tableId;
            if(tableId == null)
                throw new NullPointerException("Property tableId must not be null");
            long durationSec = this.durationSecSet ? this.durationSec : parentMessage.durationSec;
            long durationNsec = this.durationNsecSet ? this.durationNsec : parentMessage.durationNsec;
            int priority = this.prioritySet ? this.priority : parentMessage.priority;
            int idleTimeout = this.idleTimeoutSet ? this.idleTimeout : parentMessage.idleTimeout;
            int hardTimeout = this.hardTimeoutSet ? this.hardTimeout : parentMessage.hardTimeout;
            Set<OFFlowModFlags> flags = this.flagsSet ? this.flags : parentMessage.flags;
            if(flags == null)
                throw new NullPointerException("Property flags must not be null");
            int importance = this.importanceSet ? this.importance : parentMessage.importance;
            U64 cookie = this.cookieSet ? this.cookie : parentMessage.cookie;
            if(cookie == null)
                throw new NullPointerException("Property cookie must not be null");
            U64 packetCount = this.packetCountSet ? this.packetCount : parentMessage.packetCount;
            if(packetCount == null)
                throw new NullPointerException("Property packetCount must not be null");
            U64 byteCount = this.byteCountSet ? this.byteCount : parentMessage.byteCount;
            if(byteCount == null)
                throw new NullPointerException("Property byteCount must not be null");
            Match match = this.matchSet ? this.match : parentMessage.match;
            if(match == null)
                throw new NullPointerException("Property match must not be null");
            List<OFInstruction> instructions = this.instructionsSet ? this.instructions : parentMessage.instructions;
            if(instructions == null)
                throw new NullPointerException("Property instructions must not be null");

            return new OFFlowStatsEntryVer14(
                tableId, durationSec, durationNsec, priority, idleTimeout, hardTimeout, flags, importance, cookie, packetCount, byteCount, match, instructions
            );
        }
    }

    // Standalone builder: unset properties fall back to the DEFAULT_* constants.
    static class Builder implements OFFlowStatsEntry.Builder {
        // OF message fields
        private boolean tableIdSet;
        private TableId tableId;
        private boolean durationSecSet;
        private long durationSec;
        private boolean durationNsecSet;
        private long durationNsec;
        private boolean prioritySet;
        private int priority;
        private boolean idleTimeoutSet;
        private int idleTimeout;
        private boolean hardTimeoutSet;
        private int hardTimeout;
        private boolean flagsSet;
        private Set<OFFlowModFlags> flags;
        private boolean importanceSet;
        private int importance;
        private boolean cookieSet;
        private U64 cookie;
        private boolean packetCountSet;
        private U64 packetCount;
        private boolean byteCountSet;
        private U64 byteCount;
        private boolean matchSet;
        private Match match;
        private boolean instructionsSet;
        private List<OFInstruction> instructions;

        @Override
        public TableId getTableId() {
            return tableId;
        }

        @Override
        public OFFlowStatsEntry.Builder setTableId(TableId tableId) {
            this.tableId = tableId;
            this.tableIdSet = true;
            return this;
        }

        @Override
        public long getDurationSec() {
            return durationSec;
        }

        @Override
        public OFFlowStatsEntry.Builder setDurationSec(long durationSec) {
            this.durationSec = durationSec;
            this.durationSecSet = true;
            return this;
        }

        @Override
        public long getDurationNsec() {
            return durationNsec;
        }

        @Override
        public OFFlowStatsEntry.Builder setDurationNsec(long durationNsec) {
            this.durationNsec = durationNsec;
            this.durationNsecSet = true;
            return this;
        }

        @Override
        public int getPriority() {
            return priority;
        }

        @Override
        public OFFlowStatsEntry.Builder setPriority(int priority) {
            this.priority = priority;
            this.prioritySet = true;
            return this;
        }

        @Override
        public int getIdleTimeout() {
            return idleTimeout;
        }

        @Override
        public OFFlowStatsEntry.Builder setIdleTimeout(int idleTimeout) {
            this.idleTimeout = idleTimeout;
            this.idleTimeoutSet = true;
            return this;
        }

        @Override
        public int getHardTimeout() {
            return hardTimeout;
        }

        @Override
        public OFFlowStatsEntry.Builder setHardTimeout(int hardTimeout) {
            this.hardTimeout = hardTimeout;
            this.hardTimeoutSet = true;
            return this;
        }

        @Override
        public U64 getCookie() {
            return cookie;
        }

        @Override
        public OFFlowStatsEntry.Builder setCookie(U64 cookie) {
            this.cookie = cookie;
            this.cookieSet = true;
            return this;
        }

        @Override
        public U64 getPacketCount() {
            return packetCount;
        }

        @Override
        public OFFlowStatsEntry.Builder setPacketCount(U64 packetCount) {
            this.packetCount = packetCount;
            this.packetCountSet = true;
            return this;
        }

        @Override
        public U64 getByteCount() {
            return byteCount;
        }

        @Override
        public OFFlowStatsEntry.Builder setByteCount(U64 byteCount) {
            this.byteCount = byteCount;
            this.byteCountSet = true;
            return this;
        }

        @Override
        public Match getMatch() {
            return match;
        }

        @Override
        public OFFlowStatsEntry.Builder setMatch(Match match) {
            this.match = match;
            this.matchSet = true;
            return this;
        }

        @Override
        public List<OFInstruction> getInstructions() {
            return instructions;
        }

        @Override
        public OFFlowStatsEntry.Builder setInstructions(List<OFInstruction> instructions) {
            this.instructions = instructions;
            this.instructionsSet = true;
            return this;
        }

        @Override
        public List<OFAction> getActions()throws UnsupportedOperationException {
            throw new UnsupportedOperationException("Property actions not supported in version 1.4");
        }

        @Override
        public OFFlowStatsEntry.Builder setActions(List<OFAction> actions) throws UnsupportedOperationException {
            throw new UnsupportedOperationException("Property actions not supported in version 1.4");
        }

        @Override
        public Set<OFFlowModFlags> getFlags() {
            return flags;
        }

        @Override
        public OFFlowStatsEntry.Builder setFlags(Set<OFFlowModFlags> flags) {
            this.flags = flags;
            this.flagsSet = true;
            return this;
        }

        @Override
        public int getImportance() {
            return importance;
        }

        @Override
        public OFFlowStatsEntry.Builder setImportance(int importance) {
            this.importance = importance;
            this.importanceSet = true;
            return this;
        }

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_14;
        }

        @Override
        public OFFlowStatsEntry build() {
            // For each property: use the explicitly-set value, else the default.
            TableId tableId = this.tableIdSet ? this.tableId : DEFAULT_TABLE_ID;
            if(tableId == null)
                throw new NullPointerException("Property tableId must not be null");
            long durationSec = this.durationSecSet ? this.durationSec : DEFAULT_DURATION_SEC;
            long durationNsec = this.durationNsecSet ? this.durationNsec : DEFAULT_DURATION_NSEC;
            int priority = this.prioritySet ? this.priority : DEFAULT_PRIORITY;
            int idleTimeout = this.idleTimeoutSet ? this.idleTimeout : DEFAULT_IDLE_TIMEOUT;
            int hardTimeout = this.hardTimeoutSet ? this.hardTimeout : DEFAULT_HARD_TIMEOUT;
            Set<OFFlowModFlags> flags = this.flagsSet ? this.flags : DEFAULT_FLAGS;
            if(flags == null)
                throw new NullPointerException("Property flags must not be null");
            int importance = this.importanceSet ? this.importance : DEFAULT_IMPORTANCE;
            U64 cookie = this.cookieSet ? this.cookie : DEFAULT_COOKIE;
            if(cookie == null)
                throw new NullPointerException("Property cookie must not be null");
            U64 packetCount = this.packetCountSet ? this.packetCount : DEFAULT_PACKET_COUNT;
            if(packetCount == null)
                throw new NullPointerException("Property packetCount must not be null");
            U64 byteCount = this.byteCountSet ? this.byteCount : DEFAULT_BYTE_COUNT;
            if(byteCount == null)
                throw new NullPointerException("Property byteCount must not be null");
            Match match = this.matchSet ? this.match : DEFAULT_MATCH;
            if(match == null)
                throw new NullPointerException("Property match must not be null");
            List<OFInstruction> instructions = this.instructionsSet ? this.instructions : DEFAULT_INSTRUCTIONS;
            if(instructions == null)
                throw new NullPointerException("Property instructions must not be null");

            return new OFFlowStatsEntryVer14(
                tableId, durationSec, durationNsec, priority, idleTimeout, hardTimeout, flags, importance, cookie, packetCount, byteCount, match, instructions
            );
        }
    }

    final static Reader READER = new Reader();

    // Deserializes one entry from the wire; returns null when the buffer is short.
    static class Reader implements OFMessageReader<OFFlowStatsEntry> {
        @Override
        public OFFlowStatsEntry readFrom(ChannelBuffer bb) throws OFParseError {
            int start = bb.readerIndex();
            int length = U16.f(bb.readShort());
            if(length < MINIMUM_LENGTH)
                throw new OFParseError("Wrong length: Expected to be >= " + MINIMUM_LENGTH + ", was: " + length);
            if(bb.readableBytes() + (bb.readerIndex() - start) < length) {
                // Buffer does not have all data yet
                bb.readerIndex(start);
                return null;
            }
            if(logger.isTraceEnabled())
                logger.trace("readFrom - length={}", length);
            TableId tableId = TableId.readByte(bb);
            // pad: 1 bytes
            bb.skipBytes(1);
            long durationSec = U32.f(bb.readInt());
            long durationNsec = U32.f(bb.readInt());
            int priority = U16.f(bb.readShort());
            int idleTimeout = U16.f(bb.readShort());
            int hardTimeout = U16.f(bb.readShort());
            Set<OFFlowModFlags> flags = OFFlowModFlagsSerializerVer14.readFrom(bb);
            int importance = U16.f(bb.readShort());
            // pad: 2 bytes
            bb.skipBytes(2);
            U64 cookie = U64.ofRaw(bb.readLong());
            U64 packetCount = U64.ofRaw(bb.readLong());
            U64 byteCount = U64.ofRaw(bb.readLong());
            Match match = ChannelUtilsVer14.readOFMatch(bb);
            // Instructions fill whatever remains of the declared entry length.
            List<OFInstruction> instructions = ChannelUtils.readList(bb, length - (bb.readerIndex() - start), OFInstructionVer14.READER);

            OFFlowStatsEntryVer14 flowStatsEntryVer14 = new OFFlowStatsEntryVer14(
                tableId, durationSec, durationNsec, priority, idleTimeout, hardTimeout, flags, importance, cookie, packetCount, byteCount, match, instructions
            );
            if(logger.isTraceEnabled())
                logger.trace("readFrom - read={}", flowStatsEntryVer14);
            return flowStatsEntryVer14;
        }
    }

    public void putTo(PrimitiveSink sink) {
        FUNNEL.funnel(this, sink);
    }

    final static OFFlowStatsEntryVer14Funnel FUNNEL = new OFFlowStatsEntryVer14Funnel();

    // Feeds every field (except length/padding) into a Guava PrimitiveSink.
    static class OFFlowStatsEntryVer14Funnel implements Funnel<OFFlowStatsEntryVer14> {
        private static final long serialVersionUID = 1L;
        @Override
        public void funnel(OFFlowStatsEntryVer14 message, PrimitiveSink sink) {
            // FIXME: skip funnel of length
            message.tableId.putTo(sink);
            // skip pad (1 bytes)
            sink.putLong(message.durationSec);
            sink.putLong(message.durationNsec);
            sink.putInt(message.priority);
            sink.putInt(message.idleTimeout);
            sink.putInt(message.hardTimeout);
            OFFlowModFlagsSerializerVer14.putTo(message.flags, sink);
            sink.putInt(message.importance);
            // skip pad (2 bytes)
            message.cookie.putTo(sink);
            message.packetCount.putTo(sink);
            message.byteCount.putTo(sink);
            message.match.putTo(sink);
            FunnelUtils.putList(message.instructions, sink);
        }
    }

    public void writeTo(ChannelBuffer bb) {
        WRITER.write(bb, this);
    }

    final static Writer WRITER = new Writer();

    // Serializes one entry; the 16-bit length prefix is back-patched after writing.
    static class Writer implements OFMessageWriter<OFFlowStatsEntryVer14> {
        @Override
        public void write(ChannelBuffer bb, OFFlowStatsEntryVer14 message) {
            int startIndex = bb.writerIndex();
            // length is length of variable message, will be updated at the end
            int lengthIndex = bb.writerIndex();
            bb.writeShort(U16.t(0));
            message.tableId.writeByte(bb);
            // pad: 1 bytes
            bb.writeZero(1);
            bb.writeInt(U32.t(message.durationSec));
            bb.writeInt(U32.t(message.durationNsec));
            bb.writeShort(U16.t(message.priority));
            bb.writeShort(U16.t(message.idleTimeout));
            bb.writeShort(U16.t(message.hardTimeout));
            OFFlowModFlagsSerializerVer14.writeTo(bb, message.flags);
            bb.writeShort(U16.t(message.importance));
            // pad: 2 bytes
            bb.writeZero(2);
            bb.writeLong(message.cookie.getValue());
            bb.writeLong(message.packetCount.getValue());
            bb.writeLong(message.byteCount.getValue());
            message.match.writeTo(bb);
            ChannelUtils.writeList(bb, message.instructions);

            // update length field
            int length = bb.writerIndex() - startIndex;
bb.setShort(lengthIndex, length); } } // end of Writer.write / Writer (begun above)

    /** Renders every field for logging and debugging. */
    @Override
    public String toString() {
        StringBuilder b = new StringBuilder("OFFlowStatsEntryVer14(");
        b.append("tableId=").append(tableId);
        b.append(", ");
        b.append("durationSec=").append(durationSec);
        b.append(", ");
        b.append("durationNsec=").append(durationNsec);
        b.append(", ");
        b.append("priority=").append(priority);
        b.append(", ");
        b.append("idleTimeout=").append(idleTimeout);
        b.append(", ");
        b.append("hardTimeout=").append(hardTimeout);
        b.append(", ");
        b.append("flags=").append(flags);
        b.append(", ");
        b.append("importance=").append(importance);
        b.append(", ");
        b.append("cookie=").append(cookie);
        b.append(", ");
        b.append("packetCount=").append(packetCount);
        b.append(", ");
        b.append("byteCount=").append(byteCount);
        b.append(", ");
        b.append("match=").append(match);
        b.append(", ");
        b.append("instructions=").append(instructions);
        b.append(")");
        return b.toString();
    }

    /** Field-by-field equality over all thirteen properties. */
    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        OFFlowStatsEntryVer14 other = (OFFlowStatsEntryVer14) obj;

        if (tableId == null) {
            if (other.tableId != null)
                return false;
        } else if (!tableId.equals(other.tableId))
            return false;
        if( durationSec != other.durationSec)
            return false;
        if( durationNsec != other.durationNsec)
            return false;
        if( priority != other.priority)
            return false;
        if( idleTimeout != other.idleTimeout)
            return false;
        if( hardTimeout != other.hardTimeout)
            return false;
        if (flags == null) {
            if (other.flags != null)
                return false;
        } else if (!flags.equals(other.flags))
            return false;
        if( importance != other.importance)
            return false;
        if (cookie == null) {
            if (other.cookie != null)
                return false;
        } else if (!cookie.equals(other.cookie))
            return false;
        if (packetCount == null) {
            if (other.packetCount != null)
                return false;
        } else if (!packetCount.equals(other.packetCount))
            return false;
        if (byteCount == null) {
            if (other.byteCount != null)
                return false;
        } else if (!byteCount.equals(other.byteCount))
            return false;
        if (match == null) {
            if (other.match != null)
                return false;
        } else if (!match.equals(other.match))
            return false;
        if (instructions == null) {
            if (other.instructions != null)
                return false;
        } else if (!instructions.equals(other.instructions))
            return false;
        return true;
    }

    /**
     * Hash over all fields compared by {@link #equals(Object)}, using the
     * standard {@code 31 * result + fieldHash} accumulation.
     */
    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;

        result = prime * result + ((tableId == null) ? 0 : tableId.hashCode());
        // FIX: the generated code wrote "result = prime * (int)(...)" for the two
        // long fields, discarding the accumulator (tableId's and durationSec's
        // contributions were lost). Accumulate them like every other field.
        result = prime * result + (int) (durationSec ^ (durationSec >>> 32));
        result = prime * result + (int) (durationNsec ^ (durationNsec >>> 32));
        result = prime * result + priority;
        result = prime * result + idleTimeout;
        result = prime * result + hardTimeout;
        result = prime * result + ((flags == null) ? 0 : flags.hashCode());
        result = prime * result + importance;
        result = prime * result + ((cookie == null) ? 0 : cookie.hashCode());
        result = prime * result + ((packetCount == null) ? 0 : packetCount.hashCode());
        result = prime * result + ((byteCount == null) ? 0 : byteCount.hashCode());
        result = prime * result + ((match == null) ? 0 : match.hashCode());
        result = prime * result + ((instructions == null) ? 0 : instructions.hashCode());
        return result;
    }
}
/*
 * Copyright (c) 2011-2018, Meituan Dianping. All Rights Reserved.
 *
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.dianping.cat.report.page.heartbeat;

import javax.servlet.ServletException;
import java.io.IOException;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import org.unidal.lookup.annotation.Inject;
import org.unidal.lookup.util.StringUtils;
import org.unidal.web.mvc.PageHandler;
import org.unidal.web.mvc.annotation.InboundActionMeta;
import org.unidal.web.mvc.annotation.OutboundActionMeta;
import org.unidal.web.mvc.annotation.PayloadMeta;

import com.dianping.cat.Cat;
import com.dianping.cat.Constants;
import com.dianping.cat.consumer.heartbeat.HeartbeatAnalyzer;
import com.dianping.cat.consumer.heartbeat.model.entity.HeartbeatReport;
import com.dianping.cat.consumer.heartbeat.model.entity.Machine;
import com.dianping.cat.consumer.heartbeat.model.entity.Period;
import com.dianping.cat.helper.SortHelper;
import com.dianping.cat.helper.TimeHelper;
import com.dianping.cat.mvc.PayloadNormalizer;
import com.dianping.cat.report.ReportPage;
import com.dianping.cat.report.graph.svg.GraphBuilder;
import com.dianping.cat.report.page.heartbeat.config.HeartbeatDisplayPolicyManager;
import com.dianping.cat.report.page.heartbeat.service.HeartbeatReportService;
import com.dianping.cat.report.service.ModelPeriod;
import com.dianping.cat.report.service.ModelRequest;
import com.dianping.cat.report.service.ModelResponse;
import com.dianping.cat.report.service.ModelService;

/**
 * MVC page handler for the heartbeat report page: renders the current-hour
 * SVG heartbeat graphs (VIEW) and the historical graphs (HISTORY /
 * PART_HISTORY) for a domain/ip selected by the request payload.
 */
public class Handler implements PageHandler<Context> {
	@Inject
	private GraphBuilder m_builder;

	@Inject
	private HistoryGraphs m_historyGraphs;

	@Inject
	private JspViewer m_jspViewer;

	@Inject
	private HeartbeatReportService m_reportService;

	@Inject(type = ModelService.class, value = HeartbeatAnalyzer.ID)
	private ModelService<HeartbeatReport> m_service;

	@Inject
	private PayloadNormalizer m_normalizePayload;

	@Inject
	private HeartbeatDisplayPolicyManager m_manager;

	// Copies the generated SVG graph data into the model; tolerates a null
	// graph (e.g. when showReport failed and returned null).
	private void buildHeartbeatGraphInfo(Model model, HeartbeatSvgGraph displayHeartbeat) {
		if (displayHeartbeat == null) {
			return;
		}
		model.setResult(displayHeartbeat);
		model.setExtensionGraph(displayHeartbeat.getExtensionGraph());
	}

	// Loads the report for the last hour of the selected day, resolves a
	// concrete ip when the payload asks for "All", and delegates rendering
	// of the history graphs.
	private void buildHistoryGraph(Model model, Payload payload) {
		// [date + 23h, date + 24h) — the final hour of the selected day
		Date start = new Date(payload.getDate() + 23 * TimeHelper.ONE_HOUR);
		Date end = new Date(payload.getDate() + 24 * TimeHelper.ONE_HOUR);
		HeartbeatReport report = m_reportService.queryReport(payload.getDomain(), start, end);
		List<String> extensionGroups = m_manager.sortGroupNames(extractExtensionGroups(report));

		model.setExtensionGroups(extensionGroups);
		model.setReport(report);

		if (StringUtils.isEmpty(payload.getIpAddress()) || Constants.ALL.equals(payload.getIpAddress())) {
			// no explicit ip selected — pick one from the report
			String ipAddress = getIpAddress(report, payload);

			payload.setIpAddress(ipAddress);
			payload.setRealIp(ipAddress);
		}
		m_historyGraphs.showHeartBeatGraph(model, payload);
	}

	// Collects the distinct extension-group names present anywhere in the report.
	private Set<String> extractExtensionGroups(HeartbeatReport report) {
		Set<String> groupNames = new HashSet<String>();

		for (Machine machine : report.getMachines().values()) {
			for (Period period : machine.getPeriods()) {
				Set<String> tmpGroupNames = period.getExtensions().keySet();

				groupNames.addAll(tmpGroupNames);
			}
		}
		return groupNames;
	}

	// Returns the payload's real ip if set; otherwise the first ip of the
	// report in sorted order (may return null/empty when the report has no ips).
	private String getIpAddress(HeartbeatReport report, Payload payload) {
		Set<String> ips = report.getIps();
		String ip = payload.getRealIp();

		if ((ip == null || ip.length() == 0) && !ips.isEmpty()) {
			ip = SortHelper.sortIpAddress(ips).get(0);
		}
		return ip;
	}

	// Fetches the report via the injected heartbeat model service.
	// NOTE(review): the "period" parameter is accepted but never used here,
	// and the exception message spells "eligible" as "eligable" (left as-is:
	// it is a runtime string).
	private HeartbeatReport getReport(String domain, String ipAddress, long date, ModelPeriod period) {
		ModelRequest request = new ModelRequest(domain, date) //
					.setProperty("ip", ipAddress);

		if (m_service.isEligable(request)) {
			ModelResponse<HeartbeatReport> response = m_service.invoke(request);
			HeartbeatReport report = response.getModel();

			return report;
		} else {
			throw new RuntimeException("Internal error: no eligable ip service registered for " + request + "!");
		}
	}

	@Override
	@PayloadMeta(Payload.class)
	@InboundActionMeta(name = "h")
	public void handleInbound(Context ctx) throws ServletException, IOException {
		// display only, no action here
	}

	@Override
	@OutboundActionMeta(name = "h")
	public void handleOutbound(Context ctx) throws ServletException, IOException {
		Model model = new Model(ctx);
		Payload payload = ctx.getPayload();
		HeartbeatSvgGraph heartbeat = null;

		normalize(model, payload);
		// NOTE(review): HISTORY and PART_HISTORY currently take the same path.
		switch (payload.getAction()) {
		case VIEW:
			heartbeat = showReport(model, payload);
			buildHeartbeatGraphInfo(model, heartbeat);
			break;
		case HISTORY:
			buildHistoryGraph(model, payload);
			break;
		case PART_HISTORY:
			buildHistoryGraph(model, payload);
			break;
		}
		m_jspViewer.view(ctx, model);
	}

	// Normalizes the payload/model pair before dispatch: resolves the ip
	// selection, forces day-granularity reports, and defaults the query type.
	// Mutates both arguments; must run before showReport/buildHistoryGraph.
	private void normalize(Model model, Payload payload) {
		String ipAddress = payload.getIpAddress();

		model.setAction(payload.getAction());
		model.setPage(ReportPage.HEARTBEAT);
		if (StringUtils.isEmpty(ipAddress) || ipAddress.equals(Constants.ALL)) {
			model.setIpAddress(Constants.ALL);
		} else {
			payload.setRealIp(payload.getIpAddress());
			model.setIpAddress(payload.getRealIp());
		}
		m_normalizePayload.normalize(model, payload);

		String reportType = payload.getReportType();

		// heartbeat only supports daily granularity — coerce month/week to day
		if ("month".equals(reportType) || "week".equals(reportType)) {
			payload.setReportType("day");
		}

		String queryType = payload.getType();

		if (queryType == null || queryType.trim().length() == 0) {
			payload.setType("frameworkThread");
		}
	}

	// Builds the current report's SVG graph; on any failure the error is
	// logged to CAT, recorded on the model, and null is returned.
	private HeartbeatSvgGraph showReport(Model model, Payload payload) {
		try {
			HeartbeatReport report = getReport(payload.getDomain(), payload.getIpAddress(), payload.getDate(),
						payload.getPeriod());
			model.setReport(report);

			if (report != null) {
				String displayIp = getIpAddress(report, payload);

				payload.setRealIp(displayIp);
				return new HeartbeatSvgGraph(m_builder, m_manager).display(report, displayIp);
			}
		} catch (Throwable e) {
			Cat.logError(e);
			model.setException(e);
		}
		return null;
	}

	// the detail order of heartbeat is:name min max sum sum2 count_in_minutes
	public enum DetailOrder {
		NAME, MIN, MAX, SUM, SUM2, COUNT_IN_MINUTES
	}
}
/* ###
 * IP: GHIDRA
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package db;

import java.io.IOException;

import db.buffers.DataBuffer;

/**
 * <code>ShortField</code> provides a wrapper for 2-byte signed short data
 * which is read or written to a Record.
 */
public final class ShortField extends PrimitiveField {

	/**
	 * Minimum short field value
	 */
	public static final ShortField MIN_VALUE = new ShortField(Short.MIN_VALUE, true);

	/**
	 * Maximum short field value
	 */
	public static final ShortField MAX_VALUE = new ShortField(Short.MAX_VALUE, true);

	/**
	 * Zero short field value
	 */
	public static final ShortField ZERO_VALUE = new ShortField((short) 0, true);

	/**
	 * Instance intended for defining a {@link Table} {@link Schema}
	 */
	public static final ShortField INSTANCE = ZERO_VALUE;

	private short value;

	/**
	 * Construct a short field with an initial value of 0.
	 */
	public ShortField() {
	}

	/**
	 * Construct a short field with an initial value of s.
	 * @param s initial value
	 */
	public ShortField(short s) {
		this(s, false);
	}

	/**
	 * Construct a short field with an initial value of s.
	 * @param s initial value
	 * @param immutable true if field value is immutable
	 */
	ShortField(short s, boolean immutable) {
		super(immutable);
		value = s;
	}

	@Override
	void setNull() {
		super.setNull();
		value = 0;
	}

	@Override
	public short getShortValue() {
		return value;
	}

	@Override
	public void setShortValue(short newValue) {
		updatingPrimitiveValue();
		value = newValue;
	}

	@Override
	int length() {
		// Fixed-width 2-byte encoding.
		return 2;
	}

	@Override
	int write(Buffer buf, int offset) throws IOException {
		return buf.putShort(offset, value);
	}

	@Override
	int read(Buffer buf, int offset) throws IOException {
		updatingPrimitiveValue();
		value = buf.getShort(offset);
		return offset + 2;
	}

	@Override
	int readLength(Buffer buf, int offset) throws IOException {
		return 2;
	}

	@Override
	byte getFieldType() {
		return SHORT_TYPE;
	}

	@Override
	public String getValueAsString() {
		// Mask with 0xffff so negative values render as a 4-digit unsigned hex string.
		return "0x" + Integer.toHexString(value & 0xffff);
	}

	@Override
	public boolean equals(Object obj) {
		return (obj instanceof ShortField) && ((ShortField) obj).value == value;
	}

	@Override
	public int compareTo(Field o) {
		ShortField other = (ShortField) o;
		if (value < other.value) {
			return -1;
		}
		if (value > other.value) {
			return 1;
		}
		return 0;
	}

	@Override
	int compareTo(DataBuffer buffer, int offset) {
		short stored = buffer.getShort(offset);
		if (value < stored) {
			return -1;
		}
		if (value > stored) {
			return 1;
		}
		return 0;
	}

	@Override
	public ShortField copyField() {
		// A null field must be copied as a distinct null instance.
		if (!isNull()) {
			return new ShortField((short) getLongValue());
		}
		ShortField nullCopy = new ShortField();
		nullCopy.setNull();
		return nullCopy;
	}

	@Override
	public ShortField newField() {
		return new ShortField();
	}

	@Override
	public long getLongValue() {
		return value;
	}

	@Override
	public void setLongValue(long value) {
		setShortValue((short) value);
	}

	@Override
	public byte[] getBinaryData() {
		// Big-endian: high byte first.
		byte[] data = new byte[2];
		data[0] = (byte) (value >> 8);
		data[1] = (byte) value;
		return data;
	}

	@Override
	public void setBinaryData(byte[] bytes) {
		if (bytes.length != 2) {
			throw new IllegalFieldAccessException();
		}
		updatingPrimitiveValue();
		value = (short) (((bytes[0] & 0xff) << 8) | (bytes[1] & 0xff));
	}

	@Override
	public int hashCode() {
		return value;
	}

	@Override
	ShortField getMinValue() {
		return MIN_VALUE;
	}

	@Override
	ShortField getMaxValue() {
		return MAX_VALUE;
	}
}
package ru.lanbilling.webservice.wsdl;

import java.math.BigInteger;
import javax.annotation.Generated;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;


/**
 * <p>Java class for soapUsergroup complex type.
 *
 * <p>NOTE: generated by JAXB (xjc, see the {@code @Generated} annotations below),
 * apparently from the LANBilling service WSDL. Do not hand-edit; regenerate from
 * the WSDL instead.
 *
 * <p>The following schema fragment specifies the expected content contained within this class.
 *
 * <pre>
 * &lt;complexType name="soapUsergroup"&gt;
 *   &lt;complexContent&gt;
 *     &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"&gt;
 *       &lt;sequence&gt;
 *         &lt;element name="groupid" type="{http://www.w3.org/2001/XMLSchema}long" minOccurs="0"/&gt;
 *         &lt;element name="promiseallow" type="{http://www.w3.org/2001/XMLSchema}long" minOccurs="0"/&gt;
 *         &lt;element name="promiserent" type="{http://www.w3.org/2001/XMLSchema}long" minOccurs="0"/&gt;
 *         &lt;element name="promisetill" type="{http://www.w3.org/2001/XMLSchema}long" minOccurs="0"/&gt;
 *         &lt;element name="curid" type="{http://www.w3.org/2001/XMLSchema}long" minOccurs="0"/&gt;
 *         &lt;element name="promiseondays" type="{http://www.w3.org/2001/XMLSchema}long" minOccurs="0"/&gt;
 *         &lt;element name="promiseblockdays" type="{http://www.w3.org/2001/XMLSchema}long" minOccurs="0"/&gt;
 *         &lt;element name="promisemax" type="{http://www.w3.org/2001/XMLSchema}double" minOccurs="0"/&gt;
 *         &lt;element name="promisemin" type="{http://www.w3.org/2001/XMLSchema}double" minOccurs="0"/&gt;
 *         &lt;element name="promiselimit" type="{http://www.w3.org/2001/XMLSchema}double" minOccurs="0"/&gt;
 *         &lt;element name="blockamount" type="{http://www.w3.org/2001/XMLSchema}double" minOccurs="0"/&gt;
 *         &lt;element name="blockdurationdebtor" type="{http://www.w3.org/2001/XMLSchema}unsignedLong" minOccurs="0"/&gt;
 *         &lt;element name="blockdurationdenouncement" type="{http://www.w3.org/2001/XMLSchema}unsignedLong" minOccurs="0"/&gt;
 *         &lt;element name="name" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/&gt;
 *         &lt;element name="description" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/&gt;
 *       &lt;/sequence&gt;
 *     &lt;/restriction&gt;
 *   &lt;/complexContent&gt;
 * &lt;/complexType&gt;
 * </pre>
 *
 *
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "soapUsergroup", propOrder = {
    "groupid",
    "promiseallow",
    "promiserent",
    "promisetill",
    "curid",
    "promiseondays",
    "promiseblockdays",
    "promisemax",
    "promisemin",
    "promiselimit",
    "blockamount",
    "blockdurationdebtor",
    "blockdurationdenouncement",
    "name",
    "description"
})
@Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
public class SoapUsergroup {

    @XmlElement(defaultValue = "-1")
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    protected Long groupid;
    @XmlElement(defaultValue = "0")
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    protected Long promiseallow;
    @XmlElement(defaultValue = "0")
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    protected Long promiserent;
    @XmlElement(defaultValue = "0")
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    protected Long promisetill;
    @XmlElement(defaultValue = "0")
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    protected Long curid;
    @XmlElement(defaultValue = "0")
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    protected Long promiseondays;
    @XmlElement(defaultValue = "0")
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    protected Long promiseblockdays;
    @XmlElement(defaultValue = "0.000000")
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    protected Double promisemax;
    @XmlElement(defaultValue = "0.000000")
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    protected Double promisemin;
    @XmlElement(defaultValue = "0.000000")
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    protected Double promiselimit;
    @XmlElement(defaultValue = "-1.000000")
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    protected Double blockamount;
    @XmlElement(defaultValue = "0")
    @XmlSchemaType(name = "unsignedLong")
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    protected BigInteger blockdurationdebtor;
    @XmlElement(defaultValue = "0")
    @XmlSchemaType(name = "unsignedLong")
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    protected BigInteger blockdurationdenouncement;
    @XmlElement(defaultValue = "")
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    protected String name;
    @XmlElement(defaultValue = "")
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    protected String description;

    /**
     * Gets the value of the groupid property.
     *
     * @return
     *     possible object is
     *     {@link Long }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    public Long getGroupid() {
        return groupid;
    }

    /**
     * Sets the value of the groupid property.
     *
     * @param value
     *     allowed object is
     *     {@link Long }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    public void setGroupid(Long value) {
        this.groupid = value;
    }

    /**
     * Gets the value of the promiseallow property.
     *
     * @return
     *     possible object is
     *     {@link Long }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    public Long getPromiseallow() {
        return promiseallow;
    }

    /**
     * Sets the value of the promiseallow property.
     *
     * @param value
     *     allowed object is
     *     {@link Long }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    public void setPromiseallow(Long value) {
        this.promiseallow = value;
    }

    /**
     * Gets the value of the promiserent property.
     *
     * @return
     *     possible object is
     *     {@link Long }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    public Long getPromiserent() {
        return promiserent;
    }

    /**
     * Sets the value of the promiserent property.
     *
     * @param value
     *     allowed object is
     *     {@link Long }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    public void setPromiserent(Long value) {
        this.promiserent = value;
    }

    /**
     * Gets the value of the promisetill property.
     *
     * @return
     *     possible object is
     *     {@link Long }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    public Long getPromisetill() {
        return promisetill;
    }

    /**
     * Sets the value of the promisetill property.
     *
     * @param value
     *     allowed object is
     *     {@link Long }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    public void setPromisetill(Long value) {
        this.promisetill = value;
    }

    /**
     * Gets the value of the curid property.
     *
     * @return
     *     possible object is
     *     {@link Long }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    public Long getCurid() {
        return curid;
    }

    /**
     * Sets the value of the curid property.
     *
     * @param value
     *     allowed object is
     *     {@link Long }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    public void setCurid(Long value) {
        this.curid = value;
    }

    /**
     * Gets the value of the promiseondays property.
     *
     * @return
     *     possible object is
     *     {@link Long }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    public Long getPromiseondays() {
        return promiseondays;
    }

    /**
     * Sets the value of the promiseondays property.
     *
     * @param value
     *     allowed object is
     *     {@link Long }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    public void setPromiseondays(Long value) {
        this.promiseondays = value;
    }

    /**
     * Gets the value of the promiseblockdays property.
     *
     * @return
     *     possible object is
     *     {@link Long }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    public Long getPromiseblockdays() {
        return promiseblockdays;
    }

    /**
     * Sets the value of the promiseblockdays property.
     *
     * @param value
     *     allowed object is
     *     {@link Long }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    public void setPromiseblockdays(Long value) {
        this.promiseblockdays = value;
    }

    /**
     * Gets the value of the promisemax property.
     *
     * @return
     *     possible object is
     *     {@link Double }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    public Double getPromisemax() {
        return promisemax;
    }

    /**
     * Sets the value of the promisemax property.
     *
     * @param value
     *     allowed object is
     *     {@link Double }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    public void setPromisemax(Double value) {
        this.promisemax = value;
    }

    /**
     * Gets the value of the promisemin property.
     *
     * @return
     *     possible object is
     *     {@link Double }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    public Double getPromisemin() {
        return promisemin;
    }

    /**
     * Sets the value of the promisemin property.
     *
     * @param value
     *     allowed object is
     *     {@link Double }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    public void setPromisemin(Double value) {
        this.promisemin = value;
    }

    /**
     * Gets the value of the promiselimit property.
     *
     * @return
     *     possible object is
     *     {@link Double }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    public Double getPromiselimit() {
        return promiselimit;
    }

    /**
     * Sets the value of the promiselimit property.
     *
     * @param value
     *     allowed object is
     *     {@link Double }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    public void setPromiselimit(Double value) {
        this.promiselimit = value;
    }

    /**
     * Gets the value of the blockamount property.
     *
     * @return
     *     possible object is
     *     {@link Double }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    public Double getBlockamount() {
        return blockamount;
    }

    /**
     * Sets the value of the blockamount property.
     *
     * @param value
     *     allowed object is
     *     {@link Double }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    public void setBlockamount(Double value) {
        this.blockamount = value;
    }

    /**
     * Gets the value of the blockdurationdebtor property.
     *
     * @return
     *     possible object is
     *     {@link BigInteger }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    public BigInteger getBlockdurationdebtor() {
        return blockdurationdebtor;
    }

    /**
     * Sets the value of the blockdurationdebtor property.
     *
     * @param value
     *     allowed object is
     *     {@link BigInteger }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    public void setBlockdurationdebtor(BigInteger value) {
        this.blockdurationdebtor = value;
    }

    /**
     * Gets the value of the blockdurationdenouncement property.
     *
     * @return
     *     possible object is
     *     {@link BigInteger }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    public BigInteger getBlockdurationdenouncement() {
        return blockdurationdenouncement;
    }

    /**
     * Sets the value of the blockdurationdenouncement property.
     *
     * @param value
     *     allowed object is
     *     {@link BigInteger }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    public void setBlockdurationdenouncement(BigInteger value) {
        this.blockdurationdenouncement = value;
    }

    /**
     * Gets the value of the name property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    public String getName() {
        return name;
    }

    /**
     * Sets the value of the name property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    public void setName(String value) {
        this.name = value;
    }

    /**
     * Gets the value of the description property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    public String getDescription() {
        return description;
    }

    /**
     * Sets the value of the description property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    public void setDescription(String value) {
        this.description = value;
    }

}
//********************************************************************
//  Keyboard.java       Author: Lewis and Loftus
//
//  Facilitates keyboard input by abstracting details about input
//  parsing, conversions, and exception handling.
//
//  On a parse failure each read method records an error (see the
//  error handling section) and returns a sentinel value (MIN_VALUE,
//  NaN, false or null) instead of throwing.
//********************************************************************

import java.io.*;
import java.util.*;

public class Keyboard
{
   //*************  Error Handling Section  **************************

   private static boolean printErrors = true;

   private static int errorCount = 0;

   //-----------------------------------------------------------------
   //  Returns the current error count.
   //-----------------------------------------------------------------
   public static int getErrorCount()
   {
      return errorCount;
   }

   //-----------------------------------------------------------------
   //  Resets the current error count to zero. The parameter is
   //  ignored; it is retained only for compatibility with existing
   //  callers.
   //-----------------------------------------------------------------
   public static void resetErrorCount (int count)
   {
      errorCount = 0;
   }

   //-----------------------------------------------------------------
   //  Returns a boolean indicating whether input errors are
   //  currently printed to standard output.
   //-----------------------------------------------------------------
   public static boolean getPrintErrors()
   {
      return printErrors;
   }

   //-----------------------------------------------------------------
   //  Sets a boolean indicating whether input errors are to be
   //  printed to standard output.
   //-----------------------------------------------------------------
   public static void setPrintErrors (boolean flag)
   {
      printErrors = flag;
   }

   //-----------------------------------------------------------------
   //  Increments the error count and prints the error message if
   //  appropriate.
   //-----------------------------------------------------------------
   private static void error (String str)
   {
      errorCount++;
      if (printErrors)
         System.out.println (str);
   }

   //*************  Tokenized Input Stream Section  ******************

   // A token pushed back by readChar when it consumed only part of a token.
   private static String current_token = null;

   private static StringTokenizer reader;

   private static BufferedReader in = new BufferedReader
      (new InputStreamReader(System.in));

   //-----------------------------------------------------------------
   //  Gets the next input token assuming it may be on subsequent
   //  input lines.
   //-----------------------------------------------------------------
   private static String getNextToken()
   {
      return getNextToken (true);
   }

   //-----------------------------------------------------------------
   //  Gets the next input token, which may already have been read.
   //-----------------------------------------------------------------
   private static String getNextToken (boolean skip)
   {
      String token;

      if (current_token == null)
         token = getNextInputToken (skip);
      else
      {
         token = current_token;
         current_token = null;
      }

      return token;
   }

   //-----------------------------------------------------------------
   //  Gets the next token from the input, which may come from the
   //  current input line or a subsequent one. The parameter
   //  determines if subsequent lines are used. Returns null on any
   //  read failure (including end of stream).
   //-----------------------------------------------------------------
   private static String getNextInputToken (boolean skip)
   {
      final String delimiters = " \t\n\r\f";
      String token = null;

      try
      {
         if (reader == null)
            reader = new StringTokenizer
               (in.readLine(), delimiters, true);

         // Delimiters are returned as tokens; skip over them when asked to.
         while (token == null ||
                ((delimiters.indexOf (token) >= 0) && skip))
         {
            while (!reader.hasMoreTokens())
               reader = new StringTokenizer (in.readLine(),
                   delimiters,true);

            token = reader.nextToken();
         }
      }
      catch (Exception exception)
      {
         token = null;
      }

      return token;
   }

   //-----------------------------------------------------------------
   //  Returns true if there are no more tokens to read on the
   //  current input line.
   //-----------------------------------------------------------------
   public static boolean endOfLine()
   {
      return !reader.hasMoreTokens();
   }

   //*************  Reading Section  *********************************

   //-----------------------------------------------------------------
   //  Returns a string read from standard input (the remainder of
   //  the current line, delimiters included).
   //-----------------------------------------------------------------
   public static String readString()
   {
      String str;

      try
      {
         str = getNextToken(false);
         while (! endOfLine())
         {
            str = str + getNextToken(false);
         }
      }
      catch (Exception exception)
      {
         error ("Error reading String data, null value returned.");
         str = null;
      }
      return str;
   }

   //-----------------------------------------------------------------
   //  Returns a space-delimited substring (a word) read from
   //  standard input.
   //-----------------------------------------------------------------
   public static String readWord()
   {
      String token;
      try
      {
         token = getNextToken();
      }
      catch (Exception exception)
      {
         error ("Error reading String data, null value returned.");
         token = null;
      }
      return token;
   }

   //-----------------------------------------------------------------
   //  Returns a boolean read from standard input. Accepts "true" and
   //  "false" in any letter case; anything else records an error and
   //  yields false.
   //-----------------------------------------------------------------
   public static boolean readBoolean()
   {
      String token = getNextToken();
      boolean bool;
      try
      {
         // equalsIgnoreCase is locale-independent, unlike the former
         // token.toLowerCase().equals(...) (which breaks for e.g. Turkish 'I').
         if (token.equalsIgnoreCase("true"))
            bool = true;
         else if (token.equalsIgnoreCase("false"))
            bool = false;
         else
         {
            error ("Error reading boolean data, false value returned.");
            bool = false;
         }
      }
      catch (Exception exception)
      {
         error ("Error reading boolean data, false value returned.");
         bool = false;
      }
      return bool;
   }

   //-----------------------------------------------------------------
   //  Returns a character read from standard input. Any unread
   //  remainder of the token is pushed back for the next read.
   //-----------------------------------------------------------------
   public static char readChar()
   {
      String token = getNextToken(false);
      char value;

      try
      {
         if (token.length() > 1)
         {
            current_token = token.substring (1, token.length());
         }
         else
            current_token = null;
         value = token.charAt (0);
      }
      catch (Exception exception)
      {
         error ("Error reading char data, MIN_VALUE value returned.");
         value = Character.MIN_VALUE;
      }

      return value;
   }

   //-----------------------------------------------------------------
   //  Returns an integer read from standard input.
   //-----------------------------------------------------------------
   public static int readInt()
   {
      String token = getNextToken();
      int value;
      try
      {
         value = Integer.parseInt (token);
      }
      catch (Exception exception)
      {
         error ("Error reading int data, MIN_VALUE value returned.");
         value = Integer.MIN_VALUE;
      }
      return value;
   }

   //-----------------------------------------------------------------
   //  Returns a long integer read from standard input.
   //-----------------------------------------------------------------
   public static long readLong()
   {
      String token = getNextToken();
      long value;
      try
      {
         value = Long.parseLong (token);
      }
      catch (Exception exception)
      {
         error ("Error reading long data, MIN_VALUE value returned.");
         value = Long.MIN_VALUE;
      }
      return value;
   }

   //-----------------------------------------------------------------
   //  Returns a float read from standard input.
   //-----------------------------------------------------------------
   public static float readFloat()
   {
      String token = getNextToken();
      float value;
      try
      {
         // Float.parseFloat replaces the deprecated new Float(String) ctor.
         value = Float.parseFloat (token);
      }
      catch (Exception exception)
      {
         error ("Error reading float data, NaN value returned.");
         value = Float.NaN;
      }
      return value;
   }

   //-----------------------------------------------------------------
   //  Returns a double read from standard input.
   //-----------------------------------------------------------------
   public static double readDouble()
   {
      String token = getNextToken();
      double value;
      try
      {
         // Double.parseDouble replaces the deprecated new Double(String) ctor.
         value = Double.parseDouble (token);
      }
      catch (Exception exception)
      {
         error ("Error reading double data, NaN value returned.");
         value = Double.NaN;
      }
      return value;
   }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.drill.exec.udfs; import io.netty.buffer.DrillBuf; import org.apache.drill.exec.expr.DrillSimpleFunc; import org.apache.drill.exec.expr.annotations.FunctionTemplate; import org.apache.drill.exec.expr.annotations.Output; import org.apache.drill.exec.expr.annotations.Param; import org.apache.drill.exec.expr.holders.VarCharHolder; import javax.inject.Inject; public class CryptoFunctions { /** * This class returns the md2 digest of a given input string. * Usage is SELECT md2( <input string> ) FROM ... 
   */
  @FunctionTemplate(name = "md2", scope = FunctionTemplate.FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
  public static class MD2Function implements DrillSimpleFunc {

    @Param
    VarCharHolder rawInput;

    @Output
    VarCharHolder out;

    @Inject
    DrillBuf buffer;

    @Override
    public void setup() {
    }

    @Override
    public void eval() {
      // NOTE(review): the fully-qualified names inside eval() look deliberate --
      // Drill UDF bodies are conventionally written this way for the runtime's
      // source-based code generation; keep them fully qualified.
      String input = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(rawInput.start, rawInput.end, rawInput.buffer);
      String outputString = org.apache.commons.codec.digest.DigestUtils.md2Hex(input).toLowerCase();
      out.buffer = buffer;
      out.start = 0;
      // Hex digest is pure ASCII, so default-charset getBytes() is 1 byte/char.
      out.end = outputString.getBytes().length;
      buffer.setBytes(0, outputString.getBytes());
    }
  }

  /**
   * This function returns the MD5 digest of a given input string.
   * Usage is shown below:
   * select md5( 'testing' ) from (VALUES(1));
   */
  @FunctionTemplate(name = "md5", scope = FunctionTemplate.FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
  public static class MD5Function implements DrillSimpleFunc {

    @Param
    VarCharHolder rawInput;

    @Output
    VarCharHolder out;

    @Inject
    DrillBuf buffer;

    @Override
    public void setup() {
    }

    @Override
    public void eval() {
      // Decode UTF-8 input, hash it, and write the lower-case hex digest back out.
      String input = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(rawInput.start, rawInput.end, rawInput.buffer);
      String outputString = org.apache.commons.codec.digest.DigestUtils.md5Hex(input).toLowerCase();
      out.buffer = buffer;
      out.start = 0;
      out.end = outputString.getBytes().length;
      buffer.setBytes(0, outputString.getBytes());
    }
  }

  /**
   * sha(<text>) / sha1(<text>): Calculates an SHA-1 160-bit checksum for the string, as described in RFC 3174 (Secure Hash Algorithm).
   * (https://en.wikipedia.org/wiki/SHA-1) The value is returned as a string of 40 hexadecimal digits, or NULL if the argument was NULL.
   * Note that sha() and sha1() are aliases for the same function.
   *
   * > select sha1( 'testing' ) from (VALUES(1));
   */
  @FunctionTemplate(names = {"sha", "sha1"}, scope = FunctionTemplate.FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
  public static class SHA1Function implements DrillSimpleFunc {

    @Param
    VarCharHolder rawInput;

    @Output
    VarCharHolder out;

    @Inject
    DrillBuf buffer;

    @Override
    public void setup() {
    }

    @Override
    public void eval() {
      // Decode UTF-8 input, hash it, and write the 40-char hex digest back out.
      String input = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(rawInput.start, rawInput.end, rawInput.buffer);
      String sha1 = org.apache.commons.codec.digest.DigestUtils.sha1Hex(input);
      out.buffer = buffer;
      out.start = 0;
      out.end = sha1.getBytes().length;
      buffer.setBytes(0, sha1.getBytes());
    }
  }

  /**
   * sha2(<text>) / sha256(<text>): Calculates an SHA-2 256-bit checksum for the string. The value is returned as a string of hexadecimal digits,
   * or NULL if the argument was NULL. Note that sha2() and sha256() are aliases for the same function.
   * > select sha2( 'testing' ) from (VALUES(1));
   */
  @FunctionTemplate(names = {"sha256", "sha2"}, scope = FunctionTemplate.FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
  public static class SHA256Function implements DrillSimpleFunc {

    @Param
    VarCharHolder rawInput;

    @Output
    VarCharHolder out;

    @Inject
    DrillBuf buffer;

    @Override
    public void setup() {
    }

    @Override
    public void eval() {
      // Decode UTF-8 input, hash it, and write the 64-char hex digest back out.
      String input = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(rawInput.start, rawInput.end, rawInput.buffer);
      String sha2 = org.apache.commons.codec.digest.DigestUtils.sha256Hex(input);
      out.buffer = buffer;
      out.start = 0;
      out.end = sha2.getBytes().length;
      buffer.setBytes(0, sha2.getBytes());
    }
  }

  /**
   * This function returns the SHA384 digest of a given input string.
* Usage is shown below: * select sha384( 'testing' ) from (VALUES(1)); */ @FunctionTemplate(name = "sha384", scope = FunctionTemplate.FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.NULL_IF_NULL) public static class SHA384Function implements DrillSimpleFunc { @Param VarCharHolder rawInput; @Output VarCharHolder out; @Inject DrillBuf buffer; @Override public void setup() { } @Override public void eval() { String input = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(rawInput.start, rawInput.end, rawInput.buffer); String sha384 = org.apache.commons.codec.digest.DigestUtils.sha384Hex(input); out.buffer = buffer; out.start = 0; out.end = sha384.getBytes().length; buffer.setBytes(0, sha384.getBytes()); } } /** * This function returns the SHA512 digest of a given input string. * Usage is shown below: * select sha512( 'testing' ) from (VALUES(1)); */ @FunctionTemplate(name = "sha512", scope = FunctionTemplate.FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.NULL_IF_NULL) public static class SHA512Function implements DrillSimpleFunc { @Param VarCharHolder rawInput; @Output VarCharHolder out; @Inject DrillBuf buffer; @Override public void setup() { } @Override public void eval() { String input = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(rawInput.start, rawInput.end, rawInput.buffer); String sha512 = org.apache.commons.codec.digest.DigestUtils.sha512Hex(input); out.buffer = buffer; out.start = 0; out.end = sha512.getBytes().length; buffer.setBytes(0, sha512.getBytes()); } } /** * aes_encrypt()/ aes_decrypt(): implement encryption and decryption of data using the official AES (Advanced Encryption Standard) algorithm, * previously known as "Rijndael." AES_ENCRYPT() encrypts the string str using the key string key_str and returns a * binary string containing the encrypted output. 
* Usage: SELECT aes_encrypt( 'encrypted_text', 'my_secret_key' ) AS aes FROM (VALUES(1)); */ @FunctionTemplate(name = "aes_encrypt", scope = FunctionTemplate.FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.NULL_IF_NULL) public static class AESEncryptFunction implements DrillSimpleFunc { @Param VarCharHolder rawInput; @Param VarCharHolder rawKey; @Output VarCharHolder out; @Inject DrillBuf buffer; @Override public void setup() { } @Override public void eval() { String key = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(rawKey.start, rawKey.end, rawKey.buffer); String input = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(rawInput.start, rawInput.end, rawInput.buffer); String encryptedText = ""; try { byte[] keyByteArray = key.getBytes(java.nio.charset.StandardCharsets.UTF_8); java.security.MessageDigest sha = java.security.MessageDigest.getInstance("SHA-1"); keyByteArray = sha.digest(keyByteArray); keyByteArray = java.util.Arrays.copyOf(keyByteArray, 16); javax.crypto.spec.SecretKeySpec secretKey = new javax.crypto.spec.SecretKeySpec(keyByteArray, "AES"); javax.crypto.Cipher cipher = javax.crypto.Cipher.getInstance("AES/ECB/PKCS5Padding"); cipher.init(javax.crypto.Cipher.ENCRYPT_MODE, secretKey); encryptedText = javax.xml.bind.DatatypeConverter.printBase64Binary(cipher.doFinal(input.getBytes(java.nio.charset.StandardCharsets.UTF_8))); } catch (Exception e) { //Exceptions are ignored } out.buffer = buffer; out.start = 0; out.end = encryptedText.getBytes().length; buffer.setBytes(0, encryptedText.getBytes()); } } /** * AES_DECRYPT() decrypts the encrypted string crypt_str using the key string key_str and returns the original cleartext string. * If either function argument is NULL, the function returns NULL. * Usage: SELECT aes_decrypt( <encrypted_text>, <key> ) FROM ... 
*/ @FunctionTemplate(name = "aes_decrypt", scope = FunctionTemplate.FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.NULL_IF_NULL) public static class AESDecryptFunction implements DrillSimpleFunc { @Param VarCharHolder rawInput; @Param VarCharHolder rawKey; @Output VarCharHolder out; @Inject DrillBuf buffer; @Override public void setup() { } @Override public void eval() { String key = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(rawKey.start, rawKey.end, rawKey.buffer); String input = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(rawInput.start, rawInput.end, rawInput.buffer); String decryptedText = ""; try { byte[] keyByteArray = key.getBytes(java.nio.charset.StandardCharsets.UTF_8); java.security.MessageDigest sha = java.security.MessageDigest.getInstance("SHA-1"); keyByteArray = sha.digest(keyByteArray); keyByteArray = java.util.Arrays.copyOf(keyByteArray, 16); javax.crypto.spec.SecretKeySpec secretKey = new javax.crypto.spec.SecretKeySpec(keyByteArray, "AES"); javax.crypto.Cipher cipher = javax.crypto.Cipher.getInstance("AES/ECB/PKCS5Padding"); cipher.init(javax.crypto.Cipher.DECRYPT_MODE, secretKey); decryptedText = new String(cipher.doFinal(javax.xml.bind.DatatypeConverter.parseBase64Binary(input))); } catch (Exception e) { //Exceptions are ignored } out.buffer = buffer; out.start = 0; out.end = decryptedText.getBytes().length; buffer.setBytes(0, decryptedText.getBytes()); } } }
/*
 * Copyright 2016-2018 MarkLogic Corporation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.marklogic.entityservices;

import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.marklogic.client.document.DocumentWriteSet;
import com.marklogic.client.document.TextDocumentManager;
import com.marklogic.client.io.JacksonHandle;
import com.marklogic.client.io.StringHandle;
import org.assertj.core.api.SoftAssertions;
import org.junit.BeforeClass;
import org.junit.Test;
import org.xml.sax.SAXException;

import javax.xml.transform.TransformerException;
import java.io.*;
import java.util.*;

import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

/**
 * Tests server function es:version-translator-generate
 *
 * Covered so far: validity of XQuery module generation
 *
 * convert-instance-Order
 *
 */
public class TestEsVersionTranslatorGenerate extends EntityServicesTestBase {

    //private StringHandle xqueryModule = new StringHandle();
    private static TextDocumentManager docMgr;
    private static Map<String, StringHandle> conversionModules;

    @BeforeClass
    public static void setupClass() {
        setupClients();
        // save xquery module to modules database
        docMgr = modulesClient.newTextDocumentManager();
        conversionModules = generateVersionTranslatorModule();
        storeConversionModules(conversionModules);
    }

    /**
     * Writes every generated translator module to the modules database under
     * /conv/, renaming the entity-type file extension to .xqy.
     */
    private static void storeConversionModules(Map<String, StringHandle> moduleMap) {
        DocumentWriteSet writeSet = docMgr.newWriteSet();
        for (Map.Entry<String, StringHandle> entry : moduleMap.entrySet()) {
            String moduleName = "/conv/" + entry.getKey().replaceAll("\\.(xml|json)", ".xqy");
            writeSet.add(moduleName, entry.getValue());
        }
        docMgr.write(writeSet);
    }

    /**
     * Runs es:version-translator-generate for every -Src/-Tgt pair of entity
     * types, skipping fixtures that are not version-translation candidates.
     *
     * @return map from the base entity-type name to the generated XQuery module
     */
    private static Map<String, StringHandle> generateVersionTranslatorModule() {
        Map<String, StringHandle> map = new HashMap<>();
        for (String entityType : entityTypes) {
            // Skip fixtures that are not version-translation pairs (validation
            // fixtures, XML variants, images, intentionally invalid types, and
            // the sameTgt-* pair which has its own dedicated tests below).
            if (entityType.startsWith("valid-")
                    || entityType.startsWith("primary-")
                    || entityType.startsWith("person")
                    || entityType.startsWith("SchemaCompleteEntity")
                    || entityType.contains(".xml")
                    || entityType.contains("-Src.json")
                    || entityType.contains(".jpg")
                    || entityType.contains("invalid-")
                    || entityType.contains("sameTgt-")) {
                continue;
            }
            String part = entityType.replaceAll("\\-(Src|Tgt)", "");
            String source = part.replaceAll("\\.json", "-Src.json");
            String target = part.replaceAll("\\.json", "-Tgt.json");
            logger.info("Generating version translator module for : " + source + " & " + target);
            StringHandle xqueryModule = new StringHandle();
            try {
                xqueryModule = evalOneResult("", "es:version-translator-generate(fn:doc('"+source+"'),fn:doc('"+target+"'))", xqueryModule);
                //logger.info("Ver Trans Gen for "+part+" : \n"+xqueryModule.get());
            } catch (TestEvalException e) {
                throw new RuntimeException(e);
            }
            map.put(part, xqueryModule);
        }
        return map;
    }

    /**
     * Compares the generated module content line-by-line with the expected file
     * under src/test/resources, ignoring whitespace and the run-specific
     * "Generated at timestamp" line.
     */
    private void compareLines(String path, String content) throws IOException {
        List<String> contentLines = Arrays.asList(content.split("\\n"));
        Iterator<String> contentIterator = contentLines.iterator();
        File expectedFile = new File("src/test/resources/" + path);
        try (BufferedReader br = new BufferedReader(new FileReader(expectedFile))) {
            long i = 0;
            String line;
            SoftAssertions softly = new SoftAssertions();
            while ((line = br.readLine()) != null) {
                if (contentIterator.hasNext()) {
                    String expectedLine = contentIterator.next();
                    // The timestamp line differs on every run, so it is skipped.
                    if (!expectedLine.contains("Generated at timestamp")) {
                        softly.assertThat(expectedLine)
                            .as("Mismatch in conversion module line " + Long.toString(i++))
                            .isEqualToIgnoringWhitespace(line);
                    }
                } else {
                    fail("Expected result has more lines than actual results");
                }
            }
            softly.assertAll();
        }
    }

    @Test
    public void testVersionTranslatorModule() throws TransformerException, IOException, SAXException {
        for (String entityTypeName : conversionModules.keySet()) {
            String actualDoc = conversionModules.get(entityTypeName).get();
            logger.info("Checking version translator for "+entityTypeName.replaceAll("\\.(xml|json)", ".xqy"));
            //logger.info(actualDoc+"\n************************************************************\n");
            compareLines("/test-version-translator/"+entityTypeName.replaceAll("\\.(xml|json)", ".xqy"), actualDoc);
        }
    }

    @Test
    public void testSrcAndTgtSame() throws TransformerException, IOException, SAXException {
        String entityTypeName = "sameTgt-Src.json";
        StringHandle xqueryModule;
        try {
            logger.info("Checking version translator for "+entityTypeName.replaceAll("\\.(xml|json)", ".xqy"));
            xqueryModule = evalOneResult("", "es:version-translator-generate( es:model-validate( fn:doc( '"+entityTypeName+"')),fn:doc( '"+entityTypeName+"'))", new StringHandle());
            String actualDoc = xqueryModule.get();
            //logger.info(actualDoc);
            compareLines("/test-version-translator/"+entityTypeName.replaceAll("\\.(xml|json)", ".xqy"), actualDoc);
        } catch (TestEvalException e) {
            throw new RuntimeException(e);
        }
    }

    @Test
    public void testSrcTgtSameDiffTitle() throws TransformerException, IOException, SAXException {
        String source = "sameTgt-Src.json";
        String target = "sameTgt-Tgt.json";
        StringHandle xqueryModule;
        try {
            logger.info("Checking version translator for "+target.replaceAll("\\.(xml|json)", ".xqy"));
            xqueryModule = evalOneResult("", "es:version-translator-generate(fn:doc( '"+source+"'),es:model-validate(fn:doc( '"+target+"')))", new StringHandle());
            String actualDoc = xqueryModule.get();
            //logger.info(actualDoc);
            compareLines("/test-version-translator/"+target.replaceAll("\\.(xml|json)", ".xqy"), actualDoc);
        } catch (TestEvalException e) {
            throw new RuntimeException(e);
        }
    }

    @Test
    public void testMissingSrc() {
        logger.info("Checking version-translator-generate() with a missing document node");
        try {
            evalOneResult("", "es:version-translator-generate(fn:doc('valid-datatype-array.xml'))", new JacksonHandle());
            fail("eval should throw an ES-MODEL-INVALID exception for version-translator-generate() with a missing document node");
        } catch (TestEvalException e) {
            logger.info(e.getMessage());
            assertTrue("Must contain ES-MODEL-INVALID error message but got: "+e.getMessage(),
                e.getMessage().contains("Too few args, expected 2 but got 1"));
        }
    }

    @Test
    public void testInvalidETDocAsSrcTgt() {
        logger.info("Checking version-translator-generate() with invalid document node");
        try {
            evalOneResult("", "es:version-translator-generate(es:model-validate(fn:doc('invalid-missing-info.json')),es:model-validate(fn:doc('invalid-missing-title.json')))", new JacksonHandle());
            fail("eval should throw an ES-MODEL-INVALID exception for version-translator-generate() with invalid document node");
        } catch (TestEvalException e) {
            logger.info(e.getMessage());
            assertTrue("Must contain ES-MODEL-INVALID error message but got: "+e.getMessage(),
                e.getMessage().contains("ES-MODEL-INVALID: Model descriptor must contain exactly one info section. Primary Key orderId doesn't exist."));
        }
    }

    @Test
    public void testSrcRefDiffDatatype() throws TransformerException, IOException, SAXException {
        String source = "srcRefDiffDatatype-Src.json";
        String target = "srcRefDiffDatatype-Tgt.json";
        StringHandle xqueryModule;
        try {
            logger.info("Checking version translator for "+target.replaceAll("\\.(xml|json)", ".xqy"));
            xqueryModule = evalOneResult("", "es:version-translator-generate(fn:doc( '"+source+"'),fn:doc( '"+target+"'))", new StringHandle());
            String actualDoc = xqueryModule.get();
            //logger.info(actualDoc);
            compareLines("/test-version-translator/"+target.replaceAll("\\-Tgt.json", ".xqy"), actualDoc);
        } catch (TestEvalException e) {
            throw new RuntimeException(e);
        }
    }

    @Test
    public void testConvInst() throws JsonParseException, JsonMappingException, IOException {
        String import1 = "import module namespace locArr = 'http://localArrayRefTgt/localArrayRefTgt-0.0.2-from-localArrayRefSrc-0.0.1' at '/conv/localArrayRefs.xqy';\n"
            + "import module namespace locArrSrc = 'http://localArrayRefSrc/localArrayRefSrc-0.0.1' at '/conv/VT-localArrayRefs-Src.xqy';\n";
        String query1 = "locArr:convert-instance-Order(locArrSrc:instance-to-envelope(locArrSrc:extract-instance-Order(doc('10252.xml'))))";
        JacksonHandle handle;
        try {
            logger.info("Checking convert-instance-Order()");
            handle = evalOneResult(import1, query1, new JacksonHandle());
            JsonNode actualDoc = handle.get();
            ObjectMapper mapper = new ObjectMapper();
            InputStream is = this.getClass().getResourceAsStream("/test-extract-instance/convert-instance-Order.json");
            JsonNode control = mapper.readValue(is, JsonNode.class);
            org.hamcrest.MatcherAssert.assertThat(control, org.hamcrest.Matchers.equalTo(actualDoc));
        } catch (TestEvalException e) {
            throw new RuntimeException(e);
        }
    }

    @Test
    public void testConvInst2() throws TransformerException, IOException, SAXException {
        String import1 = "import module namespace locArr = 'http://marklogic.com/srcRefDatatype/srcRefDatatypeTgt-0.0.2-from-srcRefDatatypeSrc-0.0.1' at '/conv/srcRefDiffDatatype.xqy';\n"
            + "import module namespace locArrSrc = 'http://marklogic.com/srcRefDatatype/srcRefDatatypeSrc-0.0.1' at '/conv/srcRefDiffDatatype-Src.xqy';\n";
        String query1 = "locArr:convert-instance-Product(locArrSrc:instance-to-envelope(locArrSrc:extract-instance-Product(doc('51.xml'))))";
        JacksonHandle handle;
        try {
            logger.info("Checking convert-instance-Product()");
            handle = evalOneResult(import1, query1, new JacksonHandle());
            JsonNode actualDoc = handle.get();
            ObjectMapper mapper = new ObjectMapper();
            InputStream is = this.getClass().getResourceAsStream("/test-extract-instance/convert-instance-Product.json");
            JsonNode control = mapper.readValue(is, JsonNode.class);
            org.hamcrest.MatcherAssert.assertThat(control, org.hamcrest.Matchers.equalTo(actualDoc));
        } catch (TestEvalException e) {
            throw new RuntimeException(e);
        }
    }

    @Test
    public void testNamespace() throws IOException {
        String source = "valid-1-namespace.json";
        String target = "valid-2-namespace.json";
        StringHandle xqueryModule;
        try {
            logger.info("Checking version translator for namespaceTrans.xqy");
            xqueryModule = evalOneResult("", "es:version-translator-generate(fn:doc( '"+source+"'),fn:doc( '"+target+"'))", new StringHandle());
            String actualDoc = xqueryModule.get();
            //logger.info(actualDoc);
            compareLines("/test-version-translator/namespaceTrans.xqy", actualDoc);
        } catch (TestEvalException e) {
            throw new RuntimeException(e);
        }
    }
}
package com.neerajsingh.popularmovies2;

import android.app.LoaderManager;
import android.app.ProgressDialog;
import android.content.CursorLoader;
import android.content.Intent;
import android.content.Loader;
import android.content.res.Configuration;
import android.database.Cursor;
import android.graphics.Point;
import android.net.Uri;
import android.os.Bundle;
import android.support.design.widget.FloatingActionButton;
import android.support.design.widget.Snackbar;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.GridLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.Toolbar;
import android.view.Display;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.widget.Toast;

import com.google.gson.Gson;
import com.neerajsingh.popularmovies2.Data.Movie;
import com.neerajsingh.popularmovies2.Data.MovieContract;
import com.neerajsingh.popularmovies2.Data.MovieList;
import com.neerajsingh.popularmovies2.Network.PopularMoviesApplication;

import java.util.ArrayList;
import java.util.List;

import retrofit2.Call;
import retrofit2.Callback;
import retrofit2.Response;

/**
 * An activity representing a list of Items. This activity
 * has different presentations for handset and tablet-size devices. On
 * handsets, the activity presents a list of items, which when touched,
 * lead to a {@link DetailActivity} representing
 * item details. On tablets, the activity presents the list of items and
 * item details side-by-side using two vertical panes.
 */
public class ItemListActivity extends AppCompatActivity
        implements NavigationInterface, LoaderManager.LoaderCallbacks<Cursor> {

    private static final int SPAN_1 = 1;
    private static final int SPAN_2 = 2;
    private static final int SPAN_3 = 3;
    private static final int SPAN_4 = 4;

    /**
     * Whether or not the activity is in two-pane mode, i.e. running on a tablet
     * device.
     */
    private boolean mTwoPane;
    protected final String Loader_Message = "Please wait..";
    RecyclerView recyclerView;
    List<Movie> movieList;
    ProgressDialog progressDialog;
    MoviesAdapter moviesAdapter;
    // Which movie list is currently displayed (popular / top rated / favorites).
    MovieState state;
    RecyclerView.LayoutManager layoutManager;
    // Loader id for the favorites CursorLoader.
    private int ID = 1;
    private Toolbar toolbar;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_item_list);
        toolbar = (Toolbar) findViewById(R.id.toolbar);
        setSupportActionBar(toolbar);
        toolbar.setTitle(getTitle());
        // Restore the selected list across configuration changes.
        if (savedInstanceState != null) {
            state = (MovieState) savedInstanceState.getSerializable("State");
        } else {
            state = MovieState.popular;
        }
        recyclerView = (RecyclerView) findViewById(R.id.item_list);
        assert recyclerView != null;
        if (findViewById(R.id.item_detail_container) != null) {
            // The detail container is only present in the tablet (two-pane) layout.
            mTwoPane = true;
            if (movieList != null) {
                openPage(movieList.get(0));
            }
        }
        setupRecyclerView();
    }

    /**
     * Fetches movies over the network, or falls back to the favorites loader
     * when {@code movieListCall} is null (the "fav" state has no network call).
     */
    private void getMovies(Call<MovieList> movieListCall) {
        progressDialog = ProgressDialog.show(this, null, Loader_Message);
        progressDialog.setCancelable(true);
        if (movieListCall != null) {
            movieListCall.enqueue(new Callback<MovieList>() {
                @Override
                public void onResponse(Call<MovieList> call, Response<MovieList> response) {
                    progressDialog.dismiss();
                    if (response != null && response.isSuccessful()) {
                        movieList = response.body().getPopularMovies();
                        setAdapter();
                    }
                }

                @Override
                public void onFailure(Call<MovieList> call, Throwable t) {
                    progressDialog.dismiss();
                    setContentView(R.layout.no_net);
                }
            });
        } else {
            beginLoader(ID);
        }
    }

    /** Binds the current movie list to the RecyclerView. */
    private void setAdapter() {
        moviesAdapter = new MoviesAdapter(movieList, ItemListActivity.this);
        recyclerView.setAdapter(moviesAdapter);
        moviesAdapter.notifyDataSetChanged();
        if (mTwoPane && movieList != null) {
            openPage(movieList.get(0));
        }
    }

    /** Starts (or restarts) the favorites CursorLoader. */
    private void beginLoader(int id) {
        if (getLoaderManager().getLoader(id) == null) {
            getLoaderManager().initLoader(id, null, this);
        } else {
            getLoaderManager().restartLoader(id, null, this);
        }
    }

    /**
     * Configures the grid span from the current orientation (and pane mode),
     * then kicks off the initial load for the current state.
     */
    private void setupRecyclerView() {
        if (this.getResources().getConfiguration().orientation == Configuration.ORIENTATION_PORTRAIT) {
            Display display = getWindowManager().getDefaultDisplay();
            Point size = new Point();
            display.getSize(size);
            // Fit as many 156dp-wide columns as the screen allows.
            int width = (size.x / Utils.dptopx(this, 156));
            layoutManager = new GridLayoutManager(this, width);
        } else if (this.getResources().getConfiguration().orientation == Configuration.ORIENTATION_LANDSCAPE) {
            layoutManager = new GridLayoutManager(this, mTwoPane ? SPAN_1 : SPAN_3);
        }
        recyclerView.setLayoutManager(layoutManager);
        getMovies(getMoviesHandler(state));
    }

    /**
     * Maps a {@link MovieState} to its Retrofit call; returns null for the
     * favorites state, which is served from the local content provider.
     */
    private Call<MovieList> getMoviesHandler(MovieState state) {
        if (state == MovieState.popular) {
            return PopularMoviesApplication.getBaseRequestInterface().getPopularMovies();
        } else if (state == MovieState.toprated) {
            return PopularMoviesApplication.getBaseRequestInterface().getTopRated();
        } else {
            return null;
        }
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        MenuInflater inflater = getMenuInflater();
        inflater.inflate(R.menu.app_menu, menu);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
            case R.id.popular:
                if (state != MovieState.popular) {
                    state = MovieState.popular;
                    toolbar.setTitle(item.getTitle());
                    getMovies(getMoviesHandler(state));
                } else {
                    // BUGFIX: the toast previously said "top rated" in the popular case.
                    Toast.makeText(this, "Already showing popular movies", Toast.LENGTH_SHORT).show();
                }
                return true;
            case R.id.top_rated:
                if (state != MovieState.toprated) {
                    state = MovieState.toprated;
                    toolbar.setTitle(item.getTitle());
                    getMovies(getMoviesHandler(state));
                } else {
                    // BUGFIX: the toast previously said "popular" in the top-rated case.
                    Toast.makeText(this, "Already showing top rated movies", Toast.LENGTH_SHORT).show();
                }
                return true;
            case R.id.fav:
                if (state != MovieState.fav) {
                    state = MovieState.fav;
                    toolbar.setTitle(item.getTitle());
                    getMovies(getMoviesHandler(state));
                } else {
                    Toast.makeText(this, "Already showing fav movies", Toast.LENGTH_SHORT).show();
                }
                return true;
            default:
                return false;
        }
    }

    @Override
    public void openPage(Movie movie) {
        if (mTwoPane) {
            // Tablet: show the detail fragment in the side pane.
            Bundle arguments = new Bundle();
            arguments.putParcelable(ItemDetailFragment.MOVIE_DETAIL, movie);
            ItemDetailFragment fragment = new ItemDetailFragment();
            fragment.setArguments(arguments);
            getSupportFragmentManager().beginTransaction()
                    .replace(R.id.item_detail_container, fragment)
                    .commit();
        } else {
            // Handset: launch the detail activity.
            Intent intent = new Intent(this, DetailActivity.class);
            intent.putExtra(ItemDetailFragment.MOVIE_DETAIL, movie);
            startActivity(intent);
        }
    }

    @Override
    public Loader<Cursor> onCreateLoader(int id, Bundle args) {
        if (id == ID) {
            Uri uri = MovieContract.MovieEntry.CONTENT_URI;
            return new CursorLoader(this, uri, null, null, null, null);
        }
        return null;
    }

    @Override
    public void onLoadFinished(Loader<Cursor> loader, Cursor data) {
        if (loader.getId() == ID) {
            handleFav(data);
            progressDialog.dismiss();
        }
    }

    /**
     * Deserializes favorite movies from the cursor (each row stores the movie
     * as JSON) and displays them, notifying the user when the list is empty.
     */
    private void handleFav(Cursor data) {
        Gson gson = new Gson();
        movieList = new ArrayList<>();
        if (data != null && data.moveToFirst()) {
            int movieIndex = data.getColumnIndex(MovieContract.MovieEntry.COLUMN_MOVIE);
            do {
                Movie movie = gson.fromJson(data.getString(movieIndex), Movie.class);
                if (movie != null) {
                    movieList.add(movie);
                }
            } while (data.moveToNext());
        }
        // Always refresh the adapter; toast only when there are no favorites.
        setAdapter();
        if (movieList.isEmpty()) {
            Toast.makeText(this, "Nothing in fav section", Toast.LENGTH_LONG).show();
        }
    }

    @Override
    public void onLoaderReset(Loader<Cursor> loader) {
    }

    private enum MovieState {
        popular, toprated, fav;
    }

    @Override
    protected void onSaveInstanceState(Bundle outState) {
        outState.putSerializable("State", state);
        super.onSaveInstanceState(outState);
    }

    /** Retry handler wired to the no-network layout's button. */
    public void retryNetworkCall(View view) {
        setupRecyclerView();
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.runtime.entrypoint;

import org.apache.flink.api.common.time.Time;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.GlobalConfiguration;
import org.apache.flink.configuration.JobManagerOptions;
import org.apache.flink.core.fs.FileSystem;
import org.apache.flink.runtime.akka.AkkaUtils;
import org.apache.flink.runtime.blob.BlobServer;
import org.apache.flink.runtime.clusterframework.BootstrapTools;
import org.apache.flink.runtime.heartbeat.HeartbeatServices;
import org.apache.flink.runtime.highavailability.HighAvailabilityServices;
import org.apache.flink.runtime.highavailability.HighAvailabilityServicesUtils;
import org.apache.flink.runtime.metrics.MetricRegistry;
import org.apache.flink.runtime.metrics.MetricRegistryConfiguration;
import org.apache.flink.runtime.rpc.FatalErrorHandler;
import org.apache.flink.runtime.rpc.RpcService;
import org.apache.flink.runtime.rpc.akka.AkkaRpcService;
import org.apache.flink.runtime.security.SecurityConfiguration;
import org.apache.flink.runtime.security.SecurityContext;
import org.apache.flink.runtime.security.SecurityUtils;
import org.apache.flink.util.ExceptionUtils;
import org.apache.flink.util.FlinkException;
import org.apache.flink.util.Preconditions;

import akka.actor.ActorSystem;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.annotation.concurrent.GuardedBy;

import java.io.IOException;
import java.util.concurrent.Callable;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executor;

import scala.concurrent.duration.FiniteDuration;

/**
 * Base class for the Flink cluster entry points.
 *
 * <p>Specialization of this class can be used for the session mode and the per-job mode
 */
public abstract class ClusterEntrypoint implements FatalErrorHandler {

	protected static final Logger LOG = LoggerFactory.getLogger(ClusterEntrypoint.class);

	protected static final int SUCCESS_RETURN_CODE = 0;
	protected static final int STARTUP_FAILURE_RETURN_CODE = 1;
	protected static final int RUNTIME_FAILURE_RETURN_CODE = 2;

	/** The lock to guard startup / shutdown / manipulation methods. */
	private final Object lock = new Object();

	private final Configuration configuration;

	private final CompletableFuture<Boolean> terminationFuture;

	@GuardedBy("lock")
	private MetricRegistry metricRegistry = null;

	@GuardedBy("lock")
	private HighAvailabilityServices haServices = null;

	@GuardedBy("lock")
	private BlobServer blobServer = null;

	@GuardedBy("lock")
	private HeartbeatServices heartbeatServices = null;

	@GuardedBy("lock")
	private RpcService commonRpcService = null;

	protected ClusterEntrypoint(Configuration configuration) {
		this.configuration = Preconditions.checkNotNull(configuration);
		this.terminationFuture = new CompletableFuture<>();
	}

	/** Future completed with {@code true} once the entrypoint has shut down. */
	public CompletableFuture<Boolean> getTerminationFuture() {
		return terminationFuture;
	}

	/**
	 * Starts the cluster: installs the default filesystem and the security
	 * context, then runs the cluster components under that context. On any
	 * failure the process is terminated with {@link #STARTUP_FAILURE_RETURN_CODE}.
	 */
	protected void startCluster() {
		LOG.info("Starting {}.", getClass().getSimpleName());

		try {
			installDefaultFileSystem(configuration);

			SecurityContext securityContext = installSecurityContext(configuration);

			securityContext.runSecured(new Callable<Void>() {
				@Override
				public Void call() throws Exception {
					runCluster(configuration);

					return null;
				}
			});
		} catch (Throwable t) {
			LOG.error("Cluster initialization failed.", t);

			try {
				shutDown(false);
			} catch (Throwable st) {
				LOG.error("Could not properly shut down cluster entrypoint.", st);
			}

			System.exit(STARTUP_FAILURE_RETURN_CODE);
		}
	}

	protected void installDefaultFileSystem(Configuration configuration) throws Exception {
		LOG.info("Install default filesystem.");

		try {
			FileSystem.setDefaultScheme(configuration);
		} catch (IOException e) {
			throw new IOException("Error while setting the default " +
				"filesystem scheme from configuration.", e);
		}
	}

	protected SecurityContext installSecurityContext(Configuration configuration) throws Exception {
		LOG.info("Install security context.");

		SecurityUtils.install(new SecurityConfiguration(configuration));

		return SecurityUtils.getInstalledContext();
	}

	protected void runCluster(Configuration configuration) throws Exception {
		synchronized (lock) {
			initializeServices(configuration);

			// write host information into configuration
			configuration.setString(JobManagerOptions.ADDRESS, commonRpcService.getAddress());
			configuration.setInteger(JobManagerOptions.PORT, commonRpcService.getPort());

			startClusterComponents(
				configuration,
				commonRpcService,
				haServices,
				blobServer,
				heartbeatServices,
				metricRegistry);
		}
	}

	protected void initializeServices(Configuration configuration) throws Exception {
		assert(Thread.holdsLock(lock));

		LOG.info("Initializing cluster services.");

		final String bindAddress = configuration.getString(JobManagerOptions.ADDRESS);
		// TODO: Add support for port ranges
		final String portRange = String.valueOf(configuration.getInteger(JobManagerOptions.PORT));

		commonRpcService = createRpcService(configuration, bindAddress, portRange);
		haServices = createHaServices(configuration, commonRpcService.getExecutor());
		blobServer = new BlobServer(configuration, haServices.createBlobStore());
		blobServer.start();
		heartbeatServices = createHeartbeatServices(configuration);
		metricRegistry = createMetricRegistry(configuration);
	}

	protected RpcService createRpcService(
			Configuration configuration,
			String bindAddress,
			String portRange) throws Exception {
		ActorSystem actorSystem = BootstrapTools.startActorSystem(configuration, bindAddress, portRange, LOG);
		FiniteDuration duration = AkkaUtils.getTimeout(configuration);
		return new AkkaRpcService(actorSystem, Time.of(duration.length(), duration.unit()));
	}

	protected HighAvailabilityServices createHaServices(
			Configuration configuration,
			Executor executor) throws Exception {
		return HighAvailabilityServicesUtils.createHighAvailabilityServices(
			configuration,
			executor,
			HighAvailabilityServicesUtils.AddressResolution.NO_ADDRESS_RESOLUTION);
	}

	protected HeartbeatServices createHeartbeatServices(Configuration configuration) {
		return HeartbeatServices.fromConfiguration(configuration);
	}

	protected MetricRegistry createMetricRegistry(Configuration configuration) {
		return new MetricRegistry(MetricRegistryConfiguration.fromConfiguration(configuration));
	}

	/**
	 * Stops all cluster services, collecting every failure so that later
	 * shutdown steps still run; the first failure (with the rest suppressed)
	 * is rethrown at the end.
	 *
	 * @param cleanupHaData whether the HA data should be removed as well
	 * @throws FlinkException if any service could not be shut down cleanly
	 */
	protected void shutDown(boolean cleanupHaData) throws FlinkException {
		LOG.info("Stopping {}.", getClass().getSimpleName());

		Throwable exception = null;

		synchronized (lock) {
			try {
				stopClusterComponents(cleanupHaData);
			} catch (Throwable t) {
				exception = ExceptionUtils.firstOrSuppressed(t, exception);
			}

			if (metricRegistry != null) {
				try {
					metricRegistry.shutdown();
				} catch (Throwable t) {
					// BUGFIX: previously "exception = t;", which silently dropped
					// any failure already collected from stopClusterComponents.
					exception = ExceptionUtils.firstOrSuppressed(t, exception);
				}
			}

			if (blobServer != null) {
				try {
					blobServer.close();
				} catch (Throwable t) {
					exception = ExceptionUtils.firstOrSuppressed(t, exception);
				}
			}

			if (haServices != null) {
				try {
					if (cleanupHaData) {
						haServices.closeAndCleanupAllData();
					} else {
						haServices.close();
					}
				} catch (Throwable t) {
					exception = ExceptionUtils.firstOrSuppressed(t, exception);
				}
			}

			if (commonRpcService != null) {
				try {
					commonRpcService.stopService();
				} catch (Throwable t) {
					exception = ExceptionUtils.firstOrSuppressed(t, exception);
				}
			}

			terminationFuture.complete(true);
		}

		if (exception != null) {
			throw new FlinkException("Could not properly shut down the cluster services.", exception);
		}
	}

	@Override
	public void onFatalError(Throwable exception) {
		LOG.error("Fatal error occurred in the cluster entrypoint.", exception);

		System.exit(RUNTIME_FAILURE_RETURN_CODE);
	}

	protected abstract void startClusterComponents(
		Configuration configuration,
		RpcService rpcService,
		HighAvailabilityServices highAvailabilityServices,
		BlobServer blobServer,
		HeartbeatServices heartbeatServices,
		MetricRegistry metricRegistry) throws Exception;

	protected void stopClusterComponents(boolean cleanupHaData) throws Exception {
	}

	protected static ClusterConfiguration parseArguments(String[] args) {
		ParameterTool parameterTool = ParameterTool.fromArgs(args);

		final String configDir = parameterTool.get("configDir", "");

		return new ClusterConfiguration(configDir);
	}

	protected static Configuration loadConfiguration(ClusterConfiguration clusterConfiguration) {
		return GlobalConfiguration.loadConfiguration(clusterConfiguration.getConfigDir());
	}
}
/* * Copyright 2021 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.logging.v2; import com.google.api.core.BetaApi; import com.google.api.pathtemplate.PathTemplate; import com.google.api.pathtemplate.ValidationException; import com.google.api.resourcenames.ResourceName; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableMap; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Objects; import javax.annotation.Generated; // AUTO-GENERATED DOCUMENTATION AND CLASS. 
@Generated("by gapic-generator-java") public class CmekSettingsName implements ResourceName { private static final PathTemplate PROJECT = PathTemplate.createWithoutUrlEncoding("projects/{project}/cmekSettings"); private static final PathTemplate ORGANIZATION = PathTemplate.createWithoutUrlEncoding("organizations/{organization}/cmekSettings"); private static final PathTemplate FOLDER = PathTemplate.createWithoutUrlEncoding("folders/{folder}/cmekSettings"); private static final PathTemplate BILLING_ACCOUNT = PathTemplate.createWithoutUrlEncoding("billingAccounts/{billing_account}/cmekSettings"); private volatile Map<String, String> fieldValuesMap; private PathTemplate pathTemplate; private String fixedValue; private final String project; private final String organization; private final String folder; private final String billingAccount; @Deprecated protected CmekSettingsName() { project = null; organization = null; folder = null; billingAccount = null; } private CmekSettingsName(Builder builder) { project = Preconditions.checkNotNull(builder.getProject()); organization = null; folder = null; billingAccount = null; pathTemplate = PROJECT; } private CmekSettingsName(OrganizationCmekSettingsBuilder builder) { organization = Preconditions.checkNotNull(builder.getOrganization()); project = null; folder = null; billingAccount = null; pathTemplate = ORGANIZATION; } private CmekSettingsName(FolderCmekSettingsBuilder builder) { folder = Preconditions.checkNotNull(builder.getFolder()); project = null; organization = null; billingAccount = null; pathTemplate = FOLDER; } private CmekSettingsName(BillingAccountCmekSettingsBuilder builder) { billingAccount = Preconditions.checkNotNull(builder.getBillingAccount()); project = null; organization = null; folder = null; pathTemplate = BILLING_ACCOUNT; } public String getProject() { return project; } public String getOrganization() { return organization; } public String getFolder() { return folder; } public String getBillingAccount() { 
return billingAccount; } public static Builder newBuilder() { return new Builder(); } @BetaApi("The per-pattern Builders are not stable yet and may be changed in the future.") public static Builder newProjectBuilder() { return new Builder(); } /** @deprecated Please use {@link #newProjectBuilder()} instead */ @Deprecated public static Builder newProjectCmekSettingsBuilder() { return new Builder(); } @BetaApi("The per-pattern Builders are not stable yet and may be changed in the future.") public static OrganizationCmekSettingsBuilder newOrganizationCmekSettingsBuilder() { return new OrganizationCmekSettingsBuilder(); } @BetaApi("The per-pattern Builders are not stable yet and may be changed in the future.") public static FolderCmekSettingsBuilder newFolderCmekSettingsBuilder() { return new FolderCmekSettingsBuilder(); } @BetaApi("The per-pattern Builders are not stable yet and may be changed in the future.") public static BillingAccountCmekSettingsBuilder newBillingAccountCmekSettingsBuilder() { return new BillingAccountCmekSettingsBuilder(); } public Builder toBuilder() { return new Builder(this); } public static CmekSettingsName of(String project) { return newBuilder().setProject(project).build(); } @BetaApi("The static create methods are not stable yet and may be changed in the future.") public static CmekSettingsName ofProjectName(String project) { return newBuilder().setProject(project).build(); } /** @deprecated Please use {@link #ofProjectName()} instead */ @Deprecated public static CmekSettingsName ofProjectCmekSettingsName(String project) { return newBuilder().setProject(project).build(); } @BetaApi("The static create methods are not stable yet and may be changed in the future.") public static CmekSettingsName ofOrganizationName(String organization) { return newOrganizationCmekSettingsBuilder().setOrganization(organization).build(); } /** @deprecated Please use {@link #ofOrganizationName()} instead */ @Deprecated public static CmekSettingsName 
ofOrganizationCmekSettingsName(String organization) { return newOrganizationCmekSettingsBuilder().setOrganization(organization).build(); } @BetaApi("The static create methods are not stable yet and may be changed in the future.") public static CmekSettingsName ofFolderName(String folder) { return newFolderCmekSettingsBuilder().setFolder(folder).build(); } /** @deprecated Please use {@link #ofFolderName()} instead */ @Deprecated public static CmekSettingsName ofFolderCmekSettingsName(String folder) { return newFolderCmekSettingsBuilder().setFolder(folder).build(); } @BetaApi("The static create methods are not stable yet and may be changed in the future.") public static CmekSettingsName ofBillingAccountName(String billingAccount) { return newBillingAccountCmekSettingsBuilder().setBillingAccount(billingAccount).build(); } /** @deprecated Please use {@link #ofBillingAccountName()} instead */ @Deprecated public static CmekSettingsName ofBillingAccountCmekSettingsName(String billingAccount) { return newBillingAccountCmekSettingsBuilder().setBillingAccount(billingAccount).build(); } public static String format(String project) { return newBuilder().setProject(project).build().toString(); } @BetaApi("The static format methods are not stable yet and may be changed in the future.") public static String formatProjectName(String project) { return newBuilder().setProject(project).build().toString(); } /** @deprecated Please use {@link #formatProjectName()} instead */ @Deprecated public static String formatProjectCmekSettingsName(String project) { return newBuilder().setProject(project).build().toString(); } @BetaApi("The static format methods are not stable yet and may be changed in the future.") public static String formatOrganizationName(String organization) { return newOrganizationCmekSettingsBuilder().setOrganization(organization).build().toString(); } /** @deprecated Please use {@link #formatOrganizationName()} instead */ @Deprecated public static String 
formatOrganizationCmekSettingsName(String organization) { return newOrganizationCmekSettingsBuilder().setOrganization(organization).build().toString(); } @BetaApi("The static format methods are not stable yet and may be changed in the future.") public static String formatFolderName(String folder) { return newFolderCmekSettingsBuilder().setFolder(folder).build().toString(); } /** @deprecated Please use {@link #formatFolderName()} instead */ @Deprecated public static String formatFolderCmekSettingsName(String folder) { return newFolderCmekSettingsBuilder().setFolder(folder).build().toString(); } @BetaApi("The static format methods are not stable yet and may be changed in the future.") public static String formatBillingAccountName(String billingAccount) { return newBillingAccountCmekSettingsBuilder() .setBillingAccount(billingAccount) .build() .toString(); } /** @deprecated Please use {@link #formatBillingAccountName()} instead */ @Deprecated public static String formatBillingAccountCmekSettingsName(String billingAccount) { return newBillingAccountCmekSettingsBuilder() .setBillingAccount(billingAccount) .build() .toString(); } public static CmekSettingsName parse(String formattedString) { if (formattedString.isEmpty()) { return null; } if (PROJECT.matches(formattedString)) { Map<String, String> matchMap = PROJECT.match(formattedString); return ofProjectCmekSettingsName(matchMap.get("project")); } else if (ORGANIZATION.matches(formattedString)) { Map<String, String> matchMap = ORGANIZATION.match(formattedString); return ofOrganizationCmekSettingsName(matchMap.get("organization")); } else if (FOLDER.matches(formattedString)) { Map<String, String> matchMap = FOLDER.match(formattedString); return ofFolderCmekSettingsName(matchMap.get("folder")); } else if (BILLING_ACCOUNT.matches(formattedString)) { Map<String, String> matchMap = BILLING_ACCOUNT.match(formattedString); return ofBillingAccountCmekSettingsName(matchMap.get("billing_account")); } throw new 
ValidationException("CmekSettingsName.parse: formattedString not in valid format"); } public static List<CmekSettingsName> parseList(List<String> formattedStrings) { List<CmekSettingsName> list = new ArrayList<>(formattedStrings.size()); for (String formattedString : formattedStrings) { list.add(parse(formattedString)); } return list; } public static List<String> toStringList(List<CmekSettingsName> values) { List<String> list = new ArrayList<>(values.size()); for (CmekSettingsName value : values) { if (value == null) { list.add(""); } else { list.add(value.toString()); } } return list; } public static boolean isParsableFrom(String formattedString) { return PROJECT.matches(formattedString) || ORGANIZATION.matches(formattedString) || FOLDER.matches(formattedString) || BILLING_ACCOUNT.matches(formattedString); } @Override public Map<String, String> getFieldValuesMap() { if (fieldValuesMap == null) { synchronized (this) { if (fieldValuesMap == null) { ImmutableMap.Builder<String, String> fieldMapBuilder = ImmutableMap.builder(); if (project != null) { fieldMapBuilder.put("project", project); } if (organization != null) { fieldMapBuilder.put("organization", organization); } if (folder != null) { fieldMapBuilder.put("folder", folder); } if (billingAccount != null) { fieldMapBuilder.put("billing_account", billingAccount); } fieldValuesMap = fieldMapBuilder.build(); } } } return fieldValuesMap; } public String getFieldValue(String fieldName) { return getFieldValuesMap().get(fieldName); } @Override public String toString() { return fixedValue != null ? 
fixedValue : pathTemplate.instantiate(getFieldValuesMap()); } @Override public boolean equals(Object o) { if (o == this) { return true; } if (o != null || getClass() == o.getClass()) { CmekSettingsName that = ((CmekSettingsName) o); return Objects.equals(this.project, that.project) && Objects.equals(this.organization, that.organization) && Objects.equals(this.folder, that.folder) && Objects.equals(this.billingAccount, that.billingAccount); } return false; } @Override public int hashCode() { int h = 1; h *= 1000003; h ^= Objects.hashCode(fixedValue); h *= 1000003; h ^= Objects.hashCode(project); h *= 1000003; h ^= Objects.hashCode(organization); h *= 1000003; h ^= Objects.hashCode(folder); h *= 1000003; h ^= Objects.hashCode(billingAccount); return h; } /** Builder for projects/{project}/cmekSettings. */ public static class Builder { private String project; protected Builder() {} public String getProject() { return project; } public Builder setProject(String project) { this.project = project; return this; } private Builder(CmekSettingsName cmekSettingsName) { Preconditions.checkArgument( Objects.equals(cmekSettingsName.pathTemplate, PROJECT), "toBuilder is only supported when CmekSettingsName has the pattern of projects/{project}/cmekSettings"); this.project = cmekSettingsName.project; } public CmekSettingsName build() { return new CmekSettingsName(this); } } /** Builder for organizations/{organization}/cmekSettings. */ @BetaApi("The per-pattern Builders are not stable yet and may be changed in the future.") public static class OrganizationCmekSettingsBuilder { private String organization; protected OrganizationCmekSettingsBuilder() {} public String getOrganization() { return organization; } public OrganizationCmekSettingsBuilder setOrganization(String organization) { this.organization = organization; return this; } public CmekSettingsName build() { return new CmekSettingsName(this); } } /** Builder for folders/{folder}/cmekSettings. 
*/ @BetaApi("The per-pattern Builders are not stable yet and may be changed in the future.") public static class FolderCmekSettingsBuilder { private String folder; protected FolderCmekSettingsBuilder() {} public String getFolder() { return folder; } public FolderCmekSettingsBuilder setFolder(String folder) { this.folder = folder; return this; } public CmekSettingsName build() { return new CmekSettingsName(this); } } /** Builder for billingAccounts/{billing_account}/cmekSettings. */ @BetaApi("The per-pattern Builders are not stable yet and may be changed in the future.") public static class BillingAccountCmekSettingsBuilder { private String billingAccount; protected BillingAccountCmekSettingsBuilder() {} public String getBillingAccount() { return billingAccount; } public BillingAccountCmekSettingsBuilder setBillingAccount(String billingAccount) { this.billingAccount = billingAccount; return this; } public CmekSettingsName build() { return new CmekSettingsName(this); } } }
package org.bouncycastle.math.ec.custom.sec;

import java.math.BigInteger;

import org.bouncycastle.math.ec.ECFieldElement;
import org.bouncycastle.math.raw.Nat576;
import org.bouncycastle.util.Arrays;

/**
 * An element of the binary field underlying the SECT571 curves (field size 571,
 * per getFieldSize/getM). The element is held as a little-endian array of 64-bit
 * limbs produced by {@link SecT571Field#fromBigInteger}; all arithmetic is
 * delegated to the raw routines in {@code SecT571Field} / {@code Nat576}.
 * NOTE(review): limb count appears to be 9 (hashCode hashes x[0..9)) — confirm
 * against Nat576.create64.
 */
public class SecT571FieldElement extends ECFieldElement
{
    // Little-endian 64-bit limb representation of the field element.
    protected long[] x;

    /**
     * Builds an element from a non-negative BigInteger of at most 571 bits.
     *
     * @throws IllegalArgumentException if x is null, negative, or too wide.
     */
    public SecT571FieldElement(BigInteger x)
    {
        if (x == null || x.signum() < 0 || x.bitLength() > 571)
        {
            throw new IllegalArgumentException("x value invalid for SecT571FieldElement");
        }

        this.x = SecT571Field.fromBigInteger(x);
    }

    /** Creates the zero element (freshly allocated limb array). */
    public SecT571FieldElement()
    {
        this.x = Nat576.create64();
    }

    // Wraps an existing limb array without copying; callers must not alias it.
    protected SecT571FieldElement(long[] x)
    {
        this.x = x;
    }

//    public int bitLength()
//    {
//        return x.degree();
//    }

    public boolean isOne()
    {
        return Nat576.isOne64(x);
    }

    public boolean isZero()
    {
        return Nat576.isZero64(x);
    }

    /** Returns true iff the constant-term bit of the polynomial is set. */
    public boolean testBitZero()
    {
        return (x[0] & 1L) != 0L;
    }

    public BigInteger toBigInteger()
    {
        return Nat576.toBigInteger64(x);
    }

    public String getFieldName()
    {
        return "SecT571Field";
    }

    public int getFieldSize()
    {
        return 571;
    }

    /** Field addition (XOR of limbs in a binary field). */
    public ECFieldElement add(ECFieldElement b)
    {
        long[] z = Nat576.create64();
        SecT571Field.add(x, ((SecT571FieldElement)b).x, z);
        return new SecT571FieldElement(z);
    }

    public ECFieldElement addOne()
    {
        long[] z = Nat576.create64();
        SecT571Field.addOne(x, z);
        return new SecT571FieldElement(z);
    }

    public ECFieldElement subtract(ECFieldElement b)
    {
        // Addition and subtraction are the same in F2m
        return add(b);
    }

    public ECFieldElement multiply(ECFieldElement b)
    {
        long[] z = Nat576.create64();
        SecT571Field.multiply(x, ((SecT571FieldElement)b).x, z);
        return new SecT571FieldElement(z);
    }

    public ECFieldElement multiplyMinusProduct(ECFieldElement b, ECFieldElement x, ECFieldElement y)
    {
        // In characteristic 2, minus and plus coincide.
        return multiplyPlusProduct(b, x, y);
    }

    /**
     * Computes this*b + x*y with a single reduction: both products are
     * accumulated into one extended (double-width) buffer before reducing.
     */
    public ECFieldElement multiplyPlusProduct(ECFieldElement b, ECFieldElement x, ECFieldElement y)
    {
        long[] ax = this.x, bx = ((SecT571FieldElement)b).x;
        long[] xx = ((SecT571FieldElement)x).x, yx = ((SecT571FieldElement)y).x;

        long[] tt = Nat576.createExt64();
        SecT571Field.multiplyAddToExt(ax, bx, tt);
        SecT571Field.multiplyAddToExt(xx, yx, tt);

        long[] z = Nat576.create64();
        SecT571Field.reduce(tt, z);
        return new SecT571FieldElement(z);
    }

    public ECFieldElement divide(ECFieldElement b)
    {
        // Division via multiplication by the inverse.
        return multiply(b.invert());
    }

    public ECFieldElement negate()
    {
        // Every element is its own negative in characteristic 2.
        return this;
    }

    public ECFieldElement square()
    {
        long[] z = Nat576.create64();
        SecT571Field.square(x, z);
        return new SecT571FieldElement(z);
    }

    public ECFieldElement squareMinusProduct(ECFieldElement x, ECFieldElement y)
    {
        // Minus and plus coincide in characteristic 2.
        return squarePlusProduct(x, y);
    }

    /** Computes this^2 + x*y with a single reduction (see multiplyPlusProduct). */
    public ECFieldElement squarePlusProduct(ECFieldElement x, ECFieldElement y)
    {
        long[] ax = this.x;
        long[] xx = ((SecT571FieldElement)x).x, yx = ((SecT571FieldElement)y).x;

        long[] tt = Nat576.createExt64();
        SecT571Field.squareAddToExt(ax, tt);
        SecT571Field.multiplyAddToExt(xx, yx, tt);

        long[] z = Nat576.create64();
        SecT571Field.reduce(tt, z);
        return new SecT571FieldElement(z);
    }

    /**
     * Repeated squaring: returns this^(2^pow). For pow < 1 the element itself
     * is returned unchanged.
     */
    public ECFieldElement squarePow(int pow)
    {
        if (pow < 1)
        {
            return this;
        }

        long[] z = Nat576.create64();
        SecT571Field.squareN(x, pow, z);
        return new SecT571FieldElement(z);
    }

    public ECFieldElement invert()
    {
        long[] z = Nat576.create64();
        SecT571Field.invert(x, z);
        return new SecT571FieldElement(z);
    }

    /** Square root, delegated to the field routine (a permutation in F2m). */
    public ECFieldElement sqrt()
    {
        long[] z = Nat576.create64();
        SecT571Field.sqrt(x, z);
        return new SecT571FieldElement(z);
    }

    public int getRepresentation()
    {
        // Pentanomial basis representation.
        return ECFieldElement.F2m.PPB;
    }

    public int getM()
    {
        return 571;
    }

    // K1..K3 report the middle exponents of the reduction pentanomial.
    public int getK1()
    {
        return 2;
    }

    public int getK2()
    {
        return 5;
    }

    public int getK3()
    {
        return 10;
    }

    public boolean equals(Object other)
    {
        if (other == this)
        {
            return true;
        }

        if (!(other instanceof SecT571FieldElement))
        {
            return false;
        }

        SecT571FieldElement o = (SecT571FieldElement)other;
        return Nat576.eq64(x, o.x);
    }

    public int hashCode()
    {
        // Class-specific salt XORed with a hash over the 9 limbs.
        return 5711052 ^ Arrays.hashCode(x, 0, 9);
    }
}
package com.wiselane.revealcricketquiz;

import java.io.IOException;
import java.util.List;

import android.app.Activity;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.database.SQLException;
import android.os.Bundle;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.Toast;

import com.google.android.gms.ads.AdRequest;
import com.google.android.gms.ads.AdView;
import com.google.android.gms.games.Games;
import com.google.example.games.basegameutils.GameHelper;
import com.google.example.games.basegameutils.GameHelper.GameHelperListener;

/**
 * Main menu screen: hosts the banner ad and routes to the quiz block list,
 * settings, the Play Games leaderboard, and the quit dialog.
 */
public class MenuActivity extends Activity implements OnClickListener {

    private Button playButton;
    private Button settingsButton;
    private Button quitButton;
    private Button highscores;
    private AdView adView;
    private DataBaseHelper dbHelper;
    private GameHelper gameHelper;
    private List<BlockInfo> blockInfoList;
    // Guards against launching a second sign-in flow; reset in onPause and on
    // sign-in failure. NOTE(review): nothing ever sets this true, so the
    // highscores branch currently always starts a fresh GameHelper — confirm
    // whether that is intended.
    private boolean canSignIn;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_menu);

        adView = (AdView) this.findViewById(R.id.adViewMenu);
        AdRequest adRequest = new AdRequest.Builder().build();
        adView.loadAd(adRequest);

        playButton = (Button) findViewById(R.id.bPlay);
        settingsButton = (Button) findViewById(R.id.bSettings);
        quitButton = (Button) findViewById(R.id.bQuit);
        highscores = (Button) findViewById(R.id.bHighscores);
        playButton.setOnClickListener(this);
        settingsButton.setOnClickListener(this);
        quitButton.setOnClickListener(this);
        highscores.setOnClickListener(this);
    }

    /*@Override
    protected void onStart() {
        super.onStart();
        gameHelper.onStart(this);
    }*/

    @Override
    protected void onResume() {
        super.onResume();
        if (adView != null) {
            adView.resume();
        }
    }

    @Override
    protected void onPause() {
        if (adView != null) {
            adView.pause();
        }
        canSignIn = false;
        super.onPause();
    }

    @Override
    protected void onStop() {
        super.onStop();
        // GameHelper is created lazily in onClick, so it may still be null here.
        if (gameHelper != null) {
            gameHelper.onStop();
        }
    }

    @Override
    protected void onDestroy() {
        if (adView != null) {
            adView.destroy();
        }
        super.onDestroy();
        if (dbHelper != null) {
            dbHelper.close();
            dbHelper = null;
        }
    }

    /** Dispatches menu button presses. */
    public void onClick(View view) {
        switch (view.getId()) {
        case R.id.bPlay:
            Intent intent = new Intent(MenuActivity.this, BlockListActivity.class);
            startActivity(intent);
            overridePendingTransition(R.anim.slide_in_right, R.anim.slide_out_left);
            break;
        case R.id.bSettings:
            Intent settingsIntent = new Intent(MenuActivity.this, SettingsActivity.class);
            startActivity(settingsIntent);
            overridePendingTransition(R.anim.slide_in_right, R.anim.slide_out_left);
            break;
        case R.id.bQuit:
            showQuitDialog();
            break;
        case R.id.bHighscores:
            if (!canSignIn) {
                createAndOpenDatabaseIfDoesNotExist();
                final int totalScore = generateTotalScore();
                gameHelper = new GameHelper(MenuActivity.this, GameHelper.CLIENT_GAMES);
                GameHelperListener listener = new GameHelperListener() {
                    public void onSignInSucceeded() {
                        if (gameHelper.getApiClient().isConnected()) {
                            // Submit the aggregate score, then show the leaderboard UI.
                            Games.Leaderboards.submitScore(gameHelper.getApiClient(),
                                    getString(R.string.leaderboard_highscores), totalScore);
                            startActivityForResult(Games.Leaderboards.getLeaderboardIntent(
                                    gameHelper.getApiClient(),
                                    getString(R.string.leaderboard_highscores)), 1);
                        }
                    }

                    public void onSignInFailed() {
                        Toast.makeText(getBaseContext(), "Failed to sign in",
                                Toast.LENGTH_SHORT).show();
                        canSignIn = false;
                    }
                };
                gameHelper.setup(listener);
                gameHelper.onStart(MenuActivity.this);
            }
            break;
        }
    }

    /**
     * Sums the per-block total scores stored in the BlockInfo table.
     * Requires dbHelper to be open (see createAndOpenDatabaseIfDoesNotExist).
     */
    private int generateTotalScore() {
        int totalScore = 0;
        blockInfoList = dbHelper.getAllBlocksInfo("BlockInfo");
        // Enhanced-for replaces the original index loop; same traversal order.
        for (BlockInfo info : blockInfoList) {
            totalScore += info.getTotalScore();
        }
        return totalScore;
    }

    /**
     * Creates the packaged database on first use and opens it.
     *
     * @throws Error if the database cannot be created (cause chained so the
     *     underlying IOException is not lost — previously it was swallowed).
     */
    private void createAndOpenDatabaseIfDoesNotExist() {
        dbHelper = new DataBaseHelper(this);
        try {
            dbHelper.createDataBase();
        } catch (IOException e) {
            // FIX: chain the cause instead of discarding it.
            throw new Error("Unable to create database", e);
        }
        // android.database.SQLException is unchecked; the original
        // catch-and-rethrow added nothing, so let it propagate directly.
        dbHelper.openDataBase();
    }

    @Override
    protected void onActivityResult(int request, int response, Intent data) {
        super.onActivityResult(request, response, data);
        if (gameHelper != null) {
            gameHelper.onActivityResult(request, response, data);
        }
    }

    @Override
    public void onBackPressed() {
        showQuitDialog();
    }

    /** Confirmation dialog shown on Quit and on Back. */
    private void showQuitDialog() {
        AlertDialog.Builder builder = new AlertDialog.Builder(MenuActivity.this);
        builder.setTitle("Quit");
        builder.setMessage("Are you sure you want to quit?");
        builder.setPositiveButton("Yes", new DialogInterface.OnClickListener() {
            public void onClick(DialogInterface arg0, int arg1) {
                MenuActivity.this.finish();
            }
        });
        builder.setNegativeButton("No", new DialogInterface.OnClickListener() {
            public void onClick(DialogInterface arg0, int arg1) {
                arg0.dismiss();
            }
        });
        AlertDialog dialog = builder.create();
        dialog.show();
    }
}
package psidev.psi.mi.jami.xml.io.writer.elements.impl.compact.xml25; import junit.framework.Assert; import org.junit.Ignore; import org.junit.Test; import psidev.psi.mi.jami.binary.ModelledBinaryInteraction; import psidev.psi.mi.jami.binary.impl.DefaultNamedModelledBinaryInteraction; import psidev.psi.mi.jami.exception.IllegalRangeException; import psidev.psi.mi.jami.model.*; import psidev.psi.mi.jami.model.impl.*; import psidev.psi.mi.jami.utils.CvTermUtils; import psidev.psi.mi.jami.utils.InteractorUtils; import psidev.psi.mi.jami.utils.RangeUtils; import psidev.psi.mi.jami.xml.cache.PsiXmlObjectCache; import psidev.psi.mi.jami.xml.cache.InMemoryIdentityObjectCache; import psidev.psi.mi.jami.xml.io.writer.elements.impl.AbstractXmlWriterTest; import psidev.psi.mi.jami.xml.io.writer.elements.impl.compact.xml25.XmlNamedModelledBinaryInteractionWriter; import javax.xml.stream.XMLStreamException; import java.io.IOException; import java.math.BigDecimal; /** * Unit tester for XmlNamedModelledBinaryInteractionWriter * * @author Marine Dumousseau ([email protected]) * @version $Id$ * @since <pre>25/11/13</pre> */ public class XmlNamedModelledBinaryInteractionWriterTest extends AbstractXmlWriterTest { private String interaction = "<interaction id=\"1\">\n" + " <experimentList>\n" + " <experimentRef>2</experimentRef>\n"+ " </experimentList>\n" + " <participantList>\n" + " <participant id=\"3\">\n" + " <interactorRef>4</interactorRef>\n" + " <biologicalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </biologicalRole>\n" + " </participant>\n"+ " </participantList>\n" + "</interaction>"; private String interaction_complex = "<interaction id=\"1\">\n" + " <experimentList>\n" + " <experimentRef>2</experimentRef>\n"+ " </experimentList>\n" + " <participantList>\n" + " <participant id=\"3\">\n" + " 
<interactionRef>4</interactionRef>\n" + " <biologicalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </biologicalRole>\n" + " </participant>\n"+ " </participantList>\n" + "</interaction>"; private String interaction_shortName ="<interaction id=\"1\">\n" + " <names>\n" + " <shortLabel>interaction test</shortLabel>\n"+ " </names>\n" + " <experimentList>\n" + " <experimentRef>2</experimentRef>\n"+ " </experimentList>\n" + " <participantList>\n" + " <participant id=\"3\">\n" + " <interactorRef>4</interactorRef>\n" + " <biologicalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </biologicalRole>\n" + " </participant>\n"+ " </participantList>\n" + "</interaction>"; private String interaction_fullName ="<interaction id=\"1\">\n" + " <names>\n" + " <fullName>interaction test</fullName>\n"+ " </names>\n" + " <experimentList>\n" + " <experimentRef>2</experimentRef>\n"+ " </experimentList>\n" + " <participantList>\n" + " <participant id=\"3\">\n" + " <interactorRef>4</interactorRef>\n" + " <biologicalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </biologicalRole>\n" + " </participant>\n"+ " </participantList>\n" + "</interaction>"; private String interaction_aliases ="<interaction id=\"1\">\n" + " <names>\n" + " <alias type=\"synonym\">interaction synonym</alias>\n"+ " <alias>test</alias>\n"+ " </names>\n" + " <experimentList>\n" + " <experimentRef>2</experimentRef>\n"+ " </experimentList>\n" + " <participantList>\n" + " <participant 
id=\"3\">\n" + " <interactorRef>4</interactorRef>\n" + " <biologicalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </biologicalRole>\n" + " </participant>\n"+ " </participantList>\n" + "</interaction>"; private String interaction_identifier = "<interaction id=\"1\">\n" + " <xref>\n" + " <primaryRef db=\"intact\" id=\"EBI-xxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " <secondaryRef db=\"test\" id=\"xxxx1\"/>\n"+ " </xref>\n"+ " <experimentList>\n" + " <experimentRef>2</experimentRef>\n"+ " </experimentList>\n" + " <participantList>\n" + " <participant id=\"3\">\n" + " <interactorRef>4</interactorRef>\n" + " <biologicalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </biologicalRole>\n" + " </participant>\n"+ " </participantList>\n" + "</interaction>"; private String interaction_xref = "<interaction id=\"1\">\n" + " <xref>\n" + " <primaryRef db=\"test2\" id=\"xxxx2\"/>\n" + " <secondaryRef db=\"test\" id=\"xxxx1\"/>\n"+ " </xref>\n"+ " <experimentList>\n" + " <experimentRef>2</experimentRef>\n"+ " </experimentList>\n" + " <participantList>\n" + " <participant id=\"3\">\n" + " <interactorRef>4</interactorRef>\n" + " <biologicalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </biologicalRole>\n" + " </participant>\n"+ " </participantList>\n" + "</interaction>"; private String interaction_inferred = "<interaction id=\"1\">\n" + " <experimentList>\n" + " <experimentRef>2</experimentRef>\n"+ " </experimentList>\n" + " 
<participantList>\n" + " <participant id=\"3\">\n" + " <interactorRef>4</interactorRef>\n" + " <biologicalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </biologicalRole>\n" + " <featureList>\n" + " <feature id=\"5\">\n" + " <featureType>\n" + " <names>\n" + " <shortLabel>biological feature</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0252\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </featureType>\n" + " <featureRangeList>\n" + " <featureRange>\n" + " <startStatus>\n" + " <names>\n" + " <shortLabel>certain</shortLabel>\n"+ " </names>\n"+ " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0335\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </startStatus>\n" + " <begin position=\"1\"/>\n"+ " <endStatus>\n" + " <names>\n" + " <shortLabel>certain</shortLabel>\n"+ " </names>\n"+ " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0335\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </endStatus>\n" + " <end position=\"4\"/>\n"+ " </featureRange>\n"+ " </featureRangeList>\n" + " </feature>\n"+ " </featureList>\n" + " </participant>\n"+ " <participant id=\"6\">\n" + " <interactorRef>7</interactorRef>\n" + " <biologicalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </biologicalRole>\n" + " <featureList>\n" + " <feature id=\"8\">\n" + " <featureType>\n" + " <names>\n" + " <shortLabel>biological feature</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0252\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " 
</xref>\n" + " </featureType>\n" + " <featureRangeList>\n" + " <featureRange>\n" + " <startStatus>\n" + " <names>\n" + " <shortLabel>certain</shortLabel>\n"+ " </names>\n"+ " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0335\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </startStatus>\n" + " <begin position=\"1\"/>\n"+ " <endStatus>\n" + " <names>\n" + " <shortLabel>certain</shortLabel>\n"+ " </names>\n"+ " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0335\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </endStatus>\n" + " <end position=\"4\"/>\n"+ " </featureRange>\n"+ " </featureRangeList>\n" + " </feature>\n"+ " </featureList>\n" + " </participant>\n"+ " </participantList>\n" + " <inferredInteractionList>\n" + " <inferredInteraction>\n" + " <participant>\n" + " <participantFeatureRef>5</participantFeatureRef>\n" + " </participant>\n"+ " <participant>\n" + " <participantFeatureRef>8</participantFeatureRef>\n" + " </participant>\n"+ " </inferredInteraction>\n"+ " </inferredInteractionList>\n" + "</interaction>"; private String interaction_type = "<interaction id=\"1\">\n" + " <experimentList>\n" + " <experimentRef>2</experimentRef>\n"+ " </experimentList>\n" + " <participantList>\n" + " <participant id=\"3\">\n" + " <interactorRef>4</interactorRef>\n" + " <biologicalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </biologicalRole>\n" + " </participant>\n"+ " </participantList>\n" + " <interactionType>\n" + " <names>\n" + " <shortLabel>association</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0914\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </interactionType>\n" + "</interaction>"; private String interaction_attributes = 
"<interaction id=\"1\">\n" + " <experimentList>\n" + " <experimentRef>2</experimentRef>\n"+ " </experimentList>\n" + " <participantList>\n" + " <participant id=\"3\">\n" + " <interactorRef>4</interactorRef>\n" + " <biologicalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </biologicalRole>\n" + " </participant>\n"+ " </participantList>\n" + " <attributeList>\n" + " <attribute name=\"test2\"/>\n"+ " <attribute name=\"test3\"/>\n"+ " <attribute name=\"spoke expansion\" nameAc=\"MI:1060\"/>\n"+ " </attributeList>\n"+ "</interaction>"; private String interaction_registered = "<interaction id=\"2\">\n" + " <experimentList>\n" + " <experimentRef>3</experimentRef>\n"+ " </experimentList>\n" + " <participantList>\n" + " <participant id=\"4\">\n" + " <interactorRef>5</interactorRef>\n" + " <biologicalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </biologicalRole>\n" + " </participant>\n"+ " </participantList>\n" + "</interaction>"; private String interaction_confidence = "<interaction id=\"1\">\n" + " <experimentList>\n" + " <experimentRef>2</experimentRef>\n"+ " </experimentList>\n" + " <participantList>\n" + " <participant id=\"3\">\n" + " <interactorRef>4</interactorRef>\n" + " <biologicalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </biologicalRole>\n" + " </participant>\n"+ " </participantList>\n" + " <confidenceList>\n" + " <confidence>\n" + " <unit>\n" + " <names>\n" + " <shortLabel>intact-miscore</shortLabel>\n"+ " 
</names>\n"+ " </unit>\n" + " <value>0.8</value>\n" + " </confidence>\n"+ " </confidenceList>\n" + "</interaction>"; private String interaction_parameter = "<interaction id=\"1\">\n" + " <experimentList>\n" + " <experimentRef>2</experimentRef>\n"+ " </experimentList>\n" + " <participantList>\n" + " <participant id=\"3\">\n" + " <interactorRef>4</interactorRef>\n" + " <biologicalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </biologicalRole>\n" + " </participant>\n"+ " </participantList>\n" + " <parameterList>\n" + " <parameter term=\"kd\" base=\"10\" exponent=\"0\" factor=\"5\">\n" + " <experimentRef>2</experimentRef>\n" + " </parameter>\n"+ " </parameterList>\n" + "</interaction>"; private String interaction_preAssembly = "<interaction id=\"1\">\n" + " <experimentList>\n" + " <experimentRef>2</experimentRef>\n"+ " </experimentList>\n" + " <participantList>\n" + " <participant id=\"3\">\n" + " <interactorRef>4</interactorRef>\n" + " <biologicalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </biologicalRole>\n" + " </participant>\n"+ " </participantList>\n" + " <attributeList>\n" + " <attribute name=\"pre-assembly\" nameAc=\"MI:1158\"/>\n" + " <attribute name=\"positive cooperative effect\" nameAc=\"MI:1154\"/>\n" + " <attribute name=\"configurational pre-organization\" nameAc=\"MI:1174\"/>\n"+ " <attribute name=\"affected interaction\" nameAc=\"MI:1150\">5</attribute>\n" + " </attributeList>\n" + "</interaction>"; private String interaction_allostery = "<interaction id=\"1\">\n" + " <experimentList>\n" + " <experimentRef>2</experimentRef>\n"+ " </experimentList>\n" + " <participantList>\n" + " 
<participant id=\"3\">\n" + " <interactorRef>4</interactorRef>\n" + " <biologicalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </biologicalRole>\n" + " </participant>\n"+ " </participantList>\n" + " <attributeList>\n" + " <attribute name=\"allostery\" nameAc=\"MI:1157\"/>\n" + " <attribute name=\"allosteric molecule\" nameAc=\"MI:1159\">3</attribute>\n" + " <attribute name=\"allosteric effector\" nameAc=\"MI:1160\">5</attribute>\n" + " <attribute name=\"heterotropic allostery\" nameAc=\"MI:1168\"/>\n" + " <attribute name=\"allosteric change in structure\" nameAc=\"MI:1165\"/>\n" + " <attribute name=\"positive cooperative effect\" nameAc=\"MI:1154\"/>\n" + " <attribute name=\"allosteric v-type response\" nameAc=\"MI:1163\"/>\n" + " <attribute name=\"affected interaction\" nameAc=\"MI:1150\">6</attribute>\n" + " </attributeList>\n" + "</interaction>"; private PsiXmlObjectCache elementCache = new InMemoryIdentityObjectCache(); @Test public void test_write_interaction() throws XMLStreamException, IOException, IllegalRangeException { ModelledBinaryInteraction interaction = new DefaultNamedModelledBinaryInteraction(); ModelledParticipant participant = new DefaultNamedModelledParticipant(InteractorUtils.createUnknownBasicInteractor()); interaction.addParticipant(participant); elementCache.clear(); XmlNamedModelledBinaryInteractionWriter writer = new XmlNamedModelledBinaryInteractionWriter(createStreamWriter(), this.elementCache); writer.write(interaction); streamWriter.flush(); Assert.assertEquals(this.interaction, output.toString()); } @Test public void test_write_participant_complex() throws XMLStreamException, IOException, IllegalRangeException { ModelledBinaryInteraction interaction = new DefaultNamedModelledBinaryInteraction(); Complex complex = new DefaultComplex("test complex"); 
complex.getParticipants().add(new DefaultNamedModelledParticipant(new DefaultProtein("test protein"))); ModelledParticipant participant = new DefaultNamedModelledParticipant(complex); interaction.addParticipant(participant); elementCache.clear(); XmlNamedModelledBinaryInteractionWriter writer = new XmlNamedModelledBinaryInteractionWriter(createStreamWriter(), this.elementCache); writer.write(interaction); streamWriter.flush(); Assert.assertEquals(this.interaction_complex, output.toString()); } @Test public void test_write_participant_complex_as_interactor() throws XMLStreamException, IOException, IllegalRangeException { ModelledBinaryInteraction interaction = new DefaultNamedModelledBinaryInteraction(); Complex complex = new DefaultComplex("test complex"); complex.getParticipants().add(new DefaultNamedModelledParticipant(new DefaultProtein("test protein"))); ModelledParticipant participant = new DefaultNamedModelledParticipant(complex); interaction.addParticipant(participant); elementCache.clear(); XmlNamedModelledBinaryInteractionWriter writer = new XmlNamedModelledBinaryInteractionWriter(createStreamWriter(), this.elementCache); writer.setComplexAsInteractor(true); writer.write(interaction); streamWriter.flush(); Assert.assertEquals(this.interaction, output.toString()); } @Test public void test_write_participant_complex_no_participants() throws XMLStreamException, IOException, IllegalRangeException { ModelledBinaryInteraction interaction = new DefaultNamedModelledBinaryInteraction(); Complex complex = new DefaultComplex("test complex"); ModelledParticipant participant = new DefaultNamedModelledParticipant(complex); interaction.addParticipant(participant); elementCache.clear(); XmlNamedModelledBinaryInteractionWriter writer = new XmlNamedModelledBinaryInteractionWriter(createStreamWriter(), this.elementCache); writer.write(interaction); streamWriter.flush(); Assert.assertEquals(this.interaction, output.toString()); } @Test public void 
test_write_interaction_shortName() throws XMLStreamException, IOException, IllegalRangeException { ModelledBinaryInteraction interaction = new DefaultNamedModelledBinaryInteraction("interaction test"); ModelledParticipant participant = new DefaultNamedModelledParticipant(InteractorUtils.createUnknownBasicInteractor()); interaction.addParticipant(participant); elementCache.clear(); XmlNamedModelledBinaryInteractionWriter writer = new XmlNamedModelledBinaryInteractionWriter(createStreamWriter(), this.elementCache); writer.write(interaction); streamWriter.flush(); Assert.assertEquals(this.interaction_shortName, output.toString()); } @Test public void test_write_interaction_fullName() throws XMLStreamException, IOException, IllegalRangeException { NamedInteraction interaction = new DefaultNamedModelledBinaryInteraction(); interaction.setFullName("interaction test"); ModelledParticipant participant = new DefaultNamedModelledParticipant(InteractorUtils.createUnknownBasicInteractor()); interaction.addParticipant(participant); elementCache.clear(); XmlNamedModelledBinaryInteractionWriter writer = new XmlNamedModelledBinaryInteractionWriter(createStreamWriter(), this.elementCache); writer.write((ModelledBinaryInteraction)interaction); streamWriter.flush(); Assert.assertEquals(this.interaction_fullName, output.toString()); } @Test public void test_write_interaction_alias() throws XMLStreamException, IOException, IllegalRangeException { NamedInteraction interaction = new DefaultNamedModelledBinaryInteraction(); interaction.getAliases().add(new DefaultAlias(new DefaultCvTerm("synonym"), "interaction synonym")); interaction.getAliases().add(new DefaultAlias("test")); ModelledParticipant participant = new DefaultNamedModelledParticipant(InteractorUtils.createUnknownBasicInteractor()); interaction.addParticipant(participant); elementCache.clear(); XmlNamedModelledBinaryInteractionWriter writer = new XmlNamedModelledBinaryInteractionWriter(createStreamWriter(), this.elementCache); 
writer.write((ModelledBinaryInteraction)interaction); streamWriter.flush(); Assert.assertEquals(this.interaction_aliases, output.toString()); } @Test public void test_write_interaction_identifier() throws XMLStreamException, IOException, IllegalRangeException { ModelledBinaryInteraction interaction = new DefaultNamedModelledBinaryInteraction(); ModelledParticipant participant = new DefaultNamedModelledParticipant(InteractorUtils.createUnknownBasicInteractor()); interaction.addParticipant(participant); interaction.getIdentifiers().add(new DefaultXref(new DefaultCvTerm("intact"), "EBI-xxx")); interaction.getXrefs().add(new DefaultXref(new DefaultCvTerm("test"), "xxxx1")); elementCache.clear(); XmlNamedModelledBinaryInteractionWriter writer = new XmlNamedModelledBinaryInteractionWriter(createStreamWriter(), this.elementCache); writer.write(interaction); streamWriter.flush(); Assert.assertEquals(this.interaction_identifier, output.toString()); } @Test public void test_write_interaction_xref() throws XMLStreamException, IOException, IllegalRangeException { ModelledBinaryInteraction interaction = new DefaultNamedModelledBinaryInteraction(); ModelledParticipant participant = new DefaultNamedModelledParticipant(InteractorUtils.createUnknownBasicInteractor()); interaction.addParticipant(participant); interaction.getXrefs().add(new DefaultXref(new DefaultCvTerm("test2"), "xxxx2")); interaction.getXrefs().add(new DefaultXref(new DefaultCvTerm("test"), "xxxx1")); elementCache.clear(); XmlNamedModelledBinaryInteractionWriter writer = new XmlNamedModelledBinaryInteractionWriter(createStreamWriter(), this.elementCache); writer.write(interaction); streamWriter.flush(); Assert.assertEquals(this.interaction_xref, output.toString()); } @Test @Ignore public void test_write_interaction_inferred() throws XMLStreamException, IOException, IllegalRangeException { ModelledBinaryInteraction interaction = new DefaultNamedModelledBinaryInteraction(); ModelledParticipant participant = new 
DefaultNamedModelledParticipant(InteractorUtils.createUnknownBasicInteractor()); ModelledParticipant participant2 = new DefaultNamedModelledParticipant(InteractorUtils.createUnknownBasicInteractor()); // two inferred interactiosn f1, f2, f3 and f3,f4 ModelledFeature f1 = new DefaultModelledFeature(); f1.getRanges().add(RangeUtils.createRangeFromString("1-4")); ModelledFeature f2 = new DefaultModelledFeature(); f2.getRanges().add(RangeUtils.createRangeFromString("1-4")); f1.getLinkedFeatures().add(f2); f2.getLinkedFeatures().add(f1); participant.addFeature(f1); participant2.addFeature(f2); interaction.addParticipant(participant); interaction.addParticipant(participant2); elementCache.clear(); XmlNamedModelledBinaryInteractionWriter writer = new XmlNamedModelledBinaryInteractionWriter(createStreamWriter(), this.elementCache); writer.write(interaction); streamWriter.flush(); Assert.assertEquals(this.interaction_inferred, output.toString()); } @Test public void test_write_interaction_type() throws XMLStreamException, IOException, IllegalRangeException { ModelledBinaryInteraction interaction = new DefaultNamedModelledBinaryInteraction(); ModelledParticipant participant = new DefaultNamedModelledParticipant(InteractorUtils.createUnknownBasicInteractor()); interaction.addParticipant(participant); interaction.setInteractionType(CvTermUtils.createMICvTerm("association", "MI:0914")); elementCache.clear(); XmlNamedModelledBinaryInteractionWriter writer = new XmlNamedModelledBinaryInteractionWriter(createStreamWriter(), this.elementCache); writer.write(interaction); streamWriter.flush(); Assert.assertEquals(this.interaction_type, output.toString()); } @Test public void test_write_interaction_attributes() throws XMLStreamException, IOException, IllegalRangeException { ModelledBinaryInteraction interaction = new DefaultNamedModelledBinaryInteraction(); ModelledParticipant participant = new DefaultNamedModelledParticipant(InteractorUtils.createUnknownBasicInteractor()); 
interaction.addParticipant(participant); interaction.getAnnotations().add(new DefaultAnnotation(new DefaultCvTerm("test2"))); interaction.getAnnotations().add(new DefaultAnnotation(new DefaultCvTerm("test3"))); interaction.setComplexExpansion(CvTermUtils.createMICvTerm("spoke expansion", "MI:1060")); elementCache.clear(); XmlNamedModelledBinaryInteractionWriter writer = new XmlNamedModelledBinaryInteractionWriter(createStreamWriter(), this.elementCache); writer.write(interaction); streamWriter.flush(); Assert.assertEquals(this.interaction_attributes, output.toString()); } @Test public void test_write_interaction_registered() throws XMLStreamException, IOException, IllegalRangeException { ModelledBinaryInteraction interaction = new DefaultNamedModelledBinaryInteraction(); ModelledParticipant participant = new DefaultNamedModelledParticipant(InteractorUtils.createUnknownBasicInteractor()); interaction.addParticipant(participant); elementCache.clear(); elementCache.extractIdForInteraction(new DefaultInteraction()); elementCache.extractIdForInteraction(interaction); XmlNamedModelledBinaryInteractionWriter writer = new XmlNamedModelledBinaryInteractionWriter(createStreamWriter(), this.elementCache); writer.write(interaction); streamWriter.flush(); Assert.assertEquals(this.interaction_registered, output.toString()); } @Test public void test_write_interaction_parameter() throws XMLStreamException, IOException, IllegalRangeException { ModelledBinaryInteraction interaction = new DefaultNamedModelledBinaryInteraction(); ModelledParticipant participant = new DefaultNamedModelledParticipant(InteractorUtils.createUnknownBasicInteractor()); interaction.addParticipant(participant); interaction.getModelledParameters().add(new DefaultModelledParameter(new DefaultCvTerm("kd"), new ParameterValue(new BigDecimal(5)))); elementCache.clear(); XmlNamedModelledBinaryInteractionWriter writer = new XmlNamedModelledBinaryInteractionWriter(createStreamWriter(), this.elementCache); 
writer.write(interaction); streamWriter.flush(); Assert.assertEquals(this.interaction_parameter, output.toString()); } @Test public void test_write_interaction_confidence() throws XMLStreamException, IOException, IllegalRangeException { ModelledBinaryInteraction interaction = new DefaultNamedModelledBinaryInteraction(); ModelledParticipant participant = new DefaultNamedModelledParticipant(InteractorUtils.createUnknownBasicInteractor()); interaction.addParticipant(participant); interaction.getModelledConfidences().add(new DefaultModelledConfidence(new DefaultCvTerm("intact-miscore"), "0.8")); elementCache.clear(); XmlNamedModelledBinaryInteractionWriter writer = new XmlNamedModelledBinaryInteractionWriter(createStreamWriter(), this.elementCache); writer.write(interaction); streamWriter.flush(); Assert.assertEquals(this.interaction_confidence, output.toString()); } @Test public void test_write_interaction_preassembly() throws XMLStreamException, IOException, IllegalRangeException { ModelledBinaryInteraction interaction = new DefaultNamedModelledBinaryInteraction(); ModelledParticipant participant = new DefaultNamedModelledParticipant(InteractorUtils.createUnknownBasicInteractor()); interaction.addParticipant(participant); Preassembly assembly = new DefaultPreassemby(CvTermUtils.createMICvTerm("positive cooperative effect", "MI:1154")); assembly.setResponse(CvTermUtils.createMICvTerm("configurational pre-organization", "MI:1174")); assembly.getAffectedInteractions().add(new DefaultModelledInteraction()); interaction.getCooperativeEffects().add(assembly); elementCache.clear(); XmlNamedModelledBinaryInteractionWriter writer = new XmlNamedModelledBinaryInteractionWriter(createStreamWriter(), this.elementCache); writer.write(interaction); streamWriter.flush(); Assert.assertEquals(this.interaction_preAssembly, output.toString()); } @Test public void test_write_interaction_preassembly_defaultExperiment() throws XMLStreamException, IOException, IllegalRangeException { 
ModelledBinaryInteraction interaction = new DefaultNamedModelledBinaryInteraction(); ModelledParticipant participant = new DefaultNamedModelledParticipant(InteractorUtils.createUnknownBasicInteractor()); interaction.addParticipant(participant); Preassembly assembly = new DefaultPreassemby(CvTermUtils.createMICvTerm("positive cooperative effect", "MI:1154")); assembly.setResponse(CvTermUtils.createMICvTerm("configurational pre-organization", "MI:1174")); assembly.getAffectedInteractions().add(new DefaultModelledInteraction()); assembly.getCooperativityEvidences().add(new DefaultCooperativityEvidence(new DefaultPublication("12345"))); interaction.getCooperativeEffects().add(assembly); elementCache.clear(); XmlNamedModelledBinaryInteractionWriter writer = new XmlNamedModelledBinaryInteractionWriter(createStreamWriter(), this.elementCache); writer.write(interaction); streamWriter.flush(); Assert.assertEquals(this.interaction_preAssembly, output.toString()); } @Test public void test_write_interaction_allostery() throws XMLStreamException, IOException, IllegalRangeException { ModelledBinaryInteraction interaction = new DefaultNamedModelledBinaryInteraction(); ModelledParticipant participant = new DefaultNamedModelledParticipant(InteractorUtils.createUnknownBasicInteractor()); interaction.addParticipant(participant); Allostery allostery = new DefaultAllostery(CvTermUtils.createMICvTerm("positive cooperative effect", "MI:1154"), participant, new DefaultMoleculeEffector(new DefaultModelledParticipant(InteractorUtils.createUnknownBasicInteractor()))); allostery.setResponse(CvTermUtils.createMICvTerm("allosteric v-type response", "MI:1163")); allostery.getAffectedInteractions().add(new DefaultModelledInteraction()); allostery.setAllostericMechanism(CvTermUtils.createMICvTerm("allosteric change in structure", "MI:1165")); allostery.setAllosteryType(CvTermUtils.createMICvTerm("heterotropic allostery", "MI:1168")); interaction.getCooperativeEffects().add(allostery); 
elementCache.clear(); XmlNamedModelledBinaryInteractionWriter writer = new XmlNamedModelledBinaryInteractionWriter(createStreamWriter(), this.elementCache); writer.write(interaction); streamWriter.flush(); Assert.assertEquals(this.interaction_allostery, output.toString()); } }
package edu.wm.werewolf_client; import java.io.IOException; import java.io.UnsupportedEncodingException; import org.apache.http.HttpResponse; import org.apache.http.StatusLine; import org.apache.http.client.ClientProtocolException; import org.apache.http.client.HttpClient; import org.apache.http.client.methods.HttpPost; import org.apache.http.impl.client.DefaultHttpClient; import org.apache.http.message.BasicHeader; import android.app.Activity; import android.content.Context; import android.content.Intent; import android.location.Location; import android.os.Bundle; import android.os.Handler; import android.os.Looper; import android.util.Log; import android.view.View; import android.widget.Button; import android.widget.EditText; import android.widget.Toast; import com.loopj.android.http.*; import edu.wm.werewolf_client.FindLocation.LocationResult; public class Register extends Activity{ static Context context; static String TAG = "Register"; String username; String password; String passwordRetyped; String email; double lat; double lng; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_register); context = getApplicationContext(); Button registerButton = (Button)findViewById(R.id.registerButtonRegister); registerButton.setOnClickListener(registerListener); LocationResult locationResult = new LocationResult(){ @Override public void gotLocation(Location location){ if (location == null){ //If we failed to find the location for some reason, show the user an alert dialog Log.w(TAG,"failed to get location!"); } else{ //Got the location! 
lat = (double) (location.getLatitude()); lng = (double) (location.getLongitude()); Log.v(TAG,"latitude is: "+lat); Log.v(TAG,"longitude is: "+lng); } } }; FindLocation myLocation = new FindLocation(); myLocation.getLocation(this, locationResult); } View.OnClickListener registerListener = new View.OnClickListener() { @Override public void onClick(View v) { final EditText usernameText = (EditText) findViewById(R.id.usernameTextEditRegister); username = usernameText.getText().toString(); UsernameAndPassword.setUsername(username); Log.i(TAG, "Username is : "+username); final EditText passwordText = (EditText) findViewById(R.id.passwordTextEditRegister); password = passwordText.getText().toString(); UsernameAndPassword.setPassword(password); Log.i(TAG, "Password is : "+password); final EditText passwordRetypedText = (EditText) findViewById(R.id.passwordTextEditRetype); passwordRetyped = passwordRetypedText.getText().toString(); Log.i(TAG, "Password Retyped is : "+passwordRetyped); final EditText emailText = (EditText) findViewById(R.id.emailTextEdit); email = emailText.getText().toString(); Log.i(TAG, "Email is : "+email); if (username.isEmpty()){ Toast toast = Toast.makeText(context, "Please Enter a Username", Toast.LENGTH_LONG); toast.show(); } else{ if (password.isEmpty()){ Toast toast = Toast.makeText(context, "Please Enter a Password", Toast.LENGTH_LONG); toast.show(); } else{ if (!passwordRetyped.equals(password)){ Toast toast = Toast.makeText(context, "Password Fields Must Match", Toast.LENGTH_LONG); toast.show(); } else{ if (email.isEmpty()){ Toast toast = Toast.makeText(context, "Please Enter an Email Address", Toast.LENGTH_LONG); toast.show(); } else{ Log.i(TAG, "About to post new registration to server"); new Thread(new Runnable() { @Override public void run() { makeRequest("http://powerful-depths-2851.herokuapp.com/users/register?username=",username,password,lat,lng); } }).start(); } } } } } }; /** public static HttpResponse makeRequest(String uri, final 
String username, final String password, double lat, double lng) { try { //tacohen note: login should be something like: /users/login?username=admin&lat=31&lng=30&password=123 HttpClient client = new DefaultHttpClient(); uri = uri+username+"&lat="+lat+"&lng="+lng+"&password="+password; HttpPost httpPost = new HttpPost(uri); httpPost.setHeader(new BasicHeader("Content-type", "application/json")); HttpResponse response = client.execute(httpPost); Log.i(TAG, "URI is: "+httpPost.getURI()); StatusLine statusLine = response.getStatusLine(); Log.i(TAG, "HTTP response code was: "+statusLine.toString()); if (statusLine.toString().equals("HTTP/1.1 200 OK")){ Handler handler = new Handler(Looper.getMainLooper()); handler.post(new Runnable() { @Override public void run() { Intent intent = new Intent (context, MainInterface.class); intent.putExtra("username", username); intent.putExtra("password", password); startActivity(intent); } }); } else{ Log.e(TAG, "HTTP problem!"); } } catch (UnsupportedEncodingException e) { e.printStackTrace(); } catch (ClientProtocolException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } return null; }*/ public HttpResponse makeRequest(String uri, final String username, final String password,double lat,double lng) { try { //tacohen note: login should be something like: /users/login?username=admin&lat=31&lng=30&password=123 HttpClient client = new DefaultHttpClient(); uri = uri+username+"&lat="+lat+"&lng="+lng+"&password="+password; HttpPost httpPost = new HttpPost(uri); httpPost.setHeader(new BasicHeader("Content-type", "application/json")); HttpResponse response = client.execute(httpPost); Log.i(TAG, "URI is: "+httpPost.getURI()); StatusLine statusLine = response.getStatusLine(); Log.i(TAG, "HTTP response code was: "+statusLine.toString()); if (statusLine.toString().equals("HTTP/1.1 200 OK")){ Handler handler = new Handler(Looper.getMainLooper()); handler.post(new Runnable() { @Override public void run() { Intent intent = 
new Intent (Register.this, MainInterface.class); intent.putExtra("username", username); intent.putExtra("password", password); startActivity(intent); } }); //Validate v = new Validate(); //v.MoveToPlayScreen(username); } else{ Log.e(TAG, "HTTP problem!"); } } catch (UnsupportedEncodingException e) { e.printStackTrace(); } catch (ClientProtocolException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } return null; } }
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.digitalpetri.opcua.stack.core.types.builtin.unsigned; import java.io.ObjectStreamException; /** * The <code>unsigned int</code> type * * @author Lukas Eder * @author Ed Schaller */ public final class UInteger extends UNumber implements Comparable<UInteger> { private static final Class<UInteger> CLASS = UInteger.class; private static final String CLASS_NAME = CLASS.getName(); /** * System property name for the property to set the size of the pre-cache. */ private static final String PRECACHE_PROPERTY = CLASS_NAME + ".precacheSize"; /** * Default size for the value cache. */ private static final int DEFAULT_PRECACHE_SIZE = 256; /** * Generated UID */ private static final long serialVersionUID = -6821055240959745390L; /** * Cached values */ private static final UInteger[] VALUES = mkValues(); /** * A constant holding the minimum value an <code>unsigned int</code> can * have, 0. */ public static final long MIN_VALUE = 0x00000000; /** * A constant holding the maximum value an <code>unsigned int</code> can * have, 2<sup>32</sup>-1. */ public static final long MAX_VALUE = 0xffffffffL; /** * The value modelling the content of this <code>unsigned int</code> */ private final long value; /** * Figure out the size of the precache. 
* * @return The parsed value of the system property * {@link #PRECACHE_PROPERTY} or {@link #DEFAULT_PRECACHE_SIZE} if * the property is not set, not a number or retrieving results in a * {@link SecurityException}. If the parsed value is zero or * negative no cache will be created. If the value is larger than * {@link Integer#MAX_VALUE} then Integer#MAX_VALUE will be used. */ private static final int getPrecacheSize() { String prop = null; long propParsed; try { prop = System.getProperty(PRECACHE_PROPERTY); } catch (SecurityException e) { // security manager stopped us so use default // FIXME: should we log this somewhere? return DEFAULT_PRECACHE_SIZE; } if (prop == null) return DEFAULT_PRECACHE_SIZE; if (prop.length() <= 0) { // empty value // FIXME: should we log this somewhere? return DEFAULT_PRECACHE_SIZE; } try { propParsed = Long.parseLong(prop); } catch (NumberFormatException e) { // not a valid number // FIXME: should we log this somewhere? return DEFAULT_PRECACHE_SIZE; } // treat negative value as no cache... if (propParsed < 0) return 0; if (propParsed > Integer.MAX_VALUE) { // FIXME: should we log this somewhere return Integer.MAX_VALUE; } return (int) propParsed; } /** * Generate a cached value for initial unsigned integer values. * * @return Array of cached values for UInteger */ private static final UInteger[] mkValues() { int precacheSize = getPrecacheSize(); UInteger[] ret; if (precacheSize <= 0) return null; ret = new UInteger[precacheSize]; for (int i = 0; i < precacheSize; i++) ret[i] = new UInteger(i); return ret; } /** * Unchecked internal constructor. This serves two purposes: first it allows * {@link #UInteger(long)} to stay deprecated without warnings and second * constructor without unnecessary value checks. * * @param value The value to wrap * @param unused Unused paramater to distinguish between this and the * deprecated public constructor. 
*/ private UInteger(long value, boolean unused) { this.value = value; } /** * Retrieve a cached value. * * @param value Cached value to retrieve * @return Cached value if one exists. Null otherwise. */ private static UInteger getCached(long value) { if (VALUES != null && value < VALUES.length) return VALUES[(int) value]; return null; } /** * Get the value of a long without checking the value. */ private static UInteger valueOfUnchecked(long value) { UInteger cached; if ((cached = getCached(value)) != null) return cached; return new UInteger(value, true); } /** * Create an <code>unsigned int</code> * * @throws NumberFormatException If <code>value</code> does not contain a * parsable <code>unsigned int</code>. */ public static UInteger valueOf(String value) throws NumberFormatException { return valueOfUnchecked(rangeCheck(Long.parseLong(value))); } /** * Create an <code>unsigned int</code> by masking it with * <code>0xFFFFFFFF</code> i.e. <code>(int) -1</code> becomes * <code>(uint) 4294967295</code> */ public static UInteger valueOf(int value) { return valueOfUnchecked(value & MAX_VALUE); } /** * Create an <code>unsigned int</code> * * @throws NumberFormatException If <code>value</code> is not in the range * of an <code>unsigned byte</code> */ public static UInteger valueOf(long value) throws NumberFormatException { return valueOfUnchecked(rangeCheck(value)); } /** * Create an <code>unsigned int</code> * * @throws NumberFormatException If <code>value</code> is not in the range * of an <code>unsigned int</code> */ private UInteger(long value) throws NumberFormatException { this.value = rangeCheck(value); } /** * Create an <code>unsigned int</code> by masking it with * <code>0xFFFFFFFF</code> i.e. 
<code>(int) -1</code> becomes * <code>(uint) 4294967295</code> */ private UInteger(int value) { this.value = value & MAX_VALUE; } /** * Create an <code>unsigned int</code> * * @throws NumberFormatException If <code>value</code> does not contain a * parsable <code>unsigned int</code>. */ private UInteger(String value) throws NumberFormatException { this.value = rangeCheck(Long.parseLong(value)); } /** * Throw exception if value out of range (long version) * * @param value Value to check * @return value if it is in range * @throws NumberFormatException if value is out of range */ private static long rangeCheck(long value) throws NumberFormatException { if (value < MIN_VALUE || value > MAX_VALUE) { throw new NumberFormatException("Value is out of range : " + value); } return value; } /** * Replace version read through deserialization with cached version. * * @return cached instance of this object's value if one exists, otherwise * this object * @throws ObjectStreamException */ private Object readResolve() throws ObjectStreamException { UInteger cached; // the value read could be invalid so check it rangeCheck(value); if ((cached = getCached(value)) != null) return cached; return this; } @Override public int intValue() { return (int) value; } @Override public long longValue() { return value; } @Override public float floatValue() { return value; } @Override public double doubleValue() { return value; } @Override public int hashCode() { return Long.valueOf(value).hashCode(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj instanceof UInteger) { return value == ((UInteger) obj).value; } return false; } @Override public String toString() { return Long.valueOf(value).toString(); } @Override public int compareTo(UInteger o) { return (value < o.value ? -1 : (value == o.value ? 0 : 1)); } }
/*
 * Copyright 2014-2015 CyberVision, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.kaaproject.avro.ui.gwt.client.widget;

import java.math.BigDecimal;
import java.math.MathContext;
import java.math.RoundingMode;
import java.text.ParseException;

import org.kaaproject.avro.ui.gwt.client.AvroUiResources.AvroUiStyle;
import org.kaaproject.avro.ui.gwt.client.util.Utils;

import com.google.gwt.core.client.Scheduler;
import com.google.gwt.core.client.Scheduler.ScheduledCommand;
import com.google.gwt.dom.client.Element;
import com.google.gwt.event.dom.client.BlurEvent;
import com.google.gwt.event.dom.client.BlurHandler;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.event.dom.client.FocusEvent;
import com.google.gwt.event.dom.client.FocusHandler;
import com.google.gwt.event.dom.client.KeyCodes;
import com.google.gwt.event.dom.client.KeyDownEvent;
import com.google.gwt.event.dom.client.KeyDownHandler;
import com.google.gwt.i18n.client.NumberFormat;
import com.google.gwt.text.shared.AbstractRenderer;
import com.google.gwt.text.shared.Parser;
import com.google.gwt.text.shared.Renderer;
import com.google.gwt.user.client.DOM;
import com.google.gwt.user.client.Event;
import com.google.gwt.user.client.ui.ValueBox;

/**
 * Numeric input box for GWT. Filters key strokes so that only digits, an
 * optional leading minus sign, and (when {@link #setDecimal(boolean)} is
 * enabled) a single decimal point are accepted. Optionally displays a prompt
 * (placeholder) text while the box is empty, and re-renders the current value
 * through the supplied {@link Renderer} on blur and after paste.
 *
 * @param <T> concrete numeric type held by the box
 */
public abstract class NumberBox<T extends Number> extends ValueBox<T>
        implements BlurHandler, FocusHandler, ClickHandler, KeyDownHandler {

    // Style bundle providing the CSS class used while the prompt is shown.
    private AvroUiStyle style;
    // Placeholder text shown while the box is empty; may be blank/null, in
    // which case no prompt behavior is installed.
    private String promptText;
    // When true, a single '.' character is accepted in the input.
    private boolean isDecimal = false;
    // Renders the typed value back into canonical display text.
    private Renderer<T> renderer;

    /**
     * Creates the box and wires up all event handlers.
     *
     * @param style      style bundle with the prompt CSS class
     * @param element    the underlying input element
     * @param promptText placeholder text; prompt handling is only installed
     *                   when this is non-blank
     * @param renderer   value-to-text renderer
     * @param parser     text-to-value parser
     */
    protected NumberBox(AvroUiStyle style, Element element, String promptText,
            final Renderer<T> renderer, final Parser<T> parser) {
        super(element, renderer, parser);
        // Paste events are not covered by key filtering; handled in onBrowserEvent.
        sinkEvents(Event.ONPASTE);
        this.style = style;
        this.promptText = promptText;
        this.renderer = renderer;
        if (Utils.isNotBlank(promptText)) {
            // Focus/click handlers are only needed to hide/show the prompt.
            this.addFocusHandler(this);
            this.addClickHandler(this);
            setPrompts();
        }
        this.addKeyDownHandler(this);
        this.addBlurHandler(this);
    }

    /** On blur: restore the prompt if empty, otherwise normalize the display text. */
    @Override
    public void onBlur(BlurEvent event) {
        if (Utils.isNotBlank(promptText) && Utils.isBlank(super.getText())) {
            setPrompts();
        } else {
            // Re-render so the shown text matches the canonical format.
            setText(renderer.render(getValue()));
        }
    }

    /** On focus: place the caret at the start without selecting anything. */
    @Override
    public void onFocus(FocusEvent event) {
        this.setSelectionRange(0, 0);
    }

    /** On click: clear the prompt so the user can start typing. */
    @Override
    public void onClick(ClickEvent event) {
        if (promptText.equals(super.getText())) {
            removePrompts();
        }
    }

    /**
     * Handles raw browser events. Paste is processed on a deferred command so
     * the pasted text is already in the box when we normalize it.
     */
    @Override
    public void onBrowserEvent(Event event) {
        super.onBrowserEvent(event);
        switch (DOM.eventGetType(event)) {
            case Event.ONPASTE:
                Scheduler.get().scheduleDeferred(new ScheduledCommand() {
                    @Override
                    public void execute() {
                        if (Utils.isNotBlank(promptText) && Utils.isBlank(NumberBox.super.getText())) {
                            setPrompts();
                        } else {
                            // Round-trips the pasted text through parse+render,
                            // discarding characters the parser rejects.
                            setText(renderer.render(getValue()));
                        }
                    }
                });
                break;
        }
    }

    /**
     * Key filter: lets navigation/editing keys through, accepts digits, at
     * most one '.' (decimal mode only) and a leading '-', and cancels
     * everything else.
     */
    @Override
    public void onKeyDown(KeyDownEvent event) {
        if (!isEnabled() || isReadOnly()) return;
        // Any keystroke other than TAB dismisses the prompt text.
        if (Utils.isNotBlank(promptText) && promptText.equals(super.getText())
                && !(event.getNativeEvent().getKeyCode() == KeyCodes.KEY_TAB)) {
            removePrompts();
        }
        int keyCode = event.getNativeEvent().getKeyCode();
        // allow special keys
        switch (keyCode) {
            case KeyCodes.KEY_TAB:
            case KeyCodes.KEY_BACKSPACE:
            case KeyCodes.KEY_DELETE:
            case KeyCodes.KEY_LEFT:
            case KeyCodes.KEY_RIGHT:
            case KeyCodes.KEY_UP:
            case KeyCodes.KEY_DOWN:
            case KeyCodes.KEY_END:
            case KeyCodes.KEY_ENTER:
            case KeyCodes.KEY_ESCAPE:
            case KeyCodes.KEY_PAGEDOWN:
            case KeyCodes.KEY_PAGEUP:
            case KeyCodes.KEY_HOME:
            case KeyCodes.KEY_SHIFT:
            case KeyCodes.KEY_ALT:
            case KeyCodes.KEY_CTRL:
                return;
            default:
                // Allow Alt shortcuts and clipboard combos (Ctrl+C/V/X).
                if (event.isAltKeyDown()
                        || (event.isControlKeyDown() && (keyCode == KeyCodes.KEY_C
                                || keyCode == KeyCodes.KEY_V || keyCode == KeyCodes.KEY_X))) return;
                if (!event.isShiftKeyDown()) {
                    // check for decimal '.' — only one allowed, only in decimal mode
                    if (isDecimal() && isDot(keyCode) && !getText().contains(".")) return;
                    // check for negative sign '-' — only at position 0 and only once
                    if (getCursorPos() == 0 && isDash(keyCode) && !getText().startsWith("-")) return;
                    // filter out non-digits
                    if (isDigit(keyCode)) {
                        return;
                    }
                }
        }
        // Anything that fell through is rejected.
        cancelKey();
    }

    // 190 = '.' on the main row, 110 = '.' on the numeric keypad.
    private static boolean isDot(int keyCode) {
        return keyCode == 190 || keyCode == 110;
    }

    // Firefox reports the '-' key as 173; other browsers use 189. 109 is the
    // keypad minus.
    private static boolean isDash(int keyCode) {
        return (keyCode == (isFirefox() ? 173 : 189)) || keyCode == 109;
    }

    // 48-57 = top-row digits, 96-105 = keypad digits.
    private static boolean isDigit(int keyCode) {
        return keyCode >= 48 && keyCode <= 57 || keyCode >= 96 && keyCode <= 105;
    }

    // JSNI user-agent sniff; needed because Firefox uses a different key code
    // for the minus key (see isDash).
    private static native boolean isFirefox() /*-{
        return navigator.userAgent.toLowerCase().indexOf('firefox') > -1;
    }-*/;

    /** @return whether a decimal point is accepted in the input */
    public boolean isDecimal() {
        return isDecimal;
    }

    /** Enables/disables acceptance of a decimal point. */
    public void setDecimal(boolean isDecimal) {
        this.isDecimal = isDecimal;
    }

    /** Returns the user-entered text, treating the prompt text as empty. */
    @Override
    public String getText() {
        String text = super.getText();
        if (Utils.isNotBlank(promptText) && promptText.equals(text)) {
            return "";
        } else {
            return text;
        }
    }

    /** Sets the text, swapping in the prompt when the new text is blank. */
    @Override
    public void setText(String text) {
        if (Utils.isNotBlank(promptText)) {
            if (Utils.isBlank(text)) {
                setPrompts();
            } else {
                removePrompts();
                super.setText(text);
            }
        } else {
            super.setText(text);
        }
    }

    // Shows the prompt text with the dedicated prompt style.
    private void setPrompts() {
        this.addStyleName(style.prompt());
        super.setText(promptText);
    }

    // Clears the prompt text and its style.
    private void removePrompts() {
        this.removeStyleName(style.prompt());
        super.setText("");
    }

    /**
     * Renders numbers through a {@link NumberFormat} pattern. For integral
     * types the fraction digits are forced to zero.
     */
    static class NumberRenderer<N extends Number> extends AbstractRenderer<N> {

        private NumberFormat numberFormat;
        private boolean hasFraction;

        public NumberRenderer(String numberFormatPattern, boolean hasFraction) {
            numberFormat = NumberFormat.getFormat(numberFormatPattern);
            this.hasFraction = hasFraction;
            if (!hasFraction) {
                numberFormat.overrideFractionDigits(0);
            }
        }

        /** Renders {@code object}, or "" for null / unformattable values. */
        public String render(N object) {
            if (null == object) {
                return "";
            }
            try {
                if (hasFraction) {
                    // Precision is derived from the textual length of the value
                    // (minus one when a decimal point is present).
                    // NOTE(review): a leading '-' also counts toward precision
                    // here — confirm this is intended.
                    int precision = 0;
                    String strNumber = object.toString();
                    precision = strNumber.length();
                    if (strNumber.contains(".")) {
                        precision--;
                    }
                    return numberFormat.format(new BigDecimal(object.doubleValue(),
                            new MathContext(precision, RoundingMode.HALF_EVEN)));
                } else {
                    return numberFormat.format(object);
                }
            } catch (NumberFormatException e) {
                return "";
            }
        }
    }

    /**
     * Parses text into a number through a {@link NumberFormat} pattern;
     * subclasses convert the parsed double into the concrete type.
     */
    static abstract class NumberParser<N extends Number> implements Parser<N> {

        private NumberFormat numberFormat;

        public NumberParser(String numberFormatPattern, boolean hasFraction) {
            numberFormat = NumberFormat.getFormat(numberFormatPattern);
            if (!hasFraction) {
                numberFormat.overrideFractionDigits(0);
            }
        }

        /**
         * Parses {@code object}; empty input yields null.
         *
         * @throws ParseException if the text is not a valid number
         */
        public N parse(CharSequence object) throws ParseException {
            if ("".equals(object.toString())) {
                return null;
            }
            try {
                return toNumber(numberFormat.parse(object.toString()));
            } catch (NumberFormatException e) {
                // Preserve only the message; position information is unavailable.
                throw new ParseException(e.getMessage(), 0);
            }
        }

        /** Converts the parsed double into the concrete numeric type. */
        protected abstract N toNumber(double val);
    }

    /** Integer parser; rounds to the nearest integer. */
    static class IntegerParser extends NumberParser<Integer> {

        public IntegerParser(String numberFormatPattern) {
            super(numberFormatPattern, false);
        }

        @Override
        protected Integer toNumber(double val) {
            return (int) Math.rint(val);
        }
    }

    /** Long parser; truncates toward zero (unlike IntegerParser, which rounds). */
    static class LongParser extends NumberParser<Long> {

        public LongParser(String numberFormatPattern) {
            super(numberFormatPattern, false);
        }

        @Override
        protected Long toNumber(double val) {
            return (long) val;
        }
    }

    /** Float parser; narrows the parsed double to float. */
    static class FloatParser extends NumberParser<Float> {

        public FloatParser(String numberFormatPattern) {
            super(numberFormatPattern, true);
        }

        @Override
        protected Float toNumber(double val) {
            return (float) val;
        }
    }

    /** Double parser; identity conversion. */
    static class DoubleParser extends NumberParser<Double> {

        public DoubleParser(String numberFormatPattern) {
            super(numberFormatPattern, true);
        }

        @Override
        protected Double toNumber(double val) {
            return val;
        }
    }
}
package org.ongawa.peru.chlorination.modules.reports;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.List;
import java.util.Locale;
import java.util.Properties;
import java.util.ResourceBundle;

import org.apache.commons.lang.NullArgumentException;
import org.ongawa.peru.chlorination.ApplicationProperties;
import org.ongawa.peru.chlorination.KEYS;
import org.ongawa.peru.chlorination.logic.DataLoader;
import org.ongawa.peru.chlorination.persistence.DataSourceFactory;
import org.ongawa.peru.chlorination.persistence.IDataSource;
import org.ongawa.peru.chlorination.persistence.elements.Catchment;
import org.ongawa.peru.chlorination.persistence.elements.CatchmentDesinfection;
import org.ongawa.peru.chlorination.persistence.elements.ChlorineCalculation;
import org.ongawa.peru.chlorination.persistence.elements.ConductionPipe;
import org.ongawa.peru.chlorination.persistence.elements.CubicReservoir;
import org.ongawa.peru.chlorination.persistence.elements.CubicReservoirDesinfection;
import org.ongawa.peru.chlorination.persistence.elements.Desinfection;
import org.ongawa.peru.chlorination.persistence.elements.DistributionPipe;
import org.ongawa.peru.chlorination.persistence.elements.PipeDesinfection;
import org.ongawa.peru.chlorination.persistence.elements.ReliefValve;
import org.ongawa.peru.chlorination.persistence.elements.ReliefValveDesinfection;
import org.ongawa.peru.chlorination.persistence.elements.WaterSystem;

import com.itextpdf.text.BaseColor;
import com.itextpdf.text.Chunk;
import com.itextpdf.text.Document;
import com.itextpdf.text.DocumentException;
import com.itextpdf.text.Element;
import com.itextpdf.text.Font;
import com.itextpdf.text.Paragraph;
import com.itextpdf.text.Phrase;
import com.itextpdf.text.pdf.PdfPCell;
import com.itextpdf.text.pdf.PdfPTable;
import com.itextpdf.text.pdf.PdfWriter;

/**
 * PDF "design" report for a water system: system input data, chlorination
 * results, per-element desinfection details and the family fee calculation.
 *
 * @author kiko
 */
public class DesignReport extends Report {

    private IDataSource ds;            // persistence facade for all report queries
    private Properties properties;     // application properties (bundle name, ...)
    private ResourceBundle messages;   // localized report strings
    private WaterSystem waterSystem;   // the system this report describes

    /**
     * Creates a design report for the given water system.
     *
     * @param waterSystem system to report on; must not be null
     * @param destFile    destination PDF file
     * @param locale      report locale
     * @param author      report author (written into the PDF metadata)
     * @throws NullArgumentException if {@code waterSystem} is null
     */
    public DesignReport(WaterSystem waterSystem, File destFile, Locale locale, String author)
            throws ClassNotFoundException, InstantiationException, IllegalAccessException, IOException {
        super(destFile, locale, author);
        if (waterSystem == null)
            throw new NullArgumentException("waterSystem");
        this.ds = DataSourceFactory.getInstance().getDefaultDataSource();
        this.properties = ApplicationProperties.getInstance().getProperties();
        this.messages = ResourceBundle.getBundle(this.properties.getProperty(KEYS.REPORT_BUNDLE_NAME), locale);
        this.waterSystem = waterSystem;
    }

    /**
     * Appends the three per-element desinfection result cells (chlorine
     * amount, spoons, retention time) to {@code table}.
     *
     * @param table         table receiving the cells
     * @param chlorineQty   chlorine amount for a single element
     * @param demandSpoons  spoons of chlorine per element
     * @param retentionTime retention time per element
     * @param count         number of identical elements in the system
     * @return this row's contribution to the chlorine total
     *         ({@code chlorineQty * count})
     */
    private double addDesinfectionCells(PdfPTable table, double chlorineQty, double demandSpoons,
            double retentionTime, int count) {
        table.addCell(df.format(chlorineQty));
        table.addCell(df.format(demandSpoons));
        table.addCell(df.format(retentionTime));
        return chlorineQty * count;
    }

    /** Appends the three "---" placeholder cells for an element without desinfection data. */
    private void addMissingDesinfectionCells(PdfPTable table) {
        table.addCell("---");
        table.addCell("---");
        table.addCell("---");
    }

    /** Adds one fee row: right-aligned label cell, then left-aligned value cell. */
    private void addFeeRow(PdfPTable feeTable, String label, String value) {
        feeTable.getDefaultCell().setHorizontalAlignment(Element.ALIGN_RIGHT);
        feeTable.addCell(label);
        feeTable.getDefaultCell().setHorizontalAlignment(Element.ALIGN_LEFT);
        feeTable.addCell(value);
    }

    /** Adds one fee row with explicit fonts (Phrase cells keep their own font). */
    private void addFeeRow(PdfPTable feeTable, Phrase label, Phrase value) {
        feeTable.getDefaultCell().setHorizontalAlignment(Element.ALIGN_RIGHT);
        feeTable.addCell(label);
        feeTable.getDefaultCell().setHorizontalAlignment(Element.ALIGN_LEFT);
        feeTable.addCell(value);
    }

    /**
     * Builds and writes the full PDF report.
     *
     * @throws FileNotFoundException if the destination file cannot be opened
     * @throws DocumentException     if no chlorine calculation exists for the
     *                               water system, or on any iText error
     */
    @Override
    public void createReport() throws FileNotFoundException, DocumentException {
        DataLoader dataloader = DataLoader.getDataLoader();
        Document document = new Document();
        document.addAuthor(author);
        document.addCreationDate();
        document.addLanguage(locale.getLanguage().toLowerCase() + "_" + locale.getCountry().toUpperCase());
        PdfWriter.getInstance(document, new FileOutputStream(this.file));
        document.open();
        ChlorineCalculation chlorineCalculation = this.ds.getLastChlorineCalculation(waterSystem);
        if (chlorineCalculation != null) {
            // --- Title: three chunks, the middle one underlined ---
            Chunk chTitle1 = new Chunk(this.messages.getString(KEYS.REPORT_GENERIC_TITLE_FIRST_CHUNK), this.headerFont);
            Font headerUnderline = new Font(this.headerFont.getFamily(), this.headerFont.getSize());
            headerUnderline.setStyle(Font.UNDERLINE);
            Chunk chTitle2 = new Chunk(this.messages.getString(KEYS.REPORT_DESIGN_TITLE_CHUNK), headerUnderline);
            this.headerFont.setStyle(Font.NORMAL);
            Chunk chTitle3 = new Chunk(this.messages.getString(KEYS.REPORT_GENERIC_TITLE_LAST_CHUNK), this.headerFont);
            Phrase phTitle = new Phrase();
            phTitle.add(chTitle1);
            phTitle.add(chTitle2);
            phTitle.add(chTitle3);
            Paragraph paTitle = new Paragraph(phTitle);
            paTitle.setAlignment(Paragraph.ALIGN_CENTER);
            paTitle.setSpacingAfter(DEFAULT_SPACING * 5);
            document.add(paTitle);
            document.add(new Paragraph(new Chunk(this.sdf.format(Calendar.getInstance().getTime()), this.bodyFont)));
            document.add(new Paragraph(this.author, this.bodyFont));
            Font subHeaderFont = new Font(this.headerFont.getFamily(), this.headerFont.getSize() - 2);
            subHeaderFont.setStyle(Font.BOLD);

            // --- System data: two-column label+value table ---
            Paragraph paSysDataTitle = new Paragraph(new Chunk(this.messages.getString(KEYS.REPORT_GENERIC_SYSTEMDATA), subHeaderFont));
            paSysDataTitle.setSpacingBefore(DEFAULT_SPACING * 3);
            paSysDataTitle.setSpacingAfter(DEFAULT_SPACING);
            document.add(paSysDataTitle);
            PdfPTable inputTable = new PdfPTable(2);
            inputTable.setWidthPercentage(90);
            inputTable.getDefaultCell().setUseAscender(true);
            inputTable.getDefaultCell().setUseDescender(true);
            inputTable.getDefaultCell().setBorder(PdfPCell.NO_BORDER);
            inputTable.getDefaultCell().setFixedHeight(40);
            inputTable.addCell(this.messages.getString(KEYS.REPORT_GENERIC_SUBBASIN) + " " + waterSystem.getCommunity().getSubBasin().getName());
            inputTable.addCell(this.messages.getString(KEYS.REPORT_GENERIC_ENDOWMENT) + " " + df.format(waterSystem.getEndowment()));
            inputTable.addCell(this.messages.getString(KEYS.REPORT_GENERIC_COMMUNITY) + " " + waterSystem.getCommunity().getName());
            inputTable.addCell(this.messages.getString(KEYS.REPORT_GENERIC_CHLORINETYPE) + " " + chlorineCalculation.getChlorineType());
            inputTable.addCell(this.messages.getString(KEYS.REPORT_GENERIC_WATERSYSTEM) + " " + waterSystem.getName());
            inputTable.addCell(this.messages.getString(KEYS.REPORT_GENERIC_CHLORINEPURENESS) + " " + df.format(chlorineCalculation.getChlorinePureness()));
            inputTable.addCell(this.messages.getString(KEYS.REPORT_GENERIC_FAMILIESNUM) + " " + chlorineCalculation.getFamiliesNum());
            inputTable.addCell(this.messages.getString(KEYS.REPORT_GENERIC_RELOADTIME) + " " + df.format(chlorineCalculation.getReloadTime()));
            inputTable.addCell(this.messages.getString(KEYS.REPORT_GENERIC_POPULATION) + " " + chlorineCalculation.getPopulation());
            inputTable.addCell(this.messages.getString(KEYS.REPORT_GENERIC_DRIPPINGPERDAY) + " " + df.format(chlorineCalculation.getDrippingHoursPerDay()));
            inputTable.addCell(this.messages.getString(KEYS.REPORT_GENERIC_GROWINGINDEX) + " " + df.format(waterSystem.getGrowingIndex()));
            inputTable.addCell(this.messages.getString(KEYS.REPORT_GENERIC_CHLORINEDEMAND) + " " + df.format(chlorineCalculation.getChlorineDemand()));
            inputTable.addCell(this.messages.getString(KEYS.REPORT_GENERIC_NATURALFLOW) + " " + df.format(chlorineCalculation.getNaturalFlow()));
            inputTable.addCell(this.messages.getString(KEYS.REPORT_GENERIC_CLIMATE) + " " + waterSystem.getClimate());
            // Needed-flow rows span both columns.
            inputTable.getDefaultCell().setColspan(2);
            inputTable.addCell(this.messages.getString(KEYS.REPORT_GENERIC_CURRENTPOPULATIONNEEDEDFLOW) + " " + df.format(waterSystem.getCurrentNeededFlow()));
            inputTable.addCell(this.messages.getString(KEYS.REPORT_GENERIC_FUTUREPOPULATIONNEEDEDFLOW) + " " + df.format(waterSystem.getFutureNeededFlow()));
            document.add(inputTable);

            // --- Results (highlighted in red) ---
            Paragraph paResults = new Paragraph(new Chunk(this.messages.getString(KEYS.REPORT_GENERIC_RESULTS), subHeaderFont));
            document.add(paResults);
            BaseColor oldColor = this.bodyFont.getColor();
            this.bodyFont.setColor(BaseColor.RED);
            // Reuse chlorineCalculation instead of re-querying the data source:
            // the original fetched getLastChlorineCalculation a second time and
            // null-checked it, but this branch already guarantees it is non-null.
            Paragraph pa = new Paragraph(new Chunk(this.messages.getString(KEYS.REPORT_RESULTS_DESIGN_CHLORINEAMOUNTNEEDED)
                    .replaceFirst("&result1", df.format(chlorineCalculation.getChlorineDosePerFortnight()))
                    .replaceFirst("&result2", df.format(chlorineCalculation.getChlorineDosePerMonth())), bodyFont));
            pa.setFirstLineIndent(LEFT_IDENTATION);
            document.add(pa);
            pa = new Paragraph(new Chunk(this.messages.getString(KEYS.REPORT_RESULTS_DESIGN_TANKVOLUME)
                    .replaceFirst("&result1", df.format(chlorineCalculation.getTankVolume())), bodyFont));
            pa.setFirstLineIndent(LEFT_IDENTATION);
            pa.setSpacingAfter(DEFAULT_SPACING);
            document.add(pa);
            document.newPage();
            this.bodyFont.setColor(oldColor);

            // --- Per-element desinfection details ---
            pa = new Paragraph(new Chunk(this.messages.getString(KEYS.REPORT_DESIGN_WATERSYSTEMDETAILS_TITLE), subHeaderFont));
            pa.setSpacingAfter(DEFAULT_SPACING);
            document.add(pa);
            Desinfection desinfection = this.ds.getLastDesinfection(waterSystem);
            List<CubicReservoir> cubicReservoirs = ds.getCubicReservoirs(waterSystem);
            List<Catchment> catchments = ds.getCatchments(waterSystem);
            List<DistributionPipe> distributionPipes = ds.getDistributionPipes(waterSystem);
            List<ConductionPipe> conductionPipes = ds.getConductionPipes(waterSystem);
            List<ReliefValve> reliefValves = ds.getReliefValves(waterSystem);
            double totalClAmount = 0;
            if (cubicReservoirs.isEmpty() && catchments.isEmpty() && distributionPipes.isEmpty()
                    && conductionPipes.isEmpty() && reliefValves.isEmpty()) {
                pa = new Paragraph(new Chunk(this.messages.getString(KEYS.REPORT_DESIGN_NODESIGNDATA), bodyFont));
                pa.setSpacingAfter(DEFAULT_SPACING);
                document.add(pa);
            } else {
                int tableSize = 7;
                PdfPTable wsTable = new PdfPTable(tableSize);
                wsTable.setWidthPercentage(100);
                wsTable.getDefaultCell().setUseAscender(true);
                wsTable.getDefaultCell().setUseDescender(true);
                wsTable.getDefaultCell().setHorizontalAlignment(PdfPCell.ALIGN_CENTER);
                wsTable.getDefaultCell().setVerticalAlignment(PdfPCell.ALIGN_MIDDLE);
                // Header row on a gray background.
                wsTable.getDefaultCell().setBackgroundColor(LIGHT_GRAY);
                wsTable.addCell(this.messages.getString(KEYS.REPORT_DESIGN_WATERSYSTEMDETAILS_NUM));
                wsTable.addCell(this.messages.getString(KEYS.REPORT_DESIGN_WATERSYSTEMDETAILS_NAME));
                wsTable.addCell(this.messages.getString(KEYS.REPORT_DESIGN_WATERSYSTEMDETAILS_ELEMENT));
                wsTable.addCell(this.messages.getString(KEYS.REPORT_DESIGN_WATERSYSTEMDETAILS_ELEMENTSNUM));
                wsTable.addCell(this.messages.getString(KEYS.REPORT_DESIGN_WATERSYSTEMDETAILS_CHLORINEPERELEMENT));
                wsTable.addCell(this.messages.getString(KEYS.REPORT_DESIGN_WATERSYSTEMDETAILS_SPOONSPERELEMENT));
                wsTable.addCell(this.messages.getString(KEYS.REPORT_DESIGN_WATERSYSTEMDETAILS_RETENTIONTIME));
                wsTable.getDefaultCell().setBackgroundColor(BaseColor.WHITE);

                for (CubicReservoir cubicReservoir : cubicReservoirs) {
                    wsTable.addCell(this.df.format(cubicReservoir.getReservoirId()));
                    wsTable.addCell(cubicReservoir.getElementName());
                    wsTable.addCell(this.messages.getString(KEYS.REPORT_DESIGN_WATERSYSTEMDETAILS_CUBICRESERVOIR));
                    int count = cubicReservoir.getCount();
                    wsTable.addCell(String.valueOf(count));
                    CubicReservoirDesinfection crd = (desinfection != null)
                            ? this.ds.getCubicReservoirDesinfection(cubicReservoir, desinfection) : null;
                    if (crd != null) {
                        totalClAmount += addDesinfectionCells(wsTable, crd.getChlorineQty(),
                                crd.getDemandSpoons(), crd.getRetentionTime(), count);
                    } else {
                        addMissingDesinfectionCells(wsTable);
                    }
                }
                for (Catchment catchment : catchments) {
                    wsTable.addCell(this.df.format(catchment.getReservoirId()));
                    wsTable.addCell(catchment.getElementName());
                    wsTable.addCell(this.messages.getString(KEYS.REPORT_DESIGN_WATERSYSTEMDETAILS_CATCHMENT));
                    int count = catchment.getCount();
                    wsTable.addCell(String.valueOf(count));
                    CatchmentDesinfection cd = (desinfection != null)
                            ? this.ds.getCatchmentDesinfection(catchment, desinfection) : null;
                    if (cd != null) {
                        totalClAmount += addDesinfectionCells(wsTable, cd.getChlorineQty(),
                                cd.getDemandSpoons(), cd.getRetentionTime(), count);
                    } else {
                        addMissingDesinfectionCells(wsTable);
                    }
                }
                for (DistributionPipe distributionPipe : distributionPipes) {
                    wsTable.addCell(this.df.format(distributionPipe.getPipeId()));
                    wsTable.addCell(distributionPipe.getElementName());
                    wsTable.addCell(this.messages.getString(KEYS.REPORT_DESIGN_WATERSYSTEMDETAILS_DISTRIBUTIONPIPE));
                    int count = distributionPipe.getCount();
                    wsTable.addCell(String.valueOf(count));
                    PipeDesinfection pd = (desinfection != null)
                            ? this.ds.getPipeDesinfection(distributionPipe, desinfection) : null;
                    if (pd != null) {
                        totalClAmount += addDesinfectionCells(wsTable, pd.getChlorineQty(),
                                pd.getDemandSpoons(), pd.getRetentionTime(), count);
                    } else {
                        addMissingDesinfectionCells(wsTable);
                    }
                }
                for (ConductionPipe conductionPipe : conductionPipes) {
                    wsTable.addCell(this.df.format(conductionPipe.getPipeId()));
                    wsTable.addCell(conductionPipe.getElementName());
                    wsTable.addCell(this.messages.getString(KEYS.REPORT_DESIGN_WATERSYSTEMDETAILS_CONDUCTIONPIPE));
                    int count = conductionPipe.getCount();
                    wsTable.addCell(String.valueOf(count));
                    PipeDesinfection pd = (desinfection != null)
                            ? this.ds.getPipeDesinfection(conductionPipe, desinfection) : null;
                    if (pd != null) {
                        totalClAmount += addDesinfectionCells(wsTable, pd.getChlorineQty(),
                                pd.getDemandSpoons(), pd.getRetentionTime(), count);
                    } else {
                        addMissingDesinfectionCells(wsTable);
                    }
                }
                for (ReliefValve reliefValve : reliefValves) {
                    wsTable.addCell(this.df.format(reliefValve.getReliefValveId()));
                    wsTable.addCell(reliefValve.getElementName());
                    wsTable.addCell(this.messages.getString(KEYS.REPORT_DESIGN_WATERSYSTEMDETAILS_RELIEFVALVE));
                    int count = reliefValve.getCount();
                    wsTable.addCell(String.valueOf(count));
                    ReliefValveDesinfection rvd = (desinfection != null)
                            ? this.ds.getReliefValveDesinfection(reliefValve, desinfection) : null;
                    if (rvd != null) {
                        totalClAmount += addDesinfectionCells(wsTable, rvd.getChlorineQty(),
                                rvd.getDemandSpoons(), rvd.getRetentionTime(), count);
                    } else {
                        addMissingDesinfectionCells(wsTable);
                    }
                }
                wsTable.setSpacingAfter(DEFAULT_SPACING);
                document.add(wsTable);
            }

            // --- Chlorine total (red), desinfections per year ---
            oldColor = this.bodyFont.getColor();
            this.bodyFont.setColor(BaseColor.RED);
            pa = new Paragraph(new Chunk(this.messages.getString(KEYS.REPORT_DESIGN_CHLORINETOTALAMOUNT_RESULTS)
                    .replaceFirst("&result1", (desinfection != null) ? df.format(totalClAmount / 1000) : "---"), bodyFont));
            pa.setFirstLineIndent(LEFT_IDENTATION);
            document.add(pa);
            this.bodyFont.setColor(oldColor);
            if (desinfection != null) {
                int year = Integer.parseInt(new SimpleDateFormat("yyyy").format(desinfection.getDate()));
                pa = new Paragraph(new Chunk(this.messages.getString(KEYS.REPORT_DESIGN_DESINFECTIONSNUMPERYEAR_RESULTS)
                        .replaceFirst("&result1", String.valueOf(this.ds.getCountDesinfectionsPerYear(waterSystem, year))), bodyFont));
            } else {
                pa = new Paragraph(new Chunk(this.messages.getString(KEYS.REPORT_DESIGN_DESINFECTIONSNUMPERYEAR_RESULTS)
                        .replaceFirst("&result1", "---"), bodyFont));
            }
            pa.setFirstLineIndent(LEFT_IDENTATION);
            document.add(pa);
            document.newPage();

            // --- Family fee calculation (incremental table: setComplete(false)
            // allows adding the table twice while rows keep accumulating) ---
            pa = new Paragraph(new Chunk(this.messages.getString(KEYS.REPORT_DESIGN_FAMILYFEECALCULATION_TITLE), subHeaderFont));
            pa.setSpacingAfter(DEFAULT_SPACING * 2);
            document.add(pa);
            int tableSize = 2;
            PdfPTable feeTable = new PdfPTable(tableSize);
            feeTable.setComplete(false);
            feeTable.setWidthPercentage(50);
            feeTable.setHorizontalAlignment(Element.ALIGN_LEFT);
            feeTable.getDefaultCell().setHorizontalAlignment(Element.ALIGN_CENTER);
            feeTable.getDefaultCell().setUseAscender(true);
            feeTable.getDefaultCell().setUseDescender(true);
            feeTable.getDefaultCell().setVerticalAlignment(PdfPCell.ALIGN_MIDDLE);
            feeTable.getDefaultCell().setFixedHeight(40f);
            feeTable.getDefaultCell().setColspan(2);
            feeTable.getDefaultCell().setBackgroundColor(LIGHT_GRAY);
            feeTable.addCell(this.messages.getString(KEYS.REPORT_DESIGN_FEECALCULATION_TABLE_HEADER));
            feeTable.getDefaultCell().setColspan(1);
            feeTable.addCell(this.messages.getString(KEYS.REPORT_DESIGN_FEECALCULATION_INPUT_COLUMNS));
            feeTable.addCell(this.messages.getString(KEYS.REPORT_DESIGN_FEECALCULATION_UNIT_COLUMN));
            feeTable.getDefaultCell().setBackgroundColor(BaseColor.WHITE);
            addFeeRow(feeTable, this.messages.getString(KEYS.REPORT_DESIGN_FEECALCULATION_CHLORINEFORCHLORINATION_ROW), dataloader.getValue("solescl"));
            addFeeRow(feeTable, this.messages.getString(KEYS.REPORT_DESIGN_FEECALCULATION_CHLORINEFORDESINFECTION_ROW), dataloader.getValue("solesDes"));
            addFeeRow(feeTable, this.messages.getString(KEYS.REPORT_DESIGN_FEECALCULATION_SAPSPARES_ROW), dataloader.getValue("sapSpares"));
            addFeeRow(feeTable, this.messages.getString(KEYS.REPORT_DESIGN_FEECALCULATION_JASSMANAGEMENT_ROW), dataloader.getValue("jassManage"));
            addFeeRow(feeTable, this.messages.getString(KEYS.REPORT_DESIGN_FEECALCULATION_OPERATORPAYMENT_ROW), dataloader.getValue("workerPay"));
            addFeeRow(feeTable,
                    new Phrase(new Chunk(this.messages.getString(KEYS.REPORT_DESIGN_FEECALCULATION_TOTAL_ROW), this.bodyBoldFont)),
                    new Phrase(new Chunk(dataloader.getValue("yearTotal"), this.bodyBoldFont)));
            // Thin spacer row across both columns.
            float oldHeight = feeTable.getDefaultCell().getFixedHeight();
            feeTable.getDefaultCell().setFixedHeight(10f);
            feeTable.getDefaultCell().setColspan(2);
            feeTable.addCell("");
            feeTable.getDefaultCell().setFixedHeight(oldHeight);
            feeTable.getDefaultCell().setColspan(1);
            addFeeRow(feeTable,
                    new Phrase(new Chunk(this.messages.getString(KEYS.REPORT_DESIGN_FEECALCULATION_ONLYCHLORINE), this.bodyFont)),
                    new Phrase(new Chunk(dataloader.getValue("justCL"), this.bodyFont)));
            document.add(feeTable);
            // Family fee row rendered in red, then the table is flushed again.
            oldColor = this.bodyFont.getColor();
            this.bodyFont.setColor(BaseColor.RED);
            addFeeRow(feeTable,
                    new Phrase(new Chunk(this.messages.getString(KEYS.REPORT_DESIGN_FEECALCULATION_FAMILYFEE), this.bodyFont)),
                    new Phrase(new Chunk(dataloader.getValue("famCuot"), this.bodyFont)));
            feeTable.setSpacingAfter(DEFAULT_SPACING * 3);
            document.add(feeTable);
            feeTable.setComplete(true);
            this.bodyFont.setColor(oldColor);

            // --- Warnings section ---
            pa = new Paragraph(new Chunk(this.messages.getString(KEYS.REPORT_DESIGN_WARNINGS_TITLE), subHeaderFont));
            pa.setSpacingAfter(DEFAULT_SPACING);
            document.add(pa);
            pa = new Paragraph(new Chunk(this.messages.getString(KEYS.REPORT_DESIGN_WARNINGS_CONTENT), this.bodyFont));
            pa.setIndentationLeft(LEFT_IDENTATION);
            document.add(pa);
            document.close();
        } else {
            throw new DocumentException("There's no info about any chlorination");
        }
    }

    /** @return the water system this report describes */
    public WaterSystem getWaterSystem() {
        return this.waterSystem;
    }
}
/* Bento
 *
 * $Id: BentoVisitor.java,v 1.22 2015/04/20 12:50:41 sthippo Exp $
 *
 * Copyright (c) 2002-2015 by bentodev.org
 *
 * Use of this code in source or compiled form is subject to the
 * Bento Poetic License at http://www.bentodev.org/poetic-license.html
 */

package bento.runtime;

import bento.lang.*;
import bento.parser.*;

/** Base class of bento tree visitors. Implements BentoParserVisitor, a
 * jjtree-generated interface implementing the Visitor design pattern.
 * Subclasses of BentoVisitor may override the handleNode method for general
 * node handling, and/or the visit method for any particular node types in
 * which it is interested.
 *
 * Every concrete visit overload below simply delegates to handleNode, so by
 * default a BentoVisitor walks the entire tree doing nothing; subclasses
 * override only the node types they care about.
 *
 * @author Michael St. Hippolyte
 * @version $Revision: 1.22 $
 */
abstract public class BentoVisitor implements BentoParserVisitor {

    public BentoVisitor() {}

    /** By default, recursively visit chidren only if it is not
     *  primitive.
     */
    protected Object handleNode(BentoNode node, Object data) {
        // The primitive check is currently disabled: all children are visited.
        //if (!node.isPrimitive()) {
            data = ((AbstractNode) node).childrenAccept(this, data);
        //}
        return data;
    }

    /** Needed to satisfy the BentoParserVisitor interface; should never
     *  be called.
     */
    public Object visit(SimpleNode node, Object data) {
        throw new UnsupportedOperationException("SimpleNodes not supported");
    }

    /** Needed to satisfy the BentoParserVisitor interface; should never
     *  be called.
     */
    public Object visit(BentoNode node, Object data) {
        throw new UnsupportedOperationException("visit must be called with a concrete subclass");
    }

    // ---- Root, text and name nodes ----
    public Object visit(ParsedRoot node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedStaticText node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedLiteralText node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedNameWithArguments node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedNameWithIndexes node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedName node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedSpecialName node, Object data) { return handleNode(node, data); }

    // ---- Statements and blocks ----
    public Object visit(ParsedSiteStatement node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedCoreStatement node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedDefaultStatement node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedStaticBlock node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedBentoBlock node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedDynamicElementBlock node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedDynamicBentoBlock node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedConcurrentBentoBlock node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedAdoptStatement node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedExternStatement node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedKeepStatement node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedInsertStatement node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedRedirectStatement node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedContinueStatement node, Object data) { return handleNode(node, data); }

    // ---- Expressions and constructions ----
    public Object visit(ParsedConditionalExpression node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedWithPredicate node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedWithoutPredicate node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedForExpression node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedIteratorValues node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedBreakStatement node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedNextConstruction node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedSubConstruction node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedSuperConstruction node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedOverConstruction node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedUnderConstruction node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedConstruction node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedComplexName node, Object data) { return handleNode(node, data); }

    // ---- Definitions ----
    public Object visit(ParsedAnonymousDefinition node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedCollectionDefinition node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedComplexDefinition node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedElementDefinition node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedExternalDefinition node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedExternalCollectionDefinition node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedAnonymousArray node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedAnonymousTable node, Object data) { return handleNode(node, data); }

    // ---- Types and parameters ----
    public Object visit(ParsedType node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedDefTypeName node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedDefElementName node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedDefCollectionName node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedParameterList node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedDefParameter node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedAny node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedAnyAny node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedPrimitiveType node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedDim node, Object data) { return handleNode(node, data); }

    // ---- Value expressions and operators ----
    public Object visit(ParsedValueExpression node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedUnaryExpression node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedBinaryExpression node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedChoiceExpression node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedLogicalOrOperator node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedLogicalAndOperator node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedOrOperator node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedXorOperator node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedAndOperator node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedEqualsOperator node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedNotEqualsOperator node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedIsaExpression node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedLessThanOperator node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedGreaterThanOperator node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedLessThanOrEqualOperator node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedGreaterThanOrEqualOperator node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedInOperator node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedLeftShiftOperator node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedRightShiftOperator node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedRightUnsignedShiftOperator node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedAddOperator node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedSubtractOperator node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedMultiplyOperator node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedDivideByOperator node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedModOperator node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedNegateOperator node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedBitflipOperator node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedLogicalNotOperator node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedTypeOperator node, Object data) { return handleNode(node, data); }

    // ---- Literals and misc ----
    public Object visit(ParsedIntegerLiteral node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedFloatingPointLiteral node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedCharLiteral node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedStringLiteral node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedBooleanLiteral node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedNullLiteral node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedArguments node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedArrayIndex node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedTableIndex node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedTableElement node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedTypeList node, Object data) { return handleNode(node, data); }
    public Object visit(ParsedEllipsis node, Object data) { return handleNode(node, data); }
}
// Code generated by Wire protocol buffer compiler, do not edit.
// Source file: ../wire-runtime/src/test/proto/google/protobuf/descriptor.proto at 56:1
package com.google.protobuf;

import com.squareup.wire.Message;
import com.squareup.wire.ProtoField;
import java.lang.Object;
import java.lang.Override;
import java.lang.String;
import java.util.Collections;
import java.util.List;

/**
 * Describes a complete .proto file.
 *
 * <p>Immutable Wire message; obtain instances via the constructor or {@link Builder}.
 */
public final class FileDescriptorProto extends Message<FileDescriptorProto> {

  private static final long serialVersionUID = 0L;

  public static final String DEFAULT_NAME = "";

  public static final String DEFAULT_PACKAGE = "";

  /**
   * file name, relative to root of source tree
   */
  @ProtoField(tag = 1, type = Message.Datatype.STRING)
  public final String name;

  /**
   * e.g. "foo", "foo.bar", etc.
   */
  @ProtoField(tag = 2, type = Message.Datatype.STRING)
  public final String _package;

  /**
   * Names of files imported by this file.
   */
  @ProtoField(tag = 3, type = Message.Datatype.STRING, label = Message.Label.REPEATED)
  public final List<String> dependency;

  /**
   * All top-level definitions in this file.
   */
  @ProtoField(tag = 4, label = Message.Label.REPEATED)
  public final List<DescriptorProto> message_type;

  @ProtoField(tag = 5, label = Message.Label.REPEATED)
  public final List<EnumDescriptorProto> enum_type;

  @ProtoField(tag = 6, label = Message.Label.REPEATED)
  public final List<ServiceDescriptorProto> service;

  @ProtoField(tag = 7, label = Message.Label.REPEATED)
  public final List<FieldDescriptorProto> extension;

  @ProtoField(tag = 8)
  public final FileOptions options;

  /**
   * This field contains optional information about the original source code.
   * You may safely remove this entire field without harming runtime
   * functionality of the descriptors -- the information is needed only by
   * development tools.
   */
  @ProtoField(tag = 9)
  public final SourceCodeInfo source_code_info;

  public FileDescriptorProto(String name, String _package, List<String> dependency,
      List<DescriptorProto> message_type, List<EnumDescriptorProto> enum_type,
      List<ServiceDescriptorProto> service, List<FieldDescriptorProto> extension,
      FileOptions options, SourceCodeInfo source_code_info) {
    this.name = name;
    this._package = _package;
    // Repeated fields are defensively snapshotted into immutable lists.
    this.dependency = immutableCopyOf(dependency);
    this.message_type = immutableCopyOf(message_type);
    this.enum_type = immutableCopyOf(enum_type);
    this.service = immutableCopyOf(service);
    this.extension = immutableCopyOf(extension);
    this.options = options;
    this.source_code_info = source_code_info;
  }

  private FileDescriptorProto(Builder builder) {
    this(builder.name, builder._package, builder.dependency, builder.message_type,
        builder.enum_type, builder.service, builder.extension, builder.options,
        builder.source_code_info);
    setBuilder(builder);
  }

  @Override
  public boolean equals(Object other) {
    if (other == this) {
      return true;
    }
    if (!(other instanceof FileDescriptorProto)) {
      return false;
    }
    FileDescriptorProto that = (FileDescriptorProto) other;
    return equals(name, that.name)
        && equals(_package, that._package)
        && equals(dependency, that.dependency)
        && equals(message_type, that.message_type)
        && equals(enum_type, that.enum_type)
        && equals(service, that.service)
        && equals(extension, that.extension)
        && equals(options, that.options)
        && equals(source_code_info, that.source_code_info);
  }

  @Override
  public int hashCode() {
    // Lazily computed and cached in the inherited 'hashCode' field; 0 means "not yet computed".
    int h = hashCode;
    if (h == 0) {
      h = name != null ? name.hashCode() : 0;
      h = h * 37 + (_package != null ? _package.hashCode() : 0);
      h = h * 37 + (dependency != null ? dependency.hashCode() : 1);
      h = h * 37 + (message_type != null ? message_type.hashCode() : 1);
      h = h * 37 + (enum_type != null ? enum_type.hashCode() : 1);
      h = h * 37 + (service != null ? service.hashCode() : 1);
      h = h * 37 + (extension != null ? extension.hashCode() : 1);
      h = h * 37 + (options != null ? options.hashCode() : 0);
      h = h * 37 + (source_code_info != null ? source_code_info.hashCode() : 0);
      hashCode = h;
    }
    return h;
  }

  /**
   * Mutable builder for {@link FileDescriptorProto}.
   */
  public static final class Builder extends com.squareup.wire.Message.Builder<FileDescriptorProto, Builder> {

    public String name;

    public String _package;

    public List<String> dependency = Collections.emptyList();

    public List<DescriptorProto> message_type = Collections.emptyList();

    public List<EnumDescriptorProto> enum_type = Collections.emptyList();

    public List<ServiceDescriptorProto> service = Collections.emptyList();

    public List<FieldDescriptorProto> extension = Collections.emptyList();

    public FileOptions options;

    public SourceCodeInfo source_code_info;

    public Builder() {
    }

    public Builder(FileDescriptorProto message) {
      super(message);
      if (message == null) {
        return;
      }
      this.name = message.name;
      this._package = message._package;
      this.dependency = copyOf(message.dependency);
      this.message_type = copyOf(message.message_type);
      this.enum_type = copyOf(message.enum_type);
      this.service = copyOf(message.service);
      this.extension = copyOf(message.extension);
      this.options = message.options;
      this.source_code_info = message.source_code_info;
    }

    /**
     * file name, relative to root of source tree
     */
    public Builder name(String name) {
      this.name = name;
      return this;
    }

    /**
     * e.g. "foo", "foo.bar", etc.
     */
    public Builder _package(String _package) {
      this._package = _package;
      return this;
    }

    /**
     * Names of files imported by this file.
     */
    public Builder dependency(List<String> dependency) {
      this.dependency = canonicalizeList(dependency);
      return this;
    }

    /**
     * All top-level definitions in this file.
     */
    public Builder message_type(List<DescriptorProto> message_type) {
      this.message_type = canonicalizeList(message_type);
      return this;
    }

    public Builder enum_type(List<EnumDescriptorProto> enum_type) {
      this.enum_type = canonicalizeList(enum_type);
      return this;
    }

    public Builder service(List<ServiceDescriptorProto> service) {
      this.service = canonicalizeList(service);
      return this;
    }

    public Builder extension(List<FieldDescriptorProto> extension) {
      this.extension = canonicalizeList(extension);
      return this;
    }

    public Builder options(FileOptions options) {
      this.options = options;
      return this;
    }

    /**
     * This field contains optional information about the original source code.
     * You may safely remove this entire field without harming runtime
     * functionality of the descriptors -- the information is needed only by
     * development tools.
     */
    public Builder source_code_info(SourceCodeInfo source_code_info) {
      this.source_code_info = source_code_info;
      return this;
    }

    @Override
    public FileDescriptorProto build() {
      return new FileDescriptorProto(this);
    }
  }
}
package com.fedevela.util;

/**
 * Created by fvelazquez on 26/03/14.
 */
import java.io.BufferedReader;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.InetAddress;
import java.net.Socket;

/**
 * Minimal ICAP (RFC 3507) client that submits files to a Symantec Scan Engine
 * for antivirus scanning via RESPMOD with a 4-byte preview.
 *
 * <p>Scan results are mapped to int codes: {@code -1} = virus reported,
 * {@code 0} = clean, {@code 1} = any error (connection, protocol, bad file).
 *
 * <p>NOTE(review): this class is not thread-safe — the socket/streams and the
 * {@code bytes}/{@code results} buffers are shared mutable instance state.
 */
public class AVClient {

    // Scan-engine endpoint; the hard-coded defaults are overridden via the
    // 3-arg constructor or setparams().
    private String servername = "201.134.153.190";
    private int port = 1344;
    private String scanpolicy = "SCAN";
    private String filename = "";
    private String outputfilename;
    private Socket socket = null;
    private DataOutputStream output = null;
    private BufferedReader input = null;
    // Raw file contents loaded by convert(); reused across calls.
    protected byte[] bytes;
    // Lines of the most recent ICAP response, as read by recieve().
    private String[] results;
    // Value sent in the ICAP "Allow:" header (204 = allow "no modification").
    protected int allow = 204;

    public AVClient() {
    }

    /**
     * @param server_name scan-engine host (IP or name)
     * @param the_port    scan-engine ICAP port
     * @param scan_policy e.g. "SCAN" or "SCANREPAIRDELETE"
     */
    public AVClient(String server_name, int the_port, String scan_policy) {
        servername = server_name;
        port = the_port;
        scanpolicy = scan_policy;
    }

    /**
     * Reconfigures the endpoint/policy. Always returns 0 (kept for
     * compatibility with existing callers).
     */
    public int setparams(String server_name, int the_port, String scan_policy) {
        servername = server_name;
        port = the_port;
        scanpolicy = scan_policy;
        return (0);
    }

    /**
     * Sanity-checks the configured endpoint and attempts one connect/disconnect.
     *
     * @return 0 if the parameters look valid and the engine is reachable, 1 otherwise
     */
    public int test() {
        int return_value = 0;
        // Crude length check — only accepts dotted-quad-sized host strings.
        if ((servername.length()) < 7 || (servername.length()) > 16) {
            return_value = 1;
        }
        if (port < 1 || port > 8000) {
            return_value = 1;
        }
        if (return_value == 0 && socket == null) {
            return_value = connect(servername);
            disconnect();
        }
        return (return_value);
    }

    /**
     * Sends an ICAP OPTIONS request to the engine.
     *
     * @return 0 if the request was sent, 1 on connect/send failure
     */
    public int optionscheck() {
        int the_return = 0;
        String message = "OPTIONS icap://" + servername + ":" + port + "/avscan ICAP/1.0 \r\n"
                + "Host " + servername + " \r\n" + "\r\n";
        the_return = connect(servername);
        if (the_return == 0) {
            the_return = send(message);
        }
        return the_return;
    }

    /**
     * Scans the file at {@code file_name}.
     *
     * @return -1 if a virus was reported, 0 if clean, 1 on any error
     */
    public int scanfile(String file_name) {
        filename = file_name;
        int the_return = 0;
        long file_length = 0;
        long long_length = 4; // ICAP preview size in bytes
        String server_message = "";
        // Probe that the file exists, is readable and fits in a byte array.
        // FIX: the probe stream was previously leaked; try-with-resources closes it.
        try (InputStream is = new FileInputStream(new File(file_name))) {
            file_length = new File(file_name).length();
            if (file_length < 1) {
                the_return = 1;
            } else if (file_length > Integer.MAX_VALUE) {
                the_return = 1;
            }
        } catch (Exception e) {
            the_return = 1;
        }
        // FIX: the original unconditionally overwrote the_return here, silently
        // discarding the validation result above.
        if (the_return == 0) {
            the_return = connect(servername);
        }
        AVRespond check_response = new AVRespond();
        // Single-pass "loop" so error paths can bail out with break.
        while (the_return == 0) {
            int req_header = 0;
            int res_header;
            int res_body;
            // Encapsulated HTTP request/response headers required by RESPMOD.
            String reqheader = "GET http://scapi.symantec.com" + "/" + file_name + " HTTP/1.1\r\n"
                    + "Host: scapi.symantec.com\r\n" + "\r\n";
            String resheader = "HTTP/1.1 200 OK\r\n" + "Transfer-Encoding: chunked\r\n" + "\r\n";
            res_header = reqheader.length();
            res_body = res_header + (resheader.length());
            String header = "RESPMOD icap://" + servername + ":" + port + "/AVSCAN?action=" + scanpolicy
                    + " ICAP/1.0\r\n" + "Host: " + servername + ":" + port + "\r\n"
                    + "Preview: 4" + "\r\n" + "Allow: " + allow + "\r\n"
                    + "Encapsulated: req-hdr=" + req_header + " res-hdr=" + res_header
                    + " res-body=" + res_body + "\r\n" + "\r\n";
            the_return = send(header);
            if (the_return == 1) {
                break; // NOTE(review): socket stays open on these breaks (pre-existing behavior)
            }
            the_return = send(reqheader);
            if (the_return == 1) {
                break;
            }
            the_return = send(resheader);
            if (the_return == 1) {
                break;
            }
            byte[] b = convert();
            header = "";
            // Send the first 4 bytes as the chunked ICAP preview.
            try {
                header = Long.toHexString(long_length) + "\r\n";
                the_return = send(header);
                if (the_return == 1) {
                    break;
                }
                header = "\r\n" + "0" + "\r\n" + "\r\n";
                output.write(b, 0, 4);
                the_return = send(header);
                if (the_return == 1) {
                    break;
                }
            } catch (Exception e) {
                e.printStackTrace();
            }
            header = recieve();
            // Engine replies "100 Continue" when it wants the rest of the file.
            if (check_response.continue_check(header)) {
                file_length = file_length - 4;
                header = Long.toHexString(file_length) + "\r\n";
                try {
                    the_return = send(header);
                    if (the_return == 1) {
                        break;
                    }
                    output.write(b, 4, ((int) file_length));
                    the_return = send("\r\n0\r\n\r\n");
                    if (the_return == 1) {
                        break;
                    }
                    header = recieve();
                    // First 12 chars hold the ICAP status line prefix; AVRespond
                    // maps it to "virus"/"clean"/other.
                    server_message = header.substring(0, 12);
                    server_message = check_response.request(server_message);
                } catch (Exception e) {
                    the_return = 1;
                }
            } else {
                the_return = 1;
            }
            disconnect();
            break;
        }
        if (the_return != 1) {
            if (server_message.equals("virus")) {
                the_return = -1;
            }
            if (server_message.equals("clean")) {
                the_return = 0;
            }
        }
        return (the_return);
    }

    /**
     * Scans {@code file_name}; when the policy is "SCANREPAIRDELETE" and a virus
     * is found, writes the engine-returned (repaired) content to
     * {@code outputfile_name} via {@code AVRespond.create_file}.
     *
     * @return -1 if a virus was reported, 0 if clean, 1 on any error
     */
    public int scanfile(String file_name, String outputfile_name) {
        filename = file_name;
        outputfilename = outputfile_name;
        int the_return = 0;
        long file_length = 0;
        long long_length = 4; // ICAP preview size in bytes
        String server_message = "";
        String header = "";
        String text = "";
        // FIX: probe stream was leaked; try-with-resources closes it.
        try (InputStream is = new FileInputStream(new File(file_name))) {
            file_length = new File(file_name).length();
            if (file_length < 1) {
                the_return = 1;
            } else if (file_length > Integer.MAX_VALUE) {
                the_return = 1;
            }
        } catch (Exception e) {
            the_return = 1;
        }
        // FIX: honor the validation result instead of overwriting it.
        if (the_return == 0) {
            the_return = connect(servername);
        }
        AVRespond check_response = new AVRespond();
        while (the_return == 0) {
            int req_header = 0;
            int res_header;
            int res_body;
            String reqheader = "GET http://scapi.symantec.com" + "/" + file_name + " HTTP/1.1\r\n"
                    + "Host: scapi.symantec.com\r\n" + "\r\n";
            String resheader = "HTTP/1.1 200 OK\r\n" + "Transfer-Encoding: chunked\r\n" + "\r\n";
            res_header = reqheader.length();
            res_body = res_header + (resheader.length());
            header = "RESPMOD icap://" + servername + ":" + port + "/AVSCAN?action=" + scanpolicy
                    + " ICAP/1.0\r\n" + "Host: " + servername + ":" + port + "\r\n"
                    + "Preview: 4" + "\r\n" + "Allow: " + allow + "\r\n"
                    + "Encapsulated: req-hdr=" + req_header + " res-hdr=" + res_header
                    + " res-body=" + res_body + "\r\n" + "\r\n";
            the_return = send(header);
            if (the_return == 1) {
                break;
            }
            the_return = send(reqheader);
            if (the_return == 1) {
                break;
            }
            the_return = send(resheader);
            if (the_return == 1) {
                break;
            }
            byte[] b = convert();
            header = "";
            try {
                header = Long.toHexString(long_length) + "\r\n";
                the_return = send(header);
                if (the_return == 1) {
                    break;
                }
                header = "\r\n" + "0" + "\r\n" + "\r\n";
                output.write(b, 0, 4);
                the_return = send(header);
                if (the_return == 1) {
                    break;
                }
            } catch (Exception e) {
                e.printStackTrace();
            }
            header = recieve();
            if (check_response.continue_check(header)) {
                file_length = file_length - 4;
                header = Long.toHexString(file_length) + "\r\n";
                try {
                    the_return = send(header);
                    if (the_return == 1) {
                        break;
                    }
                    output.write(b, 4, ((int) file_length));
                    the_return = send("\r\n0\r\n\r\n");
                    if (the_return == 1) {
                        break;
                    }
                    header = recieve();
                    server_message = header.substring(0, 12);
                    server_message = check_response.request(server_message);
                    if (server_message.equals("virus")) {
                        // Drain the rest of the response; 'text' holds the
                        // (possibly repaired) body for create_file below.
                        header = header + recieve();
                        text = recieve();
                        header = header + text;
                    }
                } catch (Exception e) {
                    the_return = 1;
                }
            } else {
                the_return = 1;
            }
            disconnect();
            break;
        }
        if (the_return != 1) {
            if (server_message.equals("virus")) {
                the_return = -1;
            }
            if (server_message.equals("clean")) {
                the_return = 0;
            }
        }
        if (the_return == -1 && scanpolicy.equals("SCANREPAIRDELETE")) {
            check_response.create_file(text, outputfilename);
        }
        return (the_return);
    }

    /**
     * Scans an in-memory buffer; {@code file_name} is only used in the
     * encapsulated request URI.
     *
     * @return -1 if a virus was reported, 0 if clean, 1 on any error
     */
    public int scanfile(String file_name, byte[] b) {
        int the_return = 0;
        long file_length = b.length;
        long long_length = 4; // ICAP preview size in bytes
        String server_message = "";
        String header = "";
        String text = "";
        the_return = connect(servername);
        AVRespond check_response = new AVRespond();
        while (the_return == 0) {
            int req_header = 0;
            int res_header;
            int res_body;
            String reqheader = "GET http://scapi.symantec.com" + "/" + file_name + " HTTP/1.1\r\n"
                    + "Host: scapi.symantec.com\r\n" + "\r\n";
            String resheader = "HTTP/1.1 200 OK\r\n" + "Transfer-Encoding: chunked\r\n" + "\r\n";
            res_header = reqheader.length();
            res_body = res_header + (resheader.length());
            header = "RESPMOD icap://" + servername + ":" + port + "/AVSCAN?action=" + scanpolicy
                    + " ICAP/1.0\r\n" + "Host: " + servername + ":" + port + "\r\n"
                    + "Preview: 4" + "\r\n" + "Allow: " + allow + "\r\n"
                    + "Encapsulated: req-hdr=" + req_header + " res-hdr=" + res_header
                    + " res-body=" + res_body + "\r\n" + "\r\n";
            the_return = send(header);
            if (the_return == 1) {
                break;
            }
            the_return = send(reqheader);
            if (the_return == 1) {
                break;
            }
            the_return = send(resheader);
            if (the_return == 1) {
                break;
            }
            header = "";
            try {
                header = Long.toHexString(long_length) + "\r\n";
                the_return = send(header);
                if (the_return == 1) {
                    break;
                }
                header = "\r\n" + "0" + "\r\n" + "\r\n";
                output.write(b, 0, 4);
                the_return = send(header);
                if (the_return == 1) {
                    break;
                }
            } catch (Exception e) {
                e.printStackTrace();
            }
            header = recieve();
            if (check_response.continue_check(header)) {
                file_length = file_length - 4;
                header = Long.toHexString(file_length) + "\r\n";
                try {
                    the_return = send(header);
                    if (the_return == 1) {
                        break;
                    }
                    output.write(b, 4, ((int) file_length));
                    the_return = send("\r\n0\r\n\r\n");
                    if (the_return == 1) {
                        break;
                    }
                    header = recieve();
                    server_message = header.substring(0, 12);
                    server_message = check_response.request(server_message);
                    if (server_message.equals("virus")) {
                        header = header + recieve();
                        text = recieve();
                        header = header + text;
                    }
                } catch (Exception e) {
                    the_return = 1;
                }
            } else {
                the_return = 1;
            }
            disconnect();
            break;
        }
        if (the_return != 1) {
            if (server_message.equals("virus")) {
                the_return = -1;
            }
            if (server_message.equals("clean")) {
                the_return = 0;
            }
        }
        return (the_return);
    }

    /**
     * Opens the socket and wraps its streams; no-op if a socket already exists.
     *
     * @return 0 on success (or already connected), 1 on failure
     */
    private int connect(String host) {
        int response = 0;
        if (socket == null) {
            try {
                // FIX: removed dead InetAddress.getByName(host) local — the
                // Socket constructor resolves the host itself (same failure mode).
                socket = new Socket(host, port);
                output = new DataOutputStream(socket.getOutputStream());
                input = new BufferedReader(new InputStreamReader(socket.getInputStream()));
            } catch (Exception e) {
                response = 1;
                String body = "WARNING! Symantec Scan Engine could not be contacted! \n\rUsing \n\rhost: " + host
                        + "\n\rport: " + port + " \n\rFiles will be saved without virus scan ";
                System.out.println("Error de conexion: " + body);
            }
        }
        return response;
    }

    /** Writes {@code str} to the engine; returns 0 on success, 1 on failure. */
    private int send(String str) {
        int response = 0;
        try {
            output.writeBytes(str);
            output.flush();
        } catch (Exception e) {
            response = 1;
        }
        return response;
    }

    /** Closes streams and socket; resets the socket so connect() can reopen. */
    private void disconnect() {
        try {
            input.close();
            output.close();
            socket.close();
            socket = null;
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Loads the current {@code filename} into the shared {@code bytes} buffer.
     * FIX: the input stream was previously leaked; try-with-resources closes it.
     */
    private byte[] convert() {
        try {
            File file = new File(filename);
            try (InputStream is = new FileInputStream(file)) {
                long a_length = file.length();
                bytes = new byte[(int) a_length];
                int offset = 0;
                int numRead = 0;
                while (offset < bytes.length
                        && (numRead = is.read(bytes, offset, bytes.length - offset)) >= 0) {
                    offset += numRead;
                }
                // NOTE(review): a short read (offset < bytes.length) was silently
                // ignored by the original; preserved — callers may see a
                // partially-filled buffer.
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        return bytes;
    }

    /**
     * Reads response lines until a blank line, concatenating them with '\n'.
     * Name kept as-is (sic) from the original; private, so safe but uncorrected.
     * NOTE(review): readLine() returning null at EOF triggers an NPE that is
     * swallowed by the catch — the loop relies on that to terminate on EOF.
     */
    private String recieve() {
        String value = " ";
        String the_message = "";
        results = new String[512];
        int array_location = 0;
        try {
            while (value.length() != 0) {
                value = input.readLine();
                the_message = the_message + value + "\n";
                results[array_location] = value;
                array_location++;
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        return the_message;
    }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.aggregations.pipeline.movavg;

import com.google.common.base.Function;
import com.google.common.collect.EvictingQueue;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.search.aggregations.Aggregation;
import org.elasticsearch.search.aggregations.AggregationExecutionException;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregation.ReduceContext;
import org.elasticsearch.search.aggregations.InternalAggregation.Type;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregator;
import org.elasticsearch.search.aggregations.bucket.histogram.InternalHistogram;
import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
import org.elasticsearch.search.aggregations.pipeline.InternalSimpleValue;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorFactory;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorStreams;
import org.elasticsearch.search.aggregations.pipeline.movavg.models.MovAvgModel;
import org.elasticsearch.search.aggregations.pipeline.movavg.models.MovAvgModelStreams;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams;
import org.joda.time.DateTime;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;

import static org.elasticsearch.common.util.CollectionUtils.eagerTransform;
import static org.elasticsearch.search.aggregations.pipeline.BucketHelpers.resolveBucketValue;

/**
 * Pipeline aggregator that computes a sliding-window moving average over the
 * buckets of a parent histogram/date_histogram, optionally fitting the model
 * parameters to the data first ({@code minimize}) and appending {@code predict}
 * forecast buckets past the last valid bucket.
 */
public class MovAvgPipelineAggregator extends PipelineAggregator {

    public final static Type TYPE = new Type("moving_avg");

    // Stream used to deserialize this aggregator from the transport layer.
    public final static PipelineAggregatorStreams.Stream STREAM = new PipelineAggregatorStreams.Stream() {
        @Override
        public MovAvgPipelineAggregator readResult(StreamInput in) throws IOException {
            MovAvgPipelineAggregator result = new MovAvgPipelineAggregator();
            result.readFrom(in);
            return result;
        }
    };

    public static void registerStreams() {
        PipelineAggregatorStreams.registerStream(STREAM, TYPE.stream());
    }

    // NOTE(review): this constant is not referenced anywhere in this file chunk;
    // reduce() uses AGGREGATION_TRANFORM_FUNCTION, presumably inherited from
    // PipelineAggregator — confirm and remove this if truly dead.
    private static final Function<Aggregation, InternalAggregation> FUNCTION = new Function<Aggregation, InternalAggregation>() {
        @Override
        public InternalAggregation apply(Aggregation input) {
            return (InternalAggregation) input;
        }
    };

    private ValueFormatter formatter;   // output value formatter (nullable, stream-optional)
    private GapPolicy gapPolicy;        // how empty/missing buckets are treated
    private int window;                 // sliding-window size in buckets
    private MovAvgModel model;          // smoothing model (simple, EWMA, Holt-Winters, ...)
    private int predict;                // number of forecast buckets to append (0 = none)
    private boolean minimize;           // whether to fit model parameters before reducing

    // No-arg constructor for stream deserialization only (see STREAM above).
    public MovAvgPipelineAggregator() {
    }

    public MovAvgPipelineAggregator(String name, String[] bucketsPaths, ValueFormatter formatter, GapPolicy gapPolicy,
            int window, int predict, MovAvgModel model, boolean minimize, Map<String, Object> metadata) {
        super(name, bucketsPaths, metadata);
        this.formatter = formatter;
        this.gapPolicy = gapPolicy;
        this.window = window;
        this.model = model;
        this.predict = predict;
        this.minimize = minimize;
    }

    @Override
    public Type type() {
        return TYPE;
    }

    /**
     * Rebuilds the histogram: each bucket with a resolvable value gets an extra
     * InternalSimpleValue holding the moving average of the preceding window;
     * afterwards, up to {@code predict} forecast buckets are merged in/appended.
     */
    @Override
    public InternalAggregation reduce(InternalAggregation aggregation, ReduceContext reduceContext) {
        InternalHistogram histo = (InternalHistogram) aggregation;
        List<? extends InternalHistogram.Bucket> buckets = histo.getBuckets();
        InternalHistogram.Factory<? extends InternalHistogram.Bucket> factory = histo.getFactory();
        // NOTE(review): raw List — elements are InternalHistogram.Bucket; kept
        // raw here, likely because factory.create(...) needs the raw shape.
        List newBuckets = new ArrayList<>();
        // Bounded FIFO holding the last `window` bucket values.
        EvictingQueue<Double> values = EvictingQueue.create(this.window);
        long lastValidKey = 0;       // key of the last bucket that had a value (for forecasting)
        int lastValidPosition = 0;   // its index in newBuckets
        int counter = 0;
        // Do we need to fit the model parameters to the data?
        if (minimize) {
            assert (model.canBeMinimized());
            model = minimize(buckets, histo, model);
        }
        for (InternalHistogram.Bucket bucket : buckets) {
            Double thisBucketValue = resolveBucketValue(histo, bucket, bucketsPaths()[0], gapPolicy);
            // Default is to reuse existing bucket.  Simplifies the rest of the logic,
            // since we only change newBucket if we can add to it
            InternalHistogram.Bucket newBucket = bucket;
            if (!(thisBucketValue == null || thisBucketValue.equals(Double.NaN))) {
                // Some models (e.g. HoltWinters) have certain preconditions that must be met
                if (model.hasValue(values.size())) {
                    double movavg = model.next(values);
                    // Copy the bucket's existing sub-aggs and append the moving-average value.
                    List<InternalAggregation> aggs = new ArrayList<>(eagerTransform(bucket.getAggregations().asList(), AGGREGATION_TRANFORM_FUNCTION));
                    aggs.add(new InternalSimpleValue(name(), movavg, formatter, new ArrayList<PipelineAggregator>(), metaData()));
                    newBucket = factory.createBucket(bucket.getKey(), bucket.getDocCount(), new InternalAggregations(
                            aggs), bucket.getKeyed(), bucket.getFormatter());
                }
                if (predict > 0) {
                    // Track the numeric key of the last non-empty bucket so forecasts
                    // can be placed at successive rounding intervals after it.
                    if (bucket.getKey() instanceof Number) {
                        lastValidKey = ((Number) bucket.getKey()).longValue();
                    } else if (bucket.getKey() instanceof DateTime) {
                        lastValidKey = ((DateTime) bucket.getKey()).getMillis();
                    } else {
                        throw new AggregationExecutionException("Expected key of type Number or DateTime but got [" + lastValidKey + "]");
                    }
                    lastValidPosition = counter;
                }
                values.offer(thisBucketValue);
            }
            counter += 1;
            newBuckets.add(newBucket);
        }
        if (buckets.size() > 0 && predict > 0) {
            boolean keyed;
            // NOTE(review): this local intentionally shadows the instance field
            // `formatter` for the prediction buckets (uses the buckets' own formatter).
            ValueFormatter formatter;
            keyed = buckets.get(0).getKeyed();
            formatter = buckets.get(0).getFormatter();
            double[] predictions = model.predict(values, predict);
            for (int i = 0; i < predictions.length; i++) {
                List<InternalAggregation> aggs;
                long newKey = histo.getRounding().nextRoundingValue(lastValidKey);
                if (lastValidPosition + i + 1 < newBuckets.size()) {
                    // Forecast lands on an existing (empty/gap) bucket: merge into it.
                    InternalHistogram.Bucket bucket = (InternalHistogram.Bucket) newBuckets.get(lastValidPosition + i + 1);
                    // Get the existing aggs in the bucket so we don't clobber data
                    aggs = new ArrayList<>(eagerTransform(bucket.getAggregations().asList(), AGGREGATION_TRANFORM_FUNCTION));
                    aggs.add(new InternalSimpleValue(name(), predictions[i], formatter, new ArrayList<PipelineAggregator>(), metaData()));
                    InternalHistogram.Bucket newBucket = factory.createBucket(newKey, 0, new InternalAggregations(
                            aggs), keyed, formatter);
                    // Overwrite the existing bucket with the new version
                    newBuckets.set(lastValidPosition + i + 1, newBucket);
                } else {
                    // Not seen before, create fresh
                    aggs = new ArrayList<>();
                    aggs.add(new InternalSimpleValue(name(), predictions[i], formatter, new ArrayList<PipelineAggregator>(), metaData()));
                    InternalHistogram.Bucket newBucket = factory.createBucket(newKey, 0, new InternalAggregations(
                            aggs), keyed, formatter);
                    // Since this is a new bucket, simply append it
                    newBuckets.add(newBucket);
                }
                lastValidKey = newKey;
            }
        }
        return factory.create(newBuckets, histo);
    }

    /**
     * Fits the model's tunable parameters by splitting the trailing 2*window
     * non-empty bucket values into a train window and a test window, then
     * running the simulated-annealing minimizer. Returns the original model
     * unchanged if there is not enough data for either window.
     */
    private MovAvgModel minimize(List<? extends InternalHistogram.Bucket> buckets, InternalHistogram histo, MovAvgModel model) {
        int counter = 0;
        EvictingQueue<Double> values = EvictingQueue.create(window);
        double[] test = new double[window];
        ListIterator<? extends InternalHistogram.Bucket> iter = buckets.listIterator(buckets.size());
        // We have to walk the iterator backwards because we don't know if/how many buckets are empty.
        while (iter.hasPrevious() && counter < window) {
            Double thisBucketValue = resolveBucketValue(histo, iter.previous(), bucketsPaths()[0], gapPolicy);
            if (!(thisBucketValue == null || thisBucketValue.equals(Double.NaN))) {
                test[window - counter - 1] = thisBucketValue;
                counter += 1;
            }
        }
        // If we didn't fill the test set, we don't have enough data to minimize.
        // Just return the model with the starting coef
        if (counter < window) {
            return model;
        }
        //And do it again, for the train set.  Unfortunately we have to fill an array and then
        //fill an evicting queue backwards :(
        counter = 0;
        double[] train = new double[window];
        while (iter.hasPrevious() && counter < window) {
            Double thisBucketValue = resolveBucketValue(histo, iter.previous(), bucketsPaths()[0], gapPolicy);
            if (!(thisBucketValue == null || thisBucketValue.equals(Double.NaN))) {
                train[window - counter - 1] = thisBucketValue;
                counter += 1;
            }
        }
        // If we didn't fill the train set, we don't have enough data to minimize.
        // Just return the model with the starting coef
        if (counter < window) {
            return model;
        }
        for (double v : train) {
            values.add(v);
        }
        return SimulatedAnealingMinimizer.minimize(model, values, test);
    }

    // Deserialization counterpart of doWriteTo; field order must match exactly.
    @Override
    public void doReadFrom(StreamInput in) throws IOException {
        formatter = ValueFormatterStreams.readOptional(in);
        gapPolicy = GapPolicy.readFrom(in);
        window = in.readVInt();
        predict = in.readVInt();
        model = MovAvgModelStreams.read(in);
        minimize = in.readBoolean();
    }

    @Override
    public void doWriteTo(StreamOutput out) throws IOException {
        ValueFormatterStreams.writeOptional(formatter, out);
        gapPolicy.writeTo(out);
        out.writeVInt(window);
        out.writeVInt(predict);
        model.writeTo(out);
        out.writeBoolean(minimize);
    }

    /**
     * Factory that validates the placement of the aggregation (must be the sole
     * buckets-path child of a histogram with min_doc_count 0) and creates the
     * aggregator instance.
     */
    public static class Factory extends PipelineAggregatorFactory {

        private final ValueFormatter formatter;
        private GapPolicy gapPolicy;
        private int window;
        private MovAvgModel model;
        private int predict;
        private boolean minimize;

        public Factory(String name, String[] bucketsPaths, ValueFormatter formatter, GapPolicy gapPolicy, int window,
                int predict, MovAvgModel model, boolean minimize) {
            super(name, TYPE.name(), bucketsPaths);
            this.formatter = formatter;
            this.gapPolicy = gapPolicy;
            this.window = window;
            this.model = model;
            this.predict = predict;
            this.minimize = minimize;
        }

        @Override
        protected PipelineAggregator createInternal(Map<String, Object> metaData) throws IOException {
            return new MovAvgPipelineAggregator(name, bucketsPaths, formatter, gapPolicy, window, predict, model,
                    minimize, metaData);
        }

        @Override
        public void doValidate(AggregatorFactory parent, AggregatorFactory[] aggFactories,
                List<PipelineAggregatorFactory> pipelineAggregatoractories) {
            if (bucketsPaths.length != 1) {
                throw new IllegalStateException(PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName()
                        + " must contain a single entry for aggregation [" + name + "]");
            }
            if (!(parent instanceof HistogramAggregator.Factory)) {
                throw new IllegalStateException("moving average aggregation [" + name
                        + "] must have a histogram or date_histogram as parent");
            } else {
                // Gaps must materialize as empty buckets for the window/forecast
                // arithmetic to line up, hence the min_doc_count requirement.
                HistogramAggregator.Factory histoParent = (HistogramAggregator.Factory) parent;
                if (histoParent.minDocCount() != 0) {
                    throw new IllegalStateException("parent histogram of moving average aggregation [" + name
                            + "] must have min_doc_count of 0");
                }
            }
        }
    }
}
/******************************************************************************* * * Pentaho Data Integration * * Copyright (C) 2002-2012 by Pentaho : http://www.pentaho.com * ******************************************************************************* * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************************/ package org.pentaho.di.job.entries.shell; import static org.pentaho.di.job.entry.validator.AbstractFileValidator.putVariableSpace; import static org.pentaho.di.job.entry.validator.AndValidator.putValidators; import static org.pentaho.di.job.entry.validator.JobEntryValidatorUtils.andValidator; import static org.pentaho.di.job.entry.validator.JobEntryValidatorUtils.fileExistsValidator; import static org.pentaho.di.job.entry.validator.JobEntryValidatorUtils.notBlankValidator; import java.io.File; import java.io.IOException; import java.io.OutputStream; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Calendar; import java.util.Iterator; import java.util.List; import java.util.Map; import org.apache.commons.vfs.FileObject; import org.pentaho.di.cluster.SlaveServer; import org.pentaho.di.core.CheckResultInterface; import org.pentaho.di.core.Const; import org.pentaho.di.core.Result; import org.pentaho.di.core.ResultFile; import org.pentaho.di.core.RowMetaAndData; import org.pentaho.di.core.database.DatabaseMeta; import org.pentaho.di.core.exception.KettleDatabaseException; 
import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.exception.KettleXMLException; import org.pentaho.di.core.logging.Log4jFileAppender; import org.pentaho.di.core.logging.LogLevel; import org.pentaho.di.core.logging.LogWriter; import org.pentaho.di.core.util.StreamLogger; import org.pentaho.di.core.vfs.KettleVFS; import org.pentaho.di.core.xml.XMLHandler; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.job.JobMeta; import org.pentaho.di.job.entry.JobEntryBase; import org.pentaho.di.job.entry.JobEntryInterface; import org.pentaho.di.job.entry.validator.ValidatorContext; import org.pentaho.di.repository.ObjectId; import org.pentaho.di.repository.Repository; import org.pentaho.di.resource.ResourceEntry; import org.pentaho.di.resource.ResourceEntry.ResourceType; import org.pentaho.di.resource.ResourceReference; import org.w3c.dom.Node; /** * Shell type of Job Entry. You can define shell scripts to be executed in a * Job. * * @author Matt * @since 01-10-2003, rewritten on 18-06-2004 */ public class JobEntryShell extends JobEntryBase implements Cloneable, JobEntryInterface { private static Class<?> PKG = JobEntryShell.class; // for i18n purposes, needed by Translator2!! 
$NON-NLS-1$ private String filename; private String workDirectory; public String arguments[]; public boolean argFromPrevious; public boolean setLogfile; public String logfile, logext; public boolean addDate, addTime; public LogLevel logFileLevel; public boolean execPerRow; public boolean setAppendLogfile; public boolean insertScript; public String script; public JobEntryShell(String name) { super(name, ""); } public JobEntryShell() { this(""); clear(); } public Object clone() { JobEntryShell je = (JobEntryShell) super.clone(); return je; } public String getXML() { StringBuffer retval = new StringBuffer(300); retval.append(super.getXML()); retval.append(" ").append(XMLHandler.addTagValue("filename", filename)); retval.append(" ").append(XMLHandler.addTagValue("work_directory", workDirectory)); retval.append(" ").append(XMLHandler.addTagValue("arg_from_previous", argFromPrevious)); retval.append(" ").append(XMLHandler.addTagValue("exec_per_row", execPerRow)); retval.append(" ").append(XMLHandler.addTagValue("set_logfile", setLogfile)); retval.append(" ").append(XMLHandler.addTagValue("logfile", logfile)); retval.append(" ").append(XMLHandler.addTagValue("set_append_logfile", setAppendLogfile)); retval.append(" ").append(XMLHandler.addTagValue("logext", logext)); retval.append(" ").append(XMLHandler.addTagValue("add_date", addDate)); retval.append(" ").append(XMLHandler.addTagValue("add_time", addTime)); retval.append(" ").append(XMLHandler.addTagValue("insertScript", insertScript)); retval.append(" ").append(XMLHandler.addTagValue("script", script)); retval.append(" ").append(XMLHandler.addTagValue("loglevel", (logFileLevel == null) ? null : logFileLevel.getCode())); if (arguments != null) for (int i = 0; i < arguments.length; i++) { // THIS IS A VERY BAD WAY OF READING/SAVING AS IT MAKES // THE XML "DUBIOUS". DON'T REUSE IT. 
(Sven B) retval.append(" ").append(XMLHandler.addTagValue("argument" + i, arguments[i])); } return retval.toString(); } public void loadXML(Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers, Repository rep) throws KettleXMLException { try { super.loadXML(entrynode, databases, slaveServers); setFileName(XMLHandler.getTagValue(entrynode, "filename")); setWorkDirectory(XMLHandler.getTagValue(entrynode, "work_directory")); argFromPrevious = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "arg_from_previous")); execPerRow = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "exec_per_row")); setLogfile = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "set_logfile")); setAppendLogfile = "Y".equalsIgnoreCase( XMLHandler.getTagValue(entrynode, "set_append_logfile") ); addDate = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "add_date")); addTime = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "add_time")); logfile = XMLHandler.getTagValue(entrynode, "logfile"); logext = XMLHandler.getTagValue(entrynode, "logext"); logFileLevel = LogLevel.getLogLevelForCode(XMLHandler.getTagValue(entrynode, "loglevel")); insertScript = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "insertScript")); script= XMLHandler.getTagValue(entrynode, "script"); // How many arguments? int argnr = 0; while (XMLHandler.getTagValue(entrynode, "argument" + argnr) != null) argnr++; arguments = new String[argnr]; // Read them all... // THIS IS A VERY BAD WAY OF READING/SAVING AS IT MAKES // THE XML "DUBIOUS". DON'T REUSE IT. 
for (int a = 0; a < argnr; a++) arguments[a] = XMLHandler.getTagValue(entrynode, "argument" + a); } catch (KettleException e) { throw new KettleXMLException("Unable to load job entry of type 'shell' from XML node", e); } } // Load the jobentry from repository public void loadRep(Repository rep, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers) throws KettleException { try { setFileName(rep.getJobEntryAttributeString(id_jobentry, "file_name")); setWorkDirectory(rep.getJobEntryAttributeString(id_jobentry, "work_directory")); argFromPrevious = rep.getJobEntryAttributeBoolean(id_jobentry, "arg_from_previous"); execPerRow = rep.getJobEntryAttributeBoolean(id_jobentry, "exec_per_row"); setLogfile = rep.getJobEntryAttributeBoolean(id_jobentry, "set_logfile"); setAppendLogfile = rep.getJobEntryAttributeBoolean(id_jobentry, "set_append_logfile"); addDate = rep.getJobEntryAttributeBoolean(id_jobentry, "add_date"); addTime = rep.getJobEntryAttributeBoolean(id_jobentry, "add_time"); logfile = rep.getJobEntryAttributeString(id_jobentry, "logfile"); logext = rep.getJobEntryAttributeString(id_jobentry, "logext"); logFileLevel = LogLevel.getLogLevelForCode(rep.getJobEntryAttributeString(id_jobentry, "loglevel")); insertScript = rep.getJobEntryAttributeBoolean(id_jobentry, "insertScript"); script = rep.getJobEntryAttributeString(id_jobentry, "script"); // How many arguments? int argnr = rep.countNrJobEntryAttributes(id_jobentry, "argument"); arguments = new String[argnr]; // Read them all... 
for (int a = 0; a < argnr; a++) { arguments[a] = rep.getJobEntryAttributeString(id_jobentry, a, "argument"); } } catch (KettleDatabaseException dbe) { throw new KettleException( "Unable to load job entry of type 'shell' from the repository with id_jobentry=" + id_jobentry, dbe); } } // Save the attributes of this job entry // public void saveRep(Repository rep, ObjectId id_job) throws KettleException { try { rep.saveJobEntryAttribute(id_job, getObjectId(), "file_name", filename); rep.saveJobEntryAttribute(id_job, getObjectId(), "work_directory", workDirectory); rep.saveJobEntryAttribute(id_job, getObjectId(), "arg_from_previous", argFromPrevious); rep.saveJobEntryAttribute(id_job, getObjectId(), "exec_per_row", execPerRow); rep.saveJobEntryAttribute(id_job, getObjectId(), "set_logfile", setLogfile); rep.saveJobEntryAttribute(id_job, getObjectId(), "set_append_logfile", setAppendLogfile); rep.saveJobEntryAttribute(id_job, getObjectId(), "add_date", addDate); rep.saveJobEntryAttribute(id_job, getObjectId(), "add_time", addTime); rep.saveJobEntryAttribute(id_job, getObjectId(), "logfile", logfile); rep.saveJobEntryAttribute(id_job, getObjectId(), "logext", logext); rep.saveJobEntryAttribute(id_job, getObjectId(), "loglevel", logFileLevel == null ? LogLevel.NOTHING.getCode() : logFileLevel.getCode()); rep.saveJobEntryAttribute(id_job, getObjectId(), "insertScript", insertScript); rep.saveJobEntryAttribute(id_job, getObjectId(), "script", script); // save the arguments... 
if (arguments != null) { for (int i = 0; i < arguments.length; i++) { rep.saveJobEntryAttribute(id_job, getObjectId(), i, "argument", arguments[i]); } } } catch (KettleDatabaseException dbe) { throw new KettleException("Unable to save job entry of type 'shell' to the repository", dbe); } } public void clear() { super.clear(); filename = null; workDirectory = null; arguments = null; argFromPrevious = false; addDate = false; addTime = false; logfile = null; logext = null; setLogfile = false; execPerRow = false; setAppendLogfile=false; insertScript=false; script=null; } public void setFileName(String n) { filename = n; } public String getFilename() { return filename; } public String getRealFilename() { return environmentSubstitute(getFilename()); } public void setWorkDirectory(String n) { workDirectory = n; } public String getWorkDirectory() { return workDirectory; } public void setScript(String scriptin) { script=scriptin; } public String getScript() { return script; } public String getLogFilename() { String retval = ""; if (setLogfile) { retval+=logfile==null?"":logfile; Calendar cal = Calendar.getInstance(); if (addDate) { SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd"); retval += "_" + sdf.format(cal.getTime()); } if (addTime) { SimpleDateFormat sdf = new SimpleDateFormat("HHmmss"); retval += "_" + sdf.format(cal.getTime()); } if (logext != null && logext.length() > 0) { retval += "." 
+ logext; } } return retval; } public Result execute(Result result, int nr) throws KettleException { Log4jFileAppender appender = null; LogLevel shellLogLevel = parentJob.getLogLevel(); if (setLogfile) { String realLogFilename=environmentSubstitute(getLogFilename()); // We need to check here the log filename // if we do not have one, we must fail if(Const.isEmpty(realLogFilename)) { logError(BaseMessages.getString(PKG, "JobEntryShell.Exception.LogFilenameMissing")); result.setNrErrors(1); result.setResult(false); return result; } try { appender = LogWriter.createFileAppender(realLogFilename, true,setAppendLogfile); LogWriter.getInstance().addAppender(appender); } catch (KettleException e) { logError(BaseMessages.getString(PKG, "JobEntryShell.Error.UnableopenAppenderFile",getLogFilename(), e.toString())); logError(Const.getStackTracker(e)); result.setNrErrors(1); result.setResult(false); return result; } shellLogLevel = logFileLevel; } log.setLogLevel(shellLogLevel); result.setEntryNr(nr); // "Translate" the arguments for later String substArgs[] = null; if (arguments != null) { substArgs = new String[arguments.length]; for (int idx = 0; idx < arguments.length; idx++) { substArgs[idx] = environmentSubstitute(arguments[idx]); } } int iteration = 0; String args[] = substArgs; RowMetaAndData resultRow = null; boolean first = true; List<RowMetaAndData> rows = result.getRows(); if(log.isDetailed()) { logDetailed(BaseMessages.getString(PKG, "JobEntryShell.Log.FoundPreviousRows",""+(rows != null ? 
rows.size() : 0))); } while ((first && !execPerRow) || (execPerRow && rows != null && iteration < rows.size() && result.getNrErrors() == 0)) { first = false; if (rows != null && execPerRow) { resultRow = (RowMetaAndData) rows.get(iteration); } else { resultRow = null; } List<RowMetaAndData> cmdRows = null; if (execPerRow) // Execute for each input row { if (argFromPrevious) // Copy the input row to the (command // line) arguments { if (resultRow != null) { args = new String[resultRow.size()]; for (int i = 0; i < resultRow.size(); i++) { args[i] = resultRow.getString(i, null); } } } else { // Just pass a single row List<RowMetaAndData> newList = new ArrayList<RowMetaAndData>(); newList.add(resultRow); cmdRows = newList; } } else { if (argFromPrevious) { // Only put the first Row on the arguments args = null; if (resultRow != null) { args = new String[resultRow.size()]; for (int i = 0; i < resultRow.size(); i++) { args[i] = resultRow.getString(i, null); } } else { cmdRows = rows; } } else { // Keep it as it was... cmdRows = rows; } } executeShell(result, cmdRows, args); iteration++; } if (setLogfile) { if (appender != null) { LogWriter.getInstance().removeAppender(appender); appender.close(); ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_LOG, appender.getFile(), parentJob.getJobname(), getName()); result.getResultFiles().put(resultFile.getFile().toString(), resultFile); } } return result; } private void executeShell(Result result, List<RowMetaAndData> cmdRows, String[] args) { FileObject fileObject = null; String realScript=null; FileObject tempFile=null; try { // What's the exact command? 
String base[] = null; List<String> cmds = new ArrayList<String>(); if(log.isBasic()) logBasic(BaseMessages.getString(PKG, "JobShell.RunningOn",Const.getOS())); if(insertScript) { realScript=environmentSubstitute(script); }else { String realFilename = environmentSubstitute(getFilename()); fileObject = KettleVFS.getFileObject(realFilename, this); } if (Const.getOS().equals("Windows 95")) { base = new String[] { "command.com", "/C" }; if (insertScript) { tempFile = KettleVFS.createTempFile("kettle", "shell.bat", environmentSubstitute(workDirectory), this); fileObject = createTemporaryShellFile(tempFile, realScript); } } else if (Const.getOS().startsWith("Windows")) { base = new String[] { "cmd.exe", "/C" }; if (insertScript) { tempFile = KettleVFS.createTempFile("kettle", "shell.bat", environmentSubstitute(workDirectory), this); fileObject = createTemporaryShellFile(tempFile, realScript); } } else { if (insertScript) { tempFile = KettleVFS.createTempFile("kettle", "shell", environmentSubstitute(workDirectory), this); fileObject = createTemporaryShellFile(tempFile, realScript); } base = new String[] { KettleVFS.getFilename(fileObject) }; } // Construct the arguments... if (argFromPrevious && cmdRows != null) { // Add the base command... for (int i = 0; i < base.length; i++) cmds.add(base[i]); if (Const.getOS().equals("Windows 95") || Const.getOS().startsWith("Windows")) { // for windows all arguments including the command itself // need to be // included in 1 argument to cmd/command. StringBuffer cmdline = new StringBuffer(300); cmdline.append('"'); cmdline.append(Const.optionallyQuoteStringByOS(KettleVFS.getFilename(fileObject))); // Add the arguments from previous results... for (int i = 0; i < cmdRows.size(); i++) // Normally just // one row, but // once in a // while to // remain // compatible we // have // multiple. 
{ RowMetaAndData r = (RowMetaAndData) cmdRows.get(i); for (int j = 0; j < r.size(); j++) { cmdline.append(' '); cmdline.append(Const.optionallyQuoteStringByOS(r.getString(j, null))); } } cmdline.append('"'); cmds.add(cmdline.toString()); } else { // Add the arguments from previous results... for (int i = 0; i < cmdRows.size(); i++) // Normally just // one row, but // once in a // while to // remain // compatible we // have // multiple. { RowMetaAndData r = (RowMetaAndData) cmdRows.get(i); for (int j = 0; j < r.size(); j++) { cmds.add(Const.optionallyQuoteStringByOS(r.getString(j, null))); } } } } else if (args != null) { // Add the base command... for (int i = 0; i < base.length; i++) cmds.add(base[i]); if (Const.getOS().equals("Windows 95") || Const.getOS().startsWith("Windows")) { // for windows all arguments including the command itself // need to be // included in 1 argument to cmd/command. StringBuffer cmdline = new StringBuffer(300); cmdline.append('"'); cmdline.append(Const.optionallyQuoteStringByOS(KettleVFS.getFilename(fileObject))); for (int i = 0; i < args.length; i++) { cmdline.append(' '); cmdline.append(Const.optionallyQuoteStringByOS(args[i])); } cmdline.append('"'); cmds.add(cmdline.toString()); } else { for (int i = 0; i < args.length; i++) { cmds.add(args[i]); } } } StringBuffer command = new StringBuffer(); Iterator<String> it = cmds.iterator(); boolean first = true; while (it.hasNext()) { if (!first) command.append(' '); else first = false; command.append((String) it.next()); } if(log.isBasic()) logBasic(BaseMessages.getString(PKG, "JobShell.ExecCommand",command.toString())); // Build the environment variable list... 
ProcessBuilder procBuilder = new ProcessBuilder(cmds); Map<String, String> env = procBuilder.environment(); String[] variables = listVariables(); for (int i = 0; i < variables.length; i++) { env.put(variables[i], getVariable(variables[i])); } if (getWorkDirectory() != null && !Const.isEmpty(Const.rtrim(getWorkDirectory()))) { String vfsFilename = environmentSubstitute(getWorkDirectory()); File file = new File(KettleVFS.getFilename(KettleVFS.getFileObject(vfsFilename, this))); procBuilder.directory(file); } Process proc = procBuilder.start(); // any error message? StreamLogger errorLogger = new StreamLogger(log, proc.getErrorStream(), "(stderr)"); // any output? StreamLogger outputLogger = new StreamLogger(log, proc.getInputStream(), "(stdout)"); // kick them off new Thread(errorLogger).start(); new Thread(outputLogger).start(); proc.waitFor(); if(log.isDetailed()) logDetailed(BaseMessages.getString(PKG, "JobShell.CommandFinished",command.toString())); // What's the exit status? result.setExitStatus(proc.exitValue()); if (result.getExitStatus() != 0) { if(log.isDetailed()) logDetailed(BaseMessages.getString(PKG, "JobShell.ExitStatus",environmentSubstitute(getFilename()),""+result.getExitStatus())); result.setNrErrors(1); } // close the streams // otherwise you get "Too many open files, java.io.IOException" after a lot of iterations proc.getErrorStream().close(); proc.getOutputStream().close(); } catch (IOException ioe) { logError(BaseMessages.getString(PKG, "JobShell.ErrorRunningShell",environmentSubstitute(getFilename()),ioe.toString()), ioe); result.setNrErrors(1); } catch (InterruptedException ie) { logError(BaseMessages.getString(PKG, "JobShell.Shellinterupted",environmentSubstitute(getFilename()),ie.toString()), ie); result.setNrErrors(1); } catch (Exception e) { logError(BaseMessages.getString(PKG, "JobShell.UnexpectedError",environmentSubstitute(getFilename()),e.toString()), e); result.setNrErrors(1); } finally { // If we created a temporary file, remove 
it... // if (tempFile!=null) { try { tempFile.delete(); } catch(Exception e) { BaseMessages.getString(PKG, "JobShell.UnexpectedError",tempFile.toString(),e.toString()); } } } if (result.getNrErrors() > 0) { result.setResult(false); } else { result.setResult(true); } } private FileObject createTemporaryShellFile(FileObject tempFile, String fileContent) throws Exception { // Create a unique new temporary filename in the working directory, put the script in there // Set the permissions to execute and then run it... // if (tempFile != null && fileContent != null) { try { tempFile.createFile(); OutputStream outputStream = tempFile.getContent().getOutputStream(); outputStream.write(fileContent.getBytes()); outputStream.close(); if (!Const.getOS().startsWith("Windows")) { String tempFilename = KettleVFS.getFilename(tempFile); // Now we have to make this file executable... // On Unix-like systems this is done using the command "/bin/chmod +x filename" // ProcessBuilder procBuilder = new ProcessBuilder("chmod", "+x", tempFilename); Process proc = procBuilder.start(); // Eat/log stderr/stdout all messages in a different thread... 
StreamLogger errorLogger = new StreamLogger(log, proc.getErrorStream(), toString() + " (stderr)"); StreamLogger outputLogger = new StreamLogger(log, proc.getInputStream(), toString() + " (stdout)"); new Thread(errorLogger).start(); new Thread(outputLogger).start(); proc.waitFor(); } } catch(Exception e) { throw new Exception("Unable to create temporary file to execute script", e); } } return tempFile; } public boolean evaluates() { return true; } public boolean isUnconditional() { return true; } public List<ResourceReference> getResourceDependencies(JobMeta jobMeta) { List<ResourceReference> references = super.getResourceDependencies(jobMeta); if (!Const.isEmpty(filename)) { String realFileName = jobMeta.environmentSubstitute(filename); ResourceReference reference = new ResourceReference(this); reference.getEntries().add(new ResourceEntry(realFileName, ResourceType.FILE)); references.add(reference); } return references; } @Override public void check(List<CheckResultInterface> remarks, JobMeta jobMeta) { ValidatorContext ctx = new ValidatorContext(); putVariableSpace(ctx, getVariables()); putValidators(ctx, notBlankValidator(), fileExistsValidator()); andValidator().validate(this, "workDirectory", remarks, ctx); //$NON-NLS-1$ andValidator().validate(this, "filename", remarks, putValidators(notBlankValidator())); //$NON-NLS-1$ if (setLogfile) { andValidator().validate(this, "logfile", remarks, putValidators(notBlankValidator())); //$NON-NLS-1$ } } protected String getLogfile() { return logfile; } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* $Id$ */ package org.apache.fop.area.inline; import java.io.Serializable; import java.util.List; import org.apache.fop.area.Area; import org.apache.fop.area.LineArea; import org.apache.fop.area.Trait; import org.apache.fop.complexscripts.bidi.InlineRun; /** * Inline Area * This area is for all inline areas that can be placed * in a line area. 
 */
public class InlineArea extends Area {

    private static final long serialVersionUID = -8940066479810170980L;

    /**
     * this class stores information about potential adjustments
     * that can be used in order to re-compute adjustments when a
     * page-number or a page-number-citation is resolved
     */
    // @SuppressFBWarnings("SE_INNER_CLASS")
    protected class InlineAdjustingInfo implements Serializable {

        private static final long serialVersionUID = -5601387735459712149L;

        /** stretch of the inline area */
        protected int availableStretch;

        /** shrink of the inline area */
        protected int availableShrink;

        /** total adjustment (= ipd - width of fixed elements) */
        protected int adjustment;

        /**
         * Constructor
         *
         * @param stretch the available space for stretching
         * @param shrink the available space for shrinking
         * @param adj space adjustment type
         */
        protected InlineAdjustingInfo(int stretch, int shrink, int adj) {
            availableStretch = stretch;
            availableShrink = shrink;
            adjustment = adj;
        }

        /**
         * Apply the variation factor
         *
         * @param variationFactor the factor by which the adjustment is to be changed
         * @return the IPD increase
         */
        protected int applyVariationFactor(double variationFactor) {
            int oldAdjustment = adjustment;
            // compound assignment implicitly truncates the scaled double back to int
            adjustment *= variationFactor;
            // the delta is the amount by which the area's IPD must grow (or shrink, if negative)
            return adjustment - oldAdjustment;
        }
    }

    /**
     * offset position from before edge of parent area
     */
    protected int blockProgressionOffset;

    /**
     * parent area
     * it is needed in order to recompute adjust ratio and indents
     * when a page-number or a page-number-citation is resolved
     */
    private Area parentArea;

    /**
     * ipd variation of child areas: if this area has not already
     * been added and cannot notify its parent area, store the variation
     * and wait for the parent area to be set
     */
    // NOTE(review): only accumulated in notifyIPDVariation() within this class;
    // presumably consumed once a parent is attached — confirm in subclasses/callers.
    private int storedIPDVariation;

    /**
     * The adjustment information object
     */
    protected InlineAdjustingInfo adjustingInfo;

    /**
     * Default constructor for inline area.
     */
    public InlineArea() {
        // zero offset, unresolved (-1) bidi level
        this (0, -1);
    }

    /**
     * Instantiate inline area.
     * @param blockProgressionOffset a block progression offset or zero
     * @param bidiLevel a resolved bidi level or -1
     */
    protected InlineArea(int blockProgressionOffset, int bidiLevel) {
        this.blockProgressionOffset = blockProgressionOffset;
        setBidiLevel(bidiLevel);
    }

    /**
     * @return the adjustment information object
     */
    public InlineAdjustingInfo getAdjustingInfo() {
        return adjustingInfo;
    }

    /**
     * Create a new adjustment information object
     * @param stretch the available space for stretching
     * @param shrink the available space for shrinking
     * @param adjustment space adjustment type
     */
    public void setAdjustingInfo(int stretch, int shrink, int adjustment) {
        adjustingInfo = new InlineAdjustingInfo(stretch, shrink, adjustment);
    }

    /**
     * Sets the adjustment information from an existing object
     * @param adjustingInfo the existing adjustment object
     */
    public void setAdjustingInfo(InlineAdjustingInfo adjustingInfo) {
        this.adjustingInfo = adjustingInfo;
    }

    /**
     * Modify the adjustment value in the adjustment information object
     * @param adjustment the new adjustment value
     */
    public void setAdjustment(int adjustment) {
        // silently a no-op when no adjusting info has been attached
        if (adjustingInfo != null) {
            adjustingInfo.adjustment = adjustment;
        }
    }

    /**
     * Increase the inline progression dimensions of this area.
     * This is used for inline parent areas that contain mulitple child areas.
     *
     * @param ipd the inline progression to increase by
     */
    public void increaseIPD(int ipd) {
        this.ipd += ipd;
    }

    /**
     * Set the block progression offset of this inline area.
     * This is used to set the offset of the inline area
     * which is relative to the before edge of the parent area.
     *
     * @param blockProgressionOffset the offset
     */
    public void setBlockProgressionOffset(int blockProgressionOffset) {
        this.blockProgressionOffset = blockProgressionOffset;
    }

    /**
     * Get the block progression offset of this inline area.
     * This returns the offset of the inline area
     * relative to the before edge of the parent area.
     *
     * @return the blockProgressionOffset
     */
    public int getBlockProgressionOffset() {
        return blockProgressionOffset;
    }

    /**
     * @param parentArea The parentArea to set.
     */
    public void setParentArea(Area parentArea) {
        this.parentArea = parentArea;
    }

    /**
     * @return Returns the parentArea.
     */
    public Area getParentArea() {
        return parentArea;
    }

    /**
     * Set the parent for the child area.
     *
     * {@inheritDoc}
     */
    @Override
    public void addChildArea(Area childArea) {
        super.addChildArea(childArea);
        // keep the parent back-pointer consistent for inline children
        if (childArea instanceof InlineArea) {
            ((InlineArea) childArea).setParentArea(this);
        }
    }

    /** @return true if the inline area is underlined. */
    public boolean hasUnderline() {
        return getTraitAsBoolean(Trait.UNDERLINE);
    }

    /** @return true if the inline area is overlined. */
    public boolean hasOverline() {
        return getTraitAsBoolean(Trait.OVERLINE);
    }

    /** @return true if the inline area has a line through. */
    public boolean hasLineThrough() {
        return getTraitAsBoolean(Trait.LINETHROUGH);
    }

    /** @return true if the inline area is blinking. */
    public boolean isBlinking() {
        return getTraitAsBoolean(Trait.BLINK);
    }

    /**
     * recursively apply the variation factor to all descendant areas
     * @param variationFactor the variation factor that must be applied to adjustments
     * @param lineStretch the total stretch of the line
     * @param lineShrink the total shrink of the line
     * @return true if there is an UnresolvedArea descendant
     */
    public boolean applyVariationFactor(double variationFactor,
                                        int lineStretch, int lineShrink) {
        // default behaviour: update the IPD and return false
        // (lineStretch/lineShrink are unused here; subclasses may use them)
        if (adjustingInfo != null) {
            setIPD(getIPD() + adjustingInfo.applyVariationFactor(variationFactor));
        }
        return false;
    }

    /**
     * Apply IPD variation.
     * @param ipdVariation the variation
     */
    public void handleIPDVariation(int ipdVariation) {
        if (log.isTraceEnabled()) {
            log.trace("Handling IPD variation for " + getClass().getSimpleName()
                    + ": increase by " + ipdVariation + " mpt.");
        }
        // a zero variation is ignored entirely (no propagation to the parent)
        if (ipdVariation != 0) {
            increaseIPD(ipdVariation);
            notifyIPDVariation(ipdVariation);
        }
    }

    /**
     * notify the parent area about the ipd variation of this area
     * or of a descendant area
     * @param ipdVariation the difference between new and old ipd
     */
    protected void notifyIPDVariation(int ipdVariation) {
        // local deliberately shadows the field of the same name
        Area parentArea = getParentArea();
        if (parentArea instanceof InlineArea) {
            ((InlineArea) parentArea).handleIPDVariation(ipdVariation);
        } else if (parentArea instanceof LineArea) {
            ((LineArea) parentArea).handleIPDVariation(ipdVariation);
        } else if (parentArea == null) {
            // parent area not yet set: store the variations
            storedIPDVariation += ipdVariation;
        }
    }

    /**
     * Returns the offset that this area would have if its offset and size were taking
     * children areas into account. The bpd of an inline area is taken from its nominal
     * font and doesn't depend on the bpds of its children elements. However, in the case
     * of a basic-link element we want the active area to cover all of the children
     * elements.
     *
     * @return the offset that this area would have if the before-edge of its
     * content-rectangle were coinciding with the <q>beforest</q> before-edge of its
     * children allocation-rectangles.
     * @see #getVirtualBPD()
     * @see BasicLinkArea
     */
    int getVirtualOffset() {
        return getBlockProgressionOffset();
    }

    /**
     * Returns the block-progression-dimension that this area would have if it were taking
     * its children elements into account. See {@linkplain #getVirtualOffset()}.
     *
     * @return the bpd
     */
    int getVirtualBPD() {
        return getBPD();
    }

    /**
     * Collection bidi inline runs.
     * @param runs current list of inline runs
     * @return modified list of inline runs, having appended new run
     */
    // NOTE(review): raw List kept for API compatibility; elements appended here are InlineRun.
    public List collectInlineRuns(List runs) {
        assert runs != null;
        runs.add(new InlineRun(this, new int[] {getBidiLevel()}));
        return runs;
    }

    /**
     * Determine if inline area IA is an ancestor inline area or same as this area.
     * @param ia inline area to test
     * @return true if specified inline area is an ancestor or same as this area
     */
    public boolean isAncestorOrSelf(InlineArea ia) {
        return (ia == this) || isAncestor(ia);
    }

    /**
     * Determine if inline area IA is an ancestor inline area of this area.
     * @param ia inline area to test
     * @return true if specified inline area is an ancestor of this area
     */
    public boolean isAncestor(InlineArea ia) {
        // walk up the parent chain; stop at the first non-inline ancestor
        for (Area p = getParentArea(); p != null;) {
            if (p == ia) {
                return true;
            } else if (p instanceof InlineArea) {
                p = ((InlineArea) p).getParentArea();
            } else {
                p = null;
            }
        }
        return false;
    }
}
package ikube.toolkit;

import ikube.AbstractTest;
import org.apache.commons.lang.StringUtils;
import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
import org.apache.http.client.ResponseHandler;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.AutoRetryHttpClient;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.List;

import static org.junit.Assert.*;

/**
 * Tests for the {@code FILE} utility: finding, deleting, reading and writing
 * files and directories, mostly against the current working directory.
 *
 * @author Michael Couck
 * @version 01.00
 * @since 21-11-2010
 */
public class FILETest extends AbstractTest {

    // Scratch file created and removed by the individual tests.
    private File file;
    // The working directory that the find/delete tests operate in.
    private File dotFolder;
    // Patterns that match the scratch file's name.
    private String[] stringPatterns;

    @Before
    public void before() {
        String fileName = "file.file";
        dotFolder = new File(".");
        file = new File(dotFolder, fileName);
        stringPatterns = new String[]{fileName};
        // Remove residue from previous runs so the assertions start clean.
        FILE.deleteFile(new File("./common"), 1);
        FILE.deleteFile(new File("./spring.xml"), 1);
    }

    @After
    public void after() {
        // Best-effort cleanup of everything the tests may have created.
        FILE.deleteFile(file, 1);
        FILE.deleteFile(new File("./common"));
        FILE.deleteFile(new File("./indexes"));
        FILE.deleteFile(new File("./spring.xml"));
    }

    @Test
    public void findFiles() throws Exception {
        assertFalse(file.exists());
        assertTrue(file.createNewFile());
        assertTrue(file.exists());
        File[] files = FILE.findFiles(dotFolder, stringPatterns);
        int initialLength = files.length;
        assertTrue(initialLength >= 1);
        assertTrue(file.delete());
        files = FILE.findFiles(dotFolder, stringPatterns);
        // Deleting the scratch file must reduce the match count by exactly one.
        assertEquals(initialLength - 1, files.length);
    }

    @Test
    public void findFilesRecursively() throws Exception {
        assertFalse(file.exists());
        assertTrue(file.createNewFile());
        assertTrue(file.exists());
        List<File> files = FILE.findFilesRecursively(dotFolder, new ArrayList<File>(), stringPatterns);
        assertTrue(files.size() >= 1);
        files.clear();
        files = FILE.findFilesRecursively(dotFolder, files, ".xml");
        assertTrue(files.size() >= 1);
        // "doctors.xml" is a fixture expected somewhere under the working directory.
        List<File> properties = FILE.findFilesRecursively(dotFolder, new ArrayList<File>(), "doctors.xml");
        logger.error("Properties : " + properties.size() + ", " + properties);
        assertTrue(properties.size() > 0);
    }

    @Test
    public void deleteFile() throws Exception {
        assertFalse(file.exists());
        assertTrue(file.createNewFile());
        assertTrue(file.exists());
        FILE.deleteFile(file, 1);
        assertFalse(file.exists());
    }

    @Test
    public void deleteFiles() throws Exception {
        assertFalse(file.exists());
        assertTrue(file.createNewFile());
        assertTrue(file.exists());
        FILE.deleteFiles(dotFolder, stringPatterns);
        assertFalse(file.exists());
    }

    @Test
    public void findFile() {
        File file = FILE.findFileRecursively(new File("."), "doctors.xml");
        assertNotNull(file);
    }

    @Test
    public void setContents() throws Exception {
        String data = "Michael Couck";
        File tempFile = FILE.getFile("./indexes/data.dat", Boolean.FALSE);
        FILE.setContents(tempFile.getAbsolutePath(), data.getBytes());
        assertTrue(tempFile.exists());
        // The written file must contain at least the payload's bytes.
        assertTrue(tempFile.length() > 5);
    }

    @Test
    public void findDirectoryRecursively() {
        File file = FILE.findDirectoryRecursively(new File("."), "data");
        assertNotNull(file);
        assertTrue(file.exists());
        assertTrue(file.isDirectory());
    }

    @Test
    public void findFileRecursivelyUp() {
        // Searches upward through at most two parent directories.
        File folder = new File(".").getAbsoluteFile();
        File pomFile = FILE.findFileRecursively(folder, 2, "mime-mapping.xml");
        assertNotNull(pomFile);
    }

    @Test
    public void findDirectoryRecursivelyUp() {
        File folder = new File(".").getAbsoluteFile();
        File textSentimentFolder = FILE.findDirectoryRecursively(folder, 2, "txt_sentoken");
        assertNotNull(textSentimentFolder);
    }

    @Test
    public void getContents() throws IOException {
        HttpClient httpClient = getHttpClient();
        try {
            HttpGet httpGet = new HttpGet("http://www.google.com");
            ResponseHandler<String> responseHandler = new ResponseHandler<String>() {
                @Override
                public String handleResponse(final HttpResponse response) {
                    try {
                        return FILE.getContents(response.getEntity().getContent(), Long.MAX_VALUE).toString();
                    } catch (final IOException e) {
                        throw new RuntimeException(e);
                    }
                }
            };
            String response = httpClient.execute(httpGet, responseHandler);
            assertTrue(!StringUtils.isEmpty(response));
        } catch (final UnknownHostException e) {
            // We ignore this exception as the machine could be offline
            logger.error("Machine offline?", e);
        }
    }

    @Test
    public void cleanFilePath() {
        // Strips the "file:" scheme prefix from a URL-style path.
        String filePath = FILE.cleanFilePath("file:/path/to/the/directory");
        assertEquals("/path/to/the/directory", filePath);
    }

    /**
     * NOTE: This tests needs to run in a directory where there is only one directory
     * that is called ikube. For example it will not work in a directory where the structure is
     * <pre>
     *     Workspace
     *         ikube
     *         ikube-bck
     * </pre>
     * because it will look in both ikube directories and probably will not get the correct file.
     */
    @Test
    public void relativeParent() {
        String dotFolderPath = FILE.cleanFilePath(new File(".").getAbsolutePath());
        File dotFolder = new File(dotFolderPath);
        File relative = FILE.relative(dotFolder, "../../");
        assertEquals(dotFolder.getParentFile().getParentFile(), relative);
    }

    @Test
    public void getOrCreateFile() {
        File file = null;
        try {
            String filePath = "./target/parent/file.txt";
            file = FILE.getOrCreateFile(filePath);
            assertNotNull(file);
            assertTrue(file.exists());
        } finally {
            FILE.deleteFile(file);
        }
    }

    @Test
    public void getOrCreateDirectory() {
        File directory = null;
        try {
            directory = FILE.getOrCreateDirectory(new File("./target/directory/file.txt").getParentFile());
            assertNotNull(directory);
            assertTrue(directory.exists() && directory.isDirectory());
        } finally {
            FILE.deleteFile(directory);
        }
    }

    @Test
    public void setContentsInputStream() {
        // Round-trip: read a fixture, write it back from a stream, compare contents.
        File file = FILE.findFileRecursively(new File("."), "csv-file-tools.csv");
        String inputContents = FILE.getContents(file, Integer.MAX_VALUE).toString();
        InputStream inputStream = new ByteArrayInputStream(inputContents.getBytes());
        FILE.setContents("target/csv-file-tools-written.csv", inputStream);
        File outputFile = FILE.findFileRecursively(new File("."), "csv-file-tools-written.csv");
        assertNotNull(outputFile);
        String outputContents = FILE.getContents(outputFile, Integer.MAX_VALUE).toString();
        assertEquals(inputContents, outputContents);
    }

    @Test
    public void findFileRecursivelyAndGetContents() {
        String contents = FILE.findFileRecursivelyAndGetContents(new File("."), "csv-file-tools.csv");
        assertNotNull(contents);
    }

    @Test
    public void moveUpDirectoriesToFolder() {
        File fromFolder = new File("./target");
        File ikubeFolder = FILE.moveUpDirectories(fromFolder, "ikube");
        assertEquals("ikube", ikubeFolder.getName());
        // Asking for the folder we are already in must return the same folder.
        File anotherIkubeFolder = FILE.moveUpDirectories(ikubeFolder, "ikube");
        assertEquals(anotherIkubeFolder, ikubeFolder);
    }

    // Client with automatic retry; used by the network test above.
    private HttpClient getHttpClient() {
        return new AutoRetryHttpClient();
    }

}
package ait.ffma.domain.preservation.riskmanagement; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlRootElement; import javax.xml.bind.annotation.XmlTransient; import ait.ffma.domain.BaseFfmaDomainObject; import ait.ffma.domain.FieldDefEnum; import ait.ffma.factory.ComponentNameConstants; /** * This class is a container for LOD format information retrieved from LOD repositories. */ @SuppressWarnings({"restriction" }) @XmlRootElement public class LODFormat extends BaseFfmaDomainObject { private static final long serialVersionUID = -8919702531027568996L; @XmlTransient public enum FieldsEnum implements FieldDefEnum { Index { public Class<?> evalType() { return Integer.class; } }, Repository { public Class<?> evalType() { return String.class; } }, Timestamp { public Class<?> evalType() { return String.class; } }, FormatName { public Class<?> evalType() { return String.class; } }, CurrentVersionReleaseDate { public Class<?> evalType() { return String.class; } }, SoftwareCount { public Class<?> evalType() { return Integer.class; } }, Software { public Class<?> evalType() { return String.class; } }, CurrentFormatVersion { public Class<?> evalType() { return String.class; } }, FormatLicense { public Class<?> evalType() { return String.class; } }, Limitations { public Class<?> evalType() { return String.class; } }, Puid { public Class<?> evalType() { return String.class; } }, FormatHomepage { public Class<?> evalType() { return String.class; } }, MimeType { public Class<?> evalType() { return String.class; } }, FormatGenre { public Class<?> evalType() { return String.class; } }, FormatCreator { public Class<?> evalType() { return String.class; } }, OpenFormat { public Class<?> evalType() { return String.class; } }, FileExtensions { public Class<?> evalType() { return String.class; } }, Vendors { public Class<?> evalType() { return String.class; } }, Standards { public Class<?> evalType() { return String.class; } }, RepositoryId { public 
Class<?> evalType() { return String[].class; } }, Description { public Class<?> evalType() { return String[].class; } }, SoftwareName { public Class<?> evalType() { return String[].class; } }, SoftwareId { public Class<?> evalType() { return String[].class; } }, VendorName { public Class<?> evalType() { return String[].class; } }, VendorId { public Class<?> evalType() { return String[].class; } }; public String evalName() { return this.name(); } } @XmlTransient public FieldDefEnum[] getFieldsEnum(){ return FieldsEnum.values(); } /** * Constructor by component name */ public LODFormat() { setFfmaObjectName(LODFormat.class.getSimpleName()); setComponentName(ComponentNameConstants.COMPONENT_PRESERVATION_RISKMANAGEMENT); } @XmlElement public Integer getIndex() { return Integer.valueOf((String) get(FieldsEnum.Index.name())); } public void setIndex(Integer index) { this.put(FieldsEnum.Index.name(), index); } @XmlElement public String getRepository() { return getString(FieldsEnum.Repository.name()); } public void setRepository(String value) { put(FieldsEnum.Repository.name(), value); } @XmlElement public String getTimestamp() { return getString(FieldsEnum.Timestamp.name()); } public void setTimestamp(String value) { put(FieldsEnum.Timestamp.name(), value); } @XmlElement public String getFormatName() { return getString(FieldsEnum.FormatName.name()); } public void setFormatName(String value) { put(FieldsEnum.FormatName.name(), value); } @XmlElement public String getCurrentVersionReleaseDate() { return getString(FieldsEnum.CurrentVersionReleaseDate.name()); } public void setCurrentVersionReleaseDate(String value) { put(FieldsEnum.CurrentVersionReleaseDate.name(), value); } @XmlElement public Integer getSoftwareCount() { Object res = get(FieldsEnum.SoftwareCount.name()); if (res != null) { if (res.getClass().equals(Integer.class)) { return (Integer) res; } else { return Integer.valueOf((String) get(FieldsEnum.SoftwareCount.name())); } } else { return null; } } public void 
setSoftwareCount(Integer value) { this.put(FieldsEnum.SoftwareCount.name(), value); } @XmlElement public String getSoftware() { return getString(FieldsEnum.Software.name()); } public void setSoftware(String value) { put(FieldsEnum.Software.name(), value); } @XmlElement public String getCurrentFormatVersion() { return getString(FieldsEnum.CurrentFormatVersion.name()); } public void setCurrentFormatVersion(String value) { put(FieldsEnum.CurrentFormatVersion.name(), value); } @XmlElement public String getFormatLicense() { return getString(FieldsEnum.FormatLicense.name()); } public void setFormatLicense(String value) { put(FieldsEnum.FormatLicense.name(), value); } @XmlElement public String getLimitations() { return getString(FieldsEnum.Limitations.name()); } public void setLimitations(String value) { put(FieldsEnum.Limitations.name(), value); } @XmlElement public String getPuid() { return getString(FieldsEnum.Puid.name()); } public void setPuid(String value) { put(FieldsEnum.Puid.name(), value); } @XmlElement public String getFormatHomepage() { return getString(FieldsEnum.FormatHomepage.name()); } public void setFormatHomepage(String value) { put(FieldsEnum.FormatHomepage.name(), value); } @XmlElement public String getMimeType() { return getString(FieldsEnum.MimeType.name()); } public void setMimeType(String value) { put(FieldsEnum.MimeType.name(), value); } @XmlElement public String getFormatGenre() { return getString(FieldsEnum.FormatGenre.name()); } public void setFormatGenre(String value) { put(FieldsEnum.FormatGenre.name(), value); } @XmlElement public String getFormatCreator() { return getString(FieldsEnum.FormatCreator.name()); } public void setFormatCreator(String value) { put(FieldsEnum.FormatCreator.name(), value); } @XmlElement public String getOpenFormat() { return getString(FieldsEnum.OpenFormat.name()); } public void setOpenFormat(String value) { put(FieldsEnum.OpenFormat.name(), value); } @XmlElement public String getFileExtensions() { return 
getString(FieldsEnum.FileExtensions.name()); } public void setFileExtensions(String value) { put(FieldsEnum.FileExtensions.name(), value); } @XmlElement public String getVendors() { return getString(FieldsEnum.Vendors.name()); } public void setVendors(String value) { put(FieldsEnum.Vendors.name(), value); } @XmlElement public String getStandards() { return getString(FieldsEnum.Standards.name()); } public void setStandards(String value) { put(FieldsEnum.Standards.name(), value); } @XmlElement public String[] getRepositoryId() { return toStringArray(get(FieldsEnum.RepositoryId.name())); } public void setRepositoryId(String[] value) { put(FieldsEnum.RepositoryId.name(), value); } @XmlElement public String[] getDescription() { return toStringArray(get(FieldsEnum.Description.name())); } public void setDescription(String[] value) { put(FieldsEnum.Description.name(), value); } @XmlElement public String[] getSoftwareName() { return toStringArray(get(FieldsEnum.SoftwareName.name())); } public void setSoftwareName(String[] value) { put(FieldsEnum.SoftwareName.name(), value); } @XmlElement public String[] getSoftwareId() { return toStringArray(get(FieldsEnum.SoftwareId.name())); } public void setSoftwareId(String[] value) { put(FieldsEnum.SoftwareId.name(), value); } @XmlElement public String[] getVendorName() { return toStringArray(get(FieldsEnum.VendorName.name())); } public void setVendorName(String[] value) { put(FieldsEnum.VendorName.name(), value); } @XmlElement public String[] getVendorId() { return toStringArray(get(FieldsEnum.VendorId.name())); } public void setVendorId(String[] value) { put(FieldsEnum.VendorId.name(), value); } }
package gnu.expr;
import java.io.*;
import gnu.bytecode.*;
import java.lang.reflect.Array;
import java.util.*;
import gnu.mapping.Table2D;
import gnu.mapping.Values;
/* #ifdef use:java.util.regex */
import java.util.regex.*;
/* #endif */

/** Manages the literals of a Compilation.
 * Implements ObjectOutput, because we use externalization to determine
 * how literals get compiled into code that re-creates the literal.
 */
public class LitTable implements ObjectOutput {

    // The compilation whose literals we manage, and its main class.
    Compilation comp;
    ClassType mainClass;

    /* #ifdef use:java.util.IdentityHashMap */
    IdentityHashMap literalTable = new IdentityHashMap(100);
    /* #else */
    // Hashtable literalTable = new Hashtable(100);
    /* #endif */

    /** A table mapping objects to public static final field literals.
     * When we a need a literal for a value that is an instance of some
     * class we automatically search the class for static fields.
     * We use a {@code Table2D} primarily to make use of weak references,
     * but we also use the 2nd argument:
     * {@code staticTable(value, null, defaultValue)} yields a Literal
     * if there is a public static final field for {@code value},
     * and {@code defaultValue} otherwise.
     * {@code staticTable(class, Boolean.TRUE, null) != null} if and only if
     * we have scanned {@code class} (a {@code java.lang.Class} object).
     */
    static Table2D staticTable = new Table2D (100);

    int literalsCount;

    /** Remembers literals to initialize (in <clinit>). */
    Literal literalsChain;

    public LitTable(Compilation comp) {
        this.comp = comp;
        this.mainClass = comp.mainClass;
    }

    /** Emit code re-creating all registered literals (two-pass; see below). */
    public void emit() throws IOException {
        // We use two passes.  The first generates the graph of
        // objects and how they are generated.
        // The second pass actually emits code.
        // The reason for using two passes is so we can detect cycles
        // and sharing using the first pass.  This generates better code:
        // If an object is only used once, and is not a top-level literal,
        // then we don't need to allocate a Field for it.  And if an object
        // does not cyclically depend on itself, we can allocate *and*
        // initialize using a single call, which generates better code.

        // Here is the first pass.
        for (Literal init = literalsChain; init != null; init = init.next) {
            writeObject(init.value);
        }

        // Here is the second pass.
        for (Literal init = literalsChain; init != null; init = init.next) {
            emit(init, true);
        }

        // For speedier garbage collection.
        literalTable = null;
        literalsCount = 0;
    }

    // Parallel stacks of constructor-argument values and their bytecode types,
    // filled by the write* methods during externalization (pass one).
    Object[] valueStack = new Object[20];
    Type[] typeStack = new Type[20];
    int stackPointer;

    // Push one (value, type) pair, growing both stacks in lock-step as needed.
    void push(Object value, Type type) {
        if (stackPointer >= valueStack.length) {
            Object[] newValues = new Object[2 * valueStack.length];
            Type[] newTypes = new Type[2 * typeStack.length];
            System.arraycopy(valueStack, 0, newValues, 0, stackPointer);
            System.arraycopy(typeStack, 0, newTypes, 0, stackPointer);
            valueStack = newValues;
            typeStack = newTypes;
        }
        valueStack[stackPointer] = value;
        typeStack[stackPointer] = type;
        stackPointer++;
    }

    void error(String msg) {
        throw new Error(msg);
    }

    public void flush() { }

    public void close() { }

    // Raw byte-oriented ObjectOutput methods are not supported: literals must
    // externalize themselves through the typed write* methods below.
    public void write(int b) throws IOException {
        error("cannot handle call to write(int) when externalizing literal");
    }

    public void writeBytes(String s) throws IOException {
        error("cannot handle call to writeBytes(String) when externalizing literal");
    }

    public void write(byte[] b) throws IOException {
        error("cannot handle call to write(byte[]) when externalizing literal");
    }

    public void write(byte[] b, int off, int len) throws IOException {
        error("cannot handle call to write(byte[],int,int) when externalizing literal");
    }

    // Typed primitive writers: each records one constructor argument.
    public void writeBoolean(boolean v) { push(new Boolean(v), Type.booleanType); }

    public void writeChar(int v) { push(new Character((char) v), Type.charType); }

    public void writeByte(int v) { push(new Byte((byte) v), Type.byteType); }

    public void writeShort(int v) { push(new Short((short) v), Type.shortType); }

    public void writeInt(int v) { push(new Integer(v), Type.intType); }

    public void writeLong(long v) { push(new Long(v), Type.longType); }

    public void writeFloat(float v) { push(new Float(v), Type.floatType); }

    public void writeDouble(double v) { push(new Double(v), Type.doubleType); }

    public void writeUTF(String v) { push(v, Type.string_type); }

    public void writeChars(String v) { push(v, Type.string_type); }

    /** Pass-one graph building: record how {@code obj} can be re-created,
     * detecting sharing and cycles via the WRITING/WRITTEN flags. */
    public void writeObject(Object obj) throws IOException {
        Literal lit = findLiteral(obj);
        // Usually a no-op, but if the literalTable is a Hashtable (rather
        // than an IdentityHashMap) then we might find a literal whose
        // value is equals to obj, but not identical.  This can lead to trouble,
        // e.g. if one is a Pair and the other is a PairWithPosition.
        /* #ifndef use:java.util.IdentityHashMap */
        // obj = lit.value;
        /* #endif */

        if ((lit.flags & (Literal.WRITTEN|Literal.WRITING)) != 0) {
            // It is referenced more than once, so we need a Field
            // to save the value.
            if (lit.field == null && obj != null && ! (obj instanceof String))
                lit.assign(this);
            if ((lit.flags & Literal.WRITTEN) == 0)
                lit.flags |= Literal.CYCLIC;
        } else {
            lit.flags |= Literal.WRITING;
            int oldStack = stackPointer;
            if (obj instanceof gnu.lists.FString
                && ((gnu.lists.FString) obj).size() < 65535) {
                // Optimization.
                push(obj.toString(), Type.string_type);
            } else if (obj instanceof Externalizable) {
                // The object describes its own reconstruction arguments by
                // calling back into our write* methods.
                ((Externalizable) obj).writeExternal(this);
            } else if (obj instanceof Object[]) {
                Object[] arr = (Object[]) obj;
                for (int i = 0;  i < arr.length;  i++) {
                    writeObject(arr[i]);
                }
            } else if (obj == null
                       || obj instanceof String || lit.type instanceof ArrayType) {
                // nothing to do
            } else if (obj instanceof java.math.BigInteger) {
                writeChars(obj.toString());
            } else if (obj instanceof java.math.BigDecimal) {
                java.math.BigDecimal dec = (java.math.BigDecimal) obj;
                /* #ifdef JAVA2 */
                writeObject(dec.unscaledValue());
                writeInt(dec.scale());
                /* #else */
                // writeChars(obj.toString());
                /* #endif */
            } else if (obj instanceof Integer)
                push(obj, Type.intType);
            else if (obj instanceof Short)
                push(obj, Type.shortType);
            else if (obj instanceof Byte)
                push(obj, Type.byteType);
            else if (obj instanceof Long)
                push(obj, Type.longType);
            else if (obj instanceof Double)
                push(obj, Type.doubleType);
            else if (obj instanceof Float)
                push(obj, Type.floatType);
            else if (obj instanceof Character)
                push(obj, Type.charType);
            else if (obj instanceof Class)
                push(obj, Type.java_lang_Class_type);
            /* #ifdef use:java.util.regex */
            else if (obj instanceof Pattern) {
                Pattern pat = (Pattern) obj;
                push(pat.pattern(), Type.string_type);
                push(Integer.valueOf(pat.flags()), Type.intType);
            }
            /* #endif */
            else
                error(obj.getClass().getName()+" does not implement Externalizable");
            // Everything pushed since oldStack belongs to this literal's
            // reconstruction argument list; snapshot and pop it.
            int nargs = stackPointer - oldStack;
            if (nargs == 0) {
                lit.argValues = gnu.mapping.Values.noArgs;
                lit.argTypes = Type.typeArray0;
            } else {
                lit.argValues = new Object[nargs];
                lit.argTypes = new Type[nargs];
                System.arraycopy(valueStack, oldStack, lit.argValues, 0, nargs);
                System.arraycopy(typeStack, oldStack, lit.argTypes, 0, nargs);
                stackPointer = oldStack;
            }
            lit.flags |= Literal.WRITTEN;
        }
        push(lit, lit.type);
    }

    /** Find (or create) the Literal for {@code value}, consulting existing
     * public static final fields of the value's class hierarchy first. */
    public Literal findLiteral (Object value) {
        if (value == null)
            return Literal.nullLiteral;
        Literal literal = (Literal) literalTable.get(value);
        if (literal != null)
            return literal;
        if (comp.immediate)
            return new Literal (value, this);
        Class valueClass = value.getClass();
        Type valueType = Type.make(valueClass);
        synchronized (staticTable) {
            literal = (Literal) staticTable.get(value, null, null);
            if ((literal == null || literal.value != value)
                && valueType instanceof ClassType) {
                // Add all the static final public fields to staticTable.
                int needed_mod = Access.STATIC | Access.FINAL | Access.PUBLIC;
                Class fldClass = valueClass;
                ClassType fldType = (ClassType) valueType;
                while (staticTable.get(fldClass, Boolean.TRUE, null) == null) {
                    // This is a convention to note that we've scanned valueType.
                    staticTable.put(fldClass, Boolean.TRUE, fldClass);
                    for (Field fld = fldType.getFields();
                         fld != null;  fld = fld.getNext()) {
                        if ((fld.getModifiers() & needed_mod) == needed_mod) {
                            try {
                                java.lang.reflect.Field rfld = fld.getReflectField();
                                Object litValue = rfld.get(null);
                                if (litValue == null
                                    || ! fldClass.isInstance(litValue))
                                    continue;
                                Literal lit = new Literal (litValue, fld, this);
                                staticTable.put(litValue, null, lit);
                                if (value == litValue)
                                    literal = lit;
                            } catch (Throwable ex) {
                                error("caught "+ex+" getting static field "+fld);
                            }
                        }
                    }
                    fldClass = fldClass.getSuperclass();
                    if (fldClass == null)
                        break;
                    fldType = (ClassType) Type.make(fldClass);
                }
            }
        }
        if (literal != null)
            literalTable.put(value, literal);
        else
            literal = new Literal (value, valueType, this);
        return literal;
    }

    /** Select a factory method/constructor of {@code type} matching the
     * literal's recorded argument list, allowing an (int-count, elements...)
     * tail to be collapsed into an array parameter. Returns null if ambiguous. */
    Method getMethod (ClassType type, String name, Literal literal, boolean isStatic) {
        Type[] argTypes = literal.argTypes;
        Method method = type.getDeclaredMethods();
        int argLength = argTypes.length;
        Method best = null;
        long bestArrayArgs = 0;
        boolean ambiguous = false;
        Type[] bParameters = null;
      methodLoop:
        for (;  method != null;  method = method.getNext()) {
            if (! name.equals(method.getName()))
                continue;
            boolean mstatic = method.getStaticFlag();
            if (isStatic != mstatic)
                continue;
            // One bit set for each array parameter.
            long arrayArgs = 0;
            Type[] mParameters = method.getParameterTypes();
            int iarg = 0;  int iparam = 0;
            for (;;  iarg++, iparam++) {
                if (iarg == argLength && iparam == mParameters.length) {
                    // Full match; keep it unless an earlier match is more specific.
                    if (best == null
                        || (bestArrayArgs != 0 && arrayArgs == 0)) {
                        best = method;
                        bParameters = mParameters;
                        bestArrayArgs = arrayArgs;
                    } else if (arrayArgs == 0) {
                        // Now see which of 'best' and 'method' is more specific.

                        // True if we know best cannot be the more specific.
                        boolean not1 = false;
                        // True if we know new method cannot be the more specific.
                        boolean not2 = false;
                        for (int j = argLength;  --j >= 0; ) {
                            int c = bParameters[j].compare(mParameters[j]);
                            if (c != 1) {
                                not2 = true;
                                if (not1)
                                    break;
                            }
                            if (c != -1) {
                                not1 = true;
                                if (not2)
                                    break;
                            }
                        }
                        if (not1) {
                            best = method;
                            bParameters = mParameters;
                        }
                        ambiguous = not1 && not2;
                    }
                    continue methodLoop;  // Look for other matches.
                }
                if (iarg == argLength || iparam == mParameters.length)
                    continue methodLoop;  // fail on this method
                Type aType = argTypes[iarg];
                Type pType = mParameters[iparam];
                if (aType.isSubtype(pType))
                    ;  // OK so far
                else if (pType instanceof ArrayType && iparam < 64
                         && (aType == Type.intType || aType == Type.shortType)) {
                    // The int argument is a count followed by that many
                    // elements, which may satisfy one array parameter.
                    int count = ((Number) literal.argValues[iarg]).intValue();
                    if (count < 0 && type.getName().equals("gnu.math.IntNum"))
                        count -= 0x80000000;  // special hack for IntNum.
                    Type elementType = ((ArrayType) pType).getComponentType();
                    if (count < 0 || iarg + count >= argLength)
                        continue methodLoop;  // fail on this method
                    else {
                        for (int j = count;  --j >= 0; ) {
                            Type t = argTypes[iarg + j + 1];
                            if (elementType instanceof PrimType
                                ? elementType.getSignature() != t.getSignature()
                                : ! t.isSubtype(elementType))
                                continue methodLoop;  // fail on this method
                        }
                        iarg += count;
                        arrayArgs |= 1 << iparam;
                    }
                } else {
                    continue methodLoop;  // fail on this method
                }
            }
        }
        if (ambiguous)
            return null;
        if (bestArrayArgs != 0) {
            // Re-pack the flat (count, elements...) argument runs into actual
            // array literals matching the chosen method's parameters.
            Object[] args = new Object[bParameters.length];
            Type[] types = new Type[bParameters.length];
            int iarg = 0;  int iparam = 0;
            for (;;  iarg++, iparam++) {
                if (iarg == argLength)
                    break;
                Type pType = bParameters[iparam];
                if ((bestArrayArgs & (1 << iparam)) == 0) {
                    args[iparam] = literal.argValues[iarg];
                    types[iparam] = literal.argTypes[iarg];
                } else {
                    int count = ((Number) literal.argValues[iarg]).intValue();
                    boolean isIntNum = type.getName().equals("gnu.math.IntNum");
                    if (isIntNum)
                        count -= 0x80000000;  // special hack for IntNum.
                    Type elementType = ((ArrayType) pType).getComponentType();
                    types[iparam] = pType;
                    args[iparam] = Array.newInstance(elementType.getReflectClass(), count);
                    Object[] argValues = literal.argValues;
                    if (isIntNum) {
                        // Special kludge for IntNum:  words are Externalized
                        // in big-endian (network) order, but the representation
                        // is little-endian.
                        int[] arr = (int[]) args[iparam];
                        for (int j = count;  j > 0;  j--)
                            arr[count - j] = ((Integer) argValues[iarg + j]).intValue();
                    } else {
                        for (int j = count;  --j >= 0; )
                            Array.set(args[iparam], j, argValues[iarg + 1 + j]);
                    }
                    Literal arrayLiteral = new Literal(args[iparam], pType);
                    if (elementType instanceof ObjectType)
                        arrayLiteral.argValues = (Object[]) args[iparam];
                    args[iparam] = arrayLiteral;
                    iarg += count;
                }
            }
            literal.argValues = args;
            literal.argTypes = types;
        }
        return best;
    }

    // Emit code pushing each recorded argument of the literal onto the JVM stack.
    void putArgs(Literal literal, CodeAttr code) {
        Type[] argTypes = literal.argTypes;
        int len = argTypes.length;
        for (int i = 0;  i < len;  i++) {
            Object value = literal.argValues[i];
            if (value instanceof Literal)
                emit((Literal) value, false);
            else
                comp.compileConstant(value, new StackTarget(argTypes[i]));
        }
    }

    // If the literal has a Field, save the top-of-stack value into it
    // (duplicating first unless the value is no longer needed).
    private void store (Literal literal, boolean ignore, CodeAttr code) {
        if (literal.field != null) {
            if (! ignore)
                code.emitDup(literal.type);
            code.emitPutStatic(literal.field);
        }
        literal.flags |= Literal.EMITTED;
    }

    /** Pass-two code generation: emit bytecode that re-creates the literal.
     * If {@code ignore} is true, the value need not be left on the stack. */
    void emit(Literal literal, boolean ignore) {
        CodeAttr code = comp.getCode();
        if (literal.value == null) {
            if (! ignore)
                code.emitPushNull();
        } else if (literal.value instanceof String) {
            if (! ignore)
                code.emitPushString(literal.value.toString ());
        } else if ((literal.flags & Literal.EMITTED) != 0) {
            // Already materialized into its static field; just re-load it.
            if (! ignore)
                code.emitGetStatic(literal.field);
        } else if (literal.value instanceof Object[]) {
            int len = literal.argValues.length;
            Type elementType = ((ArrayType) literal.type).getComponentType();
            code.emitPushInt(len);
            code.emitNewArray(elementType);
            store(literal, ignore, code);
            for (int i = 0;  i < len;  i++) {
                Literal el = (Literal) literal.argValues[i];
                if (el.value == null)
                    continue;
                code.emitDup(elementType);
                code.emitPushInt(i);
                emit(el, false);
                code.emitArrayStore(elementType);
            }
        } else if (literal.type instanceof ArrayType) {
            code.emitPushPrimArray(literal.value, (ArrayType) literal.type);
            store(literal, ignore, code);
        } else if (literal.value instanceof Class) {
            Class clas = (Class) literal.value;
            if (clas.isPrimitive()) {
                // Primitive classes are reached via their wrapper's TYPE field.
                String cname = clas.getName();
                if (cname.equals("int"))
                    cname = "integer";
                cname = "java.lang."
                    +Character.toUpperCase(cname.charAt(0))
                    +cname.substring(1);
                code.emitGetStatic(ClassType.make(cname).getDeclaredField("TYPE"));
            } else
                comp.loadClassRef((ObjectType)Type.make(clas));
            store(literal, ignore, code);
        } else if (literal.value instanceof ClassType
                   && ! ((ClassType) literal.value).isExisting()) {
            // We need to special case ClassTypes that are (currently)
            // non-existing, because the corresponding reflective Class
            // needs to be loaded using the correct ClassLoader.
            comp.loadClassRef((ClassType) literal.value);
            Method meth = Compilation.typeType.getDeclaredMethod("valueOf", 1);
            if (meth == null)
                meth = Compilation.typeType.getDeclaredMethod("make", 1);
            code.emitInvokeStatic(meth);
            code.emitCheckcast(Compilation.typeClassType);
            store(literal, ignore, code);
        } else {
            ClassType type = (ClassType) literal.type;
            // Cyclic literals must be allocated first (default-init) and
            // filled in afterwards via a "set" method.
            boolean useDefaultInit = (literal.flags & Literal.CYCLIC) != 0;
            Method method = null;
            boolean makeStatic = false;
            if (! useDefaultInit) {
                // Look for matching "valueOf" or "make" method.
                // (For backward compatibility for we prefer Symbol's 'make'
                // method over 'valueOf' - they differ in argument order.)
                if (! (literal.value instanceof gnu.mapping.Symbol))
                    method = getMethod(type, "valueOf", literal, true);
                if (method == null
                    // Values.make has return type Object, so use the constructor.
                    && ! (literal.value instanceof Values)) {
                    String mname = "make";
                    /* #ifdef use:java.util.regex */
                    if (literal.value instanceof Pattern)
                        mname = "compile";
                    /* #endif */
                    method = getMethod(type, mname, literal, true);
                }
                // otherwise look for matching constructor;
                if (method != null)
                    makeStatic = true;
                else if (literal.argTypes.length > 0)
                    method = getMethod(type, "<init>", literal, false);
                if (method == null)
                    useDefaultInit = true;
            }
            if (useDefaultInit) {
                method = getMethod(type, "set", literal, false);
                // otherwise error;
            }
            if (method == null && literal.argTypes.length > 0)
                error("no method to construct "+literal.type);
            if (makeStatic) {
                putArgs(literal, code);
                code.emitInvokeStatic(method);
            } else if (useDefaultInit) {
                code.emitNew(type);
                code.emitDup(type);
                Method init0 = type.getDeclaredMethod("<init>", 0);
                code.emitInvokeSpecial(init0);
            } else {
                code.emitNew(type);
                code.emitDup(type);
                putArgs(literal, code);
                code.emitInvokeSpecial(method);
            }
            // Honor readResolve() so the emitted code yields the canonical
            // instance (as Java serialization would).
            Method resolveMethod = makeStatic || literal.value instanceof Values ? null
                : type.getDeclaredMethod("readResolve", 0);
            if (resolveMethod != null) {
                code.emitInvokeVirtual(resolveMethod);
                type.emitCoerceFromObject(code);
            }
            store(literal, ignore && ! (useDefaultInit && method != null), code);
            if (useDefaultInit && method != null) {
                if (! ignore)
                    code.emitDup(type);
                putArgs(literal, code);
                code.emitInvokeVirtual(method);
            }
        }
    }
}
/** * Copyright 2010 JBoss Inc * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jbpm.bpmn2.xml; import static org.jbpm.bpmn2.xml.ProcessHandler.*; import java.util.ArrayList; import java.util.List; import java.util.Map; import org.drools.core.xml.ExtensibleXmlParser; import org.jbpm.bpmn2.core.Error; import org.jbpm.bpmn2.core.Escalation; import org.jbpm.bpmn2.core.Message; import org.jbpm.compiler.xml.ProcessBuildData; import org.jbpm.workflow.core.DroolsAction; import org.jbpm.workflow.core.Node; import org.jbpm.workflow.core.NodeContainer; import org.jbpm.workflow.core.impl.DroolsConsequenceAction; import org.jbpm.workflow.core.node.EndNode; import org.jbpm.workflow.core.node.FaultNode; import org.w3c.dom.Element; import org.xml.sax.Attributes; import org.xml.sax.SAXException; public class EndEventHandler extends AbstractNodeHandler { protected Node createNode(Attributes attrs) { EndNode node = new EndNode(); node.setTerminate(false); return node; } @SuppressWarnings("unchecked") public Class generateNodeFor() { return EndNode.class; } public Object end(final String uri, final String localName, final ExtensibleXmlParser parser) throws SAXException { final Element element = parser.endElementBuilder(); Node node = (Node) parser.getCurrent(); // determine type of event definition, so the correct type of node // can be generated super.handleNode(node, element, uri, localName, parser); org.w3c.dom.Node xmlNode = element.getFirstChild(); while (xmlNode != null) { String 
nodeName = xmlNode.getNodeName(); if ("terminateEventDefinition".equals(nodeName)) { // reuse already created EndNode handleTerminateNode(node, element, uri, localName, parser); break; } else if ("signalEventDefinition".equals(nodeName)) { handleSignalNode(node, element, uri, localName, parser); } else if ("messageEventDefinition".equals(nodeName)) { handleMessageNode(node, element, uri, localName, parser); } else if ("errorEventDefinition".equals(nodeName)) { // create new faultNode FaultNode faultNode = new FaultNode(); faultNode.setId(node.getId()); faultNode.setName(node.getName()); faultNode.setTerminateParent(true); faultNode.setMetaData("UniqueId", node.getMetaData().get("UniqueId")); node = faultNode; super.handleNode(node, element, uri, localName, parser); handleErrorNode(node, element, uri, localName, parser); break; } else if ("escalationEventDefinition".equals(nodeName)) { // create new faultNode FaultNode faultNode = new FaultNode(); faultNode.setId(node.getId()); faultNode.setName(node.getName()); faultNode.setMetaData("UniqueId", node.getMetaData().get("UniqueId")); node = faultNode; super.handleNode(node, element, uri, localName, parser); handleEscalationNode(node, element, uri, localName, parser); break; } else if ("compensateEventDefinition".equals(nodeName)) { // reuse already created ActionNode handleThrowCompensationEventNode(node, element, uri, localName, parser); break; } xmlNode = xmlNode.getNextSibling(); } NodeContainer nodeContainer = (NodeContainer) parser.getParent(); nodeContainer.addNode(node); return node; } public void handleTerminateNode(final Node node, final Element element, final String uri, final String localName, final ExtensibleXmlParser parser) throws SAXException { ((EndNode) node).setTerminate(true); EndNode endNode = (EndNode) node; org.w3c.dom.Node xmlNode = element.getFirstChild(); while (xmlNode != null) { String nodeName = xmlNode.getNodeName(); if ("terminateEventDefinition".equals(nodeName)) { String scope = 
((Element) xmlNode).getAttribute("scope"); if ("process".equalsIgnoreCase(scope)) { endNode.setScope(EndNode.PROCESS_SCOPE); } else { endNode.setScope(EndNode.CONTAINER_SCOPE); } } xmlNode = xmlNode.getNextSibling(); } } public void handleSignalNode(final Node node, final Element element, final String uri, final String localName, final ExtensibleXmlParser parser) throws SAXException { EndNode endNode = (EndNode) node; org.w3c.dom.Node xmlNode = element.getFirstChild(); while (xmlNode != null) { String nodeName = xmlNode.getNodeName(); if ("dataInput".equals(nodeName)) { String id = ((Element) xmlNode).getAttribute("id"); String inputName = ((Element) xmlNode).getAttribute("name"); dataInputs.put(id, inputName); } else if ("dataInputAssociation".equals(nodeName)) { readEndDataInputAssociation(xmlNode, endNode); } else if ("signalEventDefinition".equals(nodeName)) { String signalName = ((Element) xmlNode).getAttribute("signalRef"); String variable = (String) endNode.getMetaData("MappingVariable"); // check if signal should be send async if (dataInputs.containsValue("async")) { signalName = "ASYNC-" + signalName; } String signalExpression = getSignalExpression(endNode); List<DroolsAction> actions = new ArrayList<DroolsAction>(); actions.add(new DroolsConsequenceAction("mvel", signalExpression + signalName + "\", " + (variable == null ? 
"null" : variable) + ")")); endNode.setActions(EndNode.EVENT_NODE_ENTER, actions); } xmlNode = xmlNode.getNextSibling(); } } @SuppressWarnings("unchecked") public void handleMessageNode(final Node node, final Element element, final String uri, final String localName, final ExtensibleXmlParser parser) throws SAXException { EndNode endNode = (EndNode) node; org.w3c.dom.Node xmlNode = element.getFirstChild(); while (xmlNode != null) { String nodeName = xmlNode.getNodeName(); if ("dataInputAssociation".equals(nodeName)) { readEndDataInputAssociation(xmlNode, endNode); } else if ("messageEventDefinition".equals(nodeName)) { String messageRef = ((Element) xmlNode).getAttribute("messageRef"); Map<String, Message> messages = (Map<String, Message>) ((ProcessBuildData) parser.getData()).getMetaData("Messages"); if (messages == null) { throw new IllegalArgumentException("No messages found"); } Message message = messages.get(messageRef); if (message == null) { throw new IllegalArgumentException("Could not find message " + messageRef); } String variable = (String) endNode.getMetaData("MappingVariable"); endNode.setMetaData("MessageType", message.getType()); List<DroolsAction> actions = new ArrayList<DroolsAction>(); actions.add(new DroolsConsequenceAction("java", "org.drools.core.process.instance.impl.WorkItemImpl workItem = new org.drools.core.process.instance.impl.WorkItemImpl();" + EOL + "workItem.setName(\"Send Task\");" + EOL + "workItem.setParameter(\"MessageType\", \"" + message.getType() + "\");" + EOL + (variable == null ? 
"" : "workItem.setParameter(\"Message\", " + variable + ");" + EOL) + "((org.drools.core.process.instance.WorkItemManager) kcontext.getKnowledgeRuntime().getWorkItemManager()).internalExecuteWorkItem(workItem);")); endNode.setActions(EndNode.EVENT_NODE_ENTER, actions); } xmlNode = xmlNode.getNextSibling(); } } protected void readEndDataInputAssociation(org.w3c.dom.Node xmlNode, EndNode endNode) { // sourceRef org.w3c.dom.Node subNode = xmlNode.getFirstChild(); String eventVariable = subNode.getTextContent(); if (eventVariable != null && eventVariable.trim().length() > 0) { endNode.setMetaData("MappingVariable", eventVariable); } } @SuppressWarnings("unchecked") public void handleErrorNode(final Node node, final Element element, final String uri, final String localName, final ExtensibleXmlParser parser) throws SAXException { FaultNode faultNode = (FaultNode) node; org.w3c.dom.Node xmlNode = element.getFirstChild(); while (xmlNode != null) { String nodeName = xmlNode.getNodeName(); if ("dataInputAssociation".equals(nodeName)) { readFaultDataInputAssociation(xmlNode, faultNode); } else if ("errorEventDefinition".equals(nodeName)) { String errorRef = ((Element) xmlNode).getAttribute("errorRef"); if (errorRef != null && errorRef.trim().length() > 0) { List<Error> errors = (List<Error>) ((ProcessBuildData) parser.getData()).getMetaData("Errors"); if (errors == null) { throw new IllegalArgumentException("No errors found"); } Error error = null; for( Error listError: errors ) { if( errorRef.equals(listError.getId()) ) { error = listError; break; } } if (error == null) { throw new IllegalArgumentException("Could not find error " + errorRef); } faultNode.setFaultName(error.getErrorCode()); faultNode.setTerminateParent(true); } } xmlNode = xmlNode.getNextSibling(); } } @SuppressWarnings("unchecked") public void handleEscalationNode(final Node node, final Element element, final String uri, final String localName, final ExtensibleXmlParser parser) throws SAXException { 
FaultNode faultNode = (FaultNode) node; org.w3c.dom.Node xmlNode = element.getFirstChild(); while (xmlNode != null) { String nodeName = xmlNode.getNodeName(); if ("dataInputAssociation".equals(nodeName)) { readFaultDataInputAssociation(xmlNode, faultNode); } else if ("escalationEventDefinition".equals(nodeName)) { String escalationRef = ((Element) xmlNode).getAttribute("escalationRef"); if (escalationRef != null && escalationRef.trim().length() > 0) { Map<String, Escalation> escalations = (Map<String, Escalation>) ((ProcessBuildData) parser.getData()).getMetaData(ProcessHandler.ESCALATIONS); if (escalations == null) { throw new IllegalArgumentException("No escalations found"); } Escalation escalation = escalations.get(escalationRef); if (escalation == null) { throw new IllegalArgumentException("Could not find escalation " + escalationRef); } faultNode.setFaultName(escalation.getEscalationCode()); } else { // BPMN2 spec, p. 83: end event's with <escalationEventDefintions> // are _required_ to reference a specific escalation(-code). throw new IllegalArgumentException("End events throwing an escalation must throw *specific* escalations (and not general ones)."); } } xmlNode = xmlNode.getNextSibling(); } } protected void readFaultDataInputAssociation(org.w3c.dom.Node xmlNode, FaultNode faultNode) { // sourceRef org.w3c.dom.Node subNode = xmlNode.getFirstChild(); String faultVariable = subNode.getTextContent(); faultNode.setFaultVariable(faultVariable); } public void writeNode(Node node, StringBuilder xmlDump, int metaDataType) { throw new IllegalArgumentException("Writing out should be handled by specific handlers"); } }
package nam.model.module;

import java.io.Serializable;
import java.util.Collection;
import java.util.List;

import javax.enterprise.context.SessionScoped;
import javax.enterprise.event.Observes;
import javax.inject.Inject;
import javax.inject.Named;

import org.aries.runtime.BeanContext;
import org.aries.ui.AbstractDomainListManager;
import org.aries.ui.event.Cancelled;
import org.aries.ui.event.Export;
import org.aries.ui.event.Refresh;
import org.aries.ui.manager.ExportManager;

import nam.model.Application;
import nam.model.Module;
import nam.model.ModuleType;
import nam.model.Project;
import nam.model.Service;
import nam.model.service.ServiceListObject;
import nam.model.util.ModuleUtil;
import nam.model.util.ProjectUtil;
import nam.ui.design.SelectionContext;
import nam.ui.design.WorkspaceManager;

/**
 * Session-scoped list manager for CLIENT-type {@link Module} records.
 * Backs the "moduleList" UI component: supplies the row model, tracks the
 * current selection, and routes view/edit/remove actions to the injected
 * data/info managers. Inherited state such as {@code recordList},
 * {@code recordByKeyMap}, {@code selectedRecord} and {@code selectedRecordKey}
 * comes from {@link AbstractDomainListManager}.
 */
@SessionScoped
@Named("clientModuleListManager")
public class ModuleListManager_Client extends AbstractDomainListManager<Module, ModuleListObject> implements Serializable {

    @Inject
    private ModuleDataManager moduleDataManager;

    @Inject
    private ModuleEventManager moduleEventManager;

    @Inject
    private ModuleInfoManager moduleInfoManager;

    @Inject
    private SelectionContext selectionContext;

    /** Component id used by the UI layer to address this list. */
    @Override
    public String getClientId() {
        return "moduleList";
    }

    @Override
    public String getTitle() {
        return "Module List";
    }

    /** Stable lookup key for a module row (delegated to ModuleUtil). */
    @Override
    public Object getRecordKey(Module module) {
        return ModuleUtil.getKey(module);
    }

    /** Human-readable label for a module row (delegated to ModuleUtil). */
    @Override
    public String getRecordName(Module module) {
        return ModuleUtil.getLabel(module);
    }

    @Override
    protected Class<Module> getRecordClass() {
        return Module.class;
    }

    @Override
    protected Module getRecord(ModuleListObject rowObject) {
        return rowObject.getModule();
    }

    @Override
    public Module getSelectedRecord() {
        return super.getSelectedRecord();
    }

    /** Label of the current selection, or null when nothing is selected. */
    public String getSelectedRecordLabel() {
        return selectedRecord != null ? ModuleUtil.getLabel(selectedRecord) : null;
    }

    /** Sets the selection and broadcasts a selected-event to observers. */
    @Override
    public void setSelectedRecord(Module module) {
        super.setSelectedRecord(module);
        fireSelectedEvent(module);
    }

    protected void fireSelectedEvent(Module module) {
        moduleEventManager.fireSelectedEvent(module);
    }

    /** True when the given module equals the "module" selection in context. */
    public boolean isSelected(Module module) {
        Module selection = selectionContext.getSelection("module");
        boolean selected = selection != null && selection.equals(module);
        return selected;
    }

    /** Wraps a module in its row object, carrying the selected flag. */
    @Override
    protected ModuleListObject createRowObject(Module module) {
        ModuleListObject listObject = new ModuleListObject(module);
        listObject.setSelected(isSelected(module));
        return listObject;
    }

    @Override
    public void reset() {
        refresh();
    }

    /** Reuses the cached record list when present, otherwise reloads. */
    @Override
    public void initialize() {
        if (recordList != null)
            initialize(recordList);
        else refreshModel();
    }

    @Override
    public void refreshModel() {
        refreshModel(createRecordList());
    }

    /**
     * Loads CLIENT modules from the data manager. Falls back to the previous
     * {@code recordList} when the query yields null.
     * NOTE(review): returns null after an exception — callers appear to
     * tolerate this, but verify refreshModel handles a null collection.
     */
    @Override
    protected Collection<Module> createRecordList() {
        try {
            Collection<Module> moduleList = moduleDataManager.getModuleList(ModuleType.CLIENT);
            if (moduleList != null)
                return moduleList;
            return recordList;
        } catch (Exception e) {
            handleException(e);
            return null;
        }
    }

    /** Navigates to the view page for the current selection. */
    public String viewModule() {
        return viewModule(selectedRecordKey);
    }

    public String viewModule(Object recordKey) {
        Module module = recordByKeyMap.get(recordKey);
        return viewModule(module);
    }

    public String viewModule(Module module) {
        String url = moduleInfoManager.viewModule(module);
        return url;
    }

    /** Navigates to the edit page for the current selection. */
    public String editModule() {
        return editModule(selectedRecordKey);
    }

    public String editModule(Object recordKey) {
        Module module = recordByKeyMap.get(recordKey);
        return editModule(module);
    }

    public String editModule(Module module) {
        String url = moduleInfoManager.editModule(module);
        return url;
    }

    public void removeModule() {
        removeModule(selectedRecordKey);
    }

    public void removeModule(Object recordKey) {
        Module module = recordByKeyMap.get(recordKey);
        removeModule(module);
    }

    /**
     * Removes the module via the data manager; clears the selection only on
     * success. NOTE(review): refresh() runs even when removal returned false
     * — confirm that is intentional.
     */
    public void removeModule(Module module) {
        try {
            if (moduleDataManager.removeModule(module))
                clearSelection();
            refresh();
        } catch (Exception e) {
            handleException(e);
        }
    }

    /**
     * CDI observer: when an edit is cancelled, rebuilds the row model from
     * the cached records and drops the session-scoped "module" bean.
     */
    public void cancelModule(@Observes @Cancelled Module module) {
        try {
            //Object key = ModuleUtil.getKey(module);
            //recordByKeyMap.put(key, module);
            initialize(recordByKeyMap.values());
            BeanContext.removeFromSession("module");
        } catch (Exception e) {
            handleException(e);
        }
    }

    public boolean validateModule(Collection<Module> moduleList) {
        return ModuleUtil.validate(moduleList);
    }

    /** CDI observer: exports the identified table to XLS via ExportManager. */
    public void exportModuleList(@Observes @Export String tableId) {
        //String tableId = "pageForm:moduleListTable";
        ExportManager exportManager = BeanContext.getFromSession("org.aries.exportManager");
        exportManager.exportToXLS(tableId);
    }

}
/*
 * Copyright 2020 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.cloud.spanner.connection;

import com.google.api.core.InternalApi;
import com.google.cloud.spanner.Dialect;
import com.google.cloud.spanner.ErrorCode;
import com.google.cloud.spanner.SpannerExceptionFactory;
import com.google.common.base.Preconditions;
import java.util.HashSet;
import java.util.Set;
import javax.annotation.Nullable;

/**
 * Statement parser for the PostgreSQL dialect. Handles PostgreSQL-specific
 * lexical features: nested block comments, dollar-quoted strings, doubled
 * quote escapes, and $1-style positional query parameters.
 */
@InternalApi
public class PostgreSQLStatementParser extends AbstractStatementParser {
  PostgreSQLStatementParser() {
    super(Dialect.POSTGRESQL);
  }

  /**
   * Indicates whether the parser supports the {@code EXPLAIN} clause. The PostgreSQL parser does
   * not support it.
   */
  @Override
  protected boolean supportsExplain() {
    return false;
  }

  /**
   * Removes comments from and trims the given sql statement. PostgreSQL supports two types of
   * comments:
   *
   * <ul>
   *   <li>Single line comments starting with '--'
   *   <li>Multi line comments between '/&#42;' and '&#42;/'. Nested comments are supported and all
   *       comments, including the nested comments, must be terminated.
   * </ul>
   *
   * Reference: https://www.postgresql.org/docs/current/sql-syntax-lexical.html#SQL-SYNTAX-COMMENTS
   *
   * @param sql The sql statement to remove comments from and to trim.
   * @return the sql statement without the comments and leading and trailing spaces.
   */
  @InternalApi
  @Override
  String removeCommentsAndTrimInternal(String sql) {
    Preconditions.checkNotNull(sql);
    boolean isInSingleLineComment = false;
    // Depth counter: PostgreSQL block comments may nest, so a single boolean
    // is not enough.
    int multiLineCommentLevel = 0;
    StringBuilder res = new StringBuilder(sql.length());
    int index = 0;
    while (index < sql.length()) {
      char c = sql.charAt(index);
      if (isInSingleLineComment) {
        if (c == '\n') {
          isInSingleLineComment = false;
          // Include the line feed in the result.
          res.append(c);
        }
      } else if (multiLineCommentLevel > 0) {
        if (sql.length() > index + 1 && c == ASTERISK && sql.charAt(index + 1) == SLASH) {
          multiLineCommentLevel--;
          index++;
        } else if (sql.length() > index + 1 && c == SLASH && sql.charAt(index + 1) == ASTERISK) {
          multiLineCommentLevel++;
          index++;
        }
      } else {
        // Check for -- which indicates the start of a single-line comment.
        if (sql.length() > index + 1 && c == HYPHEN && sql.charAt(index + 1) == HYPHEN) {
          // This is a single line comment.
          isInSingleLineComment = true;
          index += 2;
          continue;
        } else if (sql.length() > index + 1 && c == SLASH && sql.charAt(index + 1) == ASTERISK) {
          multiLineCommentLevel++;
          index += 2;
          continue;
        } else {
          // Not a comment: skip() copies quoted/dollar-quoted literals
          // verbatim so comment markers inside them are ignored.
          index = skip(sql, index, res);
          continue;
        }
      }
      index++;
    }
    if (multiLineCommentLevel > 0) {
      throw SpannerExceptionFactory.newSpannerException(
          ErrorCode.INVALID_ARGUMENT,
          "SQL statement contains an unterminated block comment: " + sql);
    }
    // Strip a single trailing statement terminator.
    if (res.length() > 0 && res.charAt(res.length() - 1) == ';') {
      res.deleteCharAt(res.length() - 1);
    }
    return res.toString().trim();
  }

  /**
   * Attempts to read a dollar-quote tag starting at {@code index} (the char
   * after the opening '$'). Returns the tag (possibly empty for "$$") when a
   * closing '$' is found, or null when the text is not a dollar quote.
   */
  String parseDollarQuotedString(String sql, int index) {
    // Look ahead to the next dollar sign (if any). Everything in between is the quote tag.
    StringBuilder tag = new StringBuilder();
    while (index < sql.length()) {
      char c = sql.charAt(index);
      if (c == DOLLAR) {
        return tag.toString();
      }
      if (!Character.isJavaIdentifierPart(c)) {
        break;
      }
      tag.append(c);
      index++;
    }
    return null;
  }

  /** PostgreSQL does not support statement hints. */
  @Override
  String removeStatementHint(String sql) {
    return sql;
  }

  /**
   * Rewrites JDBC-style positional parameters (e.g. '?') to PostgreSQL
   * numbered parameters ($1, $2, ...), skipping occurrences inside quoted
   * literals. Returns the rewritten SQL plus the parameter count.
   */
  @InternalApi
  @Override
  ParametersInfo convertPositionalParametersToNamedParametersInternal(char paramChar, String sql) {
    Preconditions.checkNotNull(sql);
    final String namedParamPrefix = "$";
    StringBuilder named = new StringBuilder(sql.length() + countOccurrencesOf(paramChar, sql));
    int index = 0;
    int paramIndex = 1;
    while (index < sql.length()) {
      char c = sql.charAt(index);
      if (c == paramChar) {
        named.append(namedParamPrefix).append(paramIndex);
        paramIndex++;
        index++;
      } else {
        index = skip(sql, index, named);
      }
    }
    return new ParametersInfo(paramIndex - 1, named.toString());
  }

  /**
   * Note: This is an internal API and breaking changes can be made without prior notice.
   *
   * <p>Returns the PostgreSQL-style query parameters ($1, $2, ...) in the given SQL string. The
   * SQL-string is assumed to not contain any comments. Use {@link #removeCommentsAndTrim(String)}
   * to remove all comments before calling this method. Occurrences of query-parameter like strings
   * inside quoted identifiers or string literals are ignored.
   *
   * <p>The following example will return a set containing ("$1", "$2"). <code>
   * select col1, col2, "col$4"
   * from some_table
   * where col1=$1 and col2=$2
   * and not col3=$1 and col4='$3'
   * </code>
   *
   * @param sql the SQL-string to check for parameters. Must not contain comments.
   * @return A set containing all the parameters in the SQL-string.
   */
  @InternalApi
  public Set<String> getQueryParameters(String sql) {
    Preconditions.checkNotNull(sql);
    int maxCount = countOccurrencesOf('$', sql);
    Set<String> parameters = new HashSet<>(maxCount);
    int currentIndex = 0;
    // A parameter needs at least '$' plus one digit, hence length() - 1.
    while (currentIndex < sql.length() - 1) {
      char c = sql.charAt(currentIndex);
      if (c == '$' && Character.isDigit(sql.charAt(currentIndex + 1))) {
        // Look ahead for the first non-digit. That is the end of the query parameter.
        int endIndex = currentIndex + 2;
        while (endIndex < sql.length() && Character.isDigit(sql.charAt(endIndex))) {
          endIndex++;
        }
        parameters.add(sql.substring(currentIndex, endIndex));
        currentIndex = endIndex;
      } else {
        currentIndex = skip(sql, currentIndex, null);
      }
    }
    return parameters;
  }

  /**
   * Advances past the character at {@code currentIndex}, copying it (and any
   * quoted literal it starts) into {@code result} when non-null. Returns the
   * index of the first character after the skipped region.
   */
  private int skip(String sql, int currentIndex, @Nullable StringBuilder result) {
    char currentChar = sql.charAt(currentIndex);
    if (currentChar == SINGLE_QUOTE || currentChar == DOUBLE_QUOTE) {
      appendIfNotNull(result, currentChar);
      return skipQuoted(sql, currentIndex, currentChar, result);
    } else if (currentChar == DOLLAR) {
      String dollarTag = parseDollarQuotedString(sql, currentIndex + 1);
      if (dollarTag != null) {
        appendIfNotNull(result, currentChar, dollarTag, currentChar);
        return skipQuoted(
            sql, currentIndex + dollarTag.length() + 1, currentChar, dollarTag, result);
      }
    }
    appendIfNotNull(result, currentChar);
    return currentIndex + 1;
  }

  private int skipQuoted(
      String sql, int startIndex, char startQuote, @Nullable StringBuilder result) {
    return skipQuoted(sql, startIndex, startQuote, null, result);
  }

  /**
   * Skips to the end of a quoted literal that starts at {@code startIndex}.
   * Handles doubled-quote escapes ('' / ""), backslash escapes, and — when
   * {@code dollarTag} is non-null — dollar-quoted strings terminated by the
   * matching $tag$. Throws when the literal is never closed.
   */
  private int skipQuoted(
      String sql,
      int startIndex,
      char startQuote,
      String dollarTag,
      @Nullable StringBuilder result) {
    boolean lastCharWasEscapeChar = false;
    int currentIndex = startIndex + 1;
    while (currentIndex < sql.length()) {
      char currentChar = sql.charAt(currentIndex);
      if (currentChar == startQuote) {
        if (currentChar == DOLLAR) {
          // Check if this is the end of the current dollar quoted string.
          String tag = parseDollarQuotedString(sql, currentIndex + 1);
          if (tag != null && tag.equals(dollarTag)) {
            appendIfNotNull(result, currentChar, dollarTag, currentChar);
            return currentIndex + tag.length() + 2;
          }
        } else if (lastCharWasEscapeChar) {
          lastCharWasEscapeChar = false;
        } else if (sql.length() > currentIndex + 1 && sql.charAt(currentIndex + 1) == startQuote) {
          // This is an escaped quote (e.g. 'foo''bar')
          appendIfNotNull(result, currentChar);
          appendIfNotNull(result, currentChar);
          currentIndex += 2;
          continue;
        } else {
          appendIfNotNull(result, currentChar);
          return currentIndex + 1;
        }
      } else {
        lastCharWasEscapeChar = currentChar == '\\';
      }
      currentIndex++;
      appendIfNotNull(result, currentChar);
    }
    throw SpannerExceptionFactory.newSpannerException(
        ErrorCode.INVALID_ARGUMENT, "SQL statement contains an unclosed literal: " + sql);
  }

  /** Appends the char to the builder when a builder was supplied. */
  private void appendIfNotNull(@Nullable StringBuilder result, char currentChar) {
    if (result != null) {
      result.append(currentChar);
    }
  }

  /** Appends prefix + tag + suffix (a full dollar-quote marker) when a builder was supplied. */
  private void appendIfNotNull(
      @Nullable StringBuilder result, char prefix, String tag, char suffix) {
    if (result != null) {
      result.append(prefix).append(tag).append(suffix);
    }
  }
}
package graphql.relay;

import graphql.schema.DataFetcher;
import graphql.schema.GraphQLArgument;
import graphql.schema.GraphQLFieldDefinition;
import graphql.schema.GraphQLInputObjectField;
import graphql.schema.GraphQLInputObjectType;
import graphql.schema.GraphQLInterfaceType;
import graphql.schema.GraphQLList;
import graphql.schema.GraphQLNonNull;
import graphql.schema.GraphQLObjectType;
import graphql.schema.GraphQLOutputType;
import graphql.schema.TypeResolver;

import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;

import static graphql.Scalars.GraphQLBoolean;
import static graphql.Scalars.GraphQLID;
import static graphql.Scalars.GraphQLInt;
import static graphql.Scalars.GraphQLString;
import static graphql.schema.GraphQLArgument.newArgument;
import static graphql.schema.GraphQLFieldDefinition.newFieldDefinition;
import static graphql.schema.GraphQLInputObjectField.newInputObjectField;
import static graphql.schema.GraphQLInputObjectType.newInputObject;
import static graphql.schema.GraphQLInterfaceType.newInterface;
import static graphql.schema.GraphQLObjectType.newObject;

/**
 * Helper for building schemas that follow the Relay specification: the Node
 * interface, cursor-based connections (edges + PageInfo), pagination
 * arguments, client-mutation-id mutations, and base64 global object ids.
 */
public class Relay {

    public static final String NODE = "Node";

    // Shared PageInfo object type, reused by every connection type built
    // from this instance.
    private GraphQLObjectType pageInfoType = newObject()
            .name("PageInfo")
            .description("Information about pagination in a connection.")
            .field(newFieldDefinition()
                    .name("hasNextPage")
                    .type(new GraphQLNonNull(GraphQLBoolean))
                    .description("When paginating forwards, are there more items?"))
            .field(newFieldDefinition()
                    .name("hasPreviousPage")
                    .type(new GraphQLNonNull(GraphQLBoolean))
                    .description("When paginating backwards, are there more items?"))
            .field(newFieldDefinition()
                    .name("startCursor")
                    .type(GraphQLString)
                    .description("When paginating backwards, the cursor to continue."))
            .field(newFieldDefinition()
                    .name("endCursor")
                    .type(GraphQLString)
                    .description("When paginating forwards, the cursor to continue."))
            .build();

    /** Builds the Relay "Node" interface with a non-null ID field. */
    public GraphQLInterfaceType nodeInterface(TypeResolver typeResolver) {
        return newInterface()
                .name(NODE)
                .description("An object with an ID")
                .typeResolver(typeResolver)
                .field(newFieldDefinition()
                        .name("id")
                        .description("The ID of an object")
                        .type(new GraphQLNonNull(GraphQLID)))
                .build();
    }

    /** Builds the root "node(id)" field that refetches any object by global id. */
    public GraphQLFieldDefinition nodeField(GraphQLInterfaceType nodeInterface, DataFetcher nodeDataFetcher) {
        return newFieldDefinition()
                .name("node")
                .description("Fetches an object given its ID")
                .type(nodeInterface)
                .dataFetcher(nodeDataFetcher)
                .argument(newArgument()
                        .name("id")
                        .description("The ID of an object")
                        .type(new GraphQLNonNull(GraphQLID)))
                .build();
    }

    /** All four standard pagination arguments: before/after/first/last. */
    public List<GraphQLArgument> getConnectionFieldArguments() {
        List<GraphQLArgument> args = new ArrayList<>();
        args.add(newArgument()
                .name("before")
                .description("fetching only nodes before this node (exclusive)")
                .type(GraphQLString)
                .build());
        args.add(newArgument()
                .name("after")
                .description("fetching only nodes after this node (exclusive)")
                .type(GraphQLString)
                .build());
        args.add(newArgument()
                .name("first")
                .description("fetching only the first certain number of nodes")
                .type(GraphQLInt)
                .build());
        args.add(newArgument()
                .name("last")
                .description("fetching only the last certain number of nodes")
                .type(GraphQLInt)
                .build());
        return args;
    }

    /** Backward-only pagination arguments: before/last. */
    public List<GraphQLArgument> getBackwardPaginationConnectionFieldArguments() {
        List<GraphQLArgument> args = new ArrayList<>();
        args.add(newArgument()
                .name("before")
                .description("fetching only nodes before this node (exclusive)")
                .type(GraphQLString)
                .build());
        args.add(newArgument()
                .name("last")
                .description("fetching only the last certain number of nodes")
                .type(GraphQLInt)
                .build());
        return args;
    }

    /** Forward-only pagination arguments: after/first. */
    public List<GraphQLArgument> getForwardPaginationConnectionFieldArguments() {
        List<GraphQLArgument> args = new ArrayList<>();
        args.add(newArgument()
                .name("after")
                .description("fetching only nodes after this node (exclusive)")
                .type(GraphQLString)
                .build());
        args.add(newArgument()
                .name("first")
                .description("fetching only the first certain number of nodes")
                .type(GraphQLInt)
                .build());
        return args;
    }

    /**
     * Builds a "{name}Edge" type with the standard node and cursor fields
     * plus any extra edge fields supplied by the caller.
     */
    public GraphQLObjectType edgeType(String name, GraphQLOutputType nodeType, GraphQLInterfaceType nodeInterface, List<GraphQLFieldDefinition> edgeFields) {
        return newObject()
                .name(name + "Edge")
                .description("An edge in a connection")
                .field(newFieldDefinition()
                        .name("node")
                        .type(nodeType)
                        .description("The item at the end of the edge"))
                .field(newFieldDefinition()
                        .name("cursor")
                        .type(new GraphQLNonNull(GraphQLString))
                        .description("cursor marks a unique position or index into the connection"))
                .fields(edgeFields)
                .build();
    }

    /**
     * Builds a "{name}Connection" type with the standard edges and pageInfo
     * fields plus any extra connection fields supplied by the caller.
     */
    public GraphQLObjectType connectionType(String name, GraphQLObjectType edgeType, List<GraphQLFieldDefinition> connectionFields) {
        return newObject()
                .name(name + "Connection")
                .description("A connection to a list of items.")
                .field(newFieldDefinition()
                        .name("edges")
                        .description("a list of edges")
                        .type(new GraphQLList(edgeType)))
                .field(newFieldDefinition()
                        .name("pageInfo")
                        .description("details about this specific page")
                        .type(new GraphQLNonNull(pageInfoType)))
                .fields(connectionFields)
                .build();
    }

    /**
     * Builds a Relay-style mutation field: wraps the given input fields in a
     * "{name}Input" object (plus required clientMutationId), and the output
     * fields in a "{name}Payload" object that echoes the clientMutationId.
     */
    public GraphQLFieldDefinition mutationWithClientMutationId(String name, String fieldName, List<GraphQLInputObjectField> inputFields, List<GraphQLFieldDefinition> outputFields, DataFetcher dataFetcher) {
        GraphQLInputObjectType inputObjectType = newInputObject()
                .name(name + "Input")
                .field(newInputObjectField()
                        .name("clientMutationId")
                        .type(new GraphQLNonNull(GraphQLString)))
                .fields(inputFields)
                .build();
        GraphQLObjectType outputType = newObject()
                .name(name + "Payload")
                .field(newFieldDefinition()
                        .name("clientMutationId")
                        .type(new GraphQLNonNull(GraphQLString)))
                .fields(outputFields)
                .build();
        return newFieldDefinition()
                .name(fieldName)
                .type(outputType)
                .argument(newArgument()
                        .name("input")
                        .type(new GraphQLNonNull(inputObjectType)))
                .dataFetcher(dataFetcher)
                .build();
    }

    /** The (type, id) pair decoded from a base64 global id. */
    public static class ResolvedGlobalId {

        public ResolvedGlobalId(String type, String id) {
            this.type = type;
            this.id = id;
        }

        /**
         * @deprecated use {@link #getType()}
         */
        @Deprecated
        public String type;
        /**
         * @deprecated use {@link #getId()}
         */
        @Deprecated
        public String id;

        public String getType() {
            return type;
        }

        public String getId() {
            return id;
        }
    }

    private static final java.util.Base64.Encoder encoder = java.util.Base64.getEncoder();
    private static final java.util.Base64.Decoder decoder = java.util.Base64.getDecoder();

    /** Encodes "type:id" as a base64 global id. */
    public String toGlobalId(String type, String id) {
        return encoder.encodeToString((type + ":" + id).getBytes(StandardCharsets.UTF_8));
    }

    /**
     * Decodes a global id back into its type and id. Splits on the FIRST
     * ':' only, so ids may themselves contain colons.
     *
     * @throws IllegalArgumentException if the decoded value has no ':'
     */
    public ResolvedGlobalId fromGlobalId(String globalId) {
        String[] split = new String(decoder.decode(globalId), StandardCharsets.UTF_8).split(":", 2);
        if (split.length != 2) {
            throw new IllegalArgumentException(String.format("expecting a valid global id, got %s", globalId));
        }
        return new ResolvedGlobalId(split[0], split[1]);
    }
}
package org.apereo.cas.adaptors.jdbc;

import org.apereo.cas.authentication.CoreAuthenticationTestUtils;
import org.apereo.cas.authentication.PreventedException;
import org.apereo.cas.authentication.exceptions.AccountDisabledException;
import org.apereo.cas.authentication.exceptions.AccountPasswordMustChangeException;
import org.apereo.cas.util.transforms.PrefixSuffixPrincipalNameTransformer;

import lombok.SneakyThrows;
import lombok.val;
import org.apache.commons.lang3.StringUtils;
import org.apache.shiro.crypto.hash.DefaultHashService;
import org.apache.shiro.crypto.hash.HashRequest;
import org.apache.shiro.util.ByteSource;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.cloud.autoconfigure.RefreshAutoConfiguration;
import org.springframework.security.crypto.password.PasswordEncoder;
import org.springframework.test.annotation.DirtiesContext;

import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.security.auth.login.AccountNotFoundException;
import javax.security.auth.login.FailedLoginException;
import javax.sql.DataSource;

import static org.junit.jupiter.api.Assertions.*;

/**
 * Tests for {@code QueryAndEncodeDatabaseAuthenticationHandler}: each test seeds an
 * in-memory {@code users} table (see {@link #initialize()}) with Shiro-hashed passwords
 * and verifies the handler's success and failure paths.
 *
 * @author Misagh Moayyed
 * @since 4.0.0
 */
@SuppressWarnings("JDBCExecuteWithNonConstantString")
@SpringBootTest(classes = {
    RefreshAutoConfiguration.class,
    DatabaseAuthenticationTestConfiguration.class
})
@DirtiesContext
public class QueryAndEncodeDatabaseAuthenticationHandlerTests {
    // Hash algorithm used both to seed the table and by the handler under test.
    private static final String ALG_NAME = "SHA-512";

    // %s placeholder is replaced with a WHERE clause by buildSql(...).
    private static final String SQL = "SELECT * FROM users where %s";

    private static final int NUM_ITERATIONS = 5;

    // Private (static) salt fed into the Shiro hash service; also passed to the handler.
    private static final String STATIC_SALT = "STATIC_SALT";

    private static final String PASSWORD_FIELD_NAME = "password";

    private static final String EXPIRED_FIELD_NAME = "expired";

    private static final String DISABLED_FIELD_NAME = "disabled";

    private static final String NUM_ITERATIONS_FIELD_NAME = "numIterations";

    @Autowired
    @Qualifier("dataSource")
    private DataSource dataSource;

    /**
     * Builds the INSERT statement for one seeded account ("userN") whose password is
     * hashed with the same parameters the handler will use.
     */
    private static String getSqlInsertStatementToCreateUserAccount(final int i, final String expired, final String disabled) {
        val psw = genPassword("user" + i, "salt" + i, NUM_ITERATIONS);
        return String.format(
            "insert into users (username, password, salt, numIterations, expired, disabled) values('%s', '%s', '%s', %s, '%s', '%s');",
            "user" + i, psw, "salt" + i, NUM_ITERATIONS, expired, disabled);
    }

    private static String buildSql(final String where) {
        return String.format(SQL, where);
    }

    // Default query: look an account up by username.
    private static String buildSql() {
        return String.format(SQL, "username=?;");
    }

    /**
     * Computes the expected password hash via Shiro's DefaultHashService using the
     * static private salt, the per-user public salt and the given iteration count.
     */
    @SneakyThrows
    private static String genPassword(final String psw, final String salt, final int iter) {
        val hash = new DefaultHashService();
        hash.setPrivateSalt(ByteSource.Util.bytes(STATIC_SALT));
        hash.setHashIterations(iter);
        hash.setGeneratePublicSalt(false);
        hash.setHashAlgorithmName(ALG_NAME);
        return hash.computeHash(new HashRequest.Builder().setSource(psw).setSalt(salt).setIterations(iter).build()).toHex();
    }

    /**
     * Seeds the users table before each test. Note: "user0" is inserted once explicitly
     * AND once by the loop, deliberately producing two rows for that username so that
     * verifyAuthenticationMultipleAccounts() can expect a FailedLoginException.
     * "user20" is expired and "user21" is disabled.
     */
    @BeforeEach
    @SneakyThrows
    public void initialize() {
        try (val c = this.dataSource.getConnection()) {
            try (val s = c.createStatement()) {
                c.setAutoCommit(true);
                s.execute(getSqlInsertStatementToCreateUserAccount(0, Boolean.FALSE.toString(), Boolean.FALSE.toString()));
                for (var i = 0; i < 10; i++) {
                    s.execute(getSqlInsertStatementToCreateUserAccount(i, Boolean.FALSE.toString(), Boolean.FALSE.toString()));
                }
                s.execute(getSqlInsertStatementToCreateUserAccount(20, Boolean.TRUE.toString(), Boolean.FALSE.toString()));
                s.execute(getSqlInsertStatementToCreateUserAccount(21, Boolean.FALSE.toString(), Boolean.TRUE.toString()));
            }
        }
    }

    // Clears all seeded rows so each test starts from the state built by initialize().
    @AfterEach
    @SneakyThrows
    public void afterEachTest() {
        try (val c = this.dataSource.getConnection()) {
            try (val s = c.createStatement()) {
                c.setAutoCommit(true);
                s.execute("delete from users;");
            }
        }
    }

    // Unknown username must surface as AccountNotFoundException.
    // NOTE(review): "ops" is passed as the iterations field name and does not match the
    // seeded numIterations column — presumably irrelevant on this failure path; confirm.
    @Test
    public void verifyAuthenticationFailsToFindUser() {
        val q = new QueryAndEncodeDatabaseAuthenticationHandler(StringUtils.EMPTY, null, null, null, dataSource, ALG_NAME,
            buildSql(), PASSWORD_FIELD_NAME, "salt", null, null, "ops", 0, StringUtils.EMPTY);
        assertThrows(AccountNotFoundException.class,
            () -> q.authenticate(CoreAuthenticationTestUtils.getCredentialsWithSameUsernameAndPassword()));
    }

    // A malformed WHERE clause must be reported as a PreventedException.
    @Test
    public void verifyAuthenticationInvalidSql() {
        val q = new QueryAndEncodeDatabaseAuthenticationHandler(StringUtils.EMPTY, null, null, null, dataSource, ALG_NAME,
            buildSql("makesNoSenseInSql"), PASSWORD_FIELD_NAME, "salt", null, null, "ops", 0, StringUtils.EMPTY);
        assertThrows(PreventedException.class,
            () -> q.authenticate(CoreAuthenticationTestUtils.getCredentialsWithSameUsernameAndPassword()));
    }

    // "user0" exists twice (see initialize()), so authentication must fail.
    @Test
    public void verifyAuthenticationMultipleAccounts() {
        val q = new QueryAndEncodeDatabaseAuthenticationHandler(StringUtils.EMPTY, null, null, null, dataSource, ALG_NAME,
            buildSql(), PASSWORD_FIELD_NAME, "salt", null, null, "ops", 0, StringUtils.EMPTY);
        assertThrows(FailedLoginException.class,
            () -> q.authenticate(CoreAuthenticationTestUtils.getCredentialsWithDifferentUsernameAndPassword("user0", "password0")));
    }

    // Happy path: correct salt/iterations configuration authenticates "user1".
    @Test
    @SneakyThrows
    public void verifyAuthenticationSuccessful() {
        val q = new QueryAndEncodeDatabaseAuthenticationHandler(StringUtils.EMPTY, null, null, null, dataSource, ALG_NAME,
            buildSql(), PASSWORD_FIELD_NAME, "salt", null, null, NUM_ITERATIONS_FIELD_NAME, 0, STATIC_SALT);
        val c = CoreAuthenticationTestUtils.getCredentialsWithSameUsernameAndPassword("user1");
        val r = q.authenticate(c);
        assertNotNull(r);
        assertEquals("user1", r.getPrincipal().getId());
    }

    // "user20" is seeded with expired=true; the expired-field column triggers a
    // password-must-change failure.
    @Test
    public void verifyAuthenticationWithExpiredField() {
        val q = new QueryAndEncodeDatabaseAuthenticationHandler(StringUtils.EMPTY, null, null, null, dataSource, ALG_NAME,
            buildSql(), PASSWORD_FIELD_NAME, "salt", EXPIRED_FIELD_NAME, null, NUM_ITERATIONS_FIELD_NAME, 0, STATIC_SALT);
        assertThrows(AccountPasswordMustChangeException.class,
            () -> q.authenticate(CoreAuthenticationTestUtils.getCredentialsWithSameUsernameAndPassword("user20")));
    }

    // "user21" is seeded with disabled=true; the disabled-field column triggers an
    // account-disabled failure.
    @Test
    public void verifyAuthenticationWithDisabledField() {
        val q = new QueryAndEncodeDatabaseAuthenticationHandler(StringUtils.EMPTY, null, null, null, dataSource, ALG_NAME,
            buildSql(), PASSWORD_FIELD_NAME, "salt", null, DISABLED_FIELD_NAME, NUM_ITERATIONS_FIELD_NAME, 0, STATIC_SALT);
        assertThrows(AccountDisabledException.class,
            () -> q.authenticate(CoreAuthenticationTestUtils.getCredentialsWithSameUsernameAndPassword("user21")));
    }

    // Password encoder appends "1" and the name transformer prefixes "user", so
    // credentials ("1", "user") resolve to principal "user1".
    @Test
    @SneakyThrows
    public void verifyAuthenticationSuccessfulWithAPasswordEncoder() {
        val q = new QueryAndEncodeDatabaseAuthenticationHandler(StringUtils.EMPTY, null, null, null, dataSource, ALG_NAME,
            buildSql(), PASSWORD_FIELD_NAME, "salt", null, null, NUM_ITERATIONS_FIELD_NAME, 0, STATIC_SALT);
        q.setPasswordEncoder(new PasswordEncoder() {
            @Override
            public String encode(final CharSequence password) {
                return password.toString().concat("1");
            }

            @Override
            public boolean matches(final CharSequence rawPassword, final String encodedPassword) {
                return true;
            }
        });
        q.setPrincipalNameTransformer(new PrefixSuffixPrincipalNameTransformer("user", null));
        val r = q.authenticate(
            CoreAuthenticationTestUtils.getCredentialsWithDifferentUsernameAndPassword("1", "user"));
        assertNotNull(r);
        assertEquals("user1", r.getPrincipal().getId());
    }

    /**
     * JPA entity backing the in-memory users table the tests query against.
     */
    @SuppressWarnings("unused")
    @Entity(name = "users")
    public static class UsersTable {
        @Id
        @GeneratedValue(strategy = GenerationType.IDENTITY)
        private Long id;

        private String username;

        private String password;

        private String salt;

        private String expired;

        private String disabled;

        private long numIterations;
    }
}
/**
 * Copyright 2005-2015 Red Hat, Inc.
 *
 * Red Hat licenses this file to you under the Apache License, version
 * 2.0 (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package io.fabric8.camel.facade;

import org.apache.camel.api.management.mbean.ManagedBacklogTracerMBean;
import org.apache.camel.component.seda.SedaEndpoint;
import org.apache.camel.management.mbean.*;
import io.fabric8.camel.facade.mbean.*;

import javax.management.*;
import java.io.IOException;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;

/**
 * Common facade support for both local and remote.
 * <p/>
 * This implementation will provide most implementation supports as it turns out
 * that both the local and remote {@link org.apache.camel.CamelContext} will use the JMX API to
 * gather information.
 */
public abstract class CamelFacadeSupport implements CamelFacade {

    /** Management name of the CamelContext to target, or null to match any context. */
    protected String camelContextManagementName;

    /** Connection used for all JMX queries; supplied by the local or remote subclass. */
    protected final MBeanServerConnection mBeanServer;

    protected CamelFacadeSupport(String camelContextManagementName, MBeanServerConnection mBeanServer) throws Exception {
        this.mBeanServer = mBeanServer;
        this.camelContextManagementName = camelContextManagementName;
    }

    protected MBeanServerConnection getMBeanServerConnection() throws Exception {
        return mBeanServer;
    }

    /**
     * Queries MBeans matching the given name pattern and query expression.
     * NOTE: despite the method name this delegates to {@code queryMBeans} and returns
     * {@link ObjectInstance}s (object name plus class name), not bare {@link ObjectName}s;
     * several callers below rely on {@link ObjectInstance#getClassName()}.
     */
    protected Set<ObjectInstance> queryNames(ObjectName name, QueryExp query) throws Exception {
        return getMBeanServerConnection().queryMBeans(name, query);
    }

    /**
     * Wraps {@code target} in a dynamic proxy implementing {@code ic} whose no-arg
     * {@code getId()} returns the supplied id; every other call is delegated to the target.
     *
     * @param ic     the interface to proxy
     * @param target the object handling all non-getId invocations
     * @param id     the value returned by the proxied {@code getId()}
     */
    static public <T> T addGetId(Class<T> ic, final Object target, final String id) throws Exception {
        return ic.cast(Proxy.newProxyInstance(ic.getClassLoader(), new Class[]{ic}, new InvocationHandler() {
            @Override
            public Object invoke(Object o, Method method, Object[] objects) throws Throwable {
                // BUGFIX: compare the method name with equals(); the original used ==,
                // which relies on string interning rather than value equality.
                if ("getId".equals(method.getName()) && method.getParameterTypes().length == 0) {
                    return id;
                }
                return method.invoke(target, objects);
            }
        }));
    }

    /**
     * Creates a JMX MBean proxy for the given object name and augments it with a
     * {@code getId()} returning the canonical object name (see {@link #addGetId}).
     */
    @SuppressWarnings("unchecked")
    protected Object newProxyInstance(ObjectName objectName, Class interfaceClass, boolean notificationBroadcaster) throws Exception {
        Object jmxProxy = MBeanServerInvocationHandler.newProxyInstance(getMBeanServerConnection(), objectName,
                interfaceClass, notificationBroadcaster);
        return addGetId(interfaceClass, jmxProxy, objectName.getCanonicalName());
    }

    /**
     * Resolves the effective context management name: an explicit argument wins over
     * the name configured on this facade; both may be null.
     */
    private String resolveId(String managementName) {
        return managementName != null ? managementName : camelContextManagementName;
    }

    /**
     * Finds all CamelContext's registered on a certain JMX-Server or, if a
     * JMX-BrokerName has been set, the broker with that name.
     *
     * @param connection not <code>null</code>
     * @param managementName to find a specific context by its management name
     * @return Set with ObjectName-elements
     */
    protected Set<ObjectName> findCamelContexts(MBeanServerConnection connection, String managementName) throws Exception {
        String id = resolveId(managementName);
        ObjectName name;
        if (id != null) {
            // BUGFIX: build the pattern from the resolved id; the original interpolated the
            // raw managementName parameter, yielding "context=null" whenever only the
            // configured camelContextManagementName was set.
            name = new ObjectName("org.apache.camel:context=" + id + ",type=context,*");
        } else {
            name = new ObjectName("org.apache.camel:context=*,type=context,*");
        }
        return connection.queryNames(name, null);
    }

    // CamelFacade
    //---------------------------------------------------------------

    /** {@inheritDoc} Returns a proxy per CamelContext found on the connection. */
    @Override
    public List<CamelContextMBean> getCamelContexts() throws Exception {
        MBeanServerConnection connection = getMBeanServerConnection();
        Set<ObjectName> names = findCamelContexts(connection, null);
        List<CamelContextMBean> answer = new ArrayList<CamelContextMBean>(names.size());
        for (ObjectName on : names) {
            answer.add((CamelContextMBean) newProxyInstance(on, CamelContextMBean.class, true));
        }
        return answer;
    }

    /**
     * {@inheritDoc}
     *
     * @throws IOException if no matching CamelContext is registered in JMX
     */
    @Override
    public CamelContextMBean getCamelContext(String managementName) throws Exception {
        MBeanServerConnection connection = getMBeanServerConnection();
        Set<ObjectName> contexts = findCamelContexts(connection, managementName);
        if (contexts.isEmpty()) {
            throw new IOException("No CamelContext could be found in the JMX.");
        }
        // we just take the first CamelContext as it matches the context id
        ObjectName name = contexts.iterator().next();
        return (CamelContextMBean) newProxyInstance(name, CamelContextMBean.class, true);
    }

    /** {@inheritDoc} Returns the fabric tracer MBean, or null if none is registered. */
    @Override
    public CamelFabricTracerMBean getFabricTracer(String managementName) throws Exception {
        String id = resolveId(managementName);
        ObjectName query = ObjectName.getInstance("org.apache.camel:context=" + id + ",type=fabric,*");
        Set<ObjectInstance> names = queryNames(query, null);
        for (ObjectInstance on : names) {
            if (on.getClassName().equals("org.apache.camel.fabric.FabricTracer")) {
                return (CamelFabricTracerMBean) newProxyInstance(on.getObjectName(), CamelFabricTracerMBean.class, true);
            }
        }
        // tracer not found
        return null;
    }

    /** {@inheritDoc} Returns the backlog tracer MBean, or null if none is registered. */
    @Override
    public ManagedBacklogTracerMBean getCamelTracer(String managementName) throws Exception {
        String id = resolveId(managementName);
        ObjectName query = ObjectName.getInstance("org.apache.camel:context=" + id + ",type=tracer,*");
        Set<ObjectInstance> names = queryNames(query, null);
        for (ObjectInstance on : names) {
            if (on.getClassName().equals("org.apache.camel.management.mbean.ManagedBacklogTracer")) {
                return (ManagedBacklogTracerMBean) newProxyInstance(on.getObjectName(), ManagedBacklogTracerMBean.class, true);
            }
        }
        // tracer not found
        return null;
    }

    /** {@inheritDoc} */
    @Override
    public List<CamelComponentMBean> getComponents(String managementName) throws Exception {
        String id = resolveId(managementName);
        ObjectName query = ObjectName.getInstance("org.apache.camel:context=" + id + ",type=components,*");
        Set<ObjectInstance> names = queryNames(query, null);
        List<CamelComponentMBean> answer = new ArrayList<CamelComponentMBean>(names.size());
        for (ObjectInstance on : names) {
            answer.add((CamelComponentMBean) newProxyInstance(on.getObjectName(), CamelComponentMBean.class, true));
        }
        return answer;
    }

    /** {@inheritDoc} Suspendable routes get the more capable suspendable proxy. */
    @Override
    public List<CamelRouteMBean> getRoutes(String managementName) throws Exception {
        String id = resolveId(managementName);
        ObjectName query = ObjectName.getInstance("org.apache.camel:context=" + id + ",type=routes,*");
        Set<ObjectInstance> names = queryNames(query, null);
        List<CamelRouteMBean> answer = new ArrayList<CamelRouteMBean>(names.size());
        for (ObjectInstance on : names) {
            CamelRouteMBean route;
            if (ManagedSuspendableRoute.class.getName().equals(on.getClassName())) {
                route = (CamelRouteMBean) newProxyInstance(on.getObjectName(), CamelSuspendableRouteMBean.class, true);
            } else {
                route = (CamelRouteMBean) newProxyInstance(on.getObjectName(), CamelRouteMBean.class, true);
            }
            answer.add(route);
        }
        return answer;
    }

    /** {@inheritDoc} Browsable endpoints (incl. SEDA and JMS) get the browsable proxy. */
    @Override
    public List<CamelEndpointMBean> getEndpoints(String managementName) throws Exception {
        String id = resolveId(managementName);
        ObjectName query = ObjectName.getInstance("org.apache.camel:context=" + id + ",type=endpoints,*");
        Set<ObjectInstance> names = queryNames(query, null);
        List<CamelEndpointMBean> answer = new ArrayList<CamelEndpointMBean>(names.size());
        for (ObjectInstance on : names) {
            CamelEndpointMBean endpoint;
            if (ManagedBrowsableEndpoint.class.getName().equals(on.getClassName())
                    || SedaEndpoint.class.getName().equals(on.getClassName())) {
                endpoint = (CamelEndpointMBean) newProxyInstance(on.getObjectName(), CamelBrowsableEndpointMBean.class, true);
            } else if (on.getClassName().startsWith("org.apache.camel.component.jms")) {
                // special for JMS endpoints as they are browsable as well
                endpoint = (CamelEndpointMBean) newProxyInstance(on.getObjectName(), CamelBrowsableEndpointMBean.class, true);
            } else {
                endpoint = (CamelEndpointMBean) newProxyInstance(on.getObjectName(), CamelEndpointMBean.class, true);
            }
            answer.add(endpoint);
        }
        return answer;
    }

    /** {@inheritDoc} Scheduled-poll consumers get the scheduled-poll proxy. */
    @Override
    public List<CamelConsumerMBean> getConsumers(String managementName) throws Exception {
        String id = resolveId(managementName);
        ObjectName query = ObjectName.getInstance("org.apache.camel:context=" + id + ",type=consumers,*");
        Set<ObjectInstance> names = queryNames(query, null);
        List<CamelConsumerMBean> answer = new ArrayList<CamelConsumerMBean>(names.size());
        for (ObjectInstance on : names) {
            CamelConsumerMBean consumer;
            if (ManagedScheduledPollConsumer.class.getName().equals(on.getClassName())) {
                consumer = (CamelConsumerMBean) newProxyInstance(on.getObjectName(), CamelScheduledPollConsumerMBean.class, true);
            } else {
                consumer = (CamelConsumerMBean) newProxyInstance(on.getObjectName(), CamelConsumerMBean.class, true);
            }
            answer.add(consumer);
        }
        return answer;
    }

    /** {@inheritDoc} Send/delay/throttle processors get their specialized proxies. */
    @Override
    public List<CamelProcessorMBean> getProcessors(String managementName) throws Exception {
        String id = resolveId(managementName);
        ObjectName query = ObjectName.getInstance("org.apache.camel:context=" + id + ",type=processors,*");
        Set<ObjectInstance> names = queryNames(query, null);
        List<CamelProcessorMBean> answer = new ArrayList<CamelProcessorMBean>(names.size());
        for (ObjectInstance on : names) {
            CamelProcessorMBean processor;
            if (ManagedSendProcessor.class.getName().equals(on.getClassName())) {
                processor = (CamelProcessorMBean) newProxyInstance(on.getObjectName(), CamelSendProcessorMBean.class, true);
            } else if (ManagedDelayer.class.getName().equals(on.getClassName())) {
                processor = (CamelProcessorMBean) newProxyInstance(on.getObjectName(), CamelDelayProcessorMBean.class, true);
            } else if (ManagedThrottler.class.getName().equals(on.getClassName())) {
                processor = (CamelProcessorMBean) newProxyInstance(on.getObjectName(), CamelThrottleProcessorMBean.class, true);
            } else {
                processor = (CamelProcessorMBean) newProxyInstance(on.getObjectName(), CamelProcessorMBean.class, true);
            }
            answer.add(processor);
        }
        return answer;
    }

    /** {@inheritDoc} */
    @Override
    public List<CamelThreadPoolMBean> getThreadPools(String managementName) throws Exception {
        String id = resolveId(managementName);
        ObjectName query = ObjectName.getInstance("org.apache.camel:context=" + id + ",type=threadpools,*");
        Set<ObjectInstance> names = queryNames(query, null);
        List<CamelThreadPoolMBean> answer = new ArrayList<CamelThreadPoolMBean>(names.size());
        for (ObjectInstance on : names) {
            answer.add((CamelThreadPoolMBean) newProxyInstance(on.getObjectName(), CamelThreadPoolMBean.class, true));
        }
        return answer;
    }

    /**
     * {@inheritDoc}
     * Tries the native JMX operation first and deliberately ignores any failure,
     * as the operation may not be available in older Camel releases; in that case it
     * falls back to the slower backwards-compatible implementation.
     */
    @Override
    public String dumpRoutesStatsAsXml(String managementName) throws Exception {
        CamelContextMBean context = getCamelContext(managementName);
        try {
            return context.dumpRoutesStatsAsXml(false, true);
        } catch (Exception ignored) {
            // intentionally ignored: fall through to the compatible path below
        }
        return CamelBackwardsCompatibleSupport.dumpRoutesStatsAsXml(this, managementName);
    }
}
/**********************************************************************************
 * $URL$
 * $Id$
 ***********************************************************************************
 *
 * Copyright (c) 2003, 2004, 2005, 2006, 2008 Sakai Foundation
 *
 * Licensed under the Educational Community License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.opensource.org/licenses/ECL-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 **********************************************************************************/

package org.sakaiproject.site.api;

import java.io.Serializable;
import java.util.Collection;
import java.util.Date;
import java.util.List;
import java.util.Set;

import org.sakaiproject.authz.api.AuthzGroup;
import org.sakaiproject.authz.api.AuthzRealmLockException;
import org.sakaiproject.authz.api.Member;
import org.sakaiproject.entity.api.Edit;
import org.sakaiproject.time.api.Time;
import org.sakaiproject.user.api.User;

/**
 * <p>
 * Site is the object that knows the information, tools and layouts for a Sakai Site.
 * </p>
 */
public interface Site extends Edit, Comparable, Serializable, AuthzGroup {
	/**
	 * property name the contact email
	 */
	public final static String PROP_SITE_CONTACT_EMAIL = "contact-email";

	/**
	 * property name for owners contact name
	 */
	public final static String PROP_SITE_CONTACT_NAME = "contact-name";

	/**
	 * property name for term
	 */
	public final static String PROP_SITE_TERM = "term";

	/**
	 * property name for term-eid
	 */
	public final static String PROP_SITE_TERM_EID = "term_eid";

	/**
	 * property name for portal-neochat
	 */
	public final static String PROP_SITE_PORTAL_NEOCHAT = "portal.neochat";

	/**
	 * property name for mathjax
	 */
	public final static String PROP_SITE_MATHJAX_ALLOWED = "mathJaxAllowed";

	/**
	 * property name for custom overview
	 */
	public final static String PROP_CUSTOM_OVERVIEW = "custom_overview";

	/**
	 * @return the user who created this.
	 */
	User getCreatedBy();

	/**
	 * @return the user who last modified this.
	 */
	User getModifiedBy();

	/**
	 * @return the time created.
	 * @deprecated use {@link #getCreatedDate()}
	 */
	Time getCreatedTime();

	/**
	 * @return the time last modified.
	 * @deprecated use {@link #getModifiedDate()}
	 */
	Time getModifiedTime();

	/** @return The human readable Title of the site. */
	String getTitle();

	/** @return A short text Description of the site. */
	String getShortDescription();

	/** @return An HTML-safe version of the short Description of the site. */
	String getHtmlShortDescription();

	/** @return A longer text Description of the site. */
	String getDescription();

	/** @return An HTML-safe version of the Description of the site. */
	String getHtmlDescription();

	/** @return The Site's icon URL. */
	String getIconUrl();

	/** @return The Site's icon URL as a full URL. */
	String getIconUrlFull();

	/** @return The Site's info display URL. */
	String getInfoUrl();

	/** @return The Site's info display URL as a full URL. */
	String getInfoUrlFull();

	/** @return true if this Site can be joined by anyone, false if not. */
	boolean isJoinable();

	/** @return the role name given to users who join a joinable site. */
	String getJoinerRole();

	/** @return the skin to use for this site. */
	String getSkin();

	/** @return the List (SitePage) of Site Pages. */
	List<SitePage> getPages();

	/**
	 * Make sure description, pages, tools, groups, and properties are loaded, not lazy
	 */
	void loadAll();

	/** @return The pages ordered by the tool order constraint for this site's type (as tool category), or the site's pages in defined order if the site is set to have a custom page order. */
	List<SitePage> getOrderedPages();

	/** @return true if the site is published, false if not. */
	boolean isPublished();

	/**
	 * Access the SitePage that has this id, if one is defined, else return null.
	 * 
	 * @param id
	 *        The id of the SitePage.
	 * @return The SitePage that has this id, if one is defined, else return null.
	 */
	SitePage getPage(String id);

	/**
	 * Access the ToolConfiguration that has this id, if one is defined, else return null. The tool may be on any SitePage in the site.
	 * 
	 * @param id
	 *        The id of the tool.
	 * @return The ToolConfiguration that has this id, if one is defined, else return null.
	 */
	ToolConfiguration getTool(String id);

	/**
	 * Get all the tools placed in the site on any page that are of any of these tool ids.
	 * 
	 * @param toolIds
	 *        The tool id array (String, such as sakai.chat, not a tool configuration / placement uuid) to search for.
	 * @return A Collection (ToolConfiguration) of all the tools placed in the site on any page that are of this tool id (may be empty).
	 */
	Collection<ToolConfiguration> getTools(String[] toolIds);

	/**
	 * Get all the tools placed in the site on any page for a particular common Tool Id.
	 * 
	 * @param commonToolId
	 *        The tool id (String, such as sakai.chat, not a tool configuration / placement uuid) to search for.
	 * @return A Collection (ToolConfiguration) of all the tools placed in the site on any page that are of this tool id (may be empty).
	 */
	Collection<ToolConfiguration> getTools(String commonToolId);

	/**
	 * Get the first tool placed on the site on any page with the specified common Tool id (such as sakai.chat)
	 * 
	 * @param commonToolId
	 *        The common ToolID to search for (i.e. sakai.chat)
	 * @return ToolConfiguration for the tool which has the ID (if any) or null if no tools match.
	 */
	ToolConfiguration getToolForCommonId(String commonToolId);

	/**
	 * Access the site type.
	 * 
	 * @return The site type.
	 */
	String getType();

	/**
	 * Test if the site is of this type. It is if the param is null.
	 * 
	 * @param type
	 *        A String type to match, or a String[], List or Set of Strings, any of which can match.
	 * @return true if the site is of the type(s) specified, false if not.
	 */
	boolean isType(Object type);

	/**
	 * Check if the site is marked for viewing.
	 * 
	 * @return True if the site is marked for viewing, false if not
	 */
	boolean isPubView();

	/**
	 * Get a site group
	 * 
	 * @param id
	 *        The group id (or reference).
	 * @return The Group object if found, or null if not found.
	 */
	Group getGroup(String id);

	/**
	 * Get a collection of the groups in a Site.
	 * 
	 * @return A collection (Group) of groups defined in the site, empty if there are none.
	 */
	Collection<Group> getGroups();

	/**
	 * Get a collection of the groups in a Site that have this user as a member.
	 * 
	 * @param userId
	 *        The user id.
	 * @return A collection (Group) of groups defined in the site that have this user as a member, empty if there are none.
	 */
	Collection<Group> getGroupsWithMember(String userId);

	/**
	 * Get a collection of the groups in a Site that have all these users as members.
	 * 
	 * @param userIds
	 *        The user ids.
	 * @return A collection (Group) of groups defined in the site that have these users as members, empty if there are none.
	 */
	Collection<Group> getGroupsWithMembers(String[] userIds);

	/**
	 * Get a collection of the groups in a Site that have this user as a member with this role.
	 * 
	 * @param userId
	 *        The user id.
	 * @param role
	 *        The role.
	 * @return A collection (Group) of groups defined in the site that have this user as a member with this role, empty if there are none.
	 */
	Collection<Group> getGroupsWithMemberHasRole(String userId, String role);

	/**
	 * Get user IDs of members of a set of groups in this site
	 * 
	 * @param groupIds IDs of authZ groups (AuthzGroup selection criteria),
	 *        a null groupIds includes all groups in the site, an empty set includes none of them
	 * @return collection of user IDs who are in (members of) a set of site groups
	 * @since 1.3.0
	 */
	Collection<String> getMembersInGroups(Set<String> groupIds);

	/**
	 * Does the site have any groups defined?
	 * 
	 * @return true if the site and has any groups, false if not.
	 */
	boolean hasGroups();

	/**
	 * Set the human readable Title of the site.
	 * 
	 * @param title
	 *        the new title.
	 */
	void setTitle(String title);

	/**
	 * Set the url of an icon for the site.
	 * 
	 * @param url
	 *        The new icon's url.
	 */
	void setIconUrl(String url);

	/**
	 * Set the url for information about the site.
	 * 
	 * @param url
	 *        The new information url.
	 */
	void setInfoUrl(String url);

	/**
	 * Set the joinable status of the site.
	 * 
	 * @param joinable
	 *        represents whether the site is joinable (true) or not (false).
	 */
	void setJoinable(boolean joinable);

	/**
	 * Set the joiner role for a site.
	 * 
	 * @param role
	 *        the joiner role for a site.
	 */
	void setJoinerRole(String role);

	/**
	 * Set the short Description of the site. Used to give a short text description of the site.
	 * 
	 * @param description
	 *        The new short description.
	 */
	void setShortDescription(String description);

	/**
	 * Set the Description of the site. Used to give a longer text description of the site.
	 * 
	 * @param description
	 *        The new description.
	 */
	void setDescription(String description);

	/**
	 * Set the published state of this site.
	 * 
	 * @param published
	 *        The published state of the site.
	 */
	void setPublished(boolean published);

	/**
	 * Set the skin to use for this site.
	 * 
	 * @param skin
	 *        The skin to use for this site.
	 */
	void setSkin(String skin);

	/**
	 * Create a new site page and add it to this site.
	 * 
	 * @return The SitePage object for the new site page.
	 */
	SitePage addPage();

	/**
	 * Remove a site page from this site.
	 * 
	 * @param page
	 *        The SitePage to remove.
	 */
	void removePage(SitePage page);

	/**
	 * Generate a new set of pages and tools that have new, unique ids. Good if the site had non-unique-system-wide ids for pages and tools. The Site Id does not change.
	 */
	void regenerateIds();

	/**
	 * Set the site type.
	 * 
	 * @param type
	 *        The site type.
	 */
	void setType(String type);

	/**
	 * Set the site view.
	 * 
	 * @param pubView
	 *        The site view setting.
	 */
	void setPubView(boolean pubView);

	/**
	 * Add a new group. The Id is generated, the rest of the fields can be set using calls to the Group object returned.
	 * NOTE: the title must be set before saving
	 */
	Group addGroup();

	/**
	 * Remove this group from the groups for this site.
	 * 
	 * @deprecated Use deleteGroup() instead.
	 * @param group
	 *        The group to remove.
	 */
	void removeGroup(Group group);

	/**
	 * Remove a group from the groups for this site.
	 * Its functionallity is the same as removeMember but throws IllegalStateException.
	 * 
	 * @param group
	 *        The group to delete.
	 */
	void deleteGroup(Group group) throws AuthzRealmLockException;

	/**
	 * Check if the site has a custom page order
	 * 
	 * @return true if the site has a custom page order, false if not.
	 */
	boolean isCustomPageOrdered();

	/**
	 * Set the site's custom page order flag.
	 * 
	 * @param custom
	 *        true if the site has a custom page ordering, false if not.
	 */
	void setCustomPageOrdered(boolean custom);

	/**
	 * Is this site softly deleted and hence queued for a hard delete?
	 * @return true if it has been softly deleted
	 */
	boolean isSoftlyDeleted();

	/**
	 * If softly deleted, the date that occurred
	 * @return date if it has been softly deleted
	 */
	Date getSoftlyDeletedDate();

	/**
	 * Set params for this site as softly deleted
	 * @param flag true or false
	 */
	void setSoftlyDeleted(boolean flag);
}
/*Copyright [2013] [Jeff Gilfelt] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.schautup.views; import android.content.Context; import android.content.res.Resources; import android.graphics.Color; import android.graphics.Typeface; import android.graphics.drawable.ShapeDrawable; import android.graphics.drawable.shapes.RoundRectShape; import android.util.AttributeSet; import android.util.TypedValue; import android.view.Gravity; import android.view.View; import android.view.ViewGroup; import android.view.ViewGroup.LayoutParams; import android.view.ViewParent; import android.view.animation.AccelerateInterpolator; import android.view.animation.AlphaAnimation; import android.view.animation.Animation; import android.view.animation.DecelerateInterpolator; import android.widget.FrameLayout; import android.widget.TabWidget; import android.widget.TextView; /** * A simple text label view that can be applied as a "badge" to any given * {@link android.view.View}. This class is intended to be instantiated at * runtime rather than included in XML layouts. 
* * @author Jeff Gilfelt */ public class BadgeView extends TextView { public static final int POSITION_TOP_LEFT = 1; public static final int POSITION_TOP_RIGHT = 2; public static final int POSITION_BOTTOM_LEFT = 3; public static final int POSITION_BOTTOM_RIGHT = 4; public static final int POSITION_CENTER = 5; private static final int DEFAULT_MARGIN_DIP = 5; private static final int DEFAULT_LR_PADDING_DIP = 5; private static final int DEFAULT_CORNER_RADIUS_DIP = 8; private static final int DEFAULT_POSITION = POSITION_TOP_RIGHT; private static final int DEFAULT_BADGE_COLOR = Color.parseColor("#CCFF0000"); // Color.RED; private static final int DEFAULT_TEXT_COLOR = Color.WHITE; private static Animation fadeIn; private static Animation fadeOut; private Context context; private View target; private int badgePosition; private int badgeMarginH; private int badgeMarginV; private int badgeColor; private boolean isShown; private ShapeDrawable badgeBg; private int targetTabIndex; public BadgeView(Context context) { this(context, (AttributeSet) null, android.R.attr.textViewStyle); } public BadgeView(Context context, AttributeSet attrs) { this(context, attrs, android.R.attr.textViewStyle); } /** * Constructor - * * create a new BadgeView instance attached to a target * {@link android.view.View}. * * @param context * context for this view. * @param target * the View to attach the badge to. */ public BadgeView(Context context, View target) { this(context, null, android.R.attr.textViewStyle, target, 0); } /** * Constructor - * * create a new BadgeView instance attached to a target * {@link android.widget.TabWidget} tab at a given index. * * @param context * context for this view. * @param target * the TabWidget to attach the badge to. * @param index * the position of the tab within the target. 
*/ public BadgeView(Context context, TabWidget target, int index) { this(context, null, android.R.attr.textViewStyle, target, index); } public BadgeView(Context context, AttributeSet attrs, int defStyle) { this(context, attrs, defStyle, null, 0); } public BadgeView(Context context, AttributeSet attrs, int defStyle, View target, int tabIndex) { super(context, attrs, defStyle); init(context, target, tabIndex); } private void init(Context context, View target, int tabIndex) { this.context = context; this.target = target; this.targetTabIndex = tabIndex; // apply defaults badgePosition = DEFAULT_POSITION; badgeMarginH = dipToPixels(DEFAULT_MARGIN_DIP); badgeMarginV = badgeMarginH; badgeColor = DEFAULT_BADGE_COLOR; setTypeface(Typeface.DEFAULT_BOLD); int paddingPixels = dipToPixels(DEFAULT_LR_PADDING_DIP); setPadding(paddingPixels, 0, paddingPixels, 0); setTextColor(DEFAULT_TEXT_COLOR); fadeIn = new AlphaAnimation(0, 1); fadeIn.setInterpolator(new DecelerateInterpolator()); fadeIn.setDuration(200); fadeOut = new AlphaAnimation(1, 0); fadeOut.setInterpolator(new AccelerateInterpolator()); fadeOut.setDuration(200); isShown = false; if (this.target != null) { applyTo(this.target); } else { show(); } } private void applyTo(View target) { LayoutParams lp = target.getLayoutParams(); ViewParent parent = target.getParent(); FrameLayout container = new FrameLayout(context); if (target instanceof TabWidget) { // set target to the relevant tab child container target = ((TabWidget) target).getChildTabViewAt(targetTabIndex); this.target = target; ((ViewGroup) target).addView(container, new LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT)); this.setVisibility(View.GONE); container.addView(this); } else { // TODO verify that parent is indeed a ViewGroup ViewGroup group = (ViewGroup) parent; int index = group.indexOfChild(target); group.removeView(target); group.addView(container, index, lp); container.addView(target); this.setVisibility(View.GONE); 
container.addView(this); group.invalidate(); } } /** * Make the badge visible in the UI. * */ public void show() { show(false, null); } /** * Make the badge visible in the UI. * * @param animate * flag to apply the default fade-in animation. */ public void show(boolean animate) { show(animate, fadeIn); } /** * Make the badge visible in the UI. * * @param anim * Animation to apply to the view when made visible. */ public void show(Animation anim) { show(true, anim); } /** * Make the badge non-visible in the UI. * */ public void hide() { hide(false, null); } /** * Make the badge non-visible in the UI. * * @param animate * flag to apply the default fade-out animation. */ public void hide(boolean animate) { hide(animate, fadeOut); } /** * Make the badge non-visible in the UI. * * @param anim * Animation to apply to the view when made non-visible. */ public void hide(Animation anim) { hide(true, anim); } /** * Toggle the badge visibility in the UI. * */ public void toggle() { toggle(false, null, null); } /** * Toggle the badge visibility in the UI. * * @param animate * flag to apply the default fade-in/out animation. */ public void toggle(boolean animate) { toggle(animate, fadeIn, fadeOut); } /** * Toggle the badge visibility in the UI. * * @param animIn * Animation to apply to the view when made visible. * @param animOut * Animation to apply to the view when made non-visible. 
*/ public void toggle(Animation animIn, Animation animOut) { toggle(true, animIn, animOut); } @SuppressWarnings("deprecation") private void show(boolean animate, Animation anim) { if (getBackground() == null) { if (badgeBg == null) { badgeBg = getDefaultBackground(); } setBackgroundDrawable(badgeBg); } applyLayoutParams(); if (animate) { this.startAnimation(anim); } this.setVisibility(View.VISIBLE); isShown = true; } private void hide(boolean animate, Animation anim) { this.setVisibility(View.GONE); if (animate) { this.startAnimation(anim); } isShown = false; } private void toggle(boolean animate, Animation animIn, Animation animOut) { if (isShown) { hide(animate && (animOut != null), animOut); } else { show(animate && (animIn != null), animIn); } } /** * Increment the numeric badge label. If the current badge label cannot be * converted to an integer value, its label will be set to "0". * * @param offset * the increment offset. */ public int increment(int offset) { CharSequence txt = getText(); int i; if (txt != null) { try { i = Integer.parseInt(txt.toString()); } catch (NumberFormatException e) { i = 0; } } else { i = 0; } i = i + offset; setText(String.valueOf(i)); return i; } /** * Decrement the numeric badge label. If the current badge label cannot be * converted to an integer value, its label will be set to "0". * * @param offset * the decrement offset. 
*/ public int decrement(int offset) { return increment(-offset); } private ShapeDrawable getDefaultBackground() { int r = dipToPixels(DEFAULT_CORNER_RADIUS_DIP); float[] outerR = new float[] { r, r, r, r, r, r, r, r }; RoundRectShape rr = new RoundRectShape(outerR, null, null); ShapeDrawable drawable = new ShapeDrawable(rr); drawable.getPaint().setColor(badgeColor); return drawable; } private void applyLayoutParams() { FrameLayout.LayoutParams lp = new FrameLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT); switch (badgePosition) { case POSITION_TOP_LEFT: lp.gravity = Gravity.LEFT | Gravity.TOP; lp.setMargins(badgeMarginH, badgeMarginV, 0, 0); break; case POSITION_TOP_RIGHT: lp.gravity = Gravity.RIGHT | Gravity.TOP; lp.setMargins(0, badgeMarginV, badgeMarginH, 0); break; case POSITION_BOTTOM_LEFT: lp.gravity = Gravity.LEFT | Gravity.BOTTOM; lp.setMargins(badgeMarginH, 0, 0, badgeMarginV); break; case POSITION_BOTTOM_RIGHT: lp.gravity = Gravity.RIGHT | Gravity.BOTTOM; lp.setMargins(0, 0, badgeMarginH, badgeMarginV); break; case POSITION_CENTER: lp.gravity = Gravity.CENTER; lp.setMargins(0, 0, 0, 0); break; default: break; } setLayoutParams(lp); } /** * Returns the target View this badge has been attached to. * */ public View getTarget() { return target; } /** * Is this badge currently visible in the UI? * */ @Override public boolean isShown() { return isShown; } /** * Returns the positioning of this badge. * * one of POSITION_TOP_LEFT, POSITION_TOP_RIGHT, POSITION_BOTTOM_LEFT, * POSITION_BOTTOM_RIGHT, POSTION_CENTER. * */ public int getBadgePosition() { return badgePosition; } /** * Set the positioning of this badge. * * @param layoutPosition * one of POSITION_TOP_LEFT, POSITION_TOP_RIGHT, * POSITION_BOTTOM_LEFT, POSITION_BOTTOM_RIGHT, POSTION_CENTER. * */ public void setBadgePosition(int layoutPosition) { this.badgePosition = layoutPosition; } /** * Returns the horizontal margin from the target View that is applied to * this badge. 
* */ public int getHorizontalBadgeMargin() { return badgeMarginH; } /** * Returns the vertical margin from the target View that is applied to this * badge. * */ public int getVerticalBadgeMargin() { return badgeMarginV; } /** * Set the horizontal/vertical margin from the target View that is applied * to this badge. * * @param badgeMargin * the margin in pixels. */ public void setBadgeMargin(int badgeMargin) { this.badgeMarginH = badgeMargin; this.badgeMarginV = badgeMargin; } /** * Set the horizontal/vertical margin from the target View that is applied * to this badge. * * @param horizontal * margin in pixels. * @param vertical * margin in pixels. */ public void setBadgeMargin(int horizontal, int vertical) { this.badgeMarginH = horizontal; this.badgeMarginV = vertical; } /** * Returns the color value of the badge background. * */ public int getBadgeBackgroundColor() { return badgeColor; } /** * Set the color value of the badge background. * * @param badgeColor * the badge background color. */ public void setBadgeBackgroundColor(int badgeColor) { this.badgeColor = badgeColor; badgeBg = getDefaultBackground(); } private int dipToPixels(int dip) { Resources r = getResources(); float px = TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, dip, r.getDisplayMetrics()); return (int) px; } }
package com.ardnezar.lookapp; import android.util.Log; import com.github.nkzawa.emitter.Emitter; import com.github.nkzawa.socketio.client.IO; import com.github.nkzawa.socketio.client.Socket; import org.json.JSONException; import org.json.JSONObject; import org.webrtc.AudioSource; import org.webrtc.CameraEnumerationAndroid; import org.webrtc.DataChannel; import org.webrtc.IceCandidate; import org.webrtc.MediaConstraints; import org.webrtc.MediaStream; import org.webrtc.PeerConnection; import org.webrtc.PeerConnectionFactory; import org.webrtc.SdpObserver; import org.webrtc.SessionDescription; import org.webrtc.VideoCapturer; import org.webrtc.VideoCapturerAndroid; import org.webrtc.VideoSource; import java.net.URISyntaxException; import java.util.HashMap; import java.util.LinkedList; //import java.net.Socket; public class WebRtcClient { private final static String TAG = WebRtcClient.class.getCanonicalName(); private final static int MAX_PEER = 2; private boolean[] endPoints = new boolean[MAX_PEER]; private PeerConnectionFactory factory; private HashMap<String, Peer> peers = new HashMap<>(); private LinkedList<PeerConnection.IceServer> iceServers = new LinkedList<>(); private PeerConnectionClient.PeerConnectionParameters pcParams; private MediaConstraints pcConstraints = new MediaConstraints(); private MediaStream localMS; private VideoSource videoSource; private RtcListener mListener; private Socket client; /** * Implement this interface to be notified of events. 
*/ public interface RtcListener{ void onCallReady(String callId); void onStatusChanged(String newStatus); void onLocalStream(MediaStream localStream); void onAddRemoteStream(MediaStream remoteStream, int endPoint); void onRemoveRemoteStream(int endPoint); } private interface Command{ void execute(String peerId, JSONObject payload) throws JSONException; } private class CreateOfferCommand implements Command{ public void execute(String peerId, JSONObject payload) throws JSONException { Log.d(TAG,"CreateOfferCommand"); Peer peer = peers.get(peerId); peer.pc.createOffer(peer, pcConstraints); } } private class CreateAnswerCommand implements Command{ public void execute(String peerId, JSONObject payload) throws JSONException { Log.d(TAG,"CreateAnswerCommand"); Peer peer = peers.get(peerId); SessionDescription sdp = new SessionDescription( SessionDescription.Type.fromCanonicalForm(payload.getString("type")), payload.getString("sdp") ); peer.pc.setRemoteDescription(peer, sdp); peer.pc.createAnswer(peer, pcConstraints); } } private class SetRemoteSDPCommand implements Command{ public void execute(String peerId, JSONObject payload) throws JSONException { Log.d(TAG,"SetRemoteSDPCommand"); Peer peer = peers.get(peerId); SessionDescription sdp = new SessionDescription( SessionDescription.Type.fromCanonicalForm(payload.getString("type")), payload.getString("sdp") ); peer.pc.setRemoteDescription(peer, sdp); } } private class AddIceCandidateCommand implements Command{ public void execute(String peerId, JSONObject payload) throws JSONException { Log.d(TAG,"AddIceCandidateCommand"); PeerConnection pc = peers.get(peerId).pc; if (pc.getRemoteDescription() != null) { IceCandidate candidate = new IceCandidate( payload.getString("id"), payload.getInt("label"), payload.getString("candidate") ); pc.addIceCandidate(candidate); } } } /** * Send a message through the signaling server * * @param to id of recipient * @param type type of message * @param payload payload of message * @throws 
JSONException */ public void sendMessage(String to, String type, JSONObject payload) throws JSONException { JSONObject message = new JSONObject(); message.put("to", to); message.put("type", type); message.put("payload", payload); client.emit("message", message); } private class MessageHandler { private HashMap<String, Command> commandMap; private MessageHandler() { this.commandMap = new HashMap<>(); commandMap.put("init", new CreateOfferCommand()); commandMap.put("offer", new CreateAnswerCommand()); commandMap.put("answer", new SetRemoteSDPCommand()); commandMap.put("candidate", new AddIceCandidateCommand()); } private Emitter.Listener onMessage = new Emitter.Listener() { @Override public void call(Object... args) { JSONObject data = (JSONObject) args[0]; try { String from = data.getString("from"); String type = data.getString("type"); JSONObject payload = null; if(!type.equals("init")) { payload = data.getJSONObject("payload"); } // if peer is unknown, try to add him if(!peers.containsKey(from)) { // if MAX_PEER is reach, ignore the call int endPoint = findEndPoint(); if(endPoint != MAX_PEER) { Peer peer = addPeer(from, endPoint); peer.pc.addStream(localMS); commandMap.get(type).execute(from, payload); } } else { commandMap.get(type).execute(from, payload); } } catch (JSONException e) { e.printStackTrace(); } } }; private Emitter.Listener onId = new Emitter.Listener() { @Override public void call(Object... 
args) { String id = (String) args[0]; mListener.onCallReady(id); } }; } private class Peer implements SdpObserver, PeerConnection.Observer{ private PeerConnection pc; private String id; private int endPoint; @Override public void onCreateSuccess(final SessionDescription sdp) { // TODO: modify sdp to use pcParams prefered codecs try { JSONObject payload = new JSONObject(); payload.put("type", sdp.type.canonicalForm()); payload.put("sdp", sdp.description); sendMessage(id, sdp.type.canonicalForm(), payload); pc.setLocalDescription(Peer.this, sdp); } catch (JSONException e) { e.printStackTrace(); } } @Override public void onSetSuccess() {} @Override public void onCreateFailure(String s) {} @Override public void onSetFailure(String s) {} @Override public void onSignalingChange(PeerConnection.SignalingState signalingState) {} @Override public void onIceConnectionChange(PeerConnection.IceConnectionState iceConnectionState) { if(iceConnectionState == PeerConnection.IceConnectionState.DISCONNECTED) { removePeer(id); mListener.onStatusChanged("DISCONNECTED"); } } @Override public void onIceConnectionReceivingChange(boolean b) { } @Override public void onIceGatheringChange(PeerConnection.IceGatheringState iceGatheringState) {} @Override public void onIceCandidate(final IceCandidate candidate) { try { JSONObject payload = new JSONObject(); payload.put("label", candidate.sdpMLineIndex); payload.put("id", candidate.sdpMid); payload.put("candidate", candidate.sdp); sendMessage(id, "candidate", payload); } catch (JSONException e) { e.printStackTrace(); } } @Override public void onAddStream(MediaStream mediaStream) { Log.d(TAG,"onAddStream "+mediaStream.label()); // remote streams are displayed from 1 to MAX_PEER (0 is localStream) mListener.onAddRemoteStream(mediaStream, endPoint+1); } @Override public void onRemoveStream(MediaStream mediaStream) { Log.d(TAG,"onRemoveStream "+mediaStream.label()); removePeer(id); } @Override public void onDataChannel(DataChannel dataChannel) {} 
@Override public void onRenegotiationNeeded() { } public Peer(String id, int endPoint) { Log.d(TAG,"new Peer: "+id + " " + endPoint); this.pc = factory.createPeerConnection(iceServers, pcConstraints, this); this.id = id; this.endPoint = endPoint; pc.addStream(localMS); //, new MediaConstraints() mListener.onStatusChanged("CONNECTING"); } } private Peer addPeer(String id, int endPoint) { Peer peer = new Peer(id, endPoint); peers.put(id, peer); endPoints[endPoint] = true; return peer; } private void removePeer(String id) { Peer peer = peers.get(id); mListener.onRemoveRemoteStream(peer.endPoint); peer.pc.close(); peers.remove(peer.id); endPoints[peer.endPoint] = false; } public WebRtcClient(RtcListener listener, String host, PeerConnectionClient.PeerConnectionParameters params) { mListener = listener; pcParams = params; PeerConnectionFactory.initializeAndroidGlobals(listener, true, true, params.videoCodecHwAcceleration); factory = new PeerConnectionFactory(); MessageHandler messageHandler = new MessageHandler(); try { client = IO.socket(host); } catch (URISyntaxException e) { e.printStackTrace(); } client.on("id", messageHandler.onId); client.on("message", messageHandler.onMessage); client.connect(); iceServers.add(new PeerConnection.IceServer("stun:23.21.150.121")); iceServers.add(new PeerConnection.IceServer("stun:stun.l.google.com:19302")); pcConstraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true")); pcConstraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveVideo", "true")); pcConstraints.optional.add(new MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "true")); } /** * Call this method in Activity.onPause() */ public void onPause() { if(videoSource != null) videoSource.stop(); } /** * Call this method in Activity.onResume() */ public void onResume() { if(videoSource != null) videoSource.restart(); } /** * Call this method in Activity.onDestroy() */ public void onDestroy() { for (Peer peer : peers.values()) 
{ peer.pc.dispose(); } videoSource.dispose(); factory.dispose(); client.disconnect(); client.close(); } private int findEndPoint() { for(int i = 0; i < MAX_PEER; i++) if (!endPoints[i]) return i; return MAX_PEER; } /** * Start the client. * * Set up the local stream and notify the signaling server. * Call this method after onCallReady. * * @param name client name */ public void start(String name){ setCamera(); try { JSONObject message = new JSONObject(); message.put("name", name); client.emit("readyToStream", message); } catch (JSONException e) { e.printStackTrace(); } } private void setCamera(){ localMS = factory.createLocalMediaStream("ARDAMS"); if(pcParams.videoCallEnabled){ MediaConstraints videoConstraints = new MediaConstraints(); videoConstraints.mandatory.add(new MediaConstraints.KeyValuePair("maxHeight", Integer.toString(pcParams.videoHeight))); videoConstraints.mandatory.add(new MediaConstraints.KeyValuePair("maxWidth", Integer.toString(pcParams.videoWidth))); videoConstraints.mandatory.add(new MediaConstraints.KeyValuePair("maxFrameRate", Integer.toString(pcParams.videoFps))); videoConstraints.mandatory.add(new MediaConstraints.KeyValuePair("minFrameRate", Integer.toString(pcParams.videoFps))); videoSource = factory.createVideoSource(getVideoCapturer(), videoConstraints); localMS.addTrack(factory.createVideoTrack("ARDAMSv0", videoSource)); } AudioSource audioSource = factory.createAudioSource(new MediaConstraints()); localMS.addTrack(factory.createAudioTrack("ARDAMSa0", audioSource)); mListener.onLocalStream(localMS); } private VideoCapturer getVideoCapturer() { String frontCameraDeviceName = CameraEnumerationAndroid.getDeviceName(0); return VideoCapturerAndroid.create(frontCameraDeviceName); } }
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.auditmanager.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.AmazonWebServiceRequest;

/**
 * Request object for the BatchCreateDelegationByAssessment operation.
 *
 * NOTE: this class is code-generated ({@code @Generated}); do not hand-edit its
 * logic — changes would be lost on the next SDK regeneration.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/auditmanager-2017-07-25/BatchCreateDelegationByAssessment"
 *      target="_top">AWS API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class BatchCreateDelegationByAssessmentRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /**
     * <p>
     * The API request to batch create delegations in Audit Manager.
     * </p>
     */
    private java.util.List<CreateDelegationRequest> createDelegationRequests;
    /**
     * <p>
     * The identifier for the assessment.
     * </p>
     */
    private String assessmentId;

    /**
     * <p>
     * The API request to batch create delegations in Audit Manager.
     * </p>
     *
     * @return The API request to batch create delegations in Audit Manager.
     */

    public java.util.List<CreateDelegationRequest> getCreateDelegationRequests() {
        return createDelegationRequests;
    }

    /**
     * <p>
     * The API request to batch create delegations in Audit Manager.
     * </p>
     *
     * @param createDelegationRequests
     *        The API request to batch create delegations in Audit Manager.
     */

    public void setCreateDelegationRequests(java.util.Collection<CreateDelegationRequest> createDelegationRequests) {
        if (createDelegationRequests == null) {
            this.createDelegationRequests = null;
            return;
        }

        // defensive copy: detaches the stored list from the caller's collection
        this.createDelegationRequests = new java.util.ArrayList<CreateDelegationRequest>(createDelegationRequests);
    }

    /**
     * <p>
     * The API request to batch create delegations in Audit Manager.
     * </p>
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setCreateDelegationRequests(java.util.Collection)} or
     * {@link #withCreateDelegationRequests(java.util.Collection)} if you want to override the existing values.
     * </p>
     *
     * @param createDelegationRequests
     *        The API request to batch create delegations in Audit Manager.
     * @return Returns a reference to this object so that method calls can be chained together.
     */

    public BatchCreateDelegationByAssessmentRequest withCreateDelegationRequests(CreateDelegationRequest... createDelegationRequests) {
        // lazily initialize the backing list, presized for the varargs
        if (this.createDelegationRequests == null) {
            setCreateDelegationRequests(new java.util.ArrayList<CreateDelegationRequest>(createDelegationRequests.length));
        }
        for (CreateDelegationRequest ele : createDelegationRequests) {
            this.createDelegationRequests.add(ele);
        }
        return this;
    }

    /**
     * <p>
     * The API request to batch create delegations in Audit Manager.
     * </p>
     *
     * @param createDelegationRequests
     *        The API request to batch create delegations in Audit Manager.
     * @return Returns a reference to this object so that method calls can be chained together.
     */

    public BatchCreateDelegationByAssessmentRequest withCreateDelegationRequests(java.util.Collection<CreateDelegationRequest> createDelegationRequests) {
        setCreateDelegationRequests(createDelegationRequests);
        return this;
    }

    /**
     * <p>
     * The identifier for the assessment.
     * </p>
     *
     * @param assessmentId
     *        The identifier for the assessment.
     */

    public void setAssessmentId(String assessmentId) {
        this.assessmentId = assessmentId;
    }

    /**
     * <p>
     * The identifier for the assessment.
     * </p>
     *
     * @return The identifier for the assessment.
     */

    public String getAssessmentId() {
        return this.assessmentId;
    }

    /**
     * <p>
     * The identifier for the assessment.
     * </p>
     *
     * @param assessmentId
     *        The identifier for the assessment.
     * @return Returns a reference to this object so that method calls can be chained together.
     */

    public BatchCreateDelegationByAssessmentRequest withAssessmentId(String assessmentId) {
        setAssessmentId(assessmentId);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getCreateDelegationRequests() != null)
            sb.append("CreateDelegationRequests: ").append(getCreateDelegationRequests()).append(",");
        if (getAssessmentId() != null)
            sb.append("AssessmentId: ").append(getAssessmentId());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;

        if (obj instanceof BatchCreateDelegationByAssessmentRequest == false)
            return false;
        BatchCreateDelegationByAssessmentRequest other = (BatchCreateDelegationByAssessmentRequest) obj;
        // field-by-field comparison; XOR catches the "exactly one side null" case
        if (other.getCreateDelegationRequests() == null ^ this.getCreateDelegationRequests() == null)
            return false;
        if (other.getCreateDelegationRequests() != null && other.getCreateDelegationRequests().equals(this.getCreateDelegationRequests()) == false)
            return false;
        if (other.getAssessmentId() == null ^ this.getAssessmentId() == null)
            return false;
        if (other.getAssessmentId() != null && other.getAssessmentId().equals(this.getAssessmentId()) == false)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;

        hashCode = prime * hashCode + ((getCreateDelegationRequests() == null) ? 0 : getCreateDelegationRequests().hashCode());
        hashCode = prime * hashCode + ((getAssessmentId() == null) ? 0 : getAssessmentId().hashCode());
        return hashCode;
    }

    @Override
    public BatchCreateDelegationByAssessmentRequest clone() {
        return (BatchCreateDelegationByAssessmentRequest) super.clone();
    }

}
/**
 *
 * Copyright 2003-2004 The Apache Software Foundation
 *
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 */

package org.apache.geronimo.util.asn1;

import java.io.ByteArrayInputStream;
import java.io.EOFException;
import java.io.FilterInputStream;
import java.io.IOException;
import java.io.InputStream;

/**
 * Don't use this class. It will eventually disappear, use ASN1InputStream.
 * <br>
 * This class is scheduled for removal.
 * @deprecated use ASN1InputStream
 */
public class DERInputStream
    extends FilterInputStream implements DERTags
{
    /**
     * @deprecated use ASN1InputStream
     */
    public DERInputStream(
        InputStream is)
    {
        super(is);
    }

    /**
     * Read a DER length field from the stream.
     *
     * @return the decoded length, or -1 for the BER indefinite-length marker
     *         (0x80).
     * @throws IOException on EOF, a length field longer than 4 bytes, or a
     *         negative (corrupted) length.
     */
    protected int readLength()
        throws IOException
    {
        int length = read();
        if (length < 0)
        {
            throw new IOException("EOF found when length expected");
        }

        if (length == 0x80)
        {
            return -1;      // indefinite-length encoding
        }

        // high bit set: long form — low 7 bits give the number of length octets
        if (length > 127)
        {
            int size = length & 0x7f;

            if (size > 4)
            {
                throw new IOException("DER length more than 4 bytes");
            }

            length = 0;
            for (int i = 0; i < size; i++)
            {
                int next = read();

                if (next < 0)
                {
                    throw new IOException("EOF found reading length");
                }

                length = (length << 8) + next;
            }

            if (length < 0)
            {
                // FIX: typo in the original message ("steam" -> "stream")
                throw new IOException("corrupted stream - negative length found");
            }
        }

        return length;
    }

    /**
     * Fill the given buffer completely from the underlying stream.
     *
     * @throws EOFException if the stream ends before the buffer is full.
     */
    protected void readFully(
        byte[]  bytes)
        throws IOException
    {
        int     left = bytes.length;

        if (left == 0)
        {
            return;
        }

        while (left > 0)
        {
            int l = read(bytes, bytes.length - left, left);

            if (l < 0)
            {
                throw new EOFException("unexpected end of stream");
            }

            left -= l;
        }
    }

    /**
     * build an object given its tag and a byte stream to construct it
     * from.
     */
    protected DERObject buildObject(
        int     tag,
        byte[]  bytes)
        throws IOException
    {
        switch (tag)
        {
        case NULL:
            // NOTE(review): legacy behaviour returns null rather than a
            // DERNull instance — callers appear to depend on this; kept as-is.
            return null;
        case SEQUENCE | CONSTRUCTED:
            ByteArrayInputStream bIn = new ByteArrayInputStream(bytes);
            BERInputStream dIn = new BERInputStream(bIn);
            DERConstructedSequence seq = new DERConstructedSequence();

            try
            {
                // read child objects until the buffer is exhausted
                for (;;)
                {
                    DERObject obj = dIn.readObject();

                    seq.addObject(obj);
                }
            }
            catch (EOFException ex)
            {
                return seq;
            }
        case SET | CONSTRUCTED:
            bIn = new ByteArrayInputStream(bytes);
            dIn = new BERInputStream(bIn);

            ASN1EncodableVector v = new ASN1EncodableVector();

            try
            {
                for (;;)
                {
                    DERObject obj = dIn.readObject();

                    v.add(obj);
                }
            }
            catch (EOFException ex)
            {
                return new DERConstructedSet(v);
            }
        case BOOLEAN:
            return new DERBoolean(bytes);
        case INTEGER:
            return new DERInteger(bytes);
        case ENUMERATED:
            return new DEREnumerated(bytes);
        case OBJECT_IDENTIFIER:
            return new DERObjectIdentifier(bytes);
        case BIT_STRING:
            // first octet is the pad-bit count, remainder is the bit data
            int     padBits = bytes[0];
            byte[]  data = new byte[bytes.length - 1];

            System.arraycopy(bytes, 1, data, 0, bytes.length - 1);

            return new DERBitString(data, padBits);
        case UTF8_STRING:
            return new DERUTF8String(bytes);
        case PRINTABLE_STRING:
            return new DERPrintableString(bytes);
        case IA5_STRING:
            return new DERIA5String(bytes);
        case T61_STRING:
            return new DERT61String(bytes);
        case VISIBLE_STRING:
            return new DERVisibleString(bytes);
        case UNIVERSAL_STRING:
            return new DERUniversalString(bytes);
        case GENERAL_STRING:
            return new DERGeneralString(bytes);
        case BMP_STRING:
            return new DERBMPString(bytes);
        case OCTET_STRING:
            return new DEROctetString(bytes);
        case UTC_TIME:
            return new DERUTCTime(bytes);
        case GENERALIZED_TIME:
            return new DERGeneralizedTime(bytes);
        default:
            //
            // with tagged object tag number is bottom 5 bits
            //
            if ((tag & TAGGED) != 0)
            {
                if ((tag & 0x1f) == 0x1f)
                {
                    throw new IOException("unsupported high tag encountered");
                }

                if (bytes.length == 0)        // empty tag!
                {
                    if ((tag & CONSTRUCTED) == 0)
                    {
                        return new DERTaggedObject(false, tag & 0x1f, new DERNull());
                    }
                    else
                    {
                        return new DERTaggedObject(false, tag & 0x1f, new DERConstructedSequence());
                    }
                }

                //
                // simple type - implicit... return an octet string
                //
                if ((tag & CONSTRUCTED) == 0)
                {
                    return new DERTaggedObject(false, tag & 0x1f, new DEROctetString(bytes));
                }

                bIn = new ByteArrayInputStream(bytes);
                dIn = new BERInputStream(bIn);

                DEREncodable dObj = dIn.readObject();

                //
                // explicitly tagged (probably!) - if it isn't we'd have to
                // tell from the context
                //
                if (dIn.available() == 0)
                {
                    return new DERTaggedObject(tag & 0x1f, dObj);
                }

                //
                // another implicit object, we'll create a sequence...
                //
                seq = new DERConstructedSequence();

                seq.addObject(dObj);

                try
                {
                    for (;;)
                    {
                        dObj = dIn.readObject();

                        seq.addObject(dObj);
                    }
                }
                catch (EOFException ex)
                {
                    // ignore --
                }

                return new DERTaggedObject(false, tag & 0x1f, seq);
            }

            return new DERUnknownTag(tag, bytes);
        }
    }

    /**
     * Read one complete DER object (tag, length, contents) from the stream.
     *
     * @throws EOFException when the stream is already at EOF.
     */
    public DERObject readObject()
        throws IOException
    {
        int tag = read();
        if (tag == -1)
        {
            throw new EOFException();
        }

        int     length = readLength();
        byte[]  bytes = new byte[length];

        readFully(bytes);

        return buildObject(tag, bytes);
    }
}
/******************************************************************************
 * Copyright (c) 2006, 2010 VMware Inc.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Public License v1.0
 * and Apache License v2.0 which accompanies this distribution.
 * The Eclipse Public License is available at
 * http://www.eclipse.org/legal/epl-v10.html and the Apache License v2.0
 * is available at http://www.opensource.org/licenses/apache2.0.php.
 * You may elect to redistribute this code under either of these licenses.
 *
 * Contributors:
 *   VMware Inc.
 *****************************************************************************/

package org.eclipse.gemini.blueprint.extender.internal.support;

import java.io.IOException;
import java.util.Collections;
import java.util.Dictionary;
import java.util.Enumeration;
import java.util.Hashtable;
import java.util.Properties;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.osgi.framework.Bundle;
import org.osgi.framework.BundleContext;
import org.osgi.framework.ServiceRegistration;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.xml.NamespaceHandlerResolver;
import org.eclipse.gemini.blueprint.extender.internal.util.BundleUtils;
import org.eclipse.gemini.blueprint.util.OsgiBundleUtils;
import org.eclipse.gemini.blueprint.util.OsgiServiceUtils;
import org.eclipse.gemini.blueprint.util.OsgiStringUtils;
import org.springframework.util.Assert;
import org.xml.sax.EntityResolver;

/**
 * Support class that deals with namespace parsers discovered inside Spring bundles.
 *
 * <p/> Tracks bundles that publish {@code META-INF/spring.handlers} /
 * {@code META-INF/spring.schemas} resources and exposes the aggregated
 * {@link NamespacePlugins} instance as OSGi {@link NamespaceHandlerResolver}
 * and {@link EntityResolver} services.
 *
 * @author Costin Leau
 */
public class NamespaceManager implements InitializingBean, DisposableBean {

	private static final Log log = LogFactory.getLog(NamespaceManager.class);

	/** The set of all namespace plugins known to the extender */
	private NamespacePlugins namespacePlugins;

	/**
	 * ServiceRegistration object returned by OSGi when registering the NamespacePlugins instance as a service
	 */
	private ServiceRegistration nsResolverRegistration, enResolverRegistration = null;

	/** OSGi Environment */
	private final BundleContext context;

	/** Extender identity ("symbolic-name|version"), used for compatibility diagnostics */
	private final String extenderInfo;

	private static final String META_INF = "META-INF/";

	private static final String SPRING_HANDLERS = "spring.handlers";

	private static final String SPRING_SCHEMAS = "spring.schemas";

	/**
	 * Constructs a new <code>NamespaceManager</code> instance.
	 *
	 * @param context containing bundle context
	 */
	public NamespaceManager(BundleContext context) {
		this.context = context;
		extenderInfo =
				context.getBundle().getSymbolicName() + "|" + OsgiBundleUtils.getBundleVersion(context.getBundle());
		this.namespacePlugins = new NamespacePlugins();
	}

	/**
	 * Registers the namespace plugin handler if this bundle defines handler mapping or schema mapping resources.
	 *
	 * <p/> This method considers only the bundle space and not the class space.
	 *
	 * @param bundle target bundle
	 * @param isLazyBundle indicator if the bundle analyzed is lazily activated
	 */
	public void maybeAddNamespaceHandlerFor(Bundle bundle, boolean isLazyBundle) {
		// Ignore system bundle
		if (OsgiBundleUtils.isSystemBundle(bundle)) {
			return;
		}

		// Ignore non-wired Spring DM bundles
		if ("org.eclipse.gemini.blueprint.core".equals(bundle.getSymbolicName())
				&& !bundle.equals(BundleUtils.getDMCoreBundle(context))) {
			return;
		}

		boolean debug = log.isDebugEnabled();
		boolean trace = log.isTraceEnabled();

		// FIXME: Blueprint uber bundle temporary hack
		// since embedded libraries are not discovered by findEntries and inlining them doesn't work
		// (due to resource classes such as namespace handler definitions)
		// we use getResource
		boolean hasHandlers = false, hasSchemas = false;

		if (trace) {
			log.trace("Inspecting bundle " + bundle + " for Spring namespaces");
		}

		// extender/RFC 124 bundle
		if (context.getBundle().equals(bundle)) {
			try {
				Enumeration<?> handlers = bundle.getResources(META_INF + SPRING_HANDLERS);
				Enumeration<?> schemas = bundle.getResources(META_INF + SPRING_SCHEMAS);

				hasHandlers = handlers != null;
				hasSchemas = schemas != null;

				// FIX: previously logged Collections.list(schemas) while claiming
				// to log handlers; list the handlers enumeration instead
				if (hasHandlers && debug) {
					log.debug("Found namespace handlers: " + Collections.list(handlers));
				}
			}
			catch (IOException ioe) {
				log.warn("Cannot discover own namespaces", ioe);
			}
		}
		else {
			hasHandlers = bundle.findEntries(META_INF, SPRING_HANDLERS, false) != null;
			hasSchemas = bundle.findEntries(META_INF, SPRING_SCHEMAS, false) != null;
		}

		// if the bundle defines handlers
		if (hasHandlers) {
			if (trace)
				log.trace("Bundle " + bundle + " provides Spring namespace handlers...");

			if (isLazyBundle) {
				this.namespacePlugins.addPlugin(bundle, isLazyBundle, true);
			}
			else {
				// check type compatibility between the bundle's and spring-extender's spring version
				if (hasCompatibleNamespaceType(bundle)) {
					this.namespacePlugins.addPlugin(bundle, isLazyBundle, false);
				}
				else {
					if (debug)
						log.debug("Bundle [" + OsgiStringUtils.nullSafeNameAndSymName(bundle)
								+ "] declares namespace handlers but is not compatible with extender [" + extenderInfo
								+ "]; ignoring...");
				}
			}
		}
		else {
			// bundle declares only schemas, add it though the handlers might not be compatible...
			if (hasSchemas) {
				this.namespacePlugins.addPlugin(bundle, isLazyBundle, false);
				if (trace)
					log.trace("Bundle " + bundle + " provides Spring schemas...");
			}
		}
	}

	private boolean hasCompatibleNamespaceType(Bundle bundle) {
		return namespacePlugins.isTypeCompatible(bundle);
	}

	/**
	 * Removes the target bundle from the set of those known to provide handler or schema mappings.
	 *
	 * @param bundle handler bundle
	 */
	public void maybeRemoveNameSpaceHandlerFor(Bundle bundle) {
		Assert.notNull(bundle);
		boolean removed = this.namespacePlugins.removePlugin(bundle);
		if (removed && log.isDebugEnabled()) {
			log.debug("Removed namespace handler resolver for " + OsgiStringUtils.nullSafeNameAndSymName(bundle));
		}
	}

	/**
	 * Registers the NamespacePlugins instance as an Osgi Resolver service
	 */
	private void registerResolverServices() {
		if (log.isDebugEnabled()) {
			log.debug("Registering Spring NamespaceHandlerResolver and EntityResolver...");
		}

		Bundle bnd = BundleUtils.getDMCoreBundle(context);
		Dictionary<String, Object> props = null;
		if (bnd != null) {
			props = new Hashtable<String, Object>();
			props.put(BundleUtils.DM_CORE_ID, bnd.getBundleId());
			props.put(BundleUtils.DM_CORE_TS, bnd.getLastModified());
		}
		nsResolverRegistration =
				context.registerService(new String[] { NamespaceHandlerResolver.class.getName() },
						this.namespacePlugins, props);

		enResolverRegistration =
				context.registerService(new String[] { EntityResolver.class.getName() }, this.namespacePlugins, props);
	}

	/**
	 * Unregisters the NamespaceHandler and EntityResolver service
	 */
	private void unregisterResolverService() {
		// FIX: the original used short-circuit "result || unregisterService(...)",
		// which skipped unregistering the EntityResolver service whenever the
		// NamespaceHandlerResolver unregistration succeeded (service leak).
		// Unregister both unconditionally.
		boolean nsUnregistered = OsgiServiceUtils.unregisterService(nsResolverRegistration);
		boolean enUnregistered = OsgiServiceUtils.unregisterService(enResolverRegistration);

		if (nsUnregistered || enUnregistered) {
			if (log.isDebugEnabled())
				log.debug("Unregistering Spring NamespaceHandler and EntityResolver service");
		}

		this.nsResolverRegistration = null;
		this.enResolverRegistration = null;
	}

	public NamespacePlugins getNamespacePlugins() {
		return namespacePlugins;
	}

	//
	// Lifecycle methods
	//

	public void afterPropertiesSet() {
		registerResolverServices();
	}

	public void destroy() {
		unregisterResolverService();
		this.namespacePlugins.destroy();
		this.namespacePlugins = null;
	}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.gemstone.gemfire.internal.jta;

/**
 * <p>
 * GlobalTransaction is the JTA concept of a Global Transaction.
 * </p>
 *
 * @author Mitul Bid
 *
 * @since 4.0
 */
import com.gemstone.gemfire.i18n.LogWriterI18n;
import com.gemstone.gemfire.internal.i18n.LocalizedStrings;

import java.util.*;
import javax.transaction.xa.*;
import javax.transaction.*;

import com.gemstone.gemfire.SystemFailure;
import com.gemstone.gemfire.distributed.DistributedSystemDisconnectedException;
import com.gemstone.gemfire.distributed.internal.DM;
import com.gemstone.gemfire.distributed.internal.InternalDistributedSystem;

public class GlobalTransaction {

  public static boolean DISABLE_TRANSACTION_TIMEOUT_SETTING = false;

  /**
   * GTid is a byte array identifying every instance of a global transaction
   * uniquely
   */
  private final byte[] GTid;

  /**
   * An instance of the XidImpl class which implements Xid
   */
  private final Xid xid;

  /**
   * Status represents the state the Global Transaction is in
   */
  private int status = Status.STATUS_UNKNOWN;

  /**
   * A set of XAResources associated with the Global Transaction.
   * Values are Boolean flags: TRUE while the resource is actively enlisted
   * (started and not yet ended/suspended), FALSE once delisted/suspended.
   */
  private Map resourceMap = Collections.synchronizedMap(new HashMap());

  /**
   * List of local Transactions participating in a Global Transaction.
   */
  private List transactions = Collections.synchronizedList(new ArrayList());

  /**
   * A counter to uniquely generate the GTid
   */
  private static long mCounter = 1;

  /**
   * A timer Task for Transaction TimeOut
   */
  private boolean timedOut = false;

  /**
   * expirationTime for the Transaction
   */
  private volatile long expirationTime;

  /*
   * to enable VERBOSE = true pass System parameter jta.VERBOSE = true while
   * running the test.
   */
  private static boolean VERBOSE = Boolean.getBoolean("jta.VERBOSE");

  /**
   * Construct a new Global Transaction. Generates the GTid and also the xid
   */
  public GlobalTransaction() throws SystemException {
    try {
      GTid = generateGTid();
      xid = XidImpl.createXid(GTid);
    }
    catch (Exception e) {
      LogWriterI18n writer = TransactionUtils.getLogWriterI18n();
      if (writer.severeEnabled())
        writer.severe(
            LocalizedStrings.GlobalTransaction_GLOBALTRANSACTION_CONSTRUCTOR_ERROR_WHILE_TRYING_TO_CREATE_XID_DUE_TO_0,
            e, e);
      String exception = LocalizedStrings.GlobalTransaction_GLOBALTRANSACTION_CONSTRUCTOR_ERROR_WHILE_TRYING_TO_CREATE_XID_DUE_TO_0.toLocalizedString(new Object[] {e});
      throw new SystemException(exception);
    }
  }

  /**
   * Add a transaction to the list of transactions participating in this global
   * Transaction The list of transactions is being maintained so that we can
   * remove the local transaction to global transaction entries from the map
   * being maintained by the Transaction Manager
   *
   * @param txn Transaction instance which is participating in this Global
   *          Transaction
   */
  public void addTransaction(Transaction txn) throws SystemException {
    if (txn == null) {
      String exception = LocalizedStrings.GlobalTransaction_GLOBALTRANSACTION_ADDTRANSACTION_CANNOT_ADD_A_NULL_TRANSACTION.toLocalizedString();
      LogWriterI18n writer = TransactionUtils.getLogWriterI18n();
      if (VERBOSE)
        writer.fine(exception);
      throw new SystemException(exception);
    }
    transactions.add(txn);
  }

  /**
   * Delists the XAResources associated with the Global Transaction and
   * Completes the Global transaction associated with the current thread. If any
   * exception is encountered, rollback is called on the current transaction.
   *
   * @.concurrency Some paths invoke this method after taking a lock on "this" while
   * other paths invoke this method without taking a lock on "this". Since both types of path do
   * act on the resourceMap collection, it is being protected by a lock on resourceMap too.
   *
   * @throws RollbackException -
   *           Thrown to indicate that the transaction has been rolled back
   *           rather than committed.
   * @throws HeuristicMixedException -
   *           Thrown to indicate that a heuristic decision was made and that
   *           some relevant updates have been committed while others have been
   *           rolled back.
   * @throws HeuristicRollbackException -
   *           Thrown to indicate that a heuristic decision was made and that
   *           all relevant updates have been rolled back.
   * @throws java.lang.SecurityException -
   *           Thrown to indicate that the thread is not allowed to commit the
   *           transaction.
   * @throws java.lang.IllegalStateException -
   *           Thrown if the current thread is not associated with a
   *           transaction.
   * @throws SystemException -
   *           Thrown if the transaction manager encounters an unexpected error
   *           condition.
   *
   * @see javax.transaction.TransactionManager#commit()
   */
  //Asif : Changed the return type to int indicating the nature of Exception
  // encountered during commit
  public void commit() throws RollbackException, HeuristicMixedException,
      HeuristicRollbackException, SecurityException, SystemException {
    LogWriterI18n writer = TransactionUtils.getLogWriterI18n();
    try {
      XAResource xar = null;
      XAResource xar1 = null;
      int loop = 0;
      Boolean isActive = Boolean.FALSE;
      synchronized (this.resourceMap) {
        Map.Entry entry;
        Iterator iterator = resourceMap.entrySet().iterator();
        while (iterator.hasNext()) {
          try {
            entry = (Map.Entry)iterator.next();
            xar = (XAResource)entry.getKey();
            isActive = (Boolean)entry.getValue();
            if (loop == 0)
              xar1 = xar;
            loop++;
            if (isActive.booleanValue()) {
              // delistResource(xar, XAResource.TMSUCCESS);
              xar.end(xid, XAResource.TMSUCCESS);
              entry.setValue(Boolean.FALSE);
            }
          }
          catch (Exception e) {
            if (VERBOSE)
              writer.info(
                  LocalizedStrings.ONE_ARG,
                  "GlobalTransaction::commit:Exception in delisting XAResource",
                  e);
          }
        }
      }
      // only one resource manager is supported, so a one-phase commit on the
      // first resource commits the whole transaction
      if (xar1 != null)
        xar1.commit(xid, true);
      status = Status.STATUS_COMMITTED;
      if (VERBOSE)
        writer
            .fine("GlobalTransaction::commit:Transaction committed successfully");
    }
    catch (Exception e) {
      status = Status.STATUS_ROLLING_BACK;
      try {
        rollback();
      }
      catch (VirtualMachineError err) {
        SystemFailure.initiateFailure(err);
        // If this ever returns, rethrow the error.  We're poisoned
        // now, so don't let this thread continue.
        throw err;
      }
      catch (Throwable t) {
        // Whenever you catch Error or Throwable, you must also
        // catch VirtualMachineError (see above).  However, there is
        // _still_ a possibility that you are dealing with a cascading
        // error condition, so you also need to check to see if the JVM
        // is still usable:
        SystemFailure.checkFailure();
        // we will throw an error later, make sure that the synchronizations rollback
        status = Status.STATUS_ROLLEDBACK;
        String exception = LocalizedStrings.GlobalTransaction_GLOBALTRANSACTION_COMMIT_ERROR_IN_COMMITTING_BUT_TRANSACTION_COULD_NOT_BE_ROLLED_BACK_DUE_TO_EXCEPTION_0.toLocalizedString(t);
        if (VERBOSE)
          writer.fine(exception, t);
        SystemException sysEx = new SystemException(exception);
        sysEx.initCause(t);
        throw sysEx;
      }
      String exception = LocalizedStrings.GlobalTransaction_GLOBALTRANSACTION_COMMIT_ERROR_IN_COMMITTING_THE_TRANSACTION_TRANSACTION_ROLLED_BACK_EXCEPTION_0_1.toLocalizedString(new Object[] {e, " " + (e instanceof XAException ? ("Error Code =" + ((XAException)e).errorCode) : "")});
      if (VERBOSE)
        writer.fine(exception, e);
      RollbackException rbEx = new RollbackException(exception);
      rbEx.initCause(e);
      throw rbEx;
    }
    finally {
      //Map globalTransactions = tm.getGlobalTransactionMap();
      TransactionManagerImpl.getTransactionManager().cleanGlobalTransactionMap(
          transactions);
      //Asif : Clear the list of transactions
      transactions.clear();
    }
  }

  /**
   * Delists the XAResources associated with the Global Transaction and Roll
   * back the transaction associated with the current thread.
   *
   * @throws java.lang.SecurityException -
   *           Thrown to indicate that the thread is not allowed to roll back
   *           the transaction.
   * @throws java.lang.IllegalStateException -
   *           Thrown if the current thread is not associated with a
   *           transaction.
   * @throws SystemException -
   *           Thrown if the transaction manager encounters an unexpected error
   *           condition.
   *
   * @.concurrency Some paths invoke this method after taking a lock on "this" while
   * other paths invoke this method without taking a lock on "this". Since both types of path do
   * act on the resourceMap collection, it is being protected by a lock on resourceMap too.
   *
   * @see javax.transaction.TransactionManager#rollback()
   */
  public void rollback() throws IllegalStateException, SystemException {
    LogWriterI18n writer = TransactionUtils.getLogWriterI18n();
    try {
      XAResource xar = null;
      XAResource xar1 = null;
      int loop = 0;
      synchronized (this.resourceMap) {
        Iterator iterator = resourceMap.entrySet().iterator();
        Boolean isActive = Boolean.FALSE;
        Map.Entry entry;
        while (iterator.hasNext()) {
          try {
            entry = (Map.Entry)iterator.next();
            xar = (XAResource)entry.getKey();
            isActive = (Boolean)entry.getValue();
            if (loop == 0) {
              xar1 = xar;
            }
            loop++;
            if (isActive.booleanValue()) {
              // delistResource(xar, XAResource.TMSUCCESS);
              xar.end(xid, XAResource.TMSUCCESS);
              entry.setValue(Boolean.FALSE);
            }
          }
          catch (Exception e) {
            if (VERBOSE)
              writer.info(
                  LocalizedStrings.ONE_ARG,
                  "GlobalTransaction::rollback:Exception in delisting XAResource",
                  e);
          }
        }
      }
      if (xar1 != null)
        xar1.rollback(xid);
      status = Status.STATUS_ROLLEDBACK;
      if (VERBOSE)
        writer.fine("Transaction rolled back successfully");
    }
    catch (Exception e) {
      // we will throw an error later, make sure that the synchronizations rollback
      status = Status.STATUS_ROLLEDBACK;
      String exception = LocalizedStrings.GlobalTransaction_GLOBALTRANSACTION_ROLLBACK_ROLLBACK_NOT_SUCCESSFUL_DUE_TO_EXCEPTION_0_1.toLocalizedString(new Object[] {e, " " + (e instanceof XAException ? ("Error Code =" + ((XAException)e).errorCode) : "")});
      if (VERBOSE)
        writer.fine(exception);
      SystemException sysEx = new SystemException(exception);
      sysEx.initCause(e);
      throw sysEx;
    }
    finally {
      // Map globalTransactions = tm.getGlobalTransactionMap();
      TransactionManagerImpl.getTransactionManager().cleanGlobalTransactionMap(
          transactions);
      // Asif : Clear the list of transactions
      transactions.clear();
    }
  }

  /**
   * Mark the state of the Global Transaction so that it can be rolled back
   */
  public void setRollbackOnly() throws IllegalStateException, SystemException {
    setStatus(Status.STATUS_MARKED_ROLLBACK);
  }

  /**
   * Get the transaction state of the Global Transaction
   */
  public int getStatus() throws SystemException {
    return status;
  }

  /**
   * Enlist the specified XAResource with this transaction. Currently only one
   * Resource Manager is being supported. enlistResource checks if there is no
   * XAResource, then enlists the current XAResource. For subsequent
   * XAResources, it checks if is the same Resource Manager. If it is, then the
   * XAResources are addded, else an exception is thrown
   *
   * @.concurrency The order of acquiring lock will be lock on "this" followed
   * by lock on resourceMap. It is possible that in some functions of this
   * class both the locks are not needed , but if the two are acquired then the realitive
   * order will always be"this" followed by resourceMap.
   *
   * @param xaRes XAResource to be enlisted
   * @return true, if resource was enlisted successfully, otherwise false.
   * @throws SystemException - Thrown if the transaction manager encounters an
   *           unexpected error condition.
   * @throws IllegalStateException - Thrown if the transaction in the target
   *           object is in the prepared state or the transaction is inactive.
   * @throws RollbackException - Thrown to indicate that the transaction has
   *           been marked for rollback only.
   *
   * @see javax.transaction.Transaction#enlistResource(javax.transaction.xa.XAResource)
   */
  public boolean enlistResource(XAResource xaRes) throws RollbackException,
      IllegalStateException, SystemException {
    XAResource xar = null;
    try {
      synchronized (this) {
        if (status == Status.STATUS_MARKED_ROLLBACK) {
          String exception = "GlobalTransaction::enlistResource::Cannot enlist resource as the transaction has been marked for rollback";
          LogWriterI18n writer = TransactionUtils.getLogWriterI18n();
          if (VERBOSE)
            writer.fine(exception);
          throw new RollbackException(exception);
        }
        else if (status != Status.STATUS_ACTIVE) {
          String exception = LocalizedStrings.GlobalTransaction_GLOBALTRANSACTION_ENLISTRESOURCE_CANNOT_ENLIST_A_RESOURCE_TO_A_TRANSACTION_WHICH_IS_NOT_ACTIVE.toLocalizedString();
          LogWriterI18n writer = TransactionUtils.getLogWriterI18n();
          if (VERBOSE)
            writer.fine(exception);
          throw new IllegalStateException(exception);
        }
        if (resourceMap.isEmpty()) {
          // first resource: start the XA branch and propagate the remaining
          // transaction timeout to the resource manager
          xaRes.start(xid, XAResource.TMNOFLAGS);
          int delay = (int) ((expirationTime - System.currentTimeMillis()) / 1000);
          try {
            if (!DISABLE_TRANSACTION_TIMEOUT_SETTING) {
              xaRes.setTransactionTimeout(delay);
            }
          }
          catch (XAException xe) {
            String exception = LocalizedStrings.GlobalTransaction_GLOBALTRANSACTION_ENLISTRESOURCE_EXCEPTION_OCCURED_IN_TRYING_TO_SET_XARESOURCE_TIMEOUT_DUE_TO_0_ERROR_CODE_1.toLocalizedString(new Object[] {xe, Integer.valueOf(xe.errorCode)});
            LogWriterI18n writer = TransactionUtils.getLogWriterI18n();
            if (VERBOSE)
              writer.fine(exception);
            throw new SystemException(exception);
          }
          resourceMap.put(xaRes, Boolean.TRUE);
        }
        else {
          synchronized (this.resourceMap) {
            Iterator iterator = resourceMap.keySet().iterator();
            xar = (XAResource) iterator.next();
          }
          if (!xar.isSameRM(xaRes)) {
            LogWriterI18n writer = TransactionUtils.getLogWriterI18n();
            if (writer.severeEnabled())
              writer.severe(
                  LocalizedStrings.GlobalTransaction_GLOBALTRANSACTIONENLISTRESOURCEONLY_ONE_RESOUCE_MANAGER_SUPPORTED);
            throw new SystemException(LocalizedStrings.GlobalTransaction_GLOBALTRANSACTIONENLISTRESOURCEONLY_ONE_RESOUCE_MANAGER_SUPPORTED.toLocalizedString());
          }
          else {
            // same resource manager: join the existing XA branch
            xaRes.start(xid, XAResource.TMJOIN);
            resourceMap.put(xaRes, Boolean.TRUE);
          }
        }
      }
    }
    catch (Exception e) {
      String addon = (e instanceof XAException ? ("Error Code =" + ((XAException) e).errorCode) : "");
      LogWriterI18n writer = TransactionUtils.getLogWriterI18n();
      if (VERBOSE)
        writer.fine(LocalizedStrings.GLOBALTRANSACTION__ENLISTRESOURCE__ERROR_WHILE_ENLISTING_XARESOURCE_0_1.toLocalizedString(new Object[] {e, addon}), e);
      SystemException sysEx = new SystemException(LocalizedStrings.GLOBALTRANSACTION__ENLISTRESOURCE__ERROR_WHILE_ENLISTING_XARESOURCE_0_1.toLocalizedString(new Object[] {e, addon}));
      sysEx.initCause(e);
      throw sysEx;
    }
    return true;
  }

  /**
   * Disassociate the XAResource specified from this transaction.
   *
   * In the current implementation this call will never be made by the
   * application server. The delisting is happening at the time of
   * commit/rollback
   *
   * @param xaRes XAResource to be delisted
   * @param flag One of the values of TMSUCCESS, TMSUSPEND, or TMFAIL.
   * @return true, if resource was delisted successfully, otherwise false.
   * @throws SystemException Thrown if the transaction manager encounters an
   *           unexpected error condition.
   * @throws IllegalStateException Thrown if the transaction in the target
   *           object is not active.
   *
   * @see javax.transaction.Transaction#delistResource(javax.transaction.xa.XAResource,
   *      int)
   */
  public boolean delistResource(XAResource xaRes, int flag)
      throws IllegalStateException, SystemException {
    try {
      if (resourceMap.containsKey(xaRes)) {
        Boolean isActive = (Boolean) resourceMap.get(xaRes);
        if (isActive.booleanValue()) {
          xaRes.end(xid, flag);
          resourceMap.put(xaRes, Boolean.FALSE);
        }
      }
    }
    catch (Exception e) {
      String exception = LocalizedStrings.GlobalTransaction_ERROR_WHILE_DELISTING_XARESOURCE_0_1.toLocalizedString(new Object[] {e, " " + (e instanceof XAException ? ("Error Code =" + ((XAException) e).errorCode) : "")});
      LogWriterI18n writer = TransactionUtils.getLogWriterI18n();
      if (VERBOSE)
        writer.fine(exception, e);
      SystemException se = new SystemException(exception);
      se.initCause(e);
      // FIX: the original constructed this SystemException and silently
      // dropped it (dead store); the method declares "throws SystemException"
      // and sibling methods propagate failures the same way, so throw it.
      throw se;
    }
    return true;
  }

  /**
   * Set the transaction state of the Global Transaction
   *
   * @param new_status Status (int)
   */
  public void setStatus(int new_status) {
    status = new_status;
  }

  /**
   * suspends the current transaction by deactivating the XAResource (delist)
   */
  public void suspend() throws SystemException {
    XAResource xar = null;
    synchronized (this.resourceMap) {
      Iterator iterator = resourceMap.entrySet().iterator();
      Map.Entry entry;
      Boolean isActive = Boolean.FALSE;
      while (iterator.hasNext()) {
        entry = (Map.Entry)iterator.next();
        xar = (XAResource)entry.getKey();
        isActive = (Boolean)entry.getValue();
        if (isActive.booleanValue())
          try {
            // delistResource(xar, XAResource.TMSUCCESS);
            xar.end(xid, XAResource.TMSUSPEND);
            entry.setValue(Boolean.FALSE);
          }
          catch (Exception e) {
            String exception = LocalizedStrings.GlobalTransaction_ERROR_WHILE_DELISTING_XARESOURCE_0_1.toLocalizedString(new Object[] {e, " " + (e instanceof XAException ? ("Error Code =" + ((XAException) e).errorCode) : "")});
            LogWriterI18n writer = TransactionUtils.getLogWriterI18n();
            if (VERBOSE)
              writer.fine(exception);
            throw new SystemException(exception);
          }
      }
    }
  }

  /**
   * resume the current transaction by activating all the XAResources associated
   * with the current transaction
   */
  public void resume() throws SystemException {
    XAResource xar = null;
    synchronized (this.resourceMap) {
      Iterator iterator = resourceMap.entrySet().iterator();
      Map.Entry entry;
      Boolean isActive = Boolean.FALSE;
      while (iterator.hasNext()) {
        entry = (Map.Entry)iterator.next();
        xar = (XAResource)entry.getKey();
        isActive = (Boolean)entry.getValue();
        if (!isActive.booleanValue())
          try {
            xar.start(xid, XAResource.TMRESUME);
            entry.setValue(Boolean.TRUE);
          }
          catch (Exception e) {
            String exception = LocalizedStrings.GlobalTransaction_GLOBATRANSACTION_RESUME_RESUME_NOT_SUCCESFUL_DUE_TO_0.toLocalizedString(e);
            LogWriterI18n writer = TransactionUtils.getLogWriterI18n();
            if (VERBOSE)
              writer.fine(exception, e);
            throw new SystemException(exception);
          }
      }
    }
  }

  /**
   * String for current distributed system
   *
   * @see #IdsForId
   * @guarded.By {@link #DmidMutex}
   */
  private static String DMid = null;

  /**
   * Distributed system for given string
   *
   * @see #DMid
   * @guarded.By {@link #DmidMutex}
   */
  private static InternalDistributedSystem IdsForId = null;

  /**
   * Mutex controls update of {@link #DMid}
   *
   * @see #DMid
   * @see #IdsForId
   */
  private static final Object DmidMutex = new Object();

  /**
   * Read current {@link #DMid} and return it
   *
   * @return current DMid
   */
  private static String getId() {
    synchronized (DmidMutex) {
      InternalDistributedSystem ids = InternalDistributedSystem
          .getAnyInstance();
      if (ids == null) {
        throw new DistributedSystemDisconnectedException("No distributed system");
      }
      if (ids == IdsForId) {
        // same distributed system as last time; reuse the cached id
        return DMid;
      }
      IdsForId = ids;
      DM dm = ids.getDistributionManager();
      DMid = dm.getId().toString();
      return DMid;
    }
  }

  /**
   * Returns a byte array which uses a static synchronized counter to ensure
   * uniqueness
   */
  private static byte[] generateGTid() {
    //Asif: The counter should be attached to the string inside Synch block
    StringBuffer sbuff = new StringBuffer(getId());
    synchronized (GlobalTransaction.class) {
      if (mCounter == 99999)
        mCounter = 1;
      else
        ++mCounter;
      sbuff.append(String.valueOf(mCounter));
    }
    sbuff.append('_').append(System.currentTimeMillis());
    byte[] byte_array = sbuff.toString().getBytes();
    return byte_array;
  }

  /**
   * A timer task cleaup method. This is called when Transaction timeout occurs.
   * On timeout the transaction is rolled back and the thread removed from
   * thread-Transaction Map.
   */
  void expireGTX() {
    if (timedOut)
      return; // this method is only called by a single thread so this is safe
    timedOut = true;
    LogWriterI18n writer = TransactionUtils.getLogWriterI18n();
    try {
      if (writer.infoEnabled())
        writer
            .info(LocalizedStrings.GlobalTransaction_TRANSACTION_0_HAS_TIMED_OUT, this);
      TransactionManagerImpl.getTransactionManager()
          .removeTranxnMappings(transactions);
      setStatus(Status.STATUS_NO_TRANSACTION);
    }
    catch (Exception e) {
      if (writer.severeEnabled())
        writer.severe(LocalizedStrings.GlobalTransaction_GLOBATRANSACTION_EXPIREGTX_ERROR_OCCURED_WHILE_REMOVING_TRANSACTIONAL_MAPPINGS_0, e, e);
    }
  }

  /**
   * Set the transaction TimeOut of the Global Transaction Asif : It returns the
   * new expiry time for the GTX.
   *
   * @param seconds
   * @throws SystemException
   */
  long setTransactionTimeoutForXARes(int seconds) throws SystemException {
    XAResource xar = null;
    boolean resetXATimeOut = true;
    Map.Entry entry;
    synchronized (this.resourceMap) {
      Iterator iterator = resourceMap.entrySet().iterator();
      while (iterator.hasNext()) {
        entry = (Map.Entry)iterator.next();
        xar = (XAResource)entry.getKey();
        if (((Boolean)entry.getValue()).booleanValue()) {
          try {
            resetXATimeOut = xar.setTransactionTimeout(seconds);
          }
          catch (XAException e) {
            String exception = LocalizedStrings.GlobalTransaction_EXCEPTION_OCCURED_WHILE_TRYING_TO_SET_THE_XARESOURCE_TIMEOUT_DUE_TO_0_ERROR_CODE_1.toLocalizedString(new Object[] {e, Integer.valueOf(e.errorCode)});
            LogWriterI18n writer = TransactionUtils.getLogWriterI18n();
            if (VERBOSE)
              writer.fine(exception);
            throw new SystemException(exception);
          }
          break;
        }
      }
    }
    // FIX: multiply in long arithmetic; "seconds * 1000" overflowed int for
    // timeouts over ~24.8 days before being widened to long
    long newExp = System.currentTimeMillis() + (seconds * 1000L);
    if (!resetXATimeOut)
      newExp = -1;
    return newExp;
  }

  /**
   * Testmethod to provide the size of the resource map
   */
  int getResourceMapSize() {
    return this.resourceMap.size();
  }

  /**
   * Returns the List of Transactions associated with this GlobalTransaction
   * Asif : Reduced the visibility
   */
  List getTransactions() {
    return transactions;
  }

  long getExpirationTime() {
    return expirationTime;
  }

  void setTimeoutValue(long time) {
    expirationTime = time;
  }

  boolean isExpired() {
    return timedOut;
  }

  /**
   * Total ordering used by the transaction-timeout data structure: first by
   * expiration time, then by GTid bytes, finally by identity hash code to
   * break ties (see bug 39579).
   */
  public int compare(GlobalTransaction other) {
    if (this == other) {
      return 0;
    }
    long compare = getExpirationTime() - other.getExpirationTime();
    if (compare < 0) {
      return -1;
    }
    else if (compare > 0) {
      return 1;
    }
    // need to compare something else to break the tie to fix bug 39579
    if (this.GTid.length < other.GTid.length) {
      return -1;
    }
    else if (this.GTid.length > other.GTid.length) {
      return 1;
    }
    // need to compare the bytes
    for (int i = 0; i < this.GTid.length; i++) {
      if (this.GTid[i] < other.GTid[i]) {
        return -1;
      }
      else if (this.GTid[i] > other.GTid[i]) {
        return 1;
      }
    }
    // If we get here the GTids are the same!
    int myId = System.identityHashCode(this);
    int otherId = System.identityHashCode(other);
    if (myId < otherId) {
      return -1;
    }
    else if (myId > otherId) {
      return 1;
    }
    else {
      // we could just add another field to this class which has a value
      // obtained from a static atomic
      throw new IllegalStateException(
          LocalizedStrings.GlobalTransaction_COULD_NOT_COMPARE_0_TO_1
              .toLocalizedString(new Object[] {this, other}));
    }
  }
}
package com.pdffiller.client.api;

import java.util.HashMap;
import java.util.List;

import com.pdffiller.client.ApiClient;
import com.pdffiller.client.dto.AddRecipientBody;
import com.pdffiller.client.dto.Params;
import com.pdffiller.client.dto.SignatureRequestBody;
import com.pdffiller.client.exception.PdfFillerAPIException;

/**
 * Client wrapper for the {@code /signature_request} endpoints of the
 * PDFfiller REST API. All calls are delegated to the configured
 * {@link ApiClient} and return the raw JSON response as a String.
 */
public class SignatureRequest {

  /** Base path shared by every signature-request endpoint. */
  private static final String API_PATH = "/signature_request";

  private final ApiClient apiClient;

  public SignatureRequest(ApiClient apiClient) {
    this.apiClient = apiClient;
  }

  /**
   * Retrieve a list of all document signature requests.
   *
   * @param authorization Bearer Access Token obtained from client credentials
   *        (NOTE(review): not used in this method — presumably the ApiClient
   *        already carries the token; confirm against ApiClient.call)
   * @return Json String representing signature request list
   * @throws PdfFillerAPIException if the API call fails
   */
  public String listSignatureRequests(String authorization) throws PdfFillerAPIException {
    List<Params> queryParams = null;
    HashMap<String, String> headerParams = null;
    String body = null;
    String method = "GET";
    return apiClient.call(API_PATH, method, queryParams, headerParams, body);
  }

  /**
   * Creates a new sendtosign request. Two sendtosign methods supported -
   * sendtoeach and sendtogroup. Sendtogroup method requires envelope_name and
   * sign_in_order parameters.
   *
   * @param body Signature request details and recipients information
   * @return Json String with the created signature request
   * @throws PdfFillerAPIException if {@code body} is null or the API call fails
   */
  public String createSignatureRequest(SignatureRequestBody body) throws PdfFillerAPIException {
    List<Params> queryParams = null;
    HashMap<String, String> headerParams = null;
    String method = "POST";
    if (body == null) {
      throw new PdfFillerAPIException(400,
          "Missing the required parameter 'body' when calling createSignatureRequest");
    }
    return apiClient.call(API_PATH, method, queryParams, headerParams, body);
  }

  /**
   * Retrieve a signature request information based on the signature request ID.
   *
   * @param signatureRequestId Signature request Id
   * @return Json String with the signature request details
   * @throws PdfFillerAPIException if the id is null or the API call fails
   */
  public String findSignatureRequestId(Long signatureRequestId) throws PdfFillerAPIException {
    List<Params> queryParams = null;
    HashMap<String, String> headerParams = null;
    String body = null;
    String method = "GET";
    // verify the required parameter 'signatureRequestId' is set
    if (signatureRequestId == null) {
      throw new PdfFillerAPIException(400,
          "Missing the required parameter 'signatureRequestId' when calling findSignatureRequestId");
    }
    String path = API_PATH + "/" + signatureRequestId;
    return apiClient.call(path, method, queryParams, headerParams, body);
  }

  /**
   * Cancel a signature request for the specified sendtosign ID.
   *
   * @param authorization Bearer Access Token obtained from client credentials
   *        (NOTE(review): not used in this method — presumably the ApiClient
   *        already carries the token; confirm against ApiClient.call)
   * @param signatureRequestId Signature request Id
   * @return Json string
   * @throws PdfFillerAPIException if the id is null or the API call fails
   */
  public String cancelSignatureRequest(String authorization, Long signatureRequestId)
      throws PdfFillerAPIException {
    List<Params> queryParams = null;
    HashMap<String, String> headerParams = null;
    String body = null;
    String method = "DELETE";
    // verify the required parameter 'signatureRequestId' is set
    if (signatureRequestId == null) {
      throw new PdfFillerAPIException(400,
          "Missing the required parameter 'signatureRequestId' when calling cancelSignatureRequest");
    }
    String path = API_PATH + "/" + signatureRequestId;
    return apiClient.call(path, method, queryParams, headerParams, body);
  }

  /**
   * Returns a signature request certificate by signatureRequestId.
   *
   * @param signatureRequestId Signature request Id
   * @return File body download string
   * @throws PdfFillerAPIException if the id is null or the API call fails
   */
  public String getCertificateById(Long signatureRequestId) throws PdfFillerAPIException {
    List<Params> queryParams = null;
    HashMap<String, String> headerParams = null;
    String body = null;
    String method = "GET";
    if (signatureRequestId == null) {
      throw new PdfFillerAPIException(400,
          "Missing the required parameter 'signatureRequestId' when calling getCertificateById");
    }
    String path = API_PATH + "/" + signatureRequestId + "/certificate";
    return apiClient.call(path, method, queryParams, headerParams, body);
  }

  /**
   * Adds additional recipient to sendtosign request.
   *
   * @param body recipient details to add to the signature request
   * @param signatureRequestId Signature request Id
   * @return Json String
   * @throws PdfFillerAPIException if an argument is null or the API call fails
   */
  public String addRecipient(AddRecipientBody body, Long signatureRequestId)
      throws PdfFillerAPIException {
    List<Params> queryParams = null;
    HashMap<String, String> headerParams = null;
    String method = "POST";
    // verify the required parameter 'signatureRequestId' is set
    if (signatureRequestId == null) {
      throw new PdfFillerAPIException(400,
          "Missing the required parameter 'signatureRequestId' when calling addRecipient");
    }
    // verify the required parameter 'body' is set
    if (body == null) {
      throw new PdfFillerAPIException(400,
          "Missing the required parameter recipient body when calling addRecipient");
    }
    String path = API_PATH + "/" + signatureRequestId + "/recipient";
    return apiClient.call(path, method, queryParams, headerParams, body);
  }

  /**
   * Returns information about sendtosign recipient and signature status.
   *
   * @param signatureRequestId Signature request Id
   * @param recipientId recipient ID within the signature request
   * @return Json string
   * @throws PdfFillerAPIException if an argument is null or the API call fails
   */
  public String getRecipientStatus(Long signatureRequestId, Long recipientId)
      throws PdfFillerAPIException {
    List<Params> queryParams = null;
    HashMap<String, String> headerParams = null;
    String body = null;
    String method = "GET";
    // verify the required parameter 'signatureRequestId' is set
    if (signatureRequestId == null) {
      throw new PdfFillerAPIException(400,
          "Missing the required parameter 'signatureRequestId' when calling getRecipientStatus");
    }
    // verify the required parameter 'recipientId' is set
    if (recipientId == null) {
      throw new PdfFillerAPIException(400,
          "Missing the required parameter 'recipientId' when calling getRecipientStatus");
    }
    String path = API_PATH + "/" + signatureRequestId + "/recipient/" + recipientId;
    return apiClient.call(path, method, queryParams, headerParams, body);
  }

  /**
   * Remind a sendtosign recipient about the sendtosign request.
   *
   * @param signatureRequestId Signature request Id
   * @param recipientId recipient ID within the signature request
   * @return Json String
   * @throws PdfFillerAPIException if an argument is null or the API call fails
   */
  public String remindRecipient(Long signatureRequestId, Long recipientId)
      throws PdfFillerAPIException {
    List<Params> queryParams = null;
    HashMap<String, String> headerParams = null;
    // Empty (non-null) body: this PUT carries no payload.
    String body = "";
    String method = "PUT";
    // verify the required parameter 'signatureRequestId' is set
    if (signatureRequestId == null) {
      throw new PdfFillerAPIException(400,
          "Missing the required parameter 'signatureRequestId' when calling remindRecipient");
    }
    // verify the required parameter 'recipientId' is set
    if (recipientId == null) {
      throw new PdfFillerAPIException(400,
          "Missing the required parameter 'recipientId' when calling remindRecipient");
    }
    String path = API_PATH + "/" + signatureRequestId + "/recipient/" + recipientId + "/remind";
    return apiClient.call(path, method, queryParams, headerParams, body);
  }

  /**
   * Retrieve a signed document by signatureRequestId.
   *
   * @param signatureRequestId Signature request Id
   * @return File body String
   * @throws PdfFillerAPIException if the id is null or the API call fails
   */
  public String getDocumentSignedDocument(Long signatureRequestId) throws PdfFillerAPIException {
    List<Params> queryParams = null;
    HashMap<String, String> headerParams = null;
    String body = null;
    String method = "GET";
    // verify the required parameter 'signatureRequestId' is set
    if (signatureRequestId == null) {
      throw new PdfFillerAPIException(400,
          "Missing the required parameter 'signatureRequestId' when calling getDocumentSignedDocument");
    }
    String path = API_PATH + "/" + signatureRequestId + "/signed_document";
    return apiClient.call(path, method, queryParams, headerParams, body);
  }
}
package com.twitter.elephantbird.mapred.input; import java.io.*; import java.util.List; import com.twitter.elephantbird.util.HadoopCompat; import com.twitter.elephantbird.util.SplitUtil; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.serializer.Deserializer; import org.apache.hadoop.io.serializer.SerializationFactory; import org.apache.hadoop.io.serializer.Serializer; import org.apache.hadoop.mapred.Counters; import org.apache.hadoop.mapred.FileSplit; import org.apache.hadoop.mapred.InputSplit; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.mapred.RecordReader; import org.apache.hadoop.mapred.Reporter; import org.apache.hadoop.mapreduce.InputFormat; import org.apache.hadoop.mapreduce.StatusReporter; import org.apache.hadoop.mapreduce.TaskAttemptContext; import org.apache.hadoop.mapreduce.TaskAttemptID; import org.apache.hadoop.util.ReflectionUtils; import com.twitter.elephantbird.mapreduce.input.MapredInputFormatCompatible; import com.twitter.elephantbird.mapred.output.DeprecatedOutputFormatWrapper; import com.twitter.elephantbird.util.HadoopUtils; /** * The wrapper enables an {@link InputFormat} written for new * <code>mapreduce</code> interface to be used unmodified in contexts where * a {@link org.apache.hadoop.mapred.InputFormat} with old <code>mapred</code> * interface is required. </p> * * Current restrictions on InputFormat: <ul> * <li> the record reader should reuse key and value objects * or implement {@link com.twitter.elephantbird.mapred.input.MapredInputFormatCompatible} </li> * </ul> * * While this restriction is satisfied by most input formats, * it could be removed with a configuration option. 
* <p> * * Usage: <pre> * // set InputFormat class using a mapreduce InputFormat * DeprecatedInputFormatWrapper.setInputFormat(org.apache.hadoop.mapreduce.lib.input.TextInputFormat.class, jobConf); * jobConf.setOutputFormat(org.apache.hadoop.mapred.TextOutputFormat.class); * // ... * </pre> * * @see DeprecatedOutputFormatWrapper * * @author Raghu Angadi * */ @SuppressWarnings("deprecation") public class DeprecatedInputFormatWrapper<K, V> implements org.apache.hadoop.mapred.InputFormat<K, V> { public static final String CLASS_CONF_KEY = "elephantbird.class.for.DeprecatedInputFormatWrapper"; public static final String VALUE_COPY_CONF_KEY = "elephantbird.class.for.ValueCopyClass"; protected InputFormat<K, V> realInputFormat; protected DeprecatedInputFormatValueCopier<V> valueCopier = null; /** * Sets jobs input format to {@link DeprecatedInputFormatWrapper} and stores * supplied real {@link InputFormat} class name in job configuration. * This configuration is read on the remote tasks to instantiate actual * InputFormat correctly. */ public static void setInputFormat(Class<?> realInputFormatClass, JobConf jobConf) { jobConf.setInputFormat(DeprecatedInputFormatWrapper.class); HadoopUtils.setClassConf(jobConf, CLASS_CONF_KEY, realInputFormatClass); } /** * For cases where we need to set hadoop1 input format in a hadoop2 Configuration object. */ public static void setInputFormat(Class<?> realInputFormatClass, Configuration conf) { conf.setClass("mapred.input.format.class", DeprecatedInputFormatWrapper.class, org.apache.hadoop.mapred.InputFormat.class); HadoopUtils.setClassConf(conf, CLASS_CONF_KEY, realInputFormatClass); } public static void setInputFormat(Class<?> realInputFormatClass, JobConf jobConf, Class<? 
extends DeprecatedInputFormatValueCopier<?>> valueCopyClass) { jobConf.setInputFormat(DeprecatedInputFormatWrapper.class); HadoopUtils.setClassConf(jobConf, CLASS_CONF_KEY, realInputFormatClass); HadoopUtils.setClassConf(jobConf, VALUE_COPY_CONF_KEY, valueCopyClass); } /** * Only used in very specific cases and does not absolve one from * calling the static setInptuFormat methods to set up the hadoop job * properly. * @param inputFormat */ public void setInputFormatInstance(InputFormat<K, V> inputFormat) { realInputFormat = inputFormat; } @SuppressWarnings("unchecked") private void initInputFormat(JobConf conf) { if (realInputFormat == null) { realInputFormat = ReflectionUtils.newInstance(conf.getClass(CLASS_CONF_KEY, null, InputFormat.class), conf); if (conf.get(VALUE_COPY_CONF_KEY) != null) { Class<? extends DeprecatedInputFormatValueCopier> copierClass = conf.getClass(VALUE_COPY_CONF_KEY, null, DeprecatedInputFormatValueCopier.class); if (null != copierClass) { valueCopier = ReflectionUtils.newInstance(copierClass, conf); } } } } public DeprecatedInputFormatWrapper() { // real inputFormat is initialized based on conf. 
} public DeprecatedInputFormatWrapper(InputFormat<K, V> realInputFormat) { this.realInputFormat = realInputFormat; } @Override public RecordReader<K, V> getRecordReader(InputSplit split, JobConf job, Reporter reporter) throws IOException { initInputFormat(job); return new RecordReaderWrapper<K, V>(realInputFormat, split, job, reporter, valueCopier); } @Override public InputSplit[] getSplits(JobConf job, int numSplits) throws IOException { initInputFormat(job); try { List<org.apache.hadoop.mapreduce.InputSplit> splits = realInputFormat.getSplits(HadoopCompat.newJobContext(job, null)); if (splits == null) { return null; } InputSplit[] resultSplits = new InputSplit[splits.size()]; int i = 0; for (org.apache.hadoop.mapreduce.InputSplit split : splits) { if (split.getClass() == org.apache.hadoop.mapreduce.lib.input.FileSplit.class) { org.apache.hadoop.mapreduce.lib.input.FileSplit mapreduceFileSplit = ((org.apache.hadoop.mapreduce.lib.input.FileSplit)split); resultSplits[i++] = new FileSplit( mapreduceFileSplit.getPath(), mapreduceFileSplit.getStart(), mapreduceFileSplit.getLength(), mapreduceFileSplit.getLocations()); } else { InputSplitWrapper wrapper = new InputSplitWrapper(split); wrapper.setConf(job); resultSplits[i++] = wrapper; } } return resultSplits; } catch (InterruptedException e) { throw new IOException(e); } } /** * A reporter that works with both mapred and mapreduce APIs. 
*/ public static class ReporterWrapper extends StatusReporter implements Reporter { private Reporter wrappedReporter; public ReporterWrapper(Reporter reporter) { wrappedReporter = reporter; } @Override public Counters.Counter getCounter(Enum<?> anEnum) { return wrappedReporter.getCounter(anEnum); } @Override public Counters.Counter getCounter(String s, String s1) { return wrappedReporter.getCounter(s, s1); } @Override public void incrCounter(Enum<?> anEnum, long l) { wrappedReporter.incrCounter(anEnum, l); } @Override public void incrCounter(String s, String s1, long l) { wrappedReporter.incrCounter(s, s1, l); } @Override public InputSplit getInputSplit() throws UnsupportedOperationException { return wrappedReporter.getInputSplit(); } @Override public void progress() { wrappedReporter.progress(); } // @Override public float getProgress() { throw new UnsupportedOperationException(); } @Override public void setStatus(String s) { wrappedReporter.setStatus(s); } } private static class RecordReaderWrapper<K, V> implements RecordReader<K, V> { private org.apache.hadoop.mapreduce.RecordReader<K, V> realReader; private MapredInputFormatCompatible mifcReader = null; private long splitLen; // for getPos() // expect readReader return same Key & Value objects (common case) // this avoids extra serialization & deserialazion of these objects private K keyObj = null; private V valueObj = null; private boolean firstRecord = false; private boolean eof = false; private DeprecatedInputFormatValueCopier<V> valueCopier = null; public RecordReaderWrapper(InputFormat<K, V> newInputFormat, InputSplit oldSplit, JobConf oldJobConf, Reporter reporter, DeprecatedInputFormatValueCopier<V> valueCopier) throws IOException { this.valueCopier = valueCopier; splitLen = oldSplit.getLength(); org.apache.hadoop.mapreduce.InputSplit split; if (oldSplit.getClass() == FileSplit.class) { split = new org.apache.hadoop.mapreduce.lib.input.FileSplit( ((FileSplit)oldSplit).getPath(), 
((FileSplit)oldSplit).getStart(), ((FileSplit)oldSplit).getLength(), oldSplit.getLocations()); } else { split = ((InputSplitWrapper)oldSplit).realSplit; } TaskAttemptID taskAttemptID = TaskAttemptID.forName(oldJobConf.get("mapred.task.id")); if (taskAttemptID == null) { taskAttemptID = new TaskAttemptID(); } // create a MapContext to pass reporter to record reader (for counters) TaskAttemptContext taskContext = HadoopCompat .newMapContext(oldJobConf, taskAttemptID, null, null, null, new ReporterWrapper(reporter), null); try { realReader = newInputFormat.createRecordReader(split, taskContext); realReader.initialize(split, taskContext); if (realReader instanceof MapredInputFormatCompatible) { mifcReader = ((MapredInputFormatCompatible) realReader); } } catch (InterruptedException e) { throw new IOException(e); } } private void initKeyValueObjects() { // read once to gain access to key and value objects try { if (!firstRecord & !eof) { if (realReader.nextKeyValue()) { firstRecord = true; keyObj = realReader.getCurrentKey(); valueObj = realReader.getCurrentValue(); } else { eof = true; } } } catch (Exception e) { throw new RuntimeException("Could not read first record (and it was not an EOF)", e); } } @Override public void close() throws IOException { realReader.close(); } @Override public K createKey() { initKeyValueObjects(); return keyObj; } @Override public V createValue() { initKeyValueObjects(); return valueObj; } @Override public long getPos() throws IOException { return (long) (splitLen * getProgress()); } @Override public float getProgress() throws IOException { try { return realReader.getProgress(); } catch (InterruptedException e) { throw new IOException(e); } } @Override public boolean next(K key, V value) throws IOException { if (eof) { return false; } if (firstRecord) { // key & value are already read. 
firstRecord = false; return true; } if (mifcReader != null) { mifcReader.setKeyValue(key, value); } try { if (realReader.nextKeyValue()) { if (key != realReader.getCurrentKey()) { if (mifcReader != null) { throw new IOException("The RecordReader returned a key and value that do not match " + "the key and value sent to it. This means the RecordReader did not properly implement " + "com.twitter.elephantbird.mapred.input.MapredInputFormatCompatible. " + "Current reader class : " + realReader.getClass()); } else { throw new IOException("DeprecatedInputFormatWrapper only " + "supports RecordReaders that return the same key & value " + "objects or implement com.twitter.elephantbird.mapred.input.MapredInputFormatCompatible. " + "Current reader class : " + realReader.getClass()); } } if (value != realReader.getCurrentValue()) { if (null != valueCopier) valueCopier.copyValue(value, realReader.getCurrentValue()); else { throw new IOException("DeprecatedInputFormatWrapper - value is different " + "and no value copier provided. " + "Current reader class : " + realReader.getClass()); } } return true; } } catch (InterruptedException e) { throw new IOException(e); } eof = true; // strictly not required, just for consistency return false; } } private static class InputSplitWrapper implements InputSplit, Configurable { org.apache.hadoop.mapreduce.InputSplit realSplit; private Configuration conf; @SuppressWarnings("unused") // MapReduce instantiates this. 
public InputSplitWrapper() {} public InputSplitWrapper(org.apache.hadoop.mapreduce.InputSplit realSplit) { this.realSplit = realSplit; } @Override public long getLength() throws IOException { try { return realSplit.getLength(); } catch (InterruptedException e) { throw new IOException(e); } } @Override public String[] getLocations() throws IOException { try { return realSplit.getLocations(); } catch (InterruptedException e) { throw new IOException(e); } } @Override public void readFields(DataInput in) throws IOException { realSplit = SplitUtil.deserializeInputSplit(conf, (DataInputStream) in); } @Override public void write(DataOutput out) throws IOException { SplitUtil.serializeInputSplit(conf, (DataOutputStream) out, realSplit); } @Override public void setConf(Configuration conf) { this.conf = conf; } @Override public Configuration getConf() { return conf; } } }
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.cluster.routing.allocation.decider; import com.carrotsearch.hppc.ObjectLookupContainer; import com.carrotsearch.hppc.cursors.ObjectCursor; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterInfo; import org.elasticsearch.cluster.ClusterInfoService; import org.elasticsearch.cluster.DiskUsage; import org.elasticsearch.cluster.EmptyClusterInfoService; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.RatioValue; import 
org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.set.Sets; import java.util.Set; /** * The {@link DiskThresholdDecider} checks that the node a shard is potentially * being allocated to has enough disk space. * * It has three configurable settings, all of which can be changed dynamically: * * <code>cluster.routing.allocation.disk.watermark.low</code> is the low disk * watermark. New shards will not allocated to a node with usage higher than this, * although this watermark may be passed by allocating a shard. It defaults to * 0.85 (85.0%). * * <code>cluster.routing.allocation.disk.watermark.high</code> is the high disk * watermark. If a node has usage higher than this, shards are not allowed to * remain on the node. In addition, if allocating a shard to a node causes the * node to pass this watermark, it will not be allowed. It defaults to * 0.90 (90.0%). * * Both watermark settings are expressed in terms of used disk percentage, or * exact byte values for free space (like "500mb") * * <code>cluster.routing.allocation.disk.threshold_enabled</code> is used to * enable or disable this decider. It defaults to false (disabled). 
*/ public class DiskThresholdDecider extends AllocationDecider { public static final String NAME = "disk_threshold"; private volatile Double freeDiskThresholdLow; private volatile Double freeDiskThresholdHigh; private volatile ByteSizeValue freeBytesThresholdLow; private volatile ByteSizeValue freeBytesThresholdHigh; private volatile boolean includeRelocations; private volatile boolean enabled; private volatile TimeValue rerouteInterval; public static final Setting<Boolean> CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING = Setting.boolSetting("cluster.routing.allocation.disk.threshold_enabled", true, true, Setting.Scope.CLUSTER); public static final Setting<String> CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING = new Setting<>("cluster.routing.allocation.disk.watermark.low", "85%", (s) -> validWatermarkSetting(s, "cluster.routing.allocation.disk.watermark.low"), true, Setting.Scope.CLUSTER); public static final Setting<String> CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING = new Setting<>("cluster.routing.allocation.disk.watermark.high", "90%", (s) -> validWatermarkSetting(s, "cluster.routing.allocation.disk.watermark.high"), true, Setting.Scope.CLUSTER); public static final Setting<Boolean> CLUSTER_ROUTING_ALLOCATION_INCLUDE_RELOCATIONS_SETTING = Setting.boolSetting("cluster.routing.allocation.disk.include_relocations", true, true, Setting.Scope.CLUSTER);; public static final Setting<TimeValue> CLUSTER_ROUTING_ALLOCATION_REROUTE_INTERVAL_SETTING = Setting.positiveTimeSetting("cluster.routing.allocation.disk.reroute_interval", TimeValue.timeValueSeconds(60), true, Setting.Scope.CLUSTER); /** * Listens for a node to go over the high watermark and kicks off an empty * reroute if it does. 
Also responsible for logging about nodes that have * passed the disk watermarks */ class DiskListener implements ClusterInfoService.Listener { private final Client client; private final Set<String> nodeHasPassedWatermark = Sets.newConcurrentHashSet(); private long lastRunNS; DiskListener(Client client) { this.client = client; } /** * Warn about the given disk usage if the low or high watermark has been passed */ private void warnAboutDiskIfNeeded(DiskUsage usage) { // Check absolute disk values if (usage.getFreeBytes() < DiskThresholdDecider.this.freeBytesThresholdHigh.bytes()) { logger.warn("high disk watermark [{}] exceeded on {}, shards will be relocated away from this node", DiskThresholdDecider.this.freeBytesThresholdHigh, usage); } else if (usage.getFreeBytes() < DiskThresholdDecider.this.freeBytesThresholdLow.bytes()) { logger.info("low disk watermark [{}] exceeded on {}, replicas will not be assigned to this node", DiskThresholdDecider.this.freeBytesThresholdLow, usage); } // Check percentage disk values if (usage.getFreeDiskAsPercentage() < DiskThresholdDecider.this.freeDiskThresholdHigh) { logger.warn("high disk watermark [{}] exceeded on {}, shards will be relocated away from this node", Strings.format1Decimals(100.0 - DiskThresholdDecider.this.freeDiskThresholdHigh, "%"), usage); } else if (usage.getFreeDiskAsPercentage() < DiskThresholdDecider.this.freeDiskThresholdLow) { logger.info("low disk watermark [{}] exceeded on {}, replicas will not be assigned to this node", Strings.format1Decimals(100.0 - DiskThresholdDecider.this.freeDiskThresholdLow, "%"), usage); } } @Override public void onNewInfo(ClusterInfo info) { ImmutableOpenMap<String, DiskUsage> usages = info.getNodeLeastAvailableDiskUsages(); if (usages != null) { boolean reroute = false; String explanation = ""; // Garbage collect nodes that have been removed from the cluster // from the map that tracks watermark crossing ObjectLookupContainer<String> nodes = usages.keys(); for (String node : 
nodeHasPassedWatermark) { if (nodes.contains(node) == false) { nodeHasPassedWatermark.remove(node); } } for (ObjectObjectCursor<String, DiskUsage> entry : usages) { String node = entry.key; DiskUsage usage = entry.value; warnAboutDiskIfNeeded(usage); if (usage.getFreeBytes() < DiskThresholdDecider.this.freeBytesThresholdHigh.bytes() || usage.getFreeDiskAsPercentage() < DiskThresholdDecider.this.freeDiskThresholdHigh) { if ((System.nanoTime() - lastRunNS) > DiskThresholdDecider.this.rerouteInterval.nanos()) { lastRunNS = System.nanoTime(); reroute = true; explanation = "high disk watermark exceeded on one or more nodes"; } else { logger.debug("high disk watermark exceeded on {} but an automatic reroute has occurred in the last [{}], skipping reroute", node, DiskThresholdDecider.this.rerouteInterval); } nodeHasPassedWatermark.add(node); } else if (usage.getFreeBytes() < DiskThresholdDecider.this.freeBytesThresholdLow.bytes() || usage.getFreeDiskAsPercentage() < DiskThresholdDecider.this.freeDiskThresholdLow) { nodeHasPassedWatermark.add(node); } else { if (nodeHasPassedWatermark.contains(node)) { // The node has previously been over the high or // low watermark, but is no longer, so we should // reroute so any unassigned shards can be allocated // if they are able to be if ((System.nanoTime() - lastRunNS) > DiskThresholdDecider.this.rerouteInterval.nanos()) { lastRunNS = System.nanoTime(); reroute = true; explanation = "one or more nodes has gone under the high or low watermark"; nodeHasPassedWatermark.remove(node); } else { logger.debug("{} has gone below a disk threshold, but an automatic reroute has occurred in the last [{}], skipping reroute", node, DiskThresholdDecider.this.rerouteInterval); } } } } if (reroute) { logger.info("rerouting shards: [{}]", explanation); // Execute an empty reroute, but don't block on the response client.admin().cluster().prepareReroute().execute(); } } } } public DiskThresholdDecider(Settings settings) { // It's okay the Client is 
null here, because the empty cluster info // service will never actually call the listener where the client is // needed. Also this constructor is only used for tests this(settings, new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), EmptyClusterInfoService.INSTANCE, null); } @Inject public DiskThresholdDecider(Settings settings, ClusterSettings clusterSettings, ClusterInfoService infoService, Client client) { super(settings); final String lowWatermark = CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.get(settings); final String highWatermark = CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.get(settings); setHighWatermark(highWatermark); setLowWatermark(lowWatermark); this.includeRelocations = CLUSTER_ROUTING_ALLOCATION_INCLUDE_RELOCATIONS_SETTING.get(settings); this.rerouteInterval = CLUSTER_ROUTING_ALLOCATION_REROUTE_INTERVAL_SETTING.get(settings); this.enabled = CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING.get(settings); clusterSettings.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING, this::setLowWatermark); clusterSettings.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING, this::setHighWatermark); clusterSettings.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_INCLUDE_RELOCATIONS_SETTING, this::setIncludeRelocations); clusterSettings.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_REROUTE_INTERVAL_SETTING, this::setRerouteInterval); clusterSettings.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING, this::setEnabled); infoService.addListener(new DiskListener(client)); } private void setIncludeRelocations(boolean includeRelocations) { this.includeRelocations = includeRelocations; } private void setRerouteInterval(TimeValue rerouteInterval) { this.rerouteInterval = rerouteInterval; } private void setEnabled(boolean enabled) { this.enabled = enabled; } private void setLowWatermark(String lowWatermark) { // 
Watermark is expressed in terms of used data, but we need "free" data watermark this.freeDiskThresholdLow = 100.0 - thresholdPercentageFromWatermark(lowWatermark); this.freeBytesThresholdLow = thresholdBytesFromWatermark(lowWatermark, CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey()); } private void setHighWatermark(String highWatermark) { // Watermark is expressed in terms of used data, but we need "free" data watermark this.freeDiskThresholdHigh = 100.0 - thresholdPercentageFromWatermark(highWatermark); this.freeBytesThresholdHigh = thresholdBytesFromWatermark(highWatermark, CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey()); } // For Testing public Double getFreeDiskThresholdLow() { return freeDiskThresholdLow; } // For Testing public Double getFreeDiskThresholdHigh() { return freeDiskThresholdHigh; } // For Testing public Double getUsedDiskThresholdLow() { return 100.0 - freeDiskThresholdLow; } // For Testing public Double getUsedDiskThresholdHigh() { return 100.0 - freeDiskThresholdHigh; } // For Testing public ByteSizeValue getFreeBytesThresholdLow() { return freeBytesThresholdLow; } // For Testing public ByteSizeValue getFreeBytesThresholdHigh() { return freeBytesThresholdHigh; } // For Testing public boolean isIncludeRelocations() { return includeRelocations; } // For Testing public boolean isEnabled() { return enabled; } // For Testing public TimeValue getRerouteInterval() { return rerouteInterval; } /** * Returns the size of all shards that are currently being relocated to * the node, but may not be finished transfering yet. 
* * If subtractShardsMovingAway is set then the size of shards moving away is subtracted from the total size * of all shards */ public static long sizeOfRelocatingShards(RoutingNode node, ClusterInfo clusterInfo, boolean subtractShardsMovingAway, String dataPath) { long totalSize = 0; for (ShardRouting routing : node.shardsWithState(ShardRoutingState.RELOCATING, ShardRoutingState.INITIALIZING)) { String actualPath = clusterInfo.getDataPath(routing); if (dataPath.equals(actualPath)) { if (routing.initializing() && routing.relocatingNodeId() != null) { totalSize += getShardSize(routing, clusterInfo); } else if (subtractShardsMovingAway && routing.relocating()) { totalSize -= getShardSize(routing, clusterInfo); } } } return totalSize; } static long getShardSize(ShardRouting routing, ClusterInfo clusterInfo) { Long shardSize = clusterInfo.getShardSize(routing); return shardSize == null ? 0 : shardSize; } @Override public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { ClusterInfo clusterInfo = allocation.clusterInfo(); ImmutableOpenMap<String, DiskUsage> usages = clusterInfo.getNodeMostAvailableDiskUsages(); final Decision decision = earlyTerminate(allocation, usages); if (decision != null) { return decision; } final double usedDiskThresholdLow = 100.0 - DiskThresholdDecider.this.freeDiskThresholdLow; final double usedDiskThresholdHigh = 100.0 - DiskThresholdDecider.this.freeDiskThresholdHigh; DiskUsage usage = getDiskUsage(node, allocation, usages); // First, check that the node currently over the low watermark double freeDiskPercentage = usage.getFreeDiskAsPercentage(); // Cache the used disk percentage for displaying disk percentages consistent with documentation double usedDiskPercentage = usage.getUsedDiskAsPercentage(); long freeBytes = usage.getFreeBytes(); if (logger.isTraceEnabled()) { logger.trace("node [{}] has {}% used disk", node.nodeId(), usedDiskPercentage); } // a flag for whether the primary shard has 
been previously allocated IndexMetaData indexMetaData = allocation.metaData().index(shardRouting.getIndex()); boolean primaryHasBeenAllocated = shardRouting.primary() && shardRouting.allocatedPostIndexCreate(indexMetaData); // checks for exact byte comparisons if (freeBytes < freeBytesThresholdLow.bytes()) { // If the shard is a replica or has a primary that has already been allocated before, check the low threshold if (!shardRouting.primary() || (shardRouting.primary() && primaryHasBeenAllocated)) { if (logger.isDebugEnabled()) { logger.debug("less than the required {} free bytes threshold ({} bytes free) on node {}, preventing allocation", freeBytesThresholdLow, freeBytes, node.nodeId()); } return allocation.decision(Decision.NO, NAME, "less than required [%s] free on node, free: [%s]", freeBytesThresholdLow, new ByteSizeValue(freeBytes)); } else if (freeBytes > freeBytesThresholdHigh.bytes()) { // Allow the shard to be allocated because it is primary that // has never been allocated if it's under the high watermark if (logger.isDebugEnabled()) { logger.debug("less than the required {} free bytes threshold ({} bytes free) on node {}, " + "but allowing allocation because primary has never been allocated", freeBytesThresholdLow, freeBytes, node.nodeId()); } return allocation.decision(Decision.YES, NAME, "primary has never been allocated before"); } else { // Even though the primary has never been allocated, the node is // above the high watermark, so don't allow allocating the shard if (logger.isDebugEnabled()) { logger.debug("less than the required {} free bytes threshold ({} bytes free) on node {}, " + "preventing allocation even though primary has never been allocated", freeBytesThresholdHigh, freeBytes, node.nodeId()); } return allocation.decision(Decision.NO, NAME, "less than required [%s] free on node, free: [%s]", freeBytesThresholdHigh, new ByteSizeValue(freeBytes)); } } // checks for percentage comparisons if (freeDiskPercentage < freeDiskThresholdLow) { 
// If the shard is a replica or has a primary that has already been allocated before, check the low threshold if (!shardRouting.primary() || (shardRouting.primary() && primaryHasBeenAllocated)) { if (logger.isDebugEnabled()) { logger.debug("more than the allowed {} used disk threshold ({} used) on node [{}], preventing allocation", Strings.format1Decimals(usedDiskThresholdLow, "%"), Strings.format1Decimals(usedDiskPercentage, "%"), node.nodeId()); } return allocation.decision(Decision.NO, NAME, "more than allowed [%s%%] used disk on node, free: [%s%%]", usedDiskThresholdLow, freeDiskPercentage); } else if (freeDiskPercentage > freeDiskThresholdHigh) { // Allow the shard to be allocated because it is primary that // has never been allocated if it's under the high watermark if (logger.isDebugEnabled()) { logger.debug("more than the allowed {} used disk threshold ({} used) on node [{}], " + "but allowing allocation because primary has never been allocated", Strings.format1Decimals(usedDiskThresholdLow, "%"), Strings.format1Decimals(usedDiskPercentage, "%"), node.nodeId()); } return allocation.decision(Decision.YES, NAME, "primary has never been allocated before"); } else { // Even though the primary has never been allocated, the node is // above the high watermark, so don't allow allocating the shard if (logger.isDebugEnabled()) { logger.debug("less than the required {} free bytes threshold ({} bytes free) on node {}, " + "preventing allocation even though primary has never been allocated", Strings.format1Decimals(freeDiskThresholdHigh, "%"), Strings.format1Decimals(freeDiskPercentage, "%"), node.nodeId()); } return allocation.decision(Decision.NO, NAME, "more than allowed [%s%%] used disk on node, free: [%s%%]", usedDiskThresholdHigh, freeDiskPercentage); } } // Secondly, check that allocating the shard to this node doesn't put it above the high watermark final long shardSize = getShardSize(shardRouting, allocation.clusterInfo()); double freeSpaceAfterShard = 
freeDiskPercentageAfterShardAssigned(usage, shardSize); long freeBytesAfterShard = freeBytes - shardSize; if (freeBytesAfterShard < freeBytesThresholdHigh.bytes()) { logger.warn("after allocating, node [{}] would have less than the required {} free bytes threshold ({} bytes free), preventing allocation", node.nodeId(), freeBytesThresholdHigh, freeBytesAfterShard); return allocation.decision(Decision.NO, NAME, "after allocation less than required [%s] free on node, free: [%s]", freeBytesThresholdLow, new ByteSizeValue(freeBytesAfterShard)); } if (freeSpaceAfterShard < freeDiskThresholdHigh) { logger.warn("after allocating, node [{}] would have more than the allowed {} free disk threshold ({} free), preventing allocation", node.nodeId(), Strings.format1Decimals(freeDiskThresholdHigh, "%"), Strings.format1Decimals(freeSpaceAfterShard, "%")); return allocation.decision(Decision.NO, NAME, "after allocation more than allowed [%s%%] used disk on node, free: [%s%%]", usedDiskThresholdLow, freeSpaceAfterShard); } return allocation.decision(Decision.YES, NAME, "enough disk for shard on node, free: [%s]", new ByteSizeValue(freeBytes)); } @Override public Decision canRemain(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { if (shardRouting.currentNodeId().equals(node.nodeId()) == false) { throw new IllegalArgumentException("Shard [" + shardRouting + "] is not allocated on node: [" + node.nodeId() + "]"); } final ClusterInfo clusterInfo = allocation.clusterInfo(); final ImmutableOpenMap<String, DiskUsage> usages = clusterInfo.getNodeLeastAvailableDiskUsages(); final Decision decision = earlyTerminate(allocation, usages); if (decision != null) { return decision; } final DiskUsage usage = getDiskUsage(node, allocation, usages); final String dataPath = clusterInfo.getDataPath(shardRouting); // If this node is already above the high threshold, the shard cannot remain (get it off!) 
final double freeDiskPercentage = usage.getFreeDiskAsPercentage(); final long freeBytes = usage.getFreeBytes(); if (logger.isTraceEnabled()) { logger.trace("node [{}] has {}% free disk ({} bytes)", node.nodeId(), freeDiskPercentage, freeBytes); } if (dataPath == null || usage.getPath().equals(dataPath) == false) { return allocation.decision(Decision.YES, NAME, "shard is not allocated on the most utilized disk"); } if (freeBytes < freeBytesThresholdHigh.bytes()) { if (logger.isDebugEnabled()) { logger.debug("less than the required {} free bytes threshold ({} bytes free) on node {}, shard cannot remain", freeBytesThresholdHigh, freeBytes, node.nodeId()); } return allocation.decision(Decision.NO, NAME, "after allocation less than required [%s] free on node, free: [%s]", freeBytesThresholdHigh, new ByteSizeValue(freeBytes)); } if (freeDiskPercentage < freeDiskThresholdHigh) { if (logger.isDebugEnabled()) { logger.debug("less than the required {}% free disk threshold ({}% free) on node {}, shard cannot remain", freeDiskThresholdHigh, freeDiskPercentage, node.nodeId()); } return allocation.decision(Decision.NO, NAME, "after allocation less than required [%s%%] free disk on node, free: [%s%%]", freeDiskThresholdHigh, freeDiskPercentage); } return allocation.decision(Decision.YES, NAME, "enough disk for shard to remain on node, free: [%s]", new ByteSizeValue(freeBytes)); } private DiskUsage getDiskUsage(RoutingNode node, RoutingAllocation allocation, ImmutableOpenMap<String, DiskUsage> usages) { ClusterInfo clusterInfo = allocation.clusterInfo(); DiskUsage usage = usages.get(node.nodeId()); if (usage == null) { // If there is no usage, and we have other nodes in the cluster, // use the average usage for all nodes as the usage for this node usage = averageUsage(node, usages); if (logger.isDebugEnabled()) { logger.debug("unable to determine disk usage for {}, defaulting to average across nodes [{} total] [{} free] [{}% free]", node.nodeId(), usage.getTotalBytes(), 
usage.getFreeBytes(), usage.getFreeDiskAsPercentage()); } } if (includeRelocations) { long relocatingShardsSize = sizeOfRelocatingShards(node, clusterInfo, true, usage.getPath()); DiskUsage usageIncludingRelocations = new DiskUsage(node.nodeId(), node.node().name(), usage.getPath(), usage.getTotalBytes(), usage.getFreeBytes() - relocatingShardsSize); if (logger.isTraceEnabled()) { logger.trace("usage without relocations: {}", usage); logger.trace("usage with relocations: [{} bytes] {}", relocatingShardsSize, usageIncludingRelocations); } usage = usageIncludingRelocations; } return usage; } /** * Returns a {@link DiskUsage} for the {@link RoutingNode} using the * average usage of other nodes in the disk usage map. * @param node Node to return an averaged DiskUsage object for * @param usages Map of nodeId to DiskUsage for all known nodes * @return DiskUsage representing given node using the average disk usage */ public DiskUsage averageUsage(RoutingNode node, ImmutableOpenMap<String, DiskUsage> usages) { if (usages.size() == 0) { return new DiskUsage(node.nodeId(), node.node().name(), "_na_", 0, 0); } long totalBytes = 0; long freeBytes = 0; for (ObjectCursor<DiskUsage> du : usages.values()) { totalBytes += du.value.getTotalBytes(); freeBytes += du.value.getFreeBytes(); } return new DiskUsage(node.nodeId(), node.node().name(), "_na_", totalBytes / usages.size(), freeBytes / usages.size()); } /** * Given the DiskUsage for a node and the size of the shard, return the * percentage of free disk if the shard were to be allocated to the node. * @param usage A DiskUsage for the node to have space computed for * @param shardSize Size in bytes of the shard * @return Percentage of free space after the shard is assigned to the node */ public double freeDiskPercentageAfterShardAssigned(DiskUsage usage, Long shardSize) { shardSize = (shardSize == null) ? 
0 : shardSize; DiskUsage newUsage = new DiskUsage(usage.getNodeId(), usage.getNodeName(), usage.getPath(), usage.getTotalBytes(), usage.getFreeBytes() - shardSize); return newUsage.getFreeDiskAsPercentage(); } /** * Attempts to parse the watermark into a percentage, returning 100.0% if * it cannot be parsed. */ public double thresholdPercentageFromWatermark(String watermark) { try { return RatioValue.parseRatioValue(watermark).getAsPercent(); } catch (ElasticsearchParseException ex) { // NOTE: this is not end-user leniency, since up above we check that it's a valid byte or percentage, and then store the two cases separately return 100.0; } } /** * Attempts to parse the watermark into a {@link ByteSizeValue}, returning * a ByteSizeValue of 0 bytes if the value cannot be parsed. */ public ByteSizeValue thresholdBytesFromWatermark(String watermark, String settingName) { try { return ByteSizeValue.parseBytesSizeValue(watermark, settingName); } catch (ElasticsearchParseException ex) { // NOTE: this is not end-user leniency, since up above we check that it's a valid byte or percentage, and then store the two cases separately return ByteSizeValue.parseBytesSizeValue("0b", settingName); } } /** * Checks if a watermark string is a valid percentage or byte size value, * @return the watermark value given */ public static String validWatermarkSetting(String watermark, String settingName) { try { RatioValue.parseRatioValue(watermark); } catch (ElasticsearchParseException e) { try { ByteSizeValue.parseBytesSizeValue(watermark, settingName); } catch (ElasticsearchParseException ex) { ex.addSuppressed(e); throw ex; } } return watermark; } private Decision earlyTerminate(RoutingAllocation allocation, ImmutableOpenMap<String, DiskUsage> usages) { // Always allow allocation if the decider is disabled if (!enabled) { return allocation.decision(Decision.YES, NAME, "disk threshold decider disabled"); } // Allow allocation regardless if only a single data node is available if 
(allocation.nodes().dataNodes().size() <= 1) { if (logger.isTraceEnabled()) { logger.trace("only a single data node is present, allowing allocation"); } return allocation.decision(Decision.YES, NAME, "only a single data node is present"); } // Fail open there is no info available final ClusterInfo clusterInfo = allocation.clusterInfo(); if (clusterInfo == null) { if (logger.isTraceEnabled()) { logger.trace("cluster info unavailable for disk threshold decider, allowing allocation."); } return allocation.decision(Decision.YES, NAME, "cluster info unavailable"); } // Fail open if there are no disk usages available if (usages.isEmpty()) { if (logger.isTraceEnabled()) { logger.trace("unable to determine disk usages for disk-aware allocation, allowing allocation"); } return allocation.decision(Decision.YES, NAME, "disk usages unavailable"); } return null; } }
package com.ociweb.pronghorn.ring.stream;

import static com.ociweb.pronghorn.ring.FieldReferenceOffsetManager.lookupFieldLocator;
import static com.ociweb.pronghorn.ring.FieldReferenceOffsetManager.lookupFragmentLocator;
import static com.ociweb.pronghorn.ring.FieldReferenceOffsetManager.lookupTemplateLocator;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import java.io.IOException;

import org.junit.Test;

import com.ociweb.jfast.catalog.loader.ClientConfig;
import com.ociweb.jfast.catalog.loader.TemplateCatalogConfig;
import com.ociweb.jfast.catalog.loader.TemplateLoader;
import com.ociweb.pronghorn.ring.FieldReferenceOffsetManager;
import com.ociweb.pronghorn.ring.RingBuffer;
import com.ociweb.pronghorn.ring.RingBufferConfig;
import com.ociweb.pronghorn.ring.RingReader;
import com.ociweb.pronghorn.ring.RingWriter;

/**
 * Exercises the streaming consumer/parser over a ring buffer populated with
 * "TrucksMark2" messages that contain a nested sequence ("Members", each with
 * a nested "Things" sequence). Template metadata is loaded once from the
 * classpath resource {@code /template/smallExample.xml}.
 */
public class StreamParserTest {

    // NOTE(review): unused in this test; presumably kept for a future "Reset"
    // message scenario -- confirm before deleting.
    private static final byte[] ASCII_VERSION = "1.0".getBytes();

    // Shared template metadata; all the locators below are resolved against it.
    private static final FieldReferenceOffsetManager FROM = buildFROM();

    // Template (message) locators defined by smallExample.xml.
    private final int MSG_BOXES_LOC = lookupTemplateLocator("Boxes",FROM);
    private final int MSG_SAMPLE_LOC = lookupTemplateLocator("Sample",FROM);
    private final int MSG_RESET_LOC = lookupTemplateLocator("Reset",FROM);
    private final int MSG_TRUCKS_LOC = lookupTemplateLocator("TrucksMark2",FROM);

    // Field locators for the "Boxes" message.
    private final int BOX_COUNT_LOC = lookupFieldLocator("Count", MSG_BOXES_LOC, FROM);
    private final int BOX_OWNER_LOC = lookupFieldLocator("Owner", MSG_BOXES_LOC, FROM);

    // Field locators for the "Sample" message.
    private final int SAMPLE_YEAR_LOC = lookupFieldLocator("Year", MSG_SAMPLE_LOC, FROM);
    private final int SAMPLE_MONTH_LOC = lookupFieldLocator("Month", MSG_SAMPLE_LOC, FROM);
    private final int SAMPLE_DATE_LOC = lookupFieldLocator("Date", MSG_SAMPLE_LOC, FROM);
    private final int SAMPLE_WEIGHT = lookupFieldLocator("Weight", MSG_SAMPLE_LOC, FROM);

    // Field locator for the "Reset" message.
    private final int REST_VERSION = lookupFieldLocator("Version", MSG_RESET_LOC, FROM);

    // "TrucksMark2" top-level fields, its "Members" sequence fragment,
    // and the nested "Things" sequence fragment inside each member.
    private final int SQUAD_NAME = lookupFieldLocator("Squad", MSG_TRUCKS_LOC, FROM);
    private final int SQUAD_NO_MEMBERS = lookupFieldLocator("NoMembers", MSG_TRUCKS_LOC, FROM);
    private final int MSG_TRUCK_SEQ_LOC = lookupFragmentLocator("Members", MSG_TRUCKS_LOC, FROM);
    private final int SQUAD_TRUCK_ID = lookupFieldLocator("TruckId", MSG_TRUCK_SEQ_LOC, FROM);
    private final int TRUCK_CAPACITY = lookupFieldLocator("Capacity", MSG_TRUCK_SEQ_LOC, FROM);
    private final int THING_NO_LOC = lookupFieldLocator("NoThings", MSG_TRUCK_SEQ_LOC, FROM);
    private final int MSG_TRUCK_THING_SEQ_LOC = lookupFragmentLocator("Things", MSG_TRUCK_SEQ_LOC, FROM);
    private final int THING_ID_LOC = lookupFieldLocator("AThing", MSG_TRUCK_THING_SEQ_LOC, FROM);

    /**
     * Loads the template catalog from the classpath resource and returns its
     * field/offset metadata, shared by all the locator lookups above.
     */
    public static FieldReferenceOffsetManager buildFROM() {
        String source = "/template/smallExample.xml";
        TemplateCatalogConfig catalog = new TemplateCatalogConfig(TemplateLoader.buildCatBytes(source, new ClientConfig()));
        return catalog.getFROM();
    }

    /**
     * Fills the ring with five complete messages, then drives the streaming
     * parser over the ring once, emitting JSON to stdout via the visitor.
     * The hand-written read-back verification is currently disabled (below).
     */
    @Test
    public void sequenceFragmentWriteRead() {

        // Ring sizes are expressed in bits (capacity presumably 2^bits) --
        // TODO confirm against RingBufferConfig.
        byte primaryRingSizeInBits = 9;
        byte byteRingSizeInBits = 18;

        RingBuffer ring = new RingBuffer(new RingBufferConfig(primaryRingSizeInBits, byteRingSizeInBits, null, FROM));
        int testSize = 5;

        //in this method we write two sequence members but only record the count after writing the members
        populateRingBufferWithSequence(ring, testSize);

        // Token layout of the loaded templates, for reference while debugging:
        // 0 Group/OpenTemplPMap/3
        // 1 IntegerUnsigned/None/0
        // 2 ASCII/Copy/0
        // 3 Group/ClosePMap/3
        // 4 Group/OpenTemplPMap/6
        // 5 IntegerUnsigned/Copy/1
        // 6 IntegerUnsigned/Copy/2
        // 7 IntegerUnsigned/Copy/3
        // 8 Decimal/Default/4
        // 9 LongSigned/Delta/0
        // 10 Group/ClosePMap/6
        // 11 Group/OpenTempl/2
        // 12 ASCII/Constant/1
        // 13 Group/Close/2
        // 14 Group/OpenTemplPMap/8
        // 15 ASCII/Copy/2
        // 16 Length/None/5
        // 17 Group/OpenSeqPMap/4
        // 18 LongUnsigned/None/1
        // 19 Decimal/Default/6
        // 20 LongSigned/Delta/2
        // 21 Group/CloseSeqPMap/4
        // 22 Group/ClosePMap/8

        StreamingConsumer visitor = new StreamingConsumerToJSON(System.out);
        StreamingConsumerReader parser = new StreamingConsumerReader(ring, visitor );

        //ring is fully populated so we should not need to call run again
        parser.run();

        // NOTE(review): the read-back assertions below are disabled, so this
        // test currently only checks that parser.run() completes without
        // throwing. Kept for reference.
        //
        // //Ring is full of messages, this loop runs until the ring is empty.
        // while (RingReader.tryReadFragment(ring)) {
        // assertTrue(RingReader.isNewMessage(ring));
        //
        // int msgIdx = RingReader.getMsgIdx(ring);
        // if (msgIdx<0) {
        // break;
        // }
        // assertEquals(MSG_TRUCKS_LOC, msgIdx);
        //
        // assertEquals("TheBobSquad", RingReader.readASCII(ring, SQUAD_NAME, new StringBuilder()).toString());
        //
        // int sequenceCount = RingReader.readInt(ring, SQUAD_NO_MEMBERS);
        // assertEquals(2,sequenceCount);
        //
        //
        // //now we now that we have 2 fragments to read
        // RingReader.tryReadFragment(ring);
        // assertEquals(10, RingReader.readLong(ring, SQUAD_TRUCK_ID));
        // assertEquals(2000, RingReader.readDecimalMantissa(ring, TRUCK_CAPACITY));
        // assertEquals(2, RingReader.readDecimalExponent(ring, TRUCK_CAPACITY));
        // assertEquals(20.00d, RingReader.readDouble(ring, TRUCK_CAPACITY),.001);
        //
        // RingReader.tryReadFragment(ring);
        // assertEquals(11, RingReader.readLong(ring, SQUAD_TRUCK_ID));
        // assertEquals(3000, RingReader.readDecimalMantissa(ring, TRUCK_CAPACITY));
        // assertEquals(2, RingReader.readDecimalExponent(ring, TRUCK_CAPACITY));
        // assertEquals(30.00d, RingReader.readDouble(ring, TRUCK_CAPACITY),.001);
        //
        // }
    }

    /**
     * Writes {@code testSize} "TrucksMark2" messages into the ring. Each
     * message carries a two-member "Members" sequence (each member with a
     * one-element "Things" sequence). The sequence-length field
     * (SQUAD_NO_MEMBERS) is written LAST, after both members, so nothing may
     * be published mid-message and the ring must be large enough to hold the
     * whole message -- see the NOTE below.
     *
     * @param ring     destination ring buffer
     * @param testSize number of complete messages to write before EOF
     */
    private void populateRingBufferWithSequence(RingBuffer ring, int testSize) {

        int j = testSize;
        while (true) {

            if (j==0) {
                // All messages written; mark end-of-stream for the reader.
                RingWriter.publishEOF(ring);
                return;//done
            }

            if (RingWriter.tryWriteFragment(ring, MSG_TRUCKS_LOC)) { //AUTO writes template id as needed

                RingWriter.writeASCII(ring, SQUAD_NAME, "TheBobSquad");

                //WRITE THE FIRST MEMBER OF THE SEQ
                //block to ensure we have room for the next fragment, and ensure that bytes consumed gets recorded
                RingWriter.blockWriteFragment(ring, MSG_TRUCK_SEQ_LOC);//could use tryWrite here but it would make this example more complex

                RingWriter.writeLong(ring, SQUAD_TRUCK_ID, 10);
                RingWriter.writeDecimal(ring, TRUCK_CAPACITY, 2, 2000);
                RingWriter.writeInt(ring, THING_NO_LOC, 1);

                RingWriter.blockWriteFragment(ring, MSG_TRUCK_THING_SEQ_LOC);
                RingWriter.writeInt(ring, THING_ID_LOC, 7);

                //WRITE THE SECOND MEMBER OF THE SEQ
                //block to ensure we have room for the next fragment, and ensure that bytes consumed gets recorded
                RingWriter.blockWriteFragment(ring, MSG_TRUCK_SEQ_LOC);

                RingWriter.writeLong(ring, SQUAD_TRUCK_ID, 11);
                RingWriter.writeDouble(ring, TRUCK_CAPACITY, 30d, 2); //alternate way of writing a decimal
                RingWriter.writeInt(ring, THING_NO_LOC, 1);

                RingWriter.blockWriteFragment(ring, MSG_TRUCK_THING_SEQ_LOC);
                RingWriter.writeInt(ring, THING_ID_LOC, 7);

                //NOTE: because we are waiting until the end of the sequence to write its length we have two rules
                // 1. Publish can not be called between these fragments because it will publish a zero for the count
                // 2. The RingBuffer must be large enough to hold all the fragments in the sequence.
                // Neither one of these apply when the length can be set first.
                RingWriter.writeInt(ring, SQUAD_NO_MEMBERS, 2); //NOTE: we are writing this field very late because we now know how many we wrote.

                // RingWriter.blockWriteFragment(ring, MSG_TRUCK_AGE_FRAG_LOC);
                // RingWriter.writeLong(ring, SQUAD_AGE, 42);

                RingWriter.publishWrites(ring);
                j--;
            } else {
                //Unable to write because there is no room so do something else while we are waiting.
                Thread.yield();
            }
        }
    }
}
/*

   Copyright 2004,2006  The Apache Software Foundation

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.

 */
package org.apache.batik.dom.svg;

import org.apache.batik.parser.DefaultPreserveAspectRatioHandler;
import org.apache.batik.parser.ParseException;
import org.apache.batik.parser.PreserveAspectRatioParser;
import org.apache.batik.util.SVGConstants;

import org.w3c.dom.DOMException;
import org.w3c.dom.svg.SVGPreserveAspectRatio;

/**
 * Abstract implementation of {@link SVGPreserveAspectRatio}.
 *
 * <p>Holds the parsed 'align' and 'meet-or-slice' components of an SVG
 * {@code preserveAspectRatio} value and defers reflecting changes back into
 * the owning attribute to the concrete subclass via
 * {@link #setAttributeValue(String)}.</p>
 *
 * @author Tonny Kohar
 */
public abstract class AbstractSVGPreserveAspectRatio
    implements SVGPreserveAspectRatio,
               SVGConstants {

    /**
     * Strings for the 'align' values, indexed by the
     * SVGPreserveAspectRatio.SVG_PRESERVEASPECTRATIO_* constants
     * (index 0 is unused, hence the leading null).
     */
    protected static final String[] ALIGN_VALUES = {
        null,
        SVG_NONE_VALUE,
        SVG_XMINYMIN_VALUE,
        SVG_XMIDYMIN_VALUE,
        SVG_XMAXYMIN_VALUE,
        SVG_XMINYMID_VALUE,
        SVG_XMIDYMID_VALUE,
        SVG_XMAXYMID_VALUE,
        SVG_XMINYMAX_VALUE,
        SVG_XMIDYMAX_VALUE,
        SVG_XMAXYMAX_VALUE
    };

    /**
     * Strings for the 'meet-or-slice' values, indexed by the
     * SVGPreserveAspectRatio.SVG_MEETORSLICE_* constants
     * (index 0 is unused, hence the leading null).
     */
    protected static final String[] MEET_OR_SLICE_VALUES = {
        null,
        SVG_MEET_VALUE,
        SVG_SLICE_VALUE
    };

    /**
     * Returns a string representation of a preserve aspect ratio value
     * specified numerically, or null if either component is out of range.
     * For align == 'none' the meet-or-slice part is omitted.
     * @param align the align value, one of the
     *   SVGPreserveAspectRatio.SVG_PRESERVEASPECTRATIO_* constants
     * @param meetOrSlice the meet-or-slice value, one of the
     *   SVGPreserveAspectRatio.SVG_MEETORSLICE_* constants
     */
    public static String getValueAsString(short align, short meetOrSlice) {
        if (align < 1 || align > 10) {
            return null;
        }
        String value = ALIGN_VALUES[align];
        if (align == SVG_PRESERVEASPECTRATIO_NONE) {
            return value;
        }
        if (meetOrSlice < 1 || meetOrSlice > 2) {
            return null;
        }
        return value + ' ' + MEET_OR_SLICE_VALUES[meetOrSlice];
    }

    /**
     * align property; by default the value is
     * SVGPreserveAspectRatio.SVG_PRESERVEASPECTRATIO_XMIDYMID
     */
    protected short align =
        SVGPreserveAspectRatio.SVG_PRESERVEASPECTRATIO_XMIDYMID;

    /**
     * meetOrSlice property;
     * by default the value is SVGPreserveAspectRatio.SVG_MEETORSLICE_MEET
     */
    protected short meetOrSlice = SVGPreserveAspectRatio.SVG_MEETORSLICE_MEET;

    /** Creates a new instance of AbstractSVGPreserveAspectRatio */
    public AbstractSVGPreserveAspectRatio() {
    }

    /** Returns the current 'align' component. */
    public short getAlign() {
        return this.align;
    }

    /** Returns the current 'meet-or-slice' component. */
    public short getMeetOrSlice() {
        return this.meetOrSlice;
    }

    /**
     * Sets the 'align' component and immediately pushes the new string form
     * to the owning attribute. Note: the value is not range-checked here;
     * an invalid value surfaces later as a DOMException from
     * getValueAsString().
     */
    public void setAlign(short align) {
        this.align = align;
        setAttributeValue(getValueAsString());
    }

    /**
     * Sets the 'meet-or-slice' component and immediately pushes the new
     * string form to the owning attribute. Not range-checked here (see
     * setAlign).
     */
    public void setMeetOrSlice(short meetOrSlice) {
        this.meetOrSlice = meetOrSlice;
        setAttributeValue(getValueAsString());
    }

    /**
     * Restores both components to their defaults (xMidYMid meet).
     * NOTE(review): unlike the setters, this does NOT write the value back
     * to the attribute (the call below is deliberately commented out) --
     * confirm this asymmetry is intentional.
     */
    public void reset() {
        align = SVGPreserveAspectRatio.SVG_PRESERVEASPECTRATIO_XMIDYMID;
        meetOrSlice = SVGPreserveAspectRatio.SVG_MEETORSLICE_MEET;
        //setAttributeValue(getValueAsString());
    }

    /** Reflects the given string form into the owning attribute. */
    protected abstract void setAttributeValue(String value)
        throws DOMException;

    /** Creates a localized DOMException for the given error key/arguments. */
    protected abstract DOMException createDOMException(short type,
                                                       String key,
                                                       Object[] args);

    /**
     * Parses the given attribute string and updates the align and
     * meetOrSlice fields. On a parse failure neither field is modified and
     * an INVALID_MODIFICATION_ERR DOMException is thrown (the original
     * ParseException is not chained as the cause -- createDOMException has
     * no cause parameter).
     */
    protected void setValueAsString(String value) throws DOMException {
        PreserveAspectRatioParserHandler ph;
        ph = new PreserveAspectRatioParserHandler();
        try {
            PreserveAspectRatioParser p = new PreserveAspectRatioParser();
            p.setPreserveAspectRatioHandler(ph);
            p.parse(value);
            align = ph.getAlign();
            meetOrSlice = ph.getMeetOrSlice();
        } catch (ParseException ex) {
            throw createDOMException
                (DOMException.INVALID_MODIFICATION_ERR,
                 "preserve.aspect.ratio",
                 new Object[] { value });
        }
    }

    /**
     * Returns the string representation of the preserve aspect ratio value,
     * throwing an INVALID_MODIFICATION_ERR DOMException if either component
     * is out of range (possible because the setters do not validate).
     */
    protected String getValueAsString() {
        if (align < 1 || align > 10) {
            throw createDOMException
                (DOMException.INVALID_MODIFICATION_ERR,
                 "preserve.aspect.ratio.align",
                 new Object[] { new Integer(align) });
        }
        String value = ALIGN_VALUES[align];
        if (align == SVG_PRESERVEASPECTRATIO_NONE) {
            return value;
        }
        if (meetOrSlice < 1 || meetOrSlice > 2) {
            throw createDOMException
                (DOMException.INVALID_MODIFICATION_ERR,
                 "preserve.aspect.ratio.meet.or.slice",
                 new Object[] { new Integer(meetOrSlice) });
        }
        return value + ' ' + MEET_OR_SLICE_VALUES[meetOrSlice];
    }

    /**
     * Parser callback that collects the align and meet-or-slice keywords
     * into its own fields (which intentionally shadow the outer class's;
     * read them via the getters after a successful parse).
     */
    protected class PreserveAspectRatioParserHandler
        extends DefaultPreserveAspectRatioHandler {

        public short align =
            SVGPreserveAspectRatio.SVG_PRESERVEASPECTRATIO_XMIDYMID;
        public short meetOrSlice = SVGPreserveAspectRatio.SVG_MEETORSLICE_MEET;

        /** Returns the parsed 'align' value. */
        public short getAlign() {
            return align;
        }

        /** Returns the parsed 'meet-or-slice' value. */
        public short getMeetOrSlice() {
            return meetOrSlice;
        }

        /**
         * Invoked when 'none' has been parsed.
         * @exception ParseException if an error occurred while processing
         * the transform
         */
        public void none() throws ParseException {
            align = SVGPreserveAspectRatio.SVG_PRESERVEASPECTRATIO_NONE;
        }

        /**
         * Invoked when 'xMaxYMax' has been parsed.
         * @exception ParseException if an error occurred while processing
         * the transform
         */
        public void xMaxYMax() throws ParseException {
            align = SVGPreserveAspectRatio.SVG_PRESERVEASPECTRATIO_XMAXYMAX;
        }

        /**
         * Invoked when 'xMaxYMid' has been parsed.
         * @exception ParseException if an error occurred while processing
         * the transform
         */
        public void xMaxYMid() throws ParseException {
            align = SVGPreserveAspectRatio.SVG_PRESERVEASPECTRATIO_XMAXYMID;
        }

        /**
         * Invoked when 'xMaxYMin' has been parsed.
         * @exception ParseException if an error occurred while processing
         * the transform
         */
        public void xMaxYMin() throws ParseException {
            align = SVGPreserveAspectRatio.SVG_PRESERVEASPECTRATIO_XMAXYMIN;
        }

        /**
         * Invoked when 'xMidYMax' has been parsed.
         * @exception ParseException if an error occurred while processing
         * the transform
         */
        public void xMidYMax() throws ParseException {
            align = SVGPreserveAspectRatio.SVG_PRESERVEASPECTRATIO_XMIDYMAX;
        }

        /**
         * Invoked when 'xMidYMid' has been parsed.
         * @exception ParseException if an error occurred while processing
         * the transform
         */
        public void xMidYMid() throws ParseException {
            align = SVGPreserveAspectRatio.SVG_PRESERVEASPECTRATIO_XMIDYMID;
        }

        /**
         * Invoked when 'xMidYMin' has been parsed.
         * @exception ParseException if an error occurred while processing
         * the transform
         */
        public void xMidYMin() throws ParseException {
            align = SVGPreserveAspectRatio.SVG_PRESERVEASPECTRATIO_XMIDYMIN;
        }

        /**
         * Invoked when 'xMinYMax' has been parsed.
         * @exception ParseException if an error occurred while processing
         * the transform
         */
        public void xMinYMax() throws ParseException {
            align = SVGPreserveAspectRatio.SVG_PRESERVEASPECTRATIO_XMINYMAX;
        }

        /**
         * Invoked when 'xMinYMid' has been parsed.
         * @exception ParseException if an error occurred while processing
         * the transform
         */
        public void xMinYMid() throws ParseException {
            align = SVGPreserveAspectRatio.SVG_PRESERVEASPECTRATIO_XMINYMID;
        }

        /**
         * Invoked when 'xMinYMin' has been parsed.
         * @exception ParseException if an error occurred while processing
         * the transform
         */
        public void xMinYMin() throws ParseException {
            align = SVGPreserveAspectRatio.SVG_PRESERVEASPECTRATIO_XMINYMIN;
        }

        /**
         * Invoked when 'meet' has been parsed.
         * @exception ParseException if an error occurred while processing
         * the transform
         */
        public void meet() throws ParseException {
            meetOrSlice = SVGPreserveAspectRatio.SVG_MEETORSLICE_MEET;
        }

        /**
         * Invoked when 'slice' has been parsed.
         * @exception ParseException if an error occurred while processing
         * the transform
         */
        public void slice() throws ParseException {
            meetOrSlice = SVGPreserveAspectRatio.SVG_MEETORSLICE_SLICE;
        }
    }
}
/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.example.android.sunshine.app;

import android.content.Context;
import android.content.SharedPreferences;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import android.preference.PreferenceManager;
import android.text.format.Time;

import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;

/**
 * Static helpers for reading user preferences and formatting weather data
 * (dates, temperatures, wind) for display.
 */
public class Utility {

    /**
     * Returns the location the user configured in preferences, falling back to
     * the resource default when the preference has never been set.
     *
     * @param context context used to resolve preference keys/defaults
     * @return the preferred location string
     */
    public static String getPreferredLocation(Context context) {
        SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
        return prefs.getString(context.getString(R.string.pref_location_key),
                context.getString(R.string.pref_location_default));
    }

    /**
     * Returns {@code true} when the user prefers metric units (the default).
     *
     * @param context context used to resolve preference keys/defaults
     */
    public static boolean isMetric(Context context) {
        SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
        return prefs.getString(context.getString(R.string.pref_units_key),
                context.getString(R.string.pref_units_metric))
                .equals(context.getString(R.string.pref_units_metric));
    }

    /**
     * Formats a temperature for display. Values are stored in Celsius; when the
     * user prefers imperial units the value is converted to Fahrenheit first.
     *
     * @param context     context used for preference lookup and the format string
     * @param temperature temperature in degrees Celsius
     * @return the localized, formatted temperature string
     */
    public static String formatTemperature(Context context, double temperature) {
        // Data stored in Celsius by default. If user prefers to see in Fahrenheit,
        // convert the values here. (Removed the unused degree-sign local; the
        // format_temperature resource supplies the presentation.)
        if (!isMetric(context)) {
            temperature = (temperature * 1.8) + 32;
        }
        // For presentation, assume the user doesn't care about tenths of a degree.
        return String.format(context.getString(R.string.format_temperature), temperature);
    }

    /** Formats a millisecond timestamp with the default-locale date format. */
    static String formatDate(long dateInMilliseconds) {
        Date date = new Date(dateInMilliseconds);
        return DateFormat.getDateInstance().format(date);
    }

    // Format used for storing dates in the database. Also used for converting those
    // strings back into date objects for comparison/processing.
    public static final String DATE_FORMAT = "yyyyMMdd";

    /**
     * Helper method to convert the database representation of the date into something
     * to display to users. As classy and polished a user experience as "20140102" is,
     * we can do better.
     *
     * @param context      Context to use for resource localization
     * @param dateInMillis The date in milliseconds
     * @return a user-friendly representation of the date
     */
    public static String getFriendlyDayString(Context context, long dateInMillis) {
        // The day string for forecast uses the following logic:
        // For today: "Today, June 8"
        // For the next 6 days: "Wednesday" (just the day name)
        // For all days after that: "Mon Jun 8"
        Time time = new Time();
        time.setToNow();
        long currentTime = System.currentTimeMillis();
        int julianDay = Time.getJulianDay(dateInMillis, time.gmtoff);
        int currentJulianDay = Time.getJulianDay(currentTime, time.gmtoff);

        if (julianDay == currentJulianDay) {
            String today = context.getString(R.string.today);
            int formatId = R.string.format_full_friendly_date;
            // BUG FIX: the format arguments were previously passed to getString()
            // (getString(formatId, today, ...)) and the result given to
            // String.format() with no arguments, so the pattern placeholders were
            // never substituted. Format the retrieved pattern explicitly instead.
            return String.format(context.getString(formatId), today,
                    getFormattedMonthDay(context, dateInMillis));
        } else if (julianDay < currentJulianDay + 7) {
            // If the input date is less than a week in the future, just return the day name.
            return getDayName(context, dateInMillis);
        } else {
            // Otherwise, use the form "Mon Jun 3".
            SimpleDateFormat shortenedDateFormat = new SimpleDateFormat("EEE MMM dd");
            return shortenedDateFormat.format(dateInMillis);
        }
    }

    /**
     * Given a day, returns just the name to use for that day:
     * "Today", "Tomorrow", or the weekday name (e.g. "Wednesday").
     *
     * @param context      Context to use for resource localization
     * @param dateInMillis The date in milliseconds
     * @return the user-facing day name
     */
    public static String getDayName(Context context, long dateInMillis) {
        // If the date is today or tomorrow, return the localized label instead of
        // the actual day name.
        Time t = new Time();
        t.setToNow();
        int julianDay = Time.getJulianDay(dateInMillis, t.gmtoff);
        int currentJulianDay = Time.getJulianDay(System.currentTimeMillis(), t.gmtoff);
        if (julianDay == currentJulianDay) {
            return context.getString(R.string.today);
        } else if (julianDay == currentJulianDay + 1) {
            return context.getString(R.string.tomorrow);
        } else {
            // Otherwise, the format is just the day of the week (e.g. "Wednesday").
            // (Removed an unused Time local that served no purpose here.)
            SimpleDateFormat dayFormat = new SimpleDateFormat("EEEE");
            return dayFormat.format(dateInMillis);
        }
    }

    /**
     * Converts a date in milliseconds to the form "Month day", e.g. "June 24".
     *
     * @param context      Context to use for resource localization (kept for
     *                     signature compatibility; not currently used)
     * @param dateInMillis the date in milliseconds
     * @return the day formatted like "December 6"
     */
    public static String getFormattedMonthDay(Context context, long dateInMillis) {
        // Removed an unused Time instance and an unused SimpleDateFormat built from
        // DATE_FORMAT — only the month/day formatter was ever applied.
        SimpleDateFormat monthDayFormat = new SimpleDateFormat("MMMM dd");
        return monthDayFormat.format(dateInMillis);
    }

    /**
     * Formats wind speed and direction for display, converting km/h to mph when
     * the user prefers imperial units.
     *
     * @param context   context used for preference lookup and format strings
     * @param windSpeed wind speed in km/h as stored
     * @param degrees   meteorological wind direction in degrees
     * @return localized "speed direction" string, e.g. "6 km/h NW"
     */
    public static String getFormattedWind(Context context, float windSpeed, float degrees) {
        int windFormat;
        if (Utility.isMetric(context)) {
            windFormat = R.string.format_wind_kmh;
        } else {
            windFormat = R.string.format_wind_mph;
            // km/h -> mph conversion factor.
            windSpeed = .621371192237334f * windSpeed;
        }

        // From wind direction in degrees, determine compass direction as a string
        // (e.g. NW). Each compass point covers a 45-degree arc centered on it.
        String direction = "Unknown";
        if (degrees >= 337.5 || degrees < 22.5) {
            direction = "N";
        } else if (degrees >= 22.5 && degrees < 67.5) {
            direction = "NE";
        } else if (degrees >= 67.5 && degrees < 112.5) {
            direction = "E";
        } else if (degrees >= 112.5 && degrees < 157.5) {
            direction = "SE";
        } else if (degrees >= 157.5 && degrees < 202.5) {
            direction = "S";
        } else if (degrees >= 202.5 && degrees < 247.5) {
            direction = "SW";
        } else if (degrees >= 247.5 && degrees < 292.5) {
            direction = "W";
        } else if (degrees >= 292.5 && degrees < 337.5) {
            direction = "NW";
        }
        return String.format(context.getString(windFormat), windSpeed, direction);
    }

    /**
     * Helper method to provide the icon resource id according to the weather
     * condition id returned by the OpenWeatherMap call.
     *
     * @param weatherId from OpenWeatherMap API response
     * @return resource id for the corresponding icon; -1 if no relation is found
     */
    public static int getIconResourceForWeatherCondition(int weatherId) {
        // Based on weather code data found at:
        // http://bugs.openweathermap.org/projects/api/wiki/Weather_Condition_Codes
        if (weatherId >= 200 && weatherId <= 232) {
            return R.drawable.ic_storm;
        } else if (weatherId >= 300 && weatherId <= 321) {
            return R.drawable.ic_light_rain;
        } else if (weatherId >= 500 && weatherId <= 504) {
            return R.drawable.ic_rain;
        } else if (weatherId == 511) {
            return R.drawable.ic_snow;
        } else if (weatherId >= 520 && weatherId <= 531) {
            return R.drawable.ic_rain;
        } else if (weatherId >= 600 && weatherId <= 622) {
            return R.drawable.ic_snow;
        } else if (weatherId >= 701 && weatherId <= 761) {
            return R.drawable.ic_fog;
        } else if (weatherId == 761 || weatherId == 781) {
            // NOTE(review): 761 is already matched by the 701-761 fog range above,
            // so only 781 (tornado) can reach this branch — confirm intent.
            return R.drawable.ic_storm;
        } else if (weatherId == 800) {
            return R.drawable.ic_clear;
        } else if (weatherId == 801) {
            return R.drawable.ic_light_clouds;
        } else if (weatherId >= 802 && weatherId <= 804) {
            return R.drawable.ic_cloudy;
        }
        return -1;
    }

    /**
     * Helper method to provide the art resource id according to the weather
     * condition id returned by the OpenWeatherMap call.
     *
     * @param weatherId from OpenWeatherMap API response
     * @return resource id for the corresponding art; -1 if no relation is found
     */
    public static int getArtResourceForWeatherCondition(int weatherId) {
        // Based on weather code data found at:
        // http://bugs.openweathermap.org/projects/api/wiki/Weather_Condition_Codes
        if (weatherId >= 200 && weatherId <= 232) {
            return R.drawable.art_storm;
        } else if (weatherId >= 300 && weatherId <= 321) {
            return R.drawable.art_light_rain;
        } else if (weatherId >= 500 && weatherId <= 504) {
            return R.drawable.art_rain;
        } else if (weatherId == 511) {
            return R.drawable.art_snow;
        } else if (weatherId >= 520 && weatherId <= 531) {
            return R.drawable.art_rain;
        } else if (weatherId >= 600 && weatherId <= 622) {
            return R.drawable.art_snow;
        } else if (weatherId >= 701 && weatherId <= 761) {
            return R.drawable.art_fog;
        } else if (weatherId == 761 || weatherId == 781) {
            // NOTE(review): 761 is unreachable here (see fog range above);
            // effectively only 781 selects the storm art.
            return R.drawable.art_storm;
        } else if (weatherId == 800) {
            return R.drawable.art_clear;
        } else if (weatherId == 801) {
            return R.drawable.art_light_clouds;
        } else if (weatherId >= 802 && weatherId <= 804) {
            return R.drawable.art_clouds;
        }
        return -1;
    }

    /**
     * Returns {@code true} when a network connection is available or being
     * established.
     *
     * @param c context used to obtain the connectivity service
     */
    public static boolean isNetworkAvailable(Context c) {
        ConnectivityManager cm =
                (ConnectivityManager) c.getSystemService(Context.CONNECTIVITY_SERVICE);
        NetworkInfo net = cm.getActiveNetworkInfo();
        return net != null && net.isConnectedOrConnecting();
    }
}
/* * ==================================================================== * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * ==================================================================== * * This software consists of voluntary contributions made by many * individuals on behalf of the Apache Software Foundation. For more * information on the Apache Software Foundation, please see * <http://www.apache.org/>. 
 *
 */

package org.apache.hc.client5.http.impl.classic;

import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

import org.apache.hc.client5.http.HttpRoute;
import org.apache.hc.client5.http.config.RequestConfig;
import org.apache.hc.client5.http.io.ConnectionEndpoint;
import org.apache.hc.client5.http.io.HttpClientConnectionManager;
import org.apache.hc.client5.http.io.LeaseRequest;
import org.apache.hc.client5.http.protocol.HttpClientContext;
import org.apache.hc.core5.concurrent.Cancellable;
import org.apache.hc.core5.concurrent.CancellableDependency;
import org.apache.hc.core5.http.ConnectionRequestTimeoutException;
import org.apache.hc.core5.http.HttpHost;
import org.apache.hc.core5.http.impl.io.HttpRequestExecutor;
import org.apache.hc.core5.io.CloseMode;
import org.apache.hc.core5.util.TimeValue;
import org.apache.hc.core5.util.Timeout;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.MockitoAnnotations;
import org.slf4j.Logger;

/**
 * Unit tests for {@code InternalExecRuntime}: endpoint acquisition, release,
 * discard/cancel and connect/disconnect behavior, driven through mocked
 * connection-manager collaborators.
 */
@SuppressWarnings({"static-access"}) // test code
public class TestInternalExecRuntime {

    @Mock
    private Logger log;
    @Mock
    private HttpClientConnectionManager mgr;
    @Mock
    private LeaseRequest leaseRequest;
    @Mock
    private HttpRequestExecutor requestExecutor;
    @Mock
    private CancellableDependency cancellableDependency;
    @Mock
    private ConnectionEndpoint connectionEndpoint;

    // Fixed route and the runtime under test, re-created for each test in setup().
    private HttpRoute route;
    private InternalExecRuntime execRuntime;

    @Before
    public void setup() {
        MockitoAnnotations.initMocks(this);
        route = new HttpRoute(new HttpHost("host", 80));
        execRuntime = new InternalExecRuntime(log, mgr, requestExecutor, cancellableDependency);
    }

    /**
     * Acquiring an endpoint must honor the connection-request timeout from the
     * request config and register both the lease request and the runtime itself
     * as cancellable dependencies.
     */
    @Test
    public void testAcquireEndpoint() throws Exception {
        final HttpClientContext context = HttpClientContext.create();
        final RequestConfig config = RequestConfig.custom()
                .setConnectionTimeout(123, TimeUnit.MILLISECONDS)
                .setConnectionRequestTimeout(345, TimeUnit.MILLISECONDS)
                .build();
        context.setRequestConfig(config);
        final HttpRoute route = new HttpRoute(new HttpHost("host", 80));

        Mockito.when(mgr.lease(Mockito.eq(route), Mockito.<Timeout>any(),
                Mockito.any())).thenReturn(leaseRequest);
        Mockito.when(leaseRequest.get(
                Mockito.anyLong(), Mockito.<TimeUnit>any())).thenReturn(connectionEndpoint);

        execRuntime.acquireEndpoint(route, null, context);

        // Freshly leased endpoint: acquired but neither connected nor reusable yet.
        Assert.assertTrue(execRuntime.isEndpointAcquired());
        Assert.assertSame(connectionEndpoint, execRuntime.ensureValid());
        Assert.assertFalse(execRuntime.isEndpointConnected());
        Assert.assertFalse(execRuntime.isConnectionReusable());

        // 345 ms = the connection-request timeout configured above.
        Mockito.verify(leaseRequest).get(345, TimeUnit.MILLISECONDS);
        Mockito.verify(cancellableDependency, Mockito.times(1)).setDependency(leaseRequest);
        Mockito.verify(cancellableDependency, Mockito.times(1)).setDependency(execRuntime);
        Mockito.verify(cancellableDependency, Mockito.times(2)).setDependency(Mockito.<Cancellable>any());
    }

    /** Acquiring twice without releasing is a programming error. */
    @Test(expected = IllegalStateException.class)
    public void testAcquireEndpointAlreadyAcquired() throws Exception {
        final HttpClientContext context = HttpClientContext.create();

        Mockito.when(mgr.lease(Mockito.eq(route), Mockito.<Timeout>any(),
                Mockito.any())).thenReturn(leaseRequest);
        Mockito.when(leaseRequest.get(
                Mockito.anyLong(), Mockito.<TimeUnit>any())).thenReturn(connectionEndpoint);

        execRuntime.acquireEndpoint(route, null, context);

        Assert.assertTrue(execRuntime.isEndpointAcquired());
        Assert.assertSame(connectionEndpoint, execRuntime.ensureValid());

        // Second acquire on the same runtime must fail.
        execRuntime.acquireEndpoint(route, null, context);
    }

    /** A lease timeout is surfaced as ConnectionRequestTimeoutException. */
    @Test(expected = ConnectionRequestTimeoutException.class)
    public void testAcquireEndpointLeaseRequestTimeout() throws Exception {
        final HttpClientContext context = HttpClientContext.create();

        Mockito.when(mgr.lease(Mockito.eq(route), Mockito.<Timeout>any(),
                Mockito.any())).thenReturn(leaseRequest);
        Mockito.when(leaseRequest.get(
                Mockito.anyLong(), Mockito.<TimeUnit>any())).thenThrow(new TimeoutException("timeout"));

        execRuntime.acquireEndpoint(route, null, context);
    }

    /** A lease execution failure is surfaced as RequestFailedException. */
    @Test(expected = RequestFailedException.class)
    public void testAcquireEndpointLeaseRequestFailure() throws Exception {
        final HttpClientContext context = HttpClientContext.create();

        Mockito.when(mgr.lease(Mockito.eq(route), Mockito.<Timeout>any(),
                Mockito.any())).thenReturn(leaseRequest);
        Mockito.when(leaseRequest.get(
                Mockito.anyLong(), Mockito.<TimeUnit>any())).thenThrow(new ExecutionException(new IllegalStateException()));

        execRuntime.acquireEndpoint(route, null, context);
    }

    /**
     * Discarding closes the endpoint immediately and releases it without state;
     * a second discard must be a no-op (no further close/release interactions).
     */
    @Test
    public void testAbortEndpoint() throws Exception {
        final HttpClientContext context = HttpClientContext.create();

        Mockito.when(mgr.lease(Mockito.eq(route), Mockito.<Timeout>any(),
                Mockito.any())).thenReturn(leaseRequest);
        Mockito.when(leaseRequest.get(
                Mockito.anyLong(), Mockito.<TimeUnit>any())).thenReturn(connectionEndpoint);

        execRuntime.acquireEndpoint(new HttpRoute(new HttpHost("host", 80)), null, context);
        Assert.assertTrue(execRuntime.isEndpointAcquired());
        execRuntime.discardEndpoint();

        Assert.assertFalse(execRuntime.isEndpointAcquired());

        Mockito.verify(connectionEndpoint).close(CloseMode.IMMEDIATE);
        Mockito.verify(mgr).release(connectionEndpoint, null, TimeValue.ZERO_MILLISECONDS);

        execRuntime.discardEndpoint();

        // Still only the single close/release from the first discard.
        Mockito.verify(connectionEndpoint, Mockito.times(1)).close(CloseMode.IMMEDIATE);
        Mockito.verify(mgr, Mockito.times(1)).release(
                Mockito.<ConnectionEndpoint>any(),
                Mockito.any(),
                Mockito.<TimeValue>any());
    }

    /**
     * Cancelling an acquired runtime behaves like a discard: close + release
     * once; cancelling again returns false and does nothing further.
     */
    @Test
    public void testCancell() throws Exception {
        final HttpClientContext context = HttpClientContext.create();

        Mockito.when(mgr.lease(Mockito.eq(route), Mockito.<Timeout>any(),
                Mockito.any())).thenReturn(leaseRequest);
        Mockito.when(leaseRequest.get(
                Mockito.anyLong(), Mockito.<TimeUnit>any())).thenReturn(connectionEndpoint);

        execRuntime.acquireEndpoint(route, null, context);
        Assert.assertTrue(execRuntime.isEndpointAcquired());

        Assert.assertTrue(execRuntime.cancel());

        Assert.assertFalse(execRuntime.isEndpointAcquired());

        Mockito.verify(connectionEndpoint).close(CloseMode.IMMEDIATE);
        Mockito.verify(mgr).release(connectionEndpoint, null, TimeValue.ZERO_MILLISECONDS);

        Assert.assertFalse(execRuntime.cancel());

        Mockito.verify(connectionEndpoint, Mockito.times(1)).close(CloseMode.IMMEDIATE);
        Mockito.verify(mgr, Mockito.times(1)).release(
                Mockito.<ConnectionEndpoint>any(),
                Mockito.any(),
                Mockito.<TimeValue>any());
    }

    /**
     * A connection marked reusable is released back with its state and validity
     * period and must NOT be closed; repeated release is a no-op.
     */
    @Test
    public void testReleaseEndpointReusable() throws Exception {
        final HttpClientContext context = HttpClientContext.create();

        Mockito.when(mgr.lease(Mockito.eq(route), Mockito.<Timeout>any(),
                Mockito.any())).thenReturn(leaseRequest);
        Mockito.when(leaseRequest.get(
                Mockito.anyLong(), Mockito.<TimeUnit>any())).thenReturn(connectionEndpoint);

        execRuntime.acquireEndpoint(route, null, context);
        Assert.assertTrue(execRuntime.isEndpointAcquired());

        execRuntime.markConnectionReusable("some state", TimeValue.ofMillis(100000));

        execRuntime.releaseEndpoint();

        Assert.assertFalse(execRuntime.isEndpointAcquired());

        Mockito.verify(connectionEndpoint, Mockito.never()).close();
        Mockito.verify(mgr).release(connectionEndpoint, "some state", TimeValue.ofMillis(100000));

        execRuntime.releaseEndpoint();

        Mockito.verify(mgr, Mockito.times(1)).release(
                Mockito.<ConnectionEndpoint>any(),
                Mockito.any(),
                Mockito.<TimeValue>any());
    }

    /**
     * Marking the connection non-reusable (after previously marking it reusable)
     * wins: the endpoint is closed and released with no state and zero validity.
     */
    @Test
    public void testReleaseEndpointNonReusable() throws Exception {
        final HttpClientContext context = HttpClientContext.create();

        Mockito.when(mgr.lease(Mockito.eq(route), Mockito.<Timeout>any(),
                Mockito.any())).thenReturn(leaseRequest);
        Mockito.when(leaseRequest.get(
                Mockito.anyLong(), Mockito.<TimeUnit>any())).thenReturn(connectionEndpoint);

        execRuntime.acquireEndpoint(route, null, context);
        Assert.assertTrue(execRuntime.isEndpointAcquired());

        execRuntime.markConnectionReusable("some state", TimeValue.ofMillis(100000));
        execRuntime.markConnectionNonReusable();

        execRuntime.releaseEndpoint();

        Assert.assertFalse(execRuntime.isEndpointAcquired());

        Mockito.verify(connectionEndpoint, Mockito.times(1)).close();
        Mockito.verify(mgr).release(connectionEndpoint, null, TimeValue.ZERO_MILLISECONDS);

        execRuntime.releaseEndpoint();

        Mockito.verify(mgr, Mockito.times(1)).release(
                Mockito.<ConnectionEndpoint>any(),
                Mockito.any(),
                Mockito.<TimeValue>any());
    }

    /**
     * Connecting a disconnected endpoint must use the connect timeout from the
     * request config for both the manager connect call and the socket timeout.
     */
    @Test
    public void testConnectEndpoint() throws Exception {
        final HttpClientContext context = HttpClientContext.create();
        final RequestConfig config = RequestConfig.custom()
                .setConnectionTimeout(123, TimeUnit.MILLISECONDS)
                .setConnectionRequestTimeout(345, TimeUnit.MILLISECONDS)
                .build();
        context.setRequestConfig(config);

        Mockito.when(mgr.lease(Mockito.eq(route), Mockito.<Timeout>any(),
                Mockito.any())).thenReturn(leaseRequest);
        Mockito.when(leaseRequest.get(
                Mockito.anyLong(), Mockito.<TimeUnit>any())).thenReturn(connectionEndpoint);

        execRuntime.acquireEndpoint(route, null, context);
        Assert.assertTrue(execRuntime.isEndpointAcquired());

        Mockito.when(connectionEndpoint.isConnected()).thenReturn(false);
        Assert.assertFalse(execRuntime.isEndpointConnected());

        execRuntime.connectEndpoint(context);

        Mockito.verify(mgr).connect(connectionEndpoint, Timeout.ofMillis(123), context);
        Mockito.verify(connectionEndpoint).setSocketTimeout(Timeout.ofMillis(123));
    }

    /**
     * connectEndpoint() on an already-connected endpoint must not reconnect;
     * disconnectEndpoint() closes the underlying endpoint.
     */
    @Test
    public void testDisonnectEndpoint() throws Exception {
        final HttpClientContext context = HttpClientContext.create();

        Mockito.when(mgr.lease(Mockito.eq(route), Mockito.<Timeout>any(),
                Mockito.any())).thenReturn(leaseRequest);
        Mockito.when(leaseRequest.get(
                Mockito.anyLong(), Mockito.<TimeUnit>any())).thenReturn(connectionEndpoint);

        execRuntime.acquireEndpoint(route, null, context);
        Assert.assertTrue(execRuntime.isEndpointAcquired());

        Mockito.when(connectionEndpoint.isConnected()).thenReturn(true);
        Assert.assertTrue(execRuntime.isEndpointConnected());

        execRuntime.connectEndpoint(context);

        Mockito.verify(mgr, Mockito.never()).connect(
                Mockito.same(connectionEndpoint),
                Mockito.<TimeValue>any(),
                Mockito.<HttpClientContext>any());

        execRuntime.disconnectEndpoint();

        Mockito.verify(connectionEndpoint).close();
    }

}
/* * Copyright (c) 2010-2018 Evolveum and contributors * * This work is dual-licensed under the Apache License 2.0 * and European Union Public License. See LICENSE file for details. */ package com.evolveum.midpoint.prism.path; import com.evolveum.midpoint.prism.PrismInternalTestUtil; import com.evolveum.midpoint.prism.impl.marshaller.ItemPathHolder; import com.evolveum.midpoint.prism.impl.marshaller.PathHolderSegment; import com.evolveum.midpoint.prism.impl.marshaller.TrivialItemPathParser; import com.evolveum.midpoint.prism.util.PrismTestUtil; import com.evolveum.midpoint.util.DOMUtil; import com.evolveum.midpoint.util.PrettyPrinter; import com.evolveum.midpoint.util.exception.SchemaException; import org.testng.AssertJUnit; import org.testng.annotations.BeforeSuite; import org.testng.annotations.Test; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import org.xml.sax.SAXException; import javax.xml.namespace.QName; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.nio.MappedByteBuffer; import java.nio.channels.FileChannel; import java.nio.charset.StandardCharsets; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import static com.evolveum.midpoint.prism.PrismInternalTestUtil.DEFAULT_NAMESPACE_PREFIX; import static org.testng.AssertJUnit.assertEquals; import static org.testng.AssertJUnit.assertTrue; /** * This is low-level ItemPath parsing/serialization test. 
 *
 * @author semancik
 */
public class ItemPathTest {

    // Test fixture files (resolved relative to the module working directory).
    private static final String FILENAME_STRANGECHARS = "src/test/resources/path/strange.txt";
    private static final String FILENAME_DATA_XML = "src/test/resources/path/data.xml";
    // Namespaces used throughout the round-trip tests below.
    private static final String NS_C = "http://midpoint.evolveum.com/xml/ns/public/common/common-3";
    private static final String NS_FOO = "http://foo.com/";
    private static final String NS_BAR = "http://bar.com/";
    // Only referenced by the unfinished (commented-out) xpathTest at the bottom.
    private static final String FILENAME_CHANGETYPE = "src/test/resources/path/changetype-1.xml";

    public ItemPathTest() {
    }

    @BeforeSuite
    public void setup() throws SchemaException, SAXException, IOException {
        // Initialize the prism test context once for the whole suite.
        PrettyPrinter.setDefaultNamespacePrefix(DEFAULT_NAMESPACE_PREFIX);
        PrismTestUtil.resetPrismContext(new PrismInternalTestUtil());
    }

    /**
     * Parses an XPath-like expression taken from a DOM element's text content and
     * checks the resulting segments: plain name, prefixed name, id-value filter.
     */
    @Test
    public void xPathFromDomNode1() throws ParserConfigurationException, SAXException, IOException {

        // Given
        Element el1 = parseDataGetEl1();
        String xpathString = "/root/x:el1[100]";
        el1.setTextContent(xpathString);

        // When
        ItemPathHolder xpath = ItemPathHolder.createForTesting(el1);

        // Then
        List<PathHolderSegment> segments = xpath.toSegments();

        AssertJUnit.assertNotNull(segments);
        AssertJUnit.assertEquals(3, segments.size());
        AssertJUnit.assertEquals(new QName("", "root"), segments.get(0).getQName());
        AssertJUnit.assertFalse(segments.get(0).isVariable());
        AssertJUnit.assertFalse(segments.get(0).isIdValueFilter());
        AssertJUnit.assertEquals(new QName("http://xx.com/", "el1"), segments.get(1).getQName());
        AssertJUnit.assertFalse(segments.get(1).isVariable());
        AssertJUnit.assertFalse(segments.get(1).isIdValueFilter());
        // Third segment is the "[100]" id-value filter, not a named element.
        AssertJUnit.assertNull(segments.get(2).getQName());
        AssertJUnit.assertFalse(segments.get(2).isVariable());
        AssertJUnit.assertTrue(segments.get(2).isIdValueFilter());
        AssertJUnit.assertEquals("100", segments.get(2).getValue());
    }

    /**
     * Loads the test XML document and returns the first {@code el1} element
     * (namespace http://xx.com/) under the {@code root} element.
     */
    private Element parseDataGetEl1() throws ParserConfigurationException, SAXException, IOException {
        DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
        factory.setNamespaceAware(true);
        DocumentBuilder builder = factory.newDocumentBuilder();
        File file = new File(FILENAME_DATA_XML);
        Document doc = builder.parse(file);
        //NodeList childNodes = doc.getChildNodes();

        NodeList rootNodes = doc.getElementsByTagName("root");
        Node rootNode = rootNodes.item(0);

        NodeList nodes = ((Element) rootNode).getElementsByTagNameNS("http://xx.com/", "el1");

        Node el1 = nodes.item(0);
        return (Element)el1;
    }

    /** Variables ($v:var) and inline namespace declarations are parsed correctly. */
    @Test
    public void variableTest() {

        String xpathStr =
                "declare namespace v='http://vvv.com';" +
                "declare namespace x='http://www.xxx.com';" +
                "$v:var/x:xyz[10]";

        ItemPathHolder xpath = ItemPathHolder.createForTesting(xpathStr);

        AssertJUnit.assertEquals("$v:var/x:xyz[10]", xpath.getXPathWithoutDeclarations());
        AssertJUnit.assertEquals("http://vvv.com", xpath.getNamespaceMap().get("v"));
        AssertJUnit.assertEquals("http://www.xxx.com", xpath.getNamespaceMap().get("x"));
    }

    /** "." (self) parses to an empty segment list and round-trips unchanged. */
    @Test
    public void dotTest() {

        ItemPathHolder dotPath = ItemPathHolder.createForTesting(".");

        AssertJUnit.assertTrue(dotPath.toSegments().isEmpty());
        AssertJUnit.assertEquals(".", dotPath.getXPathWithoutDeclarations());
    }

    /**
     * Explicit namespace declarations (various quoting/spacing styles, plus a
     * default-namespace declaration) are all recognized by the trivial parser.
     */
    @Test
    public void explicitNsParseTest() {

        String xpathStr =
                "declare namespace foo='http://ff.com/';\ndeclare default namespace 'http://default.com/';\n declare namespace bar = 'http://www.b.com' ;declare namespace x= \"http://xxx.com/\";\nfoo:foofoo[1]/x:bar";

        TrivialItemPathParser parser = TrivialItemPathParser.parse(xpathStr);

        AssertJUnit.assertEquals("http://ff.com/", parser.getNamespaceMap().get("foo"));
        AssertJUnit.assertEquals("http://www.b.com", parser.getNamespaceMap().get("bar"));
        AssertJUnit.assertEquals("http://xxx.com/", parser.getNamespaceMap().get("x"));
        // The default namespace is keyed by the empty prefix.
        AssertJUnit.assertEquals("http://default.com/", parser.getNamespaceMap().get(""));

        AssertJUnit.assertEquals("foo:foofoo[1]/x:bar", parser.getPureItemPathString());
    }

    /** A path with no declarations passes through the parser untouched. */
    @Test
    public void simpleXPathParseTest() {
        String xpathStr = "foo/bar";

        TrivialItemPathParser parser = TrivialItemPathParser.parse(xpathStr);
        AssertJUnit.assertEquals("foo/bar", parser.getPureItemPathString());
    }

    /**
     * Full round trip with declarations; the serialized declaration order is
     * JVM/implementation dependent, so all known orderings are accepted.
     */
    @Test
    public void explicitNsRoundTripTest() {

        String xpathStr =
                "declare namespace foo='http://ff.com/';\ndeclare default namespace 'http://default.com/';\n declare namespace bar = 'http://www.b.com' ;declare namespace x= \"http://xxx.com/\";\nfoo:foofoo/x:bar";

        ItemPathHolder xpath = ItemPathHolder.createForTesting(xpathStr);

        System.out.println("Pure XPath: "+xpath.getXPathWithoutDeclarations());
        AssertJUnit.assertEquals("foo:foofoo/x:bar", xpath.getXPathWithoutDeclarations());

        System.out.println("ROUND TRIP: "+xpath.getXPathWithDeclarations());
        List<String> expected = Arrays.asList(
                "declare default namespace 'http://default.com/'; declare namespace foo='http://ff.com/'; declare namespace bar='http://www.b.com'; declare namespace x='http://xxx.com/'; foo:foofoo/x:bar",      // java7
                "declare default namespace 'http://default.com/'; declare namespace bar='http://www.b.com'; declare namespace foo='http://ff.com/'; declare namespace x='http://xxx.com/'; foo:foofoo/x:bar",       // java8
                "declare default namespace 'http://default.com/'; declare namespace x='http://xxx.com/'; declare namespace bar='http://www.b.com'; declare namespace foo='http://ff.com/'; foo:foofoo/x:bar"        // after JSON/YAML serialization fix (java8)
        );
        AssertJUnit.assertTrue("Unexpected path with declarations: "+xpath.getXPathWithDeclarations(),
                expected.contains(xpath.getXPathWithDeclarations()));
    }

    /** Externally supplied namespace map: serialization needs no declarations. */
    @Test
    public void pureXPathRoundTripTest() {

        Map<String, String> namespaceMap = new HashMap<>();
        namespaceMap.put("foo", "http://foo");
        namespaceMap.put("bar", "http://bar");

        String xpathStr = "foo:foo/bar:bar";

        ItemPathHolder xpath = ItemPathHolder.createForTesting(xpathStr, namespaceMap);

        System.out.println("Pure XPath: "+xpath.getXPathWithoutDeclarations());
        AssertJUnit.assertEquals("foo:foo/bar:bar", xpath.getXPathWithoutDeclarations());

        System.out.println("ROUND TRIP: "+xpath.getXPathWithDeclarations());
        AssertJUnit.assertEquals("foo:foo/bar:bar", xpath.getXPathWithDeclarations());

    }

    /** Input containing unusual characters (no-break spaces) parses correctly. */
    @Test
    public void strangeCharsTest() throws IOException {

        String xpathStr;

        // The file contains strange chars (no-break spaces), so we need to pull
        // it in exactly as it is.
        File file = new File(FILENAME_STRANGECHARS);
        try (FileInputStream stream = new FileInputStream(file)) {
            FileChannel fc = stream.getChannel();
            MappedByteBuffer bb = fc.map(FileChannel.MapMode.READ_ONLY, 0, fc.size());
            xpathStr = StandardCharsets.UTF_8.decode(bb).toString();
        }

        ItemPathHolder xpath = ItemPathHolder.createForTesting(xpathStr);

        System.out.println("Stragechars Pure XPath: "+xpath.getXPathWithoutDeclarations());
        AssertJUnit.assertEquals("$i:user/i:extension/ri:foobar", xpath.getXPathWithoutDeclarations());

        System.out.println("Stragechars ROUND TRIP: "+xpath.getXPathWithDeclarations());

    }

    /** Serializing a single-QName path into a DOM element declares its namespace. */
    @Test
    public void xpathFromQNameTest() {
        // GIVEN
        QName qname = new QName(NS_FOO, "foo");
        ItemPathHolder xpath = ItemPathHolder.createForTesting(qname);
        QName elementQName = new QName(NS_BAR, "bar");

        // WHEN
        Element element = xpath.toElement(elementQName, DOMUtil.getDocument());

        // THEN
        System.out.println("XPath from Qname:");
        System.out.println(DOMUtil.serializeDOMToString(element));

        assertEquals("Wrong element name", "bar", element.getLocalName());
        assertEquals("Wrong element namespace", NS_BAR, element.getNamespaceURI());
        Map<String, String> nsdecls = DOMUtil.getNamespaceDeclarations(element);
//        assertEquals("Wrong declaration for prefix "+XPathHolder.DEFAULT_PREFIX, NS_FOO, nsdecls.get(XPathHolder.DEFAULT_PREFIX));
        // The generated prefix is implementation-chosen, so take whatever was declared.
        String prefix = nsdecls.keySet().iterator().next();

        assertEquals("Wrong element content", prefix+":foo", element.getTextContent());
    }

    /** DOM round trip: serialize a two-segment path, re-parse, expect equivalence. */
    @Test
    public void testXPathSerializationToDom() {
        // GIVEN
        QName qname1 = new QName(NS_C, "extension");
        QName qname2 = new QName(NS_FOO, "foo");
        ItemPathHolder itemPathHolder1 = ItemPathHolder.createForTesting(qname1, qname2);
        QName elementQName = new QName(NS_BAR, "bar");

        // WHEN
        Element element = itemPathHolder1.toElement(elementQName, DOMUtil.getDocument());
        ItemPathHolder itemPathHolder2 = ItemPathHolder.createForTesting(element);

        // THEN
        System.out.println("XPath from QNames:");
        System.out.println(DOMUtil.serializeDOMToString(element));

        UniformItemPath xpath1 = itemPathHolder1.toItemPath();
        UniformItemPath xpath2 = itemPathHolder2.toItemPath();
        assertTrue("Paths are not equivalent", xpath1.equivalent(xpath2));
    }

    /**
     * Special path tokens ("..", "@", "#") must survive parsing unchanged,
     * with and without a leading namespace declaration.
     */
    @Test
    public void parseSpecial() {
        final String D = "declare namespace x='http://xyz.com/'; ";
        AssertJUnit.assertEquals("..", TrivialItemPathParser.parse("..").getPureItemPathString());
        AssertJUnit.assertEquals("..", TrivialItemPathParser.parse(D+"..").getPureItemPathString());
        AssertJUnit.assertEquals("a/../b", TrivialItemPathParser.parse("a/../b").getPureItemPathString());
        AssertJUnit.assertEquals("a/../b", TrivialItemPathParser.parse(D+"a/../b").getPureItemPathString());
        AssertJUnit.assertEquals("@", TrivialItemPathParser.parse("@").getPureItemPathString());
        AssertJUnit.assertEquals("@", TrivialItemPathParser.parse(D+"@").getPureItemPathString());
        AssertJUnit.assertEquals("a/@/b", TrivialItemPathParser.parse("a/@/b").getPureItemPathString());
        AssertJUnit.assertEquals("a/@/b", TrivialItemPathParser.parse(D+"a/@/b").getPureItemPathString());
        AssertJUnit.assertEquals("#", TrivialItemPathParser.parse("#").getPureItemPathString());
        AssertJUnit.assertEquals("#", TrivialItemPathParser.parse(D+"#").getPureItemPathString());
        AssertJUnit.assertEquals("a/#/b", TrivialItemPathParser.parse("a/#/b").getPureItemPathString());
        AssertJUnit.assertEquals("a/#/b", TrivialItemPathParser.parse(D+"a/#/b").getPureItemPathString());
    }

    /*
     * The following is unfinished test. It was moved here from the schema module, because after migration of prism
     * into prism-api/prism-impl it would need to access some of the internal structures.
     *
     * If needed, it has to be finished, probably by eliminating schema-related artifacts.
     */
//    /**
//     * This is not a proper test yet.
//     * It does some operations with XPath. If it does not die, then the
//     * code some somehow consistent.
//     *
//     * It should be improved later.
//     */
//    @Test
//    public void xpathTest() throws IOException, ParserConfigurationException, SchemaException {
//
//        ObjectModificationType objectModification = PrismTestUtil.parseAtomicValue(new File(FILENAME_CHANGETYPE),
//                ObjectModificationType.COMPLEX_TYPE);
//
//        for (ItemDeltaType change : objectModification.getItemDelta()) {
//            ItemPathType pathType = change.getPath();
//            System.out.println("  path=" + pathType + " (" + pathType.getClass().getName() + ") " + pathType.toString());
//            UniformItemPath path = pathType.getUniformItemPath();
//
//            AssertJUnit.assertEquals("c:extension/piracy:ship[2]/c:name", path.serializeWithoutDeclarations());
//
//            DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
//            factory.setNamespaceAware(true);
//            DocumentBuilder loader = factory.newDocumentBuilder();
//            Document doc = loader.newDocument();
//
//            Element xpathElement = path.xpath.toElement("http://elelel/", "path", doc);
//
//            Attr nsC = xpathElement.getAttributeNodeNS("http://www.w3.org/2000/xmlns/", "c");
//            Attr nsPiracy = xpathElement.getAttributeNodeNS("http://www.w3.org/2000/xmlns/", "piracy");
//
//            System.out.println("c: "+nsC);
//            System.out.println("piracy: "+nsPiracy);
//
//            //            AssertJUnit.assertEquals("http://midpoint.evolveum.com/xml/ns/public/common/common-3", nsC.getValue());
//            //            AssertJUnit.assertEquals("http://midpoint.evolveum.com/xml/ns/samples/piracy", nsPiracy.getValue());
//
//            System.out.println("XPATH Element: " + xpathElement);
//
//            ItemPathHolderTestWrapper xpathFromElement = ItemPathHolderTestWrapper.createForTesting(xpathElement);
//            ItemPathHolderTestWrapper.assertEquals(xpath, xpathFromElement);
//
//            //            attributes = xpathElement.getAttributes();
//            //            for (int i = 0; i < attributes.getLength(); i++) {
//            //                Node n = attributes.item(i);
//            //                System.out.println(" A: " + n.getNodeName() + "(" + n.getPrefix() + " : " + n.getLocalName() + ") = " + n.getNodeValue());
//            //            }
//
//            List<PathHolderSegment> segments = xpath.toSegments();
//
//            System.out.println("XPATH segments: " + segments);
//
//            ItemPathHolderTestWrapper xpathFromSegments = ItemPathHolderTestWrapper.createForTesting(segments);
//
//            System.out.println("XPath from segments: " + xpathFromSegments);
//
//            AssertJUnit.assertEquals("c:extension/piracy:ship[2]/c:name", xpathFromSegments.getXPathWithoutDeclarations());
//        }
//
//    }
}
package org.bouncycastle.math.ec.custom.sec;

import java.math.BigInteger;

import org.bouncycastle.math.raw.Interleave;
import org.bouncycastle.math.raw.Nat256;

/**
 * Arithmetic in the binary field GF(2^239), with elements held in little-endian
 * 64-bit words (4 words per field element, 8 words for an unreduced product).
 * The reduction code folds overflow at bit 239 back into bits 0 and 158
 * (see {@link #reduce} / {@link #reduce17}), i.e. it reduces modulo the
 * trinomial x^239 + x^158 + 1.
 */
public class SecT239Field
{
    // Mask for the 47 low bits of the top (4th) word: 4*64 - 239 = 17 spare bits.
    private static final long M47 = -1L >>> 17;
    // Mask for a 60-bit digit, used by the 60-bit-limb multiplication routines.
    private static final long M60 = -1L >>> 4;

    /** z = x + y. Addition in GF(2^m) is a word-wise XOR. */
    public static void add(long[] x, long[] y, long[] z)
    {
        z[0] = x[0] ^ y[0];
        z[1] = x[1] ^ y[1];
        z[2] = x[2] ^ y[2];
        z[3] = x[3] ^ y[3];
    }

    /** zz = xx + yy for unreduced, double-length (8-word) values. */
    public static void addExt(long[] xx, long[] yy, long[] zz)
    {
        zz[0] = xx[0] ^ yy[0];
        zz[1] = xx[1] ^ yy[1];
        zz[2] = xx[2] ^ yy[2];
        zz[3] = xx[3] ^ yy[3];
        zz[4] = xx[4] ^ yy[4];
        zz[5] = xx[5] ^ yy[5];
        zz[6] = xx[6] ^ yy[6];
        zz[7] = xx[7] ^ yy[7];
    }

    /** z = x + 1 (flip the constant term only). */
    public static void addOne(long[] x, long[] z)
    {
        z[0] = x[0] ^ 1L;
        z[1] = x[1];
        z[2] = x[2];
        z[3] = x[3];
    }

    /** Convert a BigInteger to 4 words and reduce the 17 excess top bits into range. */
    public static long[] fromBigInteger(BigInteger x)
    {
        long[] z = Nat256.fromBigInteger64(x);
        reduce17(z, 0);
        return z;
    }

    /** z = x * y mod f: full 8-word carry-less product, then reduction. */
    public static void multiply(long[] x, long[] y, long[] z)
    {
        long[] tt = Nat256.createExt64();
        implMultiply(x, y, tt);
        reduce(tt, z);
    }

    /** zz += x * y, accumulating the UNREDUCED 8-word product into zz. */
    public static void multiplyAddToExt(long[] x, long[] y, long[] zz)
    {
        long[] tt = Nat256.createExt64();
        implMultiply(x, y, tt);
        addExt(zz, tt, zz);
    }

    /**
     * Reduce an 8-word product xx into a 4-word field element z.
     * Each high word x[k] (bits >= 256) is folded down twice, landing at
     * bit offsets -17 and (158 - 239) relative positions — hence the paired
     * (<< 17 / >>> 47) and (<< 47 / >>> 17) shifts per word. The final
     * partial fold handles the 17 bits still above bit 239 (t << 30 lands
     * the second term at bit 128 + 30 = 158).
     */
    public static void reduce(long[] xx, long[] z)
    {
        long x0 = xx[0], x1 = xx[1], x2 = xx[2], x3 = xx[3];
        long x4 = xx[4], x5 = xx[5], x6 = xx[6], x7 = xx[7];

        x3 ^= (x7 << 17);
        x4 ^= (x7 >>> 47);
        x5 ^= (x7 << 47);
        x6 ^= (x7 >>> 17);

        x2 ^= (x6 << 17);
        x3 ^= (x6 >>> 47);
        x4 ^= (x6 << 47);
        x5 ^= (x6 >>> 17);

        x1 ^= (x5 << 17);
        x2 ^= (x5 >>> 47);
        x3 ^= (x5 << 47);
        x4 ^= (x5 >>> 17);

        x0 ^= (x4 << 17);
        x1 ^= (x4 >>> 47);
        x2 ^= (x4 << 47);
        x3 ^= (x4 >>> 17);

        long t = x3 >>> 47;
        z[0] = x0 ^ t;
        z[1] = x1;
        z[2] = x2 ^ (t << 30);
        z[3] = x3 & M47;
    }

    /**
     * In-place partial reduction of a 4-word value: fold the (at most 17)
     * bits at positions >= 239 back into bits 0 and 158.
     */
    public static void reduce17(long[] z, int zOff)
    {
        long z3 = z[zOff + 3], t = z3 >>> 47;
        z[zOff    ] ^= t;
        z[zOff + 2] ^= (t << 30);
        z[zOff + 3] = z3 & M47;
    }

    /** z = x^2 mod f. */
    public static void square(long[] x, long[] z)
    {
        long[] tt = Nat256.createExt64();
        implSquare(x, tt);
        reduce(tt, z);
    }

    /** zz += x^2, accumulating the unreduced 8-word square into zz. */
    public static void squareAddToExt(long[] x, long[] zz)
    {
        long[] tt = Nat256.createExt64();
        implSquare(x, tt);
        addExt(zz, tt, zz);
    }

    /** z = x^(2^n) mod f: repeated squaring, reducing after each square. */
    public static void squareN(long[] x, int n, long[] z)
    {
        // assert n > 0;

        long[] tt = Nat256.createExt64();
        implSquare(x, tt);
        reduce(tt, z);

        while (--n > 0)
        {
            implSquare(z, tt);
            reduce(tt, z);
        }
    }

    /**
     * Repack 8 limbs of 60 bits each (the multiply routines' working format)
     * into 8 standard 64-bit words, in place.
     */
    protected static void implCompactExt(long[] zz)
    {
        long z0 = zz[0], z1 = zz[1], z2 = zz[2], z3 = zz[3], z4 = zz[4], z5 = zz[5], z6 = zz[6], z7 = zz[7];
        zz[0] =  z0         ^ (z1 << 60);
        zz[1] = (z1 >>>  4) ^ (z2 << 56);
        zz[2] = (z2 >>>  8) ^ (z3 << 52);
        zz[3] = (z3 >>> 12) ^ (z4 << 48);
        zz[4] = (z4 >>> 16) ^ (z5 << 44);
        zz[5] = (z5 >>> 20) ^ (z6 << 40);
        zz[6] = (z6 >>> 24) ^ (z7 << 36);
        zz[7] = (z7 >>> 28);
    }

    /** Split 4 64-bit words of x into 4 limbs of (at most) 60 bits each in z. */
    protected static void implExpand(long[] x, long[] z)
    {
        long x0 = x[0], x1 = x[1], x2 = x[2], x3 = x[3];
        z[0] = x0 & M60;
        z[1] = ((x0 >>> 60) ^ (x1 <<  4)) & M60;
        z[2] = ((x1 >>> 56) ^ (x2 <<  8)) & M60;
        z[3] = ((x2 >>> 52) ^ (x3 << 12));
    }

    protected static void implMultiply(long[] x, long[] y, long[] zz)
    {
        /*
         * "Two-level seven-way recursion" as described in "Batch binary Edwards", Daniel J. Bernstein.
         */
        long[] f = new long[4], g = new long[4];
        implExpand(x, f);
        implExpand(y, g);

        // Diagonal products f[i]*g[i] into limbs 0..3.
        implMulwAcc(f[0], g[0], zz, 0);
        implMulwAcc(f[1], g[1], zz, 1);
        implMulwAcc(f[2], g[2], zz, 2);
        implMulwAcc(f[3], g[3], zz, 3);

        // U *= (1 - t^n)
        for (int i = 5; i > 0; --i)
        {
            zz[i] ^= zz[i - 1];
        }

        implMulwAcc(f[0] ^ f[1], g[0] ^ g[1], zz, 1);
        implMulwAcc(f[2] ^ f[3], g[2] ^ g[3], zz, 3);

        // V *= (1 - t^2n)
        for (int i = 7; i > 1; --i)
        {
            zz[i] ^= zz[i - 2];
        }

        // Double-length recursion
        {
            long c0 = f[0] ^ f[2], c1 = f[1] ^ f[3];
            long d0 = g[0] ^ g[2], d1 = g[1] ^ g[3];
            implMulwAcc(c0 ^ c1, d0 ^ d1, zz, 3);
            long[] t = new long[3];
            implMulwAcc(c0, d0, t, 0);
            implMulwAcc(c1, d1, t, 1);
            long t0 = t[0], t1 = t[1], t2 = t[2];

            zz[2] ^= t0;
            zz[3] ^= t0 ^ t1;
            zz[4] ^= t2 ^ t1;
            zz[5] ^= t2;
        }

        // Convert the 60-bit-limb accumulator back to 64-bit words.
        implCompactExt(zz);
    }

    /**
     * Carry-less multiply of two 60-bit values, accumulated (XORed) into
     * z[zOff], z[zOff + 1]. Uses a windowed table of the 8 multiples
     * 0..7 of y and scans x in 3-bit slices (two slices per iteration);
     * the final masked term repairs the bits of y shifted past bit 63
     * when the table entries u[4..7] were built (y's top bits times x's
     * bits at positions 5, 11, 17, ...).
     */
    protected static void implMulwAcc(long x, long y, long[] z, int zOff)
    {
        // assert x >>> 60 == 0;
        // assert y >>> 60 == 0;

        long[] u = new long[8];
        // u[0] = 0;
        u[1] = y;
        u[2] = u[1] << 1;
        u[3] = u[2] ^ y;
        u[4] = u[2] << 1;
        u[5] = u[4] ^ y;
        u[6] = u[3] << 1;
        u[7] = u[6] ^ y;

        int j = (int)x;
        long g, h = 0, l = u[j & 7] ^ (u[(j >>> 3) & 7] << 3);
        int k = 54;
        do
        {
            j = (int)(x >>> k);
            g = u[j & 7]
              ^ u[(j >>> 3) & 7] << 3;
            l ^= (g << k);
            h ^= (g >>> -k);  // >>> -k is >>> (64 - k): high part of the shifted slice
        }
        while ((k -= 6) > 0);

        // Correction term: (y << 4) >> 63 is an ARITHMETIC shift, giving an
        // all-ones mask iff bit 59 of y is set.
        h ^= ((x & 0x0820820820820820L) & ((y << 4) >> 63)) >>> 5;

        // assert h >>> 55 == 0;

        z[zOff    ] ^= l & M60;
        z[zOff + 1] ^= (l >>> 60) ^ (h << 4);
    }

    /**
     * zz = x^2 (unreduced). In GF(2)[x] squaring just spreads each bit to an
     * even position, done here with interleave tables; only 239 bits of x are
     * significant, so the top word needs just 48 bits expanded.
     */
    protected static void implSquare(long[] x, long[] zz)
    {
        Interleave.expand64To128(x[0], zz, 0);
        Interleave.expand64To128(x[1], zz, 2);
        Interleave.expand64To128(x[2], zz, 4);

        long x3 = x[3];
        zz[6] = Interleave.expand32to64((int)x3);
        zz[7] = Interleave.expand16to32((int)(x3 >>> 32)) & 0xFFFFFFFFL;
    }
}
/* * Copyright 2002-2018 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package permafrost.tundra.org.springframework.util; import java.io.Serializable; import java.util.Collection; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.Locale; import java.util.Map; import java.util.Set; /** * {@link LinkedHashMap} variant that stores String keys in a case-insensitive * manner, for example for key-based access in a results table. * * <p>Preserves the original order as well as the original casing of keys, * while allowing for contains, get and remove calls with any case of key. * * <p>Does <i>not</i> support {@code null} keys. * * @author Juergen Hoeller * @since 3.0 */ @SuppressWarnings("serial") public class LinkedCaseInsensitiveMap<V> implements Map<String, V>, Serializable, Cloneable { private final LinkedHashMap<String, V> targetMap; private final HashMap<String, String> caseInsensitiveKeys; private final Locale locale; /** * Create a new LinkedCaseInsensitiveMap that stores case-insensitive keys * according to the default Locale (by default in lower case). * @see #convertKey(String) */ public LinkedCaseInsensitiveMap() { this((Locale) null); } /** * Create a new LinkedCaseInsensitiveMap that stores case-insensitive keys * according to the given Locale (by default in lower case). 
* @param locale the Locale to use for case-insensitive key conversion * @see #convertKey(String) */ public LinkedCaseInsensitiveMap(Locale locale) { this(16, locale); } /** * Create a new LinkedCaseInsensitiveMap that wraps a {@link LinkedHashMap} * with the given initial capacity and stores case-insensitive keys * according to the default Locale (by default in lower case). * @param initialCapacity the initial capacity * @see #convertKey(String) */ public LinkedCaseInsensitiveMap(int initialCapacity) { this(initialCapacity, null); } /** * Create a new LinkedCaseInsensitiveMap that wraps a {@link LinkedHashMap} * with the given initial capacity and stores case-insensitive keys * according to the given Locale (by default in lower case). * @param initialCapacity the initial capacity * @param locale the Locale to use for case-insensitive key conversion * @see #convertKey(String) */ public LinkedCaseInsensitiveMap(int initialCapacity, Locale locale) { this.targetMap = new LinkedHashMap<String, V>(initialCapacity) { @Override public boolean containsKey(Object key) { return LinkedCaseInsensitiveMap.this.containsKey(key); } @Override protected boolean removeEldestEntry(Map.Entry<String, V> eldest) { boolean doRemove = LinkedCaseInsensitiveMap.this.removeEldestEntry(eldest); if (doRemove) { caseInsensitiveKeys.remove(convertKey(eldest.getKey())); } return doRemove; } }; this.caseInsensitiveKeys = new HashMap<String, String>(initialCapacity); this.locale = (locale != null ? locale : Locale.getDefault()); } /** * Copy constructor. 
*/ @SuppressWarnings("unchecked") private LinkedCaseInsensitiveMap(LinkedCaseInsensitiveMap<V> other) { this.targetMap = (LinkedHashMap<String, V>) other.targetMap.clone(); this.caseInsensitiveKeys = (HashMap<String, String>) other.caseInsensitiveKeys.clone(); this.locale = other.locale; } // Implementation of java.util.Map @Override public int size() { return this.targetMap.size(); } @Override public boolean isEmpty() { return this.targetMap.isEmpty(); } @Override public boolean containsKey(Object key) { return (key instanceof String && this.caseInsensitiveKeys.containsKey(convertKey((String) key))); } @Override public boolean containsValue(Object value) { return this.targetMap.containsValue(value); } @Override public V get(Object key) { if (key instanceof String) { String caseInsensitiveKey = this.caseInsensitiveKeys.get(convertKey((String) key)); if (caseInsensitiveKey != null) { return this.targetMap.get(caseInsensitiveKey); } } return null; } public V getOrDefault(Object key, V defaultValue) { if (key instanceof String) { String caseInsensitiveKey = this.caseInsensitiveKeys.get(convertKey((String) key)); if (caseInsensitiveKey != null) { return this.targetMap.get(caseInsensitiveKey); } } return defaultValue; } @Override public V put(String key, V value) { String oldKey = this.caseInsensitiveKeys.put(convertKey(key), key); if (oldKey != null && !oldKey.equals(key)) { this.targetMap.remove(oldKey); } return this.targetMap.put(key, value); } @Override public void putAll(Map<? extends String, ? extends V> map) { if (map.isEmpty()) { return; } for (Map.Entry<? extends String, ? 
extends V> entry : map.entrySet()) { put(entry.getKey(), entry.getValue()); } } @Override public V remove(Object key) { if (key instanceof String) { String caseInsensitiveKey = this.caseInsensitiveKeys.remove(convertKey((String) key)); if (caseInsensitiveKey != null) { return this.targetMap.remove(caseInsensitiveKey); } } return null; } @Override public void clear() { this.caseInsensitiveKeys.clear(); this.targetMap.clear(); } @Override public Set<String> keySet() { return this.targetMap.keySet(); } @Override public Collection<V> values() { return this.targetMap.values(); } @Override public Set<Entry<String, V>> entrySet() { return this.targetMap.entrySet(); } @Override public LinkedCaseInsensitiveMap<V> clone() { return new LinkedCaseInsensitiveMap<V>(this); } @Override public boolean equals(Object obj) { return this.targetMap.equals(obj); } @Override public int hashCode() { return this.targetMap.hashCode(); } @Override public String toString() { return this.targetMap.toString(); } // Specific to LinkedCaseInsensitiveMap /** * Return the locale used by this {@code LinkedCaseInsensitiveMap}. * Used for case-insensitive key conversion. * @since 4.3.10 * @see #LinkedCaseInsensitiveMap(Locale) * @see #convertKey(String) */ public Locale getLocale() { return this.locale; } /** * Convert the given key to a case-insensitive key. * <p>The default implementation converts the key * to lower-case according to this Map's Locale. * @param key the user-specified key * @return the key to use for storing * @see String#toLowerCase(Locale) */ protected String convertKey(String key) { return key.toLowerCase(getLocale()); } /** * Determine whether this map should remove the given eldest entry. * @param eldest the candidate entry * @return {@code true} for removing it, {@code false} for keeping it * @see LinkedHashMap#removeEldestEntry */ protected boolean removeEldestEntry(Map.Entry<String, V> eldest) { return false; } }
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.discovery.zen.publish;

import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.common.transport.DummyTransportAddress;
import org.elasticsearch.discovery.zen.publish.PendingClusterStatesQueue.ClusterStateContext;
import org.elasticsearch.test.ESTestCase;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasKey;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
import static org.hamcrest.Matchers.sameInstance;

/**
 * Tests for {@link PendingClusterStatesQueue}: capacity-driven dropping,
 * commit/process/fail life-cycle, cleanup of states from other masters,
 * and queue statistics. States are generated with randomized masters and
 * versions via {@link #randomStates(int, String...)}.
 */
public class PendingClusterStatesQueueTests extends ESTestCase {

    /** An empty queue has nothing committed, hence nothing to process. */
    public void testSelectNextStateToProcess_empty() {
        PendingClusterStatesQueue queue = new PendingClusterStatesQueue(logger, randomIntBetween(1, 200));
        assertThat(queue.getNextClusterStateToProcess(), nullValue());
    }

    /**
     * Fill a queue sized to hold only half the generated states, then add the
     * other half: the first half must be evicted, and any committed (evicted)
     * state must have its listener notified of failure.
     */
    public void testDroppingStatesAtCapacity() {
        List<ClusterState> states = randomStates(scaledRandomIntBetween(10, 300), "master1", "master2", "master3", "master4");
        Collections.shuffle(states, random());
        // insert half of the states
        final int numberOfStateToDrop = states.size() / 2;
        List<ClusterState> stateToDrop = states.subList(0, numberOfStateToDrop);
        final int queueSize = states.size() - numberOfStateToDrop;
        PendingClusterStatesQueue queue = createQueueWithStates(stateToDrop, queueSize);
        List<ClusterStateContext> committedContexts = randomCommitStates(queue);
        for (ClusterState state : states.subList(numberOfStateToDrop, states.size())) {
            queue.addPending(state);
        }

        assertThat(queue.pendingClusterStates().length, equalTo(queueSize));
        // check all committed states got a failure due to the drop
        for (ClusterStateContext context : committedContexts) {
            assertThat(((MockListener) context.listener).failure, notNullValue());
        }

        // all states that should have dropped are indeed dropped.
        for (ClusterState state : stateToDrop) {
            assertThat(queue.findState(state.stateUUID()), nullValue());
        }
    }

    /**
     * With a single master, processing the highest committed state empties
     * the processable backlog.
     */
    public void testSimpleQueueSameMaster() {
        final int numUpdates = scaledRandomIntBetween(50, 100);
        List<ClusterState> states = randomStates(numUpdates, "master");
        Collections.shuffle(states, random());
        PendingClusterStatesQueue queue;
        queue = createQueueWithStates(states);

        // no state is committed yet
        assertThat(queue.getNextClusterStateToProcess(), nullValue());

        ClusterState highestCommitted = null;
        for (ClusterStateContext context : randomCommitStates(queue)) {
            if (highestCommitted == null || context.state.supersedes(highestCommitted)) {
                highestCommitted = context.state;
            }
        }

        // the queue hands out the most recent committed state
        assertThat(queue.getNextClusterStateToProcess(), sameInstance(highestCommitted));

        queue.markAsProcessed(highestCommitted);

        // now there is nothing more to process
        assertThat(queue.getNextClusterStateToProcess(), nullValue());
    }

    /**
     * Processing a state from one master must purge pending states from all
     * other masters and fail their committed listeners.
     */
    public void testProcessedStateCleansStatesFromOtherMasters() {
        List<ClusterState> states = randomStates(scaledRandomIntBetween(10, 300), "master1", "master2", "master3", "master4");
        PendingClusterStatesQueue queue = createQueueWithStates(states);
        List<ClusterStateContext> committedContexts = randomCommitStates(queue);
        ClusterState randomCommitted = randomFrom(committedContexts).state;
        queue.markAsProcessed(randomCommitted);
        final String processedMaster = randomCommitted.nodes().masterNodeId();

        // now check that queue doesn't contain anything pending from another master
        for (ClusterStateContext context : queue.pendingStates) {
            final String pendingMaster = context.state.nodes().masterNodeId();
            assertThat("found a cluster state from [" + pendingMaster
                            + "], after a state from [" + processedMaster + "] was processed",
                    pendingMaster, equalTo(processedMaster));
        }
        // and check all committed contexts from another master were failed
        for (ClusterStateContext context : committedContexts) {
            if (context.state.nodes().masterNodeId().equals(processedMaster) == false) {
                assertThat(((MockListener) context.listener).failure, notNullValue());
            }
        }
    }

    /**
     * Failing a committed state must remove only the committed states it
     * supersedes (failing their listeners with the same exception); nothing
     * else may be removed from the queue.
     */
    public void testFailedStateCleansSupersededStatesOnly() {
        List<ClusterState> states = randomStates(scaledRandomIntBetween(10, 50), "master1", "master2", "master3", "master4");
        PendingClusterStatesQueue queue = createQueueWithStates(states);
        List<ClusterStateContext> committedContexts = randomCommitStates(queue);
        ClusterState toFail = randomFrom(committedContexts).state;
        queue.markAsFailed(toFail, new ElasticsearchException("boo!"));
        final Map<String, ClusterStateContext> committedContextsById = new HashMap<>();
        for (ClusterStateContext context : committedContexts) {
            committedContextsById.put(context.stateUUID(), context);
        }
        // now check that queue doesn't contain superseded states
        for (ClusterStateContext context : queue.pendingStates) {
            if (context.committed()) {
                assertFalse("found a committed cluster state, which is superseded by a failed state.\nFound:" +
                        context.state + "\nfailed:" + toFail, toFail.supersedes(context.state));
            }
        }
        // check no state has been erroneously removed
        for (ClusterState state : states) {
            ClusterStateContext pendingContext = queue.findState(state.stateUUID());
            if (pendingContext != null) {
                // still pending — nothing to verify
                continue;
            }
            if (state.equals(toFail)) {
                // the explicitly failed state is expected to be gone
                continue;
            }
            assertThat("non-committed states should never be removed", committedContextsById, hasKey(state.stateUUID()));
            final ClusterStateContext context = committedContextsById.get(state.stateUUID());
            assertThat("removed state is not superseded by failed state.\nRemoved state:" + context + "\nfailed: " + toFail,
                    toFail.supersedes(context.state), equalTo(true));
            assertThat("removed state was failed with wrong exception", ((MockListener) context.listener).failure, notNullValue());
            assertThat("removed state was failed with wrong exception", ((MockListener) context.listener).failure.getMessage(),
                    containsString("boo"));
        }
    }

    /**
     * failAllStatesAndClear must empty the queue entirely and fail every
     * committed listener with the supplied exception.
     */
    public void testFailAllAndClear() {
        List<ClusterState> states = randomStates(scaledRandomIntBetween(10, 50), "master1", "master2", "master3", "master4");
        PendingClusterStatesQueue queue = createQueueWithStates(states);
        List<ClusterStateContext> committedContexts = randomCommitStates(queue);
        queue.failAllStatesAndClear(new ElasticsearchException("boo!"));
        assertThat(queue.pendingStates, empty());
        assertThat(queue.getNextClusterStateToProcess(), nullValue());
        for (ClusterStateContext context : committedContexts) {
            assertThat("state was failed with wrong exception", ((MockListener) context.listener).failure, notNullValue());
            assertThat("state was failed with wrong exception", ((MockListener) context.listener).failure.getMessage(),
                    containsString("boo"));
        }
    }

    /**
     * Stats must track total/pending/committed counts through the
     * add → commit → process life-cycle.
     */
    public void testQueueStats() {
        List<ClusterState> states = randomStates(scaledRandomIntBetween(10, 100), "master");
        PendingClusterStatesQueue queue = createQueueWithStates(states);
        assertThat(queue.stats().getTotal(), equalTo(states.size()));
        assertThat(queue.stats().getPending(), equalTo(states.size()));
        assertThat(queue.stats().getCommitted(), equalTo(0));

        List<ClusterStateContext> committedContexts = randomCommitStates(queue);
        assertThat(queue.stats().getTotal(), equalTo(states.size()));
        assertThat(queue.stats().getPending(), equalTo(states.size() - committedContexts.size()));
        assertThat(queue.stats().getCommitted(), equalTo(committedContexts.size()));

        ClusterState highestCommitted = null;
        for (ClusterStateContext context : committedContexts) {
            if (highestCommitted == null || context.state.supersedes(highestCommitted)) {
                highestCommitted = context.state;
            }
        }

        queue.markAsProcessed(highestCommitted);
        // processing the highest committed state purges all other committed states
        assertThat(queue.stats().getTotal(), equalTo(states.size() - committedContexts.size()));
        assertThat(queue.stats().getPending(), equalTo(states.size() - committedContexts.size()));
        assertThat(queue.stats().getCommitted(), equalTo(0));
    }

    /**
     * Commit a random subset of the queue's pending states (each with a fresh
     * {@link MockListener}) and return the resulting contexts. May pick the
     * same state twice; the duplicate commit yields null and is skipped.
     */
    protected List<ClusterStateContext> randomCommitStates(PendingClusterStatesQueue queue) {
        List<ClusterStateContext> committedContexts = new ArrayList<>();
        for (int iter = randomInt(queue.pendingStates.size() - 1); iter >= 0; iter--) {
            ClusterState state = queue.markAsCommitted(randomFrom(queue.pendingStates).stateUUID(), new MockListener());
            if (state != null) {
                // null cluster state means we committed twice
                committedContexts.add(queue.findState(state.stateUUID()));
            }
        }
        return committedContexts;
    }

    /** Build a queue large enough that capacity limits never apply. */
    PendingClusterStatesQueue createQueueWithStates(List<ClusterState> states) {
        return createQueueWithStates(states, states.size() * 2); // we don't care about limits (there are dedicated tests for that)
    }

    /** Build a queue with the given capacity and add all states as pending. */
    PendingClusterStatesQueue createQueueWithStates(List<ClusterState> states, int maxQueueSize) {
        PendingClusterStatesQueue queue;
        queue = new PendingClusterStatesQueue(logger, maxQueueSize);
        for (ClusterState state : states) {
            queue.addPending(state);
        }
        return queue;
    }

    /**
     * Generate {@code count} cluster states, each attributed to a random
     * master from {@code masters}; per master, successive states increment
     * the version so that later states supersede earlier ones.
     */
    List<ClusterState> randomStates(int count, String... masters) {
        ArrayList<ClusterState> states = new ArrayList<>(count);
        ClusterState[] lastClusterStatePerMaster = new ClusterState[masters.length];
        for (; count > 0; count--) {
            int masterIndex = randomInt(masters.length - 1);
            ClusterState state = lastClusterStatePerMaster[masterIndex];
            if (state == null) {
                // first state for this master: build a fresh one-node cluster
                state = ClusterState.builder(ClusterName.DEFAULT).nodes(DiscoveryNodes.builder()
                                .put(new DiscoveryNode(masters[masterIndex], DummyTransportAddress.INSTANCE,
                                        Version.CURRENT)).masterNodeId(masters[masterIndex]).build()
                ).build();
            } else {
                state = ClusterState.builder(state).incrementVersion().build();
            }
            states.add(state);
            lastClusterStatePerMaster[masterIndex] = state;
        }
        return states;
    }

    /**
     * Listener that records whether the state was processed or failed.
     * Fields are volatile: the queue may notify from another thread.
     */
    static class MockListener implements PendingClusterStatesQueue.StateProcessedListener {
        volatile boolean processed;
        volatile Throwable failure;

        @Override
        public void onNewClusterStateProcessed() {
            processed = true;
        }

        @Override
        public void onNewClusterStateFailed(Throwable t) {
            failure = t;
        }
    }
}
// Verbatim.java - Xalan extensions supporting DocBook verbatim environments package com.nwalsh.xalan; import java.util.Stack; import java.util.StringTokenizer; import org.xml.sax.*; import org.xml.sax.helpers.AttributesImpl; import org.w3c.dom.*; import org.w3c.dom.traversal.NodeIterator; import org.apache.xerces.dom.*; import org.apache.xpath.objects.XObject; import org.apache.xpath.XPath; import org.apache.xpath.XPathContext; import org.apache.xpath.NodeSet; import org.apache.xpath.DOMHelper; import org.apache.xalan.extensions.XSLProcessorContext; import org.apache.xalan.extensions.ExpressionContext; import org.apache.xalan.transformer.TransformerImpl; import org.apache.xalan.templates.StylesheetRoot; import org.apache.xalan.templates.ElemExtensionCall; import org.apache.xalan.templates.OutputProperties; import org.apache.xalan.res.XSLTErrorResources; import org.apache.xml.utils.DOMBuilder; import org.apache.xml.utils.AttList; import org.apache.xml.utils.QName; import javax.xml.transform.stream.StreamResult; import javax.xml.transform.TransformerException; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import com.nwalsh.xalan.Callout; import com.nwalsh.xalan.Params; /** * <p>Xalan extensions supporting DocBook verbatim environments</p> * * <p>$Id: Verbatim.java,v 1.5 2003/12/17 01:01:34 nwalsh Exp $</p> * * <p>Copyright (C) 2001 Norman Walsh.</p> * * <p>This class provides a * <a href="http://xml.apache.org/xalan">Xalan</a> * implementation of two features that would be impractical to * implement directly in XSLT: line numbering and callouts.</p> * * <p><b>Line Numbering</b></p> * <p>The <tt>numberLines</tt> family of functions takes a result tree * fragment (assumed to contain the contents of a formatted verbatim * element in DocBook: programlisting, screen, address, literallayout, * or synopsis) and returns a result tree fragment decorated with * line 
numbers.</p> * * <p><b>Callouts</b></p> * <p>The <tt>insertCallouts</tt> family of functions takes an * <tt>areaspec</tt> and a result tree fragment * (assumed to contain the contents of a formatted verbatim * element in DocBook: programlisting, screen, address, literallayout, * or synopsis) and returns a result tree fragment decorated with * callouts.</p> * * <p><b>Change Log:</b></p> * <dl> * <dt>1.0</dt> * <dd><p>Initial release.</p></dd> * </dl> * * @author Norman Walsh * <a href="mailto:[email protected]">[email protected]</a> * * @version $Id: Verbatim.java,v 1.5 2003/12/17 01:01:34 nwalsh Exp $ * */ public class Verbatim { /** A stack to hold the open elements while walking through a RTF. */ private Stack elementStack = null; /** A stack to hold the temporarily closed elements. */ private Stack tempStack = null; /** The current line number. */ private int lineNumber = 0; /** The current column number. */ private int colNumber = 0; /** The modulus for line numbering (every 'modulus' line is numbered). */ private int modulus = 0; /** The width (in characters) of line numbers (for padding). */ private int width = 0; /** The separator between the line number and the verbatim text. */ private String separator = ""; /** The (sorted) array of callouts obtained from the areaspec. */ private Callout callout[] = null; /** The number of callouts in the callout array. */ private int calloutCount = 0; /** A pointer used to keep track of our position in the callout array. */ private int calloutPos = 0; /** The path to use for graphical callout decorations. */ private String graphicsPath = null; /** The extension to use for graphical callout decorations. */ private String graphicsExt = null; /** The largest callout number that can be represented graphically. */ private int graphicsMax = 10; /** Should graphic callouts use fo:external-graphics or imgs. 
*/ private boolean graphicsFO = false; private static final String foURI = "http://www.w3.org/1999/XSL/Format"; private static final String xhURI = "http://www.w3.org/1999/xhtml"; /** * <p>Constructor for Verbatim</p> * * <p>All of the methods are static, so the constructor does nothing.</p> */ public Verbatim() { } /** * <p>Number lines in a verbatim environment.</p> * * <p>This method adds line numbers to a result tree fragment. Each * newline that occurs in a text node is assumed to start a new line. * The first line is always numbered, every subsequent xalanMod line * is numbered (so if xalanMod=5, lines 1, 5, 10, 15, etc. will be * numbered. If there are fewer than xalanMod lines in the environment, * every line is numbered.</p> * * <p>xalanMod is taken from the $linenumbering.everyNth parameter.</p> * * <p>Every line number will be right justified in a string xalanWidth * characters long. If the line number of the last line in the * environment is too long to fit in the specified width, the width * is automatically increased to the smallest value that can hold the * number of the last line. (In other words, if you specify the value 2 * and attempt to enumerate the lines of an environment that is 100 lines * long, the value 3 will automatically be used for every line in the * environment.)</p> * * <p>xalanWidth is taken from the $linenumbering.width parameter.</p> * * <p>The xalanSep string is inserted between the line * number and the original program listing. Lines that aren't numbered * are preceded by a xalanWidth blank string and the separator.</p> * * <p>xalanSep is taken from the $linenumbering.separator parameter.</p> * * <p>If inline markup extends across line breaks, markup changes are * required. All the open elements are closed before the line break and * "reopened" afterwards. 
The reopened elements will have the same * attributes as the originals, except that 'name' and 'id' attributes * are not duplicated.</p> * * @param xalanRTF The result tree fragment of the verbatim environment. * * @return The modified result tree fragment. */ public DocumentFragment numberLines (ExpressionContext context, NodeIterator xalanNI) { int xalanMod = Params.getInt(context, "linenumbering.everyNth"); int xalanWidth = Params.getInt(context, "linenumbering.width"); String xalanSep = Params.getString(context, "linenumbering.separator"); DocumentFragment xalanRTF = (DocumentFragment) xalanNI.nextNode(); int numLines = countLineBreaks(xalanRTF) + 1; DocumentBuilderFactory docFactory = DocumentBuilderFactory.newInstance(); DocumentBuilder docBuilder = null; try { docBuilder = docFactory.newDocumentBuilder(); } catch (ParserConfigurationException e) { System.out.println("PCE!"); return xalanRTF; } Document doc = docBuilder.newDocument(); DocumentFragment df = doc.createDocumentFragment(); DOMBuilder db = new DOMBuilder(doc, df); elementStack = new Stack(); lineNumber = 0; modulus = numLines < xalanMod ? 1 : xalanMod; width = xalanWidth; separator = xalanSep; double log10numLines = Math.log(numLines) / Math.log(10); if (width < log10numLines + 1) { width = (int) Math.floor(log10numLines + 1); } lineNumberFragment(db, xalanRTF); return df; } /** * <p>Count the number of lines in a verbatim environment.</p> * * <p>This method walks over the nodes of a DocumentFragment and * returns the number of lines breaks that it contains.</p> * * @param node The root of the tree walk over. 
*/ private int countLineBreaks(Node node) { int numLines = 0; if (node.getNodeType() == Node.DOCUMENT_FRAGMENT_NODE || node.getNodeType() == Node.DOCUMENT_NODE || node.getNodeType() == Node.ELEMENT_NODE) { Node child = node.getFirstChild(); while (child != null) { numLines += countLineBreaks(child); child = child.getNextSibling(); } } else if (node.getNodeType() == Node.TEXT_NODE) { String text = node.getNodeValue(); // Walk through the text node looking for newlines int pos = 0; for (int count = 0; count < text.length(); count++) { if (text.charAt(count) == '\n') { numLines++; } } } else { // nop } return numLines; } /** * <p>Build a DocumentFragment with numbered lines.</p> * * <p>This is the method that actually does the work of numbering * lines in a verbatim environment. It recursively walks through a * tree of nodes, copying the structure into the rtf. Text nodes * are examined for new lines and modified as requested by the * global line numbering parameters.</p> * * <p>When called, rtf should be an empty DocumentFragment and node * should be the first child of the result tree fragment that contains * the existing, formatted verbatim text.</p> * * @param rtf The resulting verbatim environment with numbered lines. * @param node The root of the tree to copy. 
 */
  private void lineNumberFragment(DOMBuilder rtf, Node node) {
    try {
      if (node.getNodeType() == Node.DOCUMENT_FRAGMENT_NODE
          || node.getNodeType() == Node.DOCUMENT_NODE) {
        // Document(-fragment) wrapper: recurse into children, emitting nothing itself.
        Node child = node.getFirstChild();
        while (child != null) {
          lineNumberFragment(rtf, child);
          child = child.getNextSibling();
        }
      } else if (node.getNodeType() == Node.ELEMENT_NODE) {
        // Copy the element into the output and remember it on elementStack so
        // closeOpenElements()/openClosedElements() can split it around newlines.
        String ns = node.getNamespaceURI();
        String localName = node.getLocalName();
        String name = ((Element) node).getTagName();
        rtf.startElement(ns, localName, name, copyAttributes((Element) node));
        elementStack.push(node);
        Node child = node.getFirstChild();
        while (child != null) {
          lineNumberFragment(rtf, child);
          child = child.getNextSibling();
        }
      } else if (node.getNodeType() == Node.TEXT_NODE) {
        String text = node.getNodeValue();
        if (lineNumber == 0) {
          // The first line is always numbered
          formatLineNumber(rtf, ++lineNumber);
        }
        // Walk through the text node looking for newlines.
        // chars is used as a small output buffer; pos is the number of
        // buffered characters not yet flushed via rtf.characters().
        char chars[] = text.toCharArray();
        int pos = 0;
        for (int count = 0; count < text.length(); count++) {
          if (text.charAt(count) == '\n') {
            // This is the tricky bit; if we find a newline, make sure
            // it doesn't occur inside any markup.
            if (pos > 0) {
              // Flush the text accumulated so far on the current line.
              rtf.characters(chars, 0, pos);
              pos = 0;
            }
            // Temporarily end the currently open inline elements so the
            // newline and line number are emitted outside of them.
            closeOpenElements(rtf);
            // Copy the newline to the output
            chars[pos++] = text.charAt(count);
            rtf.characters(chars, 0, pos);
            pos = 0;
            // Add the line number
            formatLineNumber(rtf, ++lineNumber);
            // Re-open the elements that were closed above.
            openClosedElements(rtf);
          } else {
            chars[pos++] = text.charAt(count);
          }
        }
        // Flush any trailing text after the last newline.
        if (pos > 0) {
          rtf.characters(chars, 0, pos);
        }
      } else if (node.getNodeType() == Node.COMMENT_NODE) {
        // Comments are copied through unchanged.
        String text = node.getNodeValue();
        char chars[] = text.toCharArray();
        rtf.comment(chars, 0, text.length());
      } else if (node.getNodeType() == Node.PROCESSING_INSTRUCTION_NODE) {
        // Processing instructions are copied through unchanged.
        rtf.processingInstruction(node.getNodeName(), node.getNodeValue());
      } else {
        System.out.println("Warning: unexpected node type in lineNumberFragment");
      }

      // Close the element copied at the top of this call and pop it off
      // the open-element stack.
      if (node.getNodeType() == Node.ELEMENT_NODE) {
        String ns = node.getNamespaceURI();
        String localName = node.getLocalName();
        String name = ((Element) node).getTagName();
        rtf.endElement(ns, localName, name);
        elementStack.pop();
      }
    } catch (SAXException e) {
      // NOTE(review): the exception is swallowed; the fragment may be
      // left partially built.
      System.out.println("SAX Exception in lineNumberFragment");
    }
  }

  /**
   * <p>Add a formatted line number to the result tree fragment.</p>
   *
   * <p>This method examines the global parameters that control line
   * number presentation (modulus, width, and separator) and adds
   * the appropriate text to the result tree fragment.</p>
   *
   * @param rtf The resulting verbatim environment with numbered lines.
   * @param lineNumber The number of the current line.
*/ private void formatLineNumber(DOMBuilder rtf, int lineNumber) { char ch = 160; String lno = ""; if (lineNumber == 1 || (modulus >= 1 && (lineNumber % modulus == 0))) { lno = "" + lineNumber; } while (lno.length() < width) { lno = ch + lno; } lno += separator; char chars[] = lno.toCharArray(); try { rtf.characters(chars, 0, lno.length()); } catch (SAXException e) { System.out.println("SAX Exception in formatLineNumber"); } } /** * <p>Insert text callouts into a verbatim environment.</p> * * <p>This method examines the <tt>areaset</tt> and <tt>area</tt> elements * in the supplied <tt>areaspec</tt> and decorates the supplied * result tree fragment with appropriate callout markers.</p> * * <p>If a <tt>label</tt> attribute is supplied on an <tt>area</tt>, * its content will be used for the label, otherwise the callout * number will be used, surrounded by parenthesis. Callouts are * numbered in document order. All of the <tt>area</tt>s in an * <tt>areaset</tt> get the same number.</p> * * <p>Only the <tt>linecolumn</tt> and <tt>linerange</tt> units are * supported. If no unit is specifed, <tt>linecolumn</tt> is assumed. * If only a line is specified, the callout decoration appears in * the defaultColumn. Lines will be padded with blanks to reach the * necessary column, but callouts that are located beyond the last * line of the verbatim environment will be ignored.</p> * * <p>Callouts are inserted before the character at the line/column * where they are to occur.</p> * * @param areaspecNodeSet The source node set that contains the areaspec. * @param xalanRTF The result tree fragment of the verbatim environment. * @param defaultColumn The column for callouts that specify only a line. * * @return The modified result tree fragment. 
 */

/**
 * <p>Insert graphical callouts into a verbatim environment.</p>
 *
 * <p>This method examines the <tt>areaset</tt> and <tt>area</tt> elements
 * in the supplied <tt>areaspec</tt> and decorates the supplied
 * result tree fragment with appropriate callout markers.</p>
 *
 * <p>If a <tt>label</tt> attribute is supplied on an <tt>area</tt>,
 * its content will be used for the label, otherwise the callout
 * number will be used. Callouts are
 * numbered in document order. All of the <tt>area</tt>s in an
 * <tt>areaset</tt> get the same number.</p>
 *
 * <p>If the callout number is not greater than <tt>gMax</tt>, the
 * callout generated will be:</p>
 *
 * <pre>
 * &lt;img src="$gPath/conumber$gExt" alt="conumber">
 * </pre>
 *
 * <p>Otherwise, it will be the callout number surrounded by
 * parentheses.</p>
 *
 * <p>Only the <tt>linecolumn</tt> and <tt>linerange</tt> units are
 * supported. If no unit is specified, <tt>linecolumn</tt> is assumed.
 * If only a line is specified, the callout decoration appears in
 * the defaultColumn. Lines will be padded with blanks to reach the
 * necessary column, but callouts that are located beyond the last
 * line of the verbatim environment will be ignored.</p>
 *
 * <p>Callouts are inserted before the character at the line/column
 * where they are to occur.</p>
 *
 * @param areaspecNodeSet The source node set that contains the areaspec.
 * @param xalanRTF The result tree fragment of the verbatim environment.
 * @param defaultColumn The column for callouts that specify only a line.
 * @param gPath The path to use for callout graphics.
 * @param gExt The extension to use for callout graphics.
 * @param gMax The largest number that can be represented as a graphic.
 * @param useFO Should fo:external-graphics be produced, as opposed to
 * HTML imgs. This is bogus, the extension should figure it out, but I
 * haven't figured out how to do that yet.
 *
 * @return The modified result tree fragment.
*/ public DocumentFragment insertCallouts (ExpressionContext context, NodeIterator areaspecNodeSet, NodeIterator xalanNI) { String type = Params.getString(context, "stylesheet.result.type"); boolean useFO = type.equals("fo"); int defaultColumn = Params.getInt(context, "callout.defaultcolumn"); if (Params.getBoolean(context, "callout.graphics")) { String gPath = Params.getString(context, "callout.graphics.path"); String gExt = Params.getString(context, "callout.graphics.extension"); int gMax = Params.getInt(context, "callout.graphics.number.limit"); return insertGraphicCallouts(areaspecNodeSet, xalanNI, defaultColumn, gPath, gExt, gMax, useFO); } else if (Params.getBoolean(context, "callout.unicode")) { int uStart = Params.getInt(context, "callout.unicode.start.character"); int uMax = Params.getInt(context, "callout.unicode.number.limit"); String uFont = Params.getString(context, "callout.unicode.font"); return insertUnicodeCallouts(areaspecNodeSet, xalanNI, defaultColumn, uFont, uStart, uMax, useFO); } else if (Params.getBoolean(context, "callout.dingbats")) { int dMax = 10; return insertDingbatCallouts(areaspecNodeSet, xalanNI, defaultColumn, dMax, useFO); } else { return insertTextCallouts(areaspecNodeSet, xalanNI, defaultColumn, useFO); } } public DocumentFragment insertGraphicCallouts (NodeIterator areaspecNodeSet, NodeIterator xalanNI, int defaultColumn, String gPath, String gExt, int gMax, boolean useFO) { FormatGraphicCallout fgc = new FormatGraphicCallout(gPath,gExt,gMax,useFO); return insertCallouts(areaspecNodeSet, xalanNI, defaultColumn, fgc); } public DocumentFragment insertUnicodeCallouts (NodeIterator areaspecNodeSet, NodeIterator xalanNI, int defaultColumn, String uFont, int uStart, int uMax, boolean useFO) { FormatUnicodeCallout fuc = new FormatUnicodeCallout(uFont, uStart, uMax, useFO); return insertCallouts(areaspecNodeSet, xalanNI, defaultColumn, fuc); } public DocumentFragment insertDingbatCallouts (NodeIterator areaspecNodeSet, NodeIterator 
xalanNI, int defaultColumn, int gMax, boolean useFO) { FormatDingbatCallout fdc = new FormatDingbatCallout(gMax,useFO); return insertCallouts(areaspecNodeSet, xalanNI, defaultColumn, fdc); } public DocumentFragment insertTextCallouts (NodeIterator areaspecNodeSet, NodeIterator xalanNI, int defaultColumn, boolean useFO) { FormatTextCallout ftc = new FormatTextCallout(useFO); return insertCallouts(areaspecNodeSet, xalanNI, defaultColumn, ftc); } public DocumentFragment insertCallouts (NodeIterator areaspecNodeSet, NodeIterator xalanNI, int defaultColumn, FormatCallout fCallout) { DocumentFragment xalanRTF = (DocumentFragment) xalanNI.nextNode(); callout = new Callout[10]; calloutCount = 0; calloutPos = 0; lineNumber = 1; colNumber = 1; // First we walk through the areaspec to calculate the position // of the callouts // <areaspec> // <areaset id="ex.plco.const" coords=""> // <area id="ex.plco.c1" coords="4"/> // <area id="ex.plco.c2" coords="8"/> // </areaset> // <area id="ex.plco.ret" coords="12"/> // <area id="ex.plco.dest" coords="12"/> // </areaspec> int pos = 0; int coNum = 0; boolean inAreaSet = false; Node node = areaspecNodeSet.nextNode(); node = node.getFirstChild(); while (node != null) { if (node.getNodeType() == Node.ELEMENT_NODE) { if (node.getNodeName().equals("areaset")) { coNum++; Node area = node.getFirstChild(); while (area != null) { if (area.getNodeType() == Node.ELEMENT_NODE) { if (area.getNodeName().equals("area")) { addCallout(coNum, area, defaultColumn); } else { System.out.println("Unexpected element in areaset: " + area.getNodeName()); } } area = area.getNextSibling(); } } else if (node.getNodeName().equalsIgnoreCase("area")) { coNum++; addCallout(coNum, node, defaultColumn); } else { System.out.println("Unexpected element in areaspec: " + node.getNodeName()); } } node = node.getNextSibling(); } // Now sort them java.util.Arrays.sort(callout, 0, calloutCount); DocumentBuilderFactory docFactory = DocumentBuilderFactory.newInstance(); 
DocumentBuilder docBuilder = null; try { docBuilder = docFactory.newDocumentBuilder(); } catch (ParserConfigurationException e) { System.out.println("PCE 2!"); return xalanRTF; } Document doc = docBuilder.newDocument(); DocumentFragment df = doc.createDocumentFragment(); DOMBuilder db = new DOMBuilder(doc, df); elementStack = new Stack(); calloutFragment(db, xalanRTF, fCallout); return df; } /** * <p>Build a FragmentValue with callout decorations.</p> * * <p>This is the method that actually does the work of adding * callouts to a verbatim environment. It recursively walks through a * tree of nodes, copying the structure into the rtf. Text nodes * are examined for the position of callouts as described by the * global callout parameters.</p> * * <p>When called, rtf should be an empty FragmentValue and node * should be the first child of the result tree fragment that contains * the existing, formatted verbatim text.</p> * * @param rtf The resulting verbatim environment with numbered lines. * @param node The root of the tree to copy. 
 */
  private void calloutFragment(DOMBuilder rtf, Node node, FormatCallout fCallout) {
    try {
      if (node.getNodeType() == Node.DOCUMENT_FRAGMENT_NODE
          || node.getNodeType() == Node.DOCUMENT_NODE) {
        // Document(-fragment) wrapper: recurse into children.
        Node child = node.getFirstChild();
        while (child != null) {
          calloutFragment(rtf, child, fCallout);
          child = child.getNextSibling();
        }
      } else if (node.getNodeType() == Node.ELEMENT_NODE) {
        // Copy the element and remember it on elementStack so callout
        // markers can be emitted outside of any open inline markup.
        String ns = node.getNamespaceURI();
        String localName = node.getLocalName();
        String name = ((Element) node).getTagName();
        rtf.startElement(ns, localName, name, copyAttributes((Element) node));
        elementStack.push(node);
        Node child = node.getFirstChild();
        while (child != null) {
          calloutFragment(rtf, child, fCallout);
          child = child.getNextSibling();
        }
      } else if (node.getNodeType() == Node.TEXT_NODE) {
        String text = node.getNodeValue();
        // chars is used as an output buffer; pos counts buffered
        // characters not yet flushed via rtf.characters().
        char chars[] = text.toCharArray();
        int pos = 0;
        for (int count = 0; count < text.length(); count++) {
          // callout[] is sorted, and lineNumber/colNumber track our
          // position in the verbatim text, so only callout[calloutPos]
          // can ever match the current position.
          if (calloutPos < calloutCount
              && callout[calloutPos].getLine() == lineNumber
              && callout[calloutPos].getColumn() == colNumber) {
            if (pos > 0) {
              // Flush pending text before inserting the marker.
              rtf.characters(chars, 0, pos);
              pos = 0;
            }
            closeOpenElements(rtf);
            // Emit every callout registered for this exact line/column.
            while (calloutPos < calloutCount
                   && callout[calloutPos].getLine() == lineNumber
                   && callout[calloutPos].getColumn() == colNumber) {
              fCallout.formatCallout(rtf, callout[calloutPos]);
              calloutPos++;
            }
            openClosedElements(rtf);
          }

          if (text.charAt(count) == '\n') {
            // What if we need to pad this line?
            // Callouts on this line beyond the last actual column are
            // handled by emitting blanks up to their column before the
            // newline is copied.
            if (calloutPos < calloutCount
                && callout[calloutPos].getLine() == lineNumber
                && callout[calloutPos].getColumn() > colNumber) {
              if (pos > 0) {
                rtf.characters(chars, 0, pos);
                pos = 0;
              }
              closeOpenElements(rtf);
              while (calloutPos < calloutCount
                     && callout[calloutPos].getLine() == lineNumber
                     && callout[calloutPos].getColumn() > colNumber) {
                // Pad out to the callout's column, then emit all callouts
                // that land on that column.
                formatPad(rtf, callout[calloutPos].getColumn() - colNumber);
                colNumber = callout[calloutPos].getColumn();
                while (calloutPos < calloutCount
                       && callout[calloutPos].getLine() == lineNumber
                       && callout[calloutPos].getColumn() == colNumber) {
                  fCallout.formatCallout(rtf, callout[calloutPos]);
                  calloutPos++;
                }
              }
              openClosedElements(rtf);
            }

            // Advance to the start of the next line.
            lineNumber++;
            colNumber = 1;
          } else {
            colNumber++;
          }
          chars[pos++] = text.charAt(count);
        }

        // Flush any remaining buffered text.
        if (pos > 0) {
          rtf.characters(chars, 0, pos);
        }
      } else if (node.getNodeType() == Node.COMMENT_NODE) {
        // Comments are copied through unchanged.
        String text = node.getNodeValue();
        char chars[] = text.toCharArray();
        rtf.comment(chars, 0, text.length());
      } else if (node.getNodeType() == Node.PROCESSING_INSTRUCTION_NODE) {
        // Processing instructions are copied through unchanged.
        rtf.processingInstruction(node.getNodeName(), node.getNodeValue());
      } else {
        System.out.println("Warning: unexpected node type in calloutFragment: "
                           + node.getNodeType() + ": " + node.getNodeName());
      }

      // Close the element copied at the top of this call.
      if (node.getNodeType() == Node.ELEMENT_NODE) {
        String ns = node.getNamespaceURI();
        String localName = node.getLocalName();
        String name = ((Element) node).getTagName();
        rtf.endElement(ns, localName, name);
        elementStack.pop();
      } else {
        // nop
      }
    } catch (SAXException e) {
      // NOTE(review): swallowed; the fragment may be left partially built.
      System.out.println("SAX Exception in calloutFragment");
    }
  }

  /**
   * <p>Add a callout to the global callout array</p>
   *
   * <p>This method examines a callout <tt>area</tt> and adds it to
   * the global callout array if it can be interpreted.</p>
   *
   * <p>Only the <tt>linecolumn</tt> and <tt>linerange</tt> units are
   * supported. If no unit is specified, <tt>linecolumn</tt> is assumed.
* If only a line is specified, the callout decoration appears in * the <tt>defaultColumn</tt>.</p> * * @param coNum The callout number. * @param node The <tt>area</tt>. * @param defaultColumn The default column for callouts. */ private void addCallout (int coNum, Node node, int defaultColumn) { Element area = (Element) node; String units = area.getAttribute("units"); String otherUnits = area.getAttribute("otherunits"); String coords = area.getAttribute("coords"); int type = 0; String otherType = null; if (units == null || units.equals("linecolumn")) { type = Callout.LINE_COLUMN; // the default } else if (units.equals("linerange")) { type = Callout.LINE_RANGE; } else if (units.equals("linecolumnpair")) { type = Callout.LINE_COLUMN_PAIR; } else if (units.equals("calspair")) { type = Callout.CALS_PAIR; } else { type = Callout.OTHER; otherType = otherUnits; } if (type != Callout.LINE_COLUMN && type != Callout.LINE_RANGE) { System.out.println("Only linecolumn and linerange units are supported"); return; } if (coords == null) { System.out.println("Coords must be specified"); return; } // Now let's see if we can interpret the coordinates... 
StringTokenizer st = new StringTokenizer(coords); int tokenCount = 0; int c1 = 0; int c2 = 0; while (st.hasMoreTokens()) { tokenCount++; if (tokenCount > 2) { System.out.println("Unparseable coordinates"); return; } try { String token = st.nextToken(); int coord = Integer.parseInt(token); c2 = coord; if (tokenCount == 1) { c1 = coord; } } catch (NumberFormatException e) { System.out.println("Unparseable coordinate"); return; } } // Make sure we aren't going to blow past the end of our array if (calloutCount == callout.length) { Callout bigger[] = new Callout[calloutCount+10]; for (int count = 0; count < callout.length; count++) { bigger[count] = callout[count]; } callout = bigger; } // Ok, add the callout if (tokenCount == 2) { if (type == Callout.LINE_RANGE) { for (int count = c1; count <= c2; count++) { callout[calloutCount++] = new Callout(coNum, area, count, defaultColumn, type); } } else { // assume linecolumn callout[calloutCount++] = new Callout(coNum, area, c1, c2, type); } } else { // if there's only one number, assume it's the line callout[calloutCount++] = new Callout(coNum, area, c1, defaultColumn, type); } } /** * <p>Add blanks to the result tree fragment.</p> * * <p>This method adds <tt>numBlanks</tt> to the result tree fragment. * It's used to pad lines when callouts occur after the last existing * characater in a line.</p> * * @param rtf The resulting verbatim environment with numbered lines. * @param numBlanks The number of blanks to add. */ private void formatPad(DOMBuilder rtf, int numBlanks) { char chars[] = new char[numBlanks]; for (int count = 0; count < numBlanks; count++) { chars[count] = ' '; } try { rtf.characters(chars, 0, numBlanks); } catch (SAXException e) { System.out.println("SAX Exception in formatCallout"); } } private void closeOpenElements(DOMBuilder rtf) throws SAXException { // Close all the open elements... 
tempStack = new Stack(); while (!elementStack.empty()) { Node elem = (Node) elementStack.pop(); String ns = elem.getNamespaceURI(); String localName = elem.getLocalName(); String name = ((Element) elem).getTagName(); // If this is the bottom of the stack and it's an fo:block // or an HTML pre or div, don't duplicate it... if (elementStack.empty() && (((ns != null) && ns.equals(foURI) && localName.equals("block")) || (((ns == null) && localName.equalsIgnoreCase("pre")) || ((ns != null) && ns.equals(xhURI) && localName.equals("pre"))) || (((ns == null) && localName.equalsIgnoreCase("div")) || ((ns != null) && ns.equals(xhURI) && localName.equals("div"))))) { elementStack.push(elem); break; } else { rtf.endElement(ns, localName, name); tempStack.push(elem); } } } private void openClosedElements(DOMBuilder rtf) throws SAXException { // Now "reopen" the elements that we closed... while (!tempStack.empty()) { Node elem = (Node) tempStack.pop(); String ns = elem.getNamespaceURI(); String localName = elem.getLocalName(); String name = ((Element) elem).getTagName(); NamedNodeMap domAttr = elem.getAttributes(); AttributesImpl attr = new AttributesImpl(); for (int acount = 0; acount < domAttr.getLength(); acount++) { Node a = domAttr.item(acount); if (((ns == null || ns == "http://www.w3.org/1999/xhtml") && localName.equalsIgnoreCase("a")) || (a.getLocalName().equalsIgnoreCase("id"))) { // skip this attribute } else { attr.addAttribute(a.getNamespaceURI(), a.getLocalName(), a.getNodeName(), "CDATA", a.getNodeValue()); } } rtf.startElement(ns, localName, name, attr); elementStack.push(elem); } tempStack = null; } private Attributes copyAttributes(Element node) { AttributesImpl attrs = new AttributesImpl(); NamedNodeMap nnm = node.getAttributes(); for (int count = 0; count < nnm.getLength(); count++) { Attr attr = (Attr) nnm.item(count); String name = attr.getName(); if (name.startsWith("xmlns:") || name.equals("xmlns")) { // Skip it; (don't ya just love it!!) 
} else { attrs.addAttribute(attr.getNamespaceURI(), attr.getName(), attr.getName(), "CDATA", attr.getValue()); } } return attrs; } }
/* * Copyright 2000-2017 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.codeInspection.streamMigration; import com.intellij.codeInsight.intention.impl.StreamRefactoringUtil; import com.intellij.codeInspection.dataFlow.rangeSet.LongRangeSet; import com.intellij.codeInspection.dataFlow.value.DfaRelationValue; import com.intellij.openapi.project.Project; import com.intellij.psi.*; import com.intellij.psi.search.LocalSearchScope; import com.intellij.psi.search.searches.ReferencesSearch; import com.intellij.psi.util.InheritanceUtil; import com.intellij.psi.util.PsiTreeUtil; import com.intellij.psi.util.PsiUtil; import com.intellij.util.Processor; import com.intellij.util.containers.ContainerUtil; import com.siyeh.ig.callMatcher.CallMatcher; import com.siyeh.ig.psiutils.*; import one.util.streamex.StreamEx; import org.jetbrains.annotations.Contract; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.*; import java.util.function.BiFunction; import java.util.function.Predicate; import java.util.stream.Collectors; import static com.intellij.codeInsight.daemon.impl.analysis.HighlightControlFlowUtil.isEffectivelyFinal; import static com.intellij.codeInspection.streamMigration.StreamApiMigrationInspection.isCallOf; import static com.intellij.util.ObjectUtils.tryCast; import static com.siyeh.ig.psiutils.ControlFlowUtils.getInitializerUsageStatus; import static 
com.siyeh.ig.psiutils.ExpressionUtils.resolveLocalVariable;
import static java.util.Collections.emptyList;
import static java.util.Collections.singletonList;

/**
 * Migrates loops that concatenate strings via StringBuilder/StringBuffer
 * appends into {@code Stream.collect(Collectors.joining(...))} calls.
 */
public class JoiningMigration extends BaseStreamApiMigration {
  protected JoiningMigration(boolean shouldWarn) {
    super(shouldWarn, "collect");
  }

  @Override
  PsiElement migrate(@NotNull Project project, @NotNull PsiElement body, @NotNull TerminalBlock tb) {
    // Recognize the join pattern; bail out if none of the extractors match.
    JoiningTerminal terminal = extractTerminal(tb, null);
    if(terminal == null) return null;
    TerminalBlock block = terminal.getTerminalBlock();
    PsiStatement loopStatement = block.getStreamSourceStatement();
    CommentTracker ct = new CommentTracker();
    // Build the replacement stream expression before touching the PSI tree.
    String stream = terminal.generateStreamCode(ct);
    PsiVariable builder = terminal.getBuilder();
    terminal.preCleanUp(ct);
    ControlFlowUtils.InitializerUsageStatus status = getInitializerUsageStatus(builder, loopStatement);
    if(builder instanceof PsiLocalVariable) {
      // Local builder: replace its initializer with the stream expression,
      // retype it to String and strip redundant toString() calls.
      PsiElement result = replaceInitializer(loopStatement, builder, builder.getInitializer(), stream, status, ct);
      terminal.cleanUp((PsiLocalVariable)builder);
      JoiningTerminal.replaceUsages((PsiLocalVariable)terminal.getBuilder());
      return result;
    }
    else {
      // Non-local builder (field/parameter): keep it and append the joined
      // string to it instead.
      return new CommentTracker().replaceAndRestoreComments(
        tb.getStreamSourceStatement(),
        builder.getName() + ".append(" + stream + ");");
    }
  }

  /**
   * Tries each known join-loop shape in turn and returns the first terminal
   * that matches, or null if the block is not a recognizable join loop.
   */
  @Nullable
  static JoiningTerminal extractTerminal(@NotNull TerminalBlock terminalBlock,
                                         @Nullable("when fix applied") List<PsiVariable> nonFinalVariables) {
    List<BiFunction<TerminalBlock, List<PsiVariable>, JoiningTerminal>> extractors = Arrays.asList(
      JoiningTerminal.CountedLoopJoiningTerminal::extractCountedLoopTerminal,
      JoiningTerminal.PlainJoiningTerminal::extractPlainJoining,
      JoiningTerminal.LengthBasedJoiningTerminal::extractLengthBasedTerminal,
      JoiningTerminal.BoolFlagJoiningTerminal::extractBoolFlagTerminal,
      JoiningTerminal.LengthTruncateJoiningTerminal::extractLengthTruncateTerminal,
      JoiningTerminal.DelimiterRewriteJoiningTerminal::extractDelimiterRewritingTerminal,
      JoiningTerminal.IndexBasedJoiningTerminal::extractIndexBasedTerminal
    );
    return StreamEx.of(extractors)
      .map(extractor -> extractor.apply(terminalBlock, nonFinalVariables))
      .nonNull()
      .findFirst()
      .orElse(null);
  }

  /**
   * A recognized join loop: the builder variable, the loop variable, and the
   * expressions forming the joined element, prefix, suffix and delimiter.
   */
  static class JoiningTerminal {
    // Matches StringBuilder/StringBuffer.append(x).
    private static final CallMatcher APPEND = CallMatcher.anyOf(
      CallMatcher.instanceCall(CommonClassNames.JAVA_LANG_STRING_BUILDER, "append").parameterCount(1),
      CallMatcher.instanceCall(CommonClassNames.JAVA_LANG_STRING_BUFFER, "append").parameterCount(1)
    );
    private static final CallMatcher LENGTH =
      CallMatcher.instanceCall(CommonClassNames.JAVA_LANG_ABSTRACT_STRING_BUILDER, "length").parameterCount(0);
    private static final CallMatcher EMPTY_LENGTH =
      CallMatcher.instanceCall(CommonClassNames.JAVA_LANG_ABSTRACT_STRING_BUILDER, "isEmpty").parameterCount(0);
    private static final CallMatcher SET_LENGTH =
      CallMatcher.instanceCall(CommonClassNames.JAVA_LANG_ABSTRACT_STRING_BUILDER, "setLength").parameterCount(1);
    private static final EquivalenceChecker ourEquivalence = EquivalenceChecker.getCanonicalPsiEquivalence();

    private final @NotNull TerminalBlock myTerminalBlock;
    private final @NotNull PsiVariable myBuilder;            // StringBuilder/StringBuffer being appended to
    private final @NotNull PsiVariable myLoopVariable;       // element variable of the loop
    private final @NotNull List<PsiExpression> myMainJoinParts;      // expressions appended per element
    private final @NotNull List<PsiExpression> myPrefixJoinParts;    // appended once before all elements
    private final @NotNull List<PsiExpression> mySuffixJoinParts;    // appended once after all elements
    private final @NotNull List<PsiExpression> myDelimiterJoinParts; // appended between elements
    private final @Nullable PsiMethodCallExpression myBeforeLoopAppend; // append call preceding the loop, if any
    private final @Nullable PsiMethodCallExpression myAfterLoopAppend;  // append call following the loop, if any

    @NotNull
    public TerminalBlock getTerminalBlock() {
      return myTerminalBlock;
    }

    @NotNull
    public PsiVariable getBuilder() {
      return myBuilder;
    }

    protected JoiningTerminal(@NotNull TerminalBlock block,
                              @NotNull PsiVariable targetBuilder,
                              @NotNull PsiVariable variable,
                              @NotNull List<PsiExpression> mainJoinParts,
                              @NotNull List<PsiExpression> prefixJoinParts,
                              @NotNull List<PsiExpression> suffixJoinParts,
                              @NotNull List<PsiExpression> delimiterJoinParts,
                              @Nullable PsiMethodCallExpression beforeLoopAppend,
                              @Nullable PsiMethodCallExpression afterLoopAppend) {
      myTerminalBlock = block;
      myBuilder = targetBuilder;
      myLoopVariable = variable;
      myMainJoinParts = mainJoinParts;
      myPrefixJoinParts = prefixJoinParts;
      mySuffixJoinParts = suffixJoinParts;
      myDelimiterJoinParts = delimiterJoinParts;
      myBeforeLoopAppend = beforeLoopAppend;
      myAfterLoopAppend = afterLoopAppend;
    }

    /** Retypes the builder variable to String and drops toString() calls on it. */
    void cleanUp(@NotNull PsiLocalVariable target) {
      replaceInitializer(target);
      replaceUsages(target);
    }

    /** Removes the pre/post-loop append calls that became part of the join. */
    void preCleanUp(CommentTracker ct) {
      cleanUpCall(ct, myBeforeLoopAppend);
      cleanUpCall(ct, myAfterLoopAppend);
    }

    /** Builds the full replacement: source + map step + joining collector. */
    @NotNull
    String generateStreamCode(CommentTracker ct) {
      return myTerminalBlock.generate(ct) + generateIntermediate(ct) + generateTerminal(ct);
    }

    /**
     * Changes the declared type to String and replaces a
     * "new StringBuilder(...)"-style initializer with "".
     */
    private static void replaceInitializer(@NotNull PsiLocalVariable target) {
      PsiElementFactory factory = JavaPsiFacade.getElementFactory(target.getProject());
      target.getTypeElement().replace(factory.createTypeElementFromText(CommonClassNames.JAVA_LANG_STRING, target));
      PsiExpression initializer = target.getInitializer();
      String initialText = ConstructionUtils.getStringBuilderInitializerText(initializer);
      if (initialText != null) {
        initializer.replace(factory.createExpressionFromText("\"\"", target));
      }
    }

    /**
     * True if every reference to the variable stays inside the scope that
     * will become the lambda body, so the variable need not be final.
     */
    private static boolean canBeMadeNonFinal(@NotNull PsiLocalVariable variable, @NotNull PsiStatement sourceStatement) {
      NavigatablePsiElement loopBound = PsiTreeUtil.getParentOfType(sourceStatement, PsiMember.class, PsiLambdaExpression.class);
      Predicate<PsiReference> referenceBoundPredicate;
      if (sourceStatement instanceof PsiLoopStatement) {
        referenceBoundPredicate = (reference) ->
          PsiTreeUtil.getParentOfType(reference.getElement(), PsiMember.class, PsiLambdaExpression.class) == loopBound;
      }
      else {
        referenceBoundPredicate = (reference) -> {
          PsiLambdaExpression lambda = PsiTreeUtil.getParentOfType(reference.getElement(), PsiLambdaExpression.class);
          return PsiTreeUtil.getParentOfType(lambda, PsiMember.class, PsiLambdaExpression.class) == loopBound;
        };
      }
      return ReferencesSearch.search(variable)
               .forEach((Processor<PsiReference>)reference -> referenceBoundPredicate.test(reference)) &&
             FinalUtils.canBeFinal(variable);
    }

    /** Produces the ".collect(Collectors.joining(...))" tail. */
    String generateTerminal(CommentTracker ct) {
      final String collectArguments;
      if (myDelimiterJoinParts.isEmpty() && myPrefixJoinParts.isEmpty() && mySuffixJoinParts.isEmpty()) {
        collectArguments = "";
      }
      else {
        String delimiter = myDelimiterJoinParts.isEmpty() ? "\"\"" : getExpressionText(ct, myDelimiterJoinParts);
        if (mySuffixJoinParts.isEmpty() && myPrefixJoinParts.isEmpty()) {
          // joining(delimiter)
          collectArguments = delimiter;
        }
        else {
          // joining(delimiter, prefix, suffix)
          String suffix = mySuffixJoinParts.isEmpty() ? "\"\"" : getExpressionText(ct, mySuffixJoinParts);
          String prefix = myPrefixJoinParts.isEmpty() ? "\"\"" : getExpressionText(ct, myPrefixJoinParts);
          collectArguments = delimiter + "," + prefix + "," + suffix;
        }
      }
      return ".collect(" + CommonClassNames.JAVA_UTIL_STREAM_COLLECTORS + ".joining(" + collectArguments + "))";
    }

    /** Produces the ".map(...)" step, or "" when the element is used as-is. */
    String generateIntermediate(CommentTracker ct) {
      if (TypeUtils.isJavaLangString(myLoopVariable.getType()) &&
          myMainJoinParts.size() == 1 &&
          myMainJoinParts.get(0) instanceof PsiReferenceExpression) {
        // Joining the String elements themselves: no map step needed.
        return "";
      }
      PsiElementFactory elementFactory = JavaPsiFacade.getElementFactory(myLoopVariable.getProject());
      String joinTransformation = getExpressionText(ct, myMainJoinParts);
      PsiExpression mapping = elementFactory.createExpressionFromText(joinTransformation, myLoopVariable);
      return StreamRefactoringUtil.generateMapOperation(myLoopVariable, null, mapping);
    }

    /** Replaces "builder.toString()" usages with plain references to the variable. */
    private static void replaceUsages(PsiLocalVariable target) {
      Collection<PsiReference> usages = ReferencesSearch.search(target).findAll();
      for (PsiReference usage : usages) {
        PsiElement element = usage.getElement();
        if (element.isValid() && element instanceof PsiExpression) {
          PsiMethodCallExpression call = ExpressionUtils.getCallForQualifier((PsiExpression)element);
          if (call != null
              && "toString".equals(call.getMethodExpression().getReferenceName())) {
            new CommentTracker().replaceAndRestoreComments(call, element);
          }
        }
      }
    }

    /**
     * Removes an append call absorbed into the join: deletes the whole
     * statement when the call stands alone, otherwise splices the call out
     * of its qualifier chain.
     */
    private static void cleanUpCall(CommentTracker ct, PsiMethodCallExpression call) {
      if (call != null) {
        if (call.getParent() instanceof PsiExpressionStatement) {
          ct.delete(call);
        }
        else {
          PsiMethodCallExpression nextCall = ExpressionUtils.getCallForQualifier(call);
          PsiExpression qualifier = call.getMethodExpression().getQualifierExpression();
          if (nextCall != null && qualifier != null) {
            ct.replace(nextCall, qualifier);
          }
        }
      }
    }

    /**
     * Renders the join parts as a single "+"-concatenation, converting each
     * part to a CharSequence-compatible expression where necessary.
     */
    private static String getExpressionText(CommentTracker ct, @NotNull List<PsiExpression> joinParts) {
      StringJoiner joiner = new StringJoiner("+");
      int size = joinParts.size();
      for (int i = 0; i < joinParts.size(); i++) {
        PsiExpression joinPart = joinParts.get(i);
        String partText;
        if (i == 0) {
          // The first operand only needs to be a String if its right
          // neighbor isn't one already.
          boolean neighborIsString = false;
          if (joinParts.size() > 1) {
            PsiExpression second = joinParts.get(1);
            if (TypeUtils.isJavaLangString(second.getType())) {
              neighborIsString = true;
            }
          }
          partText = expressionToCharSequence(ct, joinPart, size, neighborIsString);
        }
        else {
          partText = expressionToCharSequence(ct, joinPart, size, true);
        }
        joiner.add(partText);
      }
      return joiner.toString();
    }

    /**
     * Evaluates the expression to a compile-time constant string, following
     * one level of effectively-final local variable initializer; null when
     * not constant.
     */
    @Nullable
    private static String computeConstant(@NotNull PsiExpression expression) {
      Object constantExpression = ExpressionUtils.computeConstantExpression(expression);
      if (constantExpression != null) {
        return String.valueOf(constantExpression);
      }
      else {
        PsiLocalVariable variable = resolveLocalVariable(expression);
        if(variable == null) return null;
        PsiElement parent = variable.getParent();
        PsiExpression initializer = variable.getInitializer();
        if(parent == null || initializer == null) return null;
        if(!isEffectivelyFinal(variable, parent, null)) return null;
        Object initializerConstant = ExpressionUtils.computeConstantExpression(initializer);
        if(initializerConstant == null) return null;
        return String.valueOf(initializerConstant);
      }
    }
  /**
   * Concatenates the compile-time constant values of all join parts; null when any part is not
   * a computable constant (see the single-expression overload).
   */
  @Nullable
  private static String computeConstant(@NotNull List<PsiExpression> joinParts) {
    StringBuilder sb = new StringBuilder();
    for (PsiExpression expression : joinParts) {
      String constant = computeConstant(expression);
      if(constant == null) return null;
      sb.append(constant);
    }
    return sb.toString();
  }

  /**
   * Produces source text for one join part so that the generated '+'-concatenation evaluates to
   * a CharSequence:
   * <ul>
   * <li>{@code s.charAt(constIdx)} is rewritten to {@code s.substring(constIdx, constIdx + 1)}
   *     so the result stays a String rather than a char;</li>
   * <li>non-CharSequence parts with no String neighbor (or char arrays, which String
   *     concatenation would print as a pointer-ish value) get {@code String.valueOf(...)};
   *     a char literal is turned directly into the equivalent one-character String literal
   *     (with special-casing of {@code '"'});</li>
   * <li>otherwise parentheses are added only where precedence requires them.</li>
   * </ul>
   *
   * @param expressionCount total number of join parts (affects parenthesization of a lone part)
   * @param neighborIsString whether an adjacent part is already a String
   */
  @NotNull
  private static String expressionToCharSequence(CommentTracker ct,
                                                 @NotNull PsiExpression expression,
                                                 int expressionCount,
                                                 boolean neighborIsString) {
    PsiType type = expression.getType();
    if(expression instanceof PsiMethodCallExpression) {
      PsiMethodCallExpression callExpression = (PsiMethodCallExpression)expression;
      if(isCallOf(callExpression, CommonClassNames.JAVA_LANG_STRING, "charAt")) {
        PsiExpression qualifierExpression = callExpression.getMethodExpression().getQualifierExpression();
        PsiExpression[] expressions = callExpression.getArgumentList().getExpressions();
        if(expressions.length == 1) {
          PsiExpression first = expressions[0];
          if(qualifierExpression != null) {
            Object constantExpression = ExpressionUtils.computeConstantExpression(first);
            if(constantExpression instanceof Integer) {
              String endIndex = String.valueOf((int)constantExpression + 1);
              return ct.text(qualifierExpression) + ".substring(" + ct.text(first) + "," + endIndex + ")";
            }
          }
        }
      }
    }
    if (!InheritanceUtil.isInheritor(type, "java.lang.CharSequence")) {
      if (!neighborIsString || (type instanceof PsiArrayType && ((PsiArrayType)type).getComponentType().equals(PsiType.CHAR))) {
        PsiLiteralExpression literalExpression = tryCast(expression, PsiLiteralExpression.class);
        if (literalExpression != null) {
          Object value = literalExpression.getValue();
          if (value instanceof Character) {
            String text = ct.text(literalExpression);
            // the double-quote character needs escaping inside the generated String literal
            if ("'\"'".equals(text)) return "\"\\\"\"";
            return "\"" + text.substring(1, text.length() - 1) + "\"";
          }
        }
        return CommonClassNames.JAVA_LANG_STRING + ".valueOf(" + ct.text(expression) + ")";
      }
      if (ParenthesesUtils.getPrecedence(expression) > ParenthesesUtils.ADDITIVE_PRECEDENCE ||
          (expression.getType() instanceof PsiPrimitiveType &&
           ParenthesesUtils.getPrecedence(expression) == ParenthesesUtils.ADDITIVE_PRECEDENCE) ||
          expressionCount == 1) {
        return "(" + ct.text(expression) + ")";
      }
      return ct.text(expression);
    }
    String expressionText = ct.text(expression);
    if(ParenthesesUtils.getPrecedence(expression) > ParenthesesUtils.ADDITIVE_PRECEDENCE && expressionCount > 1) {
      expressionText = "(" + expressionText + ")";
    }
    return expressionText;
  }

  /**
   * From a statement like {@code sb.append(a).append(b);} extracts the receiver variable {@code sb}.
   * Walks down the qualifier chain of append calls until a plain variable reference is found.
   */
  // @Nullable("when failed to extract")
  private static PsiVariable extractStringBuilder(@NotNull PsiStatement statement) {
    PsiExpressionStatement expressionStatement = tryCast(statement, PsiExpressionStatement.class);
    if (expressionStatement == null) return null;
    PsiMethodCallExpression methodCallExpression = tryCast(expressionStatement.getExpression(), PsiMethodCallExpression.class);
    if (methodCallExpression == null) return null;
    PsiMethodCallExpression currentExpression = methodCallExpression;
    // NOTE(review): the loop condition re-tests the outermost call each iteration; presumably it
    // was meant to test currentExpression — confirm against the inspection's test data.
    while (APPEND.test(methodCallExpression)) {
      PsiExpression qualifierExpression = currentExpression.getMethodExpression().getQualifierExpression();
      PsiMethodCallExpression callerExpression = MethodCallUtils.getQualifierMethodCall(currentExpression);
      if (callerExpression == null) {
        PsiReferenceExpression refExpression = tryCast(qualifierExpression, PsiReferenceExpression.class);
        if(refExpression == null) return null;
        return tryCast(refExpression.resolve(), PsiVariable.class);
      }
      currentExpression = callerExpression;
    }
    return null;
  }

  /**
   * Extracts join parts from a single (possibly chained) append expression; an absent expression
   * yields an empty list, a non-append expression yields null.
   */
  @Nullable("when failed to extract")
  private static List<PsiExpression> extractJoinParts(@Nullable PsiExpression expression) {
    List<PsiExpression> joinParts = new ArrayList<>();
    if (expression == null) return joinParts;
    return tryExtractJoinPart(expression, joinParts) ? joinParts : null;
  }

  /**
   * @param statements list of statements. Only appends expected inside.
   * @return list of joining expressions
   */
  @Nullable("when failed to extract")
  private static List<PsiExpression> extractJoinParts(@NotNull List<PsiStatement> statements) {
    List<PsiExpression> joinParts = new ArrayList<>();
    for (PsiStatement statement : statements) {
      PsiExpressionStatement expressionStatement = tryCast(statement, PsiExpressionStatement.class);
      if (expressionStatement == null) return null;
      PsiExpression expression = expressionStatement.getExpression();
      if (!tryExtractJoinPart(expression, joinParts)) {
        return null;
      }
    }
    return joinParts;
  }

  /**
   * Collects the argument(s) of an append call (recursing down a chained-append qualifier first,
   * so parts are added in evaluation order).
   *
   * @param joinParts list to append joining parts into it
   * @return true on success
   */
  private static boolean tryExtractJoinPart(@NotNull PsiExpression expression, @NotNull List<PsiExpression> joinParts) {
    PsiMethodCallExpression methodCallExpression = tryCast(expression, PsiMethodCallExpression.class);
    if (methodCallExpression != null) {
      if (!APPEND.test(methodCallExpression)) return false;
      PsiExpression appendArgument = methodCallExpression.getArgumentList().getExpressions()[0];
      PsiExpression qualifierExpression = methodCallExpression.getMethodExpression().getQualifierExpression();
      if (qualifierExpression == null) return false;
      PsiReferenceExpression referenceExpression = tryCast(qualifierExpression, PsiReferenceExpression.class);
      if (referenceExpression == null) {
        // assume expr like sb.append(a).append(b).append(c)
        if (!tryExtractJoinPart(qualifierExpression, joinParts)) return false;
      }
      if (!tryExtractConcatenationParts(appendArgument, joinParts)) return false;
      return true;
    }
    return false;
  }

  /**
   * Flattens a String '+'-concatenation into its operands (recursively, skipping parentheses);
   * any non-String expression — or a String expression that is not a binary '+' — is added as a
   * single join part.
   */
  private static boolean tryExtractConcatenationParts(@NotNull PsiExpression expression, @NotNull List<PsiExpression> joinParts) {
    PsiType type = expression.getType();
    if (type == null) return false;
    if (!TypeUtils.isJavaLangString(type)) {
      joinParts.add(expression);
      return true;
    }
    PsiBinaryExpression binaryExpression = tryCast(expression, PsiBinaryExpression.class);
    if (binaryExpression != null) {
      if (binaryExpression.getOperationTokenType().equals(JavaTokenType.PLUS)) {
        PsiExpression lOperand = PsiUtil.skipParenthesizedExprDown(binaryExpression.getLOperand());
        PsiExpression rOperand = PsiUtil.skipParenthesizedExprDown(binaryExpression.getROperand());
        if (lOperand == null || rOperand == null) return false;
        PsiType lOperandType = lOperand.getType();
        PsiType rOperandType = rOperand.getType();
        if (lOperandType == null || rOperandType == null) return false;
        if (!tryExtractConcatenationParts(lOperand, joinParts) ||
            !tryExtractConcatenationParts(rOperand, joinParts)) {
          return false;
        }
        return true;
      }
    }
    joinParts.add(expression);
    return true;
  }

  /**
   * From an initializer like {@code new StringBuilder("pre").append(x)} extracts the join parts
   * contributed by the constructor argument and any chained appends (walking the call chain from
   * the outside in, then reversing into evaluation order). Only StringBuilder/StringBuffer
   * constructions qualify; an int-typed constructor argument (a capacity) contributes no part.
   */
  @Nullable("when failed to extract join parts from initializer statement")
  private static List<PsiExpression> extractStringBuilderInitializer(PsiExpression construction) {
    List<PsiExpression> joinParts = new ArrayList<>();
    PsiExpression expression = construction;
    PsiMethodCallExpression current = tryCast(construction, PsiMethodCallExpression.class);
    while(current != null) {
      if (APPEND.test(current)) {
        joinParts.add(current.getArgumentList().getExpressions()[0]);
      }
      else {
        return null;
      }
      expression = current.getMethodExpression().getQualifierExpression();
      current = MethodCallUtils.getQualifierMethodCall(current);
    }
    PsiNewExpression newExpression = tryCast(PsiUtil.skipParenthesizedExprDown(expression), PsiNewExpression.class);
    if (newExpression == null) return null;
    final PsiJavaCodeReferenceElement classReference = newExpression.getClassReference();
    if (classReference == null) return null;
    PsiClass aClass = tryCast(classReference.resolve(), PsiClass.class);
    if (aClass == null) return null;
    final String qualifiedName = aClass.getQualifiedName();
    if (!CommonClassNames.JAVA_LANG_STRING_BUILDER.equals(qualifiedName) &&
        !CommonClassNames.JAVA_LANG_STRING_BUFFER.equals(qualifiedName)) {
      return null;
    }
    final PsiExpressionList argumentList = newExpression.getArgumentList();
    if (argumentList == null) return null;
    final PsiExpression[]
arguments = argumentList.getExpressions();
    if (arguments.length != 0) {
      if(arguments.length != 1) return null;
      final PsiExpression argument = arguments[0];
      final PsiType argumentType = argument.getType();
      // an int argument is the initial capacity, not content — contributes nothing to the join
      if (!PsiType.INT.equals(argumentType)) {
        joinParts.add(argument);
      }
    }
    // the chain was walked outside-in; restore evaluation order
    Collections.reverse(joinParts);
    return joinParts;
  }

  /**
   * Returns the matching call on {@code receiver} in the statement directly following
   * {@code statement} (skipping whitespace/comments), or null.
   */
  @Nullable
  private static PsiMethodCallExpression getCallAfterStatement(PsiStatement statement, PsiVariable receiver, CallMatcher callMatcher) {
    PsiElement next = PsiTreeUtil.skipWhitespacesAndCommentsForward(statement);
    return getCallExpression(receiver, callMatcher, next);
  }

  /**
   * Returns the matching call on {@code receiver} in the statement directly preceding
   * {@code statement}, additionally skipping over the given declaration statements (declarations
   * of loop-helper variables that may sit between the call and the loop).
   */
  @Nullable
  private static PsiMethodCallExpression getCallBeforeStatement(@NotNull PsiStatement statement,
                                                                @NotNull PsiVariable receiver,
                                                                @NotNull CallMatcher callMatcher,
                                                                @NotNull List<PsiDeclarationStatement> declarationsToSkip) {
    PsiElement previous = PsiTreeUtil.skipWhitespacesAndCommentsBackward(statement);
    PsiDeclarationStatement previousDeclaration = tryCast(previous, PsiDeclarationStatement.class);
    while (previousDeclaration != null && declarationsToSkip.contains(previousDeclaration)) {
      previous = PsiTreeUtil.skipWhitespacesAndCommentsBackward(previousDeclaration);
      previousDeclaration = tryCast(previous, PsiDeclarationStatement.class);
    }
    return getCallExpression(receiver, callMatcher, previous);
  }

  /**
   * Unwraps {@code element} to a method call matching {@code callMatcher} whose qualifier is a
   * reference to {@code target}; null otherwise.
   */
  @Contract("_, _, null -> null")
  @Nullable
  private static PsiMethodCallExpression getCallExpression(@NotNull PsiVariable target,
                                                           @NotNull CallMatcher callMatcher,
                                                           @Nullable PsiElement element) {
    if (!(element instanceof PsiExpressionStatement)) return null;
    PsiExpression expression = ((PsiExpressionStatement)element).getExpression();
    if (!(expression instanceof PsiMethodCallExpression)) return null;
    PsiMethodCallExpression call = (PsiMethodCallExpression)expression;
    if (callMatcher.test(call) &&
        ExpressionUtils.isReferenceTo(call.getMethodExpression().getQualifierExpression(), target))
      return call;
    return null;
  }

  /**
   * Checks that every reference to the builder occurs in a position the migration can handle:
   * an explicitly allowed call, an argument-less toString()/length() call, or use inside a
   * String '+' concatenation / '+=' assignment.
   */
  private static boolean areReferencesAllowed(@NotNull List<PsiElement> refs,
                                              @NotNull Set<PsiMethodCallExpression> allowedReferencePlaces) {
    return StreamEx.of(refs).select(PsiExpression.class).allMatch(expression -> {
      PsiMethodCallExpression usage = ExpressionUtils.getCallForQualifier(expression);
      if (usage != null) {
        if (allowedReferencePlaces.contains(usage)) return true;
        PsiExpression[] usageArgs = usage.getArgumentList().getExpressions();
        String name = usage.getMethodExpression().getReferenceName();
        if (usageArgs.length == 0 && ("toString".equals(name) || "length".equals(name))) return true;
      }
      PsiElement parent = PsiUtil.skipParenthesizedExprUp(expression.getParent());
      if (parent instanceof PsiPolyadicExpression &&
          ((PsiPolyadicExpression)parent).getOperationTokenType().equals(JavaTokenType.PLUS)) {
        return true;
      }
      if (parent instanceof PsiAssignmentExpression &&
          ((PsiAssignmentExpression)parent).getOperationTokenType().equals(JavaTokenType.PLUSEQ)) {
        return true;
      }
      return false;
    });
  }

  /**
   * Recognizes the single-use pattern {@code sb.append(postfix).toString()} after the loop and
   * returns that append call as the effective after-loop append; null when the pattern does not
   * apply.
   */
  @Nullable
  private static PsiMethodCallExpression tryExtractCombinedToString(PsiMethodCallExpression afterLoopAppend, List<PsiElement> refs) {
    if (refs.size() == 1 && afterLoopAppend == null) {
      // case like return sb.append(postfix).toString();
      PsiMethodCallExpression usage = ExpressionUtils.getCallForQualifier((PsiExpression)refs.get(0));
      if (APPEND.test(usage)) {
        PsiMethodCallExpression nextCall = ExpressionUtils.getCallForQualifier(usage);
        if (nextCall != null && "toString".equals(nextCall.getMethodExpression().getReferenceName())) {
          return usage;
        }
      }
    }
    return null;
  }

  /**
   * Pairwise structural equivalence of two join-part lists (same size, each pair equivalent
   * according to {@code ourEquivalence}).
   */
  private static boolean joinPartsAreEquivalent(@NotNull List<PsiExpression> joinParts1,
                                                @NotNull List<PsiExpression> joinParts2) {
    if (joinParts1.size() != joinParts2.size()) return false;
    for (int i = 0, size = joinParts1.size(); i < size; i++) {
      PsiExpression joinPart1 = joinParts1.get(i);
      PsiExpression joinPart2 = joinParts2.get(i);
      if (!ourEquivalence.expressionsAreEquivalent(joinPart1, joinPart2)) return false;
    }
    return true;
  }

  /**
   * Like: if(!sb.isEmpty()) =>
prefixLength == 0 or if(sb.length() > 2) => prefixLength == 2
   */
  @Nullable
  private static Integer extractConditionPrefixLength(@NotNull PsiExpression expression, PsiVariable targetBuilder) {
    Integer explicitLengthCondition = extractExplicitLengthCheck(expression, targetBuilder);
    if (explicitLengthCondition != null) return explicitLengthCondition;
    return extractEmptyLengthCheck(expression, targetBuilder);
  }

  /**
   * Recognizes a negated emptiness check on the builder (e.g. {@code !sb.isEmpty()}) and maps it
   * to prefix length 0.
   */
  @Nullable
  private static Integer extractEmptyLengthCheck(@NotNull PsiExpression expression, PsiVariable targetBuilder) {
    PsiMethodCallExpression maybeEmptyCall = tryCast(BoolUtils.getNegated(expression), PsiMethodCallExpression.class);
    if (!EMPTY_LENGTH.test(maybeEmptyCall)) return null; // extract call matcher
    if (!ExpressionUtils.isReferenceTo(maybeEmptyCall.getMethodExpression().getQualifierExpression(), targetBuilder)) return null;
    return 0;
  }

  /**
   * Recognizes an explicit comparison of {@code sb.length()} against an int constant on either
   * side (flipping the relation when the constant is on the left) and converts it into the
   * implied prefix length.
   */
  @Nullable("when failed to extract length")
  private static Integer extractExplicitLengthCheck(@NotNull PsiExpression expression, PsiVariable targetBuilder) {
    PsiBinaryExpression condition = tryCast(expression, PsiBinaryExpression.class);
    if (condition == null) return null;
    PsiExpression rOperand = condition.getROperand();
    if (rOperand == null) return null;
    PsiExpression lOperand = condition.getLOperand();
    DfaRelationValue.RelationType relation = DfaRelationValue.RelationType.fromElementType(condition.getOperationTokenType());
    if (relation == null) return null;
    int lSize = computeConstantIntExpression(lOperand);
    if (lSize >= 0) {
      return extractLength(rOperand, relation.getFlipped(), lSize, targetBuilder);
    }
    else {
      int rSize = computeConstantIntExpression(condition.getROperand());
      return rSize >= 0 ? extractLength(lOperand, relation, rSize, targetBuilder) : null;
    }
  }

  /**
   * Converts "{@code sb.length() <relation> size}" into a prefix length: the relation must bound
   * the length from below only (max unbounded), and the prefix length is the greatest length for
   * which the condition is still false (min - 1).
   */
  @Nullable
  private static Integer extractLength(PsiExpression rOperand,
                                       DfaRelationValue.RelationType relation,
                                       int size,
                                       PsiVariable targetBuilder) {
    if (!isStringBuilderLengthCall(rOperand, targetBuilder)) return null;
    LongRangeSet rangeSet = LongRangeSet.point(size).fromRelation(relation);
    if (rangeSet == null || rangeSet.max() != Long.MAX_VALUE) return null;
    long min = rangeSet.min();
    return min > 0 ? (int)(min - 1) : null;
  }

  /** True when the expression is {@code targetBuilder.length()}. */
  private static boolean isStringBuilderLengthCall(@NotNull PsiExpression expression, PsiVariable targetBuilder) {
    PsiMethodCallExpression methodCallExpression = tryCast(expression, PsiMethodCallExpression.class);
    return LENGTH.test(methodCallExpression) &&
           ExpressionUtils.isReferenceTo(methodCallExpression.getMethodExpression().getQualifierExpression(), targetBuilder);
  }

  /**
   * @param expression that is expected to be positive or 0
   * @return evaluated value or -1 when error
   */
  private static int computeConstantIntExpression(@NotNull PsiExpression expression) {
    Object constantExpression = ExpressionUtils.computeConstantExpression(expression);
    if (!(constantExpression instanceof Integer)) return -1;
    return (int)constantExpression;
  }

  /**
   * Holder for the joining prefix/suffix appends found immediately around the loop, together
   * with the join parts they contribute.
   */
  private static class PrefixSuffixContext {
    private final @Nullable PsiMethodCallExpression myBeforeLoopStatement;
    private final @Nullable PsiMethodCallExpression myAfterLoopStatement;
    private final @NotNull List<PsiExpression> myPrefixJoinParts;
    private final @NotNull List<PsiExpression> mySuffixJoinParts;

    PrefixSuffixContext(@Nullable PsiMethodCallExpression beforeLoopStatement,
                        @Nullable PsiMethodCallExpression afterLoopStatement,
                        @NotNull List<PsiExpression> prefixJoinParts,
                        @NotNull List<PsiExpression> suffixJoinParts) {
      myBeforeLoopStatement = beforeLoopStatement;
      myAfterLoopStatement = afterLoopStatement;
      myPrefixJoinParts = prefixJoinParts;
      mySuffixJoinParts = suffixJoinParts;
    }

    @Nullable
    public PsiMethodCallExpression
getBeforeLoopStatement() { return myBeforeLoopStatement; }

    @Nullable
    public PsiMethodCallExpression getAfterLoopStatement() { return myAfterLoopStatement; }

    @NotNull
    public List<PsiExpression> getPrefixJoinParts() { return myPrefixJoinParts; }

    @NotNull
    public List<PsiExpression> getSuffixJoinParts() { return mySuffixJoinParts; }

    /**
     * @param finalAppendPredecessor - statement, after and which expected suffix append (loop statement generally)
     * @param firstAppendSuccessor - statement before which expected prefix append and possibly some declarations used in loop
     * @param targetBuilder - string builder used
     * @param possibleVariablesBeforeLoop - variable, which declarations that can be before loop
     * @param allowedReferencePlaces additional builder usages that are acceptable for the migration
     * @return prefix and suffix data, or null when the builder is referenced in a way the
     *         migration cannot handle or the surrounding appends cannot be decomposed
     */
    @Nullable
    static PrefixSuffixContext extractAndVerifyRefs(@NotNull PsiStatement finalAppendPredecessor,
                                                    @NotNull PsiStatement firstAppendSuccessor,
                                                    @NotNull PsiVariable targetBuilder,
                                                    @NotNull TerminalBlock terminalBlock,
                                                    @NotNull List<PsiLocalVariable> possibleVariablesBeforeLoop,
                                                    @NotNull Set<PsiMethodCallExpression> allowedReferencePlaces) {
      PsiMethodCallExpression afterLoopAppend = getCallAfterStatement(finalAppendPredecessor, targetBuilder, APPEND);
      List<PsiDeclarationStatement> declarations = getDeclarations(possibleVariablesBeforeLoop);
      if(declarations == null) return null;
      PsiMethodCallExpression beforeLoopAppend = getCallBeforeStatement(firstAppendSuccessor, targetBuilder, APPEND, declarations);
      List<PsiExpression> builderStrInitializers = null;
      if(targetBuilder instanceof PsiLocalVariable) {
        if(!canBeMadeNonFinal((PsiLocalVariable)targetBuilder, terminalBlock.getStreamSourceStatement())) return null;
        // all builder references outside its own declaration and outside the loop body
        List<PsiElement> refs = StreamEx.of(ReferencesSearch.search(targetBuilder).findAll())
          .map(PsiReference::getElement)
          .remove(e -> PsiTreeUtil.isAncestor(targetBuilder, e, false) ||
                       PsiTreeUtil.isAncestor(terminalBlock.getStreamSourceStatement(), e, false))
          .toList();
        allowedReferencePlaces.add(afterLoopAppend);
        allowedReferencePlaces.add(beforeLoopAppend);
        boolean allowed = areReferencesAllowed(refs, allowedReferencePlaces);
        if (!allowed) {
          // last chance: the only leftover usage may be sb.append(postfix).toString()
          PsiMethodCallExpression newAfterLoopAppend = tryExtractCombinedToString(afterLoopAppend, refs);
          if (newAfterLoopAppend == null) return null;
          afterLoopAppend = newAfterLoopAppend;
        }
        builderStrInitializers = extractStringBuilderInitializer(targetBuilder.getInitializer());
        if(builderStrInitializers == null) return null;
      }
      List<PsiExpression> prefixJoinParts = extractJoinParts(beforeLoopAppend);
      if (prefixJoinParts == null) return null;
      if(builderStrInitializers != null) {
        // constructor-supplied content precedes any explicit before-loop append
        prefixJoinParts.addAll(0, builderStrInitializers);
      }
      if (prefixJoinParts.stream().anyMatch(joinPart -> SideEffectChecker.mayHaveSideEffects(joinPart))) return null;
      // the suffix append must not itself read the builder
      if (afterLoopAppend != null && VariableAccessUtils.variableIsUsed(targetBuilder, afterLoopAppend.getArgumentList())) return null;
      List<PsiExpression> suffixJoinParts = extractJoinParts(afterLoopAppend);
      if (suffixJoinParts == null) return null;
      return new PrefixSuffixContext(beforeLoopAppend, afterLoopAppend, prefixJoinParts, suffixJoinParts);
    }

    /**
     * @return list of declaration statements or null if error
     */
    @Nullable("when failed to get declaration of any var")
    static List<PsiDeclarationStatement> getDeclarations(@NotNull List<PsiLocalVariable> variables) {
      List<PsiDeclarationStatement> list = new ArrayList<>();
      for (PsiLocalVariable var : variables) {
        PsiDeclarationStatement declarationStatement = PsiTreeUtil.getParentOfType(var, PsiDeclarationStatement.class);
        if(declarationStatement == null) return null;
        list.add(declarationStatement);
      }
      return list;
    }
  }

  /**
   * Joining without delimiter, but maybe with prefix and suffix
   */
  private static class PlainJoiningTerminal extends JoiningTerminal {
    protected PlainJoiningTerminal(@NotNull PsiVariable targetBuilder,
                                   @NotNull PsiVariable variable,
                                   @NotNull List<PsiExpression> mainJoinParts,
                                   @NotNull PrefixSuffixContext
prefixSuffixContext,
                                   @NotNull TerminalBlock block) {
      super(block, targetBuilder, variable, mainJoinParts, prefixSuffixContext.getPrefixJoinParts(),
            prefixSuffixContext.getSuffixJoinParts(), emptyList(),
            prefixSuffixContext.getBeforeLoopStatement(), prefixSuffixContext.getAfterLoopStatement());
    }

    /**
     * Matches a loop whose body consists only of append statements (no delimiter logic at all).
     */
    @Nullable
    static PlainJoiningTerminal extractPlainJoining(@NotNull TerminalBlock terminalBlock,
                                                    @Nullable List<PsiVariable> nonFinalVariables) {
      if (nonFinalVariables != null && !nonFinalVariables.isEmpty()) return null;
      List<PsiStatement> statements = Arrays.asList(terminalBlock.getStatements());
      List<PsiExpression> mainJoinParts = extractJoinParts(statements);
      if (mainJoinParts == null || mainJoinParts.isEmpty()) return null;
      PsiVariable targetBuilder = extractStringBuilder(statements.get(0));
      if (targetBuilder == null) return null;
      PsiStatement loop = terminalBlock.getStreamSourceStatement();
      PrefixSuffixContext context =
        PrefixSuffixContext.extractAndVerifyRefs(loop, loop, targetBuilder, terminalBlock, emptyList(), new HashSet<>(emptyList()));
      if (context == null) return null;
      return new PlainJoiningTerminal(targetBuilder, terminalBlock.getVariable(), mainJoinParts, context, terminalBlock);
    }
  }

  /**
   * if(sb.length() > prefixLength) sb.append(",");
   */
  private static class LengthBasedJoiningTerminal extends JoiningTerminal {
    protected LengthBasedJoiningTerminal(@NotNull PsiVariable targetBuilder,
                                         @NotNull PsiVariable variable,
                                         @NotNull List<PsiExpression> mainJoinParts,
                                         @NotNull PrefixSuffixContext prefixSuffixContext,
                                         @NotNull List<PsiExpression> delimiter,
                                         @NotNull TerminalBlock block) {
      super(block, targetBuilder, variable, mainJoinParts, prefixSuffixContext.getPrefixJoinParts(),
            prefixSuffixContext.getSuffixJoinParts(), delimiter,
            prefixSuffixContext.getBeforeLoopStatement(), prefixSuffixContext.getAfterLoopStatement());
    }

    /**
     * Matches a loop guarded by a builder-length check: the first statement is
     * {@code if (sb.length() > n) sb.append(delim);} (no else), the rest are plain appends,
     * and the constant prefix already in the builder has exactly length n.
     */
    @Nullable
    static LengthBasedJoiningTerminal extractLengthBasedTerminal(@NotNull TerminalBlock terminalBlock,
                                                                 @Nullable List<PsiVariable> nonFinalVariables) {
      if (nonFinalVariables != null && !nonFinalVariables.isEmpty()) return null;
      ArrayList<PsiStatement> statements = ContainerUtil.newArrayList(terminalBlock.getStatements());
      if (statements.size() < 2) return null;
      PsiIfStatement ifStatement = tryCast(statements.get(0), PsiIfStatement.class);
      if (ifStatement == null) return null;
      PsiExpression condition = ifStatement.getCondition();
      if (condition == null || ifStatement.getElseBranch() != null) return null;
      List<PsiExpression> delimiter = extractDelimiter(ifStatement);
      if (delimiter == null) return null;
      List<PsiStatement> withoutCondition = statements.subList(1, statements.size());
      PsiVariable targetBuilder = extractStringBuilder(withoutCondition.get(0));
      if(!(targetBuilder instanceof PsiLocalVariable)) return null;
      Integer conditionPrefixLength = extractConditionPrefixLength(condition, targetBuilder);
      if (conditionPrefixLength == null) return null;
      List<PsiExpression> mainJoinParts = extractJoinParts(withoutCondition);
      if (mainJoinParts == null) return null;
      PsiStatement loop = terminalBlock.getStreamSourceStatement();
      PrefixSuffixContext context =
        PrefixSuffixContext.extractAndVerifyRefs(loop, loop, targetBuilder, terminalBlock, emptyList(), new HashSet<>(emptyList()));
      if (context == null) return null;
      // the length check only makes sense if the constant prefix is exactly that long
      String prefix = computeConstant(context.getPrefixJoinParts());
      if (prefix == null || prefix.length() != conditionPrefixLength) return null;
      return new LengthBasedJoiningTerminal(targetBuilder, terminalBlock.getVariable(), mainJoinParts, context, delimiter, terminalBlock);
    }

    /**
     * Extracts the delimiter join parts appended in the then-branch; the delimiter must be a
     * compile-time constant.
     */
    @Nullable
    private static List<PsiExpression> extractDelimiter(PsiIfStatement ifStatement) {
      PsiStatement thenBranch = ifStatement.getThenBranch();
      if (thenBranch == null) return null;
      List<PsiStatement> delimiterAppendStatements = Arrays.asList(ControlFlowUtils.unwrapBlock(thenBranch));
      List<PsiExpression> delimiterJoinParts = extractJoinParts(delimiterAppendStatements);
      if (delimiterJoinParts == null) return null;
      if(computeConstant(delimiterJoinParts) == null) return null;
      return delimiterJoinParts;
    }
  }

  /**
   * if(first) sb.append(mainPart) else sb.append(delimiter).append(",");
   */
  private static class BoolFlagJoiningTerminal extends JoiningTerminal {
    private final @NotNull PsiVariable myBoolVariable;

    protected BoolFlagJoiningTerminal(@NotNull PsiVariable targetBuilder,
                                      @NotNull PsiVariable variable,
                                      @NotNull List<PsiExpression> mainJoinParts,
                                      @NotNull PrefixSuffixContext prefixSuffixContext,
                                      @NotNull List<PsiExpression> delimiter,
                                      @NotNull PsiVariable boolVariable,
                                      @NotNull TerminalBlock block) {
      super(block, targetBuilder, variable, mainJoinParts, prefixSuffixContext.getPrefixJoinParts(),
            prefixSuffixContext.getSuffixJoinParts(), delimiter,
            prefixSuffixContext.getBeforeLoopStatement(), prefixSuffixContext.getAfterLoopStatement());
      this.myBoolVariable = boolVariable;
    }

    @Override
    void preCleanUp(CommentTracker ct) {
      super.preCleanUp(ct);
      // the first-iteration flag becomes useless after migration
      ct.delete(myBoolVariable);
    }

    /**
     * Matches a loop using a boolean first-iteration flag: the non-first branch must equal the
     * first branch plus a leading delimiter.
     */
    @Nullable
    static JoiningTerminal extractBoolFlagTerminal(@NotNull TerminalBlock terminalBlock,
                                                   @Nullable List<PsiVariable> nonFinalVariables) {
      if (nonFinalVariables != null && nonFinalVariables.size() != 1) return null;
      SpecialFirstIterationLoop specialFirstIterationLoop = SpecialFirstIterationLoop.BoolFlagLoop.extract(terminalBlock);
      if (specialFirstIterationLoop == null) return null;
      PsiLocalVariable boolVar = specialFirstIterationLoop.getVariable();
      if (boolVar == null) return null;
      if (nonFinalVariables != null && !nonFinalVariables.get(0).equals(boolVar)) return null;
      List<PsiStatement> firstIterationStatements = specialFirstIterationLoop.getFirstIterationStatements();
      List<PsiStatement> otherIterationStatements = specialFirstIterationLoop.getOtherIterationStatements();
      if (firstIterationStatements.isEmpty() || otherIterationStatements.isEmpty()) return null;
      List<PsiExpression> firstIterationJoinParts = extractJoinParts(firstIterationStatements);
      List<PsiExpression> otherIterationJoinParts =
extractJoinParts(otherIterationStatements);
      if (firstIterationJoinParts == null || otherIterationJoinParts == null) return null;
      JoinData joinData = JoinData.extractLeftDelimiter(otherIterationJoinParts);
      // the non-first iteration must be exactly "delimiter + same parts as the first iteration"
      if (!joinPartsAreEquivalent(joinData.getMainJoinParts(), firstIterationJoinParts)) return null;
      PsiVariable targetBuilder = extractStringBuilder(firstIterationStatements.get(0));
      if (targetBuilder == null) return null;
      PsiStatement loop = terminalBlock.getStreamSourceStatement();
      PrefixSuffixContext context =
        PrefixSuffixContext.extractAndVerifyRefs(loop, loop, targetBuilder, terminalBlock, singletonList(boolVar), new HashSet<>(emptyList()));
      if (context == null) return null;
      return new BoolFlagJoiningTerminal(targetBuilder, terminalBlock.getVariable(), firstIterationJoinParts, context,
                                         joinData.getDelimiterJoinParts(), boolVar, terminalBlock);
    }
  }

  /**
   * for() ...
   * if(sb.length() > prefixLength) sb.setLength(sb.length() - delimiterSize)
   */
  private static class LengthTruncateJoiningTerminal extends JoiningTerminal {
    private final @NotNull PsiIfStatement myTruncateIfStatement;

    protected LengthTruncateJoiningTerminal(@NotNull PsiVariable targetBuilder,
                                            @NotNull PsiVariable variable,
                                            @NotNull List<PsiExpression> mainJoinParts,
                                            @NotNull PrefixSuffixContext prefixSuffixContext,
                                            @NotNull List<PsiExpression> delimiter,
                                            @NotNull PsiIfStatement truncateIfStatement,
                                            @NotNull TerminalBlock block) {
      super(block, targetBuilder, variable, mainJoinParts, prefixSuffixContext.getPrefixJoinParts(),
            prefixSuffixContext.getSuffixJoinParts(), delimiter,
            prefixSuffixContext.getBeforeLoopStatement(), prefixSuffixContext.getAfterLoopStatement());
      myTruncateIfStatement = truncateIfStatement;
    }

    @Override
    void preCleanUp(CommentTracker ct) {
      super.preCleanUp(ct);
      // the post-loop trailing-delimiter truncation becomes useless after migration
      ct.delete(myTruncateIfStatement);
    }

    /**
     * Matches a loop that appends "mainPart + delimiter" each iteration and, after the loop,
     * chops the trailing delimiter off with {@code setLength}. The truncation size must equal
     * the constant delimiter's length and the guarding length check must match the constant
     * prefix already in the builder.
     */
    @Nullable
    static LengthTruncateJoiningTerminal extractLengthTruncateTerminal(@NotNull TerminalBlock terminalBlock,
                                                                       @Nullable List<PsiVariable> nonFinalVariables) {
      if (nonFinalVariables != null && !nonFinalVariables.isEmpty()) return null;
      List<PsiStatement> statements = Arrays.asList(terminalBlock.getStatements());
      if (statements.size() < 1) return null;
      PsiVariable targetBuilder = extractStringBuilder(statements.get(0));
      if(!(targetBuilder instanceof PsiLocalVariable)) return null;
      List<PsiExpression> joinParts = extractJoinParts(statements);
      if (joinParts == null) return null;
      JoinData joinData = JoinData.extractRightDelimiter(joinParts);
      List<PsiExpression> mainJoinParts = joinData.getMainJoinParts();
      List<PsiExpression> delimiterJoinParts = joinData.getDelimiterJoinParts();
      PsiStatement loop = terminalBlock.getStreamSourceStatement();
      PsiIfStatement ifStatement = tryCast(PsiTreeUtil.skipWhitespacesAndCommentsForward(loop), PsiIfStatement.class);
      if(ifStatement == null) return null;
      PsiExpression condition = ifStatement.getCondition();
      if (condition == null) return null;
      Integer conditionPrefixLength = extractConditionPrefixLength(condition, targetBuilder);
      if (conditionPrefixLength == null) return null;
      PsiMethodCallExpression truncateCall = extractTruncateCall(targetBuilder, ifStatement);
      if (truncateCall == null) return null;
      Integer truncateSize = tryExtractTruncationSize(targetBuilder, truncateCall);
      String delimiter = joinData.getDelimiter();
      if(delimiter == null) return null;
      if (truncateSize == null || truncateSize != delimiter.length()) return null;
      PrefixSuffixContext context = PrefixSuffixContext
        .extractAndVerifyRefs(ifStatement, loop, targetBuilder, terminalBlock, emptyList(), new HashSet<>(singletonList(truncateCall)));
      if (context == null) return null;
      String prefix = computeConstant(context.getPrefixJoinParts());
      if (prefix == null || prefix.length() != conditionPrefixLength) return null;
      PsiVariable loopVariable = terminalBlock.getVariable();
      return new LengthTruncateJoiningTerminal(targetBuilder, loopVariable, mainJoinParts, context,
                                               delimiterJoinParts, ifStatement, terminalBlock);
    }

    /**
     * Extracts the sole {@code targetBuilder.setLength(...)} call from the then-branch of the
     * post-loop if-statement (no else branch allowed).
     */
    @Nullable
    private static PsiMethodCallExpression extractTruncateCall(@NotNull PsiVariable targetBuilder, @NotNull PsiIfStatement ifStatement) {
      if (ifStatement.getElseBranch() != null) return null;
      PsiStatement block = ifStatement.getThenBranch();
      PsiStatement[] thenBranch = ControlFlowUtils.unwrapBlock(block);
      if (thenBranch.length != 1) return null;
      PsiExpressionStatement expressionStatement = tryCast(thenBranch[0], PsiExpressionStatement.class);
      if(expressionStatement == null) return null;
      PsiMethodCallExpression call = tryCast(expressionStatement.getExpression(), PsiMethodCallExpression.class);
      if (!SET_LENGTH.test(call)) return null;
      PsiLocalVariable localVariable = resolveLocalVariable(call.getMethodExpression().getQualifierExpression());
      if (!targetBuilder.equals(localVariable)) return null;
      return call;
    }

    /**
     * From {@code sb.setLength(sb.length() - N)} extracts the constant N; null when the argument
     * does not have exactly that shape with the same builder.
     */
    @Nullable
    private static Integer tryExtractTruncationSize(@NotNull PsiVariable targetBuilder, @NotNull PsiMethodCallExpression truncateCall) {
      PsiExpression[] expressions = truncateCall.getArgumentList().getExpressions();
      if (expressions.length == 0) return null;
      PsiExpression parameter = expressions[0];
      if (parameter == null) return null;
      PsiBinaryExpression binaryExpression = tryCast(parameter, PsiBinaryExpression.class);
      if (binaryExpression == null || !binaryExpression.getOperationTokenType().equals(JavaTokenType.MINUS)) return null;
      PsiExpression lOperand = binaryExpression.getLOperand();
      PsiExpression rOperand = binaryExpression.getROperand();
      if (rOperand == null) return null;
      Object constantExpression = ExpressionUtils.computeConstantExpression(rOperand);
      if (!(constantExpression instanceof Integer)) return null;
      int truncationSize = (int)constantExpression;
      PsiMethodCallExpression lengthCall = tryCast(lOperand, PsiMethodCallExpression.class);
      if (!LENGTH.test(lengthCall)) return null;
      PsiLocalVariable variable = resolveLocalVariable(lengthCall.getMethodExpression().getQualifierExpression());
      if (variable == null || !variable.equals(targetBuilder)) return null;
      return truncationSize;
    }
  }

  /**
   *
String delimiter = "";
 * for() {
 *   sb.append(delimiter).append(mainPart);   // delimiter is appended FIRST (see extract below)
 *   delimiter = ",";
 * }
 */
private static class DelimiterRewriteJoiningTerminal extends JoiningTerminal {
  // The "delimiter" local that is overwritten each iteration; deleted by preCleanUp().
  private final @NotNull PsiVariable myDelimiterVariable;

  protected DelimiterRewriteJoiningTerminal(@NotNull PsiVariable targetBuilder,
                                            @NotNull PsiVariable variable,
                                            @NotNull List<PsiExpression> mainJoinParts,
                                            @NotNull PrefixSuffixContext prefixSuffixContext,
                                            @NotNull List<PsiExpression> delimiter,
                                            @NotNull PsiVariable delimiterVariable,
                                            @NotNull TerminalBlock block) {
    super(block, targetBuilder, variable, mainJoinParts, prefixSuffixContext.getPrefixJoinParts(),
          prefixSuffixContext.getSuffixJoinParts(), delimiter, prefixSuffixContext.getBeforeLoopStatement(),
          prefixSuffixContext.getAfterLoopStatement());
    myDelimiterVariable = delimiterVariable;
  }

  @Override
  void preCleanUp(CommentTracker ct) {
    super.preCleanUp(ct);
    // The rewritten delimiter variable becomes dead once the loop is converted.
    ct.delete(myDelimiterVariable);
  }

  /**
   * Matches the delimiter-rewrite loop shape shown in the class comment:
   * a String local initialized to "" is appended first on every iteration
   * and reassigned to the real separator as the last statement of the body.
   */
  @Nullable
  static DelimiterRewriteJoiningTerminal extractDelimiterRewritingTerminal(@NotNull TerminalBlock terminalBlock,
                                                                           @Nullable List<PsiVariable> nonFinalVariables) {
    // Exactly one non-final variable (the delimiter) may be written inside the loop.
    if (nonFinalVariables != null && nonFinalVariables.size() != 1) return null;
    List<PsiStatement> statements = ContainerUtil.newArrayList(terminalBlock.getStatements());
    if (statements.size() < 2) return null;
    // TODO maybe not just last, but check if delimiter not used after assignment?
    PsiAssignmentExpression assignment = extractAssignment(statements.get(statements.size() - 1));
    if (assignment == null) return null;
    PsiLocalVariable delimiterVar = extractDelimiterVar(assignment);
    if (delimiterVar == null) return null;
    PsiExpression delimiter = extractDelimiter(assignment);
    if (delimiter == null) return null;
    List<PsiStatement> mainStatements = statements.subList(0, statements.size() - 1);
    List<PsiExpression> joinParts = extractJoinParts(mainStatements);
    if (joinParts == null || joinParts.isEmpty()) return null;
    // Despite its name, isSeparator() returns true when the first join part is NOT
    // a reference to the delimiter variable -- see the note on that method.
    if (isSeparator(delimiterVar, joinParts.get(0))) return null;
    // Drop the delimiter reference; the remaining parts are the per-element payload.
    joinParts.remove(0);
    // The delimiter variable must be referenced exactly twice inside the loop body:
    // once appended, once reassigned.
    if (ReferencesSearch.search(delimiterVar, new LocalSearchScope(terminalBlock.getStatements())).findAll().size() != 2) return null;
    PsiVariable targetBuilder = extractStringBuilder(mainStatements.get(0));
    if (targetBuilder == null) return null;
    PsiStatement loop = terminalBlock.getStreamSourceStatement();
    PrefixSuffixContext context = PrefixSuffixContext.extractAndVerifyRefs(loop, loop, targetBuilder, terminalBlock,
                                                                           singletonList(delimiterVar), new HashSet<>(emptyList()));
    if (context == null) return null;
    PsiVariable variable = terminalBlock.getVariable();
    return new DelimiterRewriteJoiningTerminal(targetBuilder, variable, joinParts, context,
                                               singletonList(delimiter), delimiterVar, terminalBlock);
  }

  // NOTE(review): name is inverted -- this returns true when joinPart is NOT a
  // reference to delimiterVar; the caller bails out in that case.
  private static boolean isSeparator(PsiLocalVariable delimiterVar, PsiExpression joinPart) {
    PsiLocalVariable maybeDelimiter = resolveLocalVariable(joinPart);
    if (maybeDelimiter == null || !maybeDelimiter.equals(delimiterVar)) return true;
    return false;
  }

  /** Returns the assigned separator expression iff it folds to a compile-time String constant. */
  @Nullable
  private static PsiExpression extractDelimiter(@NotNull PsiAssignmentExpression assignmentExpression) {
    PsiExpression expression = assignmentExpression.getRExpression();
    if (expression == null) return null;
    Object constantExpression = ExpressionUtils.computeConstantExpression(expression);
    if (!(constantExpression instanceof String)) return null;
    return expression;
  }

  @Nullable
  private static
PsiLocalVariable extractDelimiterVar(@NotNull PsiAssignmentExpression assignmentExpression) {
    // The assignment LHS must resolve to a local String variable whose initializer
    // folds to the empty string: the classic 'String delimiter = "";' pattern.
    PsiLocalVariable delimiterVar = resolveLocalVariable(assignmentExpression.getLExpression());
    if (delimiterVar == null) return null;
    PsiType delimiterVarType = delimiterVar.getType();
    if (!delimiterVarType.equalsToText(CommonClassNames.JAVA_LANG_STRING)) return null;
    PsiExpression initializer = delimiterVar.getInitializer();
    if (initializer == null) return null;
    Object constantExpression = ExpressionUtils.computeConstantExpression(initializer);
    if (!"".equals(constantExpression)) return null;
    return delimiterVar;
  }

  /** Unwraps an expression statement into an assignment expression, or returns null. */
  @Nullable
  private static PsiAssignmentExpression extractAssignment(@NotNull PsiStatement last) {
    PsiExpressionStatement expressionStatement = tryCast(last, PsiExpressionStatement.class);
    if (expressionStatement == null) return null;
    PsiAssignmentExpression assignment = tryCast(expressionStatement.getExpression(), PsiAssignmentExpression.class);
    if (assignment == null) return null;
    return assignment;
  }
}

/**
 * if(i > 0) append(",");
 */
private static class IndexBasedJoiningTerminal extends JoiningTerminal {
  protected IndexBasedJoiningTerminal(@NotNull PsiVariable targetBuilder,
                                      @NotNull PsiVariable variable,
                                      @NotNull List<PsiExpression> mainJoinParts,
                                      @NotNull PrefixSuffixContext prefixSuffixContext,
                                      @NotNull List<PsiExpression> delimiter,
                                      @NotNull TerminalBlock block) {
    super(block, targetBuilder, variable, mainJoinParts, prefixSuffixContext.getPrefixJoinParts(),
          prefixSuffixContext.getSuffixJoinParts(), delimiter, prefixSuffixContext.getBeforeLoopStatement(),
          prefixSuffixContext.getAfterLoopStatement());
  }

  /**
   * Matches loops whose first iteration is special-cased via an index check
   * ("if (i > 0) append(delimiter)"): the non-first iterations must consist of
   * a constant delimiter prefix followed by the same join parts as the first
   * iteration.
   */
  @Nullable
  static JoiningTerminal extractIndexBasedTerminal(@NotNull TerminalBlock terminalBlock,
                                                   @Nullable List<PsiVariable> nonFinalVariables) {
    // This shape must not mutate any locals inside the loop.
    if (nonFinalVariables != null && !nonFinalVariables.isEmpty()) return null;
    SpecialFirstIterationLoop specialFirstIterationLoop = SpecialFirstIterationLoop.IndexBasedLoop.extract(terminalBlock);
    if (specialFirstIterationLoop == null) return null;
    List<PsiStatement> firstIterationStatements = specialFirstIterationLoop.getFirstIterationStatements();
    List<PsiStatement> otherIterationStatements = specialFirstIterationLoop.getOtherIterationStatements();
    if (firstIterationStatements.isEmpty() || otherIterationStatements.isEmpty()) return null;
    List<PsiExpression> firstIterationJoinParts = extractJoinParts(firstIterationStatements);
    List<PsiExpression> otherIterationJoinParts = extractJoinParts(otherIterationStatements);
    if (firstIterationJoinParts == null || otherIterationJoinParts == null) return null;
    // Non-first iterations = constant delimiter + the first-iteration join parts.
    JoinData joinData = JoinData.extractLeftDelimiter(otherIterationJoinParts);
    if (!joinPartsAreEquivalent(joinData.getMainJoinParts(), firstIterationJoinParts)) return null;
    PsiVariable targetBuilder = extractStringBuilder(firstIterationStatements.get(0));
    if (targetBuilder == null) return null;
    PsiStatement loop = terminalBlock.getStreamSourceStatement();
    PrefixSuffixContext context = PrefixSuffixContext.extractAndVerifyRefs(loop, loop, targetBuilder, terminalBlock,
                                                                           emptyList(), new HashSet<>(emptyList()));
    if (context == null) return null;
    return new IndexBasedJoiningTerminal(targetBuilder, terminalBlock.getVariable(), firstIterationJoinParts, context,
                                         joinData.getDelimiterJoinParts(), terminalBlock);
  }
}

/**
 * sb.append(elements[0]);
 * for(int i = 1; i < elements.length; i++) {
 *   sb.append(delimiter).append(element[i]);
 * }
 */
private static class CountedLoopJoiningTerminal extends JoiningTerminal {
  // Loop source with its initializer rewritten from 1 to 0, used when generating the stream.
  @NotNull private final StreamApiMigrationInspection.CountingLoopSource mySource;
  // The pre-loop "sb.append(elements[0])" statement; deleted by preCleanUp().
  @NotNull private final PsiStatement myBeforeLoopAppend;

  protected CountedLoopJoiningTerminal(@NotNull PsiVariable targetBuilder,
                                       @NotNull PsiVariable variable,
                                       @NotNull List<PsiExpression> mainJoinParts,
                                       @NotNull PrefixSuffixContext prefixSuffixContext,
                                       @NotNull List<PsiExpression> delimiter,
                                       @NotNull TerminalBlock block,
                                       @NotNull StreamApiMigrationInspection.CountingLoopSource newSource,
                                       @NotNull PsiStatement beforeLoopAppendStatement) {
    super(block, targetBuilder, variable, mainJoinParts, prefixSuffixContext.getPrefixJoinParts(),
          prefixSuffixContext.getSuffixJoinParts(), delimiter, prefixSuffixContext.getBeforeLoopStatement(),
          prefixSuffixContext.getAfterLoopStatement());
    mySource = newSource;
    myBeforeLoopAppend = beforeLoopAppendStatement;
  }

  @Override
  void preCleanUp(CommentTracker ct) {
    super.preCleanUp(ct);
    // The explicit first-element append before the loop is subsumed by the joined stream.
    ct.delete(myBeforeLoopAppend);
  }

  /**
   * Copies each join part and, inside the copies, replaces every reference to
   * {@code localVariable} with {@code replacement}.
   */
  private static List<PsiExpression> copyReplacingVar(@NotNull List<PsiExpression> joinParts,
                                                      @NotNull PsiLocalVariable localVariable,
                                                      @NotNull PsiExpression replacement) {
    List<PsiExpression> copies = joinParts.stream().map(expression -> (PsiExpression)expression.copy()).collect(Collectors.toList());
    for (PsiElement joinPart : copies) {
      ReferencesSearch.search(localVariable, new LocalSearchScope(joinPart)).forEach(reference -> {
        reference.getElement().replace(replacement);
      });
    }
    return copies;
  }

  @NotNull
  @Override
  String generateStreamCode(CommentTracker ct) {
    // Use the re-based (0-initialized) loop source instead of the original one.
    return mySource.createReplacement(ct) + generateIntermediate(ct) + generateTerminal(ct);
  }

  /**
   * Matches the counted-loop shape in the class comment: element 0 appended
   * before the loop, the loop starting at index 1 appending delimiter + element.
   */
  @Nullable
  static CountedLoopJoiningTerminal extractCountedLoopTerminal(@NotNull TerminalBlock terminalBlock,
                                                               @Nullable List<PsiVariable> nonFinalVariables) {
    if (nonFinalVariables != null && !nonFinalVariables.isEmpty()) return null;
    StreamApiMigrationInspection.CountingLoopSource loopSource =
      terminalBlock.getLastOperation(StreamApiMigrationInspection.CountingLoopSource.class);
    if (loopSource == null) return null;
    // The counted loop must start at 1 (element 0 is appended before the loop).
    PsiExpression initializer = loopSource.getVariable().getInitializer();
    Object constantExpression = ExpressionUtils.computeConstantExpression(initializer);
    if (!Integer.valueOf(1).equals(constantExpression)) return null;
    List<PsiStatement> statements = ContainerUtil.newArrayList(terminalBlock.getStatements());
    if (statements.isEmpty()) return null;
    List<PsiExpression> joinParts = extractJoinParts(statements);
    if (joinParts == null) return null;
    JoinData joinData = JoinData.extractLeftDelimiter(joinParts);
    List<PsiExpression> delimiterJoinParts = joinData.getDelimiterJoinParts();
    if (delimiterJoinParts.isEmpty()) return null;
    PsiStatement loop = terminalBlock.getStreamSourceStatement();
    PsiLocalVariable variable = tryCast(terminalBlock.getVariable(), PsiLocalVariable.class);
    if (variable == null) return null;
    PsiVariable targetBuilder = extractStringBuilder(statements.get(0));
    if (targetBuilder == null) return null;
    // Locate the append to the same builder immediately before the loop (the elements[0] append).
    PsiMethodCallExpression beforeLoopAppend = JoiningTerminal.getCallBeforeStatement(loop, targetBuilder, APPEND, emptyList());
    if (beforeLoopAppend == null) return null;
    PsiStatement beforeLoopAppendStatement = PsiTreeUtil.getParentOfType(beforeLoopAppend, PsiStatement.class);
    if (beforeLoopAppendStatement == null) return null;
    List<PsiExpression> firstIterationJoinParts = extractJoinParts(beforeLoopAppend);
    if (firstIterationJoinParts == null) return null;
    // The pre-loop append must equal the loop body with the index variable replaced by 0.
    PsiElementFactory factory = JavaPsiFacade.getElementFactory(targetBuilder.getProject());
    PsiExpression expression = factory.createExpressionFromText("0", variable);
    List<PsiExpression> replacedMainJoinParts = copyReplacingVar(joinData.getMainJoinParts(), variable, expression);
    if (!joinPartsAreEquivalent(replacedMainJoinParts, firstIterationJoinParts)) return null;
    PrefixSuffixContext context = PrefixSuffixContext.extractAndVerifyRefs(loop, beforeLoopAppendStatement, targetBuilder,
                                                                           terminalBlock, emptyList(),
                                                                           new HashSet<>(singletonList(beforeLoopAppend)));
    if (context == null) return null;
    StreamApiMigrationInspection.CountingLoopSource newSource = loopSource.withInitializer(expression);
    return new CountedLoopJoiningTerminal(targetBuilder, variable, joinData.getMainJoinParts(), context, delimiterJoinParts,
                                          terminalBlock, newSource, beforeLoopAppendStatement);
  }
}

private static class JoinData { // TODO confusing naming
  // Concatenated constant delimiter text, or null when no constant run was found.
  private final @Nullable String myDelimiter;
  // The non-constant (per-element) join parts.
  private final @NotNull List<PsiExpression> myMainJoinParts;
  private final @NotNull
  List<PsiExpression> myDelimiterJoinParts;

  JoinData(@Nullable String delimiter,
           @NotNull List<PsiExpression> mainJoinParts,
           @NotNull List<PsiExpression> delimiterJoinParts) {
    myDelimiter = delimiter;
    myMainJoinParts = mainJoinParts;
    myDelimiterJoinParts = delimiterJoinParts;
  }

  @Nullable
  public String getDelimiter() { return myDelimiter; }

  @NotNull
  public List<PsiExpression> getMainJoinParts() { return myMainJoinParts; }

  @NotNull
  public List<PsiExpression> getDelimiterJoinParts() { return myDelimiterJoinParts; }

  /**
   * Splits the join parts into a leading run of compile-time String constants
   * (the delimiter) and the remaining tail starting at the first non-constant
   * part (the main join parts). When every part is constant, the main part
   * list is empty.
   */
  @NotNull
  static JoinData extractLeftDelimiter(@NotNull List<PsiExpression> joinParts) {
    List<PsiExpression> delimiterJoinParts = new ArrayList<>();
    int firstNonConstant = -1;
    StringBuilder sb = new StringBuilder();
    for (int i = 0, size = joinParts.size(); i < size; i++) {
      PsiExpression joinPart = joinParts.get(i);
      String constantExpression = computeConstant(joinPart);
      if (constantExpression == null) {
        firstNonConstant = i;
        break;
      }
      delimiterJoinParts.add(joinPart);
      sb.append(constantExpression);
    }
    String separator = sb.length() == 0 ? null : sb.toString();
    if (firstNonConstant != -1) {
      List<PsiExpression> mainJoinParts = joinParts.subList(firstNonConstant, joinParts.size());
      return new JoinData(separator, mainJoinParts, delimiterJoinParts);
    }
    return new JoinData(separator, emptyList(), delimiterJoinParts);
  }

  /**
   * Mirror of {@link #extractLeftDelimiter}: scans from the right, collecting a
   * trailing run of constants as the delimiter.
   * NOTE(review): the delimiter parts (and the concatenated separator text) are
   * collected right-to-left here, so they appear reversed relative to source
   * order -- confirm callers account for that.
   */
  @NotNull
  static JoinData extractRightDelimiter(@NotNull List<PsiExpression> joinParts) {
    List<PsiExpression> delimiterJoinParts = new ArrayList<>();
    int firstNonConstant = -1;
    StringBuilder sb = new StringBuilder();
    for (int i = joinParts.size() - 1; i >= 0; i--) {
      PsiExpression joinPart = joinParts.get(i);
      String constantExpression = computeConstant(joinPart);
      if (constantExpression == null) {
        firstNonConstant = i;
        break;
      }
      sb.append(constantExpression);
      delimiterJoinParts.add(joinPart);
    }
    String separator = sb.length() == 0 ? null : sb.toString();
    if (firstNonConstant != -1) {
      // Main parts are everything up to and including the first non-constant from the right.
      List<PsiExpression> mainJoinParts = joinParts.subList(0, firstNonConstant + 1);
      return new JoinData(separator, mainJoinParts, delimiterJoinParts);
    }
    return new JoinData(separator, emptyList(), delimiterJoinParts);
  }
}
}
package org.opencds.cqf.ruler.cr.dstu3.builder; import java.util.ArrayList; import java.util.List; import org.hl7.fhir.dstu3.model.Annotation; import org.hl7.fhir.dstu3.model.CarePlan; import org.hl7.fhir.dstu3.model.CodeableConcept; import org.hl7.fhir.dstu3.model.Identifier; import org.hl7.fhir.dstu3.model.Period; import org.hl7.fhir.dstu3.model.Reference; import org.hl7.fhir.dstu3.model.Resource; import org.hl7.fhir.exceptions.FHIRException; public class CarePlanBuilder extends BaseBuilder<CarePlan> { public CarePlanBuilder() { super(new CarePlan()); } public CarePlanBuilder(CarePlan carePlan) { super(carePlan); } public CarePlanBuilder buildIdentifier(List<Identifier> identifiers) { complexProperty.setIdentifier(identifiers); return this; } public CarePlanBuilder buildIdentifier(Identifier identifier) { if (!complexProperty.hasIdentifier()) { complexProperty.setIdentifier(new ArrayList<>()); } complexProperty.addIdentifier(identifier); return this; } public CarePlanBuilder buildDefinition(List<Reference> references) { complexProperty.setDefinition(references); return this; } public CarePlanBuilder buildDefinition(Reference reference) { if (!complexProperty.hasDefinition()) { complexProperty.setDefinition(new ArrayList<>()); } complexProperty.addDefinition(reference); return this; } public CarePlanBuilder buildBasedOn(List<Reference> references) { complexProperty.setBasedOn(references); return this; } public CarePlanBuilder buildBasedOn(Reference reference) { if (!complexProperty.hasBasedOn()) { complexProperty.setBasedOn(new ArrayList<>()); } complexProperty.addBasedOn(reference); return this; } public CarePlanBuilder buildReplaces(List<Reference> references) { complexProperty.setReplaces(references); return this; } public CarePlanBuilder buildReplaces(Reference reference) { if (!complexProperty.hasReplaces()) { complexProperty.setReplaces(new ArrayList<>()); } complexProperty.addReplaces(reference); return this; } public CarePlanBuilder 
buildPartOf(List<Reference> references) { complexProperty.setPartOf(references); return this; } public CarePlanBuilder buildPartOf(Reference reference) { if (!complexProperty.hasPartOf()) { complexProperty.setPartOf(new ArrayList<>()); } complexProperty.addPartOf(reference); return this; } // required public CarePlanBuilder buildStatus(CarePlan.CarePlanStatus status) { complexProperty.setStatus(status); return this; } // String overload public CarePlanBuilder buildStatus(String status) throws FHIRException { complexProperty.setStatus(CarePlan.CarePlanStatus.fromCode(status)); return this; } // required public CarePlanBuilder buildIntent(CarePlan.CarePlanIntent intent) { complexProperty.setIntent(intent); return this; } // String overload public CarePlanBuilder buildIntent(String intent) throws FHIRException { complexProperty.setIntent(CarePlan.CarePlanIntent.fromCode(intent)); return this; } public CarePlanBuilder buildCategory(List<CodeableConcept> categories) { complexProperty.setCategory(categories); return this; } public CarePlanBuilder buildCategory(CodeableConcept category) { if (!complexProperty.hasCategory()) { complexProperty.setCategory(new ArrayList<>()); } complexProperty.addCategory(category); return this; } public CarePlanBuilder buildTitle(String title) { complexProperty.setTitle(title); return this; } public CarePlanBuilder buildDescription(String description) { complexProperty.setDescription(description); return this; } // required public CarePlanBuilder buildSubject(Reference reference) { complexProperty.setSubject(reference); return this; } public CarePlanBuilder buildContext(Reference reference) { complexProperty.setContext(reference); return this; } public CarePlanBuilder buildPeriod(Period period) { complexProperty.setPeriod(period); return this; } public CarePlanBuilder buildAuthor(List<Reference> references) { complexProperty.setAuthor(references); return this; } public CarePlanBuilder buildAuthor(Reference reference) { if 
(!complexProperty.hasAuthor()) { complexProperty.setAuthor(new ArrayList<>()); } complexProperty.addAuthor(reference); return this; } public CarePlanBuilder buildCareTeam(List<Reference> careTeams) { complexProperty.setCareTeam(careTeams); return this; } public CarePlanBuilder buildCareTeam(Reference careTeam) { if (!complexProperty.hasCareTeam()) { complexProperty.setCareTeam(new ArrayList<>()); } complexProperty.addCareTeam(careTeam); return this; } public CarePlanBuilder buildAddresses(List<Reference> addresses) { complexProperty.setAddresses(addresses); return this; } public CarePlanBuilder buildAddresses(Reference address) { if (!complexProperty.hasAddresses()) { complexProperty.setAddresses(new ArrayList<>()); } complexProperty.addAddresses(address); return this; } public CarePlanBuilder buildSupportingInfo(List<Reference> supportingInfo) { complexProperty.setSupportingInfo(supportingInfo); return this; } public CarePlanBuilder buildSupportingInfo(Reference supportingInfo) { if (!complexProperty.hasSupportingInfo()) { complexProperty.setSupportingInfo(new ArrayList<>()); } complexProperty.addSupportingInfo(supportingInfo); return this; } public CarePlanBuilder buildGoal(List<Reference> goals) { complexProperty.setGoal(goals); return this; } public CarePlanBuilder buildGoal(Reference goal) { if (!complexProperty.hasGoal()) { complexProperty.setGoal(new ArrayList<>()); } complexProperty.addGoal(goal); return this; } public CarePlanBuilder buildActivity(List<CarePlan.CarePlanActivityComponent> activities) { complexProperty.setActivity(activities); return this; } public CarePlanBuilder buildActivity(CarePlan.CarePlanActivityComponent activity) { complexProperty.addActivity(activity); return this; } public CarePlanBuilder buildNotes(List<Annotation> notes) { complexProperty.setNote(notes); return this; } public CarePlanBuilder buildNotes(Annotation note) { if (!complexProperty.hasNote()) { complexProperty.setNote(new ArrayList<>()); } 
complexProperty.addNote(note); return this; } public CarePlanBuilder buildLanguage(String language) { complexProperty.setLanguage(language); return this; } public CarePlanBuilder buildContained(Resource result) { complexProperty.addContained(result); return this; } }
/* * Copyright 2021 Google LLC. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.hadoop.util.testing.logging; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import com.google.errorprone.annotations.CanIgnoreReturnValue; import com.google.errorprone.annotations.CheckReturnValue; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.concurrent.atomic.AtomicBoolean; import java.util.logging.Level; import java.util.logging.LogManager; import java.util.logging.LogRecord; import java.util.logging.Logger; import org.junit.rules.TestRule; import org.junit.runner.Description; import org.junit.runners.model.Statement; /** * The ExpectedLogMessages Rule allows in-test specification of expected console log messages: * * <pre> * // These tests all pass. * public static class DisallowMessagesByDefault { * private static final GoogleLogger logger = GoogleLogger.forEnclosingClass(); * * &#64;Rule * public final ExpectedLogMessages logged = ExpectedLogMessages.none(); * * &#64;Test * public void logsNothing() { * // allow nothing, logs nothing: it passes. 
* } * * &#64;Test * public void logsExpectedMessages() { * logged.expect("foo"); * logger.atWarning().log("This would fail if I didn't say 'foo' and would also fail " * + "if I logged any other message."); * } * * &#64;Test * public void filtersMessages() { * logged.filter(logger, Level.SEVERE); * logger.atWarning().log("If this message were SEVERE, this test would fail, but " * + "since it's a WARNING, the rule doesn't even see it."); * } * } * * public static class AllowAllMessagesByDefault { * private static final GoogleLogger logger = GoogleLogger.forEnclosingClass(); * * &#64;Rule * public final ExpectedLogMessages logged = ExpectedLogMessages.any(); * * &#64;Test * public void logsAnything() { * logger.atWarning().log("This is allowed"); * logger.atWarning().log("And so is this"); * logger.atWarning().log("And so is anything..."); * } * * &#64;Test * public void logsExpectedMessages() { * logged.expect("foo"); * logger.atWarning().log("Any log statement is allowed"); * logger.atWarning().log("As long as one of them contains 'foo'"); * } * } * * public static class AllowWhitelistedMessagesByDefault { * private static final GoogleLogger logger = GoogleLogger.forEnclosingClass(); * * &#64;Rule * public final ExpectedLogMessages logged = ExpectedLogMessages.containing("foo"); * * &#64;Test * public void logsWhitelistedMessages() { * logger.atWarning().log("Any log message not containing 'foo' will cause the test to fail"); * } * * &#64;Test * public void logsMultipleMessages() { * logged.allow("bar"); * logger.atWarning().log("'foo' is whitelisted"); * logger.atWarning().log("And so is 'bar', but nothing else is..."); * } * } * * public static class SpecificLogger { * private static final GoogleLogger myLogger = GoogleLogger.forEnclosingClass(); * * &#64;Rule * public final ExpectedLogMessages logged = ExpectedLogMessages.forLogger(SpecificLogger.class); * * static class Bar { * private static final GoogleLogger barLogger = GoogleLogger.forEnclosingClass(); * * 
static void bar() {
 *       barLogger.atWarning().log("Not impacted by `logged`, since it's not `myLogger`!");
 *     }
 *   }
 *
 *   &#64;Test
 *   public void otherLoggersAreIrrelevant() {
 *     Bar.bar();
 *   }
 * }
 *
 * public static class BoqSpecificLogger {
 *   private static final GoogleLogger logger = GoogleLogger.forEnclosingClass();
 *
 *   &#64;Rule
 *   public final BoqRules rules = new BoqRules();
 *
 *   &#64;Test
 *   public void logsExpectedMessage() {
 *     // Write a log message that would normally fail this test.
 *     logger.atSevere().log("This is an error message");
 *
 *     // Add an expectation so the test passes.
 *     rules.getLogged().expect(".*This is an error message.*");
 *   }
 * }
 * </pre>
 *
 * <p>Given that this rule may throw exceptions, it is not designed to be used as a
 * {@literal @}{@code ClassRule} (since throwing an exception from a {@code ClassRule} will result
 * in undefined behavior).
 *
 * <p>Note that all assertions are about messages that get printed, which are a subset of all
 * messages that are issued. Specifically, for example, if the {@code java.util.logging.Logger} for
 * {@code com.google.my.package} is configured to only print messages at, say, {@code Level.SEVERE},
 * any messages issued against that Logger with a lower level will not be seen by this class.
 */
@CanIgnoreReturnValue
public final class ExpectedLogMessages implements TestRule {

  // Parent logger shared by the convenience factories below.
  private static final Logger GOOGLE_LOGGER = Logger.getLogger("com.google");

  // The logger this rule attaches its asserting handler to.
  private final Logger logger;
  // Captures and checks log records; AssertingHandler is a sibling test utility.
  private final AssertingHandler handler;

  /**
   * The expectations to enforce at the end of the test. Thread-unsafe because only the test thread
   * should be setting expectations.
   */
  private final List<String> expectedRegexs = new ArrayList<>();

  // Guards against attaching the handler twice (see addHandler()).
  private final AtomicBoolean handlerHasBeenAdded = new AtomicBoolean(false);

  private ExpectedLogMessages(Logger logger, AssertingHandler handler) {
    this.logger = logger;
    this.handler = handler;
  }

  /**
   * Creates an {@code ExpectedLogMessages} that disallows all log messages for the {@code
   * com.google} logger (but can be mutated to allow/require expected messages).
   *
   * <p>Prefer to use {@link #any} or {@link #forLogger}, since by using this construct, you will
   * likely end up with an order-dependent or otherwise flaky test.
   */
  public static ExpectedLogMessages none() {
    return forLogger(GOOGLE_LOGGER);
  }

  /**
   * Creates an {@code ExpectedLogMessages} that allows all log messages for the {@code com.google}
   * logger (but can be mutated to require expected messages).
   */
  public static ExpectedLogMessages any() {
    return forLogger(GOOGLE_LOGGER).allow(".*");
  }

  /**
   * Creates an {@code ExpectedLogMessages} that allows specific log messages for the {@code
   * com.google} logger (but can be mutated to require expected messages).
   */
  public static ExpectedLogMessages containing(String regex) {
    return forLogger(GOOGLE_LOGGER).allow(regex);
  }

  /**
   * Creates an {@code ExpectedLogMessages} for the logger with the specified name. The instance is
   * initialized to disallow all log messages for the logger (but can be mutated to allow or require
   * expected messages).
   */
  public static ExpectedLogMessages forLogger(String name) {
    return forLogger(Logger.getLogger(checkNotNull(name)));
  }

  /**
   * Creates an {@code ExpectedLogMessages} for the specified logger. The instance is initialized to
   * disallow all log messages for the logger (but can be mutated to allow or require expected
   * messages).
   */
  public static ExpectedLogMessages forLogger(Logger logger) {
    return new ExpectedLogMessages(logger, new AssertingHandler());
  }

  /**
   * Creates an {@code ExpectedLogMessages} for the logger in the specified class.
The instance is
   * initialized to disallow all log messages for the logger (but can be mutated to allow or require
   * expected messages).
   *
   * <p>The logger is identified by its class name like {@link
   * com.google.common.flogger.GoogleLogger#forEnclosingClass} and friends do.
   */
  public static ExpectedLogMessages forLogger(Class<?> loggerClass) {
    // getCanonicalName() matches the name GoogleLogger registers its loggers under.
    return forLogger(loggerClass.getCanonicalName());
  }

  /**
   * Adds an allowed log message, causing tests to allow but not require log messages matching the
   * specified regex. Note that this method is a no-op if called in conjunction with {@link #any()}.
   */
  public ExpectedLogMessages allow(String regex) {
    handler.addIgnoredRegexes(checkNotNull(regex));
    return this;
  }

  /**
   * Filters all log messages by the specified logger that aren't of the specified level or higher.
   * Note that filtered messages will not be visible to this rule, meaning that they cannot be used
   * to fulfill log message expectations.
   *
   * @throws IllegalArgumentException if the specified logger isn't a child logger of this rule's
   *     logger
   */
  public ExpectedLogMessages filter(Logger logger, Level level) {
    return filter(logger.getName(), level);
  }

  /**
   * Filters all log messages by the specified logger that aren't of the specified level or higher.
   * Note that filtered messages will not be visible to this rule, meaning that they cannot be used
   * to fulfill log message expectations.
   *
   * <p>The logger is identified by its class name like {@link
   * com.google.common.flogger.GoogleLogger#forEnclosingClass} and friends do.
   *
   * @throws IllegalArgumentException if the specified logger isn't a child logger of this rule's
   *     logger
   */
  public ExpectedLogMessages filter(Class<?> loggerClass, Level level) {
    return filter(loggerClass.getCanonicalName(), level);
  }

  /**
   * Filters all log messages by the specified logger that aren't of the specified level or higher.
   * Note that filtered messages will not be visible to this rule, meaning that they cannot be used
   * to fulfill log message expectations.
   *
   * @throws IllegalArgumentException if the specified logger isn't a child logger of this rule's
   *     logger
   */
  public ExpectedLogMessages filter(String loggerName, Level level) {
    checkArgument(
        loggerName.startsWith(logger.getName()),
        "%s is not a child logger of %s",
        loggerName,
        logger.getName());
    // NOTE(review): the level is set on this rule's own logger, not on the child
    // named by loggerName; the per-logger filtering is pushed to the handler via
    // the configuration line below -- confirm this split is intentional.
    logger.setLevel(level);
    handler.appendConfiguration(loggerName + ".level = " + checkNotNull(level));
    return this;
  }

  /**
   * Adds an expectation that (exactly) one (not-filtered) message will contain the given {@code
   * regex}. This also implicitly allows the specified log message, though it is <em>not</em>
   * equivalent to also calling {@link #allow} with the same {@code regex}, since {@link #allow}
   * would allow any number of messages with the given {@code regex}.
   *
   * <p>Calling this method multiple times adds multiple expectations -- i.e. it will cause this
   * class to expect the {@code regex} to be in as many messages as the number of times this method
   * is called.
   *
   * <p>You are strongly advised to use narrow regular expressions, particularly if calling this
   * method multiple times with different {@code regex} params -- you should make sure that no
   * string can match more than one of the {@code regex}es, as this may lead to confusing behavior,
   * depending on the order that the logs are printed. It may even lead to flaky tests, if the order
   * of the logging of the messages is not deterministic.
   */
  public ExpectedLogMessages expect(String regex) {
    expectedRegexs.add(checkNotNull(regex));
    return this;
  }

  /** The same as calling {@code #expect()} the given {@code numTimes}. */
  public ExpectedLogMessages expect(String regex, int numTimes) {
    checkNotNull(regex);
    expectedRegexs.addAll(Collections.nCopies(numTimes, regex));
    return this;
  }

  /**
   * Sets the minimum log level for which messages will be visible to this rule.
Message levels
   * lower than this value will be discarded, meaning they will always be allowed, and they will not
   * satisfy any expectations. The default level is {@code WARNING}.
   *
   * <p>Note that this is a filter on top of the standard level filter from {@code
   * java.util.logging.Logger}, and that changing this level will not affect {@code
   * java.util.logging.Logger}'s filter (if any).
   */
  public ExpectedLogMessages setMinimumLevel(Level level) {
    return filter(logger, checkNotNull(level));
  }

  /**
   * Defines whether to capture log records regardless of the emitting thread ({@code true}), or
   * only log records of the thread that created the asserting handler and its descendants ({@code
   * false}, the default).
   *
   * <p>Useful to capture logs for code invoked by singleton thread pools possibly created outside
   * of tests.
   */
  public ExpectedLogMessages setCaptureAllThreads(boolean captureAllThreads) {
    handler.setCaptureAllThreads(captureAllThreads);
    return this;
  }

  /** Clears all log records this rule has recorded so far. */
  public void clear() {
    handler.clear();
  }

  /** Returns a snapshot of the log records this rule has recorded so far, ordered by event time. */
  @CheckReturnValue
  public ImmutableList<LogRecord> getRecords() {
    return handler.getLogRecords();
  }

  /**
   * Returns a snapshot of the formatted log records this rule has recorded so far, ordered by event
   * time.
   *
   * <p>Formats the log records consistently with what {@link #expect} matches against.
   */
  @CheckReturnValue
  public ImmutableList<String> getFormattedRecords() {
    return handler.getFormattedLogRecords();
  }

  /**
   * Starts capturing logs.
   *
   * <p>This method exists only for legacy users who do not use this class as a JUnit rule.
   *
   * @deprecated this method should not be called if this class is used as a JUnit rule
   */
  @Deprecated
  public void captureLogs() {
    addHandler();
  }

  @Override
  public Statement apply(Statement base, Description description) {
    return new Statement() {
      @Override
      public void evaluate() throws Throwable {
        // Attach before the test body so all of its log output is captured.
        addHandler();
        // NOTE(review): base.evaluate() is outside the try, so if the test body
        // throws, the handler is never removed/closed -- consider moving it
        // inside the try; confirm whether this is intentional.
        base.evaluate();
        try {
          // Enforce every expect() the test registered.
          handler.assertContainsRegex(Iterables.toArray(expectedRegexs, String.class));
        } finally {
          // Detach and close so one test's handler never leaks into the next.
          logger.removeHandler(handler);
          handler.close();
        }
      }
    };
  }

  /** Should only be called once. */
  private void addHandler() {
    checkState(!handlerHasBeenAdded.getAndSet(true), "Handler has already been added");
    // Work around a bug in android on KitKat - doesn't synchronize the children field of Logger.
    synchronized (LogManager.getLogManager()) {
      logger.addHandler(handler);
    }
  }
}
/**
 * Copyright 2012 Terremark Worldwide Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.terremark.handlers;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;
import java.util.List;
import java.util.Map;

import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.Response;

import org.apache.wink.client.ClientRequest;
import org.apache.wink.client.ClientResponse;
import org.apache.wink.client.handlers.ClientHandler;
import org.apache.wink.client.handlers.HandlerContext;
import org.apache.wink.client.handlers.InputStreamAdapter;
import org.apache.wink.client.handlers.OutputStreamAdapter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Logging handler. Logger {@code com.terremark.data} is active only when category is set to {@code DEBUG} level. Setting logger to debug
 * level will log the HTTP headers and data (if any).
 *
 * @author <a href="mailto:[email protected]">Seshu Pasam</a>
 */
public class HTTPLoggingHandler implements ClientHandler {
    /** Logger */
    private static final Logger LOGGER = LoggerFactory.getLogger("com.terremark.data");
    /** Platform specified new line */
    private static final String NL = System.getProperty("line.separator");

    /**
     * Method invoked in the chain. The request is logged, if necessary. The chain is invoked. Once the response is received, it is logged.
     *
     * @param request Client request.
     * @param context Request context.
     * @return Client response.
     * @throws Exception If there is a problem invoking the chain.
     */
    @Override
    @SuppressWarnings("PMD.SignatureDeclareThrowsException")
    public final ClientResponse handle(final ClientRequest request, final HandlerContext context) throws Exception {
        if (LOGGER.isDebugEnabled()) {
            if (request.getEntity() == null) {
                // Log headers here since the adapter for output stream will not be called without an entity
                StringBuilder builder = new StringBuilder().append("Request").append(NL).append(request.getMethod()).append(' ')
                                .append(request.getURI().toString()).append(NL);

                for (final Map.Entry<String, List<String>> entry : request.getHeaders().entrySet()) {
                    if (entry.getKey() != null) {
                        builder.append(entry.getKey()).append(": ").append(entry.getValue()).append(NL);
                    }
                }

                LOGGER.debug("{}", builder);
            } else {
                // Add adapters for output stream
                context.addOutputStreamAdapter(new OutputStreamAdapter() {
                    @Override
                    public OutputStream adapt(final OutputStream os, final ClientRequest request) throws IOException {
                        return new LoggingOutputStream(os, request.getMethod(), request.getURI(), request.getHeaders());
                    }
                });
            }

            // Add adapters for input stream
            context.addInputStreamAdapter(new InputStreamAdapter() {
                @Override
                public InputStream adapt(final InputStream is, final ClientResponse response) throws IOException {
                    return new LoggingInputStream(is, response.getStatusCode(), response.getStatusType(), response.getHeaders());
                }
            });
        }

        // Invoke the chain
        return context.doChain(request);
    }

    /**
     * Output stream delegate wrapper. Aggregates everything written through it and logs the request (method, URI, headers and body)
     * when the stream is closed.
     *
     * @author <a href="mailto:[email protected]">Seshu Pasam</a>
     */
    static class LoggingOutputStream extends OutputStream {
        /** Actual output stream */
        private final OutputStream os;
        /** String builder to aggregate data */
        private final StringBuilder builder;

        /**
         * Default constructor. Seeds the aggregation buffer with the request line and headers.
         *
         * @param os Actual output stream.
         * @param method Request method.
         * @param uri Request URI.
         * @param headers Request headers.
         */
        LoggingOutputStream(final OutputStream os, String method, URI uri, MultivaluedMap<String, String> headers) {
            this.os = os;
            this.builder = new StringBuilder().append("Request").append(NL).append(method).append(' ').append(uri.toString()).append(NL);

            for (final Map.Entry<String, List<String>> entry : headers.entrySet()) {
                if (entry.getKey() != null) {
                    this.builder.append(entry.getKey()).append(": ").append(entry.getValue()).append(NL);
                }
            }

            this.builder.append(NL);
        }

        /**
         * Logs the aggregated request data and closes the wrapped output stream.
         *
         * @see java.io.OutputStream#close()
         */
        @Override
        public void close() throws IOException {
            LOGGER.debug("{}{}", builder, NL);
            // FIX: the original called super.close(), which is a no-op on java.io.OutputStream,
            // so the wrapped stream was never closed. Delegate to the actual stream instead.
            os.close();
        }

        /**
         * Flushes the wrapped output stream.
         *
         * @see java.io.OutputStream#flush()
         */
        @Override
        public void flush() throws IOException {
            os.flush();
        }

        /**
         * Writes the specified byte to the actual output stream and appends it to the string aggregator. The aggregated data is
         * logged when the stream is closed.
         *
         * @see java.io.OutputStream#write(int)
         */
        @Override
        public void write(final int i) throws IOException {
            os.write(i);
            builder.append((char) i);
        }
    }

    /**
     * Input stream delegate wrapper. Aggregates everything read through it and logs the response (status, headers and body) when the
     * stream is closed.
     *
     * @author <a href="mailto:[email protected]">Seshu Pasam</a>
     */
    static class LoggingInputStream extends InputStream {
        /** Actual input stream */
        private final InputStream is;
        /** String builder to aggregate data */
        private final StringBuilder builder;

        /**
         * Default constructor. Seeds the aggregation buffer with the status line and headers.
         *
         * @param code Response code.
         * @param status Response status type.
         * @param headers Response headers.
         * @param is Actual input stream.
         */
        LoggingInputStream(final InputStream is, final int code, final Response.StatusType status, MultivaluedMap<String, String> headers) {
            this.is = is;
            this.builder = new StringBuilder().append("Response").append(NL).append(code).append(' ').append(status.toString()).append(NL);

            for (final Map.Entry<String, List<String>> entry : headers.entrySet()) {
                if (entry.getKey() != null) {
                    this.builder.append(entry.getKey()).append(": ").append(entry.getValue()).append(NL);
                }
            }

            this.builder.append(NL);
        }

        /**
         * Logs the aggregated response data and closes the wrapped input stream.
         *
         * @see java.io.InputStream#close()
         */
        @Override
        public void close() throws IOException {
            LOGGER.debug("{}{}", builder, NL);
            // FIX: the original called super.close(), which is a no-op on java.io.InputStream,
            // so the wrapped stream was never closed. Delegate to the actual stream instead.
            is.close();
        }

        /**
         * Reads one byte from the actual input stream. Bytes read (other than the EOF marker) are appended to the string aggregator,
         * which is logged when the stream is closed.
         *
         * @see java.io.InputStream#read()
         */
        @Override
        public int read() throws IOException {
            final int i = is.read();
            if (i > 0) {
                builder.append((char) i);
            }
            return i;
        }
    }
}
/**
 * Copyright 2005-2015 Red Hat, Inc.
 *
 * Red Hat licenses this file to you under the Apache License, version
 * 2.0 (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package io.fabric8.api;

import com.fasterxml.jackson.annotation.JsonInclude;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Objects;

/**
 * Represents the requirements to successfully provision a profile such as the minimum instances required
 * and which other profiles should be provisioned beforehand.
 */
@JsonInclude(JsonInclude.Include.NON_NULL)
public class ProfileRequirements implements Comparable<ProfileRequirements> {
    private String profile;
    private Integer minimumInstances;
    private Integer maximumInstances;
    private List<String> dependentProfiles;
    private ChildScalingRequirements childScalingRequirements;
    private SshScalingRequirements sshScalingRequirements;
    private DockerScalingRequirements dockerScalingRequirements;
    private OpenShiftScalingRequirements openShiftScalingRequirements;
    private Integer maximumInstancesPerHost;

    public ProfileRequirements() {
    }

    public ProfileRequirements(String profile) {
        this.profile = profile;
    }

    public ProfileRequirements(String profile, Integer minimumInstances) {
        this(profile);
        this.minimumInstances = minimumInstances;
    }

    public ProfileRequirements(String profile, Integer minimumInstances, Integer maximumInstances) {
        this(profile, minimumInstances);
        this.maximumInstances = maximumInstances;
    }

    public ProfileRequirements(String profile, Integer minimumInstances, Integer maximumInstances, List<String> dependentProfiles) {
        this(profile, minimumInstances, maximumInstances);
        this.dependentProfiles = dependentProfiles;
    }

    public ProfileRequirements(String profile, Integer minimumInstances, Integer maximumInstances, String... dependentProfiles) {
        this(profile, minimumInstances, maximumInstances);
        this.dependentProfiles = new ArrayList<String>(Arrays.asList(dependentProfiles));
    }

    /**
     * Equality is based solely on the profile name.
     */
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;

        ProfileRequirements that = (ProfileRequirements) o;

        // FIX: null-safe comparison; "profile" may be null for a default-constructed
        // instance (there is a no-arg constructor and a public setter).
        return Objects.equals(profile, that.profile);
    }

    @Override
    public int compareTo(ProfileRequirements o) {
        return this.profile.compareTo(o.profile);
    }

    @Override
    public int hashCode() {
        // Null-safe to stay consistent with equals(); returns 0 when profile is null.
        return Objects.hashCode(profile);
    }

    @Override
    public String toString() {
        return "ProfileRequirements[" + profile + " " + getOrBlank(minimumInstances) + ".." + getOrBlank(maximumInstances) + "]";
    }

    private static String getOrBlank(Object value) {
        return value != null ? value.toString() : "";
    }

    /**
     * Checks that the configuration of these requirements is valid.
     *
     * @throws IllegalArgumentException if either bound is negative or the minimum exceeds the maximum
     */
    public void validate() {
        if (maximumInstances != null) {
            if (maximumInstances < 0) {
                throw new IllegalArgumentException("Maximum instances should be >= 0");
            }
            if (minimumInstances != null) {
                if (minimumInstances > maximumInstances) {
                    throw new IllegalArgumentException("Minimum instances must not be greater than the maximum instances");
                }
            }
        }
        if (minimumInstances != null) {
            if (minimumInstances < 0) {
                throw new IllegalArgumentException("Minimum instances should be >= 0");
            }
        }
    }

    // Builder DSL
    //-------------------------------------------------------------------------

    public ProfileRequirements dependentProfiles(List<String> profiles) {
        setDependentProfiles(profiles);
        return this;
    }

    public ProfileRequirements dependentProfiles(String... profiles) {
        return dependentProfiles(Arrays.asList(profiles));
    }

    public ProfileRequirements minimumInstances(Integer value) {
        setMinimumInstances(value);
        return this;
    }

    public ProfileRequirements maximumInstances(Integer value) {
        setMaximumInstances(value);
        return this;
    }

    /**
     * Lazily creates the scaling requirements for the child container provider
     */
    public ChildScalingRequirements childScaling() {
        if (childScalingRequirements == null) {
            childScalingRequirements = new ChildScalingRequirements();
        }
        return getChildScalingRequirements();
    }

    /**
     * Lazily creates the scaling requirements for the ssh container provider
     */
    public SshScalingRequirements sshScaling() {
        if (sshScalingRequirements == null) {
            sshScalingRequirements = new SshScalingRequirements();
        }
        return getSshScalingRequirements();
    }

    /**
     * Lazily creates the scaling requirements for the docker container provider
     */
    public DockerScalingRequirements dockerScaling() {
        if (dockerScalingRequirements == null) {
            dockerScalingRequirements = new DockerScalingRequirements();
        }
        return getDockerScalingRequirements();
    }

    /**
     * Lazily creates the scaling requirements for the OpenShift container provider
     */
    public OpenShiftScalingRequirements openShiftScaling() {
        if (openShiftScalingRequirements == null) {
            openShiftScalingRequirements = new OpenShiftScalingRequirements();
        }
        return getOpenShiftScalingRequirements();
    }

    /**
     * Specifies the maximum number of instances of this profile per host. e.g. set to 1 to ensure that only 1 instance of a profile is provisioned per host
     */
    public ProfileRequirements maximumInstancesPerHost(final Integer maximumInstancesPerHost) {
        this.maximumInstancesPerHost = maximumInstancesPerHost;
        return this;
    }

    // Properties
    //-------------------------------------------------------------------------

    public String getProfile() {
        return profile;
    }

    public void setProfile(String profile) {
        this.profile = profile;
    }

    public List<String> getDependentProfiles() {
        return dependentProfiles;
    }

    public void setDependentProfiles(List<String> dependentProfiles) {
        this.dependentProfiles = dependentProfiles;
    }

    @JsonInclude(JsonInclude.Include.NON_NULL)
    public Integer getMaximumInstances() {
        return maximumInstances;
    }

    public void setMaximumInstances(Integer maximumInstances) {
        this.maximumInstances = maximumInstances;
    }

    @JsonInclude(JsonInclude.Include.NON_NULL)
    public Integer getMinimumInstances() {
        return minimumInstances;
    }

    public void setMinimumInstances(Integer minimumInstances) {
        this.minimumInstances = minimumInstances;
    }

    public ChildScalingRequirements getChildScalingRequirements() {
        return childScalingRequirements;
    }

    public void setChildScalingRequirements(ChildScalingRequirements childScalingRequirements) {
        this.childScalingRequirements = childScalingRequirements;
    }

    public SshScalingRequirements getSshScalingRequirements() {
        return sshScalingRequirements;
    }

    public void setSshScalingRequirements(SshScalingRequirements sshScalingRequirements) {
        this.sshScalingRequirements = sshScalingRequirements;
    }

    public DockerScalingRequirements getDockerScalingRequirements() {
        return dockerScalingRequirements;
    }

    public void setDockerScalingRequirements(DockerScalingRequirements dockerScalingRequirements) {
        this.dockerScalingRequirements = dockerScalingRequirements;
    }

    public OpenShiftScalingRequirements getOpenShiftScalingRequirements() {
        return openShiftScalingRequirements;
    }

    public void setOpenShiftScalingRequirements(OpenShiftScalingRequirements openShiftScalingRequirements) {
        this.openShiftScalingRequirements = openShiftScalingRequirements;
    }

    public Integer getMaximumInstancesPerHost() {
        return maximumInstancesPerHost;
    }

    public void setMaximumInstancesPerHost(Integer maximumInstancesPerHost) {
        this.maximumInstancesPerHost = maximumInstancesPerHost;
    }

    /**
     * Returns the health as a ratio for tools based on the current number of instances and the requirements.
     * Note that the value may exceed 1.0 when more instances are running than the configured minimum.
     */
    public double getHealth(int instances) {
        if (instances <= 0) {
            return 0.0;
        }
        if (minimumInstances != null) {
            int min = minimumInstances.intValue();
            if (min <= 0) {
                return 1.0;
            } else {
                return 1.0 * instances / min;
            }
        }
        // if no minimum assume fine?
        return 1.0;
    }

    /**
     * Returns true if these requirements are empty (and so do not need to be persisted)
     */
    //@JsonIgnore // name this differently so it's not picked up as a property
    public boolean checkIsEmpty() {
        // we allow 0 maximum instances as being non-empty so we can keep the requirements around to
        // stop things
        return isEmpty(minimumInstances) && isEmpty(dependentProfiles) && maximumInstances == null;
    }

    protected static boolean isEmpty(Integer number) {
        return number == null || number == 0;
    }

    protected static boolean isEmpty(Collection<?> coll) {
        return coll == null || coll.isEmpty();
    }

    /**
     * Returns true if this profile requirements has at least 1 {@link #getMinimumInstances()}
     */
    public boolean hasMinimumInstances() {
        return minimumInstances != null && minimumInstances.intValue() > 0;
    }
}
package com.github.btrekkie.programmatic_image;

import java.awt.Dimension;
import java.awt.Graphics;
import java.awt.Rectangle;
import java.awt.event.ComponentAdapter;
import java.awt.event.ComponentEvent;
import java.awt.event.HierarchyBoundsAdapter;
import java.awt.event.HierarchyEvent;
import java.awt.image.BufferedImage;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;

import javax.swing.JComponent;
import javax.swing.event.AncestorEvent;
import javax.swing.event.AncestorListener;

/**
 * A JComponent that renders an IProgrammaticImageRenderer image. ProgrammaticImageComponent performs best if
 * IProgrammaticImageRenderer.render terminates in a timely fashion when its thread is interrupted, as in
 * Thread.interrupted().
 */
/* ProgrammaticImageComponent maintains a cache of BufferedImages for different subrectangles of the image at different
 * levels of zoom.  We use background threads to render the image at visible rectangle, but we render it in pieces.  The
 * pieces tile the underlying image into imageWidth x imageHeight images, starting at the origin, where
 * imageWidth x imageHeight is (normally) somewhat smaller than the visible rectangle.  At any given moment, for each
 * tile that overlaps the visible rectangle, either the cache contains the image for the tile, or we are computing the
 * image in a background thread.  By making the tiles smaller than the visible rectangle, we can display different
 * portions of the image as they become available.  This is useful if rendering takes a long time.  We also compute
 * images for tiles that are just outside the visible rectangle, so that if the ProgrammaticImageComponent is in a
 * scroll pane and the user scrolls slowly enough, we will always be able to draw the entire visible rectangle.
 */
public class ProgrammaticImageComponent extends JComponent {
    /**
     * The amount by which to multiply the size of the visible rectangle to obtain a minimum value of
     * cache.maxPixelCount.
     */
    private static final double MAX_PIXEL_COUNT_MULT = 10;

    /** The minimum value of cache.maxPixelCount. */
    private static final int MIN_MAX_PIXEL_COUNT = 10000000;

    /**
     * The minimum factor by which the desired value of imageWidth or imageHeight must change, as suggested by
     * CACHE_IMAGES_PER_AXIS and MIN_IMAGE_SIZE, before changing the image size.
     */
    private static final double MIN_SIZE_CHANGE_MULT = 1.8;

    /** The maximum number of cache images that fit in the visible rectangle in the vertical or horizontal direction. */
    private static final double CACHE_IMAGES_PER_AXIS = 6;

    /** The minimum values of imageWidth and imageHeight. */
    private static final int MIN_IMAGE_SIZE = 100;

    private static final long serialVersionUID = 9212293386643020026L;

    /** The IProgrammaticImageRenderer whose image we are rendering. */
    private final IProgrammaticImageRenderer renderer;

    /** The cache for storing different portions of the underlying image. */
    private ProgrammaticImageCache cache;

    /** The ProgrammaticImageWorkers that are currently rendering different portions of the image. */
    private Map<ProgrammaticImageRect, ProgrammaticImageWorker> workers =
        new HashMap<ProgrammaticImageRect, ProgrammaticImageWorker>();

    /** The factor by which we are scaling the image. */
    private double scale = 1;

    /** Whether the component has been "initialized", or properly added to the hierarchy of some frame or window. */
    private boolean hasInitialized = false;

    /**
     * The width of the images we store in "cache" (although the images at the far right of "renderer" might be narrower
     * than this).
     */
    private int imageWidth = 1;

    /**
     * The height of the images we store in "cache" (although the images at the bottom of "renderer" might be shorter
     * than this).
     */
    private int imageHeight = 1;

    public ProgrammaticImageComponent(IProgrammaticImageRenderer renderer) {
        this.renderer = renderer;
        cache = new ProgrammaticImageCache();
        // Re-tile and re-render whenever the component is resized ...
        addComponentListener(new ComponentAdapter() {
            @Override
            public void componentResized(ComponentEvent event) {
                handleVisibleRectChanged();
            }
        });
        // ... or moved within / added to / removed from the containment hierarchy, since any of these
        // can change the visible rectangle
        addAncestorListener(new AncestorListener() {
            @Override
            public void ancestorMoved(AncestorEvent event) {
                handleVisibleRectChanged();
            }

            @Override
            public void ancestorAdded(AncestorEvent event) {
                handleVisibleRectChanged();
            }

            @Override
            public void ancestorRemoved(AncestorEvent event) {
                handleVisibleRectChanged();
            }
        });
    }

    /**
     * Returns the regions of the image we should make sure are available, given the current visible rectangle and level
     * of zoom.
     */
    private Collection<ProgrammaticImageRect> rects() {
        Collection<ProgrammaticImageRect> rects = new ArrayList<ProgrammaticImageRect>();
        Rectangle visibleRect = getVisibleRect();
        Dimension preferredSize = getPreferredSize();
        // Snap the range to the tile grid, and extend it by one tile on each side so slow scrolling
        // never exposes an unrendered region (see the class comment)
        int minX = Math.max(0, visibleRect.x / imageWidth * imageWidth - imageWidth);
        int maxX = Math.min((int)preferredSize.getWidth(), visibleRect.x + visibleRect.width + imageWidth);
        int minY = Math.max(0, visibleRect.y / imageHeight * imageHeight - imageHeight);
        int maxY = Math.min((int)preferredSize.getHeight(), visibleRect.y + visibleRect.height + imageHeight);
        for (int y = minY; y < maxY; y += imageHeight) {
            for (int x = minX; x < maxX; x += imageWidth) {
                // Tiles at the right and bottom edges are clipped to the image bounds
                rects.add(
                    new ProgrammaticImageRect(
                        scale, x, y, Math.min(imageWidth, (int)preferredSize.getWidth() - x),
                        Math.min(imageHeight, (int)preferredSize.getHeight() - y)));
            }
        }
        return rects;
    }

    /**
     * Responds to a (potential) change in what we are displaying - either a change in the visible rectangle or the
     * level of zoom.
     */
    private void handleVisibleRectOrScaleChanged() {
        setPreferredSize(
            new Dimension((int)Math.ceil(renderer.width() * scale), (int)Math.ceil(renderer.height() * scale)));
        revalidate();

        // Cancel any workers for regions we are no longer interested in
        Set<ProgrammaticImageRect> rects = new LinkedHashSet<ProgrammaticImageRect>(rects());
        Map<ProgrammaticImageRect, ProgrammaticImageWorker> newWorkers =
            new HashMap<ProgrammaticImageRect, ProgrammaticImageWorker>();
        for (Entry<ProgrammaticImageRect, ProgrammaticImageWorker> entry : workers.entrySet()) {
            ProgrammaticImageRect rect = entry.getKey();
            ProgrammaticImageWorker worker = entry.getValue();
            if (rects.contains(rect)) {
                newWorkers.put(rect, worker);
            } else {
                worker.cancel(true);
            }
        }

        // Start workers for regions we are newly interested in
        for (ProgrammaticImageRect rect : rects) {
            if (cache.result(rect) == null && !newWorkers.containsKey(rect)) {
                ProgrammaticImageWorker worker = new ProgrammaticImageWorker(renderer, rect, this);
                worker.execute();
                newWorkers.put(rect, worker);
            }
        }
        workers = newWorkers;
    }

    /**
     * Sets the factor by which we are scaling the image.
     *
     * Note that ProgrammaticImageComponent may use the exact scale factor value as part of a caching scheme.  Thus, if
     * the user zooms in and then zooms back out by the same amount, we should make sure to pass the same value for
     * "scale".  It would not be sufficient if zooming in multiplied the scale factor by some amount, and zooming out
     * divided the scale factor by this amount.  Due to floating point imprecision, multiplying by a certain amount and
     * then dividing by the same amount may result in a different value.
     */
    public void setScale(double scale) {
        if (scale != this.scale) {
            this.scale = scale;
            // Only repaint once the component is part of a window hierarchy; until then there is
            // nothing meaningful to tile or draw
            if (hasInitialized) {
                repaint();
                handleVisibleRectOrScaleChanged();
            }
        }
    }

    /** Responds to a (potential) change in the visible rectangle.
     */
    private void handleVisibleRectChanged() {
        if (!hasInitialized) {
            hasInitialized = true;
            // Lazily registered on first use; also re-tile when an ancestor is resized
            addHierarchyBoundsListener(new HierarchyBoundsAdapter() {
                @Override
                public void ancestorResized(HierarchyEvent e) {
                    handleVisibleRectChanged();
                }
            });
        }

        // Update imageWidth and imageHeight
        Rectangle visibleRect = getVisibleRect();
        int targetWidth = Math.max(MIN_IMAGE_SIZE, (int)(visibleRect.width / CACHE_IMAGES_PER_AXIS));
        int targetHeight = Math.max(MIN_IMAGE_SIZE, (int)(visibleRect.height / CACHE_IMAGES_PER_AXIS));
        // Only change the tile size when the target differs by at least MIN_SIZE_CHANGE_MULT, since
        // changing it invalidates the entire cache
        if (targetWidth < imageWidth / MIN_SIZE_CHANGE_MULT || targetWidth > MIN_SIZE_CHANGE_MULT * imageWidth ||
                targetHeight < imageHeight / MIN_SIZE_CHANGE_MULT ||
                targetHeight > MIN_SIZE_CHANGE_MULT * imageHeight) {
            imageWidth = targetWidth;
            imageHeight = targetHeight;

            // Ideally, we would use the existing cache entries to construct new imageWidth x imageHeight cache entries
            cache.clear();
        }

        cache.setMaxPixelCount(
            Math.max(MIN_MAX_PIXEL_COUNT, (int)(MAX_PIXEL_COUNT_MULT * getWidth() * getHeight())));
        handleVisibleRectOrScaleChanged();
    }

    // NOTE(review): does not call super.paintComponent(graphics), so the background is not cleared
    // and tiles not yet rendered remain whatever was previously painted - confirm this is intended
    @Override
    protected void paintComponent(Graphics graphics) {
        for (ProgrammaticImageRect rect : rects()) {
            BufferedImage image = cache.result(rect);
            if (image != null) {
                graphics.drawImage(image, rect.x, rect.y, null);
            }
        }
    }

    /**
     * Stores the specified BufferedImage as the result of rendering the specified region.
     *
     * <p>Presumably invoked on the event dispatch thread by ProgrammaticImageWorker (e.g. from
     * SwingWorker.done()), since it mutates "workers" without synchronization - TODO confirm.
     */
    void handleResult(BufferedImage image, ProgrammaticImageRect rect) {
        cache.addResult(image, rect);
        workers.remove(rect);
        repaint(rect.x, rect.y, rect.width, rect.height);
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.zookeeper.server; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.PrintWriter; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Random; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import java.util.Set; import javax.security.sasl.SaslException; import org.apache.jute.BinaryInputArchive; import org.apache.jute.BinaryOutputArchive; import org.apache.jute.Record; import org.apache.zookeeper.Environment; import org.apache.zookeeper.KeeperException; import org.apache.zookeeper.KeeperException.Code; import org.apache.zookeeper.KeeperException.SessionExpiredException; import org.apache.zookeeper.ZooDefs.OpCode; import org.apache.zookeeper.data.ACL; import org.apache.zookeeper.data.Id; import org.apache.zookeeper.data.StatPersisted; import org.apache.zookeeper.jmx.MBeanRegistry; import org.apache.zookeeper.proto.AuthPacket; import org.apache.zookeeper.proto.ConnectRequest; 
import org.apache.zookeeper.proto.ConnectResponse; import org.apache.zookeeper.proto.GetSASLRequest; import org.apache.zookeeper.proto.ReplyHeader; import org.apache.zookeeper.proto.RequestHeader; import org.apache.zookeeper.proto.SetSASLResponse; import org.apache.zookeeper.server.DataTree.ProcessTxnResult; import org.apache.zookeeper.server.RequestProcessor.RequestProcessorException; import org.apache.zookeeper.server.ServerCnxn.CloseRequestException; import org.apache.zookeeper.server.SessionTracker.Session; import org.apache.zookeeper.server.SessionTracker.SessionExpirer; import org.apache.zookeeper.server.auth.AuthenticationProvider; import org.apache.zookeeper.server.auth.ProviderRegistry; import org.apache.zookeeper.server.persistence.FileTxnSnapLog; import org.apache.zookeeper.server.quorum.ReadOnlyZooKeeperServer; import org.apache.zookeeper.txn.CreateSessionTxn; import org.apache.zookeeper.txn.TxnHeader; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * This class implements a simple standalone ZooKeeperServer. 
It sets up the * following chain of RequestProcessors to process requests: * PrepRequestProcessor -> SyncRequestProcessor -> FinalRequestProcessor */ public class ZooKeeperServer implements SessionExpirer, ServerStats.Provider { protected static final Logger LOG; static { LOG = LoggerFactory.getLogger(ZooKeeperServer.class); Environment.logEnv("Server environment:", LOG); } protected ZooKeeperServerBean jmxServerBean; protected DataTreeBean jmxDataTreeBean; public static final int DEFAULT_TICK_TIME = 3000; protected int tickTime = DEFAULT_TICK_TIME; /** value of -1 indicates unset, use default */ protected int minSessionTimeout = -1; /** value of -1 indicates unset, use default */ protected int maxSessionTimeout = -1; protected SessionTracker sessionTracker; private FileTxnSnapLog txnLogFactory = null; private ZKDatabase zkDb; private final AtomicLong hzxid = new AtomicLong(0); public final static Exception ok = new Exception("No prob"); protected RequestProcessor firstProcessor; protected volatile boolean running; /** * This is the secret that we use to generate passwords, for the moment it * is more of a sanity check. */ static final private long superSecret = 0XB3415C00L; private final AtomicInteger requestsInProcess = new AtomicInteger(0); final List<ChangeRecord> outstandingChanges = new ArrayList<ChangeRecord>(); // this data structure must be accessed under the outstandingChanges lock final HashMap<String, ChangeRecord> outstandingChangesForPath = new HashMap<String, ChangeRecord>(); private ServerCnxnFactory serverCnxnFactory; private final ServerStats serverStats; void removeCnxn(ServerCnxn cnxn) { zkDb.removeCnxn(cnxn); } /** * Creates a ZooKeeperServer instance. Nothing is setup, use the setX * methods to prepare the instance (eg datadir, datalogdir, ticktime, * builder, etc...) * * @throws IOException */ public ZooKeeperServer() { serverStats = new ServerStats(this); } /** * Creates a ZooKeeperServer instance. 
It sets everything up, but doesn't
 * actually start listening for clients until run() is invoked.
 *
 * @param dataDir the directory to put the data
 */
public ZooKeeperServer(FileTxnSnapLog txnLogFactory, int tickTime,
        int minSessionTimeout, int maxSessionTimeout, ZKDatabase zkDb) {
    serverStats = new ServerStats(this);
    this.txnLogFactory = txnLogFactory;
    this.zkDb = zkDb;
    this.tickTime = tickTime;
    // -1 means "derive from tickTime" (x2 / x20) — see the setters below.
    setMinSessionTimeout(minSessionTimeout);
    setMaxSessionTimeout(maxSessionTimeout);
    LOG.info("Created server with tickTime " + tickTime
            + " minSessionTimeout " + getMinSessionTimeout()
            + " maxSessionTimeout " + getMaxSessionTimeout()
            + " datadir " + txnLogFactory.getDataDir()
            + " snapdir " + txnLogFactory.getSnapDir());
}

/**
 * creates a zookeeperserver instance with default session-timeout bounds
 * and a fresh ZKDatabase backed by the given snapshot/txn-log factory.
 *
 * @param txnLogFactory the file transaction snapshot logging class
 * @param tickTime the ticktime for the server
 * @throws IOException if the ZKDatabase cannot be created
 */
public ZooKeeperServer(FileTxnSnapLog txnLogFactory, int tickTime)
        throws IOException {
    this(txnLogFactory, tickTime, -1, -1, new ZKDatabase(txnLogFactory));
}

/** Returns the live statistics holder for this server. */
public ServerStats serverStats() {
    return serverStats;
}

/**
 * Dumps the server configuration (one key=value per line) to the given
 * writer; used by the "conf" four-letter-word command.
 */
public void dumpConf(PrintWriter pwriter) {
    pwriter.print("clientPort=");
    pwriter.println(getClientPort());
    pwriter.print("dataDir=");
    pwriter.println(zkDb.snapLog.getSnapDir().getAbsolutePath());
    pwriter.print("dataLogDir=");
    pwriter.println(zkDb.snapLog.getDataDir().getAbsolutePath());
    pwriter.print("tickTime=");
    pwriter.println(getTickTime());
    pwriter.print("maxClientCnxns=");
    pwriter.println(serverCnxnFactory.getMaxClientCnxnsPerHost());
    pwriter.print("minSessionTimeout=");
    pwriter.println(getMinSessionTimeout());
    pwriter.print("maxSessionTimeout=");
    pwriter.println(getMaxSessionTimeout());
    pwriter.print("serverId=");
    pwriter.println(getServerId());
}

/** Snapshot of the current configuration as a value object. */
public ZooKeeperServerConf getConf() {
    return new ZooKeeperServerConf
        (getClientPort(),
         zkDb.snapLog.getSnapDir().getAbsolutePath(),
         zkDb.snapLog.getDataDir().getAbsolutePath(),
         getTickTime(),
         serverCnxnFactory.getMaxClientCnxnsPerHost(),
         getMinSessionTimeout(),
         getMaxSessionTimeout(),
         getServerId());
}

/**
 * This constructor is for backward compatibility with the existing unit
 * test code.
 * It defaults to FileLogProvider persistence provider.
 */
public ZooKeeperServer(File snapDir, File logDir, int tickTime)
        throws IOException {
    this( new FileTxnSnapLog(snapDir, logDir), tickTime);
}

/**
 * Default constructor, relies on the config for its argument values
 *
 * @throws IOException if the ZKDatabase cannot be created
 */
public ZooKeeperServer(FileTxnSnapLog txnLogFactory) throws IOException {
    this(txnLogFactory, DEFAULT_TICK_TIME, -1, -1,
            new ZKDatabase(txnLogFactory));
}

/**
 * get the zookeeper database for this server
 * @return the zookeeper database for this server
 */
public ZKDatabase getZKDatabase() {
    return this.zkDb;
}

/**
 * set the zkdatabase for this zookeeper server
 * @param zkDb the database to use
 */
public void setZKDatabase(ZKDatabase zkDb) {
    this.zkDb = zkDb;
}

/**
 * Restore sessions and data, expire sessions with no recorded timeout,
 * and write a clean snapshot of the resulting state.
 */
public void loadData() throws IOException, InterruptedException {
    /*
     * When a new leader starts executing Leader#lead, it
     * invokes this method. The database, however, has been
     * initialized before running leader election so that
     * the server could pick its zxid for its initial vote.
     * It does it by invoking QuorumPeer#getLastLoggedZxid.
     * Consequently, we don't need to initialize it once more
     * and avoid the penalty of loading it a second time. Not
     * reloading it is particularly important for applications
     * that host a large database.
     *
     * The following if block checks whether the database has
     * been initialized or not. Note that this method is
     * invoked by at least one other method:
     * ZooKeeperServer#startdata.
     *
     * See ZOOKEEPER-1642 for more detail.
     */
    if(zkDb.isInitialized()){
        setZxid(zkDb.getDataTreeLastProcessedZxid());
    }
    else {
        setZxid(zkDb.loadDataBase());
    }

    // Clean up dead sessions: a session id present in the DB but with no
    // recorded timeout cannot be tracked, so it is killed.
    LinkedList<Long> deadSessions = new LinkedList<Long>();
    for (Long session : zkDb.getSessions()) {
        if (zkDb.getSessionWithTimeOuts().get(session) == null) {
            deadSessions.add(session);
        }
    }
    for (long session : deadSessions) {
        // XXX: Is lastProcessedZxid really the best thing to use?
        killSession(session, zkDb.getDataTreeLastProcessedZxid());
    }

    // Make a clean snapshot
    takeSnapshot();
}

/**
 * Persists the current data tree and session table to disk. A snapshot
 * failure is treated as unrecoverable and exits the process.
 */
public void takeSnapshot(){
    try {
        txnLogFactory.save(zkDb.getDataTree(), zkDb.getSessionWithTimeOuts());
    } catch (IOException e) {
        LOG.error("Severe unrecoverable error, exiting", e);
        // This is a severe error that we cannot recover from,
        // so we need to exit
        System.exit(10);
    }
}

/** Last zxid handed out / observed by this server. */
public long getZxid() {
    return hzxid.get();
}

public SessionTracker getSessionTracker() {
    return sessionTracker;
}

/** Atomically reserves and returns the next zxid. */
long getNextZxid() {
    return hzxid.incrementAndGet();
}

public void setZxid(long zxid) {
    hzxid.set(zxid);
}

// Wall-clock time source; kept as a method so subclasses/tests can observe.
long getTime() {
    return System.currentTimeMillis();
}

/** Queues a closeSession request for the given session id. */
private void close(long sessionId) {
    Request si = new Request(null, sessionId, 0, OpCode.closeSession, null, null);
    setLocalSessionFlag(si);
    submitRequest(si);
}

public void closeSession(long sessionId) {
    LOG.info("Closing session 0x" + Long.toHexString(sessionId));

    // we do not want to wait for a session close. send it as soon as we
    // detect it!
    close(sessionId);
}

/**
 * Removes the session's ephemeral state from the database at the given
 * zxid and drops it from the session tracker (if one exists yet).
 */
protected void killSession(long sessionId, long zxid) {
    zkDb.killSession(sessionId, zxid);
    if (LOG.isTraceEnabled()) {
        ZooTrace.logTraceMessage(LOG, ZooTrace.SESSION_TRACE_MASK,
                                     "ZooKeeperServer --- killSession: 0x"
                + Long.toHexString(sessionId));
    }
    if (sessionTracker != null) {
        sessionTracker.removeSession(sessionId);
    }
}

/** Callback from the session tracker when a session times out. */
public void expire(Session session) {
    long sessionId = session.getSessionId();
    LOG.info("Expiring session 0x" + Long.toHexString(sessionId)
            + ", timeout of " + session.getTimeout() + "ms exceeded");
    close(sessionId);
}

/** Thrown when a request arrives for a session that no longer exists. */
public static class MissingSessionException extends IOException {
    private static final long serialVersionUID = 7467414635467261007L;

    public MissingSessionException(String msg) {
        super(msg);
    }
}

/**
 * Refreshes the session associated with the connection; throws
 * MissingSessionException if the session has already expired.
 */
void touch(ServerCnxn cnxn) throws MissingSessionException {
    if (cnxn == null) {
        return;
    }
    long id = cnxn.getSessionId();
    int to = cnxn.getSessionTimeout();
    if (!sessionTracker.touchSession(id, to)) {
        throw new MissingSessionException(
                "No session with sessionid 0x" + Long.toHexString(id)
                + " exists, probably expired and removed");
    }
}

protected void registerJMX() {
    // register with JMX; the data-tree bean is a child of the server bean,
    // so failures are handled independently (partial registration is ok).
    try {
        jmxServerBean = new ZooKeeperServerBean(this);
        MBeanRegistry.getInstance().register(jmxServerBean, null);

        try {
            jmxDataTreeBean = new DataTreeBean(zkDb.getDataTree());
            MBeanRegistry.getInstance().register(jmxDataTreeBean, jmxServerBean);
        } catch (Exception e) {
            LOG.warn("Failed to register with JMX", e);
            jmxDataTreeBean = null;
        }
    } catch (Exception e) {
        LOG.warn("Failed to register with JMX", e);
        jmxServerBean = null;
    }
}

/** Ensures the database exists and is loaded; idempotent. */
public void startdata() throws IOException, InterruptedException {
    //check to see if zkDb is not null
    if (zkDb == null) {
        zkDb = new ZKDatabase(this.txnLogFactory);
    }
    if (!zkDb.isInitialized()) {
        loadData();
    }
}

/**
 * Brings the server to the running state: session tracking, the request
 * processor chain and JMX. Waiters on {@code this} (see submitRequest)
 * are released once {@code running} is set.
 */
public void startup() {
    if (sessionTracker == null) {
        createSessionTracker();
    }
    startSessionTracker();
    setupRequestProcessors();

    registerJMX();

    synchronized (this) {
        running = true;
        notifyAll();
    }
}
protected void setupRequestProcessors() { RequestProcessor finalProcessor = new FinalRequestProcessor(this); RequestProcessor syncProcessor = new SyncRequestProcessor(this, finalProcessor); ((SyncRequestProcessor)syncProcessor).start(); firstProcessor = new PrepRequestProcessor(this, syncProcessor); ((PrepRequestProcessor)firstProcessor).start(); } protected void createSessionTracker() { sessionTracker = new SessionTrackerImpl(this, zkDb.getSessionWithTimeOuts(), tickTime, 1); } protected void startSessionTracker() { ((SessionTrackerImpl)sessionTracker).start(); } public boolean isRunning() { return running; } public void shutdown() { LOG.info("shutting down"); // new RuntimeException("Calling shutdown").printStackTrace(); this.running = false; // Since sessionTracker and syncThreads poll we just have to // set running to false and they will detect it during the poll // interval. if (sessionTracker != null) { sessionTracker.shutdown(); } if (firstProcessor != null) { firstProcessor.shutdown(); } if (zkDb != null) { zkDb.clear(); } unregisterJMX(); } protected void unregisterJMX() { // unregister from JMX try { if (jmxDataTreeBean != null) { MBeanRegistry.getInstance().unregister(jmxDataTreeBean); } } catch (Exception e) { LOG.warn("Failed to unregister with JMX", e); } try { if (jmxServerBean != null) { MBeanRegistry.getInstance().unregister(jmxServerBean); } } catch (Exception e) { LOG.warn("Failed to unregister with JMX", e); } jmxServerBean = null; jmxDataTreeBean = null; } public void incInProcess() { requestsInProcess.incrementAndGet(); } public void decInProcess() { requestsInProcess.decrementAndGet(); } public int getInProcess() { return requestsInProcess.get(); } /** * This structure is used to facilitate information sharing between PrepRP * and FinalRP. 
*/ static class ChangeRecord { ChangeRecord(long zxid, String path, StatPersisted stat, int childCount, List<ACL> acl) { this.zxid = zxid; this.path = path; this.stat = stat; this.childCount = childCount; this.acl = acl; } long zxid; String path; StatPersisted stat; /* Make sure to create a new object when changing */ int childCount; List<ACL> acl; /* Make sure to create a new object when changing */ ChangeRecord duplicate(long zxid) { StatPersisted stat = new StatPersisted(); if (this.stat != null) { DataTree.copyStatPersisted(this.stat, stat); } return new ChangeRecord(zxid, path, stat, childCount, acl == null ? new ArrayList<ACL>() : new ArrayList<ACL>(acl)); } } byte[] generatePasswd(long id) { Random r = new Random(id ^ superSecret); byte p[] = new byte[16]; r.nextBytes(p); return p; } protected boolean checkPasswd(long sessionId, byte[] passwd) { return sessionId != 0 && Arrays.equals(passwd, generatePasswd(sessionId)); } long createSession(ServerCnxn cnxn, byte passwd[], int timeout) { if (passwd == null) { // Possible since it's just deserialized from a packet on the wire. 
passwd = new byte[0]; } long sessionId = sessionTracker.createSession(timeout); Random r = new Random(sessionId ^ superSecret); r.nextBytes(passwd); ByteBuffer to = ByteBuffer.allocate(4); to.putInt(timeout); cnxn.setSessionId(sessionId); Request si = new Request(cnxn, sessionId, 0, OpCode.createSession, to, null); setLocalSessionFlag(si); submitRequest(si); return sessionId; } /** * set the owner of this session as owner * @param id the session id * @param owner the owner of the session * @throws SessionExpiredException */ public void setOwner(long id, Object owner) throws SessionExpiredException { sessionTracker.setOwner(id, owner); } protected void revalidateSession(ServerCnxn cnxn, long sessionId, int sessionTimeout) throws IOException { boolean rc = sessionTracker.touchSession(sessionId, sessionTimeout); if (LOG.isTraceEnabled()) { ZooTrace.logTraceMessage(LOG,ZooTrace.SESSION_TRACE_MASK, "Session 0x" + Long.toHexString(sessionId) + " is valid: " + rc); } finishSessionInit(cnxn, rc); } public void reopenSession(ServerCnxn cnxn, long sessionId, byte[] passwd, int sessionTimeout) throws IOException { if (checkPasswd(sessionId, passwd)) { revalidateSession(cnxn, sessionId, sessionTimeout); } else { LOG.warn("Incorrect password from " + cnxn.getRemoteSocketAddress() + " for session 0x" + Long.toHexString(sessionId)); finishSessionInit(cnxn, false); } } public void finishSessionInit(ServerCnxn cnxn, boolean valid) { // register with JMX try { if (valid) { serverCnxnFactory.registerConnection(cnxn); } } catch (Exception e) { LOG.warn("Failed to register with JMX", e); } try { ConnectResponse rsp = new ConnectResponse(0, valid ? cnxn.getSessionTimeout() : 0, valid ? cnxn.getSessionId() : 0, // send 0 if session is no // longer valid valid ? 
generatePasswd(cnxn.getSessionId()) : new byte[16]); ByteArrayOutputStream baos = new ByteArrayOutputStream(); BinaryOutputArchive bos = BinaryOutputArchive.getArchive(baos); bos.writeInt(-1, "len"); rsp.serialize(bos, "connect"); if (!cnxn.isOldClient) { bos.writeBool( this instanceof ReadOnlyZooKeeperServer, "readOnly"); } baos.close(); ByteBuffer bb = ByteBuffer.wrap(baos.toByteArray()); bb.putInt(bb.remaining() - 4).rewind(); cnxn.sendBuffer(bb); if (valid) { LOG.info("Established session 0x" + Long.toHexString(cnxn.getSessionId()) + " with negotiated timeout " + cnxn.getSessionTimeout() + " for client " + cnxn.getRemoteSocketAddress()); cnxn.enableRecv(); } else { LOG.info("Invalid session 0x" + Long.toHexString(cnxn.getSessionId()) + " for client " + cnxn.getRemoteSocketAddress() + ", probably expired"); cnxn.sendBuffer(ServerCnxnFactory.closeConn); } } catch (Exception e) { LOG.warn("Exception while establishing session, closing", e); cnxn.close(); } } public void closeSession(ServerCnxn cnxn, RequestHeader requestHeader) { closeSession(cnxn.getSessionId()); } public long getServerId() { return 0; } /** * If the underlying Zookeeper server support local session, this method * will set a isLocalSession to true if a request is associated with * a local session. 
* * @param si */ protected void setLocalSessionFlag(Request si) { } public void submitRequest(Request si) { if (firstProcessor == null) { synchronized (this) { try { while (!running) { wait(1000); } } catch (InterruptedException e) { LOG.warn("Unexpected interruption", e); } if (firstProcessor == null) { throw new RuntimeException("Not started"); } } } try { touch(si.cnxn); boolean validpacket = Request.isValid(si.type); if (validpacket) { firstProcessor.processRequest(si); if (si.cnxn != null) { incInProcess(); } } else { LOG.warn("Received packet at server of unknown type " + si.type); new UnimplementedRequestProcessor().processRequest(si); } } catch (MissingSessionException e) { if (LOG.isDebugEnabled()) { LOG.debug("Dropping request: " + e.getMessage()); } } catch (RequestProcessorException e) { LOG.error("Unable to process request:" + e.getMessage(), e); } } public static int getSnapCount() { String sc = System.getProperty("zookeeper.snapCount"); try { int snapCount = Integer.parseInt(sc); // snapCount must be 2 or more. See org.apache.zookeeper.server.SyncRequestProcessor if( snapCount < 2 ) { LOG.warn("SnapCount should be 2 or more. 
Now, snapCount is reset to 2"); snapCount = 2; } snapCount=2; return snapCount; } catch (Exception e) { return 2; } } public int getGlobalOutstandingLimit() { String sc = System.getProperty("zookeeper.globalOutstandingLimit"); int limit; try { limit = Integer.parseInt(sc); } catch (Exception e) { limit = 1000; } return limit; } public void setServerCnxnFactory(ServerCnxnFactory factory) { serverCnxnFactory = factory; } public ServerCnxnFactory getServerCnxnFactory() { return serverCnxnFactory; } /** * return the last proceesed id from the * datatree */ public long getLastProcessedZxid() { return zkDb.getDataTreeLastProcessedZxid(); } /** * return the outstanding requests * in the queue, which havent been * processed yet */ public long getOutstandingRequests() { return getInProcess(); } /** * return the total number of client connections that are alive * to this server */ public int getNumAliveConnections() { return serverCnxnFactory.getNumAliveConnections(); } /** * trunccate the log to get in sync with others * if in a quorum * @param zxid the zxid that it needs to get in sync * with others * @throws IOException */ public void truncateLog(long zxid) throws IOException { this.zkDb.truncateLog(zxid); } public int getTickTime() { return tickTime; } public void setTickTime(int tickTime) { LOG.info("tickTime set to " + tickTime); this.tickTime = tickTime; } public int getMinSessionTimeout() { return minSessionTimeout; } public void setMinSessionTimeout(int min) { this.minSessionTimeout = min == -1 ? tickTime * 2 : min; LOG.info("minSessionTimeout set to {}", this.minSessionTimeout); } public int getMaxSessionTimeout() { return maxSessionTimeout; } public void setMaxSessionTimeout(int max) { this.maxSessionTimeout = max == -1 ? tickTime * 20 : max; LOG.info("maxSessionTimeout set to {}", this.maxSessionTimeout); } public int getClientPort() { return serverCnxnFactory != null ? 
serverCnxnFactory.getLocalPort() : -1; } public void setTxnLogFactory(FileTxnSnapLog txnLog) { this.txnLogFactory = txnLog; } public FileTxnSnapLog getTxnLogFactory() { return this.txnLogFactory; } public String getState() { return "standalone"; } public void dumpEphemerals(PrintWriter pwriter) { zkDb.dumpEphemerals(pwriter); } public Map<Long, Set<String>> getEphemerals() { return zkDb.getEphemerals(); } public void processConnectRequest(ServerCnxn cnxn, ByteBuffer incomingBuffer) throws IOException { BinaryInputArchive bia = BinaryInputArchive.getArchive(new ByteBufferInputStream(incomingBuffer)); ConnectRequest connReq = new ConnectRequest(); connReq.deserialize(bia, "connect"); if (LOG.isDebugEnabled()) { LOG.debug("Session establishment request from client " + cnxn.getRemoteSocketAddress() + " client's lastZxid is 0x" + Long.toHexString(connReq.getLastZxidSeen())); } boolean readOnly = false; try { readOnly = bia.readBool("readOnly"); cnxn.isOldClient = false; } catch (IOException e) { // this is ok -- just a packet from an old client which // doesn't contain readOnly field LOG.warn("Connection request from old client " + cnxn.getRemoteSocketAddress() + "; will be dropped if server is in r-o mode"); } if (!readOnly && this instanceof ReadOnlyZooKeeperServer) { String msg = "Refusing session request for not-read-only client " + cnxn.getRemoteSocketAddress(); LOG.info(msg); throw new CloseRequestException(msg); } if (connReq.getLastZxidSeen() > zkDb.dataTree.lastProcessedZxid) { String msg = "Refusing session request for client " + cnxn.getRemoteSocketAddress() + " as it has seen zxid 0x" + Long.toHexString(connReq.getLastZxidSeen()) + " our last zxid is 0x" + Long.toHexString(getZKDatabase().getDataTreeLastProcessedZxid()) + " client must try another server"; LOG.info(msg); throw new CloseRequestException(msg); } int sessionTimeout = connReq.getTimeOut(); byte passwd[] = connReq.getPasswd(); int minSessionTimeout = getMinSessionTimeout(); if (sessionTimeout < 
minSessionTimeout) { sessionTimeout = minSessionTimeout; } int maxSessionTimeout = getMaxSessionTimeout(); if (sessionTimeout > maxSessionTimeout) { sessionTimeout = maxSessionTimeout; } cnxn.setSessionTimeout(sessionTimeout); // We don't want to receive any packets until we are sure that the // session is setup cnxn.disableRecv(); long sessionId = connReq.getSessionId(); if (sessionId == 0) { LOG.info("Client attempting to establish new session at " + cnxn.getRemoteSocketAddress()); createSession(cnxn, passwd, sessionTimeout); } else { long clientSessionId = connReq.getSessionId(); LOG.info("Client attempting to renew session 0x" + Long.toHexString(clientSessionId) + " at " + cnxn.getRemoteSocketAddress()); serverCnxnFactory.closeSession(sessionId); cnxn.setSessionId(sessionId); reopenSession(cnxn, sessionId, passwd, sessionTimeout); } } public boolean shouldThrottle(long outStandingCount) { if (getGlobalOutstandingLimit() < getInProcess()) { return outStandingCount > 0; } return false; } public void processPacket(ServerCnxn cnxn, ByteBuffer incomingBuffer) throws IOException { // We have the request, now process and setup for next InputStream bais = new ByteBufferInputStream(incomingBuffer); BinaryInputArchive bia = BinaryInputArchive.getArchive(bais); RequestHeader h = new RequestHeader(); h.deserialize(bia, "header"); // Through the magic of byte buffers, txn will not be // pointing // to the start of the txn incomingBuffer = incomingBuffer.slice(); if (h.getType() == OpCode.auth) { LOG.info("got auth packet " + cnxn.getRemoteSocketAddress()); AuthPacket authPacket = new AuthPacket(); ByteBufferInputStream.byteBuffer2Record(incomingBuffer, authPacket); String scheme = authPacket.getScheme(); AuthenticationProvider ap = ProviderRegistry.getProvider(scheme); Code authReturn = KeeperException.Code.AUTHFAILED; if(ap != null) { try { authReturn = ap.handleAuthentication(cnxn, authPacket.getAuth()); } catch(RuntimeException e) { LOG.warn("Caught runtime exception 
from AuthenticationProvider: " + scheme + " due to " + e); authReturn = KeeperException.Code.AUTHFAILED; } } if (authReturn == KeeperException.Code.OK) { if (LOG.isDebugEnabled()) { LOG.debug("Authentication succeeded for scheme: " + scheme); } LOG.info("auth success " + cnxn.getRemoteSocketAddress()); ReplyHeader rh = new ReplyHeader(h.getXid(), 0, KeeperException.Code.OK.intValue()); cnxn.sendResponse(rh, null, null); } else { if (ap == null) { LOG.warn("No authentication provider for scheme: " + scheme + " has " + ProviderRegistry.listProviders()); } else { LOG.warn("Authentication failed for scheme: " + scheme); } // send a response... ReplyHeader rh = new ReplyHeader(h.getXid(), 0, KeeperException.Code.AUTHFAILED.intValue()); cnxn.sendResponse(rh, null, null); // ... and close connection cnxn.sendBuffer(ServerCnxnFactory.closeConn); cnxn.disableRecv(); } return; } else { if (h.getType() == OpCode.sasl) { Record rsp = processSasl(incomingBuffer,cnxn); ReplyHeader rh = new ReplyHeader(h.getXid(), 0, KeeperException.Code.OK.intValue()); cnxn.sendResponse(rh,rsp, "response"); // not sure about 3rd arg..what is it? } else { Request si = new Request(cnxn, cnxn.getSessionId(), h.getXid(), h.getType(), incomingBuffer, cnxn.getAuthInfo()); si.setOwner(ServerCnxn.me); // Always treat packet from the client as a possible // local request. 
setLocalSessionFlag(si); submitRequest(si); } } cnxn.incrOutstandingRequests(h); } private Record processSasl(ByteBuffer incomingBuffer, ServerCnxn cnxn) throws IOException { LOG.debug("Responding to client SASL token."); GetSASLRequest clientTokenRecord = new GetSASLRequest(); ByteBufferInputStream.byteBuffer2Record(incomingBuffer,clientTokenRecord); byte[] clientToken = clientTokenRecord.getToken(); LOG.debug("Size of client SASL token: " + clientToken.length); byte[] responseToken = null; try { ZooKeeperSaslServer saslServer = cnxn.zooKeeperSaslServer; try { // note that clientToken might be empty (clientToken.length == 0): // if using the DIGEST-MD5 mechanism, clientToken will be empty at the beginning of the // SASL negotiation process. responseToken = saslServer.evaluateResponse(clientToken); if (saslServer.isComplete()) { String authorizationID = saslServer.getAuthorizationID(); LOG.info("adding SASL authorization for authorizationID: " + authorizationID); cnxn.addAuthInfo(new Id("sasl",authorizationID)); } } catch (SaslException e) { LOG.warn("Client failed to SASL authenticate: " + e); if ((System.getProperty("zookeeper.allowSaslFailedClients") != null) && (System.getProperty("zookeeper.allowSaslFailedClients").equals("true"))) { LOG.warn("Maintaining client connection despite SASL authentication failure."); } else { LOG.warn("Closing client connection due to SASL authentication failure."); cnxn.close(); } } } catch (NullPointerException e) { LOG.error("cnxn.saslServer is null: cnxn object did not initialize its saslServer properly."); } if (responseToken != null) { LOG.debug("Size of server SASL response: " + responseToken.length); } // wrap SASL response token to client inside a Response object. 
return new SetSASLResponse(responseToken); } // entry point for quorum/Learner.java public ProcessTxnResult processTxn(TxnHeader hdr, Record txn) { return processTxn(null, hdr, txn); } // entry point for FinalRequestProcessor.java public ProcessTxnResult processTxn(Request request) { return processTxn(request, request.getHdr(), request.getTxn()); } private ProcessTxnResult processTxn(Request request, TxnHeader hdr, Record txn) { ProcessTxnResult rc; int opCode = request != null ? request.type : hdr.getType(); long sessionId = request != null ? request.sessionId : hdr.getClientId(); if (hdr != null) { rc = getZKDatabase().processTxn(hdr, txn); } else { rc = new ProcessTxnResult(); } if (opCode == OpCode.createSession) { if (hdr != null && txn instanceof CreateSessionTxn) { CreateSessionTxn cst = (CreateSessionTxn) txn; sessionTracker.addGlobalSession(sessionId, cst.getTimeOut()); } else if (request != null && request.isLocalSession()) { request.request.rewind(); int timeout = request.request.getInt(); request.request.rewind(); sessionTracker.addSession(request.sessionId, timeout); } else { LOG.warn("*****>>>>> Got " + txn.getClass() + " " + txn.toString()); } } else if (opCode == OpCode.closeSession) { sessionTracker.removeSession(sessionId); } return rc; } public Map<Long, Set<Long>> getSessionExpiryMap() { return sessionTracker.getSessionExpiryMap(); } }