package com.google.gerrit.server.project;
import com.google.gerrit.common.errors.AuthorCommitsFailedException;
import com.google.gerrit.reviewdb.client.Project;
import com.google.gerrit.server.IdentifiedUser;
import com.google.gerrit.server.git.GitRepositoryManager;
import com.google.inject.Inject;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.revwalk.RevCommit;
import org.eclipse.jgit.revwalk.RevWalk;
import org.eclipse.jgit.revwalk.filter.AuthorRevFilter;
import org.eclipse.jgit.revwalk.filter.RevFilter;
import org.eclipse.jgit.errors.RepositoryNotFoundException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedWriter;
import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.io.UnsupportedEncodingException;
import java.util.LinkedList;
import java.util.List;
/** Class to list commits of an author pertaining to a project. */
public class AuthorCommits {
private static final Logger LOGAUTHORCOMMITS = LoggerFactory
.getLogger(AuthorCommits.class);
private final GitRepositoryManager repoManager;
private final IdentifiedUser currentUser;
/**
* Constructs the AuthorCommits instance with its injected dependencies.
*
* @param manager
* manager used to open git repositories
*
* @param user
* the currently identified user
*/
@Inject
public AuthorCommits(final GitRepositoryManager manager,
final IdentifiedUser user) {
this.repoManager = manager;
this.currentUser = user;
}
private final List<CommitInfo> logInfo = new LinkedList<CommitInfo>();
private int count = -1;
/**
* Returns the commit log
*
* @return commit log
*/
public final List<CommitInfo> getCommits() {
return logInfo;
}
/**
* Builds the commit log of a project for the given author.
*
* @param project
* project name key
*
* @param author
* author name pattern
*
* @throws AuthorCommitsFailedException
* if the repository is not found, the user lacks the required
* capability, or the author name contains illegal characters
*/
public final void setCommits(final Project.NameKey project,
final String author) throws AuthorCommitsFailedException {
validateParameters(project, author);
try {
Repository repo = repoManager.openRepository(project);
RevWalk walk = new RevWalk(repo);
walk.markStart(walk.parseCommit(repo.resolve("HEAD")));
RevFilter authFilter = AuthorRevFilter.create(author);
walk.setRevFilter(authFilter);
for (RevCommit commit : walk) {
CommitInfo info = new CommitInfo();
info.setId(commit.getId().getName());
info.setAuth(commit.getAuthorIdent().toExternalString());
info.setDate(commit.getAuthorIdent().getWhen().toString());
info.setMsg(commit.getFullMessage());
logInfo.add(info);
count++;
}
} catch (RepositoryNotFoundException badName) {
throw new AuthorCommitsFailedException("Cannot list commits of repo "
+ project, badName);
} catch (IOException io) {
String msg = "Cannot list commits of repo " + project;
LOGAUTHORCOMMITS.error(msg, io);
throw new AuthorCommitsFailedException(msg, io);
} catch (Exception e) {
String msg = "Cannot list commits of repo " + project;
LOGAUTHORCOMMITS.error(msg, e);
throw new AuthorCommitsFailedException(msg, e);
}
}
/**
* Displays the collected commits.
*
* @param out
* output stream to write the commits to
*/
public final void display(final OutputStream out) {
final PrintWriter stdout;
try {
stdout =
new PrintWriter(new BufferedWriter(new OutputStreamWriter(out,
"UTF-8")));
} catch (UnsupportedEncodingException e) {
// Our encoding is required by the specifications for the runtime.
throw new RuntimeException("JVM lacks UTF-8 encoding", e);
}
if (count == -1) {
stdout.println("NO COMMITS FOUND");
stdout.flush();
} else {
stdout.println("");
for (CommitInfo info : logInfo) {
stdout.println("commit " + info.getId());
stdout.println("Author: " + info.getAuth());
stdout.println("Date: " + info.getDate());
stdout.println("");
stdout.println("\t" + info.getMsg());
}
stdout.flush();
}
}
/**
* Validates the request: checks the user's capability, strips a trailing
* .git extension from the project name, and checks the author pattern.
*
* @param project
* project name key
*
* @param author
* author name pattern
*
* @throws AuthorCommitsFailedException
* if the user lacks the required capability or the author
* pattern contains illegal characters
*/
public final void validateParameters(final Project.NameKey project,
final String author) throws AuthorCommitsFailedException {
if (!currentUser.getCapabilities().canListAuthorCommits()) {
throw new AuthorCommitsFailedException(String.format(
"%s does not have \"Listing an author's commits\" capability.",
currentUser.getUserName()));
}
if (project.get().endsWith(Constants.DOT_GIT_EXT)) {
project.set(project.get().substring(0,
project.get().length() - Constants.DOT_GIT_EXT.length()));
}
if (!author.matches("[a-zA-Z]+")) {
throw new AuthorCommitsFailedException("No special characters allowed");
}
}
/**
* CommitInfo bean class
*
* @author keerathj
*
*/
public static class CommitInfo {
private String id;
private String auth;
private String date;
private String msg;
/**
* Gets the commit id
*
* @return commit id
*/
public final String getId() {
return id;
}
/**
* Gets the commit author
*
* @return commit author
*/
public final String getAuth() {
return auth;
}
/**
* Gets the commit date
*
* @return commit date
*/
public final String getDate() {
return date;
}
/**
* Gets the commit message
*
* @return commit message
*/
public final String getMsg() {
return msg;
}
/**
* Sets the commit id
*
* @param commitId
* commit id
*/
public final void setId(final String commitId) {
this.id = commitId;
}
/**
* Sets the commit author
*
* @param commitAuth
* commit author
*/
public final void setAuth(final String commitAuth) {
this.auth = commitAuth;
}
/**
* Sets the commit date
*
* @param commitDate
* commit date
*/
public final void setDate(final String commitDate) {
this.date = commitDate;
}
/**
* Sets the commit message
*
* @param commitMsg
* commit message
*/
public final void setMsg(final String commitMsg) {
this.msg = commitMsg;
}
/* (non-Javadoc)
* @see java.lang.Object#equals(java.lang.Object)
*/
@Override
public final boolean equals(final Object commit) {
if (commit instanceof CommitInfo) {
CommitInfo c = (CommitInfo) commit;
return getId().equals(c.getId()) && getAuth().equals(c.getAuth())
&& getDate().equals(c.getDate()) && getMsg().equals(c.getMsg());
}
return false;
}
/* (non-Javadoc)
* @see java.lang.Object#hashCode()
*/
@Override
public final int hashCode() {
return auth.hashCode();
}
}
}
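// Usage sketch (not part of the original source): a minimal, hypothetical caller showing
// how AuthorCommits might be driven once Guice has injected it. The class name and method
// below are illustrative assumptions, not Gerrit API.
class AuthorCommitsExample {
private final AuthorCommits authorCommits;
@Inject
AuthorCommitsExample(final AuthorCommits authorCommits) {
this.authorCommits = authorCommits;
}
void listCommits(final Project.NameKey project, final String author,
final OutputStream out) throws AuthorCommitsFailedException {
// Validates capability and author pattern, then collects the matching commits.
authorCommits.setCommits(project, author);
// Prints the collected commits in a git-log style listing.
authorCommits.display(out);
}
}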
|
|
package input.controller.logic;
import java.util.EventListener;
import java.util.concurrent.atomic.AtomicLong;
import javax.swing.event.EventListenerList;
import org.lwjgl.input.Controller;
import org.lwjgl.input.Controllers;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import input.controller.Constants;
public class ControllerEventQueue {
private static final Logger LOG = LoggerFactory
.getLogger(ControllerEventQueue.class);
private static ControllerEventQueue INSTANCE;
private EventListenerList listenerList = new EventListenerList();
private AtomicLong pollTime = new AtomicLong(
Constants.CONTROLLER_POLL_INTERVAL);
private EventThread eventThread;
private ControllerEventQueue() {
}
public static ControllerEventQueue getInstance() {
if (INSTANCE == null) {
INSTANCE = new ControllerEventQueue();
}
return INSTANCE;
}
public long getPollTime() {
return pollTime.get();
}
public long setPollTime(long pollTime) {
LOG.info("Set poll time {}", pollTime);
return this.pollTime.getAndSet(pollTime);
}
public void addControllerListener(ControllerListener l) {
synchronized (listenerList) {
listenerList.add(ControllerListener.class, l);
}
}
public void removeControllerListener(ControllerListener l) {
synchronized (listenerList) {
listenerList.remove(ControllerListener.class, l);
}
}
/**
* Starts the event queue if it is not already running.
*
* @return true if a new event queue was started, false otherwise
*/
public boolean start() {
if (eventThread != null) {
return false;
}
eventThread = new EventThread();
eventThread.start();
return true;
}
public boolean destroy() {
return destroy(0);
}
public boolean destroy(long millisWait) {
if (eventThread == null) {
return false;
}
eventThread.interrupt();
try {
eventThread.join(millisWait);
} catch (InterruptedException e) {
// Preserve the interrupt status for callers higher up the stack.
Thread.currentThread().interrupt();
}
return true;
}
private class EventThread extends Thread {
public EventThread() {
super("ControllerEventQueue");
}
@Override
public void run() {
while (!isInterrupted()) {
synchronized (listenerList) {
poll();
}
try {
Thread.sleep(getPollTime());
} catch (InterruptedException e) {
interrupt();
}
}
}
private void poll() {
synchronized (listenerList) {
if (listenerList.getListenerCount() <= 0) {
return;
}
}
Controllers.poll();
// Event driven
while (Controllers.next()) {
Controller source = Controllers.getEventSource();
ControllerEvent event = new ControllerEvent(source);
event.setButtonEvent(Controllers.isEventButton());
event.setAxisEvent(Controllers.isEventAxis());
event.setxAxisEvent(Controllers.isEventXAxis());
event.setyAxisEvent(Controllers.isEventYAxis());
event.setPovEvent(
Controllers.isEventPovX() || Controllers.isEventPovY());
event.setxPovEvent(Controllers.isEventPovX());
event.setyPovEvent(Controllers.isEventPovY());
event.setPressed(Controllers.getEventButtonState());
event.setControlIndex(Controllers.getEventControlIndex());
fireControllerEvent(event);
}
// Poll driven
for (int i = 0; i < Controllers.getControllerCount(); i++) {
ControllerEvent event = new ControllerEvent(
Controllers.getController(i));
fireControllerEvent(event);
}
}
private void fireControllerEvent(ControllerEvent event) {
synchronized (listenerList) {
ControllerListener[] listeners = listenerList
.getListeners(ControllerListener.class);
for (ControllerListener listener : listeners) {
if (listener == null) {
listenerList.remove(ControllerListener.class, listener);
continue;
}
try {
listener.action(event);
} catch (Exception e) {
LOG.warn("Controllerevent listener failed", e);
}
}
}
}
}
public static class ControllerEvent {
private Controller source;
private boolean buttonEvent = false;
private boolean axisEvent = false;
private boolean xAxisEvent = false;
private boolean yAxisEvent = false;
private boolean povEvent = false;
private boolean xPovEvent = false;
private boolean yPovEvent = false;
private boolean pressed = false;
private int controlIndex = -1;
public ControllerEvent(Controller source) {
this.source = source;
}
public Controller getSource() {
return this.source;
}
public boolean isButtonEvent() {
return buttonEvent;
}
public void setButtonEvent(boolean buttonEvent) {
this.buttonEvent = buttonEvent;
}
public boolean isAxisEvent() {
return axisEvent;
}
public void setAxisEvent(boolean axisEvent) {
this.axisEvent = axisEvent;
}
public boolean isxAxisEvent() {
return xAxisEvent;
}
public void setxAxisEvent(boolean xAxisEvent) {
this.xAxisEvent = xAxisEvent;
}
public boolean isyAxisEvent() {
return yAxisEvent;
}
public void setyAxisEvent(boolean yAxisEvent) {
this.yAxisEvent = yAxisEvent;
}
public boolean isPovEvent() {
return povEvent;
}
public void setPovEvent(boolean povEvent) {
this.povEvent = povEvent;
}
public boolean isxPovEvent() {
return xPovEvent;
}
public void setxPovEvent(boolean xPovEvent) {
this.xPovEvent = xPovEvent;
}
public boolean isyPovEvent() {
return yPovEvent;
}
public void setyPovEvent(boolean yPovEvent) {
this.yPovEvent = yPovEvent;
}
public boolean isPressed() {
return pressed;
}
public void setPressed(boolean pressed) {
this.pressed = pressed;
}
public int getControlIndex() {
return controlIndex;
}
public void setControlIndex(int controlIndex) {
this.controlIndex = controlIndex;
}
@Override
public String toString() {
return "ControllerEvent [source="
+ String.format("%d: %s", source.getIndex(),
source.getName())
+ ", buttonEvent=" + buttonEvent + ", axisEvent="
+ axisEvent + ", xAxisEvent=" + xAxisEvent + ", yAxisEvent="
+ yAxisEvent + ", povEvent=" + povEvent + ", xPovEvent="
+ xPovEvent + ", yPovEvent=" + yPovEvent + ", pressed="
+ pressed + ", controlIndex=" + controlIndex + "]";
}
}
public static interface ControllerListener extends EventListener {
void action(ControllerEvent event);
}
}
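// Usage sketch (not part of the original source): a hypothetical caller that registers a
// listener and runs the polling thread. It assumes LWJGL's Controllers environment has
// already been initialized (e.g. via Controllers.create()).
class ControllerEventQueueExample {
void run() {
final ControllerEventQueue queue = ControllerEventQueue.getInstance();
queue.setPollTime(50); // poll every 50 ms instead of the configured default
queue.addControllerListener(new ControllerEventQueue.ControllerListener() {
@Override
public void action(ControllerEventQueue.ControllerEvent event) {
// React only to button presses; axis and POV events are ignored here.
if (event.isButtonEvent() && event.isPressed()) {
System.out.println("Pressed control " + event.getControlIndex());
}
}
});
queue.start(); // returns false if the event thread is already running
// ... later, on shutdown:
queue.destroy(1000); // interrupt the thread and wait up to one second for it to stop
}
}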
|
|
/*
* Copyright (c) 2004-2022, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hisp.dhis.dxf2.events.event.csv;
import java.util.Objects;
import org.springframework.util.Assert;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
import com.google.common.base.MoreObjects;
/**
* @author Morten Olav Hansen <[email protected]>
*/
@JsonPropertyOrder( {
"event",
"status",
"program",
"programStage",
"enrollment",
"orgUnit",
"eventDate",
"dueDate",
"latitude",
"longitude",
"dataElement",
"value",
"storedBy",
"providedElsewhere",
"completedDate",
"completedBy",
"geometry"
} )
public class CsvEventDataValue
{
private String event;
private String status;
private String program;
private String programStage;
private String orgUnit;
private String enrollment;
private String eventDate;
private String dueDate;
private Double latitude;
private Double longitude;
private String dataElement;
private String value;
private String storedBy;
private Boolean providedElsewhere;
private String completedDate;
private String completedBy;
private String geometry;
public CsvEventDataValue()
{
}
public CsvEventDataValue( CsvEventDataValue dataValue )
{
Assert.notNull( dataValue, "A non-null CsvEventDataValue must be given as a parameter." );
event = dataValue.getEvent();
status = dataValue.getStatus();
program = dataValue.getProgram();
programStage = dataValue.getProgramStage();
enrollment = dataValue.getEnrollment();
orgUnit = dataValue.getOrgUnit();
eventDate = dataValue.getEventDate();
dueDate = dataValue.getDueDate();
latitude = dataValue.getLatitude();
longitude = dataValue.getLongitude();
dataElement = dataValue.getDataElement();
value = dataValue.getValue();
storedBy = dataValue.getStoredBy();
providedElsewhere = dataValue.getProvidedElsewhere();
completedDate = dataValue.getCompletedDate();
completedBy = dataValue.getCompletedBy();
geometry = dataValue.getGeometry();
}
@JsonProperty
public String getEvent()
{
return event;
}
public void setEvent( String event )
{
this.event = event;
}
@JsonProperty
public String getStatus()
{
return status;
}
public void setStatus( String status )
{
this.status = status;
}
@JsonProperty
public String getProgram()
{
return program;
}
public void setProgram( String program )
{
this.program = program;
}
@JsonProperty
public String getProgramStage()
{
return programStage;
}
public void setProgramStage( String programStage )
{
this.programStage = programStage;
}
@JsonProperty
public String getEnrollment()
{
return enrollment;
}
public void setEnrollment( String enrollment )
{
this.enrollment = enrollment;
}
@JsonProperty
public String getOrgUnit()
{
return orgUnit;
}
public void setOrgUnit( String orgUnit )
{
this.orgUnit = orgUnit;
}
@JsonProperty
public String getEventDate()
{
return eventDate;
}
public void setEventDate( String eventDate )
{
this.eventDate = eventDate;
}
@JsonProperty
public String getDueDate()
{
return dueDate;
}
public void setDueDate( String dueDate )
{
this.dueDate = dueDate;
}
@JsonProperty
public Double getLatitude()
{
return latitude;
}
public void setLatitude( Double latitude )
{
this.latitude = latitude;
}
@JsonProperty
public Double getLongitude()
{
return longitude;
}
public void setLongitude( Double longitude )
{
this.longitude = longitude;
}
@JsonProperty
public String getDataElement()
{
return dataElement;
}
public void setDataElement( String dataElement )
{
this.dataElement = dataElement;
}
@JsonProperty
public String getValue()
{
return value;
}
public void setValue( String value )
{
this.value = value;
}
@JsonProperty
public Boolean getProvidedElsewhere()
{
return providedElsewhere;
}
public void setProvidedElsewhere( Boolean providedElsewhere )
{
this.providedElsewhere = providedElsewhere;
}
@JsonProperty
public String getStoredBy()
{
return storedBy;
}
public void setStoredBy( String storedBy )
{
this.storedBy = storedBy;
}
@JsonProperty
public String getCompletedDate()
{
return this.completedDate;
}
public void setCompletedDate( String completedDate )
{
this.completedDate = completedDate;
}
@JsonProperty
public String getCompletedBy()
{
return this.completedBy;
}
public void setCompletedBy( String completedBy )
{
this.completedBy = completedBy;
}
@JsonProperty
public String getGeometry()
{
return geometry;
}
public void setGeometry( String geometry )
{
this.geometry = geometry;
}
@Override
public int hashCode()
{
return Objects.hash( event, status, program, programStage, orgUnit, enrollment, eventDate, dueDate,
dataElement, value, storedBy, providedElsewhere );
}
@Override
public boolean equals( Object obj )
{
if ( this == obj )
{
return true;
}
if ( obj == null || getClass() != obj.getClass() )
{
return false;
}
final CsvEventDataValue other = (CsvEventDataValue) obj;
return Objects.equals( this.event, other.event ) && Objects.equals( this.status, other.status )
&& Objects.equals( this.program,
other.program )
&& Objects.equals( this.programStage, other.programStage ) && Objects.equals( this.orgUnit,
other.orgUnit )
&& Objects.equals( this.enrollment, other.enrollment ) && Objects.equals( this.eventDate,
other.eventDate )
&& Objects.equals( this.dueDate, other.dueDate ) && Objects.equals( this.latitude,
other.latitude )
&& Objects.equals( this.longitude, other.longitude ) && Objects.equals( this.dataElement,
other.dataElement )
&& Objects.equals( this.value, other.value ) && Objects.equals( this.storedBy,
other.storedBy )
&& Objects.equals( this.providedElsewhere, other.providedElsewhere ) && Objects.equals( this.geometry,
other.geometry );
}
@Override
public String toString()
{
return MoreObjects.toStringHelper( this )
.add( "event", event )
.add( "status", status )
.add( "program", program )
.add( "programStage", programStage )
.add( "enrollment", enrollment )
.add( "orgUnit", orgUnit )
.add( "eventDate", eventDate )
.add( "dueDate", dueDate )
.add( "latitude", latitude )
.add( "longitude", longitude )
.add( "dataElement", dataElement )
.add( "value", value )
.add( "storedBy", storedBy )
.add( "providedElsewhere", providedElsewhere )
.add( "completedDate", completedDate )
.add( "completedBy", completedBy )
.add( "geometry", geometry )
.toString();
}
}
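// Usage sketch (not part of the original source): shows the defensive copy constructor in
// action. The helper class and field values below are illustrative only.
class CsvEventDataValueExample
{
CsvEventDataValue copyWithNewValue( CsvEventDataValue template, String dataElement, String value )
{
// Copy every field of the template row, then override the data element and value,
// so a single event can be expanded into one CSV line per data value.
CsvEventDataValue row = new CsvEventDataValue( template );
row.setDataElement( dataElement );
row.setValue( value );
return row;
}
}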
|
|
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.debugger.impl;
import com.intellij.debugger.DebuggerBundle;
import com.intellij.debugger.DebuggerManagerEx;
import com.intellij.debugger.engine.DebugProcessImpl;
import com.intellij.debugger.engine.DebuggerManagerThreadImpl;
import com.intellij.debugger.engine.JavaExecutionStack;
import com.intellij.debugger.engine.SuspendContextImpl;
import com.intellij.debugger.jdi.VirtualMachineProxyImpl;
import com.intellij.debugger.ui.breakpoints.BreakpointManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.util.StringBuilderSpinAllocator;
import com.intellij.util.concurrency.Semaphore;
import com.intellij.util.ui.MessageCategory;
import com.intellij.xdebugger.XDebugSession;
import com.intellij.xdebugger.frame.XExecutionStack;
import com.sun.jdi.ReferenceType;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.io.File;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* @author lex
*/
class ReloadClassesWorker {
private static final Logger LOG = Logger.getInstance("#com.intellij.debugger.impl.ReloadClassesWorker");
private final DebuggerSession myDebuggerSession;
private final HotSwapProgress myProgress;
public ReloadClassesWorker(DebuggerSession session, HotSwapProgress progress) {
myDebuggerSession = session;
myProgress = progress;
}
private DebugProcessImpl getDebugProcess() {
return myDebuggerSession.getProcess();
}
private void processException(Throwable e) {
if (e.getMessage() != null) {
myProgress.addMessage(myDebuggerSession, MessageCategory.ERROR, e.getMessage());
}
if (e instanceof ProcessCanceledException) {
myProgress.addMessage(myDebuggerSession, MessageCategory.INFORMATION, DebuggerBundle.message("error.operation.canceled"));
return;
}
if (e instanceof UnsupportedOperationException) {
myProgress.addMessage(myDebuggerSession, MessageCategory.ERROR, DebuggerBundle.message("error.operation.not.supported.by.vm"));
}
else if (e instanceof NoClassDefFoundError) {
myProgress.addMessage(myDebuggerSession, MessageCategory.ERROR, DebuggerBundle.message("error.class.def.not.found", e.getLocalizedMessage()));
}
else if (e instanceof VerifyError) {
myProgress.addMessage(myDebuggerSession, MessageCategory.ERROR, DebuggerBundle.message("error.verification.error", e.getLocalizedMessage()));
}
else if (e instanceof UnsupportedClassVersionError) {
myProgress.addMessage(myDebuggerSession, MessageCategory.ERROR, DebuggerBundle.message("error.unsupported.class.version", e.getLocalizedMessage()));
}
else if (e instanceof ClassFormatError) {
myProgress.addMessage(myDebuggerSession, MessageCategory.ERROR, DebuggerBundle.message("error.class.format.error", e.getLocalizedMessage()));
}
else if (e instanceof ClassCircularityError) {
myProgress.addMessage(myDebuggerSession, MessageCategory.ERROR, DebuggerBundle.message("error.class.circularity.error", e.getLocalizedMessage()));
}
else {
myProgress.addMessage(
myDebuggerSession, MessageCategory.ERROR,
DebuggerBundle.message("error.exception.while.reloading", e.getClass().getName(), e.getLocalizedMessage())
);
}
}
public void reloadClasses(final Map<String, HotSwapFile> modifiedClasses) {
DebuggerManagerThreadImpl.assertIsManagerThread();
if (modifiedClasses == null || modifiedClasses.isEmpty()) {
myProgress.addMessage(myDebuggerSession, MessageCategory.INFORMATION, DebuggerBundle.message("status.hotswap.loaded.classes.up.to.date"));
return;
}
final DebugProcessImpl debugProcess = getDebugProcess();
final VirtualMachineProxyImpl virtualMachineProxy = debugProcess.getVirtualMachineProxy();
final Project project = debugProcess.getProject();
final BreakpointManager breakpointManager = (DebuggerManagerEx.getInstanceEx(project)).getBreakpointManager();
breakpointManager.disableBreakpoints(debugProcess);
//virtualMachineProxy.suspend();
try {
RedefineProcessor redefineProcessor = new RedefineProcessor(virtualMachineProxy);
int processedEntriesCount = 0;
for (final Map.Entry<String, HotSwapFile> entry : modifiedClasses.entrySet()) {
// stop if process is finished already
if (debugProcess.isDetached() || debugProcess.isDetaching()) {
break;
}
if (redefineProcessor.getProcessedClassesCount() == 0 && myProgress.isCancelled()) {
// once at least one class has been actually reloaded, do not interrupt the whole process
break;
}
processedEntriesCount++;
final String qualifiedName = entry.getKey();
if (qualifiedName != null) {
myProgress.setText(qualifiedName);
myProgress.setFraction(processedEntriesCount / (double)modifiedClasses.size());
}
try {
redefineProcessor.processClass(qualifiedName, entry.getValue().file);
}
catch (IOException e) {
reportProblem(qualifiedName, e);
}
}
if (redefineProcessor.getProcessedClassesCount() == 0 && myProgress.isCancelled()) {
// once at least one class has been actually reloaded, do not interrupt the whole process
return;
}
redefineProcessor.processPending();
myProgress.setFraction(1);
final int partiallyRedefinedClassesCount = redefineProcessor.getPartiallyRedefinedClassesCount();
if (partiallyRedefinedClassesCount == 0) {
myProgress.addMessage(
myDebuggerSession, MessageCategory.INFORMATION, DebuggerBundle.message("status.classes.reloaded", redefineProcessor.getProcessedClassesCount())
);
}
else {
final String message = DebuggerBundle.message(
"status.classes.not.all.versions.reloaded", partiallyRedefinedClassesCount, redefineProcessor.getProcessedClassesCount()
);
myProgress.addMessage(myDebuggerSession, MessageCategory.WARNING, message);
}
if (LOG.isDebugEnabled()) {
LOG.debug("classes reloaded");
}
}
catch (Throwable e) {
processException(e);
}
DebuggerContextImpl context = myDebuggerSession.getContextManager().getContext();
SuspendContextImpl suspendContext = context.getSuspendContext();
if (suspendContext != null) {
XExecutionStack stack = suspendContext.getActiveExecutionStack();
if (stack != null) {
((JavaExecutionStack)stack).initTopFrame();
}
}
final Semaphore waitSemaphore = new Semaphore();
waitSemaphore.down();
//noinspection SSBasedInspection
SwingUtilities.invokeLater(new Runnable() {
public void run() {
try {
if (!project.isDisposed()) {
final BreakpointManager breakpointManager = (DebuggerManagerEx.getInstanceEx(project)).getBreakpointManager();
breakpointManager.reloadBreakpoints();
debugProcess.getRequestsManager().clearWarnings();
if (LOG.isDebugEnabled()) {
LOG.debug("requests updated");
LOG.debug("time stamp set");
}
myDebuggerSession.refresh(false);
XDebugSession session = myDebuggerSession.getXDebugSession();
if (session != null) {
session.rebuildViews();
}
}
}
catch (Throwable e) {
LOG.error(e);
}
finally {
waitSemaphore.up();
}
}
});
waitSemaphore.waitFor();
if (!project.isDisposed()) {
try {
breakpointManager.enableBreakpoints(debugProcess);
}
catch (Exception e) {
processException(e);
}
}
}
private void reportProblem(final String qualifiedName, @Nullable Exception ex) {
String reason = null;
if (ex != null) {
reason = ex.getLocalizedMessage();
}
if (reason == null || reason.length() == 0) {
reason = DebuggerBundle.message("error.io.error");
}
final StringBuilder buf = StringBuilderSpinAllocator.alloc();
try {
buf.append(qualifiedName).append(" : ").append(reason);
myProgress.addMessage(myDebuggerSession, MessageCategory.ERROR, buf.toString());
}
finally {
StringBuilderSpinAllocator.dispose(buf);
}
}
private static class RedefineProcessor {
/**
* Maximum number of classes redefined in one chunk.
* Chunking keeps memory usage bounded when a large number of classes is reloaded.
*/
private static final int CLASSES_CHUNK_SIZE = 100;
private final VirtualMachineProxyImpl myVirtualMachineProxy;
private final Map<ReferenceType, byte[]> myRedefineMap = new HashMap<>();
private int myProcessedClassesCount;
private int myPartiallyRedefinedClassesCount;
public RedefineProcessor(VirtualMachineProxyImpl virtualMachineProxy) {
myVirtualMachineProxy = virtualMachineProxy;
}
public void processClass(String qualifiedName, File file) throws Throwable {
final List<ReferenceType> vmClasses = myVirtualMachineProxy.classesByName(qualifiedName);
if (vmClasses.isEmpty()) {
return;
}
final byte[] content = FileUtil.loadFileBytes(file);
if (vmClasses.size() == 1) {
myRedefineMap.put(vmClasses.get(0), content);
if (myRedefineMap.size() >= CLASSES_CHUNK_SIZE) {
processChunk();
}
return;
}
int redefinedVersionsCount = 0;
Throwable error = null;
for (ReferenceType vmClass : vmClasses) {
try {
myVirtualMachineProxy.redefineClasses(Collections.singletonMap(vmClass, content));
redefinedVersionsCount++;
}
catch (Throwable t) {
error = t;
}
}
if (redefinedVersionsCount == 0) {
throw error;
}
if (redefinedVersionsCount < vmClasses.size()) {
myPartiallyRedefinedClassesCount++;
}
myProcessedClassesCount++;
}
private void processChunk() throws Throwable {
// reload this portion of classes and clear the map to free memory
try {
myVirtualMachineProxy.redefineClasses(myRedefineMap);
myProcessedClassesCount += myRedefineMap.size();
}
finally {
myRedefineMap.clear();
}
}
public void processPending() throws Throwable {
if (myRedefineMap.size() > 0) {
processChunk();
}
}
public int getProcessedClassesCount() {
return myProcessedClassesCount;
}
public int getPartiallyRedefinedClassesCount() {
return myPartiallyRedefinedClassesCount;
}
}
}
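// Sketch (not part of the original source): the chunk-and-flush pattern that RedefineProcessor
// applies to class redefinition, shown with plain JDK types. The ChunkSink interface and the
// chunk size below are illustrative assumptions, not IntelliJ API.
class ChunkedProcessingExample {
interface ChunkSink {
void flush(Map<String, byte[]> chunk) throws Throwable;
}
private static final int CHUNK_SIZE = 100;
private final Map<String, byte[]> pending = new HashMap<>();
private final ChunkSink sink;
ChunkedProcessingExample(ChunkSink sink) {
this.sink = sink;
}
void add(String name, byte[] content) throws Throwable {
pending.put(name, content);
if (pending.size() >= CHUNK_SIZE) {
flushPending(); // hand off one full chunk and free the accumulated memory
}
}
void finish() throws Throwable {
if (!pending.isEmpty()) {
flushPending(); // flush the remainder, mirroring processPending()
}
}
private void flushPending() throws Throwable {
try {
sink.flush(pending);
}
finally {
pending.clear(); // clear even on failure so memory is released, like processChunk()
}
}
}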
|
|
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.execution.testframework.sm.runner.ui;
import com.intellij.execution.testframework.AbstractTestProxy;
import com.intellij.execution.testframework.TestConsoleProperties;
import com.intellij.execution.testframework.sm.runner.BaseSMTRunnerTestCase;
import com.intellij.execution.testframework.sm.runner.SMTestProxy;
import consulo.disposer.Disposer;
/**
* @author Roman Chernyatchik
*/
public abstract class SMTRunnerUIActionsHandlerTest extends BaseSMTRunnerTestCase {
private MockTestResultsViewer myResultsViewer;
private TestConsoleProperties myProperties;
private SMTRunnerUIActionsHandler myUIActionsHandler;
private AbstractTestProxy mySelectedTestProxy;
@Override
protected void setUp() throws Exception {
super.setUp();
myProperties = createConsoleProperties();
myResultsViewer = new MockTestResultsViewer(myProperties, mySuite) {
@Override
public void selectAndNotify(@javax.annotation.Nullable final AbstractTestProxy proxy) {
super.selectAndNotify(proxy);
mySelectedTestProxy = proxy;
}
};
myUIActionsHandler = new SMTRunnerUIActionsHandler(myProperties);
TestConsoleProperties.HIDE_PASSED_TESTS.set(myProperties, false);
TestConsoleProperties.OPEN_FAILURE_LINE.set(myProperties, false);
TestConsoleProperties.SCROLL_TO_SOURCE.set(myProperties, false);
TestConsoleProperties.SELECT_FIRST_DEFECT.set(myProperties, false);
TestConsoleProperties.TRACK_RUNNING_TEST.set(myProperties, false);
}
@Override
protected void tearDown() throws Exception {
Disposer.dispose(myResultsViewer);
super.tearDown();
}
public void testSelectFirstDeffect_Failed() {
TestConsoleProperties.SELECT_FIRST_DEFECT.set(myProperties, true);
mySuite.setStarted();
final SMTestProxy testsSuite = createSuiteProxy("my suite", mySuite);
testsSuite.setStarted();
// passed test
final SMTestProxy testPassed1 = createTestProxy("testPassed1", testsSuite);
testPassed1.setStarted();
//failed test
final SMTestProxy testFailed1 = createTestProxy("testFailed1", testsSuite);
testFailed1.setStarted();
myUIActionsHandler.onTestNodeAdded(myResultsViewer, testFailed1);
assertNull(mySelectedTestProxy);
testFailed1.setTestFailed("", "", false);
//myUIActionsHandler.onTestFinished(testFailed1);
assertNull(mySelectedTestProxy);
// passed test number 2
mySelectedTestProxy = null;
final SMTestProxy testPassed2 = createTestProxy("testPassed2", testsSuite);
testPassed2.setStarted();
myUIActionsHandler.onTestNodeAdded(myResultsViewer, testPassed2);
assertNull(mySelectedTestProxy);
testPassed2.setFinished();
//myUIActionsHandler.onTestFinished(testPassed2);
assertNull(mySelectedTestProxy);
//failed test 2
final SMTestProxy testFailed2 = createTestProxy("testFailed1", testsSuite);
testFailed2.setStarted();
myUIActionsHandler.onTestNodeAdded(myResultsViewer, testFailed2);
assertNull(mySelectedTestProxy);
testFailed2.setTestFailed("", "", false);
//myUIActionsHandler.onTestFinished(testFailed2);
assertNull(mySelectedTestProxy);
// finish suite
testsSuite.setFinished();
assertNull(mySelectedTestProxy);
//testing finished
mySuite.setFinished();
assertNull(mySelectedTestProxy);
myUIActionsHandler.onTestingFinished(myResultsViewer);
assertEquals(testFailed1, mySelectedTestProxy);
}
public void testSelectFirstDeffect_Error() {
TestConsoleProperties.SELECT_FIRST_DEFECT.set(myProperties, true);
mySuite.setStarted();
final SMTestProxy testsSuite = createSuiteProxy("my suite", mySuite);
testsSuite.setStarted();
// passed test
final SMTestProxy testPassed1 = createTestProxy("testPassed1", testsSuite);
testPassed1.setStarted();
//failed test
final SMTestProxy testError = createTestProxy("testError", testsSuite);
testError.setStarted();
myUIActionsHandler.onTestNodeAdded(myResultsViewer, testError);
assertNull(mySelectedTestProxy);
testError.setTestFailed("", "", true);
//myUIActionsHandler.onTestFinished(testFailed1);
assertNull(mySelectedTestProxy);
// passed test number 2
mySelectedTestProxy = null;
final SMTestProxy testPassed2 = createTestProxy("testPassed2", testsSuite);
testPassed2.setStarted();
myUIActionsHandler.onTestNodeAdded(myResultsViewer, testPassed2);
assertNull(mySelectedTestProxy);
testPassed2.setFinished();
//myUIActionsHandler.onTestFinished(testPassed2);
assertNull(mySelectedTestProxy);
//failed test
final SMTestProxy testFailed2 = createTestProxy("testFailed1", testsSuite);
testFailed2.setStarted();
myUIActionsHandler.onTestNodeAdded(myResultsViewer, testFailed2);
assertNull(mySelectedTestProxy);
testFailed2.setTestFailed("", "", false);
//myUIActionsHandler.onTestFinished(testFailed2);
assertNull(mySelectedTestProxy);
// finish suite
testsSuite.setFinished();
assertNull(mySelectedTestProxy);
//testing finished
mySuite.setFinished();
assertNull(mySelectedTestProxy);
myUIActionsHandler.onTestingFinished(myResultsViewer);
assertEquals(testError, mySelectedTestProxy);
}
public void testSelectFirstDefect_Priority_Error() {
// Priority: error -> failure
TestConsoleProperties.SELECT_FIRST_DEFECT.set(myProperties, true);
mySuite.setStarted();
final SMTestProxy testsSuite = createSuiteProxy("my suite", mySuite);
testsSuite.setStarted();
// pending test
final SMTestProxy testPending = createTestProxy("testPending", testsSuite);
testPending.setStarted();
myUIActionsHandler.onTestNodeAdded(myResultsViewer, testPending);
testPending.setTestIgnored("", "");
//failed test
final SMTestProxy testFailed = createTestProxy("testFailed", testsSuite);
testFailed.setStarted();
myUIActionsHandler.onTestNodeAdded(myResultsViewer, testFailed);
testFailed.setTestFailed("", "", false);
//error test
final SMTestProxy testError = createTestProxy("testError", testsSuite);
testError.setStarted();
myUIActionsHandler.onTestNodeAdded(myResultsViewer, testError);
testError.setTestFailed("", "", true);
// Second error test just to check that first failed will be selected
final SMTestProxy testError2 = createTestProxy("testError2", testsSuite);
testError2.setStarted();
myUIActionsHandler.onTestNodeAdded(myResultsViewer, testError2);
testError2.setTestFailed("", "", true);
// finish suite
testsSuite.setFinished();
assertNull(mySelectedTestProxy);
//testing finished
mySuite.setFinished();
assertNull(mySelectedTestProxy);
myUIActionsHandler.onTestingFinished(myResultsViewer);
assertEquals(testError, mySelectedTestProxy);
}
public void testSelectFirstDefect_Priority_Failure() {
// Priority: error -> failure
TestConsoleProperties.SELECT_FIRST_DEFECT.set(myProperties, true);
mySuite.setStarted();
final SMTestProxy testsSuite = createSuiteProxy("my suite", mySuite);
testsSuite.setStarted();
// pending test
final SMTestProxy testPending = createTestProxy("testPending", testsSuite);
testPending.setStarted();
myUIActionsHandler.onTestNodeAdded(myResultsViewer, testPending);
testPending.setTestIgnored("", "");
//failed test
final SMTestProxy testFailed = createTestProxy("testFailed", testsSuite);
testFailed.setStarted();
myUIActionsHandler.onTestNodeAdded(myResultsViewer, testFailed);
testFailed.setTestFailed("", "", false);
// Second failed test just to check that first failed will be selected
final SMTestProxy testFailed2 = createTestProxy("testFailed2", testsSuite);
testFailed2.setStarted();
myUIActionsHandler.onTestNodeAdded(myResultsViewer, testFailed2);
testFailed2.setTestFailed("", "", false);
// finish suite
testsSuite.setFinished();
assertNull(mySelectedTestProxy);
//testing finished
mySuite.setFinished();
assertNull(mySelectedTestProxy);
myUIActionsHandler.onTestingFinished(myResultsViewer);
assertEquals(testFailed, mySelectedTestProxy);
}
public void testSelectFirstDefect_Priority_Pending() {
// Priority: error -> failure
TestConsoleProperties.SELECT_FIRST_DEFECT.set(myProperties, true);
mySuite.setStarted();
final SMTestProxy testsSuite = createSuiteProxy("my suite", mySuite);
testsSuite.setStarted();
// pending test
final SMTestProxy testPending = createTestProxy("testPending", testsSuite);
testPending.setStarted();
myUIActionsHandler.onTestNodeAdded(myResultsViewer, testPending);
testPending.setTestIgnored("", "");
// finish suite
testsSuite.setFinished();
assertNull(mySelectedTestProxy);
//testing finished
mySuite.setFinished();
assertNull(mySelectedTestProxy);
myUIActionsHandler.onTestingFinished(myResultsViewer);
// pending tests shouldn't be considered as errors/failures
assertNull(mySelectedTestProxy);
}
public void testTrackRunningTest() {
TestConsoleProperties.TRACK_RUNNING_TEST.set(myProperties, true);
mySuite.setStarted();
final SMTestProxy testsSuite = createSuiteProxy("my suite", mySuite);
testsSuite.setStarted();
assertNull(mySelectedTestProxy);
// passed test
final SMTestProxy testPassed1 = createTestProxy("testPassed1", testsSuite);
testPassed1.setStarted();
myUIActionsHandler.onTestNodeAdded(myResultsViewer, testPassed1);
assertEquals(testPassed1, mySelectedTestProxy);
testPassed1.setFinished();
//myUIActionsHandler.onTestFinished(testPassed1);
assertEquals(testPassed1, mySelectedTestProxy);
//failed test
final SMTestProxy testFailed1 = createTestProxy("testFailed1", testsSuite);
testFailed1.setStarted();
myUIActionsHandler.onTestNodeAdded(myResultsViewer, testFailed1);
assertEquals(testFailed1, mySelectedTestProxy);
testFailed1.setTestFailed("", "", false);
//myUIActionsHandler.onTestFinished(testFailed1);
assertEquals(testFailed1, mySelectedTestProxy);
//error test
final SMTestProxy testError = createTestProxy("testError", testsSuite);
testError.setStarted();
myUIActionsHandler.onTestNodeAdded(myResultsViewer, testError);
assertEquals(testError, mySelectedTestProxy);
testError.setTestFailed("", "", true);
//myUIActionsHandler.onTestFinished(testError);
assertEquals(testError, mySelectedTestProxy);
//terminated test
final SMTestProxy testTerminated = createTestProxy("testTerimated", testsSuite);
testTerminated.setStarted();
myUIActionsHandler.onTestNodeAdded(myResultsViewer, testTerminated);
assertEquals(testTerminated, mySelectedTestProxy);
testTerminated.setTerminated();
//myUIActionsHandler.onTestFinished(testError);
assertEquals(testTerminated, mySelectedTestProxy);
// passed test number 2
mySelectedTestProxy = null;
final SMTestProxy testPassed2 = createTestProxy("testPassed2", testsSuite);
testPassed2.setStarted();
myUIActionsHandler.onTestNodeAdded(myResultsViewer, testPassed2);
assertEquals(testPassed2, mySelectedTestProxy);
testPassed2.setFinished();
//myUIActionsHandler.onTestFinished(testPassed2);
assertEquals(testPassed2, mySelectedTestProxy);
//failed test 2
final SMTestProxy testFailed2 = createTestProxy("testFailed2", testsSuite);
testFailed2.setStarted();
myUIActionsHandler.onTestNodeAdded(myResultsViewer, testFailed2);
assertEquals(testFailed2, mySelectedTestProxy);
final SMTestProxy lastSelectedTest = testFailed2;
testFailed2.setTestFailed("", "", false);
//myUIActionsHandler.onTestFinished(testFailed2);
assertEquals(lastSelectedTest, mySelectedTestProxy);
// finish suite
testsSuite.setFinished();
assertEquals(lastSelectedTest, mySelectedTestProxy);
// root suite finished
mySuite.setFinished();
assertEquals(lastSelectedTest, mySelectedTestProxy);
//testing finished
myUIActionsHandler.onTestingFinished(myResultsViewer);
assertEquals(lastSelectedTest, mySelectedTestProxy);
}
}
|
|
package com.google.api.ads.dfp.jaxws.v201511;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;
/**
*
* {@link ProposalLineItem Proposal line items} are created from products,
* from which their properties are copied.
*
*
* <p>Java class for Product complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="Product">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="name" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="status" type="{https://www.google.com/apis/ads/publisher/v201511}ProductStatus" minOccurs="0"/>
* <element name="productType" type="{https://www.google.com/apis/ads/publisher/v201511}ProductType" minOccurs="0"/>
* <element name="productTemplateId" type="{http://www.w3.org/2001/XMLSchema}long" minOccurs="0"/>
* <element name="id" type="{http://www.w3.org/2001/XMLSchema}long" minOccurs="0"/>
* <element name="notes" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="productTemplateDescription" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="lastModifiedDateTime" type="{https://www.google.com/apis/ads/publisher/v201511}DateTime" minOccurs="0"/>
* <element name="rateType" type="{https://www.google.com/apis/ads/publisher/v201511}RateType" minOccurs="0"/>
* <element name="roadblockingType" type="{https://www.google.com/apis/ads/publisher/v201511}RoadblockingType" minOccurs="0"/>
* <element name="deliveryRateType" type="{https://www.google.com/apis/ads/publisher/v201511}DeliveryRateType" minOccurs="0"/>
* <element name="creativeRotationType" type="{https://www.google.com/apis/ads/publisher/v201511}CreativeRotationType" minOccurs="0"/>
* <element name="companionDeliveryOption" type="{https://www.google.com/apis/ads/publisher/v201511}CompanionDeliveryOption" minOccurs="0"/>
* <element name="creativePlaceholders" type="{https://www.google.com/apis/ads/publisher/v201511}CreativePlaceholder" maxOccurs="unbounded" minOccurs="0"/>
* <element name="lineItemType" type="{https://www.google.com/apis/ads/publisher/v201511}LineItemType" minOccurs="0"/>
* <element name="priority" type="{http://www.w3.org/2001/XMLSchema}int" minOccurs="0"/>
* <element name="frequencyCaps" type="{https://www.google.com/apis/ads/publisher/v201511}FrequencyCap" maxOccurs="unbounded" minOccurs="0"/>
* <element name="builtInTargeting" type="{https://www.google.com/apis/ads/publisher/v201511}Targeting" minOccurs="0"/>
* <element name="customizableAttributes" type="{https://www.google.com/apis/ads/publisher/v201511}CustomizableAttributes" minOccurs="0"/>
* <element name="customFieldValues" type="{https://www.google.com/apis/ads/publisher/v201511}BaseCustomFieldValue" maxOccurs="unbounded" minOccurs="0"/>
* <element name="videoMaxDuration" type="{http://www.w3.org/2001/XMLSchema}long" minOccurs="0"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "Product", propOrder = {
"name",
"status",
"productType",
"productTemplateId",
"id",
"notes",
"productTemplateDescription",
"lastModifiedDateTime",
"rateType",
"roadblockingType",
"deliveryRateType",
"creativeRotationType",
"companionDeliveryOption",
"creativePlaceholders",
"lineItemType",
"priority",
"frequencyCaps",
"builtInTargeting",
"customizableAttributes",
"customFieldValues",
"videoMaxDuration"
})
public class Product {
protected String name;
@XmlSchemaType(name = "string")
protected ProductStatus status;
@XmlSchemaType(name = "string")
protected ProductType productType;
protected Long productTemplateId;
protected Long id;
protected String notes;
protected String productTemplateDescription;
protected DateTime lastModifiedDateTime;
@XmlSchemaType(name = "string")
protected RateType rateType;
@XmlSchemaType(name = "string")
protected RoadblockingType roadblockingType;
@XmlSchemaType(name = "string")
protected DeliveryRateType deliveryRateType;
@XmlSchemaType(name = "string")
protected CreativeRotationType creativeRotationType;
@XmlSchemaType(name = "string")
protected CompanionDeliveryOption companionDeliveryOption;
protected List<CreativePlaceholder> creativePlaceholders;
@XmlSchemaType(name = "string")
protected LineItemType lineItemType;
protected Integer priority;
protected List<FrequencyCap> frequencyCaps;
protected Targeting builtInTargeting;
protected CustomizableAttributes customizableAttributes;
protected List<BaseCustomFieldValue> customFieldValues;
protected Long videoMaxDuration;
/**
* Gets the value of the name property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getName() {
return name;
}
/**
* Sets the value of the name property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setName(String value) {
this.name = value;
}
/**
* Gets the value of the status property.
*
* @return
* possible object is
* {@link ProductStatus }
*
*/
public ProductStatus getStatus() {
return status;
}
/**
* Sets the value of the status property.
*
* @param value
* allowed object is
* {@link ProductStatus }
*
*/
public void setStatus(ProductStatus value) {
this.status = value;
}
/**
* Gets the value of the productType property.
*
* @return
* possible object is
* {@link ProductType }
*
*/
public ProductType getProductType() {
return productType;
}
/**
* Sets the value of the productType property.
*
* @param value
* allowed object is
* {@link ProductType }
*
*/
public void setProductType(ProductType value) {
this.productType = value;
}
/**
* Gets the value of the productTemplateId property.
*
* @return
* possible object is
* {@link Long }
*
*/
public Long getProductTemplateId() {
return productTemplateId;
}
/**
* Sets the value of the productTemplateId property.
*
* @param value
* allowed object is
* {@link Long }
*
*/
public void setProductTemplateId(Long value) {
this.productTemplateId = value;
}
/**
* Gets the value of the id property.
*
* @return
* possible object is
* {@link Long }
*
*/
public Long getId() {
return id;
}
/**
* Sets the value of the id property.
*
* @param value
* allowed object is
* {@link Long }
*
*/
public void setId(Long value) {
this.id = value;
}
/**
* Gets the value of the notes property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getNotes() {
return notes;
}
/**
* Sets the value of the notes property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setNotes(String value) {
this.notes = value;
}
/**
* Gets the value of the productTemplateDescription property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getProductTemplateDescription() {
return productTemplateDescription;
}
/**
* Sets the value of the productTemplateDescription property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setProductTemplateDescription(String value) {
this.productTemplateDescription = value;
}
/**
* Gets the value of the lastModifiedDateTime property.
*
* @return
* possible object is
* {@link DateTime }
*
*/
public DateTime getLastModifiedDateTime() {
return lastModifiedDateTime;
}
/**
* Sets the value of the lastModifiedDateTime property.
*
* @param value
* allowed object is
* {@link DateTime }
*
*/
public void setLastModifiedDateTime(DateTime value) {
this.lastModifiedDateTime = value;
}
/**
* Gets the value of the rateType property.
*
* @return
* possible object is
* {@link RateType }
*
*/
public RateType getRateType() {
return rateType;
}
/**
* Sets the value of the rateType property.
*
* @param value
* allowed object is
* {@link RateType }
*
*/
public void setRateType(RateType value) {
this.rateType = value;
}
/**
* Gets the value of the roadblockingType property.
*
* @return
* possible object is
* {@link RoadblockingType }
*
*/
public RoadblockingType getRoadblockingType() {
return roadblockingType;
}
/**
* Sets the value of the roadblockingType property.
*
* @param value
* allowed object is
* {@link RoadblockingType }
*
*/
public void setRoadblockingType(RoadblockingType value) {
this.roadblockingType = value;
}
/**
* Gets the value of the deliveryRateType property.
*
* @return
* possible object is
* {@link DeliveryRateType }
*
*/
public DeliveryRateType getDeliveryRateType() {
return deliveryRateType;
}
/**
* Sets the value of the deliveryRateType property.
*
* @param value
* allowed object is
* {@link DeliveryRateType }
*
*/
public void setDeliveryRateType(DeliveryRateType value) {
this.deliveryRateType = value;
}
/**
* Gets the value of the creativeRotationType property.
*
* @return
* possible object is
* {@link CreativeRotationType }
*
*/
public CreativeRotationType getCreativeRotationType() {
return creativeRotationType;
}
/**
* Sets the value of the creativeRotationType property.
*
* @param value
* allowed object is
* {@link CreativeRotationType }
*
*/
public void setCreativeRotationType(CreativeRotationType value) {
this.creativeRotationType = value;
}
/**
* Gets the value of the companionDeliveryOption property.
*
* @return
* possible object is
* {@link CompanionDeliveryOption }
*
*/
public CompanionDeliveryOption getCompanionDeliveryOption() {
return companionDeliveryOption;
}
/**
* Sets the value of the companionDeliveryOption property.
*
* @param value
* allowed object is
* {@link CompanionDeliveryOption }
*
*/
public void setCompanionDeliveryOption(CompanionDeliveryOption value) {
this.companionDeliveryOption = value;
}
/**
* Gets the value of the creativePlaceholders property.
*
* <p>
* This accessor method returns a reference to the live list,
* not a snapshot. Therefore any modification you make to the
* returned list will be present inside the JAXB object.
* This is why there is not a <CODE>set</CODE> method for the creativePlaceholders property.
*
* <p>
* For example, to add a new item, do as follows:
* <pre>
* getCreativePlaceholders().add(newItem);
* </pre>
*
*
* <p>
* Objects of the following type(s) are allowed in the list
* {@link CreativePlaceholder }
*
*
*/
public List<CreativePlaceholder> getCreativePlaceholders() {
if (creativePlaceholders == null) {
creativePlaceholders = new ArrayList<CreativePlaceholder>();
}
return this.creativePlaceholders;
}
/**
* Gets the value of the lineItemType property.
*
* @return
* possible object is
* {@link LineItemType }
*
*/
public LineItemType getLineItemType() {
return lineItemType;
}
/**
* Sets the value of the lineItemType property.
*
* @param value
* allowed object is
* {@link LineItemType }
*
*/
public void setLineItemType(LineItemType value) {
this.lineItemType = value;
}
/**
* Gets the value of the priority property.
*
* @return
* possible object is
* {@link Integer }
*
*/
public Integer getPriority() {
return priority;
}
/**
* Sets the value of the priority property.
*
* @param value
* allowed object is
* {@link Integer }
*
*/
public void setPriority(Integer value) {
this.priority = value;
}
/**
* Gets the value of the frequencyCaps property.
*
* <p>
* This accessor method returns a reference to the live list,
* not a snapshot. Therefore any modification you make to the
* returned list will be present inside the JAXB object.
* This is why there is not a <CODE>set</CODE> method for the frequencyCaps property.
*
* <p>
* For example, to add a new item, do as follows:
* <pre>
* getFrequencyCaps().add(newItem);
* </pre>
*
*
* <p>
* Objects of the following type(s) are allowed in the list
* {@link FrequencyCap }
*
*
*/
public List<FrequencyCap> getFrequencyCaps() {
if (frequencyCaps == null) {
frequencyCaps = new ArrayList<FrequencyCap>();
}
return this.frequencyCaps;
}
/**
* Gets the value of the builtInTargeting property.
*
* @return
* possible object is
* {@link Targeting }
*
*/
public Targeting getBuiltInTargeting() {
return builtInTargeting;
}
/**
* Sets the value of the builtInTargeting property.
*
* @param value
* allowed object is
* {@link Targeting }
*
*/
public void setBuiltInTargeting(Targeting value) {
this.builtInTargeting = value;
}
/**
* Gets the value of the customizableAttributes property.
*
* @return
* possible object is
* {@link CustomizableAttributes }
*
*/
public CustomizableAttributes getCustomizableAttributes() {
return customizableAttributes;
}
/**
* Sets the value of the customizableAttributes property.
*
* @param value
* allowed object is
* {@link CustomizableAttributes }
*
*/
public void setCustomizableAttributes(CustomizableAttributes value) {
this.customizableAttributes = value;
}
/**
* Gets the value of the customFieldValues property.
*
* <p>
* This accessor method returns a reference to the live list,
* not a snapshot. Therefore any modification you make to the
* returned list will be present inside the JAXB object.
* This is why there is not a <CODE>set</CODE> method for the customFieldValues property.
*
* <p>
* For example, to add a new item, do as follows:
* <pre>
* getCustomFieldValues().add(newItem);
* </pre>
*
*
* <p>
* Objects of the following type(s) are allowed in the list
* {@link BaseCustomFieldValue }
*
*
*/
public List<BaseCustomFieldValue> getCustomFieldValues() {
if (customFieldValues == null) {
customFieldValues = new ArrayList<BaseCustomFieldValue>();
}
return this.customFieldValues;
}
/**
* Gets the value of the videoMaxDuration property.
*
* @return
* possible object is
* {@link Long }
*
*/
public Long getVideoMaxDuration() {
return videoMaxDuration;
}
/**
* Sets the value of the videoMaxDuration property.
*
* @param value
* allowed object is
* {@link Long }
*
*/
public void setVideoMaxDuration(Long value) {
this.videoMaxDuration = value;
}
}
|
|
/*
* ModeShape (http://www.modeshape.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.modeshape.jcr.index.local;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.LinkedList;
import java.util.Map;
import javax.jcr.query.qom.Constraint;
import javax.jcr.query.qom.JoinCondition;
import org.junit.Before;
import org.mapdb.BTreeKeySerializer;
import org.mapdb.DB;
import org.mapdb.DBMaker;
import org.mapdb.Serializer;
import org.modeshape.jcr.ExecutionContext;
import org.modeshape.jcr.api.query.qom.Operator;
import org.modeshape.jcr.cache.NodeKey;
import org.modeshape.jcr.index.local.IndexValues.Converter;
import org.modeshape.jcr.index.local.MapDB.Serializers;
import org.modeshape.jcr.query.model.Comparison;
import org.modeshape.jcr.query.model.DynamicOperand;
import org.modeshape.jcr.query.model.Literal;
import org.modeshape.jcr.query.model.PropertyValue;
import org.modeshape.jcr.query.model.SelectorName;
import org.modeshape.jcr.query.model.StaticOperand;
import org.modeshape.jcr.spi.index.IndexConstraints;
import org.modeshape.jcr.spi.index.provider.Filter;
import org.modeshape.jcr.value.PropertyType;
import org.modeshape.jcr.value.ValueFactories;
import org.modeshape.jcr.value.ValueFactory;
public abstract class AbstractLocalIndexTest {
protected Serializers serializers;
protected ExecutionContext context;
protected DB db;
protected String propertyName = "indexedProperty";
@Before
public void beforeEach() {
context = new ExecutionContext();
db = DBMaker.newMemoryDB().make();
serializers = MapDB.serializers(context.getValueFactories());
}
protected void loadLongIndex( LocalUniqueIndex<Long> index,
int numValues ) {
for (int i = 1; i <= numValues; ++i) {
index.add(key(i), "test", (long)(i * 10));
}
}
protected void loadStringIndex( LocalUniqueIndex<String> index,
int numValues ) {
for (int i = 1; i <= numValues; ++i) {
index.add(key(i), "test", "" + (i * 10));
}
}
protected void loadLongIndexWithNoDuplicates( LocalDuplicateIndex<Long> index,
int numValues ) {
for (int i = 1; i <= numValues; ++i) {
index.add(key(i), "test", (long)(i * 10));
}
}
protected void loadStringIndexWithNoDuplicates( LocalDuplicateIndex<String> index,
int numValues ) {
for (int i = 1; i <= numValues; ++i) {
index.add(key(i), "test", "" + (i * 10));
}
}
@SuppressWarnings( "unchecked" )
protected <T> LocalUniqueIndex<T> uniqueValueIndex( Class<T> valueType ) {
PropertyType type = PropertyType.discoverType(valueType);
ValueFactory<T> valueFactory = (ValueFactory<T>)context.getValueFactories().getValueFactory(type);
Converter<T> converter = IndexValues.converter(valueFactory);
Serializer<T> serializer = (Serializer<T>)serializers.serializerFor(type.getValueClass());
BTreeKeySerializer<T> keySerializer = (BTreeKeySerializer<T>)serializers.bTreeKeySerializerFor(type.getValueClass(),
type.getComparator(),
false);
return new LocalUniqueIndex<T>("myIndex", "myWorkspace", db, converter, keySerializer, serializer);
}
@SuppressWarnings( "unchecked" )
protected <T> LocalDuplicateIndex<T> duplicateValueIndex( Class<T> valueType ) {
PropertyType type = PropertyType.discoverType(valueType);
Comparator<T> comparator = (Comparator<T>)type.getComparator();
ValueFactory<T> valueFactory = (ValueFactory<T>)context.getValueFactories().getValueFactory(type);
Converter<T> converter = IndexValues.converter(valueFactory);
Serializer<T> serializer = (Serializer<T>)serializers.serializerFor(type.getValueClass());
return new LocalDuplicateIndex<T>("myIndex", "myWorkspace", db, converter, serializer, comparator);
}
public <T> void assertNoMatch( LocalUniqueIndex<T> index,
Operator op,
T value ) {
assertMatch(index, op, value, new String[] {});
}
public <T> void assertNoMatch( LocalDuplicateIndex<T> index,
Operator op,
T value ) {
assertMatch(index, op, value, new String[] {});
}
public <T> void assertMatch( LocalUniqueIndex<T> index,
Operator op,
T value,
String... keys ) {
assertMatch(index, op, value, keyList(keys));
}
public <T> void assertMatch( LocalUniqueIndex<T> index,
Operator op,
T value,
int... keys ) {
assertMatch(index, op, value, keyList(keys));
}
public <T> void assertMatch( LocalUniqueIndex<T> index,
Operator op,
T value,
LinkedList<String> expectedValues ) {
Filter.Results results = index.filter(constraints(propertyName, op, value), -1);
validateResults(expectedValues, results);
assertTrue("Not all expected values were found in results: " + expectedValues, expectedValues.isEmpty());
}
public <T> void assertMatch( LocalDuplicateIndex<T> index,
Operator op,
T value,
String... keys ) {
assertMatch(index, op, value, keyList(keys));
}
public <T> void assertMatch( LocalDuplicateIndex<T> index,
Operator op,
T value,
int... keys ) {
assertMatch(index, op, value, keyList(keys));
}
public <T> void assertMatch( LocalDuplicateIndex<T> index,
Operator op,
T value,
LinkedList<String> expectedValues ) {
Filter.Results results = index.filter(constraints(propertyName, op, value), -1);
validateResults(expectedValues, results);
assertTrue("Not all expected values were found in results: " + expectedValues, expectedValues.isEmpty());
}
protected void validateResults(LinkedList<String> expectedValues, Filter.Results results) {
Filter.ResultBatch batch;
while ((batch = results.getNextBatch(Integer.MAX_VALUE)).size() > 0) {
for (NodeKey actual : batch.keys()) {
assertTrue("Got actual result '" + actual + "' but expected nothing", !expectedValues.isEmpty());
assertThat(actual, is(nodeKey(expectedValues.removeFirst())));
}
}
}
protected static SelectorName selector() {
return selector("selectorA");
}
protected static SelectorName selector( String name ) {
return new SelectorName(name);
}
protected <T> IndexConstraints constraints( String propertyName,
Operator op,
Object literalValue ) {
DynamicOperand dynOp = new PropertyValue(selector(), propertyName);
StaticOperand statOp = new Literal(literalValue);
return constraints(new Comparison(dynOp, op, statOp));
}
protected IndexConstraints constraints( final Constraint comparison ) {
return new IndexConstraints() {
@Override
public Collection<Constraint> getConstraints() {
return Collections.singletonList(comparison);
}
@Override
public Map<String, Object> getParameters() {
return Collections.emptyMap();
}
@Override
public ValueFactories getValueFactories() {
return context.getValueFactories();
}
@Override
public Map<String, Object> getVariables() {
return Collections.emptyMap();
}
@Override
public boolean hasConstraints() {
return true;
}
@Override
public Collection<JoinCondition> getJoinConditions() {
return Collections.emptyList();
}
};
}
protected LinkedList<String> keyList( int... keys ) {
LinkedList<String> expected = new LinkedList<String>();
for (int i = 0; i != keys.length; ++i) {
expected.add(key(keys[i]));
}
return expected;
}
protected LinkedList<String> keyList( String... keys ) {
LinkedList<String> expected = new LinkedList<String>();
for (int i = 0; i != keys.length; ++i) {
expected.add(keys[i]);
}
return expected;
}
private static final String NODE_KEY_PREFIX = "12345671234567-";
protected static String key( int value ) {
return NODE_KEY_PREFIX + value;
}
protected static String key( String value ) {
return NODE_KEY_PREFIX + value;
}
protected static NodeKey nodeKey( String value ) {
return new NodeKey(value);
}
}
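/*
 * A minimal sketch (not part of the original sources) of how a concrete test could
 * use the helpers above: build a unique Long index, load it via loadLongIndex(), and
 * check which node keys a comparison returns. The class name is hypothetical, and
 * Operator.EQUAL_TO is assumed to exist on the ModeShape Operator enum imported above.
 */
class ExampleLongIndexTest extends AbstractLocalIndexTest {
    @org.junit.Test
    public void shouldMatchSingleUniqueValue() {
        LocalUniqueIndex<Long> index = uniqueValueIndex(Long.class);
        // keys key(1)..key(5) are stored with values 10, 20, 30, 40, 50
        loadLongIndex(index, 5);
        // value 30 was written for key(3), so an equality comparison should return only that key
        assertMatch(index, Operator.EQUAL_TO, 30L, 3);
        // nothing stores 35, so the same comparison should return no keys
        assertNoMatch(index, Operator.EQUAL_TO, 35L);
    }
}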
|
|
package org.motechproject.nms.kilkari.utils;
import org.motechproject.nms.kilkari.contract.AnmAshaRecord;
import org.motechproject.nms.kilkari.contract.ChildRecord;
import org.motechproject.nms.kilkari.contract.MotherRecord;
import org.motechproject.nms.kilkari.contract.RchAnmAshaRecord;
import org.motechproject.nms.kilkari.contract.RchChildRecord;
import org.motechproject.nms.kilkari.contract.RchMotherRecord;
import org.motechproject.nms.kilkari.domain.MctsMother;
import org.motechproject.nms.rejectionhandler.domain.ChildImportRejection;
import org.motechproject.nms.rejectionhandler.domain.FlwImportRejection;
import org.motechproject.nms.rejectionhandler.domain.MotherImportRejection;
import java.util.Map;
public final class RejectedObjectConverter {
private RejectedObjectConverter() {
}
public static FlwImportRejection flwRejectionRch(RchAnmAshaRecord record, Boolean accepted, String rejectionReason, String action) {
FlwImportRejection flwImportRejection = new FlwImportRejection();
flwImportRejection.setStateId(record.getStateId());
flwImportRejection.setDistrictId(record.getDistrictId());
flwImportRejection.setDistrictName(record.getDistrictName());
flwImportRejection.setMsisdn(record.getMobileNo());
flwImportRejection.setGfName(record.getGfName());
flwImportRejection.setType(record.getGfType());
flwImportRejection.setGfStatus(record.getGfStatus());
flwImportRejection.setExecDate(record.getExecDate());
flwImportRejection.setSource("RCH-Import");
flwImportRejection.setAccepted(accepted);
flwImportRejection.setTalukaId(record.getTalukaId());
flwImportRejection.setHealthBlockId(record.getHealthBlockId());
flwImportRejection.setHealthBlockName(record.getHealthBlockName());
flwImportRejection.setPhcId(record.getPhcId());
flwImportRejection.setPhcName(record.getPhcName());
flwImportRejection.setSubcentreId(record.getSubCentreId());
flwImportRejection.setSubcentreName(record.getSubCentreName());
flwImportRejection.setVillageId(record.getVillageId());
flwImportRejection.setVillageName(record.getVillageName());
flwImportRejection.setFlwId(record.getGfId());
flwImportRejection.setRejectionReason(rejectionReason);
flwImportRejection.setAction(action);
return flwImportRejection;
}
public static FlwImportRejection flwRejectionMcts(AnmAshaRecord record, Boolean accepted, String rejectionReason, String action) { //NOPMD NcssMethodCount
FlwImportRejection flwImportRejection = new FlwImportRejection();
flwImportRejection.setStateId(record.getStateId());
flwImportRejection.setDistrictId(record.getDistrictId());
flwImportRejection.setDistrictName(record.getDistrictName());
flwImportRejection.setTalukaId(record.getTalukaId());
flwImportRejection.setHealthBlockId(record.getHealthBlockId());
flwImportRejection.setHealthBlockName(record.getHealthBlockName());
flwImportRejection.setPhcId(record.getPhcId());
flwImportRejection.setPhcName(record.getPhcName());
flwImportRejection.setSubcentreId(record.getSubCentreId());
flwImportRejection.setSubcentreName(record.getSubCentreName());
flwImportRejection.setVillageId(record.getVillageId());
flwImportRejection.setVillageName(record.getVillageName());
flwImportRejection.setFlwId(record.getId());
flwImportRejection.setMsisdn(record.getContactNo());
flwImportRejection.setGfName(record.getName());
flwImportRejection.setType(record.getType());
flwImportRejection.setGfStatus(record.getGfStatus());
flwImportRejection.setRegDate(record.getRegDate());
flwImportRejection.setSex(record.getSex());
if (record.getSmsReply() != null && record.getSmsReply().length() > 255) {
flwImportRejection.setSmsReply(record.getSmsReply().substring(0, 255));
} else {
flwImportRejection.setSmsReply(record.getSmsReply());
}
flwImportRejection.setAadharNo(record.getAadharNo());
flwImportRejection.setCreatedOn(record.getCreatedOn());
flwImportRejection.setUpdatedOn(record.getUpdatedOn());
flwImportRejection.setBankId(record.getBankId());
flwImportRejection.setBranchName(record.getBranchName());
flwImportRejection.setIfscIdCode(record.getIfscIdCode());
flwImportRejection.setBankName(record.getBankName());
flwImportRejection.setAccountNumber(record.getAccNo());
flwImportRejection.setAadharLinked(record.getIsAadharLinked());
flwImportRejection.setVerifyDate(record.getVerifyDate());
flwImportRejection.setVerifierName(record.getVerifierName());
flwImportRejection.setVerifierId(record.getVerifierId());
flwImportRejection.setCallAns(record.getCallAns());
flwImportRejection.setPhoneNoCorrect(record.getIsPhoneNoCorrect());
flwImportRejection.setNoCallReason(record.getNoCallReason());
flwImportRejection.setNoPhoneReason(record.getNoPhoneReason());
flwImportRejection.setVerifierRemarks(record.getVerifierRemarks());
flwImportRejection.setGfAddress(record.getGfAddress());
flwImportRejection.setHusbandName(record.getHusbandName());
flwImportRejection.setMddsStateId(record.getMddsStateId());
flwImportRejection.setMddsDistrictId(record.getMddsDistrictId());
flwImportRejection.setMddsTalukaId(record.getMddsTalukaId());
flwImportRejection.setMddsVillageId(record.getMddsVillageId());
flwImportRejection.setSource("MCTS-Import");
flwImportRejection.setAccepted(accepted);
flwImportRejection.setRejectionReason(rejectionReason);
flwImportRejection.setAction(action);
return flwImportRejection;
}
public static ChildImportRejection childRejectionRch(RchChildRecord record, Boolean accepted, String rejectionReason, String action) {
ChildImportRejection childImportRejection = new ChildImportRejection();
childImportRejection.setSubcentreId(record.getSubCentreId());
childImportRejection.setSubcentreName(record.getSubCentreName());
childImportRejection.setVillageId(record.getVillageId());
childImportRejection.setVillageName(record.getVillageName());
childImportRejection.setName(record.getName());
childImportRejection.setMobileNo(record.getMobileNo());
childImportRejection.setStateId(record.getStateId());
childImportRejection.setDistrictId(record.getDistrictId());
childImportRejection.setDistrictName(record.getDistrictName());
childImportRejection.setTalukaId(record.getTalukaId());
childImportRejection.setTalukaName(record.getTalukaName());
childImportRejection.setHealthBlockId(record.getHealthBlockId());
childImportRejection.setHealthBlockName(record.getHealthBlockName());
childImportRejection.setPhcId(record.getPhcId());
childImportRejection.setPhcName(record.getPhcName());
childImportRejection.setBirthDate(record.getBirthdate());
childImportRejection.setRegistrationNo(record.getRegistrationNo());
childImportRejection.setEntryType(record.getEntryType());
childImportRejection.setIdNo(record.getMctsId());
childImportRejection.setmCTSMotherIDNo(record.getMctsMotherIdNo());
childImportRejection.setMotherId(record.getMotherRegistrationNo());
childImportRejection.setExecDate(record.getExecDate());
childImportRejection.setSource("RCH-Import");
childImportRejection.setAccepted(accepted);
childImportRejection.setRejectionReason(rejectionReason);
childImportRejection.setAction(action);
return childImportRejection;
}
public static ChildImportRejection childRejectionMcts(ChildRecord record, Boolean accepted, String rejectionReason, String action) { //NOPMD NcssMethodCount
ChildImportRejection childImportRejection = new ChildImportRejection();
childImportRejection.setStateId(record.getStateID());
childImportRejection.setDistrictId(record.getDistrictId());
childImportRejection.setDistrictName(record.getDistrictName());
childImportRejection.setTalukaId(record.getTalukaId());
childImportRejection.setTalukaName(record.getTalukaName());
childImportRejection.setHealthBlockId(record.getHealthBlockId());
childImportRejection.setHealthBlockName(record.getHealthBlockName());
childImportRejection.setPhcId(record.getPhcId());
childImportRejection.setPhcName(record.getPhcName());
childImportRejection.setSubcentreId(record.getSubCentreId());
childImportRejection.setSubcentreName(record.getSubCentreName());
childImportRejection.setVillageId(record.getVillageId());
childImportRejection.setVillageName(record.getVillageName());
childImportRejection.setYr(record.getYr());
childImportRejection.setCityMaholla(record.getCityMaholla());
childImportRejection.setgPVillage(record.getGpVillage());
childImportRejection.setAddress(record.getAddress());
childImportRejection.setIdNo(record.getIdNo());
childImportRejection.setName(record.getName());
childImportRejection.setMotherName(record.getMotherName());
childImportRejection.setmCTSMotherIDNo(record.getMotherId());
childImportRejection.setPhoneNumberWhom(record.getPhoneNoOfWhom());
childImportRejection.setMobileNo(record.getWhomPhoneNo());
childImportRejection.setBirthDate(record.getBirthdate());
childImportRejection.setPlaceOfDelivery(record.getPlaceOfDelivery());
childImportRejection.setBloodGroup(record.getBloodGroup());
childImportRejection.setCaste(record.getCaste());
childImportRejection.setSubcenterName1(record.getSubCentreName1());
childImportRejection.setaNMName(record.getAnmName());
childImportRejection.setaNMPhone(record.getAnmPhone());
childImportRejection.setAshaName(record.getAshaName());
childImportRejection.setAshaPhone(record.getAshaPhone());
childImportRejection.setbCGDt(record.getBcgDt());
childImportRejection.setoPV0Dt(record.getOpv0Dt());
childImportRejection.setHepatitisB1Dt(record.getHepatitisB1Dt());
childImportRejection.setdPT1Dt(record.getDpt1Dt());
childImportRejection.setoPV1Dt(record.getOpv1Dt());
childImportRejection.setHepatitisB2Dt(record.getHepatitisB2Dt());
childImportRejection.setdPT2Dt(record.getdPT2Dt());
childImportRejection.setoPV2Dt(record.getOpv2Dt());
childImportRejection.setHepatitisB3Dt(record.getHepatitisB3Dt());
childImportRejection.setdPT3Dt(record.getDpt3Dt());
childImportRejection.setoPV3Dt(record.getOpv3Dt());
childImportRejection.setHepatitisB4Dt(record.getHepatitisB4Dt());
childImportRejection.setMeaslesDt(record.getMeaslesDt());
childImportRejection.setVitADose1Dt(record.getVitADose1Dt());
childImportRejection.setmRDt(record.getMrDt());
childImportRejection.setdPTBoosterDt(record.getDptBoosterDt());
childImportRejection.setoPVBoosterDt(record.getOpvBoosterDt());
childImportRejection.setVitADose2Dt(record.getVitADose2Dt());
childImportRejection.setVitADose3Dt(record.getVitADose3Dt());
childImportRejection.setjEDt(record.getJeDt());
childImportRejection.setVitADose9Dt(record.getVitADose9Dt());
childImportRejection.setdT5Dt(record.getDt5Dt());
childImportRejection.settT10Dt(record.getTt10Dt());
childImportRejection.settT16Dt(record.getTt16Dt());
childImportRejection.setcLDRegDATE(record.getCldRegDate());
childImportRejection.setSex(record.getSex());
childImportRejection.setVitADose5Dt(record.getVitADose5Dt());
childImportRejection.setVitADose6Dt(record.getVitADose6Dt());
childImportRejection.setVitADose7Dt(record.getVitADose7Dt());
childImportRejection.setVitADose8Dt(record.getVitADose8Dt());
childImportRejection.setLastUpdateDate(record.getLastUpdateDate());
childImportRejection.setRemarks(record.getRemarks());
childImportRejection.setaNMID(record.getAnmID());
childImportRejection.setAshaID(record.getAshaID());
childImportRejection.setCreatedBy(record.getCreatedBy());
childImportRejection.setUpdatedBy(record.getUpdatedBy());
childImportRejection.setMeasles2Dt(record.getMeasles2Dt());
childImportRejection.setWeightOfChild(record.getWeightofChild());
childImportRejection.setChildAadhaarNo(record.getChildAadhaarNo());
childImportRejection.setChildEID(record.getChildEID());
childImportRejection.setChildEIDTime(record.getChildEIDTime());
childImportRejection.setFatherName(record.getFatherName());
childImportRejection.setBirthCertificateNumber(record.getBirthCertificateNumber());
childImportRejection.setEntryType(record.getEntryType());
childImportRejection.setSource("MCTS-Import");
childImportRejection.setAccepted(accepted);
childImportRejection.setRejectionReason(rejectionReason);
childImportRejection.setAction(action);
return childImportRejection;
}
public static MotherImportRejection motherRejectionRch(RchMotherRecord record, Boolean accepted, String rejectionReason, String action) {
MotherImportRejection motherImportRejection = new MotherImportRejection();
motherImportRejection.setStateId(record.getStateId());
motherImportRejection.setDistrictId(record.getDistrictId());
motherImportRejection.setDistrictName(record.getDistrictName());
motherImportRejection.setTalukaId(record.getTalukaId());
motherImportRejection.setTalukaName(record.getTalukaName());
motherImportRejection.setHealthBlockId(record.getHealthBlockId());
motherImportRejection.setHealthBlockName(record.getHealthBlockName());
motherImportRejection.setPhcId(record.getPhcId());
motherImportRejection.setPhcName(record.getPhcName());
motherImportRejection.setSubcentreId(record.getSubCentreId());
motherImportRejection.setSubcentreName(record.getSubCentreName());
motherImportRejection.setVillageId(record.getVillageId());
motherImportRejection.setVillageName(record.getVillageName());
motherImportRejection.setIdNo(record.getMctsIdNo());
motherImportRejection.setRegistrationNo(record.getRegistrationNo());
motherImportRejection.setCaseNo(record.getCaseNo());
motherImportRejection.setName(record.getName());
motherImportRejection.setMobileNo(record.getMobileNo());
motherImportRejection.setLmpDate(record.getLmpDate());
motherImportRejection.setBirthDate(record.getBirthDate());
motherImportRejection.setAbortionType(record.getAbortionType());
motherImportRejection.setDeliveryOutcomes(record.getDeliveryOutcomes());
motherImportRejection.setEntryType(record.getEntryType());
motherImportRejection.setExecDate(record.getExecDate());
motherImportRejection.setSource("RCH-Import");
motherImportRejection.setAccepted(accepted);
motherImportRejection.setRejectionReason(rejectionReason);
motherImportRejection.setAction(action);
return motherImportRejection;
}
public static MotherImportRejection motherRejectionMcts(MotherRecord record, Boolean accepted, String rejectionReason, String action) { //NOPMD NcssMethodCount
MotherImportRejection motherImportRejection = new MotherImportRejection();
motherImportRejection.setStateId(record.getStateId());
motherImportRejection.setDistrictId(record.getDistrictId());
motherImportRejection.setDistrictName(record.getDistrictName());
motherImportRejection.setTalukaId(record.getTalukaId());
motherImportRejection.setTalukaName(record.getTalukaName());
motherImportRejection.setHealthBlockId(record.getHealthBlockId());
motherImportRejection.setHealthBlockName(record.getHealthBlockName());
motherImportRejection.setPhcId(record.getPhcid());
motherImportRejection.setPhcName(record.getPhcName());
motherImportRejection.setSubcentreId(record.getSubCentreid());
motherImportRejection.setSubcentreName(record.getSubCentreName());
motherImportRejection.setVillageId(record.getVillageId());
motherImportRejection.setVillageName(record.getVillageName());
motherImportRejection.setYr(record.getYr());
motherImportRejection.setgPVillage(record.getGpVillage());
motherImportRejection.setAddress(record.getAddress());
motherImportRejection.setIdNo(record.getIdNo());
motherImportRejection.setName(record.getName());
motherImportRejection.setHusbandName(record.getHusbandName());
motherImportRejection.setPhoneNumberWhom(record.getPhoneNoOfWhom());
motherImportRejection.setMobileNo(record.getWhomPhoneNo());
motherImportRejection.setBirthDate(record.getBirthdate());
motherImportRejection.setjSYBeneficiary(record.getJsyBeneficiary());
motherImportRejection.setCaste(record.getCaste());
motherImportRejection.setSubcenterName1(record.getSubCentreName1());
motherImportRejection.setaNMName(record.getAnmName());
motherImportRejection.setaNMPhone(record.getAnmPhone());
motherImportRejection.setAshaName(record.getAshaName());
motherImportRejection.setAshaPhone(record.getAshaPhone());
motherImportRejection.setDeliveryLnkFacility(record.getDeliveryLnkFacility());
motherImportRejection.setFacilityName(record.getFacilityName());
motherImportRejection.setLmpDate(record.getLmpDate());
motherImportRejection.setaNC1Date(record.getAnc1Date());
motherImportRejection.setaNC2Date(record.getAnc2Date());
motherImportRejection.setaNC3Date(record.getAnc3Date());
motherImportRejection.setaNC4Date(record.getAnc4Date());
motherImportRejection.settT1Date(record.getTt1Date());
motherImportRejection.settT2Date(record.getTt2Date());
motherImportRejection.settTBoosterDate(record.getTtBoosterDate());
motherImportRejection.setiFA100GivenDate(record.getIfA100GivenDate());
motherImportRejection.setAnemia(record.getAnemia());
motherImportRejection.setaNCComplication(record.getAncComplication());
motherImportRejection.setrTISTI(record.getRtiSTI());
motherImportRejection.setDlyDate(record.getDlyDate());
motherImportRejection.setDlyPlaceHomeType(record.getDlyPlaceHomeType());
motherImportRejection.setDlyPlacePublic(record.getDlyPlacePublic());
motherImportRejection.setDlyPlacePrivate(record.getDlyPlacePrivate());
motherImportRejection.setDlyType(record.getDlyType());
motherImportRejection.setDlyComplication(record.getDlyComplication());
motherImportRejection.setDischargeDate(record.getDischargeDate());
motherImportRejection.setjSYPaidDate(record.getJsyPaidDate());
motherImportRejection.setAbortion(record.getAbortion());
motherImportRejection.setpNCHomeVisit(record.getPncHomeVisit());
motherImportRejection.setpNCComplication(record.getPncComplication());
motherImportRejection.setpPCMethod(record.getPpcMethod());
motherImportRejection.setpNCCheckup(record.getPncCheckup());
motherImportRejection.setOutcomeNos(record.getOutcomeNos());
motherImportRejection.setChild1Name(record.getChild1Name());
motherImportRejection.setChild1Sex(record.getChild1Sex());
motherImportRejection.setChild1Wt(record.getChild1Wt());
motherImportRejection.setChild1Brestfeeding(record.getChild1Brestfeeding());
motherImportRejection.setChild2Name(record.getChild2Name());
motherImportRejection.setChild2Sex(record.getChild2Sex());
motherImportRejection.setChild2Wt(record.getChild2Wt());
motherImportRejection.setChild2Brestfeeding(record.getChild2Brestfeeding());
motherImportRejection.setChild3Name(record.getChild3Name());
motherImportRejection.setChild3Sex(record.getChild3Sex());
motherImportRejection.setChild3Wt(record.getChild3Wt());
motherImportRejection.setChild3Brestfeeding(record.getChild3Brestfeeding());
motherImportRejection.setChild4Name(record.getChild4Name());
motherImportRejection.setChild4Sex(record.getChild4Sex());
motherImportRejection.setChild4Wt(record.getChild4Wt());
motherImportRejection.setChild4Brestfeeding(record.getChild4Brestfeeding());
motherImportRejection.setAge(record.getAge());
motherImportRejection.setmTHRREGDATE(record.getMthrRegDate());
motherImportRejection.setLastUpdateDate(record.getLastUpdateDate());
motherImportRejection.setRemarks(record.getRemarks());
motherImportRejection.setaNMID(record.getAnmID());
motherImportRejection.setaSHAID(record.getAshaID());
motherImportRejection.setCallAns(record.getCallAns());
motherImportRejection.setNoCallReason(record.getNoCallReason());
motherImportRejection.setNoPhoneReason(record.getNoPhoneReason());
motherImportRejection.setCreatedBy(record.getCreatedBy());
motherImportRejection.setUpdatedBy(record.getUpdatedBy());
motherImportRejection.setAadharNo(record.getAadharNo());
motherImportRejection.setbPLAPL(record.getBplAPL());
motherImportRejection.seteID(record.geteId());
motherImportRejection.seteIDTime(record.geteIdTime());
motherImportRejection.setEntryType(record.getEntryType());
motherImportRejection.setSource("MCTS-Import");
motherImportRejection.setAccepted(accepted);
motherImportRejection.setRejectionReason(rejectionReason);
motherImportRejection.setAction(action);
return motherImportRejection;
}
public static MotherRecord convertMapToMother(Map<String, Object> record) { //NO CHECKSTYLE CyclomaticComplexity
MotherRecord motherRecord = new MotherRecord();
motherRecord.setStateId(record.get(KilkariConstants.STATE_ID) == null ? null : (Long) record.get(KilkariConstants.STATE_ID));
motherRecord.setDistrictId(record.get(KilkariConstants.DISTRICT_ID) == null ? null : (Long) record.get(KilkariConstants.DISTRICT_ID));
motherRecord.setDistrictName(record.get(KilkariConstants.DISTRICT_NAME) == null ? null : record.get(KilkariConstants.DISTRICT_NAME).toString());
motherRecord.setTalukaId(record.get(KilkariConstants.TALUKA_ID) == null ? null : record.get(KilkariConstants.TALUKA_ID).toString());
motherRecord.setTalukaName(record.get(KilkariConstants.TALUKA_NAME) == null ? null : record.get(KilkariConstants.TALUKA_NAME).toString());
motherRecord.setHealthBlockId(record.get(KilkariConstants.HEALTH_BLOCK_ID) == null ? null : (Long) record.get(KilkariConstants.HEALTH_BLOCK_ID));
motherRecord.setHealthBlockName(record.get(KilkariConstants.HEALTH_BLOCK_NAME) == null ? null : record.get(KilkariConstants.HEALTH_BLOCK_NAME).toString());
motherRecord.setPhcid(record.get(KilkariConstants.PHC_ID) == null ? null : (Long) record.get(KilkariConstants.PHC_ID));
motherRecord.setPhcName(record.get(KilkariConstants.PHC_NAME) == null ? null : record.get(KilkariConstants.PHC_NAME).toString());
motherRecord.setSubCentreid(record.get(KilkariConstants.SUB_CENTRE_ID) == null ? null : (Long) record.get(KilkariConstants.SUB_CENTRE_ID));
motherRecord.setSubCentreName(record.get(KilkariConstants.SUB_CENTRE_NAME) == null ? null : record.get(KilkariConstants.SUB_CENTRE_NAME).toString());
motherRecord.setVillageId(record.get(KilkariConstants.CENSUS_VILLAGE_ID) == null ? null : (Long) record.get(KilkariConstants.CENSUS_VILLAGE_ID));
motherRecord.setVillageName(record.get(KilkariConstants.VILLAGE_NAME) == null ? null : record.get(KilkariConstants.VILLAGE_NAME).toString());
motherRecord.setLastUpdateDate(record.get(KilkariConstants.LAST_UPDATE_DATE) == null ? null : record.get(KilkariConstants.LAST_UPDATE_DATE).toString());
motherRecord.setIdNo(record.get(KilkariConstants.BENEFICIARY_ID) == null ? null : record.get(KilkariConstants.BENEFICIARY_ID).toString());
motherRecord.setName(record.get(KilkariConstants.BENEFICIARY_NAME) == null ? null : record.get(KilkariConstants.BENEFICIARY_NAME).toString());
motherRecord.setWhomPhoneNo(record.get(KilkariConstants.MSISDN) == null ? null : record.get(KilkariConstants.MSISDN).toString());
motherRecord.setLmpDate(record.get(KilkariConstants.LMP) == null ? null : record.get(KilkariConstants.LMP).toString());
motherRecord.setBirthdate(record.get(KilkariConstants.MOTHER_DOB) == null ? null : record.get(KilkariConstants.MOTHER_DOB).toString());
motherRecord.setAbortion(record.get(KilkariConstants.ABORTION) == null ? null : record.get(KilkariConstants.ABORTION).toString());
motherRecord.setOutcomeNos(record.get(KilkariConstants.STILLBIRTH) == null ? null : ((Boolean) record.get(KilkariConstants.STILLBIRTH) ? 1 : 0));
motherRecord.setEntryType(record.get(KilkariConstants.DEATH) == null ? null : ((Boolean) record.get(KilkariConstants.DEATH) ? 1 : 0));
motherRecord.setYr(record.get("Yr") == null || record.get("Yr").toString().trim().isEmpty() ? null : Integer.parseInt(record.get("Yr").toString()));
motherRecord.setGpVillage(record.get("GP_Village") == null ? null : (String) record.get("GP_Village"));
motherRecord.setAddress(record.get("Address") == null ? null : (String) record.get("Address"));
motherRecord.setHusbandName(record.get("Husband_Name") == null ? null : (String) record.get("Husband_Name"));
motherRecord.setPhoneNoOfWhom(record.get(KilkariConstants.PH_OF_WHOM) == null ? null : (String) record.get(KilkariConstants.PH_OF_WHOM));
motherRecord.setJsyBeneficiary(record.get("JSY_Beneficiary") == null ? null : (String) record.get("JSY_Beneficiary"));
motherRecord.setCaste(record.get(KilkariConstants.CASTE) == null ? null : (String) record.get(KilkariConstants.CASTE));
motherRecord.setSubCentreName1(record.get(KilkariConstants.SUB_CENTRE_NAME1) == null ? null : (String) record.get(KilkariConstants.SUB_CENTRE_NAME1));
motherRecord.setAnmName(record.get(KilkariConstants.ANM_NAME) == null ? null : (String) record.get(KilkariConstants.ANM_NAME));
motherRecord.setAnmPhone(record.get("ANM_Phone") == null ? null : (String) record.get("ANM_Phone"));
motherRecord.setAshaName(record.get(KilkariConstants.ASHA_NAME) == null ? null : (String) record.get(KilkariConstants.ASHA_NAME));
motherRecord.setAshaPhone(record.get(KilkariConstants.ASHA_PHONE) == null ? null : (String) record.get(KilkariConstants.ASHA_PHONE));
motherRecord.setDeliveryLnkFacility(record.get("Delivery_Lnk_Facility") == null ? null : (String) record.get("Delivery_Lnk_Facility"));
motherRecord.setFacilityName(record.get("Facility_Name") == null ? null : (String) record.get("Facility_Name"));
motherRecord.setAnc1Date(record.get("ANC1_Date") == null ? null : (String) record.get("ANC1_Date"));
motherRecord.setAnc2Date(record.get("ANC2_Date") == null ? null : (String) record.get("ANC2_Date"));
motherRecord.setAnc3Date(record.get("ANC3_Date") == null ? null : (String) record.get("ANC3_Date"));
motherRecord.setAnc4Date(record.get("ANC4_Date") == null ? null : (String) record.get("ANC4_Date"));
motherRecord.setTt1Date(record.get("TT1_Date") == null ? null : (String) record.get("TT1_Date"));
motherRecord.setTt2Date(record.get("TT2_Date") == null ? null : (String) record.get("TT2_Date"));
motherRecord.setTtBoosterDate(record.get("TTBooster_Date") == null ? null : (String) record.get("TTBooster_Date"));
motherRecord.setIfA100GivenDate(record.get("IFA100_Given_Date") == null ? null : (String) record.get("IFA100_Given_Date"));
motherRecord = convertToMother(motherRecord, record);
return motherRecord;
}
private static MotherRecord convertToMother(MotherRecord motherRecord, Map<String, Object> record) { // NO CHECKSTYLE Cyclomatic Complexity
motherRecord.setAnemia(record.get("Anemia") == null ? null : (String) record.get("Anemia"));
motherRecord.setAncComplication(record.get("ANC_Complication") == null ? null : (String) record.get("ANC_Complication"));
motherRecord.setRtiSTI(record.get("RTI_STI") == null ? null : (String) record.get("RTI_STI"));
motherRecord.setDlyDate(record.get("Dly_Date") == null ? null : (String) record.get("Dly_Date"));
motherRecord.setDlyPlaceHomeType(record.get("Dly_Place_Home_Type") == null ? null : (String) record.get("Dly_Place_Home_Type"));
motherRecord.setDlyPlacePublic(record.get("Dly_Place_Public") == null ? null : (String) record.get("Dly_Place_Public"));
motherRecord.setDlyPlacePrivate(record.get("Dly_Place_Private") == null ? null : (String) record.get("Dly_Place_Private"));
motherRecord.setDlyType(record.get("Dly_Type") == null ? null : (String) record.get("Dly_Type"));
motherRecord.setDlyComplication(record.get("Dly_Complication") == null ? null : (String) record.get("Dly_Complication"));
motherRecord.setDischargeDate(record.get("Discharge_Date") == null ? null : (String) record.get("Discharge_Date"));
motherRecord.setJsyPaidDate(record.get("JSY_Paid_Date") == null ? null : (String) record.get("JSY_Paid_Date"));
motherRecord.setPncHomeVisit(record.get("PNC_Home_Visit") == null ? null : (String) record.get("PNC_Home_Visit"));
motherRecord.setPncComplication(record.get("PNC_Complication") == null ? null : (String) record.get("PNC_Complication"));
motherRecord.setPpcMethod(record.get("PPC_Method") == null ? null : (String) record.get("PPC_Method"));
motherRecord.setPncCheckup(record.get("PNC_Checkup") == null ? null : (String) record.get("PNC_Checkup"));
motherRecord.setChild1Name(record.get("Child1_Name") == null ? null : (String) record.get("Child1_Name"));
motherRecord.setChild1Sex(record.get("Child1_Sex") == null ? null : (String) record.get("Child1_Sex"));
motherRecord.setChild1Wt(record.get(KilkariConstants.CHILD1_WT) == null || record.get(KilkariConstants.CHILD1_WT).toString().trim().isEmpty() ? null : Double.parseDouble(record.get(KilkariConstants.CHILD1_WT).toString()));
motherRecord.setChild1Brestfeeding(record.get("Child1_Brestfeeding") == null ? null : (String) record.get("Child1_Brestfeeding"));
motherRecord.setChild2Name(record.get("Child2_Name") == null ? null : (String) record.get("Child2_Name"));
motherRecord.setChild2Sex(record.get("Child2_Sex") == null ? null : (String) record.get("Child2_Sex"));
motherRecord.setChild2Wt(record.get(KilkariConstants.CHILD2_WT) == null || record.get(KilkariConstants.CHILD2_WT).toString().trim().isEmpty() ? null : Double.parseDouble(record.get(KilkariConstants.CHILD2_WT).toString()));
motherRecord.setChild2Brestfeeding(record.get("Child2_Brestfeeding") == null ? null : (String) record.get("Child2_Brestfeeding"));
motherRecord.setChild3Name(record.get("Child3_Name") == null ? null : (String) record.get("Child3_Name"));
motherRecord.setChild3Sex(record.get("Child3_Sex") == null ? null : (String) record.get("Child3_Sex"));
motherRecord.setChild3Wt(record.get(KilkariConstants.CHILD3_WT) == null || record.get(KilkariConstants.CHILD3_WT).toString().trim().isEmpty() ? null : Double.parseDouble(record.get(KilkariConstants.CHILD3_WT).toString()));
motherRecord.setChild3Brestfeeding(record.get("Child3_Brestfeeding") == null ? null : (String) record.get("Child3_Brestfeeding"));
motherRecord.setChild4Name(record.get("Child4_Name") == null ? null : (String) record.get("Child4_Name"));
motherRecord.setChild4Sex(record.get("Child4_Sex") == null ? null : (String) record.get("Child4_Sex"));
motherRecord.setChild4Wt(record.get(KilkariConstants.CHILD4_WT) == null || record.get(KilkariConstants.CHILD4_WT).toString().trim().isEmpty() ? null : Double.parseDouble(record.get(KilkariConstants.CHILD4_WT).toString()));
motherRecord.setChild4Brestfeeding(record.get("Child4_Brestfeeding") == null ? null : (String) record.get("Child4_Brestfeeding"));
motherRecord.setAge(record.get("Age") == null || record.get("Age").toString().trim().isEmpty() ? null : Integer.parseInt(record.get("Age").toString()));
motherRecord.setMthrRegDate(record.get("MTHR_REG_DATE") == null ? null : (String) record.get("MTHR_REG_DATE"));
motherRecord.setRemarks(record.get(KilkariConstants.REMARKS) == null ? null : (String) record.get(KilkariConstants.REMARKS));
motherRecord.setAnmID(record.get(KilkariConstants.ANM_ID) == null || record.get(KilkariConstants.ANM_ID).toString().trim().isEmpty() ? null : Integer.parseInt(record.get(KilkariConstants.ANM_ID).toString()));
motherRecord.setAshaID(record.get(KilkariConstants.ASHA_ID) == null || record.get(KilkariConstants.ASHA_ID).toString().trim().isEmpty() ? null : Integer.parseInt(record.get(KilkariConstants.ASHA_ID).toString()));
motherRecord.setCallAns(record.get("Call_Ans") == null ? null : (Boolean) record.get("Call_Ans"));
motherRecord.setNoCallReason(record.get("NoCall_Reason") == null || record.get("NoCall_Reason").toString().trim().isEmpty() ? null : Integer.parseInt(record.get("NoCall_Reason").toString()));
motherRecord.setNoPhoneReason(record.get("NoPhone_Reason") == null || record.get("NoPhone_Reason").toString().trim().isEmpty() ? null : Integer.parseInt(record.get("NoPhone_Reason").toString()));
motherRecord.setCreatedBy(record.get(KilkariConstants.CREATED_BY) == null || record.get(KilkariConstants.CREATED_BY).toString().trim().isEmpty() ? null : Integer.parseInt(record.get(KilkariConstants.CREATED_BY).toString()));
motherRecord.setUpdatedBy(record.get(KilkariConstants.UPDATED_BY) == null || record.get(KilkariConstants.UPDATED_BY).toString().trim().isEmpty() ? null : Integer.parseInt(record.get(KilkariConstants.UPDATED_BY).toString()));
motherRecord.setAadharNo(record.get("Aadhar_No") == null || record.get("Aadhar_No").toString().trim().isEmpty() ? null : Integer.parseInt(record.get("Aadhar_No").toString()));
motherRecord.setBplAPL(record.get("BPL_APL") == null || record.get("BPL_APL").toString().trim().isEmpty() ? null : Integer.parseInt(record.get("BPL_APL").toString()));
motherRecord.seteId(record.get("EID") == null || record.get("EID").toString().trim().isEmpty() ? null : Integer.parseInt(record.get("EID").toString()));
motherRecord.seteIdTime(record.get("EIDTime") == null ? null : (String) record.get("EIDTime"));
return motherRecord;
}
public static RchMotherRecord convertMapToRchMother(Map<String, Object> record) { //NO CHECKSTYLE CyclomaticComplexity
RchMotherRecord rchMotherRecord = new RchMotherRecord();
rchMotherRecord.setStateId(record.get(KilkariConstants.STATE_ID) == null ? null : (Long) record.get(KilkariConstants.STATE_ID));
rchMotherRecord.setDistrictId(record.get(KilkariConstants.DISTRICT_ID) == null ? null : (Long) record.get(KilkariConstants.DISTRICT_ID));
rchMotherRecord.setDistrictName(record.get(KilkariConstants.DISTRICT_NAME) == null ? null : record.get(KilkariConstants.DISTRICT_NAME).toString());
rchMotherRecord.setTalukaId(record.get(KilkariConstants.TALUKA_ID) == null ? null : record.get(KilkariConstants.TALUKA_ID).toString());
rchMotherRecord.setTalukaName(record.get(KilkariConstants.TALUKA_NAME) == null ? null : record.get(KilkariConstants.TALUKA_NAME).toString());
rchMotherRecord.setHealthBlockId(record.get(KilkariConstants.HEALTH_BLOCK_ID) == null ? null : (Long) record.get(KilkariConstants.HEALTH_BLOCK_ID));
rchMotherRecord.setHealthBlockName(record.get(KilkariConstants.HEALTH_BLOCK_NAME) == null ? null : record.get(KilkariConstants.HEALTH_BLOCK_NAME).toString());
rchMotherRecord.setPhcId(record.get(KilkariConstants.PHC_ID) == null ? null : (Long) record.get(KilkariConstants.PHC_ID));
rchMotherRecord.setPhcName(record.get(KilkariConstants.PHC_NAME) == null ? null : record.get(KilkariConstants.PHC_NAME).toString());
rchMotherRecord.setSubCentreId(record.get(KilkariConstants.SUB_CENTRE_ID) == null ? null : (Long) record.get(KilkariConstants.SUB_CENTRE_ID));
rchMotherRecord.setSubCentreName(record.get(KilkariConstants.SUB_CENTRE_NAME) == null ? null : record.get(KilkariConstants.SUB_CENTRE_NAME).toString());
rchMotherRecord.setVillageId(record.get(KilkariConstants.CENSUS_VILLAGE_ID) == null ? null : (Long) record.get(KilkariConstants.CENSUS_VILLAGE_ID));
rchMotherRecord.setVillageName(record.get(KilkariConstants.VILLAGE_NAME) == null ? null : record.get(KilkariConstants.VILLAGE_NAME).toString());
rchMotherRecord.setMctsIdNo(record.get(KilkariConstants.MCTS_ID) == null ? null : record.get(KilkariConstants.MCTS_ID).toString());
rchMotherRecord.setRegistrationNo(record.get(KilkariConstants.RCH_ID) == null ? null : record.get(KilkariConstants.RCH_ID).toString());
rchMotherRecord.setName(record.get(KilkariConstants.BENEFICIARY_NAME) == null ? null : record.get(KilkariConstants.BENEFICIARY_NAME).toString());
rchMotherRecord.setMobileNo(record.get(KilkariConstants.MOBILE_NO) == null ? null : record.get(KilkariConstants.MOBILE_NO).toString());
rchMotherRecord.setLmpDate(record.get(KilkariConstants.LMP) == null ? null : record.get(KilkariConstants.LMP).toString());
rchMotherRecord.setBirthDate(record.get(KilkariConstants.MOTHER_DOB) == null ? null : record.get(KilkariConstants.MOTHER_DOB).toString());
rchMotherRecord.setAbortionType(record.get(KilkariConstants.ABORTION_TYPE) == null ? null : record.get(KilkariConstants.ABORTION_TYPE).toString());
rchMotherRecord.setDeliveryOutcomes(record.get(KilkariConstants.DELIVERY_OUTCOMES) == null ? null : record.get(KilkariConstants.DELIVERY_OUTCOMES).toString());
rchMotherRecord.setEntryType(record.get(KilkariConstants.DEATH) == null || record.get(KilkariConstants.DEATH).toString().trim().isEmpty() ? null : ((Boolean) record.get(KilkariConstants.DEATH) ? 1 : 0));
rchMotherRecord.setExecDate(record.get(KilkariConstants.EXECUTION_DATE) == null ? null : record.get(KilkariConstants.EXECUTION_DATE).toString());
rchMotherRecord.setCaseNo(record.get(KilkariConstants.CASE_NO) == null ? null : (Long) record.get(KilkariConstants.CASE_NO));
return rchMotherRecord;
}
public static ChildRecord convertMapToChild(Map<String, Object> record) { //NO CHECKSTYLE CyclomaticComplexity //NOPMD NcssMethodCount
ChildRecord childRecord = new ChildRecord();
childRecord.setStateID(record.get(KilkariConstants.STATE_ID) == null ? null : (Long) record.get(KilkariConstants.STATE_ID));
childRecord.setDistrictId(record.get(KilkariConstants.DISTRICT_ID) == null ? null : (Long) record.get(KilkariConstants.DISTRICT_ID));
childRecord.setDistrictName(record.get(KilkariConstants.DISTRICT_NAME) == null ? null : record.get(KilkariConstants.DISTRICT_NAME).toString());
childRecord.setTalukaId(record.get(KilkariConstants.TALUKA_ID) == null ? null : record.get(KilkariConstants.TALUKA_ID).toString());
childRecord.setTalukaName(record.get(KilkariConstants.TALUKA_NAME) == null ? null : record.get(KilkariConstants.TALUKA_NAME).toString());
childRecord.setHealthBlockId((Long) record.get(KilkariConstants.HEALTH_BLOCK_ID));
childRecord.setHealthBlockName(record.get(KilkariConstants.HEALTH_BLOCK_NAME) == null ? null : record.get(KilkariConstants.HEALTH_BLOCK_NAME).toString());
childRecord.setPhcId(record.get(KilkariConstants.PHC_ID) == null ? null : (Long) record.get(KilkariConstants.PHC_ID));
childRecord.setPhcName(record.get(KilkariConstants.PHC_NAME) == null ? null : record.get(KilkariConstants.PHC_NAME).toString());
childRecord.setSubCentreId(record.get(KilkariConstants.SUB_CENTRE_ID) == null ? null : (Long) record.get(KilkariConstants.SUB_CENTRE_ID));
childRecord.setSubCentreName(record.get(KilkariConstants.SUB_CENTRE_NAME) == null ? null : record.get(KilkariConstants.SUB_CENTRE_NAME).toString());
childRecord.setVillageId(record.get(KilkariConstants.CENSUS_VILLAGE_ID) == null ? null : (Long) record.get(KilkariConstants.CENSUS_VILLAGE_ID));
childRecord.setVillageName(record.get(KilkariConstants.VILLAGE_NAME) == null ? null : record.get(KilkariConstants.VILLAGE_NAME).toString());
childRecord.setLastUpdateDate(record.get(KilkariConstants.LAST_UPDATE_DATE) == null ? null : record.get(KilkariConstants.LAST_UPDATE_DATE).toString());
childRecord.setName(record.get(KilkariConstants.BENEFICIARY_NAME) == null ? null : record.get(KilkariConstants.BENEFICIARY_NAME).toString());
childRecord.setWhomPhoneNo(record.get(KilkariConstants.MSISDN) == null ? null : record.get(KilkariConstants.MSISDN).toString());
childRecord.setBirthdate(record.get(KilkariConstants.DOB) == null ? null : record.get(KilkariConstants.DOB).toString());
childRecord.setIdNo(record.get(KilkariConstants.BENEFICIARY_ID) == null ? null : record.get(KilkariConstants.BENEFICIARY_ID).toString());
String motherId = null;
if (record.get(KilkariConstants.MOTHER_ID) != null) {
// MOTHER_ID may hold either an MctsMother instance or a plain identifier value;
// use the beneficiary id when it is an MctsMother, otherwise fall back to toString().
Object motherIdValue = record.get(KilkariConstants.MOTHER_ID);
try {
MctsMother motherInstance = (MctsMother) motherIdValue;
motherId = motherInstance.getBeneficiaryId();
} catch (Exception e) {
motherId = motherIdValue.toString();
}
}
childRecord.setMotherId(motherId);
childRecord.setEntryType(record.get(KilkariConstants.DEATH) == null || record.get(KilkariConstants.DEATH).toString().trim().isEmpty() ? null : String.valueOf((Boolean) record.get(KilkariConstants.DEATH) ? 1 : 0));
childRecord.setGpVillage(record.get(KilkariConstants.GP_VILLAGE) == null ? null : (String) record.get(KilkariConstants.GP_VILLAGE));
childRecord.setAddress(record.get(KilkariConstants.ADDRESS) == null ? null : (String) record.get(KilkariConstants.ADDRESS));
childRecord.setYr(record.get("Yr") == null || record.get("Yr").toString().trim().isEmpty() ? null : Integer.parseInt(record.get("Yr").toString()));
childRecord.setCityMaholla(record.get("City_Maholla") == null ? null : (String) record.get("City_Maholla"));
childRecord.setMotherName(record.get("Mother_Name") == null ? null : (String) record.get("Mother_Name"));
childRecord.setPhoneNoOfWhom(record.get(KilkariConstants.PH_OF_WHOM) == null ? null : (String) record.get(KilkariConstants.PH_OF_WHOM));
childRecord.setPlaceOfDelivery(record.get("Place_of_Delivery") == null ? null : (String) record.get("Place_of_Delivery"));
childRecord.setAnmPhone(record.get(KilkariConstants.ANM_PHONE) == null ? null : (String) record.get(KilkariConstants.ANM_PHONE));
childRecord.setBloodGroup(record.get("Blood_Group") == null ? null : (String) record.get("Blood_Group"));
childRecord.setAshaName(record.get(KilkariConstants.ASHA_NAME) == null ? null : (String) record.get(KilkariConstants.ASHA_NAME));
childRecord.setAshaPhone(record.get(KilkariConstants.ASHA_PHONE) == null ? null : (String) record.get(KilkariConstants.ASHA_PHONE));
childRecord.setSubCentreName1(record.get(KilkariConstants.SUB_CENTRE_NAME1) == null ? null : (String) record.get(KilkariConstants.SUB_CENTRE_NAME1));
childRecord.setAnmName(record.get(KilkariConstants.ANM_NAME) == null ? null : (String) record.get(KilkariConstants.ANM_NAME));
childRecord.setCaste(record.get(KilkariConstants.CASTE) == null ? null : (String) record.get(KilkariConstants.CASTE));
childRecord.setBcgDt(record.get("BCG_Dt") == null ? null : (String) record.get("BCG_Dt"));
childRecord.setOpv0Dt(record.get("OPV0_Dt") == null ? null : (String) record.get("OPV0_Dt"));
childRecord.setHepatitisB1Dt(record.get("HepatitisB1_Dt") == null ? null : (String) record.get("HepatitisB1_Dt"));
childRecord.setDpt1Dt(record.get("DPT1_Dt") == null ? null : (String) record.get("DPT1_Dt"));
childRecord.setOpv1Dt(record.get("OPV1_Dt") == null ? null : (String) record.get("OPV1_Dt"));
childRecord = convertToChild(childRecord, record);
return childRecord;
}
private static ChildRecord convertToChild(ChildRecord childRecord, Map<String, Object> record) { // NO CHECKSTYLE Cyclomatic Complexity
childRecord.setHepatitisB2Dt(record.get("HepatitisB2_Dt") == null ? null : (String) record.get("HepatitisB2_Dt"));
childRecord.setdPT2Dt(record.get("DPT2_Dt") == null ? null : (String) record.get("DPT2_Dt"));
childRecord.setOpv2Dt(record.get("OPV2_Dt") == null ? null : (String) record.get("OPV2_Dt"));
childRecord.setHepatitisB3Dt(record.get("HepatitisB3_Dt") == null ? null : (String) record.get("HepatitisB3_Dt"));
childRecord.setDpt3Dt(record.get("DPT3_Dt") == null ? null : (String) record.get("DPT3_Dt"));
childRecord.setOpv3Dt(record.get("OPV3_Dt") == null ? null : (String) record.get("OPV3_Dt"));
childRecord.setHepatitisB4Dt(record.get("HepatitisB4_Dt") == null ? null : (String) record.get("HepatitisB4_Dt"));
childRecord.setMeaslesDt(record.get("Measles_Dt") == null ? null : (String) record.get("Measles_Dt"));
childRecord.setVitADose1Dt(record.get("VitA_Dose1_Dt") == null ? null : (String) record.get("VitA_Dose1_Dt"));
childRecord.setMrDt(record.get("MR_Dt") == null ? null : (String) record.get("MR_Dt"));
childRecord.setDptBoosterDt(record.get("DPTBooster_Dt") == null ? null : (String) record.get("DPTBooster_Dt"));
childRecord.setOpvBoosterDt(record.get("OPVBooster_Dt") == null ? null : (String) record.get("OPVBooster_Dt"));
childRecord.setVitADose2Dt(record.get("VitA_Dose2_Dt") == null ? null : (String) record.get("VitA_Dose2_Dt"));
childRecord.setVitADose3Dt(record.get("VitA_Dose3_Dt") == null ? null : (String) record.get("VitA_Dose3_Dt"));
childRecord.setJeDt(record.get("JE_Dt") == null ? null : (String) record.get("JE_Dt"));
childRecord.setVitADose9Dt(record.get("VitA_Dose9_Dt") == null ? null : (String) record.get("VitA_Dose9_Dt"));
childRecord.setDt5Dt(record.get("DT5_Dt") == null ? null : (String) record.get("DT5_Dt"));
childRecord.setTt10Dt(record.get("TT10_Dt") == null ? null : (String) record.get("TT10_Dt"));
childRecord.setTt16Dt(record.get("TT16_Dt") == null ? null : (String) record.get("TT16_Dt"));
childRecord.setCldRegDate(record.get("CLD_REG_DATE") == null ? null : (String) record.get("CLD_REG_DATE"));
childRecord.setSex(record.get("Sex") == null ? null : (String) record.get("Sex"));
childRecord.setVitADose5Dt(record.get("VitA_Dose5_Dt") == null ? null : (String) record.get("VitA_Dose5_Dt"));
childRecord.setRemarks(record.get(KilkariConstants.REMARKS) == null ? null : (String) record.get(KilkariConstants.REMARKS));
childRecord.setVitADose6Dt(record.get("VitA_Dose6_Dt") == null ? null : (String) record.get("VitA_Dose6_Dt"));
childRecord.setAnmID(record.get(KilkariConstants.ANM_ID) == null || record.get(KilkariConstants.ANM_ID).toString().trim().isEmpty() ? null : Integer.parseInt(record.get(KilkariConstants.ANM_ID).toString()));
childRecord.setAshaID(record.get(KilkariConstants.ASHA_ID) == null || record.get(KilkariConstants.ASHA_ID).toString().trim().isEmpty() ? null : Integer.parseInt(record.get(KilkariConstants.ASHA_ID).toString()));
childRecord.setVitADose7Dt(record.get("VitA_Dose7_Dt") == null ? null : (String) record.get("VitA_Dose7_Dt"));
childRecord.setVitADose8Dt(record.get("VitA_Dose8_Dt") == null ? null : (String) record.get("VitA_Dose8_Dt"));
childRecord.setCreatedBy(record.get(KilkariConstants.CREATED_BY) == null || record.get(KilkariConstants.CREATED_BY).toString().trim().isEmpty() ? null : Integer.parseInt(record.get(KilkariConstants.CREATED_BY).toString()));
childRecord.setUpdatedBy(record.get(KilkariConstants.UPDATED_BY) == null || record.get(KilkariConstants.UPDATED_BY).toString().trim().isEmpty() ? null : Integer.parseInt(record.get(KilkariConstants.UPDATED_BY).toString()));
childRecord.setMeasles2Dt(record.get("Measles2_Dt") == null ? null : (String) record.get("Measles2_Dt"));
childRecord.setWeightofChild(record.get("Weight_of_Child") == null || record.get("Weight_of_Child").toString().trim().isEmpty() ? null : Double.parseDouble(record.get("Weight_of_Child").toString()));
childRecord.setChildAadhaarNo(record.get("Child_Aadhaar_No") == null || record.get("Child_Aadhaar_No").toString().trim().isEmpty() ? null : Integer.parseInt(record.get("Child_Aadhaar_No").toString()));
childRecord.setChildEID(record.get("Child_EID") == null || record.get("Child_EID").toString().trim().isEmpty() ? null : Integer.parseInt(record.get("Child_EID").toString()));
childRecord.setChildEIDTime(record.get("Child_EIDTime") == null ? null : (String) record.get("Child_EIDTime"));
childRecord.setFatherName(record.get("Father_Name") == null ? null : (String) record.get("Father_Name"));
childRecord.setBirthCertificateNumber(record.get("Birth_Certificate_Number") == null ? null : (String) record.get("Birth_Certificate_Number"));
return childRecord;
}
public static RchChildRecord convertMapToRchChild(Map<String, Object> record) { //NO CHECKSTYLE CyclomaticComplexity
RchChildRecord rchChildRecord = new RchChildRecord();
rchChildRecord.setStateId(record.get(KilkariConstants.STATE_ID) == null ? null : (Long) record.get(KilkariConstants.STATE_ID));
rchChildRecord.setDistrictId(record.get(KilkariConstants.DISTRICT_ID) == null ? null : (Long) record.get(KilkariConstants.DISTRICT_ID));
rchChildRecord.setDistrictName(record.get(KilkariConstants.DISTRICT_NAME) == null ? null : record.get(KilkariConstants.DISTRICT_NAME).toString());
rchChildRecord.setTalukaId(record.get(KilkariConstants.TALUKA_ID) == null ? null : record.get(KilkariConstants.TALUKA_ID).toString());
rchChildRecord.setTalukaName(record.get(KilkariConstants.TALUKA_NAME) == null ? null : record.get(KilkariConstants.TALUKA_NAME).toString());
rchChildRecord.setHealthBlockId(record.get(KilkariConstants.HEALTH_BLOCK_ID) == null ? null : (Long) record.get(KilkariConstants.HEALTH_BLOCK_ID));
rchChildRecord.setHealthBlockName(record.get(KilkariConstants.HEALTH_BLOCK_NAME) == null ? null : record.get(KilkariConstants.HEALTH_BLOCK_NAME).toString());
rchChildRecord.setPhcId(record.get(KilkariConstants.PHC_ID) == null ? null : (Long) record.get(KilkariConstants.PHC_ID));
rchChildRecord.setPhcName(record.get(KilkariConstants.PHC_NAME) == null ? null : record.get(KilkariConstants.PHC_NAME).toString());
rchChildRecord.setSubCentreId(record.get(KilkariConstants.SUB_CENTRE_ID) == null ? null : (Long) record.get(KilkariConstants.SUB_CENTRE_ID));
rchChildRecord.setSubCentreName(record.get(KilkariConstants.SUB_CENTRE_NAME) == null ? null : record.get(KilkariConstants.SUB_CENTRE_NAME).toString());
rchChildRecord.setVillageId(record.get(KilkariConstants.CENSUS_VILLAGE_ID) == null ? null : (Long) record.get(KilkariConstants.CENSUS_VILLAGE_ID));
rchChildRecord.setVillageName(record.get(KilkariConstants.VILLAGE_NAME) == null ? null : record.get(KilkariConstants.VILLAGE_NAME).toString());
rchChildRecord.setName(record.get(KilkariConstants.BENEFICIARY_NAME) == null ? null : record.get(KilkariConstants.BENEFICIARY_NAME).toString());
rchChildRecord.setMobileNo(record.get(KilkariConstants.MOBILE_NO) == null ? null : record.get(KilkariConstants.MOBILE_NO).toString());
rchChildRecord.setBirthdate(record.get(KilkariConstants.DOB) == null ? null : record.get(KilkariConstants.DOB).toString());
rchChildRecord.setMctsId(record.get(KilkariConstants.MCTS_ID) == null ? null : record.get(KilkariConstants.MCTS_ID).toString());
rchChildRecord.setMctsMotherIdNo(record.get(KilkariConstants.MCTS_MOTHER_ID) == null ? null : record.get(KilkariConstants.MCTS_MOTHER_ID).toString());
rchChildRecord.setRegistrationNo(record.get(KilkariConstants.RCH_ID) == null ? null : record.get(KilkariConstants.RCH_ID).toString());
rchChildRecord.setMotherRegistrationNo(record.get(KilkariConstants.RCH_MOTHER_ID) == null ? null : record.get(KilkariConstants.RCH_MOTHER_ID).toString());
rchChildRecord.setEntryType(record.get(KilkariConstants.DEATH) == null || record.get(KilkariConstants.DEATH).toString().trim().isEmpty() ? null : String.valueOf((Boolean) record.get(KilkariConstants.DEATH) ? 1 : 0));
rchChildRecord.setExecDate(record.get(KilkariConstants.EXECUTION_DATE) == null ? null : record.get(KilkariConstants.EXECUTION_DATE).toString());
return rchChildRecord;
}
}
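/*
 * A minimal usage sketch (not part of the original sources): convert one imported
 * row, represented as a Map, into a MotherRecord and then into a rejection entry.
 * The example class, the field values, and the rejection-reason/action strings are
 * purely illustrative; only the KilkariConstants keys and the two converter calls
 * come from the code above.
 */
final class RejectedObjectConverterExample {
    private RejectedObjectConverterExample() {
    }
    /** Maps a raw import row to a rejected (accepted = false) mother entry. */
    static MotherImportRejection rejectRow(Map<String, Object> row, String reason) {
        MotherRecord mother = RejectedObjectConverter.convertMapToMother(row);
        return RejectedObjectConverter.motherRejectionMcts(mother, false, reason, "CREATE");
    }
    static MotherImportRejection exampleRow() {
        Map<String, Object> row = new java.util.HashMap<String, Object>();
        row.put(KilkariConstants.STATE_ID, 18L);                 // read back as a Long by the converter
        row.put(KilkariConstants.BENEFICIARY_ID, "MCTS-123");    // becomes MotherRecord.idNo
        row.put(KilkariConstants.BENEFICIARY_NAME, "Asha Devi"); // becomes MotherRecord.name
        row.put(KilkariConstants.MSISDN, "9999999999");          // becomes MotherRecord.whomPhoneNo
        return rejectRow(row, "Invalid mobile number");          // illustrative reason text
    }
}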
|
|
package com.afollestad.inquiry;
import static junit.framework.Assert.assertEquals;
import static junit.framework.Assert.assertNotNull;
import android.content.Context;
import android.support.test.InstrumentationRegistry;
import android.support.test.runner.AndroidJUnit4;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
@RunWith(AndroidJUnit4.class)
public class InquiryTests {
private static final String INSTANCE_NAME = "test";
@SuppressWarnings("CheckResult")
@Before
public void setup() {
Context appContext = InstrumentationRegistry.getTargetContext();
Inquiry inq =
Inquiry.newInstance(appContext, "instrument_test").instanceName(INSTANCE_NAME).build();
inq.delete(Person.class).run();
inq.delete(Child.class).run();
}
@Test
public void test_insert_and_query() throws Exception {
Person[] people =
new Person[] {
new Person("Natalie", 43),
new Person("Angela", 41),
new Person("Jeff", 42),
new Person("Aidan", 21),
new Person("Waverly", 19),
new Person("Jane", 70)
};
Long[] insertedIds = Inquiry.get(INSTANCE_NAME).insert(Person.class).values(people).run();
assertEquals(insertedIds.length, 6);
Person[] query1 =
Inquiry.get(INSTANCE_NAME)
.select(Person.class)
.where("name = ? OR name = ?", "Aidan", "Waverly")
.sort("name")
.all();
assertNotNull(query1);
assertEquals(query1.length, 2);
assertEquals(query1[0].name, "Aidan");
assertEquals(query1[1].name, "Waverly");
Person[] query2 =
Inquiry.get(INSTANCE_NAME).select(Person.class).whereIn("age", 70, 42).sort("name").all();
assertNotNull(query2);
assertEquals(query2.length, 2);
assertEquals(query2[0].name, "Jane");
assertEquals(query2[1].name, "Jeff");
Person[] query3 =
Inquiry.get(INSTANCE_NAME)
.select(Person.class)
.where("age >= 19")
.where("age <= 42")
.sort("name")
.all();
assertNotNull(query3);
assertEquals(query3.length, 4);
assertEquals(query3[0].name, "Aidan");
assertEquals(query3[1].name, "Angela");
assertEquals(query3[2].name, "Jeff");
assertEquals(query3[3].name, "Waverly");
Person[] query4 =
Inquiry.get(INSTANCE_NAME)
.select(Person.class)
.where("age = 19")
.orWhere("name = ?", "Aidan")
.sort("name")
.all();
assertNotNull(query4);
assertEquals(query4.length, 2);
assertEquals(query4[0].name, "Aidan");
assertEquals(query4[1].name, "Waverly");
Person[] query5 =
Inquiry.get(INSTANCE_NAME)
.select(Person.class)
.where("name LIKE ?", "%dan")
.sort("name")
.all();
assertNotNull(query5);
assertEquals(query5.length, 1);
assertEquals(query5[0].name, "Aidan");
}
@Test
public void test_update() {
Person[] people =
new Person[] {
new Person("Natalie", 43),
new Person("Jeff", 42),
new Person("Aidan", 21),
new Person("Waverly Moua", 19)
};
Long[] insertedIds = Inquiry.get(INSTANCE_NAME).insert(Person.class).values(people).run();
assertEquals(insertedIds.length, 4);
people[2].age = 22;
people[3].name = "Waverly Follestad";
people[3].age = 20;
Inquiry.get(INSTANCE_NAME).update(Person.class).values(people).run();
Person[] query1 = Inquiry.get(INSTANCE_NAME).select(Person.class).sort("name").all();
assertNotNull(query1);
assertEquals(query1.length, 4);
assertEquals(query1[0].name, "Aidan");
assertEquals(query1[0].age, 22);
assertEquals(query1[3].name, "Waverly Follestad");
assertEquals(query1[3].age, 20);
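// Clear ids and ages locally, then bulk-update only the "age" column for rows with 20 < age < 43.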
for (Person p : people) {
p.id = 0;
p.age = 1;
}
Inquiry.get(INSTANCE_NAME)
.update(Person.class)
.sort("name")
.where("age < 43")
.where("age > 20")
.projection("age")
.values(people)
.run();
Person[] query2 = Inquiry.get(INSTANCE_NAME).select(Person.class).sort("name").all();
assertNotNull(query2);
assertEquals(query2.length, 4);
assertEquals(query2[0].name, "Aidan");
assertEquals(query2[0].age, 1);
assertEquals(query2[1].name, "Jeff");
assertEquals(query2[1].age, 1);
assertEquals(query2[2].name, "Natalie");
assertEquals(query2[2].age, 43);
assertEquals(query2[3].name, "Waverly Follestad");
assertEquals(query2[3].age, 20);
}
@Test
public void test_delete() {
Person[] people =
new Person[] {
new Person("Natalie", 43),
new Person("Jeff", 42),
new Person("Aidan", 21),
new Person("Waverly", 19)
};
Long[] insertedIds = Inquiry.get(INSTANCE_NAME).insert(Person.class).values(people).run();
assertEquals(insertedIds.length, 4);
Inquiry.get(INSTANCE_NAME).delete(Person.class).where("age > 21").run();
Person[] query = Inquiry.get(INSTANCE_NAME).select(Person.class).sort("name").all();
assertNotNull(query);
assertEquals(query.length, 2);
assertEquals(query[0].name, "Aidan");
assertEquals(query[1].name, "Waverly");
}
@Test
public void test_query_foreign_keys() {
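// Children added to a Person should be persisted through the foreign-key relationship when the parent is inserted.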
Person[] people = new Person[] {new Person("Natalie", 43), new Person("Angela", 41)};
people[0].children.add(new Child("Aidan"));
people[1].children.add(new Child("Dylan"));
people[1].children.add(new Child("Elias"));
Long[] insertedIds = Inquiry.get(INSTANCE_NAME).insert(Person.class).values(people).run();
assertEquals(insertedIds.length, 2);
Child[] insertedChildren = Inquiry.get(INSTANCE_NAME).select(Child.class).sort("name").all();
assertNotNull("No children loaded from the database!", insertedChildren);
assertEquals(insertedChildren.length, 3);
assertEquals(insertedChildren[0].name, "Aidan");
assertEquals(insertedChildren[0].parentId, (long) insertedIds[0]);
assertEquals(insertedChildren[1].name, "Dylan");
assertEquals(insertedChildren[1].parentId, (long) insertedIds[1]);
assertEquals(insertedChildren[2].name, "Elias");
assertEquals(insertedChildren[2].parentId, (long) insertedIds[1]);
Person[] queriedPeople = Inquiry.get(INSTANCE_NAME).select(Person.class).sort("name").all();
assertNotNull("No parent people loaded from the database!", queriedPeople);
assertEquals(queriedPeople.length, 2);
Person first = queriedPeople[0];
assertEquals(first.name, "Angela");
assertEquals(first.children.size(), 2);
assertEquals(first.children.get(0).name, "Dylan");
assertEquals(first.children.get(1).name, "Elias");
Person second = queriedPeople[1];
assertEquals(second.name, "Natalie");
assertEquals(second.children.size(), 1);
assertEquals(second.children.get(0).name, "Aidan");
}
@Test
public void test_update_foreign_keys() {
Person[] people = new Person[] {new Person("Natalie", 43), new Person("Angela", 41)};
people[0].children.add(new Child("Aidan"));
people[1].children.add(new Child("Dylan"));
people[1].children.add(new Child("Elias"));
Long[] insertedIds = Inquiry.get(INSTANCE_NAME).insert(Person.class).values(people).run();
assertEquals(insertedIds.length, 2);
Person[] queriedPeople = Inquiry.get(INSTANCE_NAME).select(Person.class).sort("name").all();
assertNotNull(queriedPeople);
queriedPeople[0].children.get(0).name += " Haddeland";
queriedPeople[0].children.get(1).name += " Brasel";
queriedPeople[1].children.get(0).name += " Follestad";
Inquiry.get(INSTANCE_NAME).update(Person.class).values(queriedPeople).run();
Person[] queriedPeople2 = Inquiry.get(INSTANCE_NAME).select(Person.class).sort("name").all();
assertNotNull(queriedPeople2);
assertEquals(queriedPeople2[0].children.get(0).name, "Dylan Haddeland");
assertEquals(queriedPeople2[0].children.get(1).name, "Elias Brasel");
assertEquals(queriedPeople2[1].children.get(0).name, "Aidan Follestad");
}
@Test
public void test_delete_foreign_keys() {
Person[] people = new Person[] {new Person("Natalie", 43), new Person("Angela", 41)};
people[0].children.add(new Child("Aidan"));
people[1].children.add(new Child("Dylan"));
people[1].children.add(new Child("Elias"));
Long[] insertedIds = Inquiry.get(INSTANCE_NAME).insert(Person.class).values(people).run();
assertEquals(insertedIds.length, 2);
Person[] queriedPeople = Inquiry.get(INSTANCE_NAME).select(Person.class).sort("name").all();
assertNotNull(queriedPeople);
queriedPeople[0].children.remove(0);
queriedPeople[1].children.remove(0);
Inquiry.get(INSTANCE_NAME).update(Person.class).values(queriedPeople).run();
Person[] queriedPeople2 = Inquiry.get(INSTANCE_NAME).select(Person.class).sort("name").all();
assertNotNull(queriedPeople2);
assertEquals(queriedPeople2[0].children.size(), 1);
assertEquals(queriedPeople2[0].children.get(0).name, "Elias");
assertEquals(queriedPeople2[1].children.size(), 0);
}
@After
public void cleanup() {
Inquiry.destroy(INSTANCE_NAME);
}
}
|
|
package com.develhack.lombok.javac.handlers.feature;
import static com.sun.tools.javac.code.Flags.*;
import static lombok.javac.Javac.*;
import java.lang.annotation.Annotation;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.Map;
import lombok.ConfigurationKeys;
import lombok.core.AnnotationValues;
import lombok.core.AnnotationValues.AnnotationValueDecodeFail;
import lombok.javac.Javac;
import lombok.javac.JavacNode;
import lombok.javac.handlers.HandleEqualsAndHashCode;
import lombok.javac.handlers.HandleToString;
import lombok.javac.handlers.JavacHandlerUtil;
import lombok.javac.handlers.JavacHandlerUtil.FieldAccess;
import com.develhack.Conditions;
import com.develhack.annotation.feature.Access;
import com.develhack.annotation.feature.Equatable;
import com.develhack.annotation.feature.ExcludedFrom;
import com.develhack.annotation.feature.Meta;
import com.develhack.annotation.feature.Stringable;
import com.develhack.lombok.NameResolver;
import com.develhack.lombok.javac.handlers.AbstractJavacHandler;
import com.sun.source.tree.Tree.Kind;
import com.sun.tools.javac.code.Flags;
import com.sun.tools.javac.parser.Tokens.Comment;
import com.sun.tools.javac.tree.DocCommentTable;
import com.sun.tools.javac.tree.JCTree;
import com.sun.tools.javac.tree.JCTree.JCAnnotation;
import com.sun.tools.javac.tree.JCTree.JCAssign;
import com.sun.tools.javac.tree.JCTree.JCBlock;
import com.sun.tools.javac.tree.JCTree.JCClassDecl;
import com.sun.tools.javac.tree.JCTree.JCCompilationUnit;
import com.sun.tools.javac.tree.JCTree.JCExpression;
import com.sun.tools.javac.tree.JCTree.JCLiteral;
import com.sun.tools.javac.tree.JCTree.JCMethodDecl;
import com.sun.tools.javac.tree.JCTree.JCModifiers;
import com.sun.tools.javac.tree.JCTree.JCNewArray;
import com.sun.tools.javac.tree.JCTree.JCStatement;
import com.sun.tools.javac.tree.JCTree.JCTypeParameter;
import com.sun.tools.javac.tree.JCTree.JCVariableDecl;
import com.sun.tools.javac.util.List;
import com.sun.tools.javac.util.ListBuffer;
import com.sun.tools.javac.util.Name;
public abstract class AbstractFeatureHandler<T extends Annotation> extends AbstractJavacHandler<T> {
public static final int PRIORITY = Integer.MIN_VALUE;
public AbstractFeatureHandler(Class<T> annotationType) {
super(annotationType);
}
protected int toModifier(Access access) {
switch (access) {
case DEFAULT:
return 0;
case PUBLIC:
return PUBLIC;
case PROTECTED:
return PROTECTED;
case PRIVATE:
return PRIVATE;
default:
throw new AssertionError(access.toString());
}
}
protected boolean argumentTypesEquals(JCMethodDecl method, List<JCVariableDecl> arguments) {
if (Conditions.isEmpty(method.getParameters())) {
if (!Conditions.isEmpty(arguments)) return false;
return true;
}
if (Conditions.isEmpty(arguments)) return false;
if (method.getParameters().size() != arguments.size()) return false;
Iterator<JCVariableDecl> argumentItr = method.getParameters().iterator();
for (JCVariableDecl argument : arguments) {
if (!argument.vartype.toString().equals(argumentItr.next().vartype.toString())) {
return false;
}
}
return true;
}
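// Returns true if the field's @ExcludedFrom annotation names the given feature.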
protected boolean isExcludedFrom(JCVariableDecl field, Class<? extends java.lang.annotation.Annotation> feature) {
AnnotationValues<ExcludedFrom> excludeFrom = findAnnotationValues(ExcludedFrom.class, field.mods.annotations);
if (excludeFrom == null) return false;
try {
for (Class<? extends java.lang.annotation.Annotation> specifiedFeature : excludeFrom.getInstance().value()) {
if (feature.equals(specifiedFeature)) return true;
}
} catch (AnnotationValueDecodeFail e) { /* annotation value could not be decoded; treat the field as not excluded */ }
return false;
}
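// Finds an existing constructor whose parameter types match the given fields, or null if there is none.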
protected JCMethodDecl findConstructor(List<JCVariableDecl> fieldsToBeInitialize) {
if (typeNode == null) return null;
JCClassDecl clazz = (JCClassDecl) typeNode.get();
for (JCTree child : clazz.defs) {
if (child.getKind() != Kind.METHOD) continue;
JCMethodDecl method = (JCMethodDecl) child;
if (!isConstructor(method)) continue;
if (argumentTypesEquals(method, fieldsToBeInitialize)) return method;
}
return null;
}
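// Collects fields carrying all required modifiers and none of the excluded ones, optionally only uninitialized fields, skipping fields excluded from the given feature.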
protected List<JCVariableDecl> findFields(int requiredModifiers, int excludedModifiers,
Class<? extends java.lang.annotation.Annotation> excludeFeature, boolean notInitializedOnly) {
if (typeNode == null) return null;
JCClassDecl clazz = (JCClassDecl) typeNode.get();
ListBuffer<JCVariableDecl> fields = new ListBuffer<JCVariableDecl>();
for (JCTree child : clazz.defs) {
if (child.getKind() != Kind.VARIABLE) continue;
JCVariableDecl field = (JCVariableDecl) child;
if ((field.getModifiers().flags & requiredModifiers) != requiredModifiers) continue;
if ((field.getModifiers().flags & excludedModifiers) != 0) continue;
if (notInitializedOnly && field.getInitializer() != null) continue;
if (isExcludedFrom(field, excludeFeature)) continue;
fields.append(field);
}
return fields.toList();
}
protected JCExpression findSuperInterface(Class<?> interfaceType, List<JCExpression> superInterfaces) {
if (Conditions.isEmpty(superInterfaces)) return null;
String interfaceName = interfaceType.getSimpleName();
for (JCExpression superInterface : superInterfaces) {
if (getLastToken(superInterface).equals(interfaceName)) return superInterface;
}
return null;
}
protected List<String> findExcludeFields(Class<? extends java.lang.annotation.Annotation> feature) {
if (typeNode == null) return null;
JCClassDecl clazz = (JCClassDecl) typeNode.get();
ArrayList<String> excludes = new ArrayList<String>();
for (JCTree child : clazz.defs) {
if (child.getKind() != com.sun.source.tree.Tree.Kind.VARIABLE) continue;
JCVariableDecl field = (JCVariableDecl) child;
if (isExcludedFrom(field, feature)) excludes.add(field.name.toString());
}
return List.from(excludes.toArray(new String[excludes.size()]));
}
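// Generates a constructor that takes one parameter per field and assigns it to the corresponding field.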
protected JCMethodDecl generateConstructor(List<JCVariableDecl> fieldsToBeInitialize, int modifiers) {
List<JCTypeParameter> typeParameters = List.nil();
ListBuffer<JCVariableDecl> parameters = new ListBuffer<JCVariableDecl>();
List<JCExpression> thrown = List.nil();
ListBuffer<JCStatement> statements = new ListBuffer<JCStatement>();
for (JCVariableDecl fieldToBeInitialize : fieldsToBeInitialize) {
String argumentName = NameResolver.resolvePropertyName(sourceNode.getAst(), fieldToBeInitialize.name.toString());
JCExpression argumentReference = maker.Ident(sourceNode.toName(argumentName));
JCExpression fieldReference = generateFieldReference(fieldToBeInitialize);
JCVariableDecl argument = maker.VarDef(maker.Modifiers(Flags.PARAMETER), sourceNode.toName(argumentName),
fieldToBeInitialize.vartype, null);
parameters.append(argument);
JCAssign assignment = maker.Assign(fieldReference, argumentReference);
statements.append(maker.Exec(assignment));
}
return maker.MethodDef(maker.Modifiers(modifiers), sourceNode.toName("<init>"), null, typeParameters,
parameters.toList(), thrown, maker.Block(0, statements.toList()), null);
}
protected JCMethodDecl generateGetter(JCVariableDecl field, long modifiers) {
String getterName = NameResolver.resolveGetterName(sourceNode.getAst(), field.name.toString(), isBoolean(field));
JCExpression fieldReference = generateFieldReference(field);
JCStatement returnStatement = maker.Return(fieldReference);
JCModifiers mods = maker.Modifiers(modifiers, List.<JCAnnotation> nil());
Name name = sourceNode.toName(getterName);
JCExpression returnType = field.vartype;
List<JCTypeParameter> typeParameters = List.nil();
List<JCVariableDecl> parameters = List.nil();
List<JCExpression> thrown = List.nil();
JCBlock body = maker.Block(0, List.of(returnStatement));
JCExpression defaultValue = null;
return maker.MethodDef(mods, name, returnType, typeParameters, parameters, thrown, body, defaultValue);
}
protected JCMethodDecl generateSetter(JCVariableDecl field, long modifiers) {
if (typeNode == null) return null;
String setterName = NameResolver.resolveSetterName(sourceNode.getAst(), field.name.toString(), isBoolean(field));
String argumentName = NameResolver.resolvePropertyName(sourceNode.getAst(), field.name.toString());
JCExpression argumentReference = maker.Ident(sourceNode.toName(argumentName));
JCExpression fieldReference = generateFieldReference(field);
JCAssign assignment = maker.Assign(fieldReference, argumentReference);
JCVariableDecl argument = maker.VarDef(maker.Modifiers(Flags.PARAMETER), sourceNode.toName(argumentName),
field.vartype, null);
JCModifiers mods = maker.Modifiers(modifiers | Flags.PARAMETER, List.<JCAnnotation> nil());
Name name = sourceNode.toName(setterName);
JCExpression returnType = maker.Type(Javac.createVoidType(maker, CTC_VOID));
List<JCTypeParameter> typeParameters = List.nil();
List<JCVariableDecl> parameters = List.of(argument);
List<JCExpression> thrown = List.nil();
JCBlock body = maker.Block(0, List.<JCStatement> of(maker.Exec(assignment)));
JCExpression defaultValue = null;
return maker.MethodDef(mods, name, returnType, typeParameters, parameters, thrown, body, defaultValue);
}
protected JCExpression generateFieldReference(JCVariableDecl field) {
JCExpression fieldRef = null;
if ((field.mods.flags & Flags.STATIC) != 0) {
fieldRef = generateNameReference(typeNode.getName(), field.name.toString());
} else {
fieldRef = maker.Select(maker.Ident(sourceNode.toName("this")), field.name);
}
return fieldRef;
}
protected void supplementFinalModifier() {
if (typeNode == null) return;
JCClassDecl clazz = (JCClassDecl) typeNode.get();
clazz.mods.flags |= FINAL;
JavacNode modsNode = sourceNode.getNodeFor(clazz.mods);
if (modsNode == null) return;
modsNode.getAst().setChanged();
}
protected void supplementConstructor(Access access, List<JCVariableDecl> fieldsToBeInitialize) {
if (typeNode == null) return;
int modifiers = toModifier(access);
JCMethodDecl constructor = findConstructor(fieldsToBeInitialize);
if (constructor != null) {
if (access == Access.NONE || (constructor.mods.flags & modifiers) != modifiers) {
JavacNode constructorNode = sourceNode.getNodeFor(constructor);
if (constructorNode != null) {
constructorNode.addWarning(String.format("conflicted with the %s.", source));
}
}
return;
}
constructor = generateConstructor(fieldsToBeInitialize, modifiers);
injectMethod(recursiveSetGeneratedBy(constructor));
StringBuilder constructorDocComment = new StringBuilder();
String constructorDocCommentTemplate = sourceNode.getAst().readConfiguration(
ConfigurationKeys.CONSTRUCTOR_COMMENT_TEMPLATE);
if (constructorDocCommentTemplate != null) {
JCClassDecl clazz = ((JCClassDecl) typeNode.get());
Object[] docCommentArgs = buildJavadocCommentArguments(clazz, clazz.name.toString(), clazz.mods.annotations);
constructorDocComment.append(MessageFormat.format(constructorDocCommentTemplate, docCommentArgs));
constructorDocComment.append('\n');
}
for (JCVariableDecl fieldToBeInitialize : fieldsToBeInitialize) {
String fieldComment = getJavadocComment(fieldToBeInitialize);
if (fieldComment == null) continue;
String argumentName = NameResolver.resolvePropertyName(sourceNode.getAst(), fieldToBeInitialize.name.toString());
constructorDocComment.append("@param ");
constructorDocComment.append(argumentName);
constructorDocComment.append(' ');
constructorDocComment.append(fieldComment);
constructorDocComment.append("\n");
}
if (constructorDocComment.length() != 0) {
setJavadocComment(constructor, constructorDocComment.toString());
}
}
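// Rewrites the compiler-generated default constructor to be private and throw AssertionError; warns about any other non-private constructor.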
protected void supplementUncallableConstructor() {
if (typeNode == null) return;
JavacNode defaultConstructorNode = null;
for (JavacNode child : typeNode.down()) {
if (child.getKind() != lombok.core.AST.Kind.METHOD) continue;
JCMethodDecl method = (JCMethodDecl) child.get();
if (!isConstructor(method)) continue;
if ((method.mods.flags & GENERATEDCONSTR) != 0) {
defaultConstructorNode = child;
break;
}
if ((method.mods.flags & PRIVATE) == 0) {
child.addWarning(String.format("class annotated by @%s must not have callable constructor.",
getAnnotationName()));
continue;
}
}
if (defaultConstructorNode == null) {
return;
}
JCExpression assertionError = maker.NewClass(null, List.<JCExpression> nil(),
generateNameReference(AssertionError.class.getName()), List.<JCExpression> nil(), null);
JCMethodDecl defaultConstructor = (JCMethodDecl) defaultConstructorNode.get();
defaultConstructor.mods = maker.Modifiers(Flags.PRIVATE);
defaultConstructor.typarams = List.nil();
defaultConstructor.params = List.nil();
defaultConstructor.thrown = List.nil();
defaultConstructor.body = maker.Block(0, List.<JCStatement> of(maker.Throw(assertionError)));
recursiveSetGeneratedBy(defaultConstructor);
}
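// Generates a getter for the field unless one already exists; warns when an existing getter is abstract or has conflicting modifiers.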
protected void supplementGetter(JCVariableDecl field, Access access) {
if (typeNode == null) return;
JCMethodDecl getter = findGetter(field);
if (access == Access.NONE) {
if (getter != null) {
sourceNode.getNodeFor(getter).addWarning(String.format("conflicted with the %s.", source));
}
return;
}
int modifiers = toModifier(access) | (int) (field.mods.flags & STATIC);
if (getter == null) {
getter = generateGetter(field, modifiers);
if (getter == null) return;
injectMethod(recursiveSetGeneratedBy(getter));
String argumentName = NameResolver.resolvePropertyName(sourceNode.getAst(), field.name.toString());
Object[] docCommentArgs = buildJavadocCommentArguments(field, argumentName, field.mods.annotations);
String getterDocCommentTemplate = sourceNode.getAst().readConfiguration(ConfigurationKeys.GETTER_COMMENT_TEMPLATE);
StringBuilder getterDocComment = new StringBuilder();
if (getterDocCommentTemplate != null) {
getterDocComment.append(getterDocCommentTemplate);
getterDocComment.append('\n');
}
if (docCommentArgs[1] != null) {
getterDocComment.append("@return {1}");
}
if (getterDocComment.length() != 0) {
setJavadocComment(getter, MessageFormat.format(getterDocComment.toString(), docCommentArgs));
}
return;
}
if (isAbstract(getter)) {
sourceNode.addWarning(String.format("abstract getter of '%s' already exists.", field.name));
return;
}
if (!modifiersMatches(getter.mods.flags, modifiers, STATIC, PUBLIC, PROTECTED, PRIVATE)) {
sourceNode.getNodeFor(getter).addWarning(String.format("conflicted with the %s.", source));
return;
}
}
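// Generates a setter for non-final fields unless one already exists; warns when an existing setter is abstract or has conflicting modifiers.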
protected void supplementSetter(JCVariableDecl field, Access access) {
if (typeNode == null) return;
if ((field.mods.flags & FINAL) != 0) {
return;
}
JCMethodDecl setter = findSetter(field);
if (access == Access.NONE) {
if (setter != null) {
sourceNode.getNodeFor(setter).addWarning(String.format("conflicted with the %s.", source));
}
return;
}
int modifiers = toModifier(access) | (int) (field.mods.flags & STATIC);
if (setter == null) {
setter = generateSetter(field, modifiers);
if (setter == null) return;
injectMethod(recursiveSetGeneratedBy(setter));
String argumentName = NameResolver.resolvePropertyName(sourceNode.getAst(), field.name.toString());
Object[] docCommentArgs = buildJavadocCommentArguments(field, argumentName, field.mods.annotations);
String setterDocCommentTemplate = sourceNode.getAst().readConfiguration(ConfigurationKeys.SETTER_COMMENT_TEMPLATE);
StringBuilder setterDocComment = new StringBuilder();
if (setterDocCommentTemplate != null) {
setterDocComment.append(setterDocCommentTemplate);
setterDocComment.append('\n');
}
if (docCommentArgs[1] != null) {
setterDocComment.append("@param {0} {1}");
}
if (setterDocComment.length() != 0) {
setJavadocComment(setter, MessageFormat.format(setterDocComment.toString(), docCommentArgs));
}
return;
}
if (isAbstract(setter)) {
sourceNode.addWarning(String.format("abstract setter of '%s' already exists.", field.name));
return;
}
if (!modifiersMatches(setter.mods.flags, modifiers, STATIC, PUBLIC, PROTECTED, PRIVATE)) {
sourceNode.getNodeFor(setter).addWarning(String.format("conflicted with the %s.", source));
return;
}
}
protected void supplementSuperInterface(Class<?> interfaceType) {
if (typeNode == null) return;
JCClassDecl clazz = (JCClassDecl) typeNode.get();
if (findSuperInterface(interfaceType, clazz.implementing) != null) return;
JCExpression typeReference = generateNameReference(interfaceType.getName());
if (Conditions.isEmpty(clazz.implementing)) {
clazz.implementing = List.of(typeReference);
} else {
clazz.implementing = clazz.implementing.append(typeReference);
}
}
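// Adds the given value to the class-level @SuppressWarnings, creating the annotation if absent or appending to its value array.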
protected void supplementSuppressWaring(String suppressed) {
if (typeNode == null) return;
JCClassDecl clazz = (JCClassDecl) typeNode.get();
JCAnnotation suppressWarnings = findAnnotation(SuppressWarnings.class, clazz.mods.annotations);
if (suppressWarnings == null) {
suppressWarnings = maker.Annotation(generateNameReference(SuppressWarnings.class.getName()),
List.<JCExpression> of(maker.Literal(suppressed)));
recursiveSetGeneratedBy(suppressWarnings);
if (Conditions.isEmpty(clazz.mods.annotations)) {
clazz.mods.annotations = List.of(suppressWarnings);
} else {
clazz.mods.annotations = clazz.mods.annotations.append(suppressWarnings);
}
return;
}
for (JCExpression arg : suppressWarnings.args) {
if (!(arg instanceof JCAssign)) continue;
JCAssign assign = (JCAssign) arg;
if (!assign.lhs.toString().equals("value")) continue;
if (assign.rhs instanceof JCLiteral) {
JCLiteral literal = (JCLiteral) assign.rhs;
if (literal.value.equals(suppressed)) return;
List<JCExpression> args = List.<JCExpression> of(maker.Literal(literal.value), maker.Literal(suppressed));
assign.rhs = recursiveSetGeneratedBy(maker.NewArray(null, List.<JCExpression> nil(), args));
return;
}
if (!(assign.rhs instanceof JCNewArray)) continue;
JCNewArray values = (JCNewArray) assign.rhs;
for (JCExpression value : values.elems) {
if (!(value instanceof JCLiteral)) continue;
JCLiteral literal = (JCLiteral) value;
if (literal.value.equals(suppressed)) return;
}
values.elems = values.elems.append(recursiveSetGeneratedBy(maker.Literal(suppressed)));
return;
}
}
protected void supplementEqualsAndHashCode(boolean evaluateSuperclass) {
if (typeNode == null) return;
List<String> excludes = findExcludeFields(Equatable.class);
List<String> includes = null;
List<JCAnnotation> onParam = List.nil();
new HandleEqualsAndHashCode().generateMethods(typeNode, sourceNode, excludes, includes, evaluateSuperclass, false,
FieldAccess.ALWAYS_FIELD, onParam);
}
protected void supplementToString(boolean evaluateSuperclass) {
if (typeNode == null) return;
List<String> excludes = findExcludeFields(Stringable.class);
List<String> includes = null;
new HandleToString().generateToString(typeNode, sourceNode, excludes, includes, true, evaluateSuperclass, false,
FieldAccess.ALWAYS_FIELD);
}
protected void injectMethod(JCMethodDecl method) {
if (typeNode == null) return;
JavacHandlerUtil.injectMethod(typeNode, method);
typeNode.rebuild();
}
// Javac only.
protected JCTree getExtendsClause(JCClassDecl clazz) {
return Javac.getExtendsClause(clazz);
}
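// Reads the Javadoc attached to the tree, supporting both the Map-based and the DocCommentTable-based comment storage.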
@SuppressWarnings("unchecked")
protected String getJavadocComment(JCTree tree) {
JavacNode node = sourceNode.getNodeFor(tree);
if (node == null) return null;
try {
JCCompilationUnit compilationUnit = ((JCCompilationUnit) node.top().get());
Object docComments = Javac.getDocComments(compilationUnit);
if (docComments instanceof Map) {
return ((Map<JCTree, String>) docComments).get(tree);
}
if (Javac.instanceOfDocCommentTable(docComments)) {
return Java8Comment.get(docComments, tree);
}
} catch (Exception e) { /* comment lookup is best-effort; fall through and return null */ }
return null;
}
@SuppressWarnings("unchecked")
protected void setJavadocComment(final JCTree tree, final String docComment) {
JavacNode node = sourceNode.getNodeFor(tree);
if (node == null) return;
try {
JCCompilationUnit compilationUnit = ((JCCompilationUnit) node.top().get());
Object docComments = Javac.getDocComments(compilationUnit);
if (docComments instanceof Map) {
((Map<JCTree, String>) docComments).put(tree, docComment);
}
if (Javac.instanceOfDocCommentTable(docComments)) {
Java8Comment.set(docComments, tree, docComment);
}
} catch (Exception e) { /* setting the Javadoc is best-effort; ignore failures */ }
}
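// Builds MessageFormat arguments for the comment templates: {0} = property name, {1} = field Javadoc, followed by any @Meta values.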
private Object[] buildJavadocCommentArguments(JCTree tree, String name, List<JCAnnotation> annotations) {
AnnotationValues<Meta> metaValues = findAnnotationValues(Meta.class, annotations);
Object[] docCommentArgs;
if (metaValues == null) {
docCommentArgs = new Object[] { name, getJavadocComment(tree) };
} else {
String[] additionalArgs = metaValues.getInstance().value();
docCommentArgs = new Object[2 + additionalArgs.length];
docCommentArgs[0] = name;
docCommentArgs[1] = getJavadocComment(tree);
System.arraycopy(additionalArgs, 0, docCommentArgs, 2, additionalArgs.length);
}
return docCommentArgs;
}
private static class Java8Comment {
static String get(Object docComments, JCTree tree) {
return ((DocCommentTable) docComments).getCommentText(tree);
}
static void set(Object docComments, final JCTree tree, final String docComment) {
((DocCommentTable) docComments).putComment(tree, new Comment() {
@Override
public String getText() {
return docComment;
}
@Override
public int getSourcePos(int index) {
return -1;
}
@Override
public CommentStyle getStyle() {
return CommentStyle.JAVADOC;
}
@Override
public boolean isDeprecated() {
return JavacHandlerUtil.nodeHasDeprecatedFlag(tree);
}
});
}
}
}
|
|
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution
// Automatically generated by LOXI from template of_class.java
// Do not modify
package org.projectfloodlight.openflow.protocol.ver14;
import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Set;
import org.jboss.netty.buffer.ChannelBuffer;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;
class OFOxmBsnUdf4MaskedVer14 implements OFOxmBsnUdf4Masked {
private static final Logger logger = LoggerFactory.getLogger(OFOxmBsnUdf4MaskedVer14.class);
// version: 1.4
final static byte WIRE_VERSION = 5;
final static int LENGTH = 12;
private final static UDF DEFAULT_VALUE = UDF.ZERO;
private final static UDF DEFAULT_VALUE_MASK = UDF.ZERO;
// OF message fields
private final UDF value;
private final UDF mask;
//
// Immutable default instance
final static OFOxmBsnUdf4MaskedVer14 DEFAULT = new OFOxmBsnUdf4MaskedVer14(
DEFAULT_VALUE, DEFAULT_VALUE_MASK
);
// package private constructor - used by readers, builders, and factory
OFOxmBsnUdf4MaskedVer14(UDF value, UDF mask) {
if(value == null) {
throw new NullPointerException("OFOxmBsnUdf4MaskedVer14: property value cannot be null");
}
if(mask == null) {
throw new NullPointerException("OFOxmBsnUdf4MaskedVer14: property mask cannot be null");
}
this.value = value;
this.mask = mask;
}
// Accessors for OF message fields
@Override
public long getTypeLen() {
return 0x31908L;
}
@Override
public UDF getValue() {
return value;
}
@Override
public UDF getMask() {
return mask;
}
@Override
public MatchField<UDF> getMatchField() {
return MatchField.BSN_UDF4;
}
@Override
public boolean isMasked() {
return true;
}
public OFOxm<UDF> getCanonical() {
if (UDF.NO_MASK.equals(mask)) {
return new OFOxmBsnUdf4Ver14(value);
} else if(UDF.FULL_MASK.equals(mask)) {
return null;
} else {
return this;
}
}
@Override
public OFVersion getVersion() {
return OFVersion.OF_14;
}
public OFOxmBsnUdf4Masked.Builder createBuilder() {
return new BuilderWithParent(this);
}
static class BuilderWithParent implements OFOxmBsnUdf4Masked.Builder {
final OFOxmBsnUdf4MaskedVer14 parentMessage;
// OF message fields
private boolean valueSet;
private UDF value;
private boolean maskSet;
private UDF mask;
BuilderWithParent(OFOxmBsnUdf4MaskedVer14 parentMessage) {
this.parentMessage = parentMessage;
}
@Override
public long getTypeLen() {
return 0x31908L;
}
@Override
public UDF getValue() {
return value;
}
@Override
public OFOxmBsnUdf4Masked.Builder setValue(UDF value) {
this.value = value;
this.valueSet = true;
return this;
}
@Override
public UDF getMask() {
return mask;
}
@Override
public OFOxmBsnUdf4Masked.Builder setMask(UDF mask) {
this.mask = mask;
this.maskSet = true;
return this;
}
@Override
public MatchField<UDF> getMatchField() {
return MatchField.BSN_UDF4;
}
@Override
public boolean isMasked() {
return true;
}
@Override
public OFOxm<UDF> getCanonical()throws UnsupportedOperationException {
throw new UnsupportedOperationException("Property canonical not supported in version 1.4");
}
@Override
public OFVersion getVersion() {
return OFVersion.OF_14;
}
@Override
public OFOxmBsnUdf4Masked build() {
UDF value = this.valueSet ? this.value : parentMessage.value;
if(value == null)
throw new NullPointerException("Property value must not be null");
UDF mask = this.maskSet ? this.mask : parentMessage.mask;
if(mask == null)
throw new NullPointerException("Property mask must not be null");
//
return new OFOxmBsnUdf4MaskedVer14(
value,
mask
);
}
}
static class Builder implements OFOxmBsnUdf4Masked.Builder {
// OF message fields
private boolean valueSet;
private UDF value;
private boolean maskSet;
private UDF mask;
@Override
public long getTypeLen() {
return 0x31908L;
}
@Override
public UDF getValue() {
return value;
}
@Override
public OFOxmBsnUdf4Masked.Builder setValue(UDF value) {
this.value = value;
this.valueSet = true;
return this;
}
@Override
public UDF getMask() {
return mask;
}
@Override
public OFOxmBsnUdf4Masked.Builder setMask(UDF mask) {
this.mask = mask;
this.maskSet = true;
return this;
}
@Override
public MatchField<UDF> getMatchField() {
return MatchField.BSN_UDF4;
}
@Override
public boolean isMasked() {
return true;
}
@Override
public OFOxm<UDF> getCanonical()throws UnsupportedOperationException {
throw new UnsupportedOperationException("Property canonical not supported in version 1.4");
}
@Override
public OFVersion getVersion() {
return OFVersion.OF_14;
}
//
@Override
public OFOxmBsnUdf4Masked build() {
UDF value = this.valueSet ? this.value : DEFAULT_VALUE;
if(value == null)
throw new NullPointerException("Property value must not be null");
UDF mask = this.maskSet ? this.mask : DEFAULT_VALUE_MASK;
if(mask == null)
throw new NullPointerException("Property mask must not be null");
return new OFOxmBsnUdf4MaskedVer14(
value,
mask
);
}
}
final static Reader READER = new Reader();
static class Reader implements OFMessageReader<OFOxmBsnUdf4Masked> {
@Override
public OFOxmBsnUdf4Masked readFrom(ChannelBuffer bb) throws OFParseError {
// fixed value property typeLen == 0x31908L
int typeLen = bb.readInt();
if(typeLen != 0x31908)
throw new OFParseError("Wrong typeLen: Expected=0x31908L(0x31908L), got="+typeLen);
UDF value = UDF.read4Bytes(bb);
UDF mask = UDF.read4Bytes(bb);
OFOxmBsnUdf4MaskedVer14 oxmBsnUdf4MaskedVer14 = new OFOxmBsnUdf4MaskedVer14(
value,
mask
);
if(logger.isTraceEnabled())
logger.trace("readFrom - read={}", oxmBsnUdf4MaskedVer14);
return oxmBsnUdf4MaskedVer14;
}
}
public void putTo(PrimitiveSink sink) {
FUNNEL.funnel(this, sink);
}
final static OFOxmBsnUdf4MaskedVer14Funnel FUNNEL = new OFOxmBsnUdf4MaskedVer14Funnel();
static class OFOxmBsnUdf4MaskedVer14Funnel implements Funnel<OFOxmBsnUdf4MaskedVer14> {
private static final long serialVersionUID = 1L;
@Override
public void funnel(OFOxmBsnUdf4MaskedVer14 message, PrimitiveSink sink) {
// fixed value property typeLen = 0x31908L
sink.putInt(0x31908);
message.value.putTo(sink);
message.mask.putTo(sink);
}
}
public void writeTo(ChannelBuffer bb) {
WRITER.write(bb, this);
}
final static Writer WRITER = new Writer();
static class Writer implements OFMessageWriter<OFOxmBsnUdf4MaskedVer14> {
@Override
public void write(ChannelBuffer bb, OFOxmBsnUdf4MaskedVer14 message) {
// fixed value property typeLen = 0x31908L
bb.writeInt(0x31908);
message.value.write4Bytes(bb);
message.mask.write4Bytes(bb);
}
}
@Override
public String toString() {
StringBuilder b = new StringBuilder("OFOxmBsnUdf4MaskedVer14(");
b.append("value=").append(value);
b.append(", ");
b.append("mask=").append(mask);
b.append(")");
return b.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
OFOxmBsnUdf4MaskedVer14 other = (OFOxmBsnUdf4MaskedVer14) obj;
if (value == null) {
if (other.value != null)
return false;
} else if (!value.equals(other.value))
return false;
if (mask == null) {
if (other.mask != null)
return false;
} else if (!mask.equals(other.mask))
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((value == null) ? 0 : value.hashCode());
result = prime * result + ((mask == null) ? 0 : mask.hashCode());
return result;
}
}
|
|
/*
* Copyright 2019 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.graphite.platforms.plugin.client;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.mockito.ArgumentMatchers.anyString;
import com.google.api.services.cloudkms.v1.model.CryptoKey;
import com.google.api.services.cloudkms.v1.model.CryptoKeyVersion;
import com.google.api.services.cloudkms.v1.model.KeyRing;
import com.google.api.services.cloudkms.v1.model.Location;
import com.google.api.services.cloudkms.v1.model.PublicKey;
import com.google.common.collect.ImmutableList;
import java.io.IOException;
import java.security.NoSuchAlgorithmException;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.mockito.junit.MockitoJUnitRunner;
/** Tests {@link CloudKMSClient}. */
@RunWith(MockitoJUnitRunner.class)
public class CloudKMSClientTest {
private static final String TEST_PROJECT_ID = "test-project";
private static final String TEST_LOCATION = "test-location";
private static final String OTHER_LOCATION = "other-location";
private static final String TEST_KEY_RING = "test-key-ring";
private static final String OTHER_KEY_RING = "other-key-ring";
private static final String TEST_CRYPTO_KEY = "test-crypto-key";
private static final String OTHER_CRYPTO_KEY = "other-crypto-key";
private static final String TEST_CRYPTO_KEY_VERSION = "test-crypto-key-version";
private static final String OTHER_CRYPTO_KEY_VERSION = "other-crypto-key-version";
private static final String TEST_PAYLOAD = "test-payload";
private static final String TEST_SIGNATURE = "test-signature";
private static final String PUBLIC_KEY_ALGORITHM = "RSA_SIGN_PKCS1_4096_SHA512";
@Test(expected = IllegalArgumentException.class)
public void testListLocationsErrorWithNullProjectId() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.listLocations(null);
}
@Test(expected = IllegalArgumentException.class)
public void testListLocationsErrorWithEmptyProjectId() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.listLocations("");
}
@Test(expected = IOException.class)
public void testListLocationsErrorWithIOException() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
Mockito.when(cloudKMS.listLocations(anyString())).thenThrow(IOException.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.listLocations(TEST_PROJECT_ID);
}
@Test
public void testListLocations() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
Mockito.when(cloudKMS.listLocations(TEST_PROJECT_ID))
.thenReturn(initLocationList(ImmutableList.of(TEST_LOCATION, OTHER_LOCATION)));
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
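// Expected list is in name order: the client is expected to sort the locations returned by the wrapper.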
List<Location> expected = initLocationList(ImmutableList.of(OTHER_LOCATION, TEST_LOCATION));
List<Location> actual = cloudKMSClient.listLocations(TEST_PROJECT_ID);
assertNotNull(actual);
assertEquals(expected, actual);
}
@Test(expected = IllegalArgumentException.class)
public void testListKeyRingsErrorWithNullProjectId() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.listKeyRings(null, TEST_LOCATION);
}
@Test(expected = IllegalArgumentException.class)
public void testListKeyRingsErrorWithEmptyProjectId() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.listKeyRings("", TEST_LOCATION);
}
@Test(expected = IllegalArgumentException.class)
public void testListKeyRingsErrorWithNullLocation() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.listKeyRings(TEST_PROJECT_ID, null);
}
@Test(expected = IllegalArgumentException.class)
public void testListKeyRingsErrorWithEmptyLocation() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.listKeyRings(TEST_PROJECT_ID, "");
}
@Test(expected = IOException.class)
public void testListKeyRingsErrorWithIOException() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
Mockito.when(cloudKMS.listKeyRings(anyString(), anyString())).thenThrow(IOException.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.listKeyRings(TEST_PROJECT_ID, TEST_LOCATION);
}
@Test
public void testListKeyRings() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
Mockito.when(cloudKMS.listKeyRings(TEST_PROJECT_ID, TEST_LOCATION))
.thenReturn(initKeyRingList(ImmutableList.of(TEST_KEY_RING, OTHER_KEY_RING)));
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
List<KeyRing> expected = initKeyRingList(ImmutableList.of(OTHER_KEY_RING, TEST_KEY_RING));
List<KeyRing> actual = cloudKMSClient.listKeyRings(TEST_PROJECT_ID, TEST_LOCATION);
assertNotNull(actual);
assertEquals(expected, actual);
}
@Test(expected = IllegalArgumentException.class)
public void testListCryptoKeysErrorWithNullProjectId() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.listCryptoKeys(null, TEST_LOCATION, TEST_KEY_RING);
}
@Test(expected = IllegalArgumentException.class)
public void testListCryptoKeysErrorWithEmptyProjectId() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.listCryptoKeys("", TEST_LOCATION, TEST_KEY_RING);
}
@Test(expected = IllegalArgumentException.class)
public void testListCryptoKeysErrorWithNullLocation() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.listCryptoKeys(TEST_PROJECT_ID, null, TEST_KEY_RING);
}
@Test(expected = IllegalArgumentException.class)
public void testListCryptoKeysErrorWithEmptyLocation() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.listCryptoKeys(TEST_PROJECT_ID, "", TEST_KEY_RING);
}
@Test(expected = IllegalArgumentException.class)
public void testListCryptoKeysErrorWithNullKeyRing() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.listCryptoKeys(TEST_PROJECT_ID, TEST_LOCATION, null);
}
@Test(expected = IllegalArgumentException.class)
public void testListCryptoKeysErrorWithEmptyKeyRing() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.listCryptoKeys(TEST_PROJECT_ID, TEST_LOCATION, "");
}
@Test(expected = IOException.class)
public void testListCryptoKeysErrorWithIOException() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
Mockito.when(cloudKMS.listCryptoKeys(anyString(), anyString(), anyString()))
.thenThrow(IOException.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.listCryptoKeys(TEST_PROJECT_ID, TEST_LOCATION, TEST_KEY_RING);
}
@Test
public void testListCryptoKeys() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
Mockito.when(cloudKMS.listCryptoKeys(TEST_PROJECT_ID, TEST_LOCATION, TEST_KEY_RING))
.thenReturn(initCryptoKeyList(ImmutableList.of(TEST_CRYPTO_KEY, OTHER_CRYPTO_KEY)));
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
List<CryptoKey> expected =
initCryptoKeyList(ImmutableList.of(OTHER_CRYPTO_KEY, TEST_CRYPTO_KEY));
List<CryptoKey> actual =
cloudKMSClient.listCryptoKeys(TEST_PROJECT_ID, TEST_LOCATION, TEST_KEY_RING);
assertNotNull(actual);
assertEquals(expected, actual);
}
@Test(expected = IllegalArgumentException.class)
public void testGetCryptoKeyErrorWithNullProjectId() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.getCryptoKey(null, TEST_LOCATION, TEST_KEY_RING, TEST_CRYPTO_KEY);
}
@Test(expected = IllegalArgumentException.class)
public void testGetCryptoKeyErrorWithEmptyProjectId() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.getCryptoKey("", TEST_LOCATION, TEST_KEY_RING, TEST_CRYPTO_KEY);
}
@Test(expected = IllegalArgumentException.class)
public void testGetCryptoKeyErrorWithNullLocation() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.getCryptoKey(TEST_PROJECT_ID, null, TEST_KEY_RING, TEST_CRYPTO_KEY);
}
@Test(expected = IllegalArgumentException.class)
public void testGetCryptoKeyErrorWithEmptyLocation() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.getCryptoKey(TEST_PROJECT_ID, "", TEST_KEY_RING, TEST_CRYPTO_KEY);
}
@Test(expected = IllegalArgumentException.class)
public void testGetCryptoKeyErrorWithNullKeyRing() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.getCryptoKey(TEST_PROJECT_ID, TEST_LOCATION, null, TEST_CRYPTO_KEY);
}
@Test(expected = IllegalArgumentException.class)
public void testGetCryptoKeyErrorWithEmptyKeyRing() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.getCryptoKey(TEST_PROJECT_ID, TEST_LOCATION, "", TEST_CRYPTO_KEY);
}
@Test(expected = IllegalArgumentException.class)
public void testGetCryptoKeyErrorWithNullCryptoKey() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.getCryptoKey(TEST_PROJECT_ID, TEST_LOCATION, TEST_KEY_RING, null);
}
@Test(expected = IllegalArgumentException.class)
public void testGetCryptoKeyErrorWithEmptyCryptoKey() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.getCryptoKey(TEST_PROJECT_ID, TEST_LOCATION, TEST_KEY_RING, "");
}
@Test(expected = IOException.class)
public void testGetCryptoKeyErrorWithIOException() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
Mockito.when(cloudKMS.getCryptoKey(anyString(), anyString(), anyString(), anyString()))
.thenThrow(IOException.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.getCryptoKey(TEST_PROJECT_ID, TEST_LOCATION, TEST_KEY_RING, TEST_CRYPTO_KEY);
}
@Test
public void testGetCryptoKey() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
Mockito.when(
cloudKMS.getCryptoKey(TEST_PROJECT_ID, TEST_LOCATION, TEST_KEY_RING, TEST_CRYPTO_KEY))
.thenReturn(new CryptoKey().setName(TEST_CRYPTO_KEY));
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
CryptoKey expected = new CryptoKey().setName(TEST_CRYPTO_KEY);
CryptoKey actual =
cloudKMSClient.getCryptoKey(TEST_PROJECT_ID, TEST_LOCATION, TEST_KEY_RING, TEST_CRYPTO_KEY);
assertNotNull(actual);
assertEquals(expected, actual);
}
@Test(expected = IllegalArgumentException.class)
public void testListCryptoKeyVersionsErrorWithNullProjectId() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.listCryptoKeyVersions(null, TEST_LOCATION, TEST_KEY_RING, TEST_CRYPTO_KEY);
}
@Test(expected = IllegalArgumentException.class)
public void testListCryptoKeyVersionsErrorWithEmptyProjectId() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.listCryptoKeyVersions("", TEST_LOCATION, TEST_KEY_RING, TEST_CRYPTO_KEY);
}
@Test(expected = IllegalArgumentException.class)
public void testListCryptoKeyVersionsErrorWithNullLocation() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.listCryptoKeyVersions(TEST_PROJECT_ID, null, TEST_KEY_RING, TEST_CRYPTO_KEY);
}
@Test(expected = IllegalArgumentException.class)
public void testListCryptoKeyVersionsErrorWithEmptyLocation() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.listCryptoKeyVersions(TEST_PROJECT_ID, "", TEST_KEY_RING, TEST_CRYPTO_KEY);
}
@Test(expected = IllegalArgumentException.class)
public void testListCryptoKeyVersionsErrorWithNullKeyRing() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.listCryptoKeyVersions(TEST_PROJECT_ID, TEST_LOCATION, null, TEST_CRYPTO_KEY);
}
@Test(expected = IllegalArgumentException.class)
public void testListCryptoKeyVersionsErrorWithEmptyKeyRing() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.listCryptoKeyVersions(TEST_PROJECT_ID, TEST_LOCATION, "", TEST_CRYPTO_KEY);
}
@Test(expected = IllegalArgumentException.class)
public void testListCryptoKeyVersionsErrorWithNullCryptoKey() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.listCryptoKeyVersions(TEST_PROJECT_ID, TEST_LOCATION, TEST_KEY_RING, null);
}
@Test(expected = IllegalArgumentException.class)
public void testListCryptoKeyVersionsErrorWithEmptyCryptoKey() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.listCryptoKeyVersions(TEST_PROJECT_ID, TEST_LOCATION, TEST_KEY_RING, "");
}
@Test(expected = IOException.class)
public void testListCryptoKeyVersionsErrorWithIOException() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
Mockito.when(cloudKMS.listCryptoKeyVersions(anyString(), anyString(), anyString(), anyString()))
.thenThrow(IOException.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.listCryptoKeyVersions(
TEST_PROJECT_ID, TEST_LOCATION, TEST_KEY_RING, TEST_CRYPTO_KEY);
}
@Test
public void testListCryptoKeyVersions() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
Mockito.when(
cloudKMS.listCryptoKeyVersions(
TEST_PROJECT_ID, TEST_LOCATION, TEST_KEY_RING, TEST_CRYPTO_KEY))
.thenReturn(
initCryptoKeyVersionList(
ImmutableList.of(TEST_CRYPTO_KEY_VERSION, OTHER_CRYPTO_KEY_VERSION)));
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
List<CryptoKeyVersion> expected =
initCryptoKeyVersionList(
ImmutableList.of(OTHER_CRYPTO_KEY_VERSION, TEST_CRYPTO_KEY_VERSION));
List<CryptoKeyVersion> actual =
cloudKMSClient.listCryptoKeyVersions(
TEST_PROJECT_ID, TEST_LOCATION, TEST_KEY_RING, TEST_CRYPTO_KEY);
assertNotNull(actual);
assertEquals(expected, actual);
}
@Test
public void testListCryptoKeyVersionsOmitsDisabledKeys() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
Mockito.when(
cloudKMS.listCryptoKeyVersions(
TEST_PROJECT_ID, TEST_LOCATION, TEST_KEY_RING, TEST_CRYPTO_KEY))
.thenReturn(
initCryptoKeyVersionList(
ImmutableList.of(TEST_CRYPTO_KEY_VERSION, OTHER_CRYPTO_KEY_VERSION),
ImmutableList.of("ENABLED", "DISABLED")));
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
List<CryptoKeyVersion> expected =
initCryptoKeyVersionList(
ImmutableList.of(TEST_CRYPTO_KEY_VERSION), ImmutableList.of("ENABLED"));
List<CryptoKeyVersion> actual =
cloudKMSClient.listCryptoKeyVersions(
TEST_PROJECT_ID, TEST_LOCATION, TEST_KEY_RING, TEST_CRYPTO_KEY);
assertNotNull(actual);
assertEquals(expected, actual);
}
@Test(expected = IllegalArgumentException.class)
public void testGetCryptoKeyVersionErrorWithNullProjectId() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.getCryptoKeyVersion(
null, TEST_LOCATION, TEST_KEY_RING, TEST_CRYPTO_KEY, TEST_CRYPTO_KEY_VERSION);
}
@Test(expected = IllegalArgumentException.class)
public void testGetCryptoKeyVersionErrorWithEmptyProjectId() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.getCryptoKeyVersion(
"", TEST_LOCATION, TEST_KEY_RING, TEST_CRYPTO_KEY, TEST_CRYPTO_KEY_VERSION);
}
@Test(expected = IllegalArgumentException.class)
public void testGetCryptoKeyVersionErrorWithNullLocation() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.getCryptoKeyVersion(
TEST_PROJECT_ID, null, TEST_KEY_RING, TEST_CRYPTO_KEY, TEST_CRYPTO_KEY_VERSION);
}
@Test(expected = IllegalArgumentException.class)
public void testGetCryptoKeyVersionErrorWithEmptyLocation() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.getCryptoKeyVersion(
TEST_PROJECT_ID, "", TEST_KEY_RING, TEST_CRYPTO_KEY, TEST_CRYPTO_KEY_VERSION);
}
@Test(expected = IllegalArgumentException.class)
public void testGetCryptoKeyVersionErrorWithNullKeyRing() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.getCryptoKeyVersion(
TEST_PROJECT_ID, TEST_LOCATION, null, TEST_CRYPTO_KEY, TEST_CRYPTO_KEY_VERSION);
}
@Test(expected = IllegalArgumentException.class)
public void testGetCryptoKeyVersionErrorWithEmptyKeyRing() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.getCryptoKeyVersion(
TEST_PROJECT_ID, TEST_LOCATION, "", TEST_CRYPTO_KEY, TEST_CRYPTO_KEY_VERSION);
}
@Test(expected = IllegalArgumentException.class)
public void testGetCryptoKeyVersionErrorWithNullCryptoKey() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.getCryptoKeyVersion(
TEST_PROJECT_ID, TEST_LOCATION, TEST_KEY_RING, null, TEST_CRYPTO_KEY_VERSION);
}
@Test(expected = IllegalArgumentException.class)
public void testGetCryptoKeyVersionErrorWithEmptyCryptoKey() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.getCryptoKeyVersion(
TEST_PROJECT_ID, TEST_LOCATION, TEST_KEY_RING, "", TEST_CRYPTO_KEY_VERSION);
}
@Test(expected = IllegalArgumentException.class)
public void testGetCryptoKeyVersionErrorWithNullCryptoKeyVersion() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.getCryptoKeyVersion(
TEST_PROJECT_ID, TEST_LOCATION, TEST_KEY_RING, TEST_CRYPTO_KEY, null);
}
@Test(expected = IllegalArgumentException.class)
public void testGetCryptoKeyVersionErrorWithEmptyCryptoKeyVersion() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.getCryptoKeyVersion(
TEST_PROJECT_ID, TEST_LOCATION, TEST_KEY_RING, TEST_CRYPTO_KEY, "");
}
@Test(expected = IOException.class)
public void testGetCryptoKeyVersionErrorWithIOException() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
Mockito.when(
cloudKMS.getCryptoKeyVersion(
anyString(), anyString(), anyString(), anyString(), anyString()))
.thenThrow(IOException.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.getCryptoKeyVersion(
TEST_PROJECT_ID, TEST_LOCATION, TEST_KEY_RING, TEST_CRYPTO_KEY, TEST_CRYPTO_KEY_VERSION);
}
@Test
public void testGetCryptoKeyVersion() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
Mockito.when(
cloudKMS.getCryptoKeyVersion(
TEST_PROJECT_ID,
TEST_LOCATION,
TEST_KEY_RING,
TEST_CRYPTO_KEY,
TEST_CRYPTO_KEY_VERSION))
.thenReturn(new CryptoKeyVersion().setName(TEST_CRYPTO_KEY_VERSION));
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
CryptoKeyVersion expected = new CryptoKeyVersion().setName(TEST_CRYPTO_KEY_VERSION);
CryptoKeyVersion actual =
cloudKMSClient.getCryptoKeyVersion(
TEST_PROJECT_ID,
TEST_LOCATION,
TEST_KEY_RING,
TEST_CRYPTO_KEY,
TEST_CRYPTO_KEY_VERSION);
assertNotNull(actual);
assertEquals(expected, actual);
}
@Test(expected = IllegalArgumentException.class)
public void testGetPublicKeyErrorWithNullProjectId() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.getPublicKey(
null, TEST_LOCATION, TEST_KEY_RING, TEST_CRYPTO_KEY, TEST_CRYPTO_KEY_VERSION);
}
@Test(expected = IllegalArgumentException.class)
public void testGetPublicKeyErrorWithEmptyProjectId() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.getPublicKey(
"", TEST_LOCATION, TEST_KEY_RING, TEST_CRYPTO_KEY, TEST_CRYPTO_KEY_VERSION);
}
@Test(expected = IllegalArgumentException.class)
public void testGetPublicKeyErrorWithNullLocation() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.getPublicKey(
TEST_PROJECT_ID, null, TEST_KEY_RING, TEST_CRYPTO_KEY, TEST_CRYPTO_KEY_VERSION);
}
@Test(expected = IllegalArgumentException.class)
public void testGetPublicKeyErrorWithEmptyLocation() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.getPublicKey(
TEST_PROJECT_ID, "", TEST_KEY_RING, TEST_CRYPTO_KEY, TEST_CRYPTO_KEY_VERSION);
}
@Test(expected = IllegalArgumentException.class)
public void testGetPublicKeyErrorWithNullKeyRing() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.getPublicKey(
TEST_PROJECT_ID, TEST_LOCATION, null, TEST_CRYPTO_KEY, TEST_CRYPTO_KEY_VERSION);
}
@Test(expected = IllegalArgumentException.class)
public void testGetPublicKeyErrorWithEmptyKeyRing() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.getPublicKey(
TEST_PROJECT_ID, TEST_LOCATION, "", TEST_CRYPTO_KEY, TEST_CRYPTO_KEY_VERSION);
}
@Test(expected = IllegalArgumentException.class)
public void testGetPublicKeyErrorWithNullCryptoKey() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.getPublicKey(
TEST_PROJECT_ID, TEST_LOCATION, TEST_KEY_RING, null, TEST_CRYPTO_KEY_VERSION);
}
@Test(expected = IllegalArgumentException.class)
public void testGetPublicKeyErrorWithEmptyCryptoKey() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.getPublicKey(
TEST_PROJECT_ID, TEST_LOCATION, TEST_KEY_RING, "", TEST_CRYPTO_KEY_VERSION);
}
@Test(expected = IllegalArgumentException.class)
public void testGetPublicKeyErrorWithNullCryptoKeyVersion() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.getPublicKey(
TEST_PROJECT_ID, TEST_LOCATION, TEST_KEY_RING, TEST_CRYPTO_KEY, null);
}
@Test(expected = IllegalArgumentException.class)
public void testGetPublicKeyErrorWithEmptyCryptoKeyVersion() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.getPublicKey(TEST_PROJECT_ID, TEST_LOCATION, TEST_KEY_RING, TEST_CRYPTO_KEY, "");
}
@Test(expected = IOException.class)
public void testGetPublicKeyErrorWithIOException() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
Mockito.when(
cloudKMS.getPublicKey(anyString(), anyString(), anyString(), anyString(), anyString()))
.thenThrow(IOException.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.getPublicKey(
TEST_PROJECT_ID, TEST_LOCATION, TEST_KEY_RING, TEST_CRYPTO_KEY, TEST_CRYPTO_KEY_VERSION);
}
@Test
public void testGetPublicKey() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
Mockito.when(
cloudKMS.getPublicKey(
TEST_PROJECT_ID,
TEST_LOCATION,
TEST_KEY_RING,
TEST_CRYPTO_KEY,
TEST_CRYPTO_KEY_VERSION))
.thenReturn(new PublicKey().setAlgorithm(PUBLIC_KEY_ALGORITHM));
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
PublicKey expected = new PublicKey().setAlgorithm(PUBLIC_KEY_ALGORITHM);
PublicKey actual =
cloudKMSClient.getPublicKey(
TEST_PROJECT_ID,
TEST_LOCATION,
TEST_KEY_RING,
TEST_CRYPTO_KEY,
TEST_CRYPTO_KEY_VERSION);
assertNotNull(actual);
assertEquals(expected, actual);
}
@Test(expected = IllegalArgumentException.class)
public void testAsymmetricSignErrorWithNullProjectId() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.asymmetricSign(
null, TEST_LOCATION, TEST_KEY_RING, TEST_CRYPTO_KEY, TEST_CRYPTO_KEY_VERSION, TEST_PAYLOAD);
}
@Test(expected = IllegalArgumentException.class)
public void testAsymmetricSignErrorWithEmptyProjectId() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.asymmetricSign(
"", TEST_LOCATION, TEST_KEY_RING, TEST_CRYPTO_KEY, TEST_CRYPTO_KEY_VERSION, TEST_PAYLOAD);
}
@Test(expected = IllegalArgumentException.class)
public void testAsymmetricSignErrorWithNullLocation() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.asymmetricSign(
TEST_PROJECT_ID,
null,
TEST_KEY_RING,
TEST_CRYPTO_KEY,
TEST_CRYPTO_KEY_VERSION,
TEST_PAYLOAD);
}
@Test(expected = IllegalArgumentException.class)
public void testAsymmetricSignErrorWithEmptyLocation() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.asymmetricSign(
TEST_PROJECT_ID, "", TEST_KEY_RING, TEST_CRYPTO_KEY, TEST_CRYPTO_KEY_VERSION, TEST_PAYLOAD);
}
@Test(expected = IllegalArgumentException.class)
public void testAsymmetricSignErrorWithNullKeyRing() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.asymmetricSign(
TEST_PROJECT_ID,
TEST_LOCATION,
null,
TEST_CRYPTO_KEY,
TEST_CRYPTO_KEY_VERSION,
TEST_PAYLOAD);
}
@Test(expected = IllegalArgumentException.class)
public void testAsymmetricSignErrorWithEmptyKeyRing() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.asymmetricSign(
TEST_PROJECT_ID, TEST_LOCATION, "", TEST_CRYPTO_KEY, TEST_CRYPTO_KEY_VERSION, TEST_PAYLOAD);
}
@Test(expected = IllegalArgumentException.class)
public void testAsymmetricSignErrorWithNullCryptoKey() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.asymmetricSign(
TEST_PROJECT_ID, TEST_LOCATION, TEST_KEY_RING, null, TEST_CRYPTO_KEY_VERSION, TEST_PAYLOAD);
}
@Test(expected = IllegalArgumentException.class)
public void testAsymmetricSignErrorWithEmptyCryptoKey() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.asymmetricSign(
TEST_PROJECT_ID, TEST_LOCATION, TEST_KEY_RING, "", TEST_CRYPTO_KEY_VERSION, TEST_PAYLOAD);
}
@Test(expected = IllegalArgumentException.class)
public void testAsymmetricSignErrorWithNullCryptoKeyVersion() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.asymmetricSign(
TEST_PROJECT_ID, TEST_LOCATION, TEST_KEY_RING, TEST_CRYPTO_KEY, null, TEST_PAYLOAD);
}
@Test(expected = IllegalArgumentException.class)
public void testAsymmetricSignErrorWithEmptyCryptoKeyVersion() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.asymmetricSign(
TEST_PROJECT_ID, TEST_LOCATION, TEST_KEY_RING, TEST_CRYPTO_KEY, "", TEST_PAYLOAD);
}
@Test(expected = IllegalArgumentException.class)
public void testAsymmetricSignErrorWithNullPayload() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.asymmetricSign(
TEST_PROJECT_ID, TEST_LOCATION, TEST_KEY_RING, TEST_CRYPTO_KEY, TEST_CRYPTO_KEY_VERSION, null);
}
@Test(expected = IllegalArgumentException.class)
public void testAsymmetricSignErrorWithEmptyPayload() throws IOException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.asymmetricSign(
TEST_PROJECT_ID, TEST_LOCATION, TEST_KEY_RING, TEST_CRYPTO_KEY, TEST_CRYPTO_KEY_VERSION, "");
}
@Test(expected = IOException.class)
public void testAsymmetricSignErrorWithIOException()
throws IOException, NoSuchAlgorithmException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
Mockito.when(
cloudKMS.getCryptoKeyVersion(
anyString(), anyString(), anyString(), anyString(), anyString()))
.thenReturn(new CryptoKeyVersion().setAlgorithm(PUBLIC_KEY_ALGORITHM));
Mockito.when(
cloudKMS.asymmetricSign(
anyString(),
anyString(),
anyString(),
anyString(),
anyString(),
anyString(),
anyString()))
.thenThrow(IOException.class);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
cloudKMSClient.asymmetricSign(
TEST_PROJECT_ID,
TEST_LOCATION,
TEST_KEY_RING,
TEST_CRYPTO_KEY,
TEST_CRYPTO_KEY_VERSION,
TEST_PAYLOAD);
}
@Test
public void testAsymmetricSign() throws IOException, NoSuchAlgorithmException {
CloudKMSWrapper cloudKMS = Mockito.mock(CloudKMSWrapper.class);
Mockito.when(
cloudKMS.getCryptoKeyVersion(
anyString(), anyString(), anyString(), anyString(), anyString()))
.thenReturn(new CryptoKeyVersion().setAlgorithm("RSA_SIGN_PKCS1_4096_SHA512"));
Mockito.when(
cloudKMS.asymmetricSign(
TEST_PROJECT_ID,
TEST_LOCATION,
TEST_KEY_RING,
TEST_CRYPTO_KEY,
TEST_CRYPTO_KEY_VERSION,
"SHA-512",
TEST_PAYLOAD))
.thenReturn(TEST_SIGNATURE);
CloudKMSClient cloudKMSClient = new CloudKMSClient(cloudKMS);
String result =
cloudKMSClient.asymmetricSign(
TEST_PROJECT_ID,
TEST_LOCATION,
TEST_KEY_RING,
TEST_CRYPTO_KEY,
TEST_CRYPTO_KEY_VERSION,
TEST_PAYLOAD);
assertEquals(TEST_SIGNATURE, result);
}
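// Helper factories below turn plain name lists into immutable lists of KMS API resources for the list-based tests above.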
private static ImmutableList<Location> initLocationList(List<String> names) {
return ImmutableList.copyOf(
names.stream().map(name -> new Location().setName(name)).collect(Collectors.toList()));
}
private static ImmutableList<KeyRing> initKeyRingList(List<String> names) {
return ImmutableList.copyOf(
names.stream().map(name -> new KeyRing().setName(name)).collect(Collectors.toList()));
}
private static ImmutableList<CryptoKey> initCryptoKeyList(List<String> names) {
return ImmutableList.copyOf(
names.stream().map(name -> new CryptoKey().setName(name)).collect(Collectors.toList()));
}
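// Crypto key version helpers: the single-argument overload marks every version as ENABLED; the two-argument overload pairs each name with an explicit state.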
private static ImmutableList<CryptoKeyVersion> initCryptoKeyVersionList(List<String> names) {
List<String> states = names.stream().map(name -> "ENABLED").collect(Collectors.toList());
return initCryptoKeyVersionList(names, states);
}
private static ImmutableList<CryptoKeyVersion> initCryptoKeyVersionList(
List<String> names, List<String> states) {
assertEquals(names.size(), states.size());
return ImmutableList.copyOf(
IntStream.range(0, names.size())
.boxed()
.map(i -> new CryptoKeyVersion().setName(names.get(i)).setState(states.get(i)))
.collect(Collectors.toList()));
}
}
|
|
/*
* Copyright 2013-2017 consulo.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package consulo.csharp.lang.psi.impl.source.resolve.type;
import com.intellij.openapi.project.Project;
import com.intellij.psi.PsiElement;
import com.intellij.psi.search.GlobalSearchScope;
import consulo.annotation.access.RequiredReadAction;
import consulo.csharp.lang.psi.CSharpMethodDeclaration;
import consulo.csharp.lang.psi.CSharpSimpleParameterInfo;
import consulo.csharp.lang.psi.impl.msil.CSharpTransform;
import consulo.csharp.lang.psi.impl.source.resolve.type.wrapper.GenericUnwrapTool;
import consulo.dotnet.DotNetTypes;
import consulo.dotnet.resolve.*;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
/**
* @author VISTALL
* @since 05.05.14
*/
public class CSharpLambdaTypeRef extends DotNetTypeRefWithCachedResult
{
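// Lazily computed resolve result: resolves to the delegate target when one is set, otherwise falls back to System.MulticastDelegate.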
private class Result implements CSharpLambdaResolveResult
{
private final Project myProject;
private final GlobalSearchScope myResolveScope;
public Result(Project project, GlobalSearchScope resolveScope)
{
myProject = project;
myResolveScope = resolveScope;
}
@Nullable
@Override
@RequiredReadAction
public PsiElement getElement()
{
if(myTarget == null)
{
return DotNetPsiSearcher.getInstance(myProject).findType(DotNetTypes.System.MulticastDelegate, myResolveScope, CSharpTransform.INSTANCE);
}
return CSharpLambdaResolveResultUtil.createTypeFromDelegate(myTarget, myExtractor);
}
@Nonnull
@Override
public DotNetGenericExtractor getGenericExtractor()
{
return myExtractor;
}
@Override
public boolean isNullable()
{
return true;
}
@RequiredReadAction
@Nonnull
@Override
public CSharpSimpleParameterInfo[] getParameterInfos()
{
CSharpSimpleParameterInfo[] parameterInfos = myParameterInfos;
if(myExtractor == DotNetGenericExtractor.EMPTY)
{
return parameterInfos;
}
CSharpSimpleParameterInfo[] temp = new CSharpSimpleParameterInfo[parameterInfos.length];
for(int i = 0; i < parameterInfos.length; i++)
{
CSharpSimpleParameterInfo parameterInfo = parameterInfos[i];
DotNetTypeRef typeRef = GenericUnwrapTool.exchangeTypeRef(parameterInfo.getTypeRef(), getGenericExtractor());
temp[i] = new CSharpSimpleParameterInfo(parameterInfo.getIndex(), parameterInfo.getName(), parameterInfo.getElement(), typeRef);
}
return temp;
}
@RequiredReadAction
@Override
public boolean isInheritParameters()
{
return myInheritParameters;
}
@RequiredReadAction
@Nonnull
@Override
public DotNetTypeRef getReturnTypeRef()
{
return GenericUnwrapTool.exchangeTypeRef(myReturnType, getGenericExtractor());
}
@Nullable
@Override
public CSharpMethodDeclaration getTarget()
{
return GenericUnwrapTool.extract(myTarget, getGenericExtractor());
}
}
private final CSharpMethodDeclaration myTarget;
private final CSharpSimpleParameterInfo[] myParameterInfos;
private final DotNetTypeRef myReturnType;
private final boolean myInheritParameters;
private DotNetGenericExtractor myExtractor = DotNetGenericExtractor.EMPTY;
@RequiredReadAction
public CSharpLambdaTypeRef(@Nonnull CSharpMethodDeclaration method)
{
this(method.getProject(), method.getResolveScope(), method, method.getParameterInfos(), method.getReturnTypeRef());
}
@RequiredReadAction
public CSharpLambdaTypeRef(@Nonnull Project project, @Nonnull GlobalSearchScope scope, @Nonnull CSharpMethodDeclaration method, @Nonnull DotNetGenericExtractor extractor)
{
this(project, scope, method, method.getParameterInfos(), method.getReturnTypeRef());
myExtractor = extractor;
}
@RequiredReadAction
public CSharpLambdaTypeRef(@Nonnull Project project,
@Nonnull GlobalSearchScope scope,
@Nullable CSharpMethodDeclaration target,
@Nonnull CSharpSimpleParameterInfo[] parameterInfos,
@Nonnull DotNetTypeRef returnType)
{
this(project, scope, target, parameterInfos, returnType, false);
}
@RequiredReadAction
public CSharpLambdaTypeRef(@Nonnull Project project,
@Nonnull GlobalSearchScope scope,
@Nullable CSharpMethodDeclaration target,
@Nonnull CSharpSimpleParameterInfo[] parameterInfos,
@Nonnull DotNetTypeRef returnType,
boolean inheritParameters)
{
super(project, scope);
myTarget = target;
myParameterInfos = parameterInfos;
myReturnType = returnType;
myInheritParameters = inheritParameters;
}
@RequiredReadAction
@Nonnull
@Override
public String getVmQName()
{
if(myTarget != null)
{
return myTarget.getPresentableQName();
}
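// No delegate target: synthesize a readable pseudo-signature of the form {(param1, param2) => returnType}, printing '?' for AUTO (not yet inferred) types.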
StringBuilder builder = new StringBuilder();
builder.append("{(");
for(int i = 0; i < myParameterInfos.length; i++)
{
if(i != 0)
{
builder.append(", ");
}
DotNetTypeRef parameterType = myParameterInfos[i].getTypeRef();
if(parameterType == AUTO_TYPE)
{
builder.append("?");
}
else
{
builder.append(parameterType.toString());
}
}
builder.append(")");
if(myReturnType == AUTO_TYPE)
{
builder.append(" => ?");
}
else
{
builder.append(" => ").append(myReturnType.toString());
}
builder.append("}");
return builder.toString();
}
@Nullable
public PsiElement getTarget()
{
return myTarget;
}
@RequiredReadAction
@Nonnull
@Override
protected DotNetTypeResolveResult resolveResult()
{
return new Result(getProject(), getResolveScope());
}
}
|
|
/*
* Copyright 2015 MICRORISC s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.microrisc.simply.iqrf.dpa.v30x.examples.std_per.frc;
import com.microrisc.simply.CallRequestProcessingState;
import com.microrisc.simply.Network;
import com.microrisc.simply.Node;
import com.microrisc.simply.Simply;
import com.microrisc.simply.SimplyException;
import com.microrisc.simply.errors.CallRequestProcessingError;
import com.microrisc.simply.iqrf.dpa.v30x.DPA_SimplyFactory;
import com.microrisc.simply.iqrf.dpa.v30x.devices.FRC;
import com.microrisc.simply.iqrf.dpa.v30x.types.FRC_Configuration;
import com.microrisc.simply.iqrf.dpa.v30x.types.FRC_Data;
import com.microrisc.simply.iqrf.dpa.v30x.types.FRC_UniversalWithBits;
import com.microrisc.simply.iqrf.dpa.v30x.types.FRC_UniversalWithBytes;
import java.io.File;
import java.util.Comparator;
import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;
/**
* Example of using the FRC peripheral - sending a command and getting the
* extra result with the FRC Universal command.
* <p>
* @author Michal Konopa
* @author Rostislav Spinar
* @author Martin Strouhal
*/
public class SendAndExtraResultUniversal {
// reference to Simply
private static Simply simply = null;
// prints out specified error description, destroys the Simply and exits
private static void printMessageAndExit(String errorDescr) {
System.out.println(errorDescr);
if (simply != null) {
simply.destroy();
}
System.exit(1);
}
private static class NodeIdComparator implements Comparator<String> {
@Override
public int compare(String nodeIdStr1, String nodeIdStr2) {
int nodeId_1 = Integer.decode(nodeIdStr1);
int nodeId_2 = Integer.decode(nodeIdStr2);
return Integer.compare(nodeId_1, nodeId_2);
}
}
// Node Id comparator
private static final NodeIdComparator nodeIdComparator = new NodeIdComparator();
// sorts the specified results by node ID in ascending order
private static <T> SortedMap<String, T> sortResult(Class<T> type, Map<String, T> result) {
TreeMap<String, T> sortedResult = new TreeMap<>(nodeIdComparator);
sortedResult.putAll(result);
return sortedResult;
}
// processes NULL result
private static void processNullResult(FRC frc, String errorMsg,
String notProcMsg) {
CallRequestProcessingState procState = frc.getCallRequestProcessingStateOfLastCall();
if (procState == CallRequestProcessingState.ERROR) {
CallRequestProcessingError error = frc.getCallRequestProcessingErrorOfLastCall();
printMessageAndExit(errorMsg + ": " + error);
} else {
printMessageAndExit(notProcMsg + ": " + procState);
}
}
public static void main(String[] args) {
// creating Simply instance
try {
simply = DPA_SimplyFactory.getSimply(
"config" + File.separator + "Simply.properties");
} catch (SimplyException ex) {
printMessageAndExit("Error while creating Simply: " + ex.getMessage());
}
// getting network 1
Network network1 = simply.getNetwork("1", Network.class);
if (network1 == null) {
printMessageAndExit("Network 1 doesn't exist");
}
// getting a master node
Node master = network1.getNode("0");
if (master == null) {
printMessageAndExit("Master doesn't exist");
}
// getting FRC interface
FRC frc = master.getDeviceObject(FRC.class);
if (frc == null) {
printMessageAndExit("FRC doesn't exist or is not enabled");
}
// A waiting timeout must be set for the FRC peripheral:
// 1) For a typical standard FRC (transfers up to 2 B to the nodes) the duration is lower than:
// timeout = Bonded Nodes x 130 + _RESPONSE_FRC_TIME_xxx_MS + 250 [ms]
// 2) For a typical advanced FRC (transfers up to 30 B to the nodes) the duration is lower than:
// timeout for STD mode = Bonded Nodes x 150 + _RESPONSE_FRC_TIME_xxx_MS + 290 [ms]
// timeout for LP mode = Bonded Nodes x 200 + _RESPONSE_FRC_TIME_xxx_MS + 390 [ms]
// e.g. for 5 bonded nodes, an FRC response time of 640 ms
// and an overhead for the Java framework of about 2 s
short overhead = 2000; // overhead for the Java framework [ms]
boolean std = true; // indicates if is used STD or LP mode
long timeout = overhead + 5 * (std ? 150 : 200) + (long) FRC_Configuration.FRC_RESPONSE_TIME.TIME_640_MS.getRepsonseTimeInInt() + (std ? 290 : 390);
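// e.g. STD mode, 5 bonded nodes, 640 ms FRC response time: 5 * 150 + 640 + 290 = 1680 ms, plus the framework overhead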
frc.setDefaultWaitingTimeout(timeout);
sendAndPrintUniversal2bitFRC(frc);
sendAndPrintUniversalFRC(frc);
simply.destroy();
}
private static void sendAndPrintUniversal2bitFRC(FRC frc) {
// creating a new universal FRC command with specified ID
FRC_UniversalWithBits frcCmd = new FRC_UniversalWithBits(0x01); // 0x01 - UART or SPI data available
// sending FRC command and getting data
FRC_Data data = frc.send(frcCmd);
if (data == null) {
processNullResult(frc, "Sending FRC command failed",
"Sending FRC command hasn't been processed yet"
);
}
// getting extra FRC result
short[] extra = frc.extraResult();
if (extra == null) {
processNullResult(frc, "Setting FRC extra result failed",
"Setting FRC extra result hasn't been processed yet"
);
}
// merging data and extra result into one 64-element buffer (send() returns the first part, extraResult() the remainder)
short[] allData = new short[64];
System.arraycopy(data.getData(), 0, allData, 0, data.getData().length);
System.arraycopy(extra, 0, allData, data.getData().length, extra.length);
// parsing of all data
Map<String, FRC_UniversalWithBits.Result> resultMap = null;
try {
resultMap = frcCmd.parse(allData);
} catch (Exception ex) {
printMessageAndExit("Parsing of FRC result failed: " + ex);
}
// sort the results
SortedMap<String, FRC_UniversalWithBits.Result> sortedResult =
sortResult(FRC_UniversalWithBits.Result.class, resultMap);
// printing the result bits for each node
for (Map.Entry<String, FRC_UniversalWithBits.Result> dataEntry : sortedResult.entrySet()) {
System.out.println("Node: " + dataEntry.getKey()
+ ", Bit0: " + dataEntry.getValue().getBit0()
+ ", Bit1: " + dataEntry.getValue().getBit1());
}
}
private static void sendAndPrintUniversalFRC(FRC frc) {
// creating a new universal FRC command with specified ID
FRC_UniversalWithBytes frcCmd = new FRC_UniversalWithBytes(0x80); // 0x80 - FRC temperature
// sending FRC command and getting data
FRC_Data data = frc.send(frcCmd);
if (data == null) {
processNullResult(frc, "Sending FRC command failed",
"Sending FRC command hasn't been processed yet"
);
}
// getting extra FRC result
short[] extra = frc.extraResult();
if (extra == null) {
processNullResult(frc, "Setting FRC extra result failed",
"Setting FRC extra result hasn't been processed yet"
);
}
// merging data and extra result into one 64-element buffer (send() returns the first part, extraResult() the remainder)
short[] allData = new short[64];
System.arraycopy(data.getData(), 0, allData, 0, data.getData().length);
System.arraycopy(extra, 0, allData, data.getData().length, extra.length);
// parsing of all data
Map<String, FRC_UniversalWithBytes.Result> resultMap = null;
try {
resultMap = frcCmd.parse(allData);
} catch (Exception ex) {
printMessageAndExit("Parsing of FRC result failed: " + ex);
}
// sort the results
SortedMap<String, FRC_UniversalWithBytes.Result> sortedResult =
sortResult(FRC_UniversalWithBytes.Result.class, resultMap);
// printing temperature on each node
for (Map.Entry<String, FRC_UniversalWithBytes.Result> dataEntry : sortedResult.entrySet()) {
System.out.println("Node: " + dataEntry.getKey()
+ ", Byte: " + dataEntry.getValue().getByte());
}
}
}
|
|
package br.on.daed.services.controllers;
import br.on.daed.services.html.HTMLHandling;
import br.on.daed.services.html.portos.TabuaMare;
import br.on.daed.services.pdf.DadosMagneticos;
import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.springframework.stereotype.Controller;
import org.springframework.ui.ModelMap;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
@Controller
@RequestMapping("/")
public class SiteController {
private String setWebGLTemplate(ModelMap map, String path) {
map.addAttribute("conteudo", path);
return "templatewebgl";
}
@RequestMapping("/angulo-horario")
public String coordenadasHorarias(ModelMap map) {
return setWebGLTemplate(map, "angulo-horario");
}
@RequestMapping("/")
public String index(ModelMap map) {
return "mainpage";
}
@RequestMapping("/obliquidade-da-ecliptica")
public String ecliptica(ModelMap map) {
return setWebGLTemplate(map, "obliquidade-da-ecliptica");
}
@RequestMapping("/coordenadas-supergalacticas")
public String coordenadasSupergalacticas(ModelMap map) {
return setWebGLTemplate(map, "coordenadas-supergalacticas");
}
@RequestMapping("/coordenadas-galacticas")
public String coordenadasGalacticas(ModelMap map) {
return setWebGLTemplate(map, "coordenadas-galacticas");
}
@RequestMapping("/coordenadas-eclipticas")
public String coordenadasEclipticas(ModelMap map) {
return setWebGLTemplate(map, "coordenadas-eclipticas");
}
@RequestMapping("/coordenadas-horizontais")
public String coordenadasHorizontais(ModelMap map) {
return setWebGLTemplate(map, "coordenadas-horizontais");
}
@RequestMapping("/coordenadas-equatoriais")
public String coordenadasEquatoriais(ModelMap map) {
return setWebGLTemplate(map, "coordenadas-equatoriais");
}
@RequestMapping("/movimentos-da-terra")
public String movimentosTerra(ModelMap map) {
return setWebGLTemplate(map, "movimentos-da-terra");
}
@RequestMapping("/calendario-gregoriano")
public String dataGregoriana(ModelMap map) {
return setWebGLTemplate(map, "calendario-gregoriano");
}
@RequestMapping("/data-juliana")
public String dataJuliana(ModelMap map) {
return setWebGLTemplate(map, "data-juliana");
}
@RequestMapping("/posicao-sol")
public String posicaoSol(ModelMap map) {
return setWebGLTemplate(map, "posicao-sol");
}
@RequestMapping("/posicao-lua")
public String posicaoLua(ModelMap map) {
return setWebGLTemplate(map, "posicao-lua");
}
@RequestMapping("/eclipses")
public String eclipseSolar(ModelMap map) {
return setWebGLTemplate(map, "eclipses");
}
@RequestMapping("/equacao-de-kepler")
public String equacaoKepler(ModelMap map) {
return setWebGLTemplate(map, "equacao-de-kepler");
}
@RequestMapping("/satelites-jupiter")
public String satelitesJupiter(ModelMap map) {
return setWebGLTemplate(map, "satelites-jupiter");
}
@RequestMapping("/linhas-de-forca")
public String linhasDeForca(ModelMap map) {
return setWebGLTemplate(map, "linhas-de-forca");
}
@RequestMapping("/magnetismo-terrestre")
public String magnetismoTerrestre(ModelMap map) {
return setWebGLTemplate(map, "magnetismo-terrestre");
}
private final String GRAFICO_DEFAULT = "ead2015";
@RequestMapping("/grafico-globo")
public String graficoGlobo(ModelMap map, @RequestParam(value = "data", required = false) String conteudo) {
map.addAttribute("conteudo", conteudo == null ? GRAFICO_DEFAULT : conteudo);
return "webgl/grafico-globo/grafico-globo";
}
@RequestMapping("/holo-grafico-globo")
public String holoPiramide(ModelMap map, @RequestParam(value = "data", required = false) String conteudo) {
map.addAttribute("conteudo", conteudo == null ? GRAFICO_DEFAULT : conteudo);
return "webgl/holo-piramide/grafico-globo";
}
@RequestMapping("/holo-grafico-globo-2")
public String holoPiramide2(ModelMap map, @RequestParam(value = "data", required = false) String conteudo) {
map.addAttribute("conteudo", conteudo == null ? GRAFICO_DEFAULT : conteudo);
return "webgl/holo-piramide/grafico-globo-2";
}
@RequestMapping("/tabua-mares")
public @ResponseBody
Object tabuaMares(ModelMap map) {
return HTMLHandling.getMareOptions();
}
@RequestMapping("/mares")
public String mares(ModelMap map) {
map.addAttribute("maxAnoTabuas", HTMLHandling.getMareYear());
return setWebGLTemplate(map, "mares");
}
@RequestMapping("/tabua-mares/{tabuaMares}")
public @ResponseBody
TabuaMare tabuaMares(@PathVariable String tabuaMares) {
return HTMLHandling.getTabuaMare(tabuaMares);
}
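// Collects the orbit JSON files bundled with the application, excluding referencia.json.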
private File[] getArquivosOrbitas(HttpServletRequest request) {
String path = request.getServletContext().getRealPath("/WEB-INF/classes/static/lib/on-daed-js/orbitas");
File folder = new File(path);
File[] files = folder.listFiles(new FilenameFilter() {
@Override
public boolean accept(File dir, String name) {
return name.endsWith(".json") && !name.equals("referencia.json");
}
});
return files;
}
@RequestMapping("/orbitas")
public void orbitas(HttpServletRequest request, HttpServletResponse response) throws IOException {
File[] arquivosOrbitas = getArquivosOrbitas(request);
if (arquivosOrbitas.length > 0) {
response.sendRedirect("orbitas-" + arquivosOrbitas[0].getName().replace(".json", ""));
}
}
@RequestMapping("/orbitas-{orbita}")
public String orbitas(HttpServletRequest request, ModelMap map, @PathVariable String orbita) {
String filename = orbita + ".json";
File[] arquivosOrbitas = getArquivosOrbitas(request);
String arquivoEscolhido = null;
for (int i = 0; i < arquivosOrbitas.length; i++) {
if (arquivosOrbitas[i].getName().equals(filename)) {
arquivoEscolhido = filename;
}
}
if (arquivoEscolhido == null) {
arquivoEscolhido = arquivosOrbitas[0].getName();
}
map.addAttribute("arquivoDados", arquivoEscolhido);
return setWebGLTemplate(map, "orbitas");
}
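// Builds the magnetic-data PDF for the requested year and type and returns it as a file download.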
@RequestMapping(value = "/pdf/dados-magneticos.pdf")
@ResponseBody
public byte[] dadosMagneticos(HttpServletResponse response, @RequestParam(value = "ano") String ano, @RequestParam(value = "tipo") String tipo) throws UnsupportedOperationException, IOException, InterruptedException {
response.setContentType("application/pdf");
response.setHeader("Content-Disposition", "attachment; filename=dados-magneticos.pdf");
return DadosMagneticos.gerarPDF(ano, tipo);
}
}
|
|
package org.activiti.engine.test.api.event;
import java.io.IOException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.activiti.engine.delegate.event.ActivitiEventType;
import org.activiti.engine.event.EventLogEntry;
import org.activiti.engine.impl.event.logger.EventLogger;
import org.activiti.engine.impl.event.logger.handler.Fields;
import org.activiti.engine.impl.identity.Authentication;
import org.activiti.engine.impl.test.PluggableActivitiTestCase;
import org.activiti.engine.impl.util.CollectionUtil;
import org.activiti.engine.runtime.ProcessInstance;
import org.activiti.engine.task.Task;
import org.activiti.engine.test.Deployment;
import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.ObjectMapper;
/**
* @author Joram Barrez
*/
public class DatabaseEventLoggerTest extends PluggableActivitiTestCase {
protected EventLogger databaseEventLogger;
protected ObjectMapper objectMapper = new ObjectMapper();
@Override
protected void setUp() throws Exception {
super.setUp();
// Database event logger setup
databaseEventLogger = new EventLogger(processEngineConfiguration.getClock(), processEngineConfiguration.getObjectMapper());
runtimeService.addEventListener(databaseEventLogger);
}
@Override
protected void tearDown() throws Exception {
// Database event logger teardown
runtimeService.removeEventListener(databaseEventLogger);
super.tearDown();
}
@Deployment(resources = { "org/activiti/engine/test/api/event/DatabaseEventLoggerProcess.bpmn20.xml" })
public void testDatabaseEvents() throws IOException {
String testTenant = "testTenant";
String deploymentId = repositoryService.createDeployment()
.addClasspathResource("org/activiti/engine/test/api/event/DatabaseEventLoggerProcess.bpmn20.xml")
.tenantId(testTenant)
.deploy().getId();
// Run process to gather data
ProcessInstance processInstance = runtimeService.startProcessInstanceByKeyAndTenantId("DatabaseEventLoggerProcess",
CollectionUtil.singletonMap("testVar", "helloWorld"), testTenant);
// Verify event log entries
List<EventLogEntry> eventLogEntries = managementService.getEventLogEntries(null, null);
String processDefinitionId = processInstance.getProcessDefinitionId();
Iterator<EventLogEntry> iterator = eventLogEntries.iterator();
while (iterator.hasNext()) {
EventLogEntry entry = iterator.next();
if (entry.getProcessDefinitionId() != null && !entry.getProcessDefinitionId().equals(processDefinitionId)) {
iterator.remove();
}
}
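// Only events belonging to this process instance remain; the sample process is expected to produce 15 of them.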
assertEquals(15, eventLogEntries.size());
long lastLogNr = -1;
for (int i = 0; i < eventLogEntries.size(); i++) {
EventLogEntry entry = eventLogEntries.get(i);
if (i == 0) {
assertNotNull(entry.getType());
assertEquals(ActivitiEventType.VARIABLE_CREATED.name(), entry.getType());
assertNotNull(entry.getProcessDefinitionId());
assertNotNull(entry.getProcessInstanceId());
assertNotNull(entry.getTimeStamp());
assertNull(entry.getTaskId());
Map<String, Object> data = objectMapper.readValue(entry.getData(), new TypeReference<HashMap<String, Object>>() {});
assertNotNull(data.get(Fields.PROCESS_DEFINITION_ID));
assertNotNull(data.get(Fields.PROCESS_INSTANCE_ID));
assertNotNull(data.get(Fields.VALUE_STRING));
assertEquals(testTenant, data.get(Fields.TENANT_ID));
}
// process instance start
if (i == 1) {
assertNotNull(entry.getType());
assertEquals("PROCESSINSTANCE_START", entry.getType());
assertNotNull(entry.getProcessDefinitionId());
assertNotNull(entry.getProcessInstanceId());
assertNotNull(entry.getTimeStamp());
assertNull(entry.getExecutionId());
assertNull(entry.getTaskId());
Map<String, Object> data = objectMapper.readValue(entry.getData(), new TypeReference<HashMap<String, Object>>() {
});
assertNotNull(data.get(Fields.ID));
assertNotNull(data.get(Fields.PROCESS_DEFINITION_ID));
assertNotNull(data.get(Fields.TENANT_ID));
assertEquals(testTenant, data.get(Fields.TENANT_ID));
Map<String, Object> variableMap = (Map<String, Object>) data.get(Fields.VARIABLES);
assertEquals(1, variableMap.size());
assertEquals("helloWorld", variableMap.get("testVar"));
assertFalse(data.containsKey(Fields.NAME));
assertFalse(data.containsKey(Fields.BUSINESS_KEY));
}
// Activity started
if (i == 2 || i == 5 || i == 9 || i == 12) {
assertNotNull(entry.getType());
assertEquals(ActivitiEventType.ACTIVITY_STARTED.name(), entry.getType());
assertNotNull(entry.getProcessDefinitionId());
assertNotNull(entry.getProcessInstanceId());
assertNotNull(entry.getTimeStamp());
assertNotNull(entry.getExecutionId());
assertNull(entry.getTaskId());
Map<String, Object> data = objectMapper.readValue(entry.getData(), new TypeReference<HashMap<String, Object>>() {});
assertNotNull(data.get(Fields.ACTIVITY_ID));
assertNotNull(data.get(Fields.PROCESS_DEFINITION_ID));
assertNotNull(data.get(Fields.PROCESS_INSTANCE_ID));
assertNotNull(data.get(Fields.EXECUTION_ID));
assertNotNull(data.get(Fields.ACTIVITY_TYPE));
assertEquals(testTenant, data.get(Fields.TENANT_ID));
}
// Leaving start
if (i == 3) {
assertNotNull(entry.getType());
assertEquals(ActivitiEventType.ACTIVITY_COMPLETED.name(), entry.getType());
assertNotNull(entry.getProcessDefinitionId());
assertNotNull(entry.getProcessInstanceId());
assertNotNull(entry.getTimeStamp());
assertNotNull(entry.getExecutionId());
assertNull(entry.getTaskId());
Map<String, Object> data = objectMapper.readValue(entry.getData(), new TypeReference<HashMap<String, Object>>() {});
assertNotNull(data.get(Fields.ACTIVITY_ID));
assertEquals("startEvent1", data.get(Fields.ACTIVITY_ID));
assertNotNull(data.get(Fields.PROCESS_DEFINITION_ID));
assertNotNull(data.get(Fields.PROCESS_INSTANCE_ID));
assertNotNull(data.get(Fields.EXECUTION_ID));
assertNotNull(data.get(Fields.ACTIVITY_TYPE));
assertEquals(testTenant, data.get(Fields.TENANT_ID));
}
// Sequence flow taken
if (i == 4 || i == 7 || i == 8) {
assertNotNull(entry.getType());
assertEquals(ActivitiEventType.SEQUENCEFLOW_TAKEN.name(), entry.getType());
assertNotNull(entry.getProcessDefinitionId());
assertNotNull(entry.getProcessInstanceId());
assertNotNull(entry.getTimeStamp());
assertNotNull(entry.getExecutionId());
assertNull(entry.getTaskId());
Map<String, Object> data = objectMapper.readValue(entry.getData(), new TypeReference<HashMap<String, Object>>() {});
assertNotNull(data.get(Fields.ID));
assertNotNull(data.get(Fields.SOURCE_ACTIVITY_ID));
assertNotNull(data.get(Fields.SOURCE_ACTIVITY_NAME));
assertNotNull(data.get(Fields.SOURCE_ACTIVITY_TYPE));
assertEquals(testTenant, data.get(Fields.TENANT_ID));
}
// Leaving parallel gateway
if (i == 6) {
assertNotNull(entry.getType());
assertEquals(ActivitiEventType.ACTIVITY_COMPLETED.name(), entry.getType());
assertNotNull(entry.getProcessDefinitionId());
assertNotNull(entry.getProcessInstanceId());
assertNotNull(entry.getTimeStamp());
assertNotNull(entry.getExecutionId());
assertNull(entry.getTaskId());
Map<String, Object> data = objectMapper.readValue(entry.getData(), new TypeReference<HashMap<String, Object>>() {});
assertNotNull(data.get(Fields.ACTIVITY_ID));
assertNotNull(data.get(Fields.PROCESS_DEFINITION_ID));
assertNotNull(data.get(Fields.PROCESS_INSTANCE_ID));
assertNotNull(data.get(Fields.EXECUTION_ID));
assertNotNull(data.get(Fields.ACTIVITY_TYPE));
assertEquals(testTenant, data.get(Fields.TENANT_ID));
}
// Tasks
if (i == 11 || i == 14) {
assertNotNull(entry.getType());
assertEquals(ActivitiEventType.TASK_CREATED.name(), entry.getType());
assertNotNull(entry.getTimeStamp());
assertNotNull(entry.getProcessDefinitionId());
assertNotNull(entry.getProcessInstanceId());
assertNotNull(entry.getExecutionId());
assertNotNull(entry.getTaskId());
Map<String, Object> data = objectMapper.readValue(entry.getData(), new TypeReference<HashMap<String, Object>>() {});
assertNotNull(data.get(Fields.ID));
assertNotNull(data.get(Fields.NAME));
assertNotNull(data.get(Fields.ASSIGNEE));
assertNotNull(data.get(Fields.CREATE_TIME));
assertNotNull(data.get(Fields.PRIORITY));
assertNotNull(data.get(Fields.PROCESS_DEFINITION_ID));
assertNotNull(data.get(Fields.EXECUTION_ID));
assertNotNull(data.get(Fields.TENANT_ID));
assertFalse(data.containsKey(Fields.DESCRIPTION));
assertFalse(data.containsKey(Fields.CATEGORY));
assertFalse(data.containsKey(Fields.OWNER));
assertFalse(data.containsKey(Fields.DUE_DATE));
assertFalse(data.containsKey(Fields.FORM_KEY));
assertFalse(data.containsKey(Fields.USER_ID));
assertEquals(testTenant, data.get(Fields.TENANT_ID));
}
if (i == 10 || i == 13) {
assertNotNull(entry.getType());
assertEquals(ActivitiEventType.TASK_ASSIGNED.name(), entry.getType());
assertNotNull(entry.getTimeStamp());
assertNotNull(entry.getProcessDefinitionId());
assertNotNull(entry.getProcessInstanceId());
assertNotNull(entry.getExecutionId());
assertNotNull(entry.getTaskId());
Map<String, Object> data = objectMapper.readValue(entry.getData(), new TypeReference<HashMap<String, Object>>() {});
assertNotNull(data.get(Fields.ID));
assertNotNull(data.get(Fields.NAME));
assertNotNull(data.get(Fields.ASSIGNEE));
assertNotNull(data.get(Fields.CREATE_TIME));
assertNotNull(data.get(Fields.PRIORITY));
assertNotNull(data.get(Fields.PROCESS_DEFINITION_ID));
assertNotNull(data.get(Fields.EXECUTION_ID));
assertNotNull(data.get(Fields.TENANT_ID));
assertFalse(data.containsKey(Fields.DESCRIPTION));
assertFalse(data.containsKey(Fields.CATEGORY));
assertFalse(data.containsKey(Fields.OWNER));
assertFalse(data.containsKey(Fields.DUE_DATE));
assertFalse(data.containsKey(Fields.FORM_KEY));
assertFalse(data.containsKey(Fields.USER_ID));
assertEquals(testTenant, data.get(Fields.TENANT_ID));
}
lastLogNr = entry.getLogNumber();
}
// Completing two tasks
for (Task task : taskService.createTaskQuery().list()) {
Authentication.setAuthenticatedUserId(task.getAssignee());
Map<String, Object> varMap = new HashMap<String, Object>();
varMap.put("test", "test");
taskService.complete(task.getId(), varMap);
Authentication.setAuthenticatedUserId(null);
}
// Verify events
eventLogEntries = managementService.getEventLogEntries(lastLogNr, 100L);
assertEquals(17, eventLogEntries.size());
for (int i = 0; i < eventLogEntries.size(); i++) {
EventLogEntry entry = eventLogEntries.get(i);
// Task completion
if (i == 1 || i == 6) {
assertNotNull(entry.getType());
assertEquals(ActivitiEventType.TASK_COMPLETED.name(), entry.getType());
assertNotNull(entry.getProcessDefinitionId());
assertNotNull(entry.getProcessInstanceId());
assertNotNull(entry.getExecutionId());
assertNotNull(entry.getTaskId());
Map<String, Object> data = objectMapper.readValue(entry.getData(), new TypeReference<HashMap<String, Object>>() {
});
assertNotNull(data.get(Fields.ID));
assertNotNull(data.get(Fields.NAME));
assertNotNull(data.get(Fields.ASSIGNEE));
assertNotNull(data.get(Fields.CREATE_TIME));
assertNotNull(data.get(Fields.PRIORITY));
assertNotNull(data.get(Fields.PROCESS_DEFINITION_ID));
assertNotNull(data.get(Fields.EXECUTION_ID));
assertNotNull(data.get(Fields.TENANT_ID));
assertNotNull(data.get(Fields.USER_ID));
Map<String, Object> variableMap = (Map<String, Object>) data.get(Fields.VARIABLES);
assertEquals(1, variableMap.size());
assertEquals("test", variableMap.get("test"));
assertFalse(data.containsKey(Fields.DESCRIPTION));
assertFalse(data.containsKey(Fields.CATEGORY));
assertFalse(data.containsKey(Fields.OWNER));
assertFalse(data.containsKey(Fields.DUE_DATE));
assertFalse(data.containsKey(Fields.FORM_KEY));
assertEquals(testTenant, data.get(Fields.TENANT_ID));
}
// Activity Completed
if (i == 2 || i == 7 || i == 10 || i == 13) {
assertNotNull(entry.getType());
assertEquals(ActivitiEventType.ACTIVITY_COMPLETED.name(), entry.getType());
assertNotNull(entry.getProcessDefinitionId());
assertNotNull(entry.getProcessInstanceId());
assertNotNull(entry.getTimeStamp());
assertNotNull(entry.getExecutionId());
assertNull(entry.getTaskId());
Map<String, Object> data = objectMapper.readValue(entry.getData(), new TypeReference<HashMap<String, Object>>() {
});
assertNotNull(data.get(Fields.ACTIVITY_ID));
assertNotNull(data.get(Fields.PROCESS_DEFINITION_ID));
assertNotNull(data.get(Fields.PROCESS_INSTANCE_ID));
assertNotNull(data.get(Fields.EXECUTION_ID));
assertNotNull(data.get(Fields.ACTIVITY_TYPE));
assertNotNull(data.get(Fields.BEHAVIOR_CLASS));
assertEquals(testTenant, data.get(Fields.TENANT_ID));
if (i == 2) {
assertEquals("userTask", data.get(Fields.ACTIVITY_TYPE));
} else if (i == 7) {
assertEquals("userTask", data.get(Fields.ACTIVITY_TYPE));
} else if (i == 10) {
assertEquals("parallelGateway", data.get(Fields.ACTIVITY_TYPE));
} else if (i == 13) {
assertEquals("endEvent", data.get(Fields.ACTIVITY_TYPE));
}
}
// Sequence flow taken
if (i == 3 || i == 8 || i == 11) {
assertNotNull(entry.getType());
assertEquals(entry.getType(), ActivitiEventType.SEQUENCEFLOW_TAKEN.name());
assertNotNull(entry.getProcessDefinitionId());
assertNotNull(entry.getProcessInstanceId());
assertNotNull(entry.getTimeStamp());
assertNotNull(entry.getExecutionId());
assertNull(entry.getTaskId());
Map<String, Object> data = objectMapper.readValue(entry.getData(), new TypeReference<HashMap<String, Object>>() {});
assertNotNull(data.get(Fields.ID));
assertNotNull(data.get(Fields.SOURCE_ACTIVITY_ID));
assertNotNull(data.get(Fields.SOURCE_ACTIVITY_TYPE));
assertNotNull(data.get(Fields.SOURCE_ACTIVITY_BEHAVIOR_CLASS));
assertNotNull(data.get(Fields.TARGET_ACTIVITY_ID));
assertNotNull(data.get(Fields.TARGET_ACTIVITY_TYPE));
assertNotNull(data.get(Fields.TARGET_ACTIVITY_BEHAVIOR_CLASS));
assertEquals(testTenant, data.get(Fields.TENANT_ID));
}
if (i == 14 || i == 15) {
assertNotNull(entry.getType());
assertEquals("VARIABLE_DELETED", entry.getType());
assertNotNull(entry.getProcessDefinitionId());
assertNotNull(entry.getProcessInstanceId());
assertNotNull(entry.getTimeStamp());
assertNotNull(entry.getExecutionId());
assertNull(entry.getTaskId());
}
if (i == 16) {
assertNotNull(entry.getType());
assertEquals("PROCESSINSTANCE_END", entry.getType());
assertNotNull(entry.getProcessDefinitionId());
assertNotNull(entry.getProcessInstanceId());
assertNotNull(entry.getTimeStamp());
assertNull(entry.getExecutionId());
assertNull(entry.getTaskId());
Map<String, Object> data = objectMapper.readValue(entry.getData(), new TypeReference<HashMap<String, Object>>() {});
assertNotNull(data.get(Fields.ID));
assertNotNull(data.get(Fields.PROCESS_DEFINITION_ID));
assertNotNull(data.get(Fields.TENANT_ID));
assertFalse(data.containsKey(Fields.NAME));
assertFalse(data.containsKey(Fields.BUSINESS_KEY));
assertEquals(testTenant, data.get(Fields.TENANT_ID));
}
}
// Cleanup
for (EventLogEntry eventLogEntry : managementService.getEventLogEntries(null, null)) {
managementService.deleteEventLogEntry(eventLogEntry.getLogNumber());
}
repositoryService.deleteDeployment(deploymentId, true);
}
public void testDatabaseEventsNoTenant() throws IOException {
String deploymentId = repositoryService.createDeployment().addClasspathResource("org/activiti/engine/test/api/event/DatabaseEventLoggerProcess.bpmn20.xml").deploy().getId();
// Run process to gather data
ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("DatabaseEventLoggerProcess", CollectionUtil.singletonMap("testVar", "helloWorld"));
// Verify event log entries
List<EventLogEntry> eventLogEntries = managementService.getEventLogEntries(null, null);
String processDefinitionId = processInstance.getProcessDefinitionId();
Iterator<EventLogEntry> iterator = eventLogEntries.iterator();
while (iterator.hasNext()) {
EventLogEntry entry = iterator.next();
if (entry.getProcessDefinitionId() != null && !entry.getProcessDefinitionId().equals(processDefinitionId)) {
iterator.remove();
}
}
assertEquals(15, eventLogEntries.size());
for (int i = 0; i < eventLogEntries.size(); i++) {
EventLogEntry entry = eventLogEntries.get(i);
if (i == 0) {
assertEquals(entry.getType(), ActivitiEventType.VARIABLE_CREATED.name());
Map<String, Object> data = objectMapper.readValue(entry.getData(), new TypeReference<HashMap<String, Object>>() {
});
assertNull(data.get(Fields.TENANT_ID));
}
// process instance start
if (i == 1) {
assertEquals("PROCESSINSTANCE_START", entry.getType());
Map<String, Object> data = objectMapper.readValue(entry.getData(), new TypeReference<HashMap<String, Object>>() {
});
assertNull(data.get(Fields.TENANT_ID));
}
// Activity started
if (i == 2 || i == 5 || i == 9 || i == 12) {
assertEquals(entry.getType(), ActivitiEventType.ACTIVITY_STARTED.name());
Map<String, Object> data = objectMapper.readValue(entry.getData(), new TypeReference<HashMap<String, Object>>() {
});
assertNull(data.get(Fields.TENANT_ID));
}
// Leaving start
if (i == 3) {
assertEquals(entry.getType(), ActivitiEventType.ACTIVITY_COMPLETED.name());
Map<String, Object> data = objectMapper.readValue(entry.getData(), new TypeReference<HashMap<String, Object>>() {
});
assertNull(data.get(Fields.TENANT_ID));
}
// Sequence flow taken
if (i == 4 || i == 7 || i == 8) {
assertEquals(entry.getType(), ActivitiEventType.SEQUENCEFLOW_TAKEN.name());
Map<String, Object> data = objectMapper.readValue(entry.getData(), new TypeReference<HashMap<String, Object>>() {
});
assertNull(data.get(Fields.TENANT_ID));
}
// Leaving parallel gateway
if (i == 6) {
assertEquals(entry.getType(), ActivitiEventType.ACTIVITY_COMPLETED.name());
Map<String, Object> data = objectMapper.readValue(entry.getData(), new TypeReference<HashMap<String, Object>>() {
});
assertNull(data.get(Fields.TENANT_ID));
}
// Tasks
if (i == 11 || i == 14) {
assertEquals(entry.getType(), ActivitiEventType.TASK_CREATED.name());
Map<String, Object> data = objectMapper.readValue(entry.getData(), new TypeReference<HashMap<String, Object>>() {
});
assertNull(data.get(Fields.TENANT_ID));
}
if (i == 10 || i == 13) {
assertEquals(entry.getType(), ActivitiEventType.TASK_ASSIGNED.name());
Map<String, Object> data = objectMapper.readValue(entry.getData(), new TypeReference<HashMap<String, Object>>() {
});
assertNull(data.get(Fields.TENANT_ID));
}
}
repositoryService.deleteDeployment(deploymentId, true);
// Cleanup
for (EventLogEntry eventLogEntry : managementService.getEventLogEntries(null, null)) {
managementService.deleteEventLogEntry(eventLogEntry.getLogNumber());
}
}
public void testStandaloneTaskEvents() throws JsonParseException, JsonMappingException, IOException {
Task task = taskService.newTask();
task.setAssignee("kermit");
task.setTenantId("myTenant");
taskService.saveTask(task);
List<EventLogEntry> events = managementService.getEventLogEntries(null, null);
assertEquals(2, events.size());
assertEquals("TASK_ASSIGNED", events.get(0).getType());
assertEquals("TASK_CREATED", events.get(1).getType());
for (EventLogEntry eventLogEntry : events) {
Map<String, Object> data = objectMapper.readValue(eventLogEntry.getData(), new TypeReference<HashMap<String, Object>>() {
});
assertEquals("myTenant", data.get(Fields.TENANT_ID));
}
// Cleanup
taskService.deleteTask(task.getId(), true);
for (EventLogEntry eventLogEntry : managementService.getEventLogEntries(null, null)) {
managementService.deleteEventLogEntry(eventLogEntry.getLogNumber());
}
}
}
|
|
/*
* Copyright (c) 2013 Etsy
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.etsy.android.grid;
import android.content.Context;
import android.content.res.Configuration;
import android.content.res.TypedArray;
import android.os.Parcel;
import android.os.Parcelable;
import android.util.AttributeSet;
import android.util.Log;
import android.util.SparseArray;
import android.view.View;
import android.view.ViewGroup;
import java.util.Arrays;
/**
* A staggered grid view which supports multiple columns with rows of varying sizes.
* <p/>
* Builds multiple columns on top of {@link ExtendableListView}
* <p/>
* Partly inspired by - https://github.com/huewu/PinterestLikeAdapterView
*/
public class StaggeredGridView extends ExtendableListView {
private static final String TAG = "StaggeredGridView";
private static final boolean DBG = false;
private static final int DEFAULT_COLUMNS_PORTRAIT = 2;
private static final int DEFAULT_COLUMNS_LANDSCAPE = 3;
private int mColumnCount;
private int mItemMargin;
private int mColumnWidth;
private boolean mNeedSync;
private int mColumnCountPortrait = DEFAULT_COLUMNS_PORTRAIT;
private int mColumnCountLandscape = DEFAULT_COLUMNS_LANDSCAPE;
/**
* A key-value collection where the key is the position and the value is a
* {@link GridItemRecord} with some info about that position,
* so we can maintain its position and reorganize on orientation change.
*/
private SparseArray<GridItemRecord> mPositionData;
private int mGridPaddingLeft;
private int mGridPaddingRight;
private int mGridPaddingTop;
private int mGridPaddingBottom;
/**
* The location of the top of each top item added in each column.
*/
private int[] mColumnTops;
/**
* The location of the bottom of each bottom item added in each column.
*/
private int[] mColumnBottoms;
/**
* The left location to put items for each column
*/
private int[] mColumnLefts;
/**
* Tells us the distance we've offset from the top.
* Can be slightly off on orientation change - TESTING
*/
private int mDistanceToTop;
public StaggeredGridView(final Context context) {
this(context, null);
}
public StaggeredGridView(final Context context, final AttributeSet attrs) {
this(context, attrs, 0);
}
public StaggeredGridView(final Context context, final AttributeSet attrs, final int defStyle) {
super(context, attrs, defStyle);
if (attrs != null) {
// get the number of columns in portrait and landscape
TypedArray typedArray = context.obtainStyledAttributes(attrs, R.styleable.StaggeredGridView, defStyle, 0);
mColumnCount = typedArray.getInteger(
R.styleable.StaggeredGridView_column_count, 0);
if (mColumnCount > 0) {
mColumnCountPortrait = mColumnCount;
mColumnCountLandscape = mColumnCount;
} else {
mColumnCountPortrait = typedArray.getInteger(
R.styleable.StaggeredGridView_column_count_portrait,
DEFAULT_COLUMNS_PORTRAIT);
mColumnCountLandscape = typedArray.getInteger(
R.styleable.StaggeredGridView_column_count_landscape,
DEFAULT_COLUMNS_LANDSCAPE);
}
mItemMargin = typedArray.getDimensionPixelSize(
R.styleable.StaggeredGridView_item_margin, 0);
mGridPaddingLeft = typedArray.getDimensionPixelSize(
R.styleable.StaggeredGridView_grid_paddingLeft, 0);
mGridPaddingRight = typedArray.getDimensionPixelSize(
R.styleable.StaggeredGridView_grid_paddingRight, 0);
mGridPaddingTop = typedArray.getDimensionPixelSize(
R.styleable.StaggeredGridView_grid_paddingTop, 0);
mGridPaddingBottom = typedArray.getDimensionPixelSize(
R.styleable.StaggeredGridView_grid_paddingBottom, 0);
typedArray.recycle();
}
mColumnCount = 0; // determined onMeasure
// Creating these empty arrays to avoid saving null states
mColumnTops = new int[0];
mColumnBottoms = new int[0];
mColumnLefts = new int[0];
mPositionData = new SparseArray<GridItemRecord>();
}
// Grid padding is applied to the list item rows but not the header and footer
public int getRowPaddingLeft() {
return getListPaddingLeft() + mGridPaddingLeft;
}
// //////////////////////////////////////////////////////////////////////////////////////////
// PROPERTIES
//
public int getRowPaddingRight() {
return getListPaddingRight() + mGridPaddingRight;
}
public int getRowPaddingTop() {
return getListPaddingTop() + mGridPaddingTop;
}
public int getRowPaddingBottom() {
return getListPaddingBottom() + mGridPaddingBottom;
}
public void setGridPadding(int left, int top, int right, int bottom) {
mGridPaddingLeft = left;
mGridPaddingTop = top;
mGridPaddingRight = right;
mGridPaddingBottom = bottom;
}
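// A minimal usage sketch (names and values are illustrative, not from this project):
// after inflating the view, the grid can also be configured from code, e.g.
//   StaggeredGridView grid = (StaggeredGridView) findViewById(R.id.grid); // hypothetical id
//   grid.setColumnCountPortrait(2);
//   grid.setColumnCountLandscape(3);
//   grid.setGridPadding(8, 8, 8, 8); // pixels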
public void setColumnCountPortrait(int columnCountPortrait) {
mColumnCountPortrait = columnCountPortrait;
onSizeChanged(getWidth(), getHeight());
requestLayoutChildren();
}
public void setColumnCountLandscape(int columnCountLandscape) {
mColumnCountLandscape = columnCountLandscape;
onSizeChanged(getWidth(), getHeight());
requestLayoutChildren();
}
public void setColumnCount(int columnCount) {
mColumnCountPortrait = columnCount;
mColumnCountLandscape = columnCount;
// mColumnCount is set in onSizeChanged()
onSizeChanged(getWidth(), getHeight());
requestLayoutChildren();
}
// //////////////////////////////////////////////////////////////////////////////////////////
// MEASUREMENT
//
private boolean isLandscape() {
return getResources().getConfiguration().orientation == Configuration.ORIENTATION_LANDSCAPE;
}
@Override
protected void onMeasure(final int widthMeasureSpec, final int heightMeasureSpec) {
super.onMeasure(widthMeasureSpec, heightMeasureSpec);
if (mColumnCount <= 0) {
boolean isLandscape = isLandscape();
mColumnCount = isLandscape ? mColumnCountLandscape : mColumnCountPortrait;
}
// our column width is the width of the listview
// minus its padding
// minus the total item margins
// divided by the number of columns
mColumnWidth = calculateColumnWidth(getMeasuredWidth());
if (mColumnTops == null || mColumnTops.length != mColumnCount) {
mColumnTops = new int[mColumnCount];
initColumnTops();
}
if (mColumnBottoms == null || mColumnBottoms.length != mColumnCount) {
mColumnBottoms = new int[mColumnCount];
initColumnBottoms();
}
if (mColumnLefts == null || mColumnLefts.length != mColumnCount) {
mColumnLefts = new int[mColumnCount];
initColumnLefts();
}
}
@Override
protected void onMeasureChild(final View child, final LayoutParams layoutParams) {
final int viewType = layoutParams.viewType;
final int position = layoutParams.position;
if (viewType == ITEM_VIEW_TYPE_HEADER_OR_FOOTER ||
viewType == ITEM_VIEW_TYPE_IGNORE) {
// for headers and weird ignored views
super.onMeasureChild(child, layoutParams);
} else {
if (DBG) Log.d(TAG, "onMeasureChild BEFORE position:" + position +
" h:" + getMeasuredHeight());
// measure it to the width of our column.
int childWidthSpec = MeasureSpec.makeMeasureSpec(mColumnWidth, MeasureSpec.EXACTLY);
int childHeightSpec;
if (layoutParams.height > 0) {
childHeightSpec = MeasureSpec.makeMeasureSpec(layoutParams.height, MeasureSpec.EXACTLY);
} else {
childHeightSpec = MeasureSpec.makeMeasureSpec(LayoutParams.WRAP_CONTENT, MeasureSpec.UNSPECIFIED);
}
child.measure(childWidthSpec, childHeightSpec);
}
final int childHeight = getChildHeight(child);
setPositionHeightRatio(position, childHeight);
if (DBG) Log.d(TAG, "onMeasureChild AFTER position:" + position +
" h:" + childHeight);
}
public int getColumnWidth() {
return mColumnWidth;
}
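/**
* Resets the grid back to its initial state: clears the per-column top/bottom
* bookkeeping and the stored {@link GridItemRecord}s, then scrolls back to position 0.
*/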
public void resetToTop() {
if (mColumnCount > 0) {
if (mColumnTops == null) {
mColumnTops = new int[mColumnCount];
}
if (mColumnBottoms == null) {
mColumnBottoms = new int[mColumnCount];
}
initColumnTopsAndBottoms();
mPositionData.clear();
mNeedSync = false;
mDistanceToTop = 0;
setSelection(0);
}
}
@Override
protected void onChildCreated(final int position, final boolean flowDown) {
super.onChildCreated(position, flowDown);
if (!isHeaderOrFooter(position)) {
// do we already have a column for this position?
final int column = getChildColumn(position, flowDown);
setPositionColumn(position, column);
if (DBG) Log.d(TAG, "onChildCreated position:" + position +
" is in column:" + column);
} else {
setPositionIsHeaderFooter(position);
}
}
// //////////////////////////////////////////////////////////////////////////////////////////
// POSITIONING
//
private void requestLayoutChildren() {
final int count = getChildCount();
for (int i = 0; i < count; i++) {
final View v = getChildAt(i);
if (v != null) v.requestLayout();
}
}
@Override
protected void layoutChildren() {
preLayoutChildren();
super.layoutChildren();
}
private void preLayoutChildren() {
// on a major re-layout reset for our next layout pass
if (!mNeedSync) {
Arrays.fill(mColumnBottoms, 0);
} else {
mNeedSync = false;
}
// copy the tops into the bottom
// since we're going to redo a layout pass that will draw down from
// the top
System.arraycopy(mColumnTops, 0, mColumnBottoms, 0, mColumnCount);
}
@Override
protected void onLayoutChild(final View child,
final int position,
final boolean flowDown,
final int childrenLeft, final int childTop,
final int childRight, final int childBottom) {
if (isHeaderOrFooter(position)) {
layoutGridHeaderFooter(child, position, flowDown, childrenLeft, childTop, childRight, childBottom);
} else {
layoutGridChild(child, position, flowDown, childrenLeft, childRight);
}
}
// NOTE : Views will either be laid out via onLayoutChild()
// OR
// offset via onOffsetChild() if they are active but off-screen, so that we can recycle them!
// Both onLayoutChild() and onOffsetChild() are called after we measure our view -
// see ExtensibleListView.setupChild();
private void layoutGridHeaderFooter(final View child, final int position, final boolean flowDown, final int childrenLeft, final int childTop, final int childRight, final int childBottom) {
// offset the top and bottom of all our columns
// if it's the footer we want it below the lowest child bottom
int gridChildTop;
int gridChildBottom;
if (flowDown) {
gridChildTop = getLowestPositionedBottom();
gridChildBottom = gridChildTop + getChildHeight(child);
} else {
gridChildBottom = getHighestPositionedTop();
gridChildTop = gridChildBottom - getChildHeight(child);
}
for (int i = 0; i < mColumnCount; i++) {
updateColumnTopIfNeeded(i, gridChildTop);
updateColumnBottomIfNeeded(i, gridChildBottom);
}
super.onLayoutChild(child, position, flowDown,
childrenLeft, gridChildTop, childRight, gridChildBottom);
}
private void layoutGridChild(final View child, final int position,
final boolean flowDown,
final int childrenLeft, final int childRight) {
// stash the bottom and the top if it's higher positioned
int column = getPositionColumn(position);
int gridChildTop;
int gridChildBottom;
int childTopMargin = getChildTopMargin(position);
int childBottomMargin = getChildBottomMargin();
int verticalMargins = childTopMargin + childBottomMargin;
if (flowDown) {
gridChildTop = mColumnBottoms[column]; // the next item's top is the last item's bottom
gridChildBottom = gridChildTop + (getChildHeight(child) + verticalMargins);
} else {
gridChildBottom = mColumnTops[column]; // the bottom of the next column up is our top
gridChildTop = gridChildBottom - (getChildHeight(child) + verticalMargins);
}
if (DBG) Log.d(TAG, "onLayoutChild position:" + position +
" column:" + column +
" gridChildTop:" + gridChildTop +
" gridChildBottom:" + gridChildBottom);
// we also know the column of this view so let's stash it in the
// view's layout params
GridLayoutParams layoutParams = (GridLayoutParams) child.getLayoutParams();
layoutParams.column = column;
updateColumnBottomIfNeeded(column, gridChildBottom);
updateColumnTopIfNeeded(column, gridChildTop);
// subtract the margins before layout
gridChildTop += childTopMargin;
gridChildBottom -= childBottomMargin;
child.layout(childrenLeft, gridChildTop, childRight, gridChildBottom);
}
@Override
protected void onOffsetChild(final View child, final int position,
final boolean flowDown, final int childrenLeft, final int childTop) {
// if the child is recycled and is just offset
// we still want to add its details into our store
if (isHeaderOrFooter(position)) {
offsetGridHeaderFooter(child, position, flowDown, childrenLeft, childTop);
} else {
offsetGridChild(child, position, flowDown, childrenLeft, childTop);
}
}
private void offsetGridHeaderFooter(final View child, final int position, final boolean flowDown, final int childrenLeft, final int childTop) {
// offset the top and bottom of all our columns
// if it's the footer we want it below the lowest child bottom
int gridChildTop;
int gridChildBottom;
if (flowDown) {
gridChildTop = getLowestPositionedBottom();
gridChildBottom = gridChildTop + getChildHeight(child);
} else {
gridChildBottom = getHighestPositionedTop();
gridChildTop = gridChildBottom - getChildHeight(child);
}
for (int i = 0; i < mColumnCount; i++) {
updateColumnTopIfNeeded(i, gridChildTop);
updateColumnBottomIfNeeded(i, gridChildBottom);
}
super.onOffsetChild(child, position, flowDown, childrenLeft, gridChildTop);
}
private void offsetGridChild(final View child, final int position, final boolean flowDown, final int childrenLeft, final int childTop) {
// stash the bottom and the top if it's higher positioned
int column = getPositionColumn(position);
int gridChildTop;
int gridChildBottom;
int childTopMargin = getChildTopMargin(position);
int childBottomMargin = getChildBottomMargin();
int verticalMargins = childTopMargin + childBottomMargin;
if (flowDown) {
gridChildTop = mColumnBottoms[column]; // the next item's top is the last item's bottom
gridChildBottom = gridChildTop + (getChildHeight(child) + verticalMargins);
} else {
gridChildBottom = mColumnTops[column]; // the bottom of the next column up is our top
gridChildTop = gridChildBottom - (getChildHeight(child) + verticalMargins);
}
if (DBG) Log.d(TAG, "onOffsetChild position:" + position +
" column:" + column +
" childTop:" + childTop +
" gridChildTop:" + gridChildTop +
" gridChildBottom:" + gridChildBottom);
// we also know the column of this view so let's stash it in the
// view's layout params
GridLayoutParams layoutParams = (GridLayoutParams) child.getLayoutParams();
layoutParams.column = column;
updateColumnBottomIfNeeded(column, gridChildBottom);
updateColumnTopIfNeeded(column, gridChildTop);
super.onOffsetChild(child, position, flowDown, childrenLeft, gridChildTop + childTopMargin);
}
private int getChildHeight(final View child) {
return child.getMeasuredHeight();
}
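// Only the first visible row of grid items gets a top margin; every item gets the
// bottom margin, so the gap between consecutive rows is always a single mItemMargin.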
private int getChildTopMargin(final int position) {
boolean isFirstRow = position < (getHeaderViewsCount() + mColumnCount);
return isFirstRow ? mItemMargin : 0;
}
private int getChildBottomMargin() {
return mItemMargin;
}
@Override
protected LayoutParams generateChildLayoutParams(final View child) {
GridLayoutParams layoutParams = null;
final ViewGroup.LayoutParams childParams = child.getLayoutParams();
if (childParams != null) {
if (childParams instanceof GridLayoutParams) {
layoutParams = (GridLayoutParams) childParams;
} else {
layoutParams = new GridLayoutParams(childParams);
}
}
if (layoutParams == null) {
layoutParams = new GridLayoutParams(
mColumnWidth, ViewGroup.LayoutParams.WRAP_CONTENT);
}
return layoutParams;
}
private void updateColumnTopIfNeeded(int column, int childTop) {
if (childTop < mColumnTops[column]) {
mColumnTops[column] = childTop;
}
}
private void updateColumnBottomIfNeeded(int column, int childBottom) {
if (childBottom > mColumnBottoms[column]) {
mColumnBottoms[column] = childBottom;
}
}
@Override
protected int getChildLeft(final int position) {
if (isHeaderOrFooter(position)) {
return super.getChildLeft(position);
} else {
final int column = getPositionColumn(position);
return mColumnLefts[column];
}
}
@Override
protected int getChildTop(final int position) {
if (isHeaderOrFooter(position)) {
return super.getChildTop(position);
} else {
final int column = getPositionColumn(position);
if (column == -1) {
return getHighestPositionedBottom();
}
return mColumnBottoms[column];
}
}
/**
* Get the top for the next child down in our view
* (maybe a column across) so we can fill down.
*/
@Override
protected int getNextChildDownsTop(final int position) {
if (isHeaderOrFooter(position)) {
return super.getNextChildDownsTop(position);
} else {
return getHighestPositionedBottom();
}
}
@Override
protected int getChildBottom(final int position) {
if (isHeaderOrFooter(position)) {
return super.getChildBottom(position);
} else {
final int column = getPositionColumn(position);
if (column == -1) {
return getLowestPositionedTop();
}
return mColumnTops[column];
}
}
/**
* Get the bottom for the next child up in our view
* (maybe a column across) so we can fill up.
*/
@Override
protected int getNextChildUpsBottom(final int position) {
if (isHeaderOrFooter(position)) {
return super.getNextChildUpsBottom(position);
} else {
return getLowestPositionedTop();
}
}
@Override
protected int getLastChildBottom() {
final int lastPosition = mFirstPosition + (getChildCount() - 1);
if (isHeaderOrFooter(lastPosition)) {
return super.getLastChildBottom();
}
return getHighestPositionedBottom();
}
@Override
protected int getFirstChildTop() {
if (isHeaderOrFooter(mFirstPosition)) {
return super.getFirstChildTop();
}
return getLowestPositionedTop();
}
@Override
protected int getHighestChildTop() {
if (isHeaderOrFooter(mFirstPosition)) {
return super.getHighestChildTop();
}
return getHighestPositionedTop();
}
@Override
protected int getLowestChildBottom() {
final int lastPosition = mFirstPosition + (getChildCount() - 1);
if (isHeaderOrFooter(lastPosition)) {
return super.getLowestChildBottom();
}
return getLowestPositionedBottom();
}
@Override
protected void offsetChildrenTopAndBottom(final int offset) {
super.offsetChildrenTopAndBottom(offset);
offsetAllColumnsTopAndBottom(offset);
offsetDistanceToTop(offset);
}
protected void offsetChildrenTopAndBottom(final int offset, final int column) {
if (DBG) Log.d(TAG, "offsetChildrenTopAndBottom: " + offset + " column:" + column);
final int count = getChildCount();
for (int i = 0; i < count; i++) {
final View v = getChildAt(i);
if (v != null &&
v.getLayoutParams() != null &&
v.getLayoutParams() instanceof GridLayoutParams) {
GridLayoutParams lp = (GridLayoutParams) v.getLayoutParams();
if (lp.column == column) {
v.offsetTopAndBottom(offset);
}
}
}
offsetColumnTopAndBottom(offset, column);
}
private void offsetDistanceToTop(final int offset) {
mDistanceToTop += offset;
if (DBG) Log.d(TAG, "offset mDistanceToTop:" + mDistanceToTop);
}
public int getDistanceToTop() {
return mDistanceToTop;
}
private void offsetAllColumnsTopAndBottom(final int offset) {
if (offset != 0) {
for (int i = 0; i < mColumnCount; i++) {
offsetColumnTopAndBottom(offset, i);
}
}
}
private void offsetColumnTopAndBottom(final int offset, final int column) {
if (offset != 0) {
mColumnTops[column] += offset;
mColumnBottoms[column] += offset;
}
}
@Override
protected void adjustViewsAfterFillGap(final boolean down) {
super.adjustViewsAfterFillGap(down);
// fix vertical gaps when hitting the top after a rotate
// only when scrolling back up!
if (!down) {
alignTops();
}
}
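/**
* When we've scrolled back to the first grid row, nudge each column so its first
* item lines up with the highest column's top, closing any vertical gaps left
* over from an orientation change.
*/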
private void alignTops() {
if (mFirstPosition == getHeaderViewsCount()) {
// the first visible position is the first grid item right after the header views
int[] nonHeaderTops = getHighestNonHeaderTops();
// we should now have our non header tops
// align them
boolean isAligned = true;
int highestColumn = -1;
int highestTop = Integer.MAX_VALUE;
for (int i = 0; i < nonHeaderTops.length; i++) {
// are they all aligned
if (isAligned && i > 0 && nonHeaderTops[i] != highestTop) {
isAligned = false; // not all the tops are aligned
}
// what's the highest
if (nonHeaderTops[i] < highestTop) {
highestTop = nonHeaderTops[i];
highestColumn = i;
}
}
// skip the rest.
if (isAligned) return;
// we've got the highest column - lets align the others
for (int i = 0; i < nonHeaderTops.length; i++) {
if (i != highestColumn) {
// there's a gap in this column
int offset = highestTop - nonHeaderTops[i];
offsetChildrenTopAndBottom(offset, i);
}
}
invalidate();
}
}
private int[] getHighestNonHeaderTops() {
int[] nonHeaderTops = new int[mColumnCount];
int childCount = getChildCount();
if (childCount > 0) {
for (int i = 0; i < childCount; i++) {
View child = getChildAt(i);
if (child != null &&
child.getLayoutParams() != null &&
child.getLayoutParams() instanceof GridLayoutParams) {
// is this child's top the highest non-header top in its column?
GridLayoutParams lp = (GridLayoutParams) child.getLayoutParams();
// is it a child that isn't a header
if (lp.viewType != ITEM_VIEW_TYPE_HEADER_OR_FOOTER &&
child.getTop() < nonHeaderTops[lp.column]) {
nonHeaderTops[lp.column] = child.getTop();
}
}
}
}
return nonHeaderTops;
}
@Override
protected void onChildrenDetached(final int start, final int count) {
super.onChildrenDetached(start, count);
// go through our remaining views and sync the top and bottom stash.
// Repair the top and bottom column boundaries from the views we still have
Arrays.fill(mColumnTops, Integer.MAX_VALUE);
Arrays.fill(mColumnBottoms, 0);
for (int i = 0; i < getChildCount(); i++) {
final View child = getChildAt(i);
if (child != null) {
final LayoutParams childParams = (LayoutParams) child.getLayoutParams();
if (childParams.viewType != ITEM_VIEW_TYPE_HEADER_OR_FOOTER &&
childParams instanceof GridLayoutParams) {
GridLayoutParams layoutParams = (GridLayoutParams) childParams;
int column = layoutParams.column;
int position = layoutParams.position;
final int childTop = child.getTop();
if (childTop < mColumnTops[column]) {
mColumnTops[column] = childTop - getChildTopMargin(position);
}
final int childBottom = child.getBottom();
if (childBottom > mColumnBottoms[column]) {
mColumnBottoms[column] = childBottom + getChildBottomMargin();
}
} else {
// the header and footer here
final int childTop = child.getTop();
final int childBottom = child.getBottom();
for (int col = 0; col < mColumnCount; col++) {
if (childTop < mColumnTops[col]) {
mColumnTops[col] = childTop;
}
if (childBottom > mColumnBottoms[col]) {
mColumnBottoms[col] = childBottom;
}
}
}
}
}
}
@Override
protected boolean hasSpaceUp() {
int end = mClipToPadding ? getRowPaddingTop() : 0;
return getLowestPositionedTop() > end;
}
@Override
protected void onSizeChanged(final int w, final int h, final int oldw, final int oldh) {
super.onSizeChanged(w, h, oldw, oldh);
onSizeChanged(w, h);
}
// //////////////////////////////////////////////////////////////////////////////////////////
// SYNCING ACROSS ROTATION
//
@Override
protected void onSizeChanged(int w, int h) {
super.onSizeChanged(w, h);
boolean isLandscape = isLandscape();
int newColumnCount = isLandscape ? mColumnCountLandscape : mColumnCountPortrait;
if (mColumnCount != newColumnCount) {
mColumnCount = newColumnCount;
mColumnWidth = calculateColumnWidth(w);
mColumnTops = new int[mColumnCount];
mColumnBottoms = new int[mColumnCount];
mColumnLefts = new int[mColumnCount];
mDistanceToTop = 0;
// rebuild the columns
initColumnTopsAndBottoms();
initColumnLefts();
// if we have data
if (getCount() > 0 && mPositionData.size() > 0) {
onColumnSync();
}
requestLayout();
}
}
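// Column width = (grid width - horizontal row padding - (columnCount + 1) item margins) / columnCount.
// Illustrative example (numbers are assumptions, not from this project): a 480px wide grid
// with no padding, an 8px item margin and 2 columns gives (480 - 8 * 3) / 2 = 228px per column.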
private int calculateColumnWidth(final int gridWidth) {
final int listPadding = getRowPaddingLeft() + getRowPaddingRight();
return (gridWidth - listPadding - mItemMargin * (mColumnCount + 1)) / mColumnCount;
}
private int calculateColumnLeft(final int colIndex) {
return getRowPaddingLeft() + mItemMargin + ((mItemMargin + mColumnWidth) * colIndex);
}
/**
* Our mColumnTops and mColumnBottoms need to be re-built up to the
* mSyncPosition - the following layout request will then
* lay out that position and then fillUp and fillDown appropriately.
*/
private void onColumnSync() {
// re-calc tops for new column count!
int syncPosition = Math.min(mSyncPosition, getCount() - 1);
SparseArray<Double> positionHeightRatios = new SparseArray<Double>(syncPosition);
for (int pos = 0; pos < syncPosition; pos++) {
// check for weirdness
final GridItemRecord rec = mPositionData.get(pos);
if (rec == null) break;
if (DBG) Log.d(TAG, "onColumnSync:" + pos + " ratio:" + rec.heightRatio);
positionHeightRatios.append(pos, rec.heightRatio);
}
mPositionData.clear();
// re-calc our relative position while at the same time
// rebuilding our GridItemRecord collection
if (DBG) Log.d(TAG, "onColumnSync column width:" + mColumnWidth);
for (int pos = 0; pos < syncPosition; pos++) {
//Check for weirdness again
final Double heightRatio = positionHeightRatios.get(pos);
if (heightRatio == null) {
break;
}
final GridItemRecord rec = getOrCreateRecord(pos);
final int height = (int) (mColumnWidth * heightRatio);
rec.heightRatio = heightRatio;
int top;
int bottom;
// check for headers
if (isHeaderOrFooter(pos)) {
// the next top is the bottom for that column
top = getLowestPositionedBottom();
bottom = top + height;
for (int i = 0; i < mColumnCount; i++) {
mColumnTops[i] = top;
mColumnBottoms[i] = bottom;
}
} else {
// what's the next column down ?
final int column = getHighestPositionedBottomColumn();
// the next top is the bottom for that column
top = mColumnBottoms[column];
bottom = top + height + getChildTopMargin(pos) + getChildBottomMargin();
mColumnTops[column] = top;
mColumnBottoms[column] = bottom;
rec.column = column;
}
if (DBG) Log.d(TAG, "onColumnSync position:" + pos +
" top:" + top +
" bottom:" + bottom +
" height:" + height +
" heightRatio:" + heightRatio);
}
// our sync position will be displayed in this column
final int syncColumn = getHighestPositionedBottomColumn();
setPositionColumn(syncPosition, syncColumn);
// we want to offset from height of the sync position
// minus the offset
int syncToBottom = mColumnBottoms[syncColumn];
int offset = -syncToBottom + mSpecificTop;
// offset all columns by
offsetAllColumnsTopAndBottom(offset);
// sync the distance to top
mDistanceToTop = -syncToBottom;
// stash our bottoms in our tops - though these will be copied back to the bottoms
System.arraycopy(mColumnBottoms, 0, mColumnTops, 0, mColumnCount);
}
private void setPositionColumn(final int position, final int column) {
GridItemRecord rec = getOrCreateRecord(position);
rec.column = column;
}
// //////////////////////////////////////////////////////////////////////////////////////////
// GridItemRecord UTILS
//
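// Heights are stored as a ratio of the column width rather than in raw pixels so
// item heights can be rebuilt in onColumnSync() after rotation, when the column width changes.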
private void setPositionHeightRatio(final int position, final int height) {
GridItemRecord rec = getOrCreateRecord(position);
rec.heightRatio = (double) height / (double) mColumnWidth;
if (DBG) Log.d(TAG, "position:" + position +
" width:" + mColumnWidth +
" height:" + height +
" heightRatio:" + rec.heightRatio);
}
private void setPositionIsHeaderFooter(final int position) {
GridItemRecord rec = getOrCreateRecord(position);
rec.isHeaderFooter = true;
}
private GridItemRecord getOrCreateRecord(final int position) {
GridItemRecord rec = mPositionData.get(position, null);
if (rec == null) {
rec = new GridItemRecord();
mPositionData.append(position, rec);
}
return rec;
}
private int getPositionColumn(final int position) {
GridItemRecord rec = mPositionData.get(position, null);
return rec != null ? rec.column : -1;
}
private boolean isHeaderOrFooter(final int position) {
final int viewType = mAdapter.getItemViewType(position);
return viewType == ITEM_VIEW_TYPE_HEADER_OR_FOOTER;
}
// //////////////////////////////////////////////////////////////////////////////////////////
// HELPERS
//
private int getChildColumn(final int position, final boolean flowDown) {
// do we already have a column for this child position?
int column = getPositionColumn(position);
// we don't have the column or it no longer fits in our grid
final int columnCount = mColumnCount;
if (column < 0 || column >= columnCount) {
// if we're going down -
// get the highest positioned (lowest value)
// column bottom
if (flowDown) {
column = getHighestPositionedBottomColumn();
} else {
column = getLowestPositionedTopColumn();
}
}
return column;
}
private void initColumnTopsAndBottoms() {
initColumnTops();
initColumnBottoms();
}
private void initColumnTops() {
Arrays.fill(mColumnTops, getPaddingTop() + mGridPaddingTop);
}
private void initColumnBottoms() {
Arrays.fill(mColumnBottoms, getPaddingTop() + mGridPaddingTop);
}
private void initColumnLefts() {
for (int i = 0; i < mColumnCount; i++) {
mColumnLefts[i] = calculateColumnLeft(i);
}
}
private int getHighestPositionedBottom() {
final int column = getHighestPositionedBottomColumn();
return mColumnBottoms[column];
}
// //////////////////////////////////////////////////////////////////////////////////////////
// BOTTOM
//
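// Screen coordinates grow downwards, so the "highest" position on screen is the
// smallest y value and the "lowest" position is the largest y value.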
private int getHighestPositionedBottomColumn() {
int columnFound = 0;
int highestPositionedBottom = Integer.MAX_VALUE;
// the highest positioned bottom is the one with the lowest value :D
for (int i = 0; i < mColumnCount; i++) {
int bottom = mColumnBottoms[i];
if (bottom < highestPositionedBottom) {
highestPositionedBottom = bottom;
columnFound = i;
}
}
return columnFound;
}
private int getLowestPositionedBottom() {
final int column = getLowestPositionedBottomColumn();
return mColumnBottoms[column];
}
private int getLowestPositionedBottomColumn() {
int columnFound = 0;
int lowestPositionedBottom = Integer.MIN_VALUE;
// the lowest positioned bottom is the one with the highest value :D
for (int i = 0; i < mColumnCount; i++) {
int bottom = mColumnBottoms[i];
if (bottom > lowestPositionedBottom) {
lowestPositionedBottom = bottom;
columnFound = i;
}
}
return columnFound;
}
private int getLowestPositionedTop() {
final int column = getLowestPositionedTopColumn();
return mColumnTops[column];
}
// //////////////////////////////////////////////////////////////////////////////////////////
// TOP
//
private int getLowestPositionedTopColumn() {
int columnFound = 0;
int lowestPositionedTop = Integer.MIN_VALUE;
// the lowest positioned top is the one with the highest value :D
for (int i = 0; i < mColumnCount; i++) {
int top = mColumnTops[i];
if (top > lowestPositionedTop) {
lowestPositionedTop = top;
columnFound = i;
}
}
return columnFound;
}
private int getHighestPositionedTop() {
final int column = getHighestPositionedTopColumn();
return mColumnTops[column];
}
private int getHighestPositionedTopColumn() {
int columnFound = 0;
int highestPositionedTop = Integer.MAX_VALUE;
// the highest positioned top is the one with the lowest value :D
for (int i = 0; i < mColumnCount; i++) {
int top = mColumnTops[i];
if (top < highestPositionedTop) {
highestPositionedTop = top;
columnFound = i;
}
}
return columnFound;
}
@Override
public Parcelable onSaveInstanceState() {
ListSavedState listState = (ListSavedState) super.onSaveInstanceState();
GridListSavedState ss = new GridListSavedState(listState.getSuperState());
// from the list state
ss.selectedId = listState.selectedId;
ss.firstId = listState.firstId;
ss.viewTop = listState.viewTop;
ss.position = listState.position;
ss.height = listState.height;
// our state
boolean haveChildren = getChildCount() > 0 && getCount() > 0;
if (haveChildren && mFirstPosition > 0) {
ss.columnCount = mColumnCount;
ss.columnTops = mColumnTops;
ss.positionData = mPositionData;
} else {
ss.columnCount = mColumnCount >= 0 ? mColumnCount : 0;
ss.columnTops = new int[ss.columnCount];
ss.positionData = new SparseArray<Object>();
}
return ss;
}
// //////////////////////////////////////////////////////////////////////////////////////////
// SAVED STATE
//
@Override
public void onRestoreInstanceState(Parcelable state) {
GridListSavedState ss = (GridListSavedState) state;
mColumnCount = ss.columnCount;
mColumnTops = ss.columnTops;
mColumnBottoms = new int[mColumnCount];
mPositionData = ss.positionData;
mNeedSync = true;
super.onRestoreInstanceState(ss);
}
/**
* Our grid item state record with {@link Parcelable} implementation
* so we can persist it across the StaggeredGridView lifecycle.
*/
static class GridItemRecord implements Parcelable {
public static final Parcelable.Creator<GridItemRecord> CREATOR
= new Parcelable.Creator<GridItemRecord>() {
public GridItemRecord createFromParcel(Parcel in) {
return new GridItemRecord(in);
}
public GridItemRecord[] newArray(int size) {
return new GridItemRecord[size];
}
};
int column;
double heightRatio;
boolean isHeaderFooter;
GridItemRecord() {
}
/**
* Constructor called from {@link #CREATOR}
*/
private GridItemRecord(Parcel in) {
column = in.readInt();
heightRatio = in.readDouble();
isHeaderFooter = in.readByte() == 1;
}
@Override
public int describeContents() {
return 0;
}
@Override
public void writeToParcel(Parcel out, int flags) {
out.writeInt(column);
out.writeDouble(heightRatio);
out.writeByte((byte) (isHeaderFooter ? 1 : 0));
}
@Override
public String toString() {
return "GridItemRecord.ListSavedState{"
+ Integer.toHexString(System.identityHashCode(this))
+ " column:" + column
+ " heightRatio:" + heightRatio
+ " isHeaderFooter:" + isHeaderFooter
+ "}";
}
}
// //////////////////////////////////////////////////////////////////////////////////////////
// LAYOUT PARAMS
//
/**
* Extended LayoutParams to hold the column position and anything else we may need for the grid
*/
public static class GridLayoutParams extends LayoutParams {
// The column the view is displayed in
int column;
public GridLayoutParams(Context c, AttributeSet attrs) {
super(c, attrs);
enforceStaggeredLayout();
}
public GridLayoutParams(int w, int h) {
super(w, h);
enforceStaggeredLayout();
}
public GridLayoutParams(int w, int h, int viewType) {
super(w, h);
enforceStaggeredLayout();
}
public GridLayoutParams(ViewGroup.LayoutParams source) {
super(source);
enforceStaggeredLayout();
}
/**
* Here we're making sure that all grid view items
* are width MATCH_PARENT and height WRAP_CONTENT.
* That's what this grid is designed for
*/
private void enforceStaggeredLayout() {
if (width != MATCH_PARENT) {
width = MATCH_PARENT;
}
if (height == MATCH_PARENT) {
height = WRAP_CONTENT;
}
}
}
public static class GridListSavedState extends ListSavedState {
public static final Creator<GridListSavedState> CREATOR
= new Creator<GridListSavedState>() {
public GridListSavedState createFromParcel(Parcel in) {
return new GridListSavedState(in);
}
public GridListSavedState[] newArray(int size) {
return new GridListSavedState[size];
}
};
int columnCount;
int[] columnTops;
SparseArray positionData;
public GridListSavedState(Parcelable superState) {
super(superState);
}
/**
* Constructor called from {@link #CREATOR}
*/
public GridListSavedState(Parcel in) {
super(in);
columnCount = in.readInt();
columnTops = new int[columnCount >= 0 ? columnCount : 0];
in.readIntArray(columnTops);
positionData = in.readSparseArray(GridItemRecord.class.getClassLoader());
}
@Override
public void writeToParcel(Parcel out, int flags) {
super.writeToParcel(out, flags);
out.writeInt(columnCount);
out.writeIntArray(columnTops);
out.writeSparseArray(positionData);
}
@Override
public String toString() {
return "StaggeredGridView.GridListSavedState{"
+ Integer.toHexString(System.identityHashCode(this)) + "}";
}
}
}
package trash.objects;
import java.util.ArrayList;
import trash.Application;
import trash.states.Game;
import trash.util.AABB;
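/**
* A tougher ground enemy: it takes two bullet hits to kill, patrols the edges of
* its building until the player lands on the same rooftop, then chases the player
* and attacks on contact.
*/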
public class StrongGoon extends GroundGoon{
public static final int IMAGE_WIDTH=150, IMAGE_HEIGHT=150;
public static final int HITBOX_X=0, HITBOX_Y=0;
public static final int HITBOX_WIDTH=150, HITBOX_HEIGHT = IMAGE_HEIGHT - HITBOX_Y;
public static final int CENTER_X = HITBOX_X + HITBOX_WIDTH / 2;
public static final int CENTER_Y = HITBOX_Y + HITBOX_HEIGHT / 2;
public static final double RUN_SPEED=1;
public static final double RUN_ACCEL=1;
public static final int DAMAGE=25;
public static final double KNOCKBACK=2.5;
private boolean aggro=false;
public StrongGoon(Player play) {
super(play);
health=2;
}
public void init(int startX,int groundY) {
x = startX;
y = groundY - IMAGE_HEIGHT;
targetX=(((int)(Math.random()*9999)&1)==0) ? 0 : Application.WIDTH;
}
public double getTargetX()
{
return targetX;
}
public AABB getHitbox() {
return new AABB(x + HITBOX_X, y + HITBOX_Y,
x + HITBOX_X + HITBOX_WIDTH,
y + HITBOX_Y + HITBOX_HEIGHT);
}
public AABB getTopHitbox() {
return new AABB(x + HITBOX_X, y + HITBOX_Y,
x + HITBOX_X + 5,
y + HITBOX_Y + 5);
}
public int getHitboxX()
{
return HITBOX_X;
}
public int getHitboxY()
{
return HITBOX_Y;
}
public int getHitboxWidth()
{
return HITBOX_WIDTH;
}
public int getHitboxHeight()
{
return HITBOX_HEIGHT;
}
public float getDrawX() {
return (float)x;
}
public int getDamage()
{
return DAMAGE;
}
public double getKnockback()
{
return KNOCKBACK;
}
public float getDrawY() {
return (float)y;
}
public boolean canMoveLeft(Building b){
return getDrawX() > b.getX1();
}
public boolean canMoveRight(Building b){
return getDrawX() + HITBOX_WIDTH < b.getX2();
}
public Building whichBuilding(ArrayList<Building> buildings){
double x1 = x + HITBOX_X;
double x2 = x1 + HITBOX_WIDTH;
for (Building b : buildings) {
if (!(x2 < b.getX1() || b.getX2() < x1) && b.getY() < Application.HEIGHT) {
return b;
}
}
return null;
}
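// Per-frame update: die when out of health, aggro when the player stands on the
// same rooftop, steer towards targetX within the building bounds, take knockback
// from bullet hits, then apply gravity and snap back onto the rooftop.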
public void move(ArrayList<Building> buildings,ArrayList<Bullet> bullets) {
if(health<1)
{
dead=true;
return;
}
Building hitBuilding = whichBuilding(buildings);
if(player.getBuilding()!=null&&player.getBuilding().equals(hitBuilding)
&&player.getDrawY()+Player.HITBOX_Y+Player.HITBOX_HEIGHT<=y+HITBOX_Y)
{
aggro=true;
}
if(aggro)
{
if(player.getInvuln()<1)
targetX=player.getDrawX()+Player.CENTER_X;
else
targetX=x;
}
if(Math.abs(vx)>=Math.abs(targetX-x))
{
x=targetX;
if(!aggro)
{
if(targetX<=hitBuilding.getX1())
{
targetX=hitBuilding.getX2();
aggro=false;
}
else
{
targetX=hitBuilding.getX1();
aggro=false;
}
}
else
{
vx=0;
}
}
else
{
if((canMoveLeft(hitBuilding)&&vx<=0)||(canMoveRight(hitBuilding)&&vx>=0))
{
x += vx;
}
else
{
if(targetX<=hitBuilding.getX1())
{
targetX=hitBuilding.getX2();
aggro=false;
}
else
{
targetX=hitBuilding.getX1();
aggro=false;
}
}
}
if(getHitbox().intersects(player.getHitbox()))
{
attacking=true;
}
else
{
attacking=false;
}
for(Bullet bill:bullets)
{
if(getHitbox().intersects(bill.getHitbox()))
{
health--;
x+=Math.copySign(50,x-bill.getDrawX());
}
}
double groundY = Application.HEIGHT;
double x1 = x + HITBOX_X;
double x2 = x1 + HITBOX_WIDTH;
// guard against a missing building before dereferencing it
if (hitBuilding != null) {
groundY = hitBuilding.getY();
}
if (y + IMAGE_HEIGHT > groundY + INTERSECT_MARGIN) {
if (hitBuilding != null) {
if (vx < 0) {
x = hitBuilding.getX2() - HITBOX_X + 1;
} else {
x = hitBuilding.getX1() - HITBOX_X - HITBOX_WIDTH - 1;
}
vx = 0;
groundY = Application.HEIGHT;
x1 = x + HITBOX_X;
x2 = x1 + HITBOX_WIDTH;
for (Building b : buildings) {
if (!(x2 < b.getX1() || b.getX2() < x1) && b.getY() < groundY) {
groundY = b.getY();
}
}
}
}
y += vy;
vy += Game.GRAVITY;
if (y + IMAGE_HEIGHT > groundY) {
vy = 0;
y = groundY - IMAGE_HEIGHT;
}
if (y + IMAGE_HEIGHT + 1 >= groundY) {
// if on ground
double target_vel=targetX-x;
double accel=target_vel-vx;
if(Math.abs(accel)>RUN_ACCEL)
{
accel=Math.copySign(RUN_ACCEL,accel);
}
vx+=accel;
if(Math.abs(vx)>RUN_SPEED)
{
vx=Math.copySign(RUN_SPEED,vx);
}
}
}
}
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.amplify.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
* <p>
* Structure for webhook, which associates a webhook with an Amplify App.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/amplify-2017-07-25/Webhook" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class Webhook implements Serializable, Cloneable, StructuredPojo {
/**
* <p>
* ARN for the webhook.
* </p>
*/
private String webhookArn;
/**
* <p>
* Id of the webhook.
* </p>
*/
private String webhookId;
/**
* <p>
* Url of the webhook.
* </p>
*/
private String webhookUrl;
/**
* <p>
* Name for a branch, part of an Amplify App.
* </p>
*/
private String branchName;
/**
* <p>
* Description for a webhook.
* </p>
*/
private String description;
/**
* <p>
* Create date / time for a webhook.
* </p>
*/
private java.util.Date createTime;
/**
* <p>
* Update date / time for a webhook.
* </p>
*/
private java.util.Date updateTime;
/**
* <p>
* ARN for the webhook.
* </p>
*
* @param webhookArn
* ARN for the webhook.
*/
public void setWebhookArn(String webhookArn) {
this.webhookArn = webhookArn;
}
/**
* <p>
* ARN for the webhook.
* </p>
*
* @return ARN for the webhook.
*/
public String getWebhookArn() {
return this.webhookArn;
}
/**
* <p>
* ARN for the webhook.
* </p>
*
* @param webhookArn
* ARN for the webhook.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public Webhook withWebhookArn(String webhookArn) {
setWebhookArn(webhookArn);
return this;
}
/**
* <p>
* Id of the webhook.
* </p>
*
* @param webhookId
* Id of the webhook.
*/
public void setWebhookId(String webhookId) {
this.webhookId = webhookId;
}
/**
* <p>
* Id of the webhook.
* </p>
*
* @return Id of the webhook.
*/
public String getWebhookId() {
return this.webhookId;
}
/**
* <p>
* Id of the webhook.
* </p>
*
* @param webhookId
* Id of the webhook.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public Webhook withWebhookId(String webhookId) {
setWebhookId(webhookId);
return this;
}
/**
* <p>
* Url of the webhook.
* </p>
*
* @param webhookUrl
* Url of the webhook.
*/
public void setWebhookUrl(String webhookUrl) {
this.webhookUrl = webhookUrl;
}
/**
* <p>
* Url of the webhook.
* </p>
*
* @return Url of the webhook.
*/
public String getWebhookUrl() {
return this.webhookUrl;
}
/**
* <p>
* Url of the webhook.
* </p>
*
* @param webhookUrl
* Url of the webhook.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public Webhook withWebhookUrl(String webhookUrl) {
setWebhookUrl(webhookUrl);
return this;
}
/**
* <p>
* Name for a branch, part of an Amplify App.
* </p>
*
* @param branchName
* Name for a branch, part of an Amplify App.
*/
public void setBranchName(String branchName) {
this.branchName = branchName;
}
/**
* <p>
* Name for a branch, part of an Amplify App.
* </p>
*
* @return Name for a branch, part of an Amplify App.
*/
public String getBranchName() {
return this.branchName;
}
/**
* <p>
* Name for a branch, part of an Amplify App.
* </p>
*
* @param branchName
* Name for a branch, part of an Amplify App.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public Webhook withBranchName(String branchName) {
setBranchName(branchName);
return this;
}
/**
* <p>
* Description for a webhook.
* </p>
*
* @param description
* Description for a webhook.
*/
public void setDescription(String description) {
this.description = description;
}
/**
* <p>
* Description for a webhook.
* </p>
*
* @return Description for a webhook.
*/
public String getDescription() {
return this.description;
}
/**
* <p>
* Description for a webhook.
* </p>
*
* @param description
* Description for a webhook.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public Webhook withDescription(String description) {
setDescription(description);
return this;
}
/**
* <p>
* Create date / time for a webhook.
* </p>
*
* @param createTime
* Create date / time for a webhook.
*/
public void setCreateTime(java.util.Date createTime) {
this.createTime = createTime;
}
/**
* <p>
* Create date / time for a webhook.
* </p>
*
* @return Create date / time for a webhook.
*/
public java.util.Date getCreateTime() {
return this.createTime;
}
/**
* <p>
* Create date / time for a webhook.
* </p>
*
* @param createTime
* Create date / time for a webhook.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public Webhook withCreateTime(java.util.Date createTime) {
setCreateTime(createTime);
return this;
}
/**
* <p>
* Update date / time for a webhook.
* </p>
*
* @param updateTime
* Update date / time for a webhook.
*/
public void setUpdateTime(java.util.Date updateTime) {
this.updateTime = updateTime;
}
/**
* <p>
* Update date / time for a webhook.
* </p>
*
* @return Update date / time for a webhook.
*/
public java.util.Date getUpdateTime() {
return this.updateTime;
}
/**
* <p>
* Update date / time for a webhook.
* </p>
*
* @param updateTime
* Update date / time for a webhook.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public Webhook withUpdateTime(java.util.Date updateTime) {
setUpdateTime(updateTime);
return this;
}
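// The with* setters return this, so a Webhook can be populated fluently, e.g.
// (illustrative values): new Webhook().withWebhookId("id").withBranchName("master");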
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getWebhookArn() != null)
sb.append("WebhookArn: ").append(getWebhookArn()).append(",");
if (getWebhookId() != null)
sb.append("WebhookId: ").append(getWebhookId()).append(",");
if (getWebhookUrl() != null)
sb.append("WebhookUrl: ").append(getWebhookUrl()).append(",");
if (getBranchName() != null)
sb.append("BranchName: ").append(getBranchName()).append(",");
if (getDescription() != null)
sb.append("Description: ").append(getDescription()).append(",");
if (getCreateTime() != null)
sb.append("CreateTime: ").append(getCreateTime()).append(",");
if (getUpdateTime() != null)
sb.append("UpdateTime: ").append(getUpdateTime());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof Webhook == false)
return false;
Webhook other = (Webhook) obj;
if (other.getWebhookArn() == null ^ this.getWebhookArn() == null)
return false;
if (other.getWebhookArn() != null && other.getWebhookArn().equals(this.getWebhookArn()) == false)
return false;
if (other.getWebhookId() == null ^ this.getWebhookId() == null)
return false;
if (other.getWebhookId() != null && other.getWebhookId().equals(this.getWebhookId()) == false)
return false;
if (other.getWebhookUrl() == null ^ this.getWebhookUrl() == null)
return false;
if (other.getWebhookUrl() != null && other.getWebhookUrl().equals(this.getWebhookUrl()) == false)
return false;
if (other.getBranchName() == null ^ this.getBranchName() == null)
return false;
if (other.getBranchName() != null && other.getBranchName().equals(this.getBranchName()) == false)
return false;
if (other.getDescription() == null ^ this.getDescription() == null)
return false;
if (other.getDescription() != null && other.getDescription().equals(this.getDescription()) == false)
return false;
if (other.getCreateTime() == null ^ this.getCreateTime() == null)
return false;
if (other.getCreateTime() != null && other.getCreateTime().equals(this.getCreateTime()) == false)
return false;
if (other.getUpdateTime() == null ^ this.getUpdateTime() == null)
return false;
if (other.getUpdateTime() != null && other.getUpdateTime().equals(this.getUpdateTime()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getWebhookArn() == null) ? 0 : getWebhookArn().hashCode());
hashCode = prime * hashCode + ((getWebhookId() == null) ? 0 : getWebhookId().hashCode());
hashCode = prime * hashCode + ((getWebhookUrl() == null) ? 0 : getWebhookUrl().hashCode());
hashCode = prime * hashCode + ((getBranchName() == null) ? 0 : getBranchName().hashCode());
hashCode = prime * hashCode + ((getDescription() == null) ? 0 : getDescription().hashCode());
hashCode = prime * hashCode + ((getCreateTime() == null) ? 0 : getCreateTime().hashCode());
hashCode = prime * hashCode + ((getUpdateTime() == null) ? 0 : getUpdateTime().hashCode());
return hashCode;
}
@Override
public Webhook clone() {
try {
return (Webhook) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
}
}
@com.amazonaws.annotation.SdkInternalApi
@Override
public void marshall(ProtocolMarshaller protocolMarshaller) {
com.amazonaws.services.amplify.model.transform.WebhookMarshaller.getInstance().marshall(this, protocolMarshaller);
}
}
/* Copyright 2010 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package codecleaver;
import codecleaver.util.DirectedGraph;
import org.objectweb.asm.AnnotationVisitor;
import org.objectweb.asm.FieldVisitor;
import org.objectweb.asm.MethodVisitor;
import org.objectweb.asm.Type;
/**
* Builds the graph of dependencies from the java infos. Every use of an Id x by an info y
* adds a dependency from y to x.
*/
public class DependencyGraphBuilder implements IdClassVisitor {
private TypeId currentTypeId;
private TypeInfo currentTypeInfo;
private String currentFileName;
private final IdTable ids;
private final InfoTable infos;
private final DirectedGraph<Id> inheritanceGraph;
private final DirectedGraph<Id> result;
public DependencyGraphBuilder(InfoTable infos, DirectedGraph<Id> inheritanceGraph) {
this.ids = infos.ids;
this.infos = infos;
this.inheritanceGraph = inheritanceGraph;
this.result = new DirectedGraph<Id>();
}
public final DirectedGraph<Id> getResult() {
return result;
}
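// Remember which archive/class file we are currently visiting; it is used to decide
// whether a type (or one of its outer types) was defined in a different file.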
public void visitFile(String file) {
this.currentFileName = file;
}
private void setIgnoredClass(TypeId id) {
currentTypeId = null;
currentTypeInfo = null;
}
private boolean isIgnoredClass() {
return currentTypeId == null;
}
// ignore classes which already have a definition, or which have an outer class from a different
// jar file.
private boolean shouldIgnoreClass(TypeId id) {
if (!infos.hasInfo(id) || !infos.getType(id).getFileName().equals(currentFileName)) {
return true;
}
TypeId outerId = id;
do {
outerId = ids.getOuterType(outerId);
if (outerId != null && infos.hasInfo(outerId)) {
TypeInfo outerInfo = infos.getType(outerId);
if (!outerInfo.getFileName().equals(currentFileName)) {
return true;
}
}
} while (outerId != null);
return false;
}
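// Edge helpers: an edge from x to y means "x depends on y", so a class depends on
// each interface it implements and on its superclass (when it has one).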
private void addImplements(TypeId classId, TypeId interfaceId) {
result.addEdge(classId, interfaceId);
}
private void addExtends(TypeId classId, TypeId superId) {
if (superId != null) {
result.addEdge(classId, superId);
}
}
/**
* Types are dependent on their base class, implemented interfaces and outer class (if any).
*/
@Override public void visit(TypeId id,
int version,
int access,
String name,
String signature,
String superName,
String[] interfaces) {
if (shouldIgnoreClass(id)) {
setIgnoredClass(id);
return;
}
currentTypeId = id;
currentTypeInfo = infos.getType(id);
addExtends(currentTypeId, currentTypeInfo.superId);
for (TypeId interfaceId : currentTypeInfo.interfaces) {
addImplements(currentTypeId, interfaceId);
}
if (signature != null) {
// TODO(peterhal): signature contains generic bases & interfaces
// throw new IllegalArgumentException();
}
}
@Override public void visitEnd() {
currentTypeId = null;
currentTypeInfo = null;
}
/**
* Fields are dependent on their type.
*/
@Override public FieldVisitor visitField(FieldId id,
int access,
String name,
String desc,
String signature,
Object value) {
if (isIgnoredClass()) {
return null;
}
result.addEdge(id, currentTypeId);
result.addOptionalEdge(id, ids.idOfDescriptor(desc));
if (signature != null) {
// TODO(peterhal): throw new IllegalArgumentException();
}
return null;
}
/**
* Methods are dependent on all types in their signature and throws clauses, as well as anything
* referenced by their bytecode. Types are dependent on their static initializers.
*/
@Override public MethodVisitor visitMethod(MethodId methodId,
int access,
String name,
String desc,
String signature,
String[] exceptions) {
if (isIgnoredClass()) {
return null;
}
result.addEdge(methodId, currentTypeId);
result.addOptionalEdge(methodId, ids.getIdOfType(Type.getReturnType(desc)));
for (Type argumentType : Type.getArgumentTypes(desc)) {
result.addOptionalEdge(methodId, ids.getIdOfType(argumentType));
}
if (infos.getMethod(methodId).isStaticInitializer()) {
result.addEdge(currentTypeId, methodId);
}
if (exceptions != null) {
for (String exceptionName : exceptions) {
result.addEdge(methodId, ids.getIdOfType(exceptionName));
}
}
if (signature != null) {
// TODO(peterhal): generics
}
if (inheritanceGraph.containsVertex(methodId)) {
// This method overrides an inherited method.
// The thinking being that a type requires all of its override methods to implement
// its contracts whether the contract is an implemented interface or a base class.
// It turns out that this dependency is problematic for some clients and can be worked around
// using the Overrides predefined set.
//
// TODO(peterhal): consider adding this back in as we get more feedback.
// graph.addEdge(currentTypeId, methodId);
}
return new GraphBuilderMethodVisitor(ids, infos, result, methodId);
}
@Override public AnnotationVisitor visitAnnotation(String desc, boolean visible) {
return null;
}
private void addOuterClass(String innerName, String outerName) {
TypeId innerId = ids.getExistingType(innerName);
TypeId outerId = ids.getExistingType(outerName);
// outerId can be null, for example:
// javax/swing/html/parser/CUP$parser$actions
if (outerId != null && innerId != null) {
// InnerClass attributes can occur in either the inner or the outer type
// and they can also appear in class files for completely unrelated types. No I am not making
// this up. Discard InnerClass attributes not in either the inner or outer type.
if (innerId != this.currentTypeId && outerId != this.currentTypeId) {
return;
}
// This shouldn't happen ... just being defensive.
TypeInfo outerInfo = infos.getOptionalType(outerId);
TypeInfo innerInfo = infos.getOptionalType(innerId);
if (outerInfo == null || innerInfo == null) {
return;
}
if (!outerInfo.getFileName().equals(currentTypeInfo.getFileName())) {
throw new RuntimeException(String.format("Inner Type '%s' contained in file '%s' is in a different file than outer type '%s' in file '%s'.",
innerId,
currentTypeInfo.getFileName(),
outerId,
outerInfo.getFileName()));
}
// inner classes depend on their containing class
result.addEdge(innerId, outerId);
// Nested types are created parent-ed to their containing package. Re-parent them here to
// their containing type.
innerInfo.setParent(outerInfo);
}
}
@Override public void visitInnerClass(
String name, String outerName, String innerName, int access) {
if (isIgnoredClass()) {
return;
}
// anonymous inner classes have null outerName
if (outerName != null) {
addOuterClass(name, outerName);
}
}
@Override public void visitOuterClass(String owner, String name, String desc) {
if (isIgnoredClass()) {
return;
}
addOuterClass(currentTypeId.name, owner);
}
}
/**
* $RCSfile$
* $Revision: 1761 $
* $Date: 2005-08-09 19:34:09 -0300 (Tue, 09 Aug 2005) $
*
* Copyright (C) 2005-2008 Jive Software. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jivesoftware.openfire.handler;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import org.dom4j.DocumentHelper;
import org.dom4j.Element;
import org.jivesoftware.openfire.IQHandlerInfo;
import org.jivesoftware.openfire.OfflineMessage;
import org.jivesoftware.openfire.OfflineMessageStore;
import org.jivesoftware.openfire.RoutingTable;
import org.jivesoftware.openfire.XMPPServer;
import org.jivesoftware.openfire.auth.UnauthorizedException;
import org.jivesoftware.openfire.disco.DiscoInfoProvider;
import org.jivesoftware.openfire.disco.DiscoItem;
import org.jivesoftware.openfire.disco.DiscoItemsProvider;
import org.jivesoftware.openfire.disco.IQDiscoInfoHandler;
import org.jivesoftware.openfire.disco.IQDiscoItemsHandler;
import org.jivesoftware.openfire.disco.ServerFeaturesProvider;
import org.jivesoftware.openfire.session.LocalClientSession;
import org.jivesoftware.openfire.user.UserManager;
import org.jivesoftware.util.XMPPDateTimeFormat;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xmpp.forms.DataForm;
import org.xmpp.forms.FormField;
import org.xmpp.packet.IQ;
import org.xmpp.packet.JID;
/**
* Implements JEP-0013: Flexible Offline Message Retrieval. Allows users to request number of
* messages, request message headers, retrieve specific messages, remove specific messages,
* retrieve all messages and remove all messages.
*
* @author Gaston Dombiak
*/
public class IQOfflineMessagesHandler extends IQHandler implements ServerFeaturesProvider,
DiscoInfoProvider, DiscoItemsProvider {
private static final Logger Log = LoggerFactory.getLogger(IQOfflineMessagesHandler.class);
private static final String NAMESPACE = "http://jabber.org/protocol/offline";
final private XMPPDateTimeFormat xmppDateTime = new XMPPDateTimeFormat();
private IQHandlerInfo info;
private IQDiscoInfoHandler infoHandler;
private IQDiscoItemsHandler itemsHandler;
private RoutingTable routingTable;
private UserManager userManager;
private OfflineMessageStore messageStore;
public IQOfflineMessagesHandler() {
super("Flexible Offline Message Retrieval Handler");
info = new IQHandlerInfo("offline", NAMESPACE);
}
@Override
public IQ handleIQ(IQ packet) throws UnauthorizedException {
IQ reply = IQ.createResultIQ(packet);
Element offlineRequest = packet.getChildElement();
JID from = packet.getFrom();
if (offlineRequest.element("purge") != null) {
// User requested to delete all offline messages
messageStore.deleteMessages(from.getNode());
}
else if (offlineRequest.element("fetch") != null) {
// Mark that offline messages shouldn't be sent when the user becomes available
stopOfflineFlooding(from);
// User requested to receive all offline messages
for (OfflineMessage offlineMessage : messageStore.getMessages(from.getNode(), false)) {
sendOfflineMessage(from, offlineMessage);
}
}
else {
for (Iterator it = offlineRequest.elementIterator("item"); it.hasNext();) {
Element item = (Element) it.next();
Date creationDate = null;
try {
creationDate = xmppDateTime.parseString(item.attributeValue("node"));
} catch (ParseException e) {
Log.error("Error parsing date", e);
}
if ("view".equals(item.attributeValue("action"))) {
// User requested to receive specific message
OfflineMessage offlineMsg = messageStore.getMessage(from.getNode(), creationDate);
if (offlineMsg != null) {
sendOfflineMessage(from, offlineMsg);
}
}
else if ("remove".equals(item.attributeValue("action"))) {
// User requested to delete specific message
messageStore.deleteMessage(from.getNode(), creationDate);
}
}
}
return reply;
}
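// Tags the stored message with an <offline/> extension whose item node is the
// message's creation timestamp, then routes it back to the requesting full JID.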
private void sendOfflineMessage(JID recipient, OfflineMessage offlineMessage) {
Element offlineInfo = offlineMessage.addChildElement("offline", NAMESPACE);
offlineInfo.addElement("item").addAttribute("node",
XMPPDateTimeFormat.format(offlineMessage.getCreationDate()));
routingTable.routePacket(recipient, offlineMessage, true);
}
@Override
public IQHandlerInfo getInfo() {
return info;
}
public Iterator<String> getFeatures() {
ArrayList<String> features = new ArrayList<String>();
features.add(NAMESPACE);
return features.iterator();
}
public Iterator<Element> getIdentities(String name, String node, JID senderJID) {
ArrayList<Element> identities = new ArrayList<Element>();
Element identity = DocumentHelper.createElement("identity");
identity.addAttribute("category", "automation");
identity.addAttribute("type", "message-list");
identities.add(identity);
return identities.iterator();
}
public Iterator<String> getFeatures(String name, String node, JID senderJID) {
return Arrays.asList(NAMESPACE).iterator();
}
public DataForm getExtendedInfo(String name, String node, JID senderJID) {
// Mark that offline messages shouldn't be sent when the user becomes available
stopOfflineFlooding(senderJID);
final DataForm dataForm = new DataForm(DataForm.Type.result);
final FormField field1 = dataForm.addField();
field1.setVariable("FORM_TYPE");
field1.setType(FormField.Type.hidden);
field1.addValue(NAMESPACE);
final FormField field2 = dataForm.addField();
field2.setVariable("number_of_messages");
field2.addValue(String.valueOf(messageStore.getMessages(senderJID.getNode(), false).size()));
return dataForm;
}
public boolean hasInfo(String name, String node, JID senderJID) {
return NAMESPACE.equals(node) && userManager.isRegisteredUser(senderJID.getNode());
}
public Iterator<DiscoItem> getItems(String name, String node, JID senderJID) {
// Mark that offline messages shouldn't be sent when the user becomes available
stopOfflineFlooding(senderJID);
List<DiscoItem> answer = new ArrayList<DiscoItem>();
for (OfflineMessage offlineMessage : messageStore.getMessages(senderJID.getNode(), false)) {
answer.add(new DiscoItem(senderJID.asBareJID(), offlineMessage.getFrom().toString(),
XMPPDateTimeFormat.format(offlineMessage.getCreationDate()), null));
}
return answer.iterator();
}
@Override
public void initialize(XMPPServer server) {
super.initialize(server);
infoHandler = server.getIQDiscoInfoHandler();
itemsHandler = server.getIQDiscoItemsHandler();
messageStore = server.getOfflineMessageStore();
userManager = server.getUserManager();
routingTable = server.getRoutingTable();
}
@Override
public void start() throws IllegalStateException {
super.start();
infoHandler.setServerNodeInfoProvider(NAMESPACE, this);
itemsHandler.setServerNodeInfoProvider(NAMESPACE, this);
}
@Override
public void stop() {
super.stop();
infoHandler.removeServerNodeInfoProvider(NAMESPACE);
itemsHandler.removeServerNodeInfoProvider(NAMESPACE);
}
private void stopOfflineFlooding(JID senderJID) {
LocalClientSession session = (LocalClientSession) sessionManager.getSession(senderJID);
if (session != null) {
session.setOfflineFloodStopped(true);
}
}
}
|
|
/*
* Copyright (c) 2012 Jan Kotek
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mapdb;
import java.io.DataInput;
import java.io.File;
import java.io.IOError;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.LockSupport;
import java.util.concurrent.locks.ReentrantLock;
import java.util.logging.Level;
import static org.mapdb.DataIO.*;
/**
* Write-Ahead-Log
*/
public class StoreWAL extends StoreCached {
/** 2 byte store version*/
protected static final int WAL_STORE_VERSION = 100;
/** 4 byte file header */
protected static final int WAL_HEADER = (0x8A77<<16) | WAL_STORE_VERSION;
protected static final long WAL_SEAL = 8234892392398238983L;
protected static final int FULL_REPLAY_AFTER_N_TX = 16;
/**
* Contains index table modified in previous transactions.
*
* If compaction is in progress, then the value is not an index value, but the following:
* <pre>
* Long.MAX_VALUE == TOMBSTONE
* First three bytes is WAL file number
* Remaining 5 bytes is offset in WAL file
* </pre>
*
*/
protected final LongLongMap[] prevLongLongs;
protected final LongLongMap[] currLongLongs;
protected final LongLongMap[] prevDataLongs;
protected final LongLongMap[] currDataLongs;
protected final LongLongMap pageLongStack = new LongLongMap();
protected final List<Volume> volumes = Collections.synchronizedList(new ArrayList<Volume>());
/** WAL file sealed after compaction completes; if no valid seal is present, the compaction file should be destroyed. */
protected volatile Volume walC;
/** File into which store is compacted. */
protected volatile Volume walCCompact;
/** Record WAL files storing recid-record pairs. Created during compaction, when the memory allocator is not available. */
protected final List<Volume> walRec = Collections.synchronizedList(new ArrayList<Volume>());
protected final ReentrantLock compactLock = new ReentrantLock(CC.FAIR_LOCKS);
/** protected by commitLock */
protected volatile boolean compactionInProgress = false;
protected Volume curVol;
protected int fileNum = -1;
//TODO how to protect the file offset concurrently while the file is being swapped?
protected final AtomicLong walOffset = new AtomicLong();
protected Volume headVolBackup;
protected long[] indexPagesBackup;
protected Volume realVol;
protected volatile boolean $_TEST_HACK_COMPACT_PRE_COMMIT_WAIT =false;
protected volatile boolean $_TEST_HACK_COMPACT_POST_COMMIT_WAIT =false;
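/*
* Illustrative sketch, not part of the original MapDB sources: while compaction is in progress the
* maps above hold WAL pointers rather than index values (see the javadoc on prevLongLongs). A
* pointer packs the WAL file number into the first three bytes and the offset into the remaining
* five bytes, which is how commit() encodes it and get2() decodes it.
*/
private static long composeWalRecPointerSketch(int walFileNum, long walFileOffset) {
return (((long) walFileNum) << (5 * 8)) | walFileOffset; // 3 bytes file number, 5 bytes offset
}
private static int walRecFileNumSketch(long pointer) {
return (int) (pointer >>> (5 * 8)); // first three bytes
}
private static long walRecOffsetSketch(long pointer) {
return pointer & 0xFFFFFFFFFFL; // last five bytes
}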
public StoreWAL(String fileName) {
this(fileName,
fileName == null ? CC.DEFAULT_MEMORY_VOLUME_FACTORY : CC.DEFAULT_FILE_VOLUME_FACTORY,
null,
CC.DEFAULT_LOCK_SCALE,
0,
false, false, null, false,false, false, null,
null, 0L, 0L, false,
0L,
0);
}
public StoreWAL(
String fileName,
Volume.VolumeFactory volumeFactory,
Cache cache,
int lockScale,
int lockingStrategy,
boolean checksum,
boolean compress,
byte[] password,
boolean readonly,
boolean snapshotEnable,
boolean fileLockDisable,
HeartbeatFileLock fileLockHeartbeat,
ScheduledExecutorService executor,
long startSize,
long sizeIncrement,
boolean recidReuseDisable,
long executorScheduledRate,
int writeQueueSize
) {
super(fileName, volumeFactory, cache,
lockScale,
lockingStrategy,
checksum, compress, password, readonly, snapshotEnable, fileLockDisable, fileLockHeartbeat,
executor,
startSize,
sizeIncrement,
recidReuseDisable,
executorScheduledRate,
writeQueueSize);
prevLongLongs = new LongLongMap[this.lockScale];
currLongLongs = new LongLongMap[this.lockScale];
for (int i = 0; i < prevLongLongs.length; i++) {
prevLongLongs[i] = new LongLongMap();
currLongLongs[i] = new LongLongMap();
}
prevDataLongs = new LongLongMap[this.lockScale];
currDataLongs = new LongLongMap[this.lockScale];
for (int i = 0; i < prevDataLongs.length; i++) {
prevDataLongs[i] = new LongLongMap();
currDataLongs[i] = new LongLongMap();
}
}
@Override
protected void initCreate() {
super.initCreate();
indexPagesBackup = indexPages.clone();
realVol = vol;
//make main vol readonly, to make sure it is never overwritten outside WAL replay
vol = new Volume.ReadOnly(vol);
//start new WAL file
walStartNextFile();
}
@Override
public void initOpen(){
//TODO disable readonly feature for this store
realVol = vol;
//replay WAL files
String wal0Name = getWalFileName("0");
String walCompSeal = getWalFileName("c");
boolean walCompSealExists =
walCompSeal!=null &&
new File(walCompSeal).exists();
if(walCompSealExists ||
(wal0Name!=null &&
new File(wal0Name).exists())){
//fill compaction stuff
walC = walCompSealExists?volumeFactory.makeVolume(walCompSeal, readonly, true) : null;
walCCompact = walCompSealExists? volumeFactory.makeVolume(walCompSeal + ".compact", readonly, true) : null;
for(int i=0;;i++){
String rname = getWalFileName("r"+i);
if(!new File(rname).exists())
break;
walRec.add(volumeFactory.makeVolume(rname, readonly, true));
}
//fill wal files
for(int i=0;;i++){
String wname = getWalFileName(""+i);
if(!new File(wname).exists())
break;
volumes.add(volumeFactory.makeVolume(wname, readonly, true));
}
initOpenPost();
replayWAL();
if(walC!=null)
walC.close();
walC = null;
if(walCCompact!=null)
walCCompact.close();
walCCompact = null;
for(Volume v:walRec){
v.close();
}
walRec.clear();
volumes.clear();
}
//start new WAL file
//TODO do not start if readonly
walStartNextFile();
initOpenPost();
}
@Override
protected void initFailedCloseFiles() {
if(walC!=null && !walC.isClosed()) {
walC.close();
}
walC = null;
if(walCCompact!=null && !walCCompact.isClosed()) {
walCCompact.close();
}
walCCompact = null;
if(walRec!=null){
for(Volume v:walRec){
if(v!=null && !v.isClosed())
v.close();
}
walRec.clear();
}
if(volumes!=null){
for(Volume v:volumes){
if(v!=null && !v.isClosed())
v.close();
}
volumes.clear();
}
}
protected void initOpenPost() {
super.initOpen();
indexPagesBackup = indexPages.clone();
//make main vol readonly, to make sure it is never overwritten outside WAL replay
//all data are written to realVol
vol = new Volume.ReadOnly(vol);
}
@Override
protected void initHeadVol() {
super.initHeadVol();
//backup headVol
if(headVolBackup!=null && !headVolBackup.isClosed())
headVolBackup.close();
byte[] b = new byte[(int) HEAD_END];
headVol.getData(0, b, 0, b.length);
headVolBackup = new Volume.SingleByteArrayVol(b);
}
protected void walStartNextFile() {
if (CC.ASSERT && !structuralLock.isHeldByCurrentThread())
throw new AssertionError();
fileNum++;
if (CC.ASSERT && fileNum != volumes.size())
throw new DBException.DataCorruption();
String filewal = getWalFileName(""+fileNum);
Volume nextVol;
if (readonly && filewal != null && !new File(filewal).exists()){
nextVol = new Volume.ReadOnly(new Volume.ByteArrayVol(8,0L));
}else {
nextVol = volumeFactory.makeVolume(filewal, readonly, true);
}
nextVol.ensureAvailable(16);
if(!readonly) {
nextVol.putInt(0, WAL_HEADER);
nextVol.putLong(8, makeFeaturesBitmap());
}
walOffset.set(16);
volumes.add(nextVol);
curVol = nextVol;
}
protected String getWalFileName(String ext) {
return fileName==null? null :
fileName+".wal"+"."+ext;
}
protected void walPutLong(long offset, long value){
final int plusSize = +1+8+6;
long walOffset2 = walOffset.getAndAdd(plusSize);
Volume curVol2 = curVol;
//in case of overlap, put Skip Bytes instruction and try again
if(hadToSkip(walOffset2, plusSize)){
walPutLong(offset, value);
return;
}
if(CC.ASSERT && offset>>>48!=0)
throw new DBException.DataCorruption();
curVol2.ensureAvailable(walOffset2+plusSize);
int parity = 1+Long.bitCount(value)+Long.bitCount(offset);
parity &=15;
curVol2.putUnsignedByte(walOffset2, (1 << 4)|parity);
walOffset2+=1;
curVol2.putLong(walOffset2, value);
walOffset2+=8;
curVol2.putSixLong(walOffset2, offset);
}
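/*
* Illustrative helper, not part of the original MapDB sources: every WAL entry starts with a single
* header byte whose high nibble is the instruction type and whose low nibble is a 4 bit parity of
* the payload. This reproduces the byte written by walPutLong() above (instruction 1 == "write long")
* and verified in replayWALInstructionFiles().
*/
private static int walLongHeaderByteSketch(long offset, long value) {
int parity = (1 + Long.bitCount(value) + Long.bitCount(offset)) & 15; // 4 bit payload parity
return (1 << 4) | parity; // instruction type in the high nibble
}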
protected void walPutUnsignedShort(long offset, int value) {
final int plusSize = +1+8;
long walOffset2 = walOffset.getAndAdd(plusSize);
Volume curVol2 = curVol;
//in case of overlap, put Skip Bytes instruction and try again
if(hadToSkip(walOffset2, plusSize)){
walPutUnsignedShort(offset, value);
return;
}
curVol2.ensureAvailable(walOffset2+plusSize);
if(CC.ASSERT && offset>>>48!=0)
throw new DBException.DataCorruption();
offset = (((long)value)<<48) | offset;
int parity = 1+Long.bitCount(offset);
parity &=15;
curVol2.putUnsignedByte(walOffset2, (6 << 4)|parity);
walOffset2+=1;
curVol2.putLong(walOffset2, offset);
}
protected boolean hadToSkip(long walOffset2, int plusSize) {
//does it overlap page boundaries?
if((walOffset2>>>CC.VOLUME_PAGE_SHIFT)==(walOffset2+plusSize)>>>CC.VOLUME_PAGE_SHIFT){
return false; //no, does not, all fine
}
//is there enough space for 4 byte skip N bytes instruction?
while((walOffset2&PAGE_MASK) >= PAGE_SIZE-4 || plusSize<5){
//pad with single byte skip instructions, until end of page is reached
int singleByteSkip = (4<<4)|(Long.bitCount(walOffset2)&15);
curVol.putUnsignedByte(walOffset2++, singleByteSkip);
plusSize--;
if(CC.ASSERT && plusSize<0)
throw new DBException.DataCorruption();
}
//now new page starts, so add skip instruction for remaining bits
int val = (3<<(4+3*8)) | (plusSize-4) | ((Integer.bitCount(plusSize-4)&15)<<(3*8));
curVol.ensureAvailable(walOffset2 + 4);
curVol.putInt(walOffset2, val);
return true;
}
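/*
* Reading note (added for clarity, not in the original sources): hadToSkip() keeps a WAL entry from
* straddling a page boundary by padding with two kinds of skip instructions, both consumed by
* replayWALInstructionFiles(): instruction 4 is a single byte skip, and instruction 3 is a four byte
* "skip N bytes" entry whose lower three bytes hold N.
*/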
@Override
protected void putDataSingleWithLink(int segment, long offset, long link, byte[] buf, int bufPos, int size) {
if(CC.ASSERT && (size&0xFFFF)!=size)
throw new DBException.DataCorruption();
//TODO optimize so array copy is not necessary, that means to clone and modify putDataSingleWithoutLink method
byte[] buf2 = new byte[size+8];
DataIO.putLong(buf2,0,link);
System.arraycopy(buf,bufPos,buf2,8,size);
putDataSingleWithoutLink(segment,offset,buf2,0,buf2.length);
}
@Override
protected void putDataSingleWithoutLink(int segment, long offset, byte[] buf, int bufPos, int size) {
if(CC.ASSERT && (size&0xFFFF)!=size)
throw new DBException.DataCorruption();
if(CC.ASSERT && (offset%16!=0 && offset!=4))
throw new DBException.DataCorruption();
// if(CC.ASSERT && size%16!=0)
// throw new AssertionError(); //TODO allign record size to 16, and clear remaining bytes
if(CC.ASSERT && segment!=-1)
assertWriteLocked(segment);
if(CC.ASSERT && segment==-1 && !structuralLock.isHeldByCurrentThread())
throw new AssertionError();
final int plusSize = +1+2+6+size;
long walOffset2 = walOffset.getAndAdd(plusSize);
if(hadToSkip(walOffset2, plusSize)){
putDataSingleWithoutLink(segment,offset,buf,bufPos,size);
return;
}
curVol.ensureAvailable(walOffset2+plusSize);
int checksum = 1+Integer.bitCount(size)+Long.bitCount(offset)+sum(buf,bufPos,size);
checksum &= 15;
curVol.putUnsignedByte(walOffset2, (2 << 4)|checksum);
walOffset2+=1;
curVol.putLong(walOffset2, ((long) size) << 48 | offset);
walOffset2+=8;
curVol.putData(walOffset2, buf,bufPos,size);
//TODO assertions
long val = ((long)size)<<48;
val |= ((long)fileNum)<<32;
val |= walOffset2;
(segment==-1?pageLongStack:currDataLongs[segment]).put(offset, val);
}
protected DataInput walGetData(long offset, int segment) {
if (CC.ASSERT && offset % 16 != 0)
throw new DBException.DataCorruption();
long longval = currDataLongs[segment].get(offset);
if(longval==0){
longval = prevDataLongs[segment].get(offset);
}
if(longval==0)
return null;
int arraySize = (int) (longval >>> 48);
int fileNum = (int) ((longval >>> 32) & 0xFFFFL);
long dataOffset = longval & 0xFFFFFFFFL;
Volume vol = volumes.get(fileNum);
return vol.getDataInput(dataOffset, arraySize);
}
@Override
protected long indexValGet(long recid) {
if(CC.ASSERT)
assertReadLocked(recid);
int segment = lockPos(recid);
long offset = recidToOffset(recid);
long ret = currLongLongs[segment].get(offset);
if(ret!=0) {
return ret;
}
ret = prevLongLongs[segment].get(offset);
if(ret!=0)
return ret;
return super.indexValGet(recid);
}
@Override
protected long indexValGetRaw(long recid) {
if(CC.ASSERT)
assertReadLocked(recid);
int segment = lockPos(recid);
long offset = recidToOffset(recid);
long ret = currLongLongs[segment].get(offset);
if(ret!=0) {
return ret;
}
ret = prevLongLongs[segment].get(offset);
if(ret!=0)
return ret;
return super.indexValGetRaw(recid);
}
@Override
protected void indexValPut(long recid, int size, long offset, boolean linked, boolean unused) {
if(CC.ASSERT)
assertWriteLocked(lockPos(recid));
// if(CC.ASSERT && compactionInProgress)
// throw new AssertionError();
long newVal = composeIndexVal(size, offset, linked, unused, true);
currLongLongs[lockPos(recid)].put(recidToOffset(recid), newVal);
}
@Override
protected void indexLongPut(long offset, long val) {
if(CC.ASSERT && !structuralLock.isHeldByCurrentThread())
throw new AssertionError();
if(CC.ASSERT && compactionInProgress)
throw new AssertionError();
walPutLong(offset,val);
}
@Override
protected long pageAllocate() {
// TODO compaction assertion
// if(CC.ASSERT && compactionInProgress)
// throw new AssertionError();
long storeSize = parity16Get(headVol.getLong(STORE_SIZE));
headVol.putLong(STORE_SIZE, parity16Set(storeSize + PAGE_SIZE));
//TODO clear data on page? perhaps special instruction?
if(CC.ASSERT && storeSize%PAGE_SIZE!=0)
throw new DBException.DataCorruption();
return storeSize;
}
@Override
protected byte[] loadLongStackPage(long pageOffset, boolean willBeModified) {
if (CC.ASSERT && !structuralLock.isHeldByCurrentThread())
throw new AssertionError();
// if(CC.ASSERT && compactionInProgress)
// throw new AssertionError();
//first try to get it from dirty pages in current TX
byte[] page = dirtyStackPages.get(pageOffset);
if (page != null) {
return page;
}
//try to get it from previous TX stored in WAL, but not yet replayed
long walval = pageLongStack.get(pageOffset);
if(walval!=0){
//get file number, offset and size in WAL
int arraySize = (int) (walval >>> 48);
int fileNum = (int) ((walval >>> 32) & 0xFFFFL);
long dataOffset = walval & 0xFFFFFFFFL;
//read and return data
byte[] b = new byte[arraySize];
Volume vol = volumes.get(fileNum);
vol.getData(dataOffset, b, 0, arraySize);
//page is going to be modified, so put it back into dirtyStackPages
if (willBeModified) {
dirtyStackPages.put(pageOffset, b);
}
return b;
}
//and finally read it from main store
int pageSize = (int) (parity4Get(vol.getLong(pageOffset)) >>> 48);
page = new byte[pageSize];
vol.getData(pageOffset, page, 0, pageSize);
if (willBeModified){
dirtyStackPages.put(pageOffset, page);
}
return page;
}
/** Return positions of a (possibly) linked record. */
@Override
protected long[] offsetsGet(int segment, long indexVal) {
if(indexVal>>>48==0){
return ((indexVal&MLINKED)!=0) ? null : StoreDirect.EMPTY_LONGS;
}
long[] ret = new long[]{indexVal};
while((ret[ret.length-1]&MLINKED)!=0){
ret = Arrays.copyOf(ret, ret.length + 1);
long oldLink = ret[ret.length-2]&MOFFSET;
//get WAL position from current transaction, or previous (not yet fully replayed) transactions
long val = currDataLongs[segment].get(oldLink);
if(val==0)
val = prevDataLongs[segment].get(oldLink);
if(val!=0) {
//was found in previous position, read link from WAL
int file = (int) ((val>>>32) & 0xFFFFL); // get WAL file number
val = val & 0xFFFFFFFFL; // convert to WAL offset;
val = volumes.get(file).getLong(val);
}else{
//was not found in any transaction, read from main store
val = vol.getLong(oldLink);
}
ret[ret.length-1] = parity3Get(val);
}
if(CC.ASSERT){
offsetsVerify(ret);
}
if (CC.LOG_STORE && LOG.isLoggable(Level.FINEST)) {
LOG.log(Level.FINEST, "indexVal={0}, ret={1}",
new Object[]{Long.toHexString(indexVal), Arrays.toString(ret)});
}
return ret;
}
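/*
* Illustrative sketch, not part of the original sources: each long produced by offsetsGet() packs
* the chunk size into the top 16 bits and the physical offset behind the MOFFSET mask, with the
* MLINKED flag marking that another chunk follows; get2() below decodes the entries the same way.
*/
private static int chunkSizeSketch(long packedOffset) {
return (int) (packedOffset >>> 48); // top 16 bits carry the chunk size
}
private static long chunkOffsetSketch(long packedOffset) {
return packedOffset & MOFFSET; // physical offset behind the MOFFSET mask
}
private static boolean chunkLinkedSketch(long packedOffset) {
return (packedOffset & MLINKED) != 0; // another chunk follows when the linked flag is set
}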
@Override
protected <A> A get2(long recid, Serializer<A> serializer) {
if (CC.ASSERT)
assertReadLocked(recid);
int segment = lockPos(recid);
//is in write cache?
{
Object cached = writeCache[segment].get1(recid);
if (cached != null) {
if(cached==TOMBSTONE2)
return null;
return (A) cached;
}
}
//is in wal?
{
long walval = currLongLongs[segment].get(recidToOffset(recid));
if(walval==0) {
walval = prevLongLongs[segment].get(recidToOffset(recid));
}
if(walval!=0){
if(compactionInProgress){
//read from Record log
if(walval==Long.MAX_VALUE) //TOMBSTONE or null
return null;
final int fileNum = (int) (walval>>>(5*8));
Volume recVol = walRec.get(fileNum);
long offset = walval&0xFFFFFFFFFFL; //last 5 bytes
if(CC.ASSERT){
int instruction = recVol.getUnsignedByte(offset);
//TODO exception should not be here
if(instruction!=(5<<4))
throw new DBException.DataCorruption("wrong instruction");
if(recid!=recVol.getSixLong(offset+1))
throw new DBException.DataCorruption("wrong recid");
}
//skip instruction and recid
offset+=1+6;
final int size = recVol.getInt(offset);
//TODO instruction checksum
final DataInput in = size==0?
new DataIO.DataInputByteArray(new byte[0]):
recVol.getDataInput(offset+4,size);
return deserialize(serializer, size, in);
}
//read record from WAL
boolean linked = (walval&MLINKED)!=0;
int size = (int) (walval>>>48);
if(linked && size==0)
return null;
if(size==0){
return deserialize(serializer,0,new DataIO.DataInputByteArray(new byte[0]));
}
if(linked)try {
//read linked record
int totalSize = 0;
byte[] in = new byte[100];
long link = walval;
while((link&MLINKED)!=0){
DataInput in2 = walGetData(link&MOFFSET, segment);
int chunkSize = (int) (link>>>48);
//get value of next link
link = in2.readLong();
//copy data into in
if(in.length<totalSize+chunkSize-8){
in = Arrays.copyOf(in, Math.max(in.length*2,totalSize+chunkSize-8 ));
}
in2.readFully(in,totalSize, chunkSize-8);
totalSize+=chunkSize-8;
}
//copy last chunk of data
DataInput in2 = walGetData(link&MOFFSET, segment);
int chunkSize = (int) (link>>>48);
//copy data into in
if(in.length<totalSize+chunkSize){
in = Arrays.copyOf(in, Math.max(in.length*2,totalSize+chunkSize ));
}
in2.readFully(in,totalSize, chunkSize);
totalSize+=chunkSize;
return deserialize(serializer, totalSize,new DataIO.DataInputByteArray(in,0));
} catch (IOException e) {
throw new IOError(e);
}
//read non-linked record
DataInput in = walGetData(walval&MOFFSET, segment);
return deserialize(serializer, (int) (walval>>>48),in);
}
}
long[] offsets = offsetsGet(lockPos(recid),indexValGet(recid));
if (offsets == null) {
return null; //zero size
}else if (offsets.length==0){
return deserialize(serializer,0,new DataIO.DataInputByteArray(new byte[0]));
}else if (offsets.length == 1) {
//not linked
int size = (int) (offsets[0] >>> 48);
long offset = offsets[0] & MOFFSET;
DataInput in = vol.getDataInput(offset, size);
return deserialize(serializer, size, in);
} else {
//calculate total size
int totalSize = offsetsTotalSize(offsets);
//load data
byte[] b = new byte[totalSize];
int bpos = 0;
for (int i = 0; i < offsets.length; i++) {
int plus = (i == offsets.length - 1)?0:8;
long size = (offsets[i] >>> 48) - plus;
if(CC.ASSERT && (size&0xFFFF)!=size)
throw new DBException.DataCorruption("size mismatch");
long offset = offsets[i] & MOFFSET;
vol.getData(offset + plus, b, bpos, (int) size);
bpos += size;
}
if (CC.ASSERT && bpos != totalSize)
throw new DBException.DataCorruption("size does not match");
DataInput in = new DataIO.DataInputByteArray(b);
return deserialize(serializer, totalSize, in);
}
}
@Override
public void rollback() throws UnsupportedOperationException {
commitLock.lock();
try {
//flush modified records
for (int segment = 0; segment < locks.length; segment++) {
Lock lock = locks[segment].writeLock();
lock.lock();
try {
writeCache[segment].clear();
if(caches!=null) {
caches[segment].clear();
}
} finally {
lock.unlock();
}
}
structuralLock.lock();
try {
dirtyStackPages.clear();
//restore headVol from backup
byte[] b = new byte[(int) HEAD_END];
//TODO use direct copy
headVolBackup.getData(0,b,0,b.length);
headVol.putData(0,b,0,b.length);
lastAllocatedData = parity3Get(headVol.getLong(LAST_PHYS_ALLOCATED_DATA_OFFSET));
indexPages = indexPagesBackup.clone();
} finally {
structuralLock.unlock();
}
}finally {
commitLock.unlock();
}
}
@Override
public void commit() {
commitLock.lock();
try{
if(compactionInProgress){
//use record format rather than instruction format.
String recvalName = getWalFileName("r"+walRec.size());
Volume v = volumeFactory.makeVolume(recvalName, readonly, true);
walRec.add(v);
v.ensureAvailable(16);
long offset = 16;
for(int segment=0;segment<locks.length;segment++) {
Lock lock = locks[segment].writeLock();
lock.lock();
try {
LongObjectObjectMap<Object,Serializer> writeCache1 = writeCache[segment];
LongLongMap prevLongs = prevLongLongs[segment];
long[] set = writeCache1.set;
Object[] values = writeCache1.values;
for(int i=0;i<set.length;i++){
long recid = set[i];
if(recid==0)
continue;
Object value = values[i*2];
DataOutputByteArray buf;
int size;
if (value == TOMBSTONE2) {
buf = null;
size = -2;
} else {
Serializer s = (Serializer) values[i*2+1];
buf = serialize(value, s); //TODO somehow serialize outside lock?
size = buf==null?-1:buf.pos;
}
int needed = 1+6+4 +(buf==null?0:buf.pos); //TODO int overflow, limit max record size to 1GB
//TODO skip page if overlap
prevLongs.put(recidToOffset(recid),
(((long)fileNum)<<(5*8)) | //first 3 bytes is file number
offset //wal offset
);
v.putUnsignedByte(offset, (5<<4));
offset++;
v.putSixLong(offset, recid);
offset+=6;
v.putInt(offset, size);
offset+=4;
if(size>0) {
v.putData(offset, buf.buf, 0, size);
offset+=size;
}
if(buf!=null)
recycledDataOut.lazySet(buf);
}
writeCache1.clear();
} finally {
lock.unlock();
}
}
structuralLock.lock();
try {
//finish instruction
v.putUnsignedByte(offset, 0);
v.sync();
v.putLong(8, StoreWAL.WAL_SEAL);
v.sync();
return;
}finally {
structuralLock.unlock();
}
}
//if big enough, do full WAL replay
if(volumes.size()>FULL_REPLAY_AFTER_N_TX && !compactionInProgress) {
commitFullWALReplay();
return;
}
//move all from current longs to prev
//each segment requires write lock
for(int segment=0;segment<locks.length;segment++){
Lock lock = locks[segment].writeLock();
lock.lock();
try{
flushWriteCacheSegment(segment);
long[] v = currLongLongs[segment].table;
for(int i=0;i<v.length;i+=2){
long offset = v[i];
if(offset==0)
continue;
long value = v[i+1];
prevLongLongs[segment].put(offset,value);
walPutLong(offset,value);
if(checksum && offset>HEAD_END && offset%PAGE_SIZE!=0) {
walPutUnsignedShort(offset + 8, DataIO.longHash(value) & 0xFFFF);
}
}
currLongLongs[segment].clear();
v = currDataLongs[segment].table;
for(int i=0;i<v.length;i+=2){
long offset = v[i];
if(offset==0)
continue;
long value = v[i+1];
prevDataLongs[segment].put(offset,value);
}
currDataLongs[segment].clear();
}finally {
lock.unlock();
}
}
structuralLock.lock();
try {
//flush modified Long Stack Pages into WAL
{
long[] set = dirtyStackPages.set;
for(int i=0;i<set.length;i++){
long offset = set[i];
if(offset==0)
continue;
byte[] val = (byte[]) dirtyStackPages.values[i];
if (CC.ASSERT && offset < PAGE_SIZE)
throw new DBException.DataCorruption();
if (CC.ASSERT && val.length % 16 != 0)
throw new DBException.DataCorruption();
if (CC.ASSERT && (val.length <= 0 || val.length > MAX_REC_SIZE))
throw new DBException.DataCorruption();
putDataSingleWithoutLink(-1, offset, val, 0, val.length);
}
dirtyStackPages.clear();
}
headVol.putLong(LAST_PHYS_ALLOCATED_DATA_OFFSET,parity3Set(lastAllocatedData));
//update index checksum
headVol.putInt(HEAD_CHECKSUM, headChecksum(headVol));
// flush headVol into WAL
byte[] b = new byte[(int) HEAD_END-4];
//TODO use direct copy
headVol.getData(4, b, 0, b.length);
//put headVol into WAL
putDataSingleWithoutLink(-1, 4L, b, 0, b.length);
//make copy of current headVol
headVolBackup.putData(4, b, 0, b.length);
indexPagesBackup = indexPages.clone();
long finalOffset = walOffset.get();
curVol.ensureAvailable(finalOffset + 1); //TODO overlap here
//put EOF instruction
curVol.putUnsignedByte(finalOffset, (0 << 4) | (Long.bitCount(finalOffset) & 15));
curVol.sync();
//put wal seal
curVol.putLong(8, WAL_SEAL);
curVol.sync();
walStartNextFile();
} finally {
structuralLock.unlock();
}
}finally {
commitLock.unlock();
}
}
protected void commitFullWALReplay() {
if(CC.ASSERT && !commitLock.isHeldByCurrentThread())
throw new AssertionError();
//lock all segment locks
//TODO use series of try..finally statements, perhaps recursion with runnable
for(int i=0;i<locks.length;i++){
locks[i].writeLock().lock();
}
try {
//flush entire write cache
for(int segment=0;segment<locks.length;segment++){
flushWriteCacheSegment(segment);
long[] v = currLongLongs[segment].table;
for(int i=0;i<v.length;i+=2){
long offset = v[i];
if(offset==0)
continue;
long value = v[i+1];
walPutLong(offset,value);
if(checksum && offset>HEAD_END && offset%PAGE_SIZE!=0) {
walPutUnsignedShort(offset + 8, DataIO.longHash(value) & 0xFFFF);
}
//remove from this
v[i] = 0;
v[i+1] = 0;
}
currLongLongs[segment].clear();
if(CC.ASSERT && currLongLongs[segment].size()!=0)
throw new AssertionError();
currDataLongs[segment].clear();
prevDataLongs[segment].clear();
prevLongLongs[segment].clear();
}
structuralLock.lock();
try {
//flush modified Long Stack Pages into WAL
{
long[] set = dirtyStackPages.set;
for(int i=0;i<set.length;i++){
long offset = set[i];
if(offset==0)
continue;
byte[] val = (byte[]) dirtyStackPages.values[i];
if (CC.ASSERT && offset < PAGE_SIZE)
throw new DBException.DataCorruption();
if (CC.ASSERT && val.length % 16 != 0)
throw new DBException.DataCorruption();
if (CC.ASSERT && (val.length <= 0 || val.length > MAX_REC_SIZE))
throw new DBException.DataCorruption();
putDataSingleWithoutLink(-1, offset, val, 0, val.length);
}
dirtyStackPages.clear();
}
if(CC.ASSERT && dirtyStackPages.size!=0)
throw new AssertionError();
pageLongStack.clear();
headVol.putLong(LAST_PHYS_ALLOCATED_DATA_OFFSET,parity3Set(lastAllocatedData));
//update index checksum
headVol.putInt(HEAD_CHECKSUM, headChecksum(headVol));
// flush headVol into WAL
byte[] b = new byte[(int) HEAD_END-4];
//TODO use direct copy
headVol.getData(4, b, 0, b.length);
//put headVol into WAL
putDataSingleWithoutLink(-1, 4L, b, 0, b.length);
//make copy of current headVol
headVolBackup.putData(4, b, 0, b.length);
indexPagesBackup = indexPages.clone();
long finalOffset = walOffset.get();
curVol.ensureAvailable(finalOffset+1); //TODO overlap here
//put EOF instruction
curVol.putUnsignedByte(finalOffset, (0<<4) | (Long.bitCount(finalOffset)&15));
curVol.sync();
//put wal seal
curVol.putLong(8, WAL_SEAL);
curVol.sync();
//now replay full WAL
replayWAL();
walStartNextFile();
} finally {
structuralLock.unlock();
}
}finally {
for(int i=locks.length-1;i>=0;i--){
locks[i].writeLock().unlock();
}
}
}
protected void replayWAL(){
/*
Init Open for StoreWAL has the following phases:
1) check existing files and their seals
2) if compacted file exists, swap it with original
3) if Record WAL files exists, initialize Memory Allocator
4) if Record WAL exists, convert it to WAL
5) replay WAL if any
6) reinitialize memory allocator if replay WAL happened
*/
//check if compaction files are present and valid
final boolean compaction =
walC!=null && walC.length()!=0 &&
walCCompact!=null && walCCompact.length()!=0;
if(compaction){
//check compaction file was finished well
walC.ensureAvailable(16);
boolean walCSeal = walC.getLong(8) == WAL_SEAL;
//TODO if walCSeal check indexChecksum on walCCompact volume
if(!walCSeal){
LOG.warning("Compaction failed, seal not present. Removing incomplete compacted file, keeping old fragmented file.");
walC.close();
walC.deleteFile();
walC = null;
walCCompact.close();
walCCompact.deleteFile();
walCCompact = null;
}else{
//compaction is valid, so swap compacted file with current
if(vol.getFile()==null){
//no file present, so we are purely in-memory; just swap the volumes
Volume oldVol = this.vol;
this.realVol = walCCompact;
this.vol = new Volume.ReadOnly(realVol);
this.headVol.close();
this.headVolBackup.close();
initHeadVol();
//TODO update variables
oldVol.close();
}else{
//file is not null, we are working on file system, so swap files
File walCCompactFile = walCCompact.getFile();
walCCompact.sync();
walCCompact.close();
walCCompact = null;
File thisFile = new File(fileName);
File thisFileBackup = new File(fileName+".wal.c.orig");
this.vol.close();
if(!thisFile.renameTo(thisFileBackup)){
//TODO recovery here. Perhaps copy data from one file to other, instead of renaming it
throw new AssertionError("failed to rename file " + thisFile);
}
//rename compacted file to current file
if (!walCCompactFile.renameTo(thisFile)) {
//TODO recovery here.
throw new AssertionError("failed to rename file " + walCCompactFile);
}
//and reopen volume
this.realVol = volumeFactory.makeVolume(this.fileName, readonly, fileLockDisable);
this.vol = new Volume.ReadOnly(this.realVol);
this.initHeadVol();
//delete orig file
if(!thisFileBackup.delete()){
LOG.warning("Could not delete original compacted file: "+thisFileBackup);
}
}
walC.close();
walC.deleteFile();
walC = null;
initOpenPost();
}
}
if(!walRec.isEmpty()){
//convert walRec into WAL log files.
//memory allocator was not available at the time of compaction
// TODO no wal open during compaction
// if(CC.ASSERT && !volumes.isEmpty())
// throw new AssertionError();
//
// if(CC.ASSERT && curVol!=null)
// throw new AssertionError();
structuralLock.lock();
try {
walStartNextFile();
}finally {
structuralLock.unlock();
}
for(Volume wr:walRec){
if(wr.length()==0)
break;
wr.ensureAvailable(16); //TODO this should not be here, Volume should already be mapped if the file exists
if(wr.getLong(8)!=StoreWAL.WAL_SEAL)
break;
long pos = 16;
for(;;) {
int instr = wr.getUnsignedByte(pos++);
if (instr >>> 4 == 0) {
//EOF
break;
} else if (instr >>> 4 != 5) {
//TODO failsafe with corrupted wal
throw new DBException.DataCorruption("Invalid instruction in WAL REC" + (instr >>> 4));
}
long recid = wr.getSixLong(pos);
pos += 6;
int size = wr.getInt(pos);
//TODO zero size, null records, tombstone
pos += 4;
byte[] arr = new byte[size]; //TODO reuse array if below a certain size
wr.getData(pos, arr, 0, size);
pos += size;
update(recid, arr, Serializer.BYTE_ARRAY_NOSIZE);
}
}
List<Volume> l = new ArrayList(walRec);
walRec.clear();
commitFullWALReplay();
//delete all wr files
for(Volume wr:l){
File f = wr.getFile();
wr.close();
wr.deleteFile();
if(f!=null && f.exists() && !f.delete()){
LOG.warning("Could not delete WAL REC file: "+f);
}
}
walRec.clear();
}
replayWALInstructionFiles();
}
private void replayWALInstructionFiles() {
if(CC.ASSERT && !structuralLock.isHeldByCurrentThread())
throw new AssertionError();
if(CC.ASSERT && !commitLock.isHeldByCurrentThread())
throw new AssertionError();
file:for(Volume wal:volumes){
if(wal.length()<16 || wal.getLong(8)!=WAL_SEAL) {
break file;
//TODO better handling for corrupted logs
}
long pos = 16;
for(;;) {
int checksum = wal.getUnsignedByte(pos++);
int instruction = checksum>>>4;
checksum = (checksum&15);
if (instruction == 0) {
//EOF
if((Long.bitCount(pos-1)&15) != checksum)
throw new InternalError("WAL corrupted");
continue file;
} else if (instruction == 1) {
//write long
long val = wal.getLong(pos);
pos += 8;
long offset = wal.getSixLong(pos);
pos += 6;
if(((1+Long.bitCount(val)+Long.bitCount(offset))&15)!=checksum)
throw new InternalError("WAL corrupted");
realVol.ensureAvailable(offset+8);
realVol.putLong(offset, val);
} else if (instruction == 2) {
//write byte[]
int dataSize = wal.getUnsignedShort(pos);
pos += 2;
long offset = wal.getSixLong(pos);
pos += 6;
byte[] data = new byte[dataSize];
wal.getData(pos, data, 0, data.length);
pos += data.length;
if(((1+Integer.bitCount(dataSize)+Long.bitCount(offset)+sum(data))&15)!=checksum)
throw new InternalError("WAL corrupted");
//TODO direct transfer
realVol.ensureAvailable(offset+data.length);
realVol.putData(offset, data, 0, data.length);
} else if (instruction == 3) {
//skip N bytes
int skipN = wal.getInt(pos - 1) & 0xFFFFFF; //read 3 bytes
if((Integer.bitCount(skipN)&15) != checksum)
throw new InternalError("WAL corrupted");
pos += 3 + skipN;
} else if (instruction == 4) {
//skip single byte
if((Long.bitCount(pos-1)&15) != checksum)
throw new InternalError("WAL corrupted");
} else if (instruction == 6) {
//write two bytes
long s = wal.getLong(pos);
pos+=8;
if(((1+Long.bitCount(s))&15) != checksum)
throw new InternalError("WAL corrupted");
long offset = s&0xFFFFFFFFFFFFL;
realVol.ensureAvailable(offset + 2);
realVol.putUnsignedShort(offset, (int) (s>>>48));
}else{
throw new InternalError("WAL corrupted, unknown instruction");
}
}
}
realVol.sync();
//destroy old wal files
for(Volume wal:volumes){
if(!wal.isClosed()) {
wal.truncate(0);
wal.close();
}
wal.deleteFile();
}
fileNum = -1;
curVol = null;
volumes.clear();
}
private int sum(byte[] data) {
int ret = 0;
for(byte b:data){
ret+=b;
}
return Math.abs(ret);
}
private int sum(byte[] buf, int bufPos, int size) {
int ret = 0;
size+=bufPos;
while(bufPos<size){
ret+=buf[bufPos++];
}
return Math.abs(ret);
}
@Override
public boolean canRollback() {
return true;
}
@Override
public void close() {
compactLock.lock();
try{
commitLock.lock();
try{
if(closed) {
return;
}
if(hasUncommitedData()){
LOG.warning("Closing storage with uncommited data, those data will be discarded.");
}
//TODO do not replay if not dirty
if(!readonly) {
structuralLock.lock();
try {
replayWAL();
} finally {
structuralLock.unlock();
}
}
if(walC!=null)
walC.close();
if(walCCompact!=null)
walCCompact.close();
for(Volume v:walRec){
v.close();
}
walRec.clear();
for(Volume v:volumes){
v.close();
}
volumes.clear();
vol.close();
vol = null;
headVol.close();
headVol = null;
headVolBackup.close();
headVolBackup = null;
curVol = null;
dirtyStackPages.clear();
if(caches!=null){
for(Cache c:caches){
c.close();
}
Arrays.fill(caches,null);
}
if(fileLockHeartbeat !=null) {
fileLockHeartbeat.unlock();
fileLockHeartbeat = null;
}
closed = true;
}finally {
commitLock.unlock();
}
}finally {
compactLock.unlock();
}
}
@Override
public void compact() {
compactLock.lock();
try{
if(compactOldFilesExists())
return;
commitLock.lock();
try{
//check if there is uncommitted data, and log a warning if so
if(hasUncommitedData()){
//TODO how to deal with uncommitted data? Is there a way not to commit? Perhaps upgrade to recordWAL?
LOG.warning("Compaction started with uncommitted data. Calling commit automatically.");
}
snapshotCloseAllOnCompact();
//cleanup everything
commitFullWALReplay();
//start compaction
compactionInProgress = true;
//start zero WAL file with compaction flag
structuralLock.lock();
try {
if(CC.ASSERT && fileNum!=0)
throw new AssertionError();
if(CC.ASSERT && walC!=null)
throw new AssertionError();
//start walC file, which indicates if compaction finished fine
String walCFileName = getWalFileName("c");
if(walC!=null)
walC.close();
walC = volumeFactory.makeVolume(walCFileName, readonly, true);
walC.ensureAvailable(16);
walC.putLong(0,0); //TODO wal header
walC.putLong(8,0);
//reset free size
freeSize.set(-1);
}finally {
structuralLock.unlock();
}
}finally {
commitLock.unlock();
}
final long maxRecidOffset = parity1Get(headVol.getLong(MAX_RECID_OFFSET));
//open target file
final String targetFile = getWalFileName("c.compact");
final StoreDirect target = new StoreDirect(targetFile,
volumeFactory,
null,lockScale,
executor==null?LOCKING_STRATEGY_NOLOCK:LOCKING_STRATEGY_WRITELOCK,
checksum,compress,null,false,false,fileLockDisable,null, null, 0L, 0L, false);
target.init();
walCCompact = target.vol;
final AtomicLong maxRecid = new AtomicLong(
parity1Get(headVol.getLong(MAX_RECID_OFFSET))/indexValSize);
compactIndexPages(target, maxRecid);
while($_TEST_HACK_COMPACT_PRE_COMMIT_WAIT){
LockSupport.parkNanos(10000);
}
target.vol.putLong(MAX_RECID_OFFSET, parity1Set(maxRecid.get() * indexValSize));
//compaction finished fine, so now flush target file, and seal log file. This makes compaction durable
target.commit(); //sync all files, that is durable since there are no background tasks
walC.putLong(8, WAL_SEAL);
walC.sync();
commitLock.lock();
try{
if(hasUncommitedData()){
LOG.warning("Uncommitted data at end of compaction; committing automatically.");
}
//TODO there should be full WAL replay, but without commit
commitFullWALReplay();
compactionInProgress = false;
}finally {
commitLock.unlock();
}
while($_TEST_HACK_COMPACT_POST_COMMIT_WAIT){
LockSupport.parkNanos(10000);
}
}finally {
compactionInProgress = false; //TODO this should be under commitLock, but still better than leaving it true
compactLock.unlock();
}
}
/** Returns true if there is uncommitted data in the current transaction, otherwise false. */
protected boolean hasUncommitedData() {
for(int i=0;i<locks.length;i++){
final Lock lock = locks[i].readLock();
lock.lock();
try{
if(currLongLongs[i].size()!=0 ||
currDataLongs[i].size()!=0 ||
writeCache[i].size!=0)
return true;
}finally {
lock.unlock();
}
}
return false;
}
}
|
|
package org.jasig.cas.authentication;
import com.codahale.metrics.annotation.Counted;
import com.codahale.metrics.annotation.Metered;
import com.codahale.metrics.annotation.Timed;
import org.jasig.cas.authentication.principal.NullPrincipal;
import org.jasig.cas.authentication.principal.Principal;
import org.jasig.cas.authentication.principal.PrincipalResolver;
import org.jasig.inspektr.audit.annotation.Audit;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import org.springframework.util.Assert;
import javax.annotation.Resource;
import javax.validation.constraints.NotNull;
import java.security.GeneralSecurityException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
/**
* Provides an authentication manager that is inherently aware of multiple credentials and supports pluggable
* security policy via the {@link AuthenticationPolicy} component. The authentication process is as follows:
*
* <ul>
* <li>For each given credential do the following:
* <ul>
* <li>Iterate over all configured authentication handlers.</li>
* <li>Attempt to authenticate a credential if a handler supports it.</li>
* <li>On success attempt to resolve a principal by doing the following:
* <ul>
* <li>Check whether a resolver is configured for the handler that authenticated the credential.</li>
* <li>If a suitable resolver is found, attempt to resolve the principal.</li>
* <li>If a suitable resolver is not found, use the principal resolved by the authentication handler.</li>
* </ul>
* </li>
* <li>Check whether the security policy (e.g. any, all) is satisfied.
* <ul>
* <li>If security policy is met return immediately.</li>
* <li>Continue if security policy is not met.</li>
* </ul>
* </li>
* </ul>
* </li>
* <li>
* After all credentials have been attempted check security policy again.
* Note there is an implicit security policy that requires at least one credential to be authenticated.
* Then the security policy given by {@link #setAuthenticationPolicy(AuthenticationPolicy)} is applied.
* In all cases {@link AuthenticationException} is raised if security policy is not met.
* </li>
* </ul>
*
* It is an error condition to fail to resolve a principal.
*
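* <p>A minimal usage sketch (illustrative only; {@code handler}, {@code resolver} and
* {@code credential} are placeholders, not defined in this class):</p>
* <pre>{@code
* Map<AuthenticationHandler, PrincipalResolver> map = new LinkedHashMap<>();
* map.put(handler, resolver); // evaluation order follows insertion order
* AuthenticationManager manager = new PolicyBasedAuthenticationManager(map);
* Authentication authentication = manager.authenticate(credential);
* }</pre>
*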
* @author Marvin S. Addison
* @since 4.0.0
*/
@Component("authenticationManager")
public class PolicyBasedAuthenticationManager implements AuthenticationManager {
/** Log instance for logging events, errors, warnings, etc. */
protected final Logger logger = LoggerFactory.getLogger(getClass());
/** The list of AuthenticationMetaDataPopulators applied to successful authentications. */
@NotNull
private List<AuthenticationMetaDataPopulator> authenticationMetaDataPopulators =
new ArrayList<>();
/** Authentication security policy. */
@NotNull
private AuthenticationPolicy authenticationPolicy = new AnyAuthenticationPolicy();
/** Map of authentication handlers to resolvers to be used when handler does not resolve a principal. */
@NotNull
@Resource(name="authenticationHandlersResolvers")
private Map<AuthenticationHandler, PrincipalResolver> handlerResolverMap;
/**
* Instantiates a new Policy based authentication manager.
*/
protected PolicyBasedAuthenticationManager() {}
/**
* Creates a new authentication manager with a varargs array of authentication handlers that are attempted in the
* listed order for supported credentials. This form may only be used by authentication handlers that
* resolve principals during the authentication process.
*
* @param handlers One or more authentication handlers.
*/
public PolicyBasedAuthenticationManager(final AuthenticationHandler ... handlers) {
this(Arrays.asList(handlers));
}
/**
* Creates a new authentication manager with a list of authentication handlers that are attempted in the
* listed order for supported credentials. This form may only be used by authentication handlers that
* resolve principals during the authentication process.
*
* @param handlers Non-null list of authentication handlers containing at least one entry.
*/
public PolicyBasedAuthenticationManager(final List<AuthenticationHandler> handlers) {
Assert.notEmpty(handlers, "At least one authentication handler is required");
this.handlerResolverMap = new LinkedHashMap<>(
handlers.size());
for (final AuthenticationHandler handler : handlers) {
this.handlerResolverMap.put(handler, null);
}
}
/**
* Creates a new authentication manager with a map of authentication handlers to the principal resolvers that
* should be used upon successful authentication if no principal is resolved by the authentication handler. If
* the order of evaluation of authentication handlers is important, a map that preserves insertion order
* (e.g. {@link LinkedHashMap}) should be used.
*
* @param map Non-null map of authentication handler to principal resolver containing at least one entry.
*/
public PolicyBasedAuthenticationManager(final Map<AuthenticationHandler, PrincipalResolver> map) {
Assert.notEmpty(map, "At least one authentication handler is required");
this.handlerResolverMap = map;
}
/**
* {@inheritDoc}
*/
@Override
@Audit(
action="AUTHENTICATION",
actionResolverName="AUTHENTICATION_RESOLVER",
resourceResolverName="AUTHENTICATION_RESOURCE_RESOLVER")
@Timed(name="AUTHENTICATE")
@Metered(name="AUTHENTICATE")
@Counted(name="AUTHENTICATE", monotonic=true)
public final Authentication authenticate(final Credential... credentials) throws AuthenticationException {
final AuthenticationBuilder builder = authenticateInternal(credentials);
final Authentication authentication = builder.build();
final Principal principal = authentication.getPrincipal();
if (principal instanceof NullPrincipal) {
throw new UnresolvedPrincipalException(authentication);
}
addAuthenticationMethodAttribute(builder, authentication);
logger.info("Authenticated {} with credentials {}.", principal, Arrays.asList(credentials));
logger.debug("Attribute map for {}: {}", principal.getId(), principal.getAttributes());
populateAuthenticationMetadataAttributes(builder, credentials);
return builder.build();
}
/**
* Populate authentication metadata attributes.
*
* @param builder the builder
* @param credentials the credentials
*/
private void populateAuthenticationMetadataAttributes(final AuthenticationBuilder builder, final Credential[] credentials) {
for (final AuthenticationMetaDataPopulator populator : this.authenticationMetaDataPopulators) {
for (final Credential credential : credentials) {
if (populator.supports(credential)) {
populator.populateAttributes(builder, credential);
}
}
}
}
/**
* Add authentication method attribute.
*
* @param builder the builder
* @param authentication the authentication
*/
private void addAuthenticationMethodAttribute(final AuthenticationBuilder builder, final Authentication authentication) {
for (final HandlerResult result : authentication.getSuccesses().values()) {
builder.addAttribute(AUTHENTICATION_METHOD_ATTRIBUTE, result.getHandlerName());
}
}
/**
* Sets the authentication metadata populators that will be applied to every successful authentication event.
*
* @param populators Non-null list of metadata populators.
*/
@Resource(name="authenticationMetadataPopulators")
public final void setAuthenticationMetaDataPopulators(final List<AuthenticationMetaDataPopulator> populators) {
this.authenticationMetaDataPopulators = populators;
}
/**
* Sets the authentication policy used by this component.
*
* @param policy Non-null authentication policy. The default policy is {@link AnyAuthenticationPolicy}.
*/
@Resource(name="authenticationPolicy")
public void setAuthenticationPolicy(final AuthenticationPolicy policy) {
this.authenticationPolicy = policy;
}
/**
* Follows the same contract as {@link AuthenticationManager#authenticate(Credential...)}.
*
* @param credentials One or more credentials to authenticate.
*
* @return An authentication containing a resolved principal and metadata about successful and failed
* authentications. There SHOULD be a record of each attempted authentication, whether success or failure.
*
* @throws AuthenticationException When one or more credentials failed authentication such that security policy
* was not satisfied.
*/
protected AuthenticationBuilder authenticateInternal(final Credential... credentials)
throws AuthenticationException {
final AuthenticationBuilder builder = new DefaultAuthenticationBuilder(NullPrincipal.getInstance());
for (final Credential c : credentials) {
builder.addCredential(new BasicCredentialMetaData(c));
}
boolean found;
for (final Credential credential : credentials) {
found = false;
for (final Map.Entry<AuthenticationHandler, PrincipalResolver> entry : this.handlerResolverMap.entrySet()) {
final AuthenticationHandler handler = entry.getKey();
if (handler.supports(credential)) {
found = true;
try {
authenticateAndResolvePrincipal(builder, credential, entry.getValue(), handler);
if (this.authenticationPolicy.isSatisfiedBy(builder.build())) {
return builder;
}
} catch (final GeneralSecurityException e) {
logger.info("{} failed authenticating {}", handler.getName(), credential);
logger.debug("{} exception details: {}", handler.getName(), e.getMessage());
builder.addFailure(handler.getName(), e.getClass());
} catch (final PreventedException e) {
logger.error("{}: {} (Details: {})", handler.getName(), e.getMessage(), e.getCause().getMessage());
builder.addFailure(handler.getName(), e.getClass());
}
}
}
if (!found) {
logger.warn(
"Cannot find authentication handler that supports [{}] of type [{}], which suggests a configuration problem.",
credential, credential.getClass().getSimpleName());
}
}
evaluateProducedAuthenticationContext(builder);
return builder;
}
/**
* Evaluate produced authentication context.
*
* @param builder the builder
* @throws AuthenticationException the authentication exception
*/
private void evaluateProducedAuthenticationContext(final AuthenticationBuilder builder) throws AuthenticationException {
// We apply an implicit security policy of at least one successful authentication
if (builder.getSuccesses().isEmpty()) {
throw new AuthenticationException(builder.getFailures(), builder.getSuccesses());
}
// Apply the configured security policy
if (!this.authenticationPolicy.isSatisfiedBy(builder.build())) {
throw new AuthenticationException(builder.getFailures(), builder.getSuccesses());
}
}
/**
* Authenticate and resolve principal.
*
* @param builder the builder
* @param credential the credential
* @param resolver the resolver
* @param handler the handler
* @throws GeneralSecurityException the general security exception
* @throws PreventedException the prevented exception
*/
private void authenticateAndResolvePrincipal(final AuthenticationBuilder builder, final Credential credential,
final PrincipalResolver resolver, final AuthenticationHandler handler)
throws GeneralSecurityException, PreventedException {
final Principal principal;
final HandlerResult result = handler.authenticate(credential);
builder.addSuccess(handler.getName(), result);
logger.info("{} successfully authenticated {}", handler.getName(), credential);
if (resolver == null) {
principal = result.getPrincipal();
logger.debug(
"No resolver configured for {}. Falling back to handler principal {}",
handler.getName(),
principal);
} else {
principal = resolvePrincipal(handler.getName(), resolver, credential);
}
// Must avoid null principal since AuthenticationBuilder/ImmutableAuthentication
// require principal to be non-null
if (principal != null) {
builder.setPrincipal(principal);
}
}
/**
* Resolve principal.
*
* @param handlerName the handler name
* @param resolver the resolver
* @param credential the credential
* @return the principal
*/
protected Principal resolvePrincipal(
final String handlerName, final PrincipalResolver resolver, final Credential credential) {
if (resolver.supports(credential)) {
try {
final Principal p = resolver.resolve(credential);
logger.debug("{} resolved {} from {}", resolver, p, credential);
return p;
} catch (final Exception e) {
logger.error("{} failed to resolve principal from {}", resolver, credential, e);
}
} else {
logger.warn(
"{} is configured to use {} but it does not support {}, which suggests a configuration problem.",
handlerName,
resolver,
credential);
}
return null;
}
}
|
|
/*
* Copyright 2014 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License, version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package io.netty.handler.codec.http2;
import static io.netty.handler.codec.http2.Http2CodecUtil.MAX_UNSIGNED_INT;
import static io.netty.handler.codec.http2.Http2TestUtil.as;
import static io.netty.handler.codec.http2.Http2TestUtil.randomString;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufAllocator;
import io.netty.buffer.Unpooled;
import io.netty.buffer.UnpooledByteBufAllocator;
import io.netty.channel.Channel;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelPromise;
import io.netty.channel.DefaultChannelPromise;
import io.netty.util.CharsetUtil;
import io.netty.util.concurrent.EventExecutor;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
/**
* Integration tests for {@link DefaultHttp2FrameReader} and {@link DefaultHttp2FrameWriter}.
*/
public class DefaultHttp2FrameIOTest {
private DefaultHttp2FrameReader reader;
private DefaultHttp2FrameWriter writer;
private ByteBufAllocator alloc;
private CountDownLatch latch;
private ByteBuf buffer;
@Mock
private ChannelHandlerContext ctx;
@Mock
private Http2FrameListener listener;
@Mock
private ChannelPromise promise;
@Mock
private Channel channel;
@Mock
private EventExecutor executor;
@Before
public void setup() {
MockitoAnnotations.initMocks(this);
alloc = UnpooledByteBufAllocator.DEFAULT;
buffer = alloc.buffer();
latch = new CountDownLatch(1);
when(executor.inEventLoop()).thenReturn(true);
when(ctx.alloc()).thenReturn(alloc);
when(ctx.channel()).thenReturn(channel);
when(ctx.executor()).thenReturn(executor);
doAnswer(new Answer<ChannelPromise>() {
@Override
public ChannelPromise answer(InvocationOnMock invocation) throws Throwable {
return new DefaultChannelPromise(channel, executor);
}
}).when(ctx).newPromise();
doAnswer(new Answer<ChannelPromise>() {
@Override
public ChannelPromise answer(InvocationOnMock in) throws Throwable {
latch.countDown();
return promise;
}
}).when(promise).setSuccess();
doAnswer(new Answer<ChannelFuture>() {
@Override
public ChannelFuture answer(InvocationOnMock in) throws Throwable {
if (in.getArguments()[0] instanceof ByteBuf) {
ByteBuf tmp = (ByteBuf) in.getArguments()[0];
try {
buffer.writeBytes(tmp);
} finally {
tmp.release();
}
}
if (in.getArguments()[1] instanceof ChannelPromise) {
return ((ChannelPromise) in.getArguments()[1]).setSuccess();
}
return null;
}
}).when(ctx).write(any(), any(ChannelPromise.class));
reader = new DefaultHttp2FrameReader();
writer = new DefaultHttp2FrameWriter();
}
@Test
public void emptyDataShouldRoundtrip() throws Exception {
final ByteBuf data = Unpooled.EMPTY_BUFFER;
writer.writeData(ctx, 1000, data, 0, false, promise);
ByteBuf frame = null;
try {
frame = captureWrite();
reader.readFrame(ctx, frame, listener);
verify(listener).onDataRead(eq(ctx), eq(1000), eq(data), eq(0), eq(false));
} finally {
if (frame != null) {
frame.release();
}
data.release();
}
}
@Test
public void dataShouldRoundtrip() throws Exception {
final ByteBuf data = dummyData();
writer.writeData(ctx, 1000, data.retain().duplicate(), 0, false, promise);
ByteBuf frame = null;
try {
frame = captureWrite();
reader.readFrame(ctx, frame, listener);
verify(listener).onDataRead(eq(ctx), eq(1000), eq(data), eq(0), eq(false));
} finally {
if (frame != null) {
frame.release();
}
data.release();
}
}
@Test
public void dataWithPaddingShouldRoundtrip() throws Exception {
final ByteBuf data = dummyData();
writer.writeData(ctx, 1, data.retain().duplicate(), 0xFF, true, promise);
ByteBuf frame = null;
try {
frame = captureWrite();
reader.readFrame(ctx, frame, listener);
verify(listener).onDataRead(eq(ctx), eq(1), eq(data), eq(0xFF), eq(true));
} finally {
if (frame != null) {
frame.release();
}
data.release();
}
}
@Test
public void priorityShouldRoundtrip() throws Exception {
writer.writePriority(ctx, 1, 2, (short) 255, true, promise);
ByteBuf frame = captureWrite();
try {
reader.readFrame(ctx, frame, listener);
verify(listener).onPriorityRead(eq(ctx), eq(1), eq(2), eq((short) 255), eq(true));
} finally {
frame.release();
}
}
@Test
public void rstStreamShouldRoundtrip() throws Exception {
writer.writeRstStream(ctx, 1, MAX_UNSIGNED_INT, promise);
ByteBuf frame = captureWrite();
try {
reader.readFrame(ctx, frame, listener);
verify(listener).onRstStreamRead(eq(ctx), eq(1), eq(MAX_UNSIGNED_INT));
} finally {
frame.release();
}
}
@Test
public void emptySettingsShouldRoundtrip() throws Exception {
writer.writeSettings(ctx, new Http2Settings(), promise);
ByteBuf frame = captureWrite();
try {
reader.readFrame(ctx, frame, listener);
verify(listener).onSettingsRead(eq(ctx), eq(new Http2Settings()));
} finally {
frame.release();
}
}
@Test
public void settingsShouldStripShouldRoundtrip() throws Exception {
Http2Settings settings = new Http2Settings();
settings.pushEnabled(true);
settings.headerTableSize(4096);
settings.initialWindowSize(123);
settings.maxConcurrentStreams(456);
writer.writeSettings(ctx, settings, promise);
ByteBuf frame = captureWrite();
try {
reader.readFrame(ctx, frame, listener);
verify(listener).onSettingsRead(eq(ctx), eq(settings));
} finally {
frame.release();
}
}
@Test
public void settingsAckShouldRoundtrip() throws Exception {
writer.writeSettingsAck(ctx, promise);
ByteBuf frame = captureWrite();
try {
reader.readFrame(ctx, frame, listener);
verify(listener).onSettingsAckRead(eq(ctx));
} finally {
frame.release();
}
}
@Test
public void pingShouldRoundtrip() throws Exception {
ByteBuf data = dummyData();
writer.writePing(ctx, false, data.retain().duplicate(), promise);
ByteBuf frame = null;
try {
frame = captureWrite();
reader.readFrame(ctx, frame, listener);
verify(listener).onPingRead(eq(ctx), eq(data));
} finally {
if (frame != null) {
frame.release();
}
data.release();
}
}
@Test
public void pingAckShouldRoundtrip() throws Exception {
ByteBuf data = dummyData();
writer.writePing(ctx, true, data.retain().duplicate(), promise);
ByteBuf frame = null;
try {
frame = captureWrite();
reader.readFrame(ctx, frame, listener);
verify(listener).onPingAckRead(eq(ctx), eq(data));
} finally {
if (frame != null) {
frame.release();
}
data.release();
}
}
@Test
public void goAwayShouldRoundtrip() throws Exception {
ByteBuf data = dummyData();
writer.writeGoAway(ctx, 1, MAX_UNSIGNED_INT, data.retain().duplicate(), promise);
ByteBuf frame = null;
try {
frame = captureWrite();
reader.readFrame(ctx, frame, listener);
verify(listener).onGoAwayRead(eq(ctx), eq(1), eq(MAX_UNSIGNED_INT), eq(data));
} finally {
if (frame != null) {
frame.release();
}
data.release();
}
}
@Test
public void windowUpdateShouldRoundtrip() throws Exception {
writer.writeWindowUpdate(ctx, 1, Integer.MAX_VALUE, promise);
ByteBuf frame = captureWrite();
try {
reader.readFrame(ctx, frame, listener);
verify(listener).onWindowUpdateRead(eq(ctx), eq(1), eq(Integer.MAX_VALUE));
} finally {
frame.release();
}
}
@Test
public void emptyHeadersShouldRoundtrip() throws Exception {
Http2Headers headers = EmptyHttp2Headers.INSTANCE;
writer.writeHeaders(ctx, 1, headers, 0, true, promise);
ByteBuf frame = captureWrite();
try {
reader.readFrame(ctx, frame, listener);
verify(listener).onHeadersRead(eq(ctx), eq(1), eq(headers), eq(0), eq(true));
} finally {
frame.release();
}
}
@Test
public void emptyHeadersWithPaddingShouldRoundtrip() throws Exception {
Http2Headers headers = EmptyHttp2Headers.INSTANCE;
writer.writeHeaders(ctx, 1, headers, 0xFF, true, promise);
ByteBuf frame = captureWrite();
try {
reader.readFrame(ctx, frame, listener);
verify(listener).onHeadersRead(eq(ctx), eq(1), eq(headers), eq(0xFF), eq(true));
} finally {
frame.release();
}
}
@Test
public void binaryHeadersWithoutPriorityShouldRoundtrip() throws Exception {
Http2Headers headers = dummyBinaryHeaders();
writer.writeHeaders(ctx, 1, headers, 0, true, promise);
ByteBuf frame = captureWrite();
try {
reader.readFrame(ctx, frame, listener);
verify(listener).onHeadersRead(eq(ctx), eq(1), eq(headers), eq(0), eq(true));
} finally {
frame.release();
}
}
@Test
public void headersWithoutPriorityShouldRoundtrip() throws Exception {
Http2Headers headers = dummyHeaders();
writer.writeHeaders(ctx, 1, headers, 0, true, promise);
ByteBuf frame = captureWrite();
try {
reader.readFrame(ctx, frame, listener);
verify(listener).onHeadersRead(eq(ctx), eq(1), eq(headers), eq(0), eq(true));
} finally {
frame.release();
}
}
@Test
public void headersWithPaddingWithoutPriorityShouldRoundtrip() throws Exception {
Http2Headers headers = dummyHeaders();
writer.writeHeaders(ctx, 1, headers, 0xFF, true, promise);
ByteBuf frame = captureWrite();
try {
reader.readFrame(ctx, frame, listener);
verify(listener).onHeadersRead(eq(ctx), eq(1), eq(headers), eq(0xFF), eq(true));
} finally {
frame.release();
}
}
@Test
public void headersWithPriorityShouldRoundtrip() throws Exception {
Http2Headers headers = dummyHeaders();
writer.writeHeaders(ctx, 1, headers, 2, (short) 3, true, 0, true, promise);
ByteBuf frame = captureWrite();
try {
reader.readFrame(ctx, frame, listener);
verify(listener)
.onHeadersRead(eq(ctx), eq(1), eq(headers), eq(2), eq((short) 3), eq(true), eq(0), eq(true));
} finally {
frame.release();
}
}
@Test
public void headersWithPaddingWithPriorityShouldRoundtrip() throws Exception {
Http2Headers headers = dummyHeaders();
writer.writeHeaders(ctx, 1, headers, 2, (short) 3, true, 0xFF, true, promise);
ByteBuf frame = captureWrite();
try {
reader.readFrame(ctx, frame, listener);
verify(listener).onHeadersRead(eq(ctx), eq(1), eq(headers), eq(2), eq((short) 3), eq(true), eq(0xFF),
eq(true));
} finally {
frame.release();
}
}
@Test
public void continuedHeadersShouldRoundtrip() throws Exception {
Http2Headers headers = largeHeaders();
writer.writeHeaders(ctx, 1, headers, 2, (short) 3, true, 0, true, promise);
ByteBuf frame = captureWrite();
try {
reader.readFrame(ctx, frame, listener);
verify(listener)
.onHeadersRead(eq(ctx), eq(1), eq(headers), eq(2), eq((short) 3), eq(true), eq(0), eq(true));
} finally {
frame.release();
}
}
@Test
public void continuedHeadersWithPaddingShouldRoundtrip() throws Exception {
Http2Headers headers = largeHeaders();
writer.writeHeaders(ctx, 1, headers, 2, (short) 3, true, 0xFF, true, promise);
ByteBuf frame = captureWrite();
try {
reader.readFrame(ctx, frame, listener);
verify(listener).onHeadersRead(eq(ctx), eq(1), eq(headers), eq(2), eq((short) 3), eq(true), eq(0xFF),
eq(true));
} finally {
frame.release();
}
}
@Test
public void emptyPushPromiseShouldRoundtrip() throws Exception {
Http2Headers headers = EmptyHttp2Headers.INSTANCE;
writer.writePushPromise(ctx, 1, 2, headers, 0, promise);
ByteBuf frame = captureWrite();
try {
reader.readFrame(ctx, frame, listener);
verify(listener).onPushPromiseRead(eq(ctx), eq(1), eq(2), eq(headers), eq(0));
} finally {
frame.release();
}
}
@Test
public void pushPromiseShouldRoundtrip() throws Exception {
Http2Headers headers = dummyHeaders();
writer.writePushPromise(ctx, 1, 2, headers, 0, promise);
ByteBuf frame = captureWrite();
try {
reader.readFrame(ctx, frame, listener);
verify(listener).onPushPromiseRead(eq(ctx), eq(1), eq(2), eq(headers), eq(0));
} finally {
frame.release();
}
}
@Test
public void pushPromiseWithPaddingShouldRoundtrip() throws Exception {
Http2Headers headers = dummyHeaders();
writer.writePushPromise(ctx, 1, 2, headers, 0xFF, promise);
ByteBuf frame = captureWrite();
try {
reader.readFrame(ctx, frame, listener);
verify(listener).onPushPromiseRead(eq(ctx), eq(1), eq(2), eq(headers), eq(0xFF));
} finally {
frame.release();
}
}
@Test
public void continuedPushPromiseShouldRoundtrip() throws Exception {
Http2Headers headers = largeHeaders();
writer.writePushPromise(ctx, 1, 2, headers, 0, promise);
ByteBuf frame = captureWrite();
try {
reader.readFrame(ctx, frame, listener);
verify(listener).onPushPromiseRead(eq(ctx), eq(1), eq(2), eq(headers), eq(0));
} finally {
frame.release();
}
}
@Test
public void continuedPushPromiseWithPaddingShouldRoundtrip() throws Exception {
Http2Headers headers = largeHeaders();
writer.writePushPromise(ctx, 1, 2, headers, 0xFF, promise);
ByteBuf frame = captureWrite();
try {
reader.readFrame(ctx, frame, listener);
verify(listener).onPushPromiseRead(eq(ctx), eq(1), eq(2), eq(headers), eq(0xFF));
} finally {
frame.release();
}
}
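// Blocks (for up to two seconds) until the stubbed ctx.write(...) above has copied the
// outgoing frame into 'buffer' and completed its promise, then returns the captured bytes.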
private ByteBuf captureWrite() throws InterruptedException {
assertTrue(latch.await(2, TimeUnit.SECONDS));
return buffer;
}
private ByteBuf dummyData() {
return alloc.buffer().writeBytes("abcdefgh".getBytes(CharsetUtil.UTF_8));
}
private static Http2Headers dummyBinaryHeaders() {
DefaultHttp2Headers headers = new DefaultHttp2Headers();
for (int ix = 0; ix < 10; ++ix) {
headers.add(randomString(), randomString());
}
return headers;
}
private static Http2Headers dummyHeaders() {
return new DefaultHttp2Headers().method(as("GET")).scheme(as("https")).authority(as("example.org"))
.path(as("/some/path")).add(as("accept"), as("*/*"));
}
private static Http2Headers largeHeaders() {
DefaultHttp2Headers headers = new DefaultHttp2Headers();
for (int i = 0; i < 100; ++i) {
String key = "this-is-a-test-header-key-" + i;
String value = "this-is-a-test-header-value-" + i;
headers.add(as(key), as(value));
}
return headers;
}
}
|
|
/*******************************************************************************
*
* Copyright 2011-2014 Spiffy UI Team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.spiffyui.client.nav;
import java.util.ArrayList;
import java.util.List;
import org.spiffyui.client.HistoryCallback;
import org.spiffyui.client.JSUtil;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.user.client.Window;
import com.google.gwt.user.client.ui.Anchor;
import com.google.gwt.user.client.ui.RootPanel;
import com.google.gwt.user.client.ui.Widget;
/**
* This is the navigation bar for the main page.
*
*/
public class MainNavBar extends HasNavBarListenersPanel implements ClickHandler, HistoryCallback
{
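/*
 * Illustrative usage (sketch only; the NavItem construction is omitted and the host
 * page is assumed to define a "mainNavigation" element):
 *
 *   MainNavBar navBar = new MainNavBar();
 *   navBar.add(item);        // item is an already constructed NavItem
 *   navBar.selectItem(item); // highlight it and notify listeners
 */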
private static final String TITLE = Window.getTitle();
private final List<NavItem> m_items = new ArrayList<NavItem>();
private NavItem m_selectedItem = null;
private boolean m_bookmarkable = false;
/**
* The class used for selected navigation items
*/
protected static final String SELECTED_CLASS = "navItemSelected";
/**
* Create a new MainNavBar
*/
public MainNavBar()
{
getElement().setId("mainNavContainer");
if (RootPanel.get("mainNavigation") != null) {
RootPanel.get("mainNavigation").add(this);
} else {
throw new IllegalStateException("Unable to locate the mainNavigation element. You must import spiffyui.min.js before using this widget.");
}
}
@Override
public void add(Widget w)
{
if (w instanceof NavWidget) {
super.add(w);
if (w instanceof NavItem) {
m_items.add((NavItem) w);
((NavItem) w).getAnchor().addClickHandler(this);
} else if (w instanceof NavSection) {
((NavSection) w).setNavBar(this);
}
} else {
throw new IllegalArgumentException("You can only add NavItem, NavHeader, NavSection, NavPanel, or NavSeparator to this class");
}
}
/**
* Adds a nav item to the list of anchors to listen to
*
* @param item the nav item to add
*/
protected void addNavItem(NavItem item)
{
if (!m_items.contains(item)) {
item.getAnchor().addClickHandler(this);
m_items.add(item);
}
}
@Override
public void onClick(ClickEvent event)
{
event.preventDefault();
if (!isEnabled()) {
return;
}
NavItem navItem = null;
for (Widget w : getChildren()) {
if (w instanceof NavItem) {
NavItem item = (NavItem) w;
if (item.getAnchor() == event.getSource()) {
navItem = item;
break;
}
} else if (w instanceof NavSection) {
NavItem item = ((NavSection) w).getNavItem((Anchor) event.getSource());
if (item != null) {
navItem = item;
break;
}
}
}
doFireEvent(event, navItem);
}
/**
* Call-back when the history item is retrieved after a browser back or forward
* button is pressed. The default implementation will be to treat the supplied id
* as a {@link NavItem} reference and select the one with the same ID.
*
* If you subclass and override the {@link #addHistoryItem(String,String)} routine, you
* will probably need to override this method as well, to ensure that the supplied id
* (or history token) is translated correctly for your subclass.
*
* @param id The history token previously stored on the history stack.
*/
@Override
public void historyChanged(String id)
{
NavItem item = getItem(id);
if (item != null) {
selectItem(item, false, true);
}
}
private void doFireEvent(ClickEvent event, NavItem navItem)
{
//if any listener wants to cancel the event
//then do not continue
if (!firePreEvent(navItem)) {
return;
}
//continue if no listener returned false on the pre-event
for (Widget w : getChildren()) {
if (w instanceof NavItem) {
NavItem item = (NavItem) w;
if (item.getAnchor() == event.getSource()) {
item.addStyleName(SELECTED_CLASS);
m_selectedItem = item;
fireEvent(item);
} else {
item.removeStyleName(SELECTED_CLASS);
}
} else if (w instanceof NavSection) {
((NavSection) w).updateSelectedState((Anchor) event.getSource());
}
}
}
/**
* Selects the specified navigation item and fires the navigation event to
* let all listeners know it was selected. This intentionally does not
* fire a pre-event so that it cannot be intercepted. Returns true if the NavItem was found, false otherwise.
*
* @param item the item to select
* @return true if the item is one of the NavItems of the NavBar, false if not a member such as logout
*/
public boolean selectItem(NavItem item)
{
return selectItem(item, true, false);
}
/**
* Selects the specified navigation item and fires the navigation event to
* let all listeners know it was selected. Returns true if the NavItem was found, false otherwise.
*
* @param item the item to select
* @param addToHistory
* true if this item should be added to the browser's history and false otherwise
* @param doFirePreEvent
* true to allow interception and cancelling of the event, false to not fire the pre-event
*
* @return true if the item is one of the NavItems of the NavBar, false if not a member such as logout
*/
public boolean selectItem(NavItem item, boolean addToHistory, boolean doFirePreEvent)
{
return selectItem(item, addToHistory, doFirePreEvent, true);
}
/**
* Selects the specified navigation item and can fire the navigation event to
* let all listeners know it was selected, if the doFireSelectedEvent parameter is set accordingly.
* Returns true if the NavItem was found, false otherwise.
*
* @param item the item to select
* @param addToHistory
* true if this item should be added to the browser's history and false otherwise
* @param doFirePreEvent
* true to allow interception and cancelling of the event, false to not fire the pre-event
* @param doFireSelectEvent
* true to fire the selection event to navbar listeners, false to not fire the event
*
* @return true if the item is one of the NavItems of the NavBar, false if not a member such as logout
*/
public boolean selectItem(NavItem item, boolean addToHistory, boolean doFirePreEvent, boolean doFireSelectEvent)
{
//if any listener wants to cancel the event
//then do not continue
if (doFirePreEvent && !firePreEvent(item)) {
return false;
}
boolean found = false;
for (NavItem ni : m_items) {
if (ni == item) {
ni.addStyleName(SELECTED_CLASS);
m_selectedItem = ni;
if (doFireSelectEvent) {
fireEvent(item, addToHistory);
}
found = true;
} else {
ni.removeStyleName(SELECTED_CLASS);
}
}
return found;
}
/**
* Get the currently selected navigation item in this navigation bar.
*
* @return the currently selected navigation item or null if no items are selected
*/
public NavItem getSelectedItem()
{
return m_selectedItem;
}
/**
* Gets a navigation item from the navigation menu.
*
* @param id The id of the item to get
*
* @return the item with that id or null if it doesn't exist
*/
public NavItem getItem(String id)
{
for (NavItem item : m_items) {
if (item.getElement().getId().equals(id)) {
return item;
}
}
return null;
}
/**
* Set the navigation bar to be enabled or disabled. A disabled navigation bar
* is gray and never fires selection events.
*
* @param enabled true for enabled and false for disabled
*/
@Override
public void setEnabled(boolean enabled)
{
super.setEnabled(enabled);
if (enabled) {
removeStyleName("disabled");
} else {
addStyleName("disabled");
}
}
@Override
public void fireEvent(NavItem item, boolean addToHistory)
{
super.fireEvent(item, addToHistory);
if (isEnabled() && addToHistory) {
addHistoryItem(item.getElement().getId(), TITLE + " - " + item.getDisplayName());
}
}
/**
* A routine to store a history token that this Navbar can use when called
* in the future when the forward or back buttons are pressed.<p>
* This is available for subclasses to override if they wish to store a token
* that is not based on the NavItem's ID, for instance when multiple types of
* identifiers are used to reconstitute a past application state.<p>
* Please note that if you override this method you will need to provide an alternative implementation
* of the history callback routine {@link #historyChanged(String)}. That will need to translate the
* stored token that your overridden addHistoryItem routine stored.
*
* @param historyToken
*            A string value that will serve as a token describing the application's state
*
* @see #addHistoryItem(String, String)
* @deprecated This method is deprecated and will not be called.
*/
@Deprecated
protected void addHistoryItem(String historyToken)
{
/*
This method is a no-op and is just kept for backward compatibility
*/
}
/**
* A routine to store a history token that this Navbar can use when called
* in the future when the forward or back buttons are pressed.<p>
* This is available for subclasses to override if they wish to store a token
* that is not based on the NavItem's ID, for instance when multiple types of
* identifiers are used to reconstitute a past application state.<p>
* Please note that if you override this method you will need to provide an alternative implementation
* of the history callback routine {@link #historyChanged(String)}. That will need to translate the
* stored token that your overridden addHistoryItem routine stored.
*
* @param historyToken
*            A string value that will serve as a token describing the application's state
* @param title the window title for this history item
*/
protected void addHistoryItem(String historyToken, String title)
{
if (historyToken != null && historyToken.length() > 0) {
if (title == null) {
JSUtil.addHistoryItem(this, historyToken, m_bookmarkable);
} else {
JSUtil.addHistoryItem(this, historyToken, m_bookmarkable, title);
}
}
}
/**
* Set if history items added by this navigation bar should be bookmarkable.
*
* @param bookmarkable true for bookmarkable history items and false otherwise
*/
public void setBookmarkable(boolean bookmarkable)
{
m_bookmarkable = bookmarkable;
}
/** @return true if history items added by this navigation bar are bookmarkable */
public boolean getBookmarkable()
{
return m_bookmarkable;
}
/**
* Remove all the items from this navigation bar.
*
*/
@Override
public void clear()
{
super.clear();
m_items.clear();
}
/**
* Remove the specified NavItem from the navigation bar.
*
* @param item the item to remove
*/
public void remove(NavItem item)
{
super.remove(item);
m_items.remove(item);
}
}
|
|
package ca.ualberta.team7project.network;
import java.util.ArrayList;
import java.util.UUID;
import android.content.Context;
import ca.ualberta.team7project.MainActivity;
import ca.ualberta.team7project.cache.CacheOperation;
import ca.ualberta.team7project.models.ThreadModel;
/**
* Fetches comments from the server
* <p>
* Prepares search strings for ElasticSearchOperation
*
*/
public class ThreadFetcher
{
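/*
 * Illustrative usage (sketch only; assumes the ElasticSearch index is reachable):
 *
 *   ThreadFetcher fetcher = new ThreadFetcher(20);
 *   ArrayList<ThreadModel> topics = fetcher.fetchComments(ThreadFetcher.SortMethod.DATE);
 */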
void CacheToast()
{
//Toast.makeText(MainActivity.getMainContext(), "Pulling from cache", Toast.LENGTH_SHORT).show();
}
private ElasticSearchOperation search;
private CacheOperation cache;
private String listSize;
double lat = 0;
double lon = 0;
boolean isPictureSort = false;
private final String pictureFilterEntityString = "\"filter\":{\"exists\":{\"field\":\"innerBitmapData\"}}";
private ConnectionDetector detector;
/**
* Construct and set max size to the default (15)
*/
public ThreadFetcher()
{
super();
listSize = "size=15";
search = new ElasticSearchOperation();
detector = new ConnectionDetector(MainActivity.getMainContext());
}
/**
* Construct with the default max size (15), using the supplied context for connection detection
* @param context the context used to check network connectivity
*/
public ThreadFetcher(Context context)
{
super();
listSize = "size=15";
search = new ElasticSearchOperation();
detector = new ConnectionDetector(context);
}
/**
* Construct with a custom max size
* @param maxItems the max number of comments to pull from the server
*/
public ThreadFetcher(int maxItems)
{
super();
listSize = "size=" + Integer.toString(maxItems);
search = new ElasticSearchOperation();
detector = new ConnectionDetector(MainActivity.getMainContext());
}
/**
* Set the ThreadFetcher to fetch only comments with pictures
*/
public void EnablePictureSort()
{
isPictureSort = true;
}
/**
* Enumeration of methods used to get the "best/most relevant" topics
*/
public static enum SortMethod
{
NO_SORT, DATE, LOCATION
}
/**
* Insert the user's location for proximity sorting
* <p>
* This must be called if passing in SortMethod.LOCATION, else the user's location is treated as [0, 0]
* @param latitude the user's latitude
* @param longitude the user's longitude
*/
public void SetLocation(double latitude, double longitude)
{
this.lat = latitude;
this.lon = longitude;
}
public void InitCacheOperation(SortMethod sort)
{
cache = new CacheOperation();
cache.SetSortMethod(sort);
cache.SetMaxResults(20);
cache.SetFilterPicture(isPictureSort);
cache.SetLocation(lat, lon);
}
/**
* Fetch comments that match a set of tags
* <p>
* Only comments with <i>all</i> the specified tags are fetched
* @param tags a list of tags
* @return list of comments/topics
*/
public ArrayList<ThreadModel> fetchTaggedComments(ArrayList<String> tags)
{
if(! detector.isConnectingToInternet())
{
InitCacheOperation(SortMethod.NO_SORT);
CacheToast();
return cache.searchTags(tags);
}
String sortString = "_search?sort=threadTimestamp:desc" + "&" + "size=40";
String sortEntity = "{";
sortEntity += "\"query\":{\"query_string\":{\"query\":\"innerTags:(";
//insert space-separated tags into the sortEntity (extra space at end is OK)
for(String tag : tags)
{
sortEntity += tag + " ";
}
sortEntity += ")\"}}";
if(isPictureSort)
sortEntity += "," + pictureFilterEntityString;
sortEntity += "}";
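// Illustrative result for tags "cars" and "bikes" (no picture filter):
// {"query":{"query_string":{"query":"innerTags:(cars bikes )"}}}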
return new ArrayList<ThreadModel>(search.searchThreads(sortString, sortEntity));
}
/**
* Fetch comments globally (so not by parent) by location/date
* @param sort sorting method
* @return list of comments/topics
*/
public ArrayList<ThreadModel> fetchComments(SortMethod sort)
{
if(! detector.isConnectingToInternet())
{
InitCacheOperation(sort);
CacheToast();
return cache.searchAll();
}
String sortString = null;
String sortEntity = "{";
switch(sort)
{
case DATE:
sortString = "_search?sort=threadTimestamp:desc" + "&" + listSize;
break;
case LOCATION:
sortString = "_search" + "?" + listSize;
sortEntity += "\"sort\":{\"_geo_distance\":{\"user.locationModel.locationInner\":[";
sortEntity += Double.toString(lon);
sortEntity += ", ";
sortEntity += Double.toString(lat);
sortEntity += "],\"order\":\"asc\",\"unit\":\"km\"}}";
break;
case NO_SORT:
default:
sortString = "_search" + "?" + listSize;
}
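// Illustrative LOCATION sort entity (coordinates are placeholders):
// {"sort":{"_geo_distance":{"user.locationModel.locationInner":[-113.5, 53.5],"order":"asc","unit":"km"}}}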
if(isPictureSort){
sortEntity += ((sort == SortMethod.LOCATION) ? "," : "") + pictureFilterEntityString;
}
sortEntity += "}";
return new ArrayList<ThreadModel>(search.searchThreads(sortString, sortEntity));
}
/**
* Fetch comments by parent (and location/date)
* <p>
* Can be used to fetch only direct children of a thread (pass parentID = ThreadModel.ROOT)
* @param parentID UUID of parent comment
* @param sort sorting method
* @return list of comments (or topics if parentID = ThreadModel.ROOT)
*/
public ArrayList<ThreadModel> fetchChildComments(UUID parentID, SortMethod sort)
{
if(! detector.isConnectingToInternet())
{
InitCacheOperation(sort);
CacheToast();
return cache.searchChildren(parentID);
}
String sortString = null;
String sortEntity = "{";
switch(sort)
{
case DATE:
sortString = "_search?q=parentUUID:" + parentID.toString() + "&" +
"sort=threadTimestamp:desc" + "&" + listSize;
break;
case LOCATION:
sortString = "_search?q=parentUUID:" + parentID.toString() + "&" + listSize;
sortEntity += "\"sort\":{\"_geo_distance\":{\"user.locationModel.locationInner\":[";
sortEntity += Double.toString(lon);
sortEntity += ", ";
sortEntity += Double.toString(lat);
sortEntity += "],\"order\":\"asc\",\"unit\":\"km\"}}";
break;
case NO_SORT:
default:
sortString = "_search?q=parentUUID:" + parentID.toString() + "&" + listSize;
}
if(isPictureSort){
sortEntity += ((sort == SortMethod.LOCATION) ? "," : "") + pictureFilterEntityString;
}
sortEntity += "}";
return new ArrayList<ThreadModel>(search.searchThreads(sortString, sortEntity));
}
/**
* Fetch comments by topicUUID
* <p>
* Can be used to fetch all the comments whose topicUUID is specified
* @param topicID UUID of the topic whose comments should be fetched
* @return list of comments
*/
public ArrayList<ThreadModel> fetchAllComments(UUID topicID)
{
String sortString = "_search?q=topicUUID:" + topicID.toString() + "&" + "size=40";
String sortEntity = "{}";
return new ArrayList<ThreadModel>(search.searchThreads(sortString, sortEntity));
}
/**
* Fetch comments by a list of their own UUID's
* <p>
* Used to fetch the list of favorited comments from server
* @param favorites list of UUID's for favorited comments
* @param sort sorting method
* @return list of favorited comments
*/
public ArrayList<ThreadModel> fetchFavorites(ArrayList<UUID> favorites, SortMethod sort)
{
if(! detector.isConnectingToInternet())
{
InitCacheOperation(sort);
CacheToast();
return cache.searchFavorites(favorites);
}
String sortString = null;
String sortEntity = null;
String favoritesSize = "size=" + Integer.toString(favorites.size());
sortEntity = "{";
switch(sort)
{
case DATE:
sortString = "_search?sort=threadTimestamp:desc" + "&" + favoritesSize;
break;
case LOCATION:
sortString = "_search?" + favoritesSize;
sortEntity += "\"sort\":{\"_geo_distance\":{\"user.locationModel.locationInner\":[";
sortEntity += Double.toString(lon);
sortEntity += ", ";
sortEntity += Double.toString(lat);
sortEntity += "],\"order\":\"asc\",\"unit\":\"km\"}}";
break;
case NO_SORT:
default:
sortString = "_search?" + favoritesSize;
}
sortEntity += ((sort == SortMethod.LOCATION) ? "," : "")
+ "\"query\":{\"query_string\":{\"query\":\"uniqueID:(";
//insert space-separated UUIDs into the sortEntity (extra space at end is OK)
for(UUID fav : favorites)
{
sortEntity += fav.toString() + " ";
}
sortEntity += ")\"}}";
if(isPictureSort)
sortEntity += "," + pictureFilterEntityString;
sortEntity += "}";
return new ArrayList<ThreadModel>(search.searchThreads(sortString, sortEntity));
}
/**
* Fetch a comment by its own unique ID.
* <p>
* This mirrors the fetch logic above, but is kept separate because the default (NO_SORT)
* condition there would not build the correct query for a unique ID.
* @param uniqueID the unique ID of the comment to fetch
* @param sort sorting method
* @return list of matching comments
*/
public ArrayList<ThreadModel> fetchByUnique(UUID uniqueID, SortMethod sort)
{
String sortString = null;
String sortEntity = null;
switch(sort)
{
case DATE:
sortString = "_search?q=uniqueID:" + uniqueID.toString() + "&" +
"sort=threadTimestamp:desc" + "&" + listSize;
sortEntity = null;
break;
case LOCATION:
sortString = "_search?q=uniqueID:" + uniqueID.toString() + "&" + listSize;
sortEntity = "{\"sort\":{\"_geo_distance\":{\"user.locationModel.locationInner\":[";
sortEntity += Double.toString(lon);
sortEntity += ", ";
sortEntity += Double.toString(lat);
sortEntity += "],\"order\":\"asc\",\"unit\":\"km\"}}}";
break;
case NO_SORT:
default:
sortString = "_search?q=uniqueID:" + uniqueID.toString() + "&" + listSize;
sortEntity = null;
}
return new ArrayList<ThreadModel>(search.searchThreads(sortString, sortEntity));
}
}
|
|
/*
* Zed Attack Proxy (ZAP) and its related class files.
*
* ZAP is an HTTP/HTTPS proxy for assessing web application security.
*
* Copyright 2012 The ZAP development team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.zaproxy.zap.extension.quickstart;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Event;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.Insets;
import java.awt.Toolkit;
import java.awt.event.KeyEvent;
import java.net.URL;
import javax.swing.BorderFactory;
import javax.swing.ImageIcon;
import javax.swing.JButton;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.KeyStroke;
import javax.swing.border.EtchedBorder;
import org.apache.commons.httpclient.URI;
import org.parosproxy.paros.Constant;
import org.parosproxy.paros.control.Control;
import org.parosproxy.paros.control.Control.Mode;
import org.parosproxy.paros.extension.AbstractPanel;
import org.parosproxy.paros.model.Model;
import org.parosproxy.paros.model.SiteNode;
import org.parosproxy.paros.view.View;
import org.zaproxy.zap.extension.alert.ExtensionAlert;
import org.zaproxy.zap.extension.api.API;
import org.zaproxy.zap.extension.brk.BreakPanel;
import org.zaproxy.zap.extension.search.SearchPanel;
import org.zaproxy.zap.extension.tab.Tab;
import org.zaproxy.zap.utils.DesktopUtils;
import org.zaproxy.zap.utils.DisplayUtils;
import org.zaproxy.zap.utils.ZapTextField;
import org.zaproxy.zap.view.LayoutHelper;
import org.zaproxy.zap.view.NodeSelectDialog;
public class QuickStartPanel extends AbstractPanel implements Tab {
private static final long serialVersionUID = 1L;
private ExtensionQuickStart extension;
private JButton attackButton = null;
private JButton stopButton = null;
private JButton confButton = null;
private ZapTextField urlField = null;
private ZapTextField confField = null;
private JLabel progressLabel = null;
public QuickStartPanel(ExtensionQuickStart extension) {
super();
this.extension = extension;
initialize();
}
private void initialize() {
this.setIcon(new ImageIcon(BreakPanel.class.getResource("/resource/icon/16/147.png"))); // 'lightning' icon
this.setDefaultAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_Q, Toolkit.getDefaultToolkit().getMenuShortcutKeyMask() | Event.SHIFT_MASK, false));
this.setMnemonic(Constant.messages.getChar("quickstart.panel.mnemonic"));
this.setLayout(new BorderLayout());
JPanel panelContent = new JPanel(new GridBagLayout());
JScrollPane jScrollPane = new JScrollPane();
jScrollPane.setFont(new java.awt.Font("Dialog", java.awt.Font.PLAIN, 11));
jScrollPane.setHorizontalScrollBarPolicy(javax.swing.JScrollPane.HORIZONTAL_SCROLLBAR_AS_NEEDED);
jScrollPane.setViewportView(panelContent);
this.add(jScrollPane, BorderLayout.CENTER);
panelContent.setBackground(Color.white);
panelContent.setBorder(BorderFactory.createEtchedBorder(EtchedBorder.RAISED));
/*
* Layout:
* Col 0 1 2 3 4
* Row+----------------------+----------------------+----------------------+----------------------+----------------------+
* 0 | Top welcome message | zap128x128.png |
* 1 | URL: | [ Url field ] | |
* 2 | | [ Attack button ] | [ Stop button ] | padding | |
* 3 | Progress: | Progress details | |
* | Bottom message |
* | Show at start: | [x] | | | |
* +----------------------+----------------------+----------------------+----------------------+----------------------+
*/
panelContent.add(new JLabel(Constant.messages.getString("quickstart.panel.topmsg")),
LayoutHelper.getGBC(0, 0, 4, 1.0D, new Insets(5,5,5,5)));
if (Constant.isDevBuild()) {
panelContent.add(new JLabel(new ImageIcon(QuickStartPanel.class.getResource(
"/org/zaproxy/zap/extension/quickstart/resources/zap128x128dark.png"))),
LayoutHelper.getGBC(4, 0, 1, 0.0D, 0.0D, GridBagConstraints.NORTH));
} else {
panelContent.add(new JLabel(DisplayUtils.getScaledIcon(new ImageIcon(SearchPanel.class.getResource("/resource/zap128x128.png")))),
LayoutHelper.getGBC(4, 0, 1, 0.0D, 0.0D, GridBagConstraints.NORTH));
}
panelContent.add(new JLabel(Constant.messages.getString("quickstart.label.url")),
LayoutHelper.getGBC(0, 1, 1, 0.0D, new Insets(5,5,5,5)));
JPanel urlSelectPanel = new JPanel(new GridBagLayout());
JButton selectButton = new JButton(Constant.messages.getString("all.button.select"));
selectButton.setIcon(DisplayUtils.getScaledIcon(new ImageIcon(View.class.getResource("/resource/icon/16/094.png")))); // Globe icon
selectButton.addActionListener(new java.awt.event.ActionListener() {
@Override
public void actionPerformed(java.awt.event.ActionEvent e) {
NodeSelectDialog nsd = new NodeSelectDialog(View.getSingleton().getMainFrame());
SiteNode node = null;
try {
node = Model.getSingleton().getSession().getSiteTree().findNode(new URI(getUrlField().getText(), false));
} catch (Exception e2) {
// Ignore
}
node = nsd.showDialog(node);
if (node != null && node.getHistoryReference() != null) {
try {
getUrlField().setText(node.getHistoryReference().getURI().toString());
} catch (Exception e1) {
// Ignore
}
}
}
});
urlSelectPanel.add(this.getUrlField(), LayoutHelper.getGBC(0, 0, 1, 1.0D));
urlSelectPanel.add(selectButton, LayoutHelper.getGBC(1, 0, 1, 0.0D));
panelContent.add(urlSelectPanel, LayoutHelper.getGBC(1, 1, 3, 0.25D));
panelContent.add(this.getAttackButton(), LayoutHelper.getGBC(1, 2, 1, 0.0D));
panelContent.add(this.getStopButton(), LayoutHelper.getGBC(2, 2, 1, 0.0D));
panelContent.add(new JLabel(""), LayoutHelper.getGBC(3, 2, 1, 0.75D, 0.0D)); // Padding to right of buttons
progressLabel = new JLabel(Constant.messages.getString("quickstart.progress." + AttackThread.Progress.notstarted.name()));
panelContent.add(new JLabel(Constant.messages.getString("quickstart.label.progress")),
LayoutHelper.getGBC(0, 3, 1, 0.0D, new Insets(5,5,5,5)));
panelContent.add(this.progressLabel, LayoutHelper.getGBC(1, 3, 3, 0.0D));
panelContent.add(new JLabel(Constant.messages.getString("quickstart.panel.proxymsg")),
LayoutHelper.getGBC(0, 4, 5, 1.0D, new Insets(5,5,5,5)));
if (Control.getSingleton().getExtensionLoader().getExtension("ExtensionPlugNHack") != null) {
// Plug-n-Hack extension has been installed - this makes configuration much easier :)
if (DesktopUtils.canOpenUrlInBrowser()) {
panelContent.add(new JLabel(Constant.messages.getString("quickstart.panel.pnhmsg")),
LayoutHelper.getGBC(0, 6, 5, 1.0D, new Insets(5,5,5,5)));
panelContent.add(new JLabel(Constant.messages.getString("quickstart.label.mitm")),
LayoutHelper.getGBC(0, 7, 1, 0.0D, new Insets(5,5,5,5)));
panelContent.add(this.getConfButton(), LayoutHelper.getGBC(1, 7, 1, 0.0D));
panelContent.add(new JLabel(
Constant.messages.getString("quickstart.label.mitmalt")),
LayoutHelper.getGBC(0, 8, 1, 0.0D, new Insets(5,5,5,5)));
} else {
panelContent.add(new JLabel(
Constant.messages.getString("quickstart.label.mitmurl")),
LayoutHelper.getGBC(0, 8, 1, 0.0D, new Insets(5,5,5,5)));
}
panelContent.add(this.getConfField(), LayoutHelper.getGBC(1, 8, 3, 0.25D));
} else {
panelContent.add(new JLabel(Constant.messages.getString("quickstart.panel.helpmsg")),
LayoutHelper.getGBC(0, 5, 5, 1.0D, new Insets(5,5,5,5)));
}
panelContent.add(new JLabel(""), LayoutHelper.getGBC(0, 10, 4, 1.D, 1.0D)); // Padding at bottom
this.setMode(Control.getSingleton().getMode());
}
protected void setMode(Mode mode) {
switch (mode) {
case safe:
case protect:
this.getUrlField().setEditable(false);
this.getAttackButton().setEnabled(false);
break;
case standard:
case attack:
this.getUrlField().setEditable(true);
this.getAttackButton().setEnabled(true);
break;
}
}
private ZapTextField getUrlField () {
if (urlField == null) {
urlField = new ZapTextField();
urlField.setText("http://");
}
return urlField;
}
private JButton getAttackButton() {
if (attackButton == null) {
attackButton = new JButton();
attackButton.setText(Constant.messages.getString("quickstart.button.label.attack"));
attackButton.setIcon(DisplayUtils.getScaledIcon(new ImageIcon(SearchPanel.class.getResource("/resource/icon/16/147.png")))); // 'lightning' icon
attackButton.setToolTipText(Constant.messages.getString("quickstart.button.tooltip.attack"));
attackButton.addActionListener(new java.awt.event.ActionListener() {
@Override
public void actionPerformed(java.awt.event.ActionEvent e) {
attackUrl();
}
});
}
return attackButton;
}
private JButton getStopButton() {
if (stopButton == null) {
stopButton = new JButton();
stopButton.setText(Constant.messages.getString("quickstart.button.label.stop"));
stopButton.setIcon(DisplayUtils.getScaledIcon(new ImageIcon(SearchPanel.class.getResource("/resource/icon/16/142.png")))); // 'stop' icon
stopButton.setToolTipText(Constant.messages.getString("quickstart.button.tooltip.stop"));
stopButton.setEnabled(false);
stopButton.addActionListener(new java.awt.event.ActionListener() {
@Override
public void actionPerformed(java.awt.event.ActionEvent e) {
stopAttack();
}
});
}
return stopButton;
}
private String getPlugNHackUrl() {
String apiKey = API.getInstance().getApiKey();
String keyStr = "";
if (apiKey != null && apiKey.length() > 0) {
keyStr = "?" + API.API_KEY_PARAM + "=" + apiKey;
}
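// Illustrative result: http://localhost:8080/pnh/ with the API key query parameter
// appended when one is configured (host and port come from the current proxy options).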
return "http://" + Model.getSingleton().getOptionsParam().getProxyParam().getProxyIp() + ":" +
Model.getSingleton().getOptionsParam().getProxyParam().getProxyPort() + "/pnh/" + keyStr;
}
private ZapTextField getConfField () {
if (confField == null) {
confField = new ZapTextField();
confField.setEditable(false);
updateConfField(Model.getSingleton().getOptionsParam().getApiParam().isEnabled());
}
return confField;
}
private void updateConfField(boolean apiState) {
if (confField == null) {
return;
}
//PnH URL Field has the same enable state as the API
confField.setEnabled(apiState);
if (apiState) {
confField.setText(getPlugNHackUrl());
} else {
confField.setText(Constant.messages.getString("quickstart.mitm.api.disabled"));
}
}
private JButton getConfButton() {
if (confButton == null) {
confButton = new JButton();
confButton.setText(Constant.messages.getString("quickstart.button.label.mitm"));
confButton.setToolTipText(Constant.messages.getString("quickstart.button.tooltip.mitm"));
confButton.setIcon(DisplayUtils.getScaledIcon(new ImageIcon(
QuickStartPanel.class.getResource("/org/zaproxy/zap/extension/quickstart/resources/plug.png"))));
updateConfButton(Model.getSingleton().getOptionsParam().getApiParam().isEnabled());
confButton.addActionListener(new java.awt.event.ActionListener() {
@Override
public void actionPerformed(java.awt.event.ActionEvent e) {
DesktopUtils.openUrlInBrowser(getPlugNHackUrl());
}
});
}
return confButton;
}
private void updateConfButton(boolean apiState) {
if (confButton == null) {
return;
}
//PnH button has the same enable state as the API
confButton.setEnabled(apiState);
if (apiState) {
confButton.setToolTipText(Constant.messages.getString("quickstart.button.tooltip.mitm"));
} else {
confButton.setToolTipText(Constant.messages.getString("quickstart.mitm.api.disabled"));
}
}
public void updatePnhPanelElements(boolean apiState) {
updateConfButton(apiState);
updateConfField(apiState);
}
boolean attackUrl () {
URL url;
try {
url = new URL(this.getUrlField().getText());
} catch (Exception e) {
extension.getView().showWarningDialog(Constant.messages.getString("quickstart.url.warning.invalid"));
this.getUrlField().requestFocusInWindow();
return false;
}
getAttackButton().setEnabled(false);
getStopButton().setEnabled(true);
extension.attack(url);
return true;
}
void setAttackUrl(String url) {
getUrlField().setText(url);
}
private void stopAttack() {
extension.stopAttack();
stopButton.setEnabled(false);
}
protected void notifyProgress(AttackThread.Progress progress) {
this.notifyProgress(progress, null);
}
protected void notifyProgress(AttackThread.Progress progress, String msg) {
if (msg == null) {
progressLabel.setText(Constant.messages.getString("quickstart.progress." + progress.name()));
} else {
progressLabel.setText(msg);
}
switch (progress) {
case complete:
getAttackButton().setEnabled(true);
getStopButton().setEnabled(false);
ExtensionAlert extAlert = ((ExtensionAlert)Control.getSingleton().getExtensionLoader().getExtension(ExtensionAlert.NAME));
if (extAlert != null) {
extAlert.setAlertTabFocus();
}
break;
case failed:
case stopped:
getAttackButton().setEnabled(true);
getStopButton().setEnabled(false);
break;
default:
break;
}
}
/**
* This should override (or use) the AbstractPanel class, but can't do so until the relevant changes are
* available in the zap-extensions trunk.
* @return true if this panel should be shown by default
*/
public boolean isShowByDefault() {
return true;
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* $Id$ */
package org.apache.fop.afp.fonts;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.WeakHashMap;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.xmlgraphics.image.loader.util.SoftMapCache;
import org.apache.fop.afp.AFPConstants;
import org.apache.fop.afp.AFPEventProducer;
import org.apache.fop.afp.util.ResourceAccessor;
import org.apache.fop.afp.util.StructuredFieldReader;
import org.apache.fop.fonts.Typeface;
/**
* The CharacterSetBuilder is responsible for building a CharacterSet instance that holds
* the font metric data. The data is either read from disk and passed to a CharacterSet (*),
* or a FopCharacterSet is instantiated that is composed of a Typeface instance configured
* with this data.<p/>
* (*) For referenced fonts, CharacterSetBuilder is responsible for reading the font attributes
* from binary code page files and the character set metric files. In IBM font structure, a
* code page maps each character of text to the characters in a character set.
* Each character is translated into a code point. When the character is
* printed, each code point is matched to a character ID on the code page
* specified. The character ID is then matched to the image (raster pattern or
* outline pattern) of the character in the character set specified. The image
* in the character set is the image that is printed in the document. To be a
* valid code page for a particular character set, all character IDs in the code
* page must be included in that character set. <p/>This class will read the
* font information from the binary code page files and character set metric
* files in order to determine the correct metrics to use when rendering the
* formatted object. <p/>
*
*/
public abstract class CharacterSetBuilder {
/**
* Static logging instance
*/
protected static final Log LOG = LogFactory.getLog(CharacterSetBuilder.class);
/**
* Template used to convert lists to arrays.
*/
private static final CharacterSetOrientation[] EMPTY_CSO_ARRAY = new CharacterSetOrientation[0];
/** Codepage MO:DCA structured field. */
private static final byte[] CODEPAGE_SF = new byte[] {
(byte) 0xD3, (byte) 0xA8, (byte) 0x87};
/** Character table MO:DCA structured field. */
private static final byte[] CHARACTER_TABLE_SF = new byte[] {
(byte) 0xD3, (byte) 0x8C, (byte) 0x87};
/** Font descriptor MO:DCA structured field. */
private static final byte[] FONT_DESCRIPTOR_SF = new byte[] {
(byte) 0xD3, (byte) 0xA6, (byte) 0x89 };
/** Font control MO:DCA structured field. */
private static final byte[] FONT_CONTROL_SF = new byte[] {
(byte) 0xD3, (byte) 0xA7, (byte) 0x89 };
/** Font orientation MO:DCA structured field. */
private static final byte[] FONT_ORIENTATION_SF = new byte[] {
(byte) 0xD3, (byte) 0xAE, (byte) 0x89 };
/** Font position MO:DCA structured field. */
private static final byte[] FONT_POSITION_SF = new byte[] {
(byte) 0xD3, (byte) 0xAC, (byte) 0x89 };
/** Font index MO:DCA structured field. */
private static final byte[] FONT_INDEX_SF = new byte[] {
(byte) 0xD3, (byte) 0x8C, (byte) 0x89 };
/**
* The collection of code pages
*/
private final Map<String, Map<String, String>> codePagesCache
= Collections.synchronizedMap(new WeakHashMap<String, Map<String, String>>());
/**
* Cache of charactersets
*/
private final SoftMapCache characterSetsCache = new SoftMapCache(true);
/** Default constructor. */
private CharacterSetBuilder() {
}
/**
* Factory method for the single-byte implementation of AFPFontReader.
* @return AFPFontReader
*/
public static CharacterSetBuilder getSingleByteInstance() {
return SingleByteLoader.getInstance();
}
/**
* Factory method for the double-byte (CID Keyed font (Type 0)) implementation of AFPFontReader.
* @return AFPFontReader
*/
public static CharacterSetBuilder getDoubleByteInstance() {
return DoubleByteLoader.getInstance();
}
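/*
 * Illustrative usage (the character set, code page and encoding names are placeholders;
 * the accessor and event producer must be supplied by the caller):
 *
 *   CharacterSet cs = CharacterSetBuilder.getSingleByteInstance()
 *           .build("C0H200B0", "T1V10500", "Cp500", accessor, eventProducer);
 */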
/**
* Returns an InputStream to a given file path and filename
*
* @param accessor the resource accessor
* @param filename the file name
* @param eventProducer for handling AFP related events
* @return an inputStream
*
* @throws IOException in the event that an I/O exception of some sort has occurred
*/
protected InputStream openInputStream(ResourceAccessor accessor, String filename,
AFPEventProducer eventProducer)
throws IOException {
URI uri;
try {
uri = new URI(filename.trim());
} catch (URISyntaxException e) {
throw new FileNotFoundException("Invalid filename: "
+ filename + " (" + e.getMessage() + ")");
}
if (LOG.isDebugEnabled()) {
LOG.debug("Opening " + uri);
}
InputStream inputStream = accessor.createInputStream(uri);
return inputStream;
}
/**
* Closes the inputstream
*
* @param inputStream the inputstream to close
*/
protected void closeInputStream(InputStream inputStream) {
try {
if (inputStream != null) {
inputStream.close();
}
} catch (Exception ex) {
// Let's log it at least!
LOG.error(ex.getMessage());
}
}
/**
* Load the font details and metrics into the CharacterSetMetric object, this will use the
* actual afp code page and character set files to load the object with the necessary metrics.
*
* @param characterSetName name of the characterset
* @param codePageName name of the code page file
* @param encoding encoding name
* @param accessor used to load codepage and characterset
* @param eventProducer for handling AFP related events
* @return CharacterSet object
* @throws IOException if an I/O error occurs
*/
public CharacterSet build(String characterSetName, String codePageName, String encoding,
ResourceAccessor accessor, AFPEventProducer eventProducer) throws IOException {
return processFont(characterSetName, codePageName, encoding, false, accessor,
eventProducer);
}
/**
* Load the font details and metrics into the CharacterSetMetric object, this will use the
* actual afp code page and character set files to load the object with the necessary metrics.
* This method is to be used for double byte character sets (DBCS).
*
* @param characterSetName name of the characterset
* @param codePageName name of the code page file
* @param encoding encoding name
* @param isEDBCS if this is an EBCDIC double byte character set (DBCS)
* @param accessor used to load codepage and characterset
* @param eventProducer for handling AFP related events
* @return CharacterSet object
* @throws IOException if an I/O error occurs
*/
public CharacterSet buildDBCS(String characterSetName, String codePageName, String encoding,
boolean isEDBCS, ResourceAccessor accessor, AFPEventProducer eventProducer)
throws IOException {
return processFont(characterSetName, codePageName, encoding, isEDBCS, accessor,
eventProducer);
}
/**
* Load the font details and metrics into the CharacterSetMetric object, this will use the
* actual afp code page and character set files to load the object with the necessary metrics.
*
* @param characterSetName name of the characterset
* @param codePageName the name of the code page to use
* @param encoding name of the encoding in use
* @param typeface base14 font name
* @param eventProducer for handling AFP related events
* @return CharacterSet object
* @throws IOException if an I/O error occurs
*/
public CharacterSet build(String characterSetName, String codePageName, String encoding,
Typeface typeface, AFPEventProducer eventProducer) throws IOException {
return new FopCharacterSet(codePageName, encoding, characterSetName, typeface,
eventProducer);
}
private CharacterSet processFont(String characterSetName, String codePageName, String encoding,
boolean isEDBCS, ResourceAccessor accessor, AFPEventProducer eventProducer)
throws IOException {
// check for cached version of the characterset
String descriptor = characterSetName + "_" + encoding + "_" + codePageName;
CharacterSet characterSet = (CharacterSet) characterSetsCache.get(descriptor);
if (characterSet != null) {
return characterSet;
}
// characterset not in the cache, so recreating
characterSet = new CharacterSet(codePageName, encoding, isEDBCS, characterSetName,
accessor, eventProducer);
InputStream inputStream = null;
try {
/**
* Get the code page which contains the character mapping
* information to map the unicode character id to the graphic
* character global identifier.
*/
Map<String, String> codePage;
synchronized (codePagesCache) {
codePage = codePagesCache.get(codePageName);
if (codePage == null) {
codePage = loadCodePage(codePageName, encoding, accessor, eventProducer);
codePagesCache.put(codePageName, codePage);
}
}
inputStream = openInputStream(accessor, characterSetName, eventProducer);
StructuredFieldReader structuredFieldReader = new StructuredFieldReader(inputStream);
// Process D3A689 Font Descriptor
FontDescriptor fontDescriptor = processFontDescriptor(structuredFieldReader);
characterSet.setNominalVerticalSize(fontDescriptor.getNominalFontSizeInMillipoints());
// Process D3A789 Font Control
FontControl fontControl = processFontControl(structuredFieldReader);
if (fontControl != null) {
//process D3AE89 Font Orientation
CharacterSetOrientation[] characterSetOrientations
= processFontOrientation(structuredFieldReader);
int metricNormalizationFactor;
if (fontControl.isRelative()) {
metricNormalizationFactor = 1;
} else {
int dpi = fontControl.getDpi();
metricNormalizationFactor = 1000 * 72000
/ fontDescriptor.getNominalFontSizeInMillipoints() / dpi;
}
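// Illustrative arithmetic: a 10 pt (10000 millipoint) nominal size at 300 dpi
// gives 1000 * 72000 / 10000 / 300 = 24.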
//process D3AC89 Font Position
processFontPosition(structuredFieldReader, characterSetOrientations,
metricNormalizationFactor);
//process D38C89 Font Index (per orientation)
for (int i = 0; i < characterSetOrientations.length; i++) {
processFontIndex(structuredFieldReader,
characterSetOrientations[i], codePage, metricNormalizationFactor);
characterSet.addCharacterSetOrientation(characterSetOrientations[i]);
}
} else {
throw new IOException("Missing D3AE89 Font Control structured field.");
}
} finally {
closeInputStream(inputStream);
}
characterSetsCache.put(descriptor, characterSet);
return characterSet;
}
/**
* Load the code page information from the appropriate file. The file name
* to load is determined by the code page name and the file extension 'CDP'.
*
* @param codePage
* the code page identifier
* @param encoding
* the encoding to use for the character decoding
* @param accessor the resource accessor
* @param eventProducer for handling AFP related events
* @return a code page mapping (key: GCGID, value: Unicode character)
* @throws IOException if an I/O exception of some sort has occurred.
*/
protected Map<String, String> loadCodePage(String codePage, String encoding,
ResourceAccessor accessor, AFPEventProducer eventProducer) throws IOException {
// Create the HashMap to store code page information
Map<String, String> codePages = new HashMap<String, String>();
InputStream inputStream = null;
try {
inputStream = openInputStream(accessor, codePage.trim(), eventProducer);
StructuredFieldReader structuredFieldReader = new StructuredFieldReader(inputStream);
byte[] data = structuredFieldReader.getNext(CHARACTER_TABLE_SF);
int position = 0;
byte[] gcgiBytes = new byte[8];
byte[] charBytes = new byte[1];
// Read data, ignoring bytes 0 - 2
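// Each mapping entry is then consumed as an 8-byte GCGID, one byte that is skipped,
// and a single byte holding the code point in the configured encoding.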
for (int index = 3; index < data.length; index++) {
if (position < 8) {
// Build the graphic character global identifier key
gcgiBytes[position] = data[index];
position++;
} else if (position == 9) {
position = 0;
// Set the character
charBytes[0] = data[index];
String gcgiString = new String(gcgiBytes,
AFPConstants.EBCIDIC_ENCODING);
//Use the 8-bit char index to find the Unicode character using the Java encoding
//given in the configuration. If the code page and the Java encoding don't
//match, a wrong Unicode character will be associated with the AFP GCGID.
//Idea: we could use IBM's GCGID to Unicode map and build code pages ourselves.
String charString = new String(charBytes, encoding);
codePages.put(gcgiString, charString);
} else {
position++;
}
}
} catch (FileNotFoundException e) {
eventProducer.codePageNotFound(this, e);
} finally {
closeInputStream(inputStream);
}
return codePages;
}
/**
* Process the font descriptor details using the structured field reader.
*
* @param structuredFieldReader the structured field reader
* @return a class representing the font descriptor
* @throws IOException if an I/O exception of some sort has occurred.
*/
protected static FontDescriptor processFontDescriptor(
StructuredFieldReader structuredFieldReader)
throws IOException {
byte[] fndData = structuredFieldReader.getNext(FONT_DESCRIPTOR_SF);
return new FontDescriptor(fndData);
}
/**
* Process the font control details using the structured field reader.
*
* @param structuredFieldReader
* the structured field reader
* @return the FontControl
* @throws IOException if an I/O exception of some sort has occurred.
*/
protected FontControl processFontControl(StructuredFieldReader structuredFieldReader)
throws IOException {
byte[] fncData = structuredFieldReader.getNext(FONT_CONTROL_SF);
FontControl fontControl = null;
if (fncData != null) {
fontControl = new FontControl();
if (fncData[7] == (byte) 0x02) {
fontControl.setRelative(true);
}
int metricResolution = getUBIN(fncData, 9);
if (metricResolution == 1000) {
//Special case: 1000 units per em (rather than dpi)
fontControl.setUnitsPerEm(1000);
} else {
fontControl.setDpi(metricResolution / 10);
}
}
return fontControl;
}
/**
* Process the font orientation details using the structured field
* reader.
*
* @param structuredFieldReader
* the structured field reader
* @return CharacterSetOrientation array
* @throws IOException if an I/O exception of some sort has occurred.
*/
protected CharacterSetOrientation[] processFontOrientation(
StructuredFieldReader structuredFieldReader) throws IOException {
byte[] data = structuredFieldReader.getNext(FONT_ORIENTATION_SF);
int position = 0;
byte[] fnoData = new byte[26];
List<CharacterSetOrientation> orientations = new ArrayList<CharacterSetOrientation>();
// Read data, ignoring bytes 0 - 2
for (int index = 3; index < data.length; index++) {
// Build the font orientation record
fnoData[position] = data[index];
position++;
if (position == 26) {
position = 0;
int orientation = determineOrientation(fnoData[2]);
// Space Increment
int space = ((fnoData[8] & 0xFF ) << 8) + (fnoData[9] & 0xFF);
// Em-Space Increment
int em = ((fnoData[14] & 0xFF ) << 8) + (fnoData[15] & 0xFF);
CharacterSetOrientation cso = new CharacterSetOrientation(orientation);
cso.setSpaceIncrement(space);
cso.setEmSpaceIncrement(em);
orientations.add(cso);
}
}
return (CharacterSetOrientation[]) orientations
.toArray(EMPTY_CSO_ARRAY);
}
/**
* Populate the CharacterSetOrientation objects in the supplied array with the
* font position details using the supplied structured field reader.
*
* @param structuredFieldReader
* the structured field reader
* @param characterSetOrientations
* the array of CharacterSetOrientation objects
* @param metricNormalizationFactor factor to apply to the metrics to get normalized
* font metric values
* @throws IOException if an I/O exception of some sort has occurred.
*/
protected void processFontPosition(StructuredFieldReader structuredFieldReader,
CharacterSetOrientation[] characterSetOrientations, double metricNormalizationFactor)
throws IOException {
byte[] data = structuredFieldReader.getNext(FONT_POSITION_SF);
int position = 0;
byte[] fpData = new byte[26];
int characterSetOrientationIndex = 0;
// Read data, ignoring bytes 0 - 2
for (int index = 3; index < data.length; index++) {
if (position < 22) {
// Build the font orientation record
fpData[position] = data[index];
if (position == 9) {
CharacterSetOrientation characterSetOrientation
= characterSetOrientations[characterSetOrientationIndex];
int xHeight = getSBIN(fpData, 2);
int capHeight = getSBIN(fpData, 4);
int ascHeight = getSBIN(fpData, 6);
int dscHeight = getSBIN(fpData, 8);
dscHeight = dscHeight * -1;
characterSetOrientation.setXHeight(
(int)Math.round(xHeight * metricNormalizationFactor));
characterSetOrientation.setCapHeight(
(int)Math.round(capHeight * metricNormalizationFactor));
characterSetOrientation.setAscender(
(int)Math.round(ascHeight * metricNormalizationFactor));
characterSetOrientation.setDescender(
(int)Math.round(dscHeight * metricNormalizationFactor));
}
} else if (position == 22) {
position = 0;
characterSetOrientationIndex++;
fpData[position] = data[index];
}
position++;
}
}
/**
* Process the font index details for the character set orientation.
*
* @param structuredFieldReader the structured field reader
* @param cso the CharacterSetOrientation object to populate
* @param codepage the map of code pages
* @param metricNormalizationFactor factor to apply to the metrics to get normalized
* font metric values
* @throws IOException if an I/O exception of some sort has occurred.
*/
protected void processFontIndex(StructuredFieldReader structuredFieldReader,
CharacterSetOrientation cso, Map<String, String> codepage,
double metricNormalizationFactor)
throws IOException {
byte[] data = structuredFieldReader.getNext(FONT_INDEX_SF);
int position = 0;
byte[] gcgid = new byte[8];
byte[] fiData = new byte[20];
char lowest = 255;
char highest = 0;
String firstABCMismatch = null;
// Read data, ignoring bytes 0 - 2
for (int index = 3; index < data.length; index++) {
if (position < 8) {
gcgid[position] = data[index];
position++;
} else if (position < 27) {
fiData[position - 8] = data[index];
position++;
} else if (position == 27) {
fiData[position - 8] = data[index];
position = 0;
String gcgiString = new String(gcgid, AFPConstants.EBCIDIC_ENCODING);
String idx = (String) codepage.get(gcgiString);
if (idx != null) {
char cidx = idx.charAt(0);
int width = getUBIN(fiData, 0);
int a = getSBIN(fiData, 10);
int b = getUBIN(fiData, 12);
int c = getSBIN(fiData, 14);
int abc = a + b + c;
int diff = Math.abs(abc - width);
if (diff != 0 && width != 0) {
double diffPercent = 100 * diff / (double)width;
if (diffPercent > 2) {
if (LOG.isTraceEnabled()) {
LOG.trace(gcgiString + ": "
+ a + " + " + b + " + " + c + " = " + (a + b + c)
+ " but found: " + width);
}
if (firstABCMismatch == null) {
firstABCMismatch = gcgiString;
}
}
}
if (cidx < lowest) {
lowest = cidx;
}
if (cidx > highest) {
highest = cidx;
}
int normalizedWidth = (int)Math.round(width * metricNormalizationFactor);
cso.setWidth(cidx, normalizedWidth);
}
}
}
cso.setFirstChar(lowest);
cso.setLastChar(highest);
if (LOG.isDebugEnabled() && firstABCMismatch != null) {
            //Debug level because it is usually not a problem.
            LOG.debug("Font has metric inconsistencies where A+B+C doesn't equal the"
+ " character increment. The first such character found: "
+ firstABCMismatch);
}
}
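    /*
     * Worked example of the A+B+C consistency check above (illustrative numbers only):
     * for width = 500, a = 50, b = 420, c = 20 we get abc = 490, diff = 10 and
     * diffPercent = 100 * 10 / 500.0 = 2.0, which does not exceed the 2% threshold, so
     * nothing is traced. With b = 400 instead, diffPercent = 6.0 and the glyph would be
     * recorded as the first A+B+C mismatch.
     */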
private static int getUBIN(byte[] data, int start) {
return ((data[start] & 0xFF) << 8) + (data[start + 1] & 0xFF);
}
private static int getSBIN(byte[] data, int start) {
int ubin = ((data[start] & 0xFF) << 8) + (data[start + 1] & 0xFF);
if ((ubin & 0x8000) != 0) {
//extend sign
return ubin | 0xFFFF0000;
} else {
return ubin;
}
}
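    /*
     * Worked example for the two helpers above (illustrative bytes only):
     *   getUBIN({0x00, 0xC8}, 0) = (0x00 << 8) + 0xC8 = 200
     *   getSBIN({0xFF, 0x38}, 0): ubin = 0xFF38 = 65336; the sign bit is set, so the value
     *   is extended to 0xFFFFFF38, i.e. -200 as a Java int.
     */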
private class FontControl {
private int dpi;
private int unitsPerEm;
private boolean isRelative = false;
public int getDpi() {
return dpi;
}
public void setDpi(int i) {
dpi = i;
}
public int getUnitsPerEm() {
return this.unitsPerEm;
}
public void setUnitsPerEm(int value) {
this.unitsPerEm = value;
}
public boolean isRelative() {
return isRelative;
}
public void setRelative(boolean b) {
isRelative = b;
}
}
private static class FontDescriptor {
private byte[] data;
public FontDescriptor(byte[] data) {
this.data = data;
}
public int getNominalFontSizeInMillipoints() {
int nominalFontSize = 100 * getUBIN(data, 39);
return nominalFontSize;
}
}
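    /*
     * Illustrative arithmetic for getNominalFontSizeInMillipoints() above: the two-byte
     * unsigned value at offset 39 of the FND data appears to hold the size in tenths of a
     * point, so a raw value of 120 yields 100 * 120 = 12000 millipoints, i.e. a 12 pt font.
     * (The offset and unit are inferred from the code above, not from the AFP specification.)
     */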
private static final class SingleByteLoader extends CharacterSetBuilder {
private static final SingleByteLoader INSTANCE = new SingleByteLoader();
private SingleByteLoader() {
super();
}
private static SingleByteLoader getInstance() {
return INSTANCE;
}
}
/**
     * Double-byte (CID-keyed font (Type 0)) implementation of CharacterSetBuilder.
*/
private static final class DoubleByteLoader extends CharacterSetBuilder {
private static final DoubleByteLoader INSTANCE = new DoubleByteLoader();
private DoubleByteLoader() {
}
static DoubleByteLoader getInstance() {
return INSTANCE;
}
protected Map<String, String> loadCodePage(String codePage, String encoding,
ResourceAccessor accessor, AFPEventProducer eventProducer) throws IOException {
// Create the HashMap to store code page information
Map<String, String> codePages = new HashMap<String, String>();
InputStream inputStream = null;
try {
inputStream = openInputStream(accessor, codePage.trim(), eventProducer);
StructuredFieldReader structuredFieldReader
= new StructuredFieldReader(inputStream);
byte[] data;
while ((data = structuredFieldReader.getNext(CHARACTER_TABLE_SF)) != null) {
int position = 0;
byte[] gcgiBytes = new byte[8];
byte[] charBytes = new byte[2];
// Read data, ignoring bytes 0 - 2
for (int index = 3; index < data.length; index++) {
if (position < 8) {
// Build the graphic character global identifier key
gcgiBytes[position] = data[index];
position++;
} else if (position == 9) {
// Set the character
charBytes[0] = data[index];
position++;
} else if (position == 10) {
position = 0;
// Set the character
charBytes[1] = data[index];
String gcgiString = new String(gcgiBytes,
AFPConstants.EBCIDIC_ENCODING);
String charString = new String(charBytes, encoding);
codePages.put(gcgiString, charString);
}
else {
position++;
}
}
}
} catch (FileNotFoundException e) {
eventProducer.codePageNotFound(this, e);
} finally {
closeInputStream(inputStream);
}
return codePages;
}
}
private static int determineOrientation(byte orientation) {
int degrees = 0;
switch (orientation) {
case 0x00:
degrees = 0;
break;
case 0x2D:
degrees = 90;
break;
case 0x5A:
degrees = 180;
break;
case (byte) 0x87:
degrees = 270;
break;
default:
throw new IllegalStateException("Invalid orientation: " + orientation);
}
return degrees;
}
}
|
|
/*Copyright 2014 Divya Anna Marcus
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.*/
/*
* Reference: Android sender for Chromecast: https://developers.google.com/cast/docs/android_sender
* Webserver NanoHTTPD: https://github.com/NanoHttpd/nanohttpd/tree/nanohttpd-for-java1.1
* Media Playback Messages: https://developers.google.com/cast/docs/reference/messages
*
*/
package com.castoffline.castActivity;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Map;
import com.castoffline.mediaactivity.AudioPlayerActivity.Song;
import com.castoffline.mediaactivity.ImageGrid.Image;
import com.castoffline.mediaactivity.VideoPlayerActivity.Video;
import com.castoffline.R;
import com.google.android.gms.cast.ApplicationMetadata;
import com.google.android.gms.cast.Cast;
import com.google.android.gms.cast.Cast.ApplicationConnectionResult;
import com.google.android.gms.cast.CastDevice;
import com.google.android.gms.cast.CastMediaControlIntent;
import com.google.android.gms.cast.MediaInfo;
import com.google.android.gms.cast.MediaMetadata;
import com.google.android.gms.cast.MediaStatus;
import com.google.android.gms.cast.RemoteMediaPlayer;
import com.google.android.gms.cast.RemoteMediaPlayer.MediaChannelResult;
import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.api.GoogleApiClient;
import com.google.android.gms.common.api.ResultCallback;
import com.google.android.gms.common.api.Status;
import com.google.android.gms.common.images.WebImage;
import android.annotation.SuppressLint;
import android.content.Context;
import android.content.Intent;
import android.media.AudioManager;
import android.media.MediaPlayer;
import android.net.Uri;
import android.net.wifi.WifiInfo;
import android.net.wifi.WifiManager;
import android.os.Bundle;
import android.support.v4.view.MenuItemCompat;
import android.support.v7.app.ActionBar;
import android.support.v7.app.ActionBarActivity;
import android.support.v7.app.MediaRouteActionProvider;
import android.support.v7.media.MediaRouteSelector;
import android.support.v7.media.MediaRouter;
import android.support.v7.media.MediaRouter.RouteInfo;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.widget.ImageView;
import android.widget.MediaController;
import android.widget.TextView;
import android.widget.MediaController.MediaPlayerControl;
import android.widget.Toast;
import android.widget.VideoView;
public class CastMedia extends ActionBarActivity implements MediaPlayerControl{
String mediatype;
MediaRouter mMediaRouter;
MediaMetadata mediaMetadata;
public MediaRouteSelector mMediaRouteSelector;
public MediaRouter.Callback mMediaRouterCallback;
public Cast.Listener mCastListener;
public webserver mediaserver;
public GoogleApiClient mApiClient;
public ConnectionCallbacks mConnectionCallbacks;
public ConnectionFailedListener mConnectionFailedListener;
public boolean mApplicationStarted;
public boolean mWaitingForReconnect;
ActionBar actionBar;
private boolean playbackPaused=false;
public String mSessionId;
CustomVideoView videoview;
ImageView imageview,imageView2;
VideoView remoteview;
MediaInfo mediaInfo;
MediaPlayer mMediaPlayer;
MediaStatus mediaStatus;
RemoteMediaPlayer mRemoteMediaPlayer;
public CastDevice mSelectedDevice;
VideoController videocontroller,controller;
public ArrayList<Video> Videos;
public ArrayList<Song> Audios;
public ArrayList<Image> Photos;
public int videoPosn,audioPosn,photoPosn;
public String videoTitle,audioTitle,videoArtist,audioArtist,mimetype,mediaart;
public String path,dateString;
Intent extras;
public static final String TAG = CastMedia.class.getSimpleName();
boolean change;
String ipdevice;
Long date;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
actionBar = getSupportActionBar();
setContentView(R.layout.castmedia);
mediatype=this.getIntent().getType();
extras = this.getIntent();
//Get the ip of the device running NanoHTTPD server
WifiManager wifiManager = (WifiManager) getSystemService(WIFI_SERVICE);
WifiInfo wifiInfo = wifiManager.getConnectionInfo();
int ipAddress = wifiInfo.getIpAddress();
ipdevice=String.format("http://%d.%d.%d.%d:8080",(ipAddress & 0xff),(ipAddress >> 8 & 0xff),(ipAddress >> 16 & 0xff),(ipAddress >> 24 & 0xff));
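        // Worked example of the conversion above (illustrative address only): the masked
        // shifts treat the int returned by WifiInfo.getIpAddress() as little-endian, so for
        // ipAddress == 0x0101A8C0 the four shifts yield 192, 168, 1, 1 and
        // ipdevice becomes "http://192.168.1.1:8080".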
// start the webserver
mediaserver = new webserver();
try {
mediaserver.start();
} catch(IOException ioe) {
Log.d("Httpd", "The server could not start.");
}
//The application needs to obtain an instance of the MediaRouter and needs to hold onto that instance for the lifetime of the sender application
mMediaRouter = MediaRouter.getInstance(getApplicationContext());
        //The MediaRouter needs to filter discovery for Cast devices that can launch the receiver application associated with the sender app. For that, a MediaRouteSelector is created using MediaRouteSelector.Builder
mMediaRouteSelector = new MediaRouteSelector.Builder().addControlCategory(CastMediaControlIntent.categoryForCast(getString(R.string.app_id))).build();
// create an instance of MediaRouterCallback
mMediaRouterCallback = new MyMediaRouterCallback();
mediaplay();
}
/*
* (non-Javadoc)
* @see android.app.Activity#onCreateOptionsMenu(android.view.Menu)
     * Provides the Cast button according to the Google Cast UX Guidelines,
     * using the MediaRouter ActionBar provider: android.support.v7.app.MediaRouteActionProvider
*/
@Override
public boolean onCreateOptionsMenu(Menu menu) {
super.onCreateOptionsMenu(menu);
getMenuInflater().inflate(R.menu.main, menu);
MenuItem mediaRouteMenuItem = menu.findItem(R.id.media_route_menu_item);
MediaRouteActionProvider mediaRouteActionProvider=(MediaRouteActionProvider) MenuItemCompat.getActionProvider(mediaRouteMenuItem);
mediaRouteActionProvider.setRouteSelector(mMediaRouteSelector);
return true;
}
/*
     * Identifies the mediatype that the user has selected and gets the file path and other metadata details
*/
public void mediaplay()
{
switch(mediatype){
case "video" :
Videos = extras.getParcelableArrayListExtra("videolist");
playVideoCast();
break;
case "audio" :
Audios = extras.getParcelableArrayListExtra("songlist");
playAudioCast();
break;
case "photo":
Photos = extras.getParcelableArrayListExtra("imagelist");
playPhotoCast();
break;
default:
}
}
/*
     * By default the video list is displayed in a navigation list layout, hence an Up button is provided to navigate back to the home screen.
     * Gets the corresponding video details that the user wants to display through Chromecast.
*/
@SuppressLint("SimpleDateFormat")
public void playVideoCast(){
        //navigation Up enabled; goes back to the home/main activity screen
actionBar.setDisplayHomeAsUpEnabled(true);
videoPosn=this.getIntent().getFlags();
Video playVideo=Videos.get(videoPosn);
videoTitle=playVideo.getTitle();
        Log.d(TAG, "videoTitle: " + videoTitle);
videoArtist=playVideo.getArtist();
mimetype=playVideo.getMimetype();
path=playVideo.getPath();
setvideoController();
videoview.setVideoPath(path);
videoview.seekTo(100);
Toast.makeText(CastMedia.this, "Connect to chromecast device",Toast.LENGTH_LONG).show();
videoview.requestFocus();
}
/*
     * Gets the corresponding audio details that the user wants to play through Chromecast
*
*/
public void playAudioCast(){
audioPosn=this.getIntent().getFlags();
Song playSong=Audios.get(audioPosn);
audioTitle=playSong.getTitle();
audioArtist=playSong.getArtist();
mimetype=playSong.getMimeType();
path=playSong.getPath();
if(playSong.getAlbumArt()!=null)
{
mediaart="/audio/albumart/"+playSong.getID();
            Log.d(TAG, "album art path: " + mediaart);
}
setvideoController();
videoview.setVideoPath(path);
Toast.makeText(CastMedia.this, "Connect to chromecast device",Toast.LENGTH_LONG).show();
videoview.requestFocus();
}
/*
     * Gets the corresponding image details that the user wants to display through Chromecast
*
*/
public void playPhotoCast(){
photoPosn=this.getIntent().getFlags();
Image playPhoto=Photos.get(photoPosn);
path=playPhoto.getPath();
mimetype=playPhoto.getmimetype();
date=playPhoto.getDate();
java.text.DateFormat formatter = new SimpleDateFormat("MMMM dd, yyyy h:mmaa");
dateString = formatter.format(date);
TextView cast_title = (TextView)findViewById(R.id.cast_title);
TextView cast_artist = (TextView)findViewById(R.id.cast_artist);
cast_title.setText("Connect to Chromecast to View Image");
cast_artist.setText("Date Taken: "+dateString);
Toast.makeText(CastMedia.this, "Connect to chromecast device", Toast.LENGTH_LONG).show();
}
/*
* MediaController for the video view in case user selects audio/video
*/
public class VideoController extends MediaController {
public VideoController(Context c){
super(c);
}
public void hide(){}
}
public void setvideoController(){
videocontroller = new VideoController(this);
videocontroller.setMediaPlayer(this);
videoview=(CustomVideoView)findViewById(R.id.videoView1);
videocontroller.setAnchorView(videoview);
videoview.setMediaController(videocontroller);
videocontroller.setEnabled(true);
}
public class MyMediaRouterCallback extends MediaRouter.Callback {
@Override
public void onRouteSelected(MediaRouter router, RouteInfo info) {
mSelectedDevice = CastDevice.getFromBundle(info.getExtras());
if((mediatype=="video" )|| (mediatype == "audio"))
videoview.pause();
launchReceiver();
}
@Override
public void onRouteUnselected(MediaRouter router, RouteInfo info) {
teardown();
mSelectedDevice = null;
}
}
@Override
protected void onResume() {
super.onResume();
mMediaRouter.addCallback(mMediaRouteSelector, mMediaRouterCallback,MediaRouter.CALLBACK_FLAG_PERFORM_ACTIVE_SCAN);
}
@Override
protected void onPause() {
if (isFinishing()) {
mMediaRouter.removeCallback(mMediaRouterCallback);
}
super.onPause();
}
@Override
protected void onStart() {
super.onStart();
mMediaRouter.addCallback(mMediaRouteSelector, mMediaRouterCallback,MediaRouter.CALLBACK_FLAG_PERFORM_ACTIVE_SCAN);
}
@Override
protected void onStop() {
mMediaRouter.removeCallback(mMediaRouterCallback);
super.onStop();
}
/*
     * Serves the file that the user has chosen via the embedded web server
* Reference :https://github.com/NanoHttpd/nanohttpd/tree/nanohttpd-for-java1.1
*
*/
public class webserver extends NanoHTTPD {
FileInputStream fileInputStream;
public webserver(){
super(8080);
}
@Override
public Response serve(String uri, Method method, Map<String, String> header,Map<String, String> parameters, Map<String, String> files) {
String mediasend=" ";
FileInputStream fis = null;
try {
fis = new FileInputStream(path);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
switch(mediatype){
case "photo":
mediasend="image/jpeg";
break;
case "audio":
mediasend="audio/mp3";
break;
case "video":
mediasend="video/mp4";
break;
}
return new NanoHTTPD.Response(com.castoffline.castActivity.NanoHTTPD.Response.Status.OK,mediasend,fis);
}
}
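    /*
     * How the pieces above fit together (a sketch, not additional behaviour): the MediaInfo
     * built later in ConnectionCallbacks uses ipdevice (http://<device-ip>:8080) as the
     * content URL, so when the Chromecast receiver loads the media it issues an HTTP GET
     * against this NanoHTTPD instance and serve() streams the currently selected file back
     * with the coarse MIME type derived from mediatype.
     */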
@Override
public void onDestroy()
{
super.onDestroy();
if (mediaserver != null)
mediaserver.stop();
}
/*
     * Callback to handle the receiver application on the Chromecast
*/
public void launchReceiver() {
try {
mCastListener = new Cast.Listener() {
@Override
public void onApplicationStatusChanged() {
if (mApiClient != null) {
Log.d(TAG, "onApplicationStatusChanged:"+ Cast.CastApi.getApplicationStatus(mApiClient));
}
}
@Override
public void onVolumeChanged() {
if (mApiClient != null) {
Log.d(TAG, "onVolumeChanged: " + Cast.CastApi.getVolume(mApiClient));
}
}
@Override
public void onApplicationDisconnected(int errorCode) {
Log.d(TAG, "application has stopped");
teardown();
}
};
// Connect to Google Play services
mConnectionCallbacks = new ConnectionCallbacks();
mConnectionFailedListener = new ConnectionFailedListener();
Cast.CastOptions.Builder apiOptionsBuilder = Cast.CastOptions.builder(mSelectedDevice, mCastListener);
mApiClient = new GoogleApiClient.Builder(this).addApi(Cast.API, apiOptionsBuilder.build()).addConnectionCallbacks(mConnectionCallbacks).addOnConnectionFailedListener(mConnectionFailedListener).build();
mApiClient.connect();
} catch (Exception e) {Log.e(TAG, "Failed launchReceiver", e);
}
}
private class ConnectionCallbacks implements GoogleApiClient.ConnectionCallbacks {
@Override
public void onConnected(Bundle connectionHint) { Log.d(TAG, "onConnected");
if (mApiClient == null) { return;}
try {
if (mWaitingForReconnect) {
mWaitingForReconnect = false;
// Check if the receiver app is still running
if ((connectionHint != null)&& connectionHint.getBoolean(Cast.EXTRA_APP_NO_LONGER_RUNNING)) {
Log.d(TAG, "App is no longer running");
teardown();
}
else{ // Re-create the custom message channel
try {
Cast.CastApi.setMessageReceivedCallbacks(mApiClient,mRemoteMediaPlayer.getNamespace(), mRemoteMediaPlayer);
} catch (IOException e) {
Log.e(TAG, "Exception while creating channel", e);
}
}
}
else {
Cast.CastApi.launchApplication(mApiClient,getString(R.string.app_id),false).setResultCallback(new ResultCallback<Cast.ApplicationConnectionResult>() {
@Override
public void onResult(ApplicationConnectionResult result) {
Status status = result.getStatus();
if (status.isSuccess()) {
ApplicationMetadata applicationMetadata = result.getApplicationMetadata();
mSessionId = result.getSessionId();
String applicationStatus = result.getApplicationStatus();
boolean wasLaunched = result.getWasLaunched();
Log.d(TAG,"application name: "+ applicationMetadata.getName()+ ", status: "+ applicationStatus+ ", sessionId: "+ mSessionId+ ", wasLaunched: "+ wasLaunched);
mApplicationStarted = true;
mRemoteMediaPlayer = new RemoteMediaPlayer();
/*
* Identify the mediatype and send the metadata details to media info
*/
switch(mediatype)
{
case "audio" : mediaMetadata = new MediaMetadata(MediaMetadata.MEDIA_TYPE_MUSIC_TRACK);
mediaMetadata.putString(MediaMetadata.KEY_TITLE, "My MUSIC TRACK"+": "+audioTitle);
mediaMetadata.putString(MediaMetadata.KEY_ARTIST,audioArtist);
mediaMetadata.addImage(new WebImage(Uri.parse("https://www.googledrive.com/host/0B61ekPEN_94sZ21mcnQtbVU2RHM/media.png")));
mediaInfo = new MediaInfo.Builder(ipdevice).setContentType(mimetype).setStreamType(MediaInfo.STREAM_TYPE_BUFFERED).setMetadata(mediaMetadata).build();
break;
case "video" : mediaMetadata = new MediaMetadata(MediaMetadata.MEDIA_TYPE_MOVIE);
mediaMetadata.addImage(new WebImage(Uri.parse("https://www.googledrive.com/host/0B61ekPEN_94sZ21mcnQtbVU2RHM/film_reel.png")));
mediaMetadata.putString(MediaMetadata.KEY_TITLE, "My MOVIE"+": "+videoTitle);
mediaInfo = new MediaInfo.Builder(ipdevice).setContentType(mimetype).setStreamType(MediaInfo.STREAM_TYPE_BUFFERED).setMetadata(mediaMetadata).build();
break;
case "photo" : mediaMetadata = new MediaMetadata(MediaMetadata.MEDIA_TYPE_PHOTO);
mediaMetadata.putString(MediaMetadata.KEY_TITLE, "My PHOTO"+": ");
mediaInfo = new MediaInfo.Builder(ipdevice).setContentType(mimetype).setStreamType(MediaInfo.STREAM_TYPE_BUFFERED).setMetadata(mediaMetadata).build();
break;
default:
}
try {
Cast.CastApi.setMessageReceivedCallbacks(mApiClient,mRemoteMediaPlayer.getNamespace(), mRemoteMediaPlayer);
} catch (IOException e) {
Log.d(TAG, "Exception while creating media channel", e);
}
try {
mRemoteMediaPlayer.load(mApiClient, mediaInfo, false).setResultCallback(new ResultCallback<RemoteMediaPlayer.MediaChannelResult>() {
@Override
public void onResult(MediaChannelResult result) {
if (result.getStatus().isSuccess()) {
Log.d(TAG, "Media loaded successfully");
}
}});
/*
                                        * Checks whether the video is playing or paused and plays/pauses it accordingly on the receiver
*/
videoview.setPlayPauseListener(new CustomVideoView.PlayPauseListener() {
AudioManager amanager=(AudioManager)getSystemService(Context.AUDIO_SERVICE);
@Override
public void onPlay() {
playbackPaused=false; //videoView is playing
if(mSelectedDevice!=null && mApiClient != null && mRemoteMediaPlayer != null){
//volume is set to mute if media is casting in Chromecast
amanager.setStreamMute(AudioManager.STREAM_MUSIC, true);
sendMediaControl(playbackPaused,false);
}else{
amanager.setStreamVolume(AudioManager.STREAM_MUSIC, 3,1);
}
}
@Override
public void onPause(){
playbackPaused=true; //videoView is paused
if (mSelectedDevice != null && mApiClient != null && mRemoteMediaPlayer != null){
amanager.setStreamMute(AudioManager.STREAM_MUSIC, false);
sendMediaControl(playbackPaused,false);
}else{
amanager.setStreamVolume(AudioManager.STREAM_MUSIC, 3,1); }
}
                                        /* Currently the seek function is not working for media playback while casting
* (non-Javadoc)
* @see com.castoffline.castActivity.CustomVideoView.PlayPauseListener#onSeekChanged(int)
*/
@Override
public void onSeekChanged(int pos){
                                            Log.d(TAG, "seek inside: " + videoview.getCurrentPosition());
seek(videoview.getCurrentPosition());
}
});
} catch (IllegalStateException e) {
Log.d(TAG, "Problem occurred with media during loading", e);
} catch (Exception e) {
Log.d(TAG, "Problem opening media during loading", e);}
} else {
Log.e(TAG,"application could not launch");
teardown();
}
}
});
}
} catch (Exception e) {
Log.e(TAG, "Failed to launch application", e);}
}
@Override
public void onConnectionSuspended(int cause) {
mWaitingForReconnect = true;
}
}
// Google Play services callbacks
private class ConnectionFailedListener implements GoogleApiClient.OnConnectionFailedListener {
@Override
public void onConnectionFailed(ConnectionResult result) {
teardown();
}
}
//Tear down the connection to the receiver
private void teardown() {
Log.d(TAG, "teardown");
if (mApiClient != null) {
if (mApplicationStarted) {
if (mApiClient.isConnected()) {
try {
Cast.CastApi.stopApplication(mApiClient, mSessionId);
if (mRemoteMediaPlayer != null) {
Cast.CastApi.removeMessageReceivedCallbacks(mApiClient,mRemoteMediaPlayer.getNamespace());
mRemoteMediaPlayer = null;
}
} catch (IOException e) {
Log.e(TAG, "Exception while removing channel", e);
}
mApiClient.disconnect();
}
mApplicationStarted = false;
}
mApiClient = null;
}
mSelectedDevice = null;
mWaitingForReconnect = false;
mSessionId = null;
}
private void sendMediaControl(final boolean playbackPaused,final boolean change)
{
if (mApiClient != null && mRemoteMediaPlayer != null){
mRemoteMediaPlayer.requestStatus(mApiClient).setResultCallback( new ResultCallback<RemoteMediaPlayer.MediaChannelResult>() {
@Override
public void onResult(RemoteMediaPlayer.MediaChannelResult mediaChannelResult) {
                    if (playbackPaused) {
mRemoteMediaPlayer.pause(mApiClient);
}else{
mRemoteMediaPlayer.play(mApiClient);
}
}
});
}
}
/*
     * Currently the seek function is not working
*/
public void seek(final int position)
{
Log.d("seek","seek");
if (mApiClient != null && mRemoteMediaPlayer != null){
videoview.pause();
mRemoteMediaPlayer.seek(mApiClient,position).setResultCallback(new ResultCallback<RemoteMediaPlayer.MediaChannelResult>(){
@Override
public void onResult(MediaChannelResult result) {
if (result.getStatus().isSuccess()) {
                        Log.d(TAG, "No seek, status code: " + result.getStatus().getStatusCode());
}
}
});
mRemoteMediaPlayer.setOnStatusUpdatedListener(new RemoteMediaPlayer.OnStatusUpdatedListener(){
@Override
public void onStatusUpdated() {
@SuppressWarnings("unused")
MediaStatus mediaStatus = mRemoteMediaPlayer.getMediaStatus();
}
});
}
}
//MediaPlayerControl Methods
@Override
public void start() {
// TODO Auto-generated method stub
}
@Override
public void pause() {
// TODO Auto-generated method stub
}
@Override
public int getDuration() {
// TODO Auto-generated method stub
return 0;
}
@Override
public int getCurrentPosition() {
// TODO Auto-generated method stub
return 0;
}
@Override
public void seekTo(int pos) {
// TODO Auto-generated method stub
}
@Override
public boolean isPlaying() {
// TODO Auto-generated method stub
return false;
}
@Override
public int getBufferPercentage() {
// TODO Auto-generated method stub
return 0;
}
@Override
public boolean canPause() {
// TODO Auto-generated method stub
return false;
}
@Override
public boolean canSeekBackward() {
// TODO Auto-generated method stub
return false;
}
@Override
public boolean canSeekForward() {
// TODO Auto-generated method stub
return false;
}
@Override
public int getAudioSessionId() {
// TODO Auto-generated method stub
return 0;
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.query.h2.twostep.msg;
import java.nio.ByteBuffer;
import java.util.List;
import java.util.UUID;
import org.apache.ignite.internal.GridDirectCollection;
import org.apache.ignite.internal.util.typedef.internal.S;
import org.apache.ignite.plugin.extensions.communication.Message;
import org.apache.ignite.plugin.extensions.communication.MessageCollectionItemType;
import org.apache.ignite.plugin.extensions.communication.MessageReader;
import org.apache.ignite.plugin.extensions.communication.MessageWriter;
/**
* Range response message.
*/
public class GridH2IndexRangeResponse implements Message {
/** */
public static final byte STATUS_OK = 0;
/** */
public static final byte STATUS_ERROR = 1;
/** */
public static final byte STATUS_NOT_FOUND = 2;
/** */
private UUID originNodeId;
/** */
private long qryId;
/** */
private int batchLookupId;
/** */
@GridDirectCollection(Message.class)
private List<GridH2RowRange> ranges;
/** */
private byte status;
/** */
private String err;
/**
* @param ranges Ranges.
*/
public void ranges(List<GridH2RowRange> ranges) {
this.ranges = ranges;
}
/**
* @return Ranges.
*/
public List<GridH2RowRange> ranges() {
return ranges;
}
/**
* @return Origin node ID.
*/
public UUID originNodeId() {
return originNodeId;
}
/**
* @param originNodeId Origin node ID.
*/
public void originNodeId(UUID originNodeId) {
this.originNodeId = originNodeId;
}
/**
* @return Query ID.
*/
public long queryId() {
return qryId;
}
/**
* @param qryId Query ID.
*/
public void queryId(long qryId) {
this.qryId = qryId;
}
/**
* @param err Error message.
*/
public void error(String err) {
this.err = err;
}
/**
* @return Error message or {@code null} if everything is ok.
*/
public String error() {
return err;
}
/**
* @param status Status.
*/
public void status(byte status) {
this.status = status;
}
/**
* @return Status.
*/
public byte status() {
return status;
}
/**
* @param batchLookupId Batch lookup ID.
*/
public void batchLookupId(int batchLookupId) {
this.batchLookupId = batchLookupId;
}
/**
* @return Batch lookup ID.
*/
public int batchLookupId() {
return batchLookupId;
}
/** {@inheritDoc} */
@Override public boolean writeTo(ByteBuffer buf, MessageWriter writer) {
writer.setBuffer(buf);
if (!writer.isHeaderWritten()) {
if (!writer.writeHeader(directType(), fieldsCount()))
return false;
writer.onHeaderWritten();
}
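        // The cases below intentionally fall through (no break): writer.state() records how
        // many fields have already been written, so when the buffer fills and writeTo() is
        // called again with a fresh buffer, writing resumes at the first unwritten field and
        // continues through the remaining cases.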
switch (writer.state()) {
case 0:
if (!writer.writeInt("batchLookupId", batchLookupId))
return false;
writer.incrementState();
case 1:
if (!writer.writeString("err", err))
return false;
writer.incrementState();
case 2:
if (!writer.writeUuid("originNodeId", originNodeId))
return false;
writer.incrementState();
case 3:
if (!writer.writeLong("qryId", qryId))
return false;
writer.incrementState();
case 4:
if (!writer.writeCollection("ranges", ranges, MessageCollectionItemType.MSG))
return false;
writer.incrementState();
case 5:
if (!writer.writeByte("status", status))
return false;
writer.incrementState();
}
return true;
}
/** {@inheritDoc} */
@Override public boolean readFrom(ByteBuffer buf, MessageReader reader) {
reader.setBuffer(buf);
if (!reader.beforeMessageRead())
return false;
switch (reader.state()) {
case 0:
batchLookupId = reader.readInt("batchLookupId");
if (!reader.isLastRead())
return false;
reader.incrementState();
case 1:
err = reader.readString("err");
if (!reader.isLastRead())
return false;
reader.incrementState();
case 2:
originNodeId = reader.readUuid("originNodeId");
if (!reader.isLastRead())
return false;
reader.incrementState();
case 3:
qryId = reader.readLong("qryId");
if (!reader.isLastRead())
return false;
reader.incrementState();
case 4:
ranges = reader.readCollection("ranges", MessageCollectionItemType.MSG);
if (!reader.isLastRead())
return false;
reader.incrementState();
case 5:
status = reader.readByte("status");
if (!reader.isLastRead())
return false;
reader.incrementState();
}
return reader.afterMessageRead(GridH2IndexRangeResponse.class);
}
/** {@inheritDoc} */
@Override public byte directType() {
return -31;
}
/** {@inheritDoc} */
@Override public byte fieldsCount() {
return 6;
}
/** {@inheritDoc} */
@Override public void onAckReceived() {
// No-op.
}
/** {@inheritDoc} */
@Override public String toString() {
return S.toString(GridH2IndexRangeResponse.class, this, "rangesSize", ranges == null ? null : ranges.size());
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.graph.library;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.io.LocalCollectionOutputFormat;
import org.apache.flink.graph.Edge;
import org.apache.flink.graph.Graph;
import org.apache.flink.graph.Vertex;
import org.apache.flink.graph.asm.translate.TranslateEdgeValues;
import org.apache.flink.graph.asm.translate.TranslateFunction;
import org.apache.flink.graph.asm.translate.TranslateVertexValues;
import org.apache.flink.graph.asm.translate.translators.ToNullValue;
import org.apache.flink.graph.examples.data.SummarizationData;
import org.apache.flink.graph.library.Summarization.EdgeValue;
import org.apache.flink.test.util.MultipleProgramsTestBase;
import org.apache.flink.types.NullValue;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.regex.Pattern;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
/**
* Tests for {@link Summarization}.
*/
@RunWith(Parameterized.class)
public class SummarizationITCase extends MultipleProgramsTestBase {
private static final Pattern TOKEN_SEPARATOR = Pattern.compile(";");
private static final Pattern ID_SEPARATOR = Pattern.compile(",");
public SummarizationITCase(TestExecutionMode mode) {
super(mode);
}
@Test
public void testWithVertexAndEdgeStringValues() throws Exception {
ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
Graph<Long, String, String> input = Graph.fromDataSet(
SummarizationData.getVertices(env),
SummarizationData.getEdges(env),
env);
List<Vertex<Long, Summarization.VertexValue<String>>> summarizedVertices = new ArrayList<>();
List<Edge<Long, EdgeValue<String>>> summarizedEdges = new ArrayList<>();
Graph<Long, Summarization.VertexValue<String>, EdgeValue<String>> output =
input.run(new Summarization<>());
output.getVertices().output(new LocalCollectionOutputFormat<>(summarizedVertices));
output.getEdges().output(new LocalCollectionOutputFormat<>(summarizedEdges));
env.execute();
validateVertices(SummarizationData.EXPECTED_VERTICES, summarizedVertices);
validateEdges(SummarizationData.EXPECTED_EDGES_WITH_VALUES, summarizedEdges);
}
@Test
public void testWithVertexAndAbsentEdgeStringValues() throws Exception {
ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
Graph<Long, String, NullValue> input = Graph.fromDataSet(
SummarizationData.getVertices(env),
SummarizationData.getEdges(env),
env)
.run(new TranslateEdgeValues<>(new ToNullValue<>()));
List<Vertex<Long, Summarization.VertexValue<String>>> summarizedVertices = new ArrayList<>();
List<Edge<Long, EdgeValue<NullValue>>> summarizedEdges = new ArrayList<>();
Graph<Long, Summarization.VertexValue<String>, EdgeValue<NullValue>> output =
input.run(new Summarization<>());
output.getVertices().output(new LocalCollectionOutputFormat<>(summarizedVertices));
output.getEdges().output(new LocalCollectionOutputFormat<>(summarizedEdges));
env.execute();
validateVertices(SummarizationData.EXPECTED_VERTICES, summarizedVertices);
validateEdges(SummarizationData.EXPECTED_EDGES_ABSENT_VALUES, summarizedEdges);
}
@Test
public void testWithVertexAndEdgeLongValues() throws Exception {
ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
Graph<Long, Long, Long> input = Graph.fromDataSet(
SummarizationData.getVertices(env),
SummarizationData.getEdges(env),
env)
.run(new TranslateVertexValues<>(new StringToLong()))
.run(new TranslateEdgeValues<>(new StringToLong()));
List<Vertex<Long, Summarization.VertexValue<Long>>> summarizedVertices = new ArrayList<>();
List<Edge<Long, EdgeValue<Long>>> summarizedEdges = new ArrayList<>();
Graph<Long, Summarization.VertexValue<Long>, EdgeValue<Long>> output =
input.run(new Summarization<>());
output.getVertices().output(new LocalCollectionOutputFormat<>(summarizedVertices));
output.getEdges().output(new LocalCollectionOutputFormat<>(summarizedEdges));
env.execute();
validateVertices(SummarizationData.EXPECTED_VERTICES, summarizedVertices);
validateEdges(SummarizationData.EXPECTED_EDGES_WITH_VALUES, summarizedEdges);
}
private <VV extends Comparable<VV>> void validateVertices(String[] expectedVertices, List<Vertex<Long, Summarization.VertexValue<VV>>> actualVertices) {
Arrays.sort(expectedVertices);
Collections.sort(actualVertices, new Comparator<Vertex<Long, Summarization.VertexValue<VV>>>() {
@Override
public int compare(Vertex<Long, Summarization.VertexValue<VV>> o1, Vertex<Long, Summarization.VertexValue<VV>> o2) {
int result = o1.getId().compareTo(o2.getId());
if (result == 0) {
result = o1.getValue().getVertexGroupValue().compareTo(o2.getValue().getVertexGroupValue());
}
return result;
}
});
for (int i = 0; i < expectedVertices.length; i++) {
validateVertex(expectedVertices[i], actualVertices.get(i));
}
}
private <EV extends Comparable<EV>> void validateEdges(String[] expectedEdges, List<Edge<Long, EdgeValue<EV>>> actualEdges) {
Arrays.sort(expectedEdges);
Collections.sort(actualEdges, new Comparator<Edge<Long, EdgeValue<EV>>> () {
@Override
public int compare(Edge<Long, EdgeValue<EV>> o1, Edge<Long, EdgeValue<EV>> o2) {
int result = o1.getSource().compareTo(o2.getSource());
if (result == 0) {
result = o1.getTarget().compareTo(o2.getTarget());
}
if (result == 0) {
result = o1.getValue().getEdgeGroupValue().compareTo(o2.getValue().getEdgeGroupValue());
}
return result;
}
});
for (int i = 0; i < expectedEdges.length; i++) {
validateEdge(expectedEdges[i], actualEdges.get(i));
}
}
private <VV> void validateVertex(String expected, Vertex<Long, Summarization.VertexValue<VV>> actual) {
String[] tokens = TOKEN_SEPARATOR.split(expected);
assertTrue(getListFromIdRange(tokens[0]).contains(actual.getId()));
assertEquals(getGroupValue(tokens[1]), actual.getValue().getVertexGroupValue().toString());
assertEquals(getGroupCount(tokens[1]), actual.getValue().getVertexGroupCount());
}
private <EV> void validateEdge(String expected, Edge<Long, EdgeValue<EV>> actual) {
String[] tokens = TOKEN_SEPARATOR.split(expected);
assertTrue(getListFromIdRange(tokens[0]).contains(actual.getSource()));
assertTrue(getListFromIdRange(tokens[1]).contains(actual.getTarget()));
assertEquals(getGroupValue(tokens[2]), actual.getValue().getEdgeGroupValue().toString());
assertEquals(getGroupCount(tokens[2]), actual.getValue().getEdgeGroupCount());
}
private List<Long> getListFromIdRange(String idRange) {
String[] split = ID_SEPARATOR.split(idRange);
List<Long> result = new ArrayList<>(split.length);
for (String id : split) {
result.add(Long.parseLong(id));
}
return result;
}
private String getGroupValue(String token) {
return ID_SEPARATOR.split(token)[0];
}
private Long getGroupCount(String token) {
return Long.valueOf(ID_SEPARATOR.split(token)[1]);
}
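	/*
	 * A sketch of the expected-string layout assumed by the validators above, derived from
	 * the parsing code rather than from SummarizationData itself (values are illustrative):
	 *   vertex: "<id>,<id>,...;<groupValue>,<groupCount>"           e.g. "0,1,2;A,3"
	 *   edge:   "<sourceIds>;<targetIds>;<groupValue>,<groupCount>" e.g. "0,1;3,4;X,2"
	 */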
private static class StringToLong implements TranslateFunction<String, Long> {
@Override
public Long translate(String value, Long reuse) throws Exception {
return Long.parseLong(value);
}
}
}
|
|
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.applicationdiscovery.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
* <p>
* Inventory data for installed discovery agents.
* </p>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class CustomerAgentInfo implements Serializable, Cloneable, StructuredPojo {
/**
* <p>
* Number of active discovery agents.
* </p>
*/
private Integer activeAgents;
/**
* <p>
* Number of healthy discovery agents
* </p>
*/
private Integer healthyAgents;
/**
* <p>
* Number of blacklisted discovery agents.
* </p>
*/
private Integer blackListedAgents;
/**
* <p>
* Number of discovery agents with status SHUTDOWN.
* </p>
*/
private Integer shutdownAgents;
/**
* <p>
* Number of unhealthy discovery agents.
* </p>
*/
private Integer unhealthyAgents;
/**
* <p>
* Total number of discovery agents.
* </p>
*/
private Integer totalAgents;
/**
* <p>
* Number of unknown discovery agents.
* </p>
*/
private Integer unknownAgents;
/**
* <p>
* Number of active discovery agents.
* </p>
*
* @param activeAgents
* Number of active discovery agents.
*/
public void setActiveAgents(Integer activeAgents) {
this.activeAgents = activeAgents;
}
/**
* <p>
* Number of active discovery agents.
* </p>
*
* @return Number of active discovery agents.
*/
public Integer getActiveAgents() {
return this.activeAgents;
}
/**
* <p>
* Number of active discovery agents.
* </p>
*
* @param activeAgents
* Number of active discovery agents.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CustomerAgentInfo withActiveAgents(Integer activeAgents) {
setActiveAgents(activeAgents);
return this;
}
/**
* <p>
* Number of healthy discovery agents
* </p>
*
* @param healthyAgents
* Number of healthy discovery agents
*/
public void setHealthyAgents(Integer healthyAgents) {
this.healthyAgents = healthyAgents;
}
/**
* <p>
* Number of healthy discovery agents
* </p>
*
* @return Number of healthy discovery agents
*/
public Integer getHealthyAgents() {
return this.healthyAgents;
}
/**
* <p>
* Number of healthy discovery agents
* </p>
*
* @param healthyAgents
* Number of healthy discovery agents
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CustomerAgentInfo withHealthyAgents(Integer healthyAgents) {
setHealthyAgents(healthyAgents);
return this;
}
/**
* <p>
* Number of blacklisted discovery agents.
* </p>
*
* @param blackListedAgents
* Number of blacklisted discovery agents.
*/
public void setBlackListedAgents(Integer blackListedAgents) {
this.blackListedAgents = blackListedAgents;
}
/**
* <p>
* Number of blacklisted discovery agents.
* </p>
*
* @return Number of blacklisted discovery agents.
*/
public Integer getBlackListedAgents() {
return this.blackListedAgents;
}
/**
* <p>
* Number of blacklisted discovery agents.
* </p>
*
* @param blackListedAgents
* Number of blacklisted discovery agents.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CustomerAgentInfo withBlackListedAgents(Integer blackListedAgents) {
setBlackListedAgents(blackListedAgents);
return this;
}
/**
* <p>
* Number of discovery agents with status SHUTDOWN.
* </p>
*
* @param shutdownAgents
* Number of discovery agents with status SHUTDOWN.
*/
public void setShutdownAgents(Integer shutdownAgents) {
this.shutdownAgents = shutdownAgents;
}
/**
* <p>
* Number of discovery agents with status SHUTDOWN.
* </p>
*
* @return Number of discovery agents with status SHUTDOWN.
*/
public Integer getShutdownAgents() {
return this.shutdownAgents;
}
/**
* <p>
* Number of discovery agents with status SHUTDOWN.
* </p>
*
* @param shutdownAgents
* Number of discovery agents with status SHUTDOWN.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CustomerAgentInfo withShutdownAgents(Integer shutdownAgents) {
setShutdownAgents(shutdownAgents);
return this;
}
/**
* <p>
* Number of unhealthy discovery agents.
* </p>
*
* @param unhealthyAgents
* Number of unhealthy discovery agents.
*/
public void setUnhealthyAgents(Integer unhealthyAgents) {
this.unhealthyAgents = unhealthyAgents;
}
/**
* <p>
* Number of unhealthy discovery agents.
* </p>
*
* @return Number of unhealthy discovery agents.
*/
public Integer getUnhealthyAgents() {
return this.unhealthyAgents;
}
/**
* <p>
* Number of unhealthy discovery agents.
* </p>
*
* @param unhealthyAgents
* Number of unhealthy discovery agents.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CustomerAgentInfo withUnhealthyAgents(Integer unhealthyAgents) {
setUnhealthyAgents(unhealthyAgents);
return this;
}
/**
* <p>
* Total number of discovery agents.
* </p>
*
* @param totalAgents
* Total number of discovery agents.
*/
public void setTotalAgents(Integer totalAgents) {
this.totalAgents = totalAgents;
}
/**
* <p>
* Total number of discovery agents.
* </p>
*
* @return Total number of discovery agents.
*/
public Integer getTotalAgents() {
return this.totalAgents;
}
/**
* <p>
* Total number of discovery agents.
* </p>
*
* @param totalAgents
* Total number of discovery agents.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CustomerAgentInfo withTotalAgents(Integer totalAgents) {
setTotalAgents(totalAgents);
return this;
}
/**
* <p>
* Number of unknown discovery agents.
* </p>
*
* @param unknownAgents
* Number of unknown discovery agents.
*/
public void setUnknownAgents(Integer unknownAgents) {
this.unknownAgents = unknownAgents;
}
/**
* <p>
* Number of unknown discovery agents.
* </p>
*
* @return Number of unknown discovery agents.
*/
public Integer getUnknownAgents() {
return this.unknownAgents;
}
/**
* <p>
* Number of unknown discovery agents.
* </p>
*
* @param unknownAgents
* Number of unknown discovery agents.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CustomerAgentInfo withUnknownAgents(Integer unknownAgents) {
setUnknownAgents(unknownAgents);
return this;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getActiveAgents() != null)
sb.append("ActiveAgents: ").append(getActiveAgents()).append(",");
if (getHealthyAgents() != null)
sb.append("HealthyAgents: ").append(getHealthyAgents()).append(",");
if (getBlackListedAgents() != null)
sb.append("BlackListedAgents: ").append(getBlackListedAgents()).append(",");
if (getShutdownAgents() != null)
sb.append("ShutdownAgents: ").append(getShutdownAgents()).append(",");
if (getUnhealthyAgents() != null)
sb.append("UnhealthyAgents: ").append(getUnhealthyAgents()).append(",");
if (getTotalAgents() != null)
sb.append("TotalAgents: ").append(getTotalAgents()).append(",");
if (getUnknownAgents() != null)
sb.append("UnknownAgents: ").append(getUnknownAgents());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof CustomerAgentInfo == false)
return false;
CustomerAgentInfo other = (CustomerAgentInfo) obj;
if (other.getActiveAgents() == null ^ this.getActiveAgents() == null)
return false;
if (other.getActiveAgents() != null && other.getActiveAgents().equals(this.getActiveAgents()) == false)
return false;
if (other.getHealthyAgents() == null ^ this.getHealthyAgents() == null)
return false;
if (other.getHealthyAgents() != null && other.getHealthyAgents().equals(this.getHealthyAgents()) == false)
return false;
if (other.getBlackListedAgents() == null ^ this.getBlackListedAgents() == null)
return false;
if (other.getBlackListedAgents() != null && other.getBlackListedAgents().equals(this.getBlackListedAgents()) == false)
return false;
if (other.getShutdownAgents() == null ^ this.getShutdownAgents() == null)
return false;
if (other.getShutdownAgents() != null && other.getShutdownAgents().equals(this.getShutdownAgents()) == false)
return false;
if (other.getUnhealthyAgents() == null ^ this.getUnhealthyAgents() == null)
return false;
if (other.getUnhealthyAgents() != null && other.getUnhealthyAgents().equals(this.getUnhealthyAgents()) == false)
return false;
if (other.getTotalAgents() == null ^ this.getTotalAgents() == null)
return false;
if (other.getTotalAgents() != null && other.getTotalAgents().equals(this.getTotalAgents()) == false)
return false;
if (other.getUnknownAgents() == null ^ this.getUnknownAgents() == null)
return false;
if (other.getUnknownAgents() != null && other.getUnknownAgents().equals(this.getUnknownAgents()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getActiveAgents() == null) ? 0 : getActiveAgents().hashCode());
hashCode = prime * hashCode + ((getHealthyAgents() == null) ? 0 : getHealthyAgents().hashCode());
hashCode = prime * hashCode + ((getBlackListedAgents() == null) ? 0 : getBlackListedAgents().hashCode());
hashCode = prime * hashCode + ((getShutdownAgents() == null) ? 0 : getShutdownAgents().hashCode());
hashCode = prime * hashCode + ((getUnhealthyAgents() == null) ? 0 : getUnhealthyAgents().hashCode());
hashCode = prime * hashCode + ((getTotalAgents() == null) ? 0 : getTotalAgents().hashCode());
hashCode = prime * hashCode + ((getUnknownAgents() == null) ? 0 : getUnknownAgents().hashCode());
return hashCode;
}
@Override
public CustomerAgentInfo clone() {
try {
return (CustomerAgentInfo) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
}
}
@com.amazonaws.annotation.SdkInternalApi
@Override
public void marshall(ProtocolMarshaller protocolMarshaller) {
com.amazonaws.services.applicationdiscovery.model.transform.CustomerAgentInfoMarshaller.getInstance().marshall(this, protocolMarshaller);
}
}
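/*
 * A minimal usage sketch (not part of the generated SDK source; the counts are illustrative):
 * the with* mutators return this, so an instance can be populated fluently:
 *
 *   CustomerAgentInfo info = new CustomerAgentInfo()
 *       .withTotalAgents(10)
 *       .withActiveAgents(7)
 *       .withHealthyAgents(6)
 *       .withUnhealthyAgents(1)
 *       .withShutdownAgents(1)
 *       .withUnknownAgents(1);
 */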
|
|
/*
* Copyright 2015 Torridity.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.tor.tribes.util.html;
import de.tor.tribes.io.DataHolder;
import de.tor.tribes.util.*;
import de.tor.tribes.io.ServerManager;
import de.tor.tribes.io.UnitHolder;
import de.tor.tribes.types.ext.Ally;
import de.tor.tribes.types.Attack;
import de.tor.tribes.types.StandardAttack;
import de.tor.tribes.types.ext.Tribe;
import de.tor.tribes.util.attack.StandardAttackManager;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileWriter;
import java.io.InputStreamReader;
import java.text.NumberFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
import java.util.regex.Matcher;
import org.apache.log4j.Logger;
/**
* @author Charon
*/
public class AttackPlanHTMLExporter {
private static Logger logger = Logger.getLogger("AttackHTMLExporter");
private static String HEADER = "";
private static String FOOTER = "";
private static String BLOCK = "";
private static boolean TEMPLATE_ERROR = false;
//header and footer variables
private static final String CREATOR = "\\$CREATOR";
private static final String SERVER = "\\$SERVER";
private static final String PLANNAME = "\\$PLANNAME";
private static final String ATTACK_COUNT = "\\$ATTACK_COUNT";
private static final String VERSION = "\\$VERSION";
private static final String CREATION_DATE = "\\$CREATION_DATE";
//block variables
private static final String ID = "\\$ID";
private static final String DIV_CLASS = "\\$DIV_CLASS";
private static final String TYPE = "\\$TYPE";
private static final String UNIT = "\\$UNIT";
private static final String SEND_TIME = "\\$SEND_TIME";
private static final String ARRIVE_TIME = "\\$ARRIVE_TIME";
private static final String PLACE = "\\$PLACE";
//source variables
private static final String SOURCE_PLAYER_LINK = "\\$SOURCE_PLAYER_LINK";
private static final String SOURCE_PLAYER_NAME = "\\$SOURCE_PLAYER_NAME";
private static final String SOURCE_ALLY_LINK = "\\$SOURCE_ALLY_LINK";
private static final String SOURCE_ALLY_NAME = "\\$SOURCE_ALLY_NAME";
private static final String SOURCE_ALLY_TAG = "\\$SOURCE_ALLY_TAG";
private static final String SOURCE_VILLAGE_LINK = "\\$SOURCE_VILLAGE_LINK";
private static final String SOURCE_VILLAGE_NAME = "\\$SOURCE_VILLAGE_NAME";
private static final String SOURCE_VILLAGE_COORD = "\\$SOURCE_VILLAGE_COORD";
//target variables
private static final String TARGET_PLAYER_LINK = "\\$TARGET_PLAYER_LINK";
private static final String TARGET_PLAYER_NAME = "\\$TARGET_PLAYER_NAME";
private static final String TARGET_ALLY_LINK = "\\$TARGET_ALLY_LINK";
private static final String TARGET_ALLY_NAME = "\\$TARGET_ALLY_NAME";
private static final String TARGET_ALLY_TAG = "\\$TARGET_ALLY_TAG";
private static final String TARGET_VILLAGE_LINK = "\\$TARGET_VILLAGE_LINK";
private static final String TARGET_VILLAGE_NAME = "\\$TARGET_VILLAGE_NAME";
private static final String TARGET_VILLAGE_COORD = "\\$TARGET_VILLAGE_COORD";
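    /*
     * A short note on the placeholder constants above (illustrative, not from the original
     * source): each constant is a regular expression ("\\$" escapes the dollar sign), so a
     * template fragment such as
     *   <td>$SOURCE_VILLAGE_COORD</td>
     * becomes, after b.replaceAll(SOURCE_VILLAGE_COORD, "500|500"),
     *   <td>500|500</td>
     * where "500|500" stands for whatever coordinate the attack's source village reports.
     */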
static {
loadCustomTemplate();
}
public static void loadCustomTemplate() {
try {
HEADER = "";
BLOCK = "";
FOOTER = "";
String header = GlobalOptions.getProperty("attack.template.header");
String block = GlobalOptions.getProperty("attack.template.block");
String footer = GlobalOptions.getProperty("attack.template.footer");
if (header == null) {
header = "ThisFileDoesNotExist";
}
if (block == null) {
block = "ThisFileDoesNotExist";
}
if (footer == null) {
footer = "ThisFileDoesNotExist";
}
File fHeader = new File(header);
File fBlock = new File(block);
File fFooter = new File(footer);
BufferedReader r = null;
if (!fHeader.exists()) {
r = new BufferedReader(new InputStreamReader(
new FileInputStream(new File(
GlobalDefaults.getProperty("attack.template.header.internal")))));
} else {
r = new BufferedReader(new InputStreamReader(new FileInputStream(header)));
}
String line = "";
while ((line = r.readLine()) != null) {
HEADER += line + "\n";
}
r.close();
if (!fBlock.exists()) {
r = new BufferedReader(new InputStreamReader(
new FileInputStream(new File(
GlobalDefaults.getProperty("attack.template.block.internal")))));
} else {
r = new BufferedReader(new InputStreamReader(new FileInputStream(block)));
}
line = "";
while ((line = r.readLine()) != null) {
BLOCK += line + "\n";
}
r.close();
if (!fFooter.exists()) {
r = new BufferedReader(new InputStreamReader(
new FileInputStream(new File(
GlobalDefaults.getProperty("attack.template.footer.internal")))));
} else {
r = new BufferedReader(new InputStreamReader(new FileInputStream(footer)));
}
line = "";
while ((line = r.readLine()) != null) {
FOOTER += line + "\n";
}
r.close();
} catch (Exception e) {
logger.error("Failed to read custom templates. Switch to default template.", e);
loadDefaultTemplate();
}
}
private static void loadDefaultTemplate() {
try {
HEADER = "";
BLOCK = "";
FOOTER = "";
BufferedReader r = new BufferedReader(new InputStreamReader(AttackPlanHTMLExporter.class.getResourceAsStream("/de/tor/tribes/tmpl/attack_header.tmpl")));
String line = "";
while ((line = r.readLine()) != null) {
HEADER += line + "\n";
}
r.close();
r = new BufferedReader(new InputStreamReader(AttackPlanHTMLExporter.class.getResourceAsStream("/de/tor/tribes/tmpl/attack_block.tmpl")));
line = "";
while ((line = r.readLine()) != null) {
BLOCK += line + "\n";
}
r.close();
r = new BufferedReader(new InputStreamReader(AttackPlanHTMLExporter.class.getResourceAsStream("/de/tor/tribes/tmpl/attack_footer.tmpl")));
line = "";
while ((line = r.readLine()) != null) {
FOOTER += line + "\n";
}
r.close();
} catch (Exception e) {
logger.error("Failed to read templates", e);
TEMPLATE_ERROR = true;
}
}
public static void doExport(File pHtmlFile, String pPlanName, List<Attack> pAttacks) {
if (TEMPLATE_ERROR) {
logger.warn("Skip writing HTML file due to TEMPLATE_ERROR flag");
return;
}
SimpleDateFormat f = new SimpleDateFormat("dd.MM.yyyy HH:mm:ss");
StringBuilder result = new StringBuilder();
//append header
result.append(replaceHeadFootVariables(HEADER, pPlanName, pAttacks));
int cnt = 0;
for (Attack a : pAttacks) {
String b = BLOCK;
// <editor-fold defaultstate="collapsed" desc="Replace DIV-IDs">
if (cnt % 2 == 0) {
b = b.replaceAll(DIV_CLASS, "odd_div");
} else {
b = b.replaceAll(DIV_CLASS, "even_div");
}
b = b.replaceAll(ID, Integer.toString(cnt));
// </editor-fold>
// <editor-fold defaultstate="collapsed" desc="Replace Unit Icons">
UnitHolder unit = a.getUnit();
b = b.replaceAll(UNIT, "<img src=\"http://torridity.de/tools/res/" + unit.getPlainName() + ".png\">");
switch (a.getType()) {
case Attack.CLEAN_TYPE: {
b = b.replaceAll(TYPE, "<img src=\"http://torridity.de/tools/res/att.png\">");
break;
}
case Attack.SNOB_TYPE: {
b = b.replaceAll(TYPE, "<img src=\"http://torridity.de/tools/res/snob.png\">");
break;
}
case Attack.FAKE_TYPE: {
b = b.replaceAll(TYPE, "<img src=\"http://torridity.de/tools/res/fake.png\">");
break;
}
case Attack.FAKE_DEFF_TYPE: {
b = b.replaceAll(TYPE, "<img src=\"http://torridity.de/tools/res/def_fake.png\">");
break;
}
case Attack.SUPPORT_TYPE: {
b = b.replaceAll(TYPE, "<img src=\"http://torridity.de/tools/res/ally.png\">");
break;
}
default: {
b = b.replaceAll(TYPE, "-");
break;
}
}
// </editor-fold>
String baseURL = ServerManager.getServerURL(GlobalOptions.getSelectedServer()) + "/";
// <editor-fold defaultstate="collapsed" desc=" replace source tribe and ally">
Tribe sourceTribe = a.getSource().getTribe();
String sourceTribeName = "";
String sourceTribeLink = "";
String sourceAllyName = "";
String sourceAllyTag = "";
String sourceAllyLink = "";
String sourceVillageName = "";
String sourceVillageCoord = "";
String sourceVillageLink = "";
if (sourceTribe == null) {
//tribe is null, so it is a barbarian village
sourceTribeName = "Barbaren";
sourceAllyName = "Barbaren";
} else {
sourceTribeLink = baseURL;
sourceTribeLink += "guest.php?screen=info_player&id=" + sourceTribe.getId();
sourceTribeName = sourceTribe.getName();
//replace source tribe
Ally sourceAlly = sourceTribe.getAlly();
if (sourceAlly == null) {
//tribe has no ally
sourceAllyName = "Kein Stamm";
} else {
//ally valid
sourceAllyName = sourceAlly.getName();
sourceAllyTag = sourceAlly.getTag();
sourceAllyLink = baseURL;
sourceAllyLink += "guest.php?screen=info_ally&id=" + sourceAlly.getId();
}
}
//replace source village
sourceVillageLink = baseURL;
sourceVillageLink += "guest.php?screen=info_village&id=" + a.getSource().getId();
sourceVillageName = a.getSource().getFullName();
sourceVillageCoord = a.getSource().getCoordAsString();
//replace values
b = b.replaceAll(SOURCE_PLAYER_NAME, Matcher.quoteReplacement(sourceTribeName));
b = b.replaceAll(SOURCE_PLAYER_LINK, sourceTribeLink);
b = b.replaceAll(SOURCE_ALLY_NAME, Matcher.quoteReplacement(sourceAllyName));
b = b.replaceAll(SOURCE_ALLY_TAG, Matcher.quoteReplacement(sourceAllyTag));
b = b.replaceAll(SOURCE_ALLY_LINK, sourceAllyLink);
b = b.replaceAll(SOURCE_VILLAGE_NAME, Matcher.quoteReplacement(sourceVillageName));
b = b.replaceAll(SOURCE_VILLAGE_COORD, sourceVillageCoord);
b = b.replaceAll(SOURCE_VILLAGE_LINK, sourceVillageLink);
//</editor-fold>
// <editor-fold defaultstate="collapsed" desc=" replace target tribe and ally">
Tribe targetTribe = a.getTarget().getTribe();
String targetTribeName = "";
String targetTribeLink = "";
String targetAllyName = "";
String targetAllyTag = "";
String targetAllyLink = "";
String targetVillageName = "";
String targetVillageCoord = "";
String targetVillageLink = "";
if (targetTribe == null) {
//tribe is null, so it is a barbarian village
targetTribeName = "Barbaren";
targetAllyName = "Barbaren";
} else {
targetTribeLink = baseURL;
targetTribeLink += "guest.php?screen=info_player&id=" + targetTribe.getId();
targetTribeName = targetTribe.getName();
//replace target tribe
Ally targetAlly = targetTribe.getAlly();
if (targetAlly == null) {
//tribe has no ally
targetAllyName = "Kein Stamm";
} else {
//ally valid
targetAllyName = targetAlly.getName();
targetAllyTag = targetAlly.getTag();
targetAllyLink = baseURL;
targetAllyLink += "guest.php?screen=info_ally&id=" + targetAlly.getId();
}
}
//replace target village
targetVillageLink = baseURL;
targetVillageLink += "guest.php?screen=info_village&id=" + a.getTarget().getId();
targetVillageName = a.getTarget().getFullName();
targetVillageCoord = a.getTarget().getCoordAsString();
//replace values
b = b.replaceAll(TARGET_PLAYER_NAME, Matcher.quoteReplacement(targetTribeName));
b = b.replaceAll(TARGET_PLAYER_LINK, targetTribeLink);
b = b.replaceAll(TARGET_ALLY_NAME, Matcher.quoteReplacement(targetAllyName));
b = b.replaceAll(TARGET_ALLY_TAG, Matcher.quoteReplacement(targetAllyTag));
b = b.replaceAll(TARGET_ALLY_LINK, targetAllyLink);
b = b.replaceAll(TARGET_VILLAGE_NAME, Matcher.quoteReplacement(targetVillageName));
b = b.replaceAll(TARGET_VILLAGE_COORD, targetVillageCoord);
b = b.replaceAll(TARGET_VILLAGE_LINK, targetVillageLink);
//</editor-fold>
// <editor-fold defaultstate="collapsed" desc="Replace times and place URL">
//replace arrive time
b = b.replaceAll(ARRIVE_TIME, f.format(a.getArriveTime()));
//replace send time
b = b.replaceAll(SEND_TIME, f.format(a.getSendTime()));
//replace place link
String placeURL = baseURL + "game.php?village=";
int uvID = GlobalOptions.getSelectedProfile().getUVId();
if (uvID >= 0) {
placeURL = baseURL + "game.php?t=" + uvID + "&village=";
}
placeURL += a.getSource().getId() + "&screen=place&mode=command&target=" + a.getTarget().getId();
placeURL += "&type=0";
StandardAttack stdAttack = StandardAttackManager.getSingleton().getElementByIcon(a.getType());
for (UnitHolder u : DataHolder.getSingleton().getUnits()) {
int amount = 0;
if (stdAttack != null) {
amount = stdAttack.getTroops().getAmountForUnit(u, a.getSource());
}
placeURL += "&" + u.getPlainName() + "=" + amount;
}
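// Resulting URL shape (illustrative only; <...> are placeholders, the "t=<uvID>&" part
// is present only when uvID >= 0):
//   <server>/game.php?t=<uvID>&village=<sourceId>&screen=place&mode=command&target=<targetId>&type=0&<unit>=<amount>...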
b = b.replaceAll(PLACE, placeURL);
// </editor-fold>
result.append(b);
cnt++;
}
//append footer
result.append(replaceHeadFootVariables(FOOTER, pPlanName, pAttacks));
try {
FileWriter w = new FileWriter(pHtmlFile);
w.write(result.toString());
w.flush();
w.close();
} catch (Exception e) {
logger.error("Failed writing HTML file", e);
}
}
private static String replaceHeadFootVariables(String pBlock, String pPlanName, List<Attack> pAttacks) {
String result = pBlock;
//set creator
Tribe user = GlobalOptions.getSelectedProfile().getTribe();
if (user != null) {
result = result.replaceAll(CREATOR, Matcher.quoteReplacement(user.getName()));
} else {
result = result.replaceAll(CREATOR, "-");
}
//set planname
if (pPlanName != null) {
result = result.replaceAll(PLANNAME, EscapeChars.forHTML(pPlanName));
} else {
result = result.replaceAll(PLANNAME, "-");
}
//set attack count
NumberFormat nf = NumberFormat.getInstance();
nf.setMinimumFractionDigits(0);
nf.setMaximumFractionDigits(0);
result = result.replaceAll(ATTACK_COUNT, nf.format(pAttacks.size()));
//set server
String server = GlobalOptions.getSelectedServer();
if (server != null) {
result = result.replaceAll(SERVER, server);
} else {
result = result.replaceAll(SERVER, "-");
}
//replace version
result = result.replaceAll(VERSION, Double.toString(Constants.VERSION) + Constants.VERSION_ADDITION);
//replace creation date
SimpleDateFormat f = new SimpleDateFormat("dd.MM.yyyy 'um' HH:mm:ss 'Uhr'");
result = result.replaceAll(CREATION_DATE, f.format(new Date(System.currentTimeMillis())));
return result;
}
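/*
 * Note on substitution: all placeholder replacement above uses String.replaceAll(), so
 * replacement values that may contain '$' or '\' (player, ally and village names) are
 * wrapped in Matcher.quoteReplacement() to keep them literal. The small main() below
 * appears to be a quick manual check of exactly that behavior.
 */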
public static void main(String[] args) {
String test = "%P%";
System.out.println(test.replaceAll("%P%", Matcher.quoteReplacement("$test$")));
}
}
|
|
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.java.psi.resolve;
import com.intellij.navigation.NavigationItem;
import com.intellij.openapi.util.RecursionManager;
import com.intellij.psi.*;
import com.intellij.psi.infos.MethodCandidateInfo;
import com.intellij.psi.util.PsiTreeUtil;
import org.jetbrains.annotations.NonNls;
import static org.hamcrest.core.IsInstanceOf.instanceOf;
import static org.junit.Assert.assertThat;
/**
* @author dsl
*/
public class ResolveMethod15Test extends Resolve15TestCase {
public void testStaticImportOnDemand() throws Exception {
final PsiReference ref = configureByFile();
final PsiElement element = ref.resolve();
assertNotNull(element);
assertThat(element, instanceOf(PsiMethod.class));
final PsiMethod method = (PsiMethod)element;
assertEquals("asList", method.getName());
assertEquals("java.util.Arrays", method.getContainingClass().getQualifiedName());
}
public void testStaticImportHidden() throws Exception {
final PsiJavaReference ref = (PsiJavaReference)configureByFile();
final JavaResolveResult result = ref.advancedResolve(false);
assertFalse(result.isValidResult());
final PsiElement element = result.getElement();
assertNotNull(element);
assertThat(element, instanceOf(PsiMethod.class));
final PsiMethod method = (PsiMethod)element;
assertEquals(CommonClassNames.JAVA_LANG_OBJECT, method.getContainingClass().getQualifiedName());
}
public void testStaticImportDirect() throws Exception {
final PsiReference ref = configureByFile();
final PsiElement element = ref.resolve();
assertNotNull(element);
assertThat(element, instanceOf(PsiMethod.class));
final PsiMethod method = (PsiMethod)element;
assertEquals("asList", method.getName());
assertEquals("java.util.Arrays", method.getContainingClass().getQualifiedName());
assertThat(ref, instanceOf(PsiReferenceExpression.class));
final PsiReferenceExpression refExpr = (PsiReferenceExpression)ref;
final JavaResolveResult[] resolveResults = refExpr.multiResolve(false);
assertEquals(1, resolveResults.length);
final JavaResolveResult resolveResult = resolveResults[0];
assertTrue(resolveResult.isValidResult());
assertThat(resolveResult.getCurrentFileResolveScope(), instanceOf(PsiImportStaticStatement.class));
assertThat(resolveResult, instanceOf(MethodCandidateInfo.class));
final MethodCandidateInfo methodCandidateInfo = (MethodCandidateInfo)resolveResult;
assertTrue(methodCandidateInfo.isApplicable());
}
public void testStaticImportConflict() throws Exception {
final PsiReference ref = configureByFile();
final PsiElement element = ref.resolve();
assertNotNull(element);
assertThat(element, instanceOf(PsiMethod.class));
final PsiMethod method = (PsiMethod)element;
assertEquals("sort", method.getName());
assertEquals("java.util.Collections", method.getContainingClass().getQualifiedName());
assertThat(ref, instanceOf(PsiReferenceExpression.class));
final PsiReferenceExpression refExpr = (PsiReferenceExpression)ref;
final JavaResolveResult[] resolveResults = refExpr.multiResolve(false);
assertEquals(1, resolveResults.length);
final JavaResolveResult resolveResult = resolveResults[0];
assertFalse(resolveResult.isValidResult());
assertThat(resolveResult.getCurrentFileResolveScope(), instanceOf(PsiImportStaticStatement.class));
assertThat(resolveResult, instanceOf(MethodCandidateInfo.class));
final MethodCandidateInfo methodCandidateInfo = (MethodCandidateInfo)resolveResult;
assertFalse(methodCandidateInfo.isApplicable());
}
public void testStaticImportConflict1() throws Exception {
final PsiJavaCodeReferenceElement ref = (PsiJavaCodeReferenceElement)configureByFile();
final JavaResolveResult result = ref.advancedResolve(false);
PsiElement element = result.getElement();
assertTrue(!result.isValidResult());
assertThat(element, instanceOf(PsiMethod.class));
final PsiMethod method = (PsiMethod)element;
PsiMethod parentMethod = PsiTreeUtil.getParentOfType(ref.getElement(), PsiMethod.class);
assertEquals(method, parentMethod);
}
public void testStaticImportConflict3() throws Exception {
final PsiJavaCodeReferenceElement ref = (PsiJavaCodeReferenceElement)configureByFile();
final JavaResolveResult result = ref.advancedResolve(false);
assertResolvesToMethodInClass(result, "ToImportX2");
}
public void testGenericsAndVarargsNoConflict() throws Exception {
final PsiReference ref = configureByFile();
final PsiElement element = ref.resolve();
assertNotNull(element);
assertThat(element, instanceOf(PsiMethod.class));
final PsiMethod method = (PsiMethod)element;
assertEquals("method", method.getName());
assertEquals(0, method.getTypeParameters().length);
assertThat(ref, instanceOf(PsiReferenceExpression.class));
final PsiReferenceExpression refExpr = (PsiReferenceExpression)ref;
final JavaResolveResult[] resolveResults = refExpr.multiResolve(false);
assertEquals(1, resolveResults.length);
final JavaResolveResult resolveResult = resolveResults[0];
assertTrue(resolveResult.isValidResult());
assertThat(resolveResult, instanceOf(MethodCandidateInfo.class));
final MethodCandidateInfo methodCandidateInfo = (MethodCandidateInfo)resolveResult;
assertTrue(methodCandidateInfo.isApplicable());
}
//JLS3 15.2.8 hack
public void testGetClass() throws Exception {
final PsiReference ref = configureByFile();
assertThat(ref, instanceOf(PsiReferenceExpression.class));
final PsiReferenceExpression refExpr = (PsiReferenceExpression)ref;
PsiType type = ((PsiExpression)refExpr.getParent()).getType();
assertEquals("java.lang.Class<? extends java.lang.String>", type.getCanonicalText());
}
public void testToString() throws Exception {
final PsiReference ref = configureByFile();
assertThat(ref, instanceOf(PsiReferenceExpression.class));
final PsiReferenceExpression refExpr = (PsiReferenceExpression)ref;
final PsiElement resolve = refExpr.resolve();
assertTrue(resolve != null ? resolve.toString() : null, resolve instanceof PsiMethod);
final PsiClass containingClass = ((PsiMethod)resolve).getContainingClass();
assertTrue(containingClass != null ? containingClass.getName() : null, containingClass instanceof PsiAnonymousClass);
}
public void testListEquals() throws Exception {
final PsiReference ref = configureByFile();
assertThat(ref, instanceOf(PsiReferenceExpression.class));
final PsiReferenceExpression refExpr = (PsiReferenceExpression)ref;
final PsiElement resolve = refExpr.resolve();
assertTrue(resolve != null ? resolve.toString() : null, resolve instanceof PsiMethod);
final PsiClass containingClass = ((PsiMethod)resolve).getContainingClass();
assertNotNull(containingClass);
assertTrue(containingClass.toString(), CommonClassNames.JAVA_UTIL_LIST.equals(containingClass.getQualifiedName()));
}
public void testCovariantReturnTypeAnonymous() throws Exception {
final PsiReference ref = configureByFile();
assertThat(ref, instanceOf(PsiReferenceExpression.class));
final PsiReferenceExpression refExpr = (PsiReferenceExpression)ref;
final PsiElement resolve = refExpr.resolve();
assertTrue(resolve != null ? resolve.toString() : null, resolve instanceof PsiMethod);
final PsiClass containingClass = ((PsiMethod)resolve).getContainingClass();
assertTrue(containingClass != null ? containingClass.getName() : null, !(containingClass instanceof PsiAnonymousClass));
}
public void testNonPublicAnonymous() throws Exception {
final PsiReference ref = configureByFile();
assertThat(ref, instanceOf(PsiReferenceExpression.class));
final PsiReferenceExpression refExpr = (PsiReferenceExpression)ref;
final PsiElement resolve = refExpr.resolve();
assertTrue(resolve != null ? resolve.toString() : null, resolve instanceof PsiMethod);
final PsiClass containingClass = ((PsiMethod)resolve).getContainingClass();
assertTrue(containingClass != null ? containingClass.getName() : null, !(containingClass instanceof PsiAnonymousClass));
}
public void testFilterFixedVsVarargs1() throws Exception {
final PsiReference ref = configureByFile();
assertThat(ref, instanceOf(PsiReferenceExpression.class));
final PsiReferenceExpression refExpr = (PsiReferenceExpression)ref;
PsiMethodCallExpression call = (PsiMethodCallExpression) refExpr.getParent();
assertNull(call.resolveMethod());
}
public void testFilterFixedVsVarargs2() throws Exception {
final PsiReference ref = configureByFile();
assertThat(ref, instanceOf(PsiReferenceExpression.class));
final PsiReferenceExpression refExpr = (PsiReferenceExpression)ref;
PsiMethodCallExpression call = (PsiMethodCallExpression) refExpr.getParent();
assertNull(call.resolveMethod());
}
public void testFilterFixedVsVarargs3() throws Exception {
final PsiReference ref = configureByFile();
assertThat(ref, instanceOf(PsiReferenceExpression.class));
final PsiReferenceExpression refExpr = (PsiReferenceExpression)ref;
PsiMethodCallExpression call = (PsiMethodCallExpression) refExpr.getParent();
assertNull(call.resolveMethod());
}
public void testFilterFixedVsVarargs4() throws Exception {
final PsiReference ref = configureByFile();
assertThat(ref, instanceOf(PsiReferenceExpression.class));
final PsiReferenceExpression refExpr = (PsiReferenceExpression)ref;
PsiMethodCallExpression call = (PsiMethodCallExpression) refExpr.getParent();
JavaResolveResult resolveResult = call.resolveMethodGenerics();
assertNotNull(resolveResult.getElement());
assertTrue(resolveResult.isValidResult());
}
public void testFilterFixedVsVarargs5() throws Exception {
final PsiReference ref = configureByFile();
assertThat(ref, instanceOf(PsiReferenceExpression.class));
final PsiReferenceExpression refExpr = (PsiReferenceExpression)ref;
PsiMethodCallExpression call = (PsiMethodCallExpression) refExpr.getParent();
JavaResolveResult resolveResult = call.resolveMethodGenerics();
PsiElement element = resolveResult.getElement();
assertNotNull(element);
assertTrue(resolveResult.isValidResult());
assertTrue(!((PsiMethod) element).isVarArgs());
}
public void testFilterFixedVsVarargs6() throws Exception {
final PsiReference ref = configureByFile();
assertThat(ref, instanceOf(PsiReferenceExpression.class));
final PsiReferenceExpression refExpr = (PsiReferenceExpression)ref;
PsiMethodCallExpression call = (PsiMethodCallExpression) refExpr.getParent();
JavaResolveResult resolveResult = call.resolveMethodGenerics();
PsiElement element = resolveResult.getElement();
assertNotNull(element);
assertTrue(resolveResult.isValidResult());
assertTrue(((PsiMethod) element).isVarArgs());
}
public void testFilterFixedVsVarargs7() throws Exception {
final PsiReference ref = configureByFile();
assertThat(ref, instanceOf(PsiReferenceExpression.class));
final PsiReferenceExpression refExpr = (PsiReferenceExpression)ref;
PsiMethodCallExpression call = (PsiMethodCallExpression) refExpr.getParent();
JavaResolveResult resolveResult = call.resolveMethodGenerics();
PsiElement element = resolveResult.getElement();
assertNotNull(element);
assertTrue(resolveResult.isValidResult());
assertTrue(!((PsiMethod) element).isVarArgs());
}
public void testFilterFixedVsVarargs8() throws Exception {
final PsiReference ref = configureByFile();
assertThat(ref, instanceOf(PsiReferenceExpression.class));
final PsiReferenceExpression refExpr = (PsiReferenceExpression)ref;
PsiCallExpression call = (PsiCallExpression) refExpr.getParent();
JavaResolveResult resolveResult = call.resolveMethodGenerics();
PsiElement element = resolveResult.getElement();
assertNotNull(element);
assertTrue(resolveResult.isValidResult());
assertTrue(!((PsiMethod) element).isVarArgs());
}
public void testFilterFixedVsVarargs9() throws Exception {
RecursionManager.assertOnRecursionPrevention(getTestRootDisposable());
final PsiReference ref = configureByFile();
assertThat(ref, instanceOf(PsiReferenceExpression.class));
final PsiReferenceExpression refExpr = (PsiReferenceExpression)ref;
PsiCallExpression call = (PsiCallExpression) refExpr.getParent();
JavaResolveResult resolveResult = call.resolveMethodGenerics();
PsiElement element = resolveResult.getElement();
assertNotNull(element);
assertTrue(resolveResult.isValidResult());
assertTrue(((PsiMethod) element).isVarArgs());
}
public void testFilterBoxing1() throws Exception {
final PsiReference ref = configureByFile();
assertThat(ref, instanceOf(PsiReferenceExpression.class));
final PsiReferenceExpression refExpr = (PsiReferenceExpression)ref;
PsiCallExpression call = (PsiCallExpression) refExpr.getParent();
JavaResolveResult resolveResult = call.resolveMethodGenerics();
PsiElement element = resolveResult.getElement();
assertNotNull(element);
assertTrue(resolveResult.isValidResult());
final PsiMethod method = (PsiMethod)element;
assertEquals(PsiType.BOOLEAN, method.getParameterList().getParameters()[1].getType());
}
public void testFilterVarargsVsVarargs1() throws Exception {
final PsiReference ref = configureByFile();
assertThat(ref, instanceOf(PsiReferenceExpression.class));
final PsiReferenceExpression refExpr = (PsiReferenceExpression)ref;
PsiCallExpression call = (PsiCallExpression) refExpr.getParent();
JavaResolveResult resolveResult = call.resolveMethodGenerics();
PsiElement element = resolveResult.getElement();
assertNotNull(element);
assertTrue(resolveResult.isValidResult());
assertEquals(3, ((PsiMethod)element).getParameterList().getParametersCount());
}
public void testFilterVarargsVsVarargs2() throws Exception {
final PsiReference ref = configureByFile();
assertThat(ref, instanceOf(PsiReferenceExpression.class));
final PsiReferenceExpression refExpr = (PsiReferenceExpression)ref;
PsiCallExpression call = (PsiCallExpression) refExpr.getParent();
JavaResolveResult resolveResult = call.resolveMethodGenerics();
assertNull(resolveResult.getElement());
assertFalse(resolveResult.isValidResult());
final JavaResolveResult[] candidates = refExpr.multiResolve(false);
assertEquals(2, candidates.length);
}
public void testFilterVarargsVsVarargs3() throws Exception {
final PsiReference ref = configureByFile();
assertThat(ref, instanceOf(PsiReferenceExpression.class));
final PsiReferenceExpression refExpr = (PsiReferenceExpression)ref;
PsiCallExpression call = (PsiCallExpression) refExpr.getParent();
JavaResolveResult resolveResult = call.resolveMethodGenerics();
assertNotNull(resolveResult.getElement());
assertFalse(resolveResult.isValidResult());
final JavaResolveResult[] candidates = refExpr.multiResolve(false);
assertEquals(1, candidates.length);
}
public void testFilterVarargsVsVarargs4() throws Exception {
final PsiReference ref = configureByFile();
assertThat(ref, instanceOf(PsiReferenceExpression.class));
final PsiReferenceExpression refExpr = (PsiReferenceExpression)ref;
PsiCallExpression call = (PsiCallExpression) refExpr.getParent();
JavaResolveResult resolveResult = call.resolveMethodGenerics();
assertNull(resolveResult.getElement());
assertFalse(resolveResult.isValidResult());
final JavaResolveResult[] candidates = refExpr.multiResolve(false);
assertEquals(2, candidates.length);
}
//IDEADEV-3313
public void testCovariantReturnTypes() throws Exception {
final PsiReference ref = configureByFile();
assertThat(ref, instanceOf(PsiReferenceExpression.class));
final PsiReferenceExpression refExpr = (PsiReferenceExpression)ref;
final PsiElement parent = refExpr.getParent();
assertThat(parent, instanceOf(PsiMethodCallExpression.class));
final PsiMethod method = ((PsiCall)parent).resolveMethod();
assertNotNull(method);
assertEquals("E", method.getContainingClass().getName());
}
public void testGenericMethods1() throws Exception {
final PsiReference ref = configureByFile();
assertThat(ref, instanceOf(PsiReferenceExpression.class));
final PsiReferenceExpression refExpr = (PsiReferenceExpression)ref;
final PsiElement parent = refExpr.getParent();
assertThat(parent, instanceOf(PsiMethodCallExpression.class));
final PsiMethodCallExpression expression = (PsiMethodCallExpression)parent;
assertNull(expression.resolveMethod());
final JavaResolveResult[] results = expression.getMethodExpression().multiResolve(false);
assertEquals(2, results.length);
}
public void testGenericMethods2() throws Exception {
final PsiReference ref = configureByFile();
final PsiMethod method = checkResolvesUnique(ref);
assertEquals(0, method.getTypeParameters().length);
}
public void testGenericMethods3() throws Exception {
final PsiReference ref = configureByFile();
final PsiMethod method = checkResolvesUnique(ref);
assertEquals(0, method.getTypeParameters().length);
}
public void testGenericMethods4() throws Exception {
final PsiReference ref = configureByFile();
final PsiMethod method = checkResolvesUnique(ref);
assertEquals(0, method.getTypeParameters().length);
}
public void testGenericMethods5() throws Exception {
final PsiReference ref = configureByFile();
final PsiMethod method = checkResolvesUnique(ref);
assertEquals(2, method.getTypeParameters().length);
}
public void testGenericMethods6() throws Exception {
final PsiReference ref = configureByFile();
checkResolvesUnique(ref);
}
public void testGenericClass1() throws Exception {
final PsiReference ref = configureByFile();
final PsiMethod method = checkResolvesUnique(ref);
assertEquals("Foo", method.getContainingClass().getName());
}
public void testGenericClass2() throws Exception {
final PsiReference ref = configureByFile();
final PsiMethod method = checkResolvesUnique(ref);
assertEquals(0, method.getTypeParameters().length);
}
public void testMoreSpecificSameErasure() throws Exception {
final PsiReference ref = configureByFile();
final PsiMethod method = checkResolvesUnique(ref);
assertEquals(0, method.getTypeParameters().length);
}
private PsiReference configureByFile() throws Exception {
return configureByFile("method/generics/" + getTestName(false) + ".java");
}
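// Fixture files for these tests are loaded from method/generics/<TestName>.java (see
// configureByFile above); the path is presumably resolved against the test data root
// provided by the base test case.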
private static PsiMethod checkResolvesUnique(final PsiReference ref) {
assertThat(ref, instanceOf(PsiReferenceExpression.class));
final PsiReferenceExpression refExpr = (PsiReferenceExpression)ref;
final PsiElement parent = refExpr.getParent();
assertThat(parent, instanceOf(PsiMethodCallExpression.class));
final PsiMethodCallExpression expression = (PsiMethodCallExpression)parent;
final PsiMethod method = expression.resolveMethod();
assertNotNull(method);
return method;
}
public void testTestGenericMethodOverloading1() throws Exception{
PsiReference ref = configureByFile();
PsiElement target = ref.resolve();
assertThat(target, instanceOf(PsiMethod.class));
assertThat(target.getParent(), instanceOf(PsiClass.class));
assertEquals("Object", ((NavigationItem)target.getParent()).getName());
}
public void testPreferArrayTypeToObject() throws Exception {
PsiReference ref = configureByFile();
PsiElement target = ref.resolve();
assertThat(target, instanceOf(PsiMethod.class));
assertThat(target.getParent(), instanceOf(PsiClass.class));
final PsiParameter[] parameters = ((PsiMethod)target).getParameterList().getParameters();
assertEquals(1, parameters.length);
assertTrue(parameters[0].getType() instanceof PsiArrayType);
}
public void testTestGenericMethodOverloading2() throws Exception{
PsiReference ref = configureByFile();
PsiElement target = ref.resolve();
assertThat(target, instanceOf(PsiMethod.class));
assertThat(target.getParent(), instanceOf(PsiClass.class));
assertEquals("A", ((NavigationItem)target.getParent()).getName());
}
public void testTestGenericMethodOverloading3() throws Exception{
PsiReference ref = configureByFile();
PsiElement target = ref.resolve();
assertThat(target, instanceOf(PsiMethod.class));
assertThat(target.getParent(), instanceOf(PsiClass.class));
assertEquals("Object", ((NavigationItem)target.getParent()).getName());
}
public void testTestGenericMethodOverloading4() throws Exception{
PsiReference ref = configureByFile();
PsiElement target = ref.resolve();
assertThat(target, instanceOf(PsiMethod.class));
assertThat(target.getParent(), instanceOf(PsiClass.class));
assertEquals("A", ((NavigationItem)target.getParent()).getName());
}
public void testTestReturnType1() throws Exception{
PsiReference ref = configureByFile();
PsiElement target = ref.resolve();
assertThat(target, instanceOf(PsiMethod.class));
}
public void testTestReturnType2() throws Exception{
PsiReference ref = configureByFile();
PsiElement target = ref.resolve();
assertNull(target);
}
public void testMerge1() throws Exception{
PsiReference ref = configureByFile();
PsiElement target = ref.resolve();
assertNull(target);
}
public void testExtends1() throws Exception{
PsiReference ref = configureByFile();
PsiElement target = ref.resolve();
assertThat(target, instanceOf(PsiMethod.class));
}
public void testInheritance1() throws Exception{
PsiReference ref = configureByFile();
PsiElement target = ref.resolve();
assertThat(target, instanceOf(PsiMethod.class));
}
public void testInheritance2() throws Exception{
PsiReference ref = configureByFile();
PsiElement target = ref.resolve();
assertThat(target, instanceOf(PsiMethod.class));
}
public void testInheritance3() throws Exception{
PsiReference ref = configureByFile();
PsiElement target = ref.resolve();
assertThat(target, instanceOf(PsiMethod.class));
}
public void testInheritance4() throws Exception{
RecursionManager.assertOnRecursionPrevention(getTestRootDisposable());
PsiReference ref = configureByFile();
PsiElement target = ref.resolve();
assertThat(target, instanceOf(PsiMethod.class));
}
public void testExplicitParams1() throws Exception {
PsiReference ref = configureByFile();
assertGenericResolve(ref, "f", new String[] {"java.lang.String"}, "java.lang.String");
}
public void testExplicitParams2() throws Exception {
PsiReference ref = configureByFile();
assertGenericResolve(ref, "f", new String[] {"java.lang.Integer"}, "Foo");
}
public void testConstructorExplicitParams() throws Exception {
PsiReference ref = configureByFile();
assertThat(ref.getElement(), instanceOf(PsiJavaCodeReferenceElement.class));
assertThat(ref.getElement().getParent(), instanceOf(PsiNewExpression.class));
}
private static void assertGenericResolve(PsiReference ref, final String methodName, final String[] expectedTypeParameterValues, @NonNls final String expectedCallType) {
PsiElement target = ref.resolve();
assertThat(target, instanceOf(PsiMethod.class));
PsiMethod psiMethod = (PsiMethod)target;
assertEquals(methodName, psiMethod.getName());
assertThat(ref.getElement(), instanceOf(PsiJavaCodeReferenceElement.class));
PsiJavaCodeReferenceElement refElement = (PsiJavaCodeReferenceElement)ref.getElement();
JavaResolveResult resolveResult = refElement.advancedResolve(false);
PsiSubstitutor substitutor = resolveResult.getSubstitutor();
PsiTypeParameter[] typeParameters = psiMethod.getTypeParameters();
assertEquals(expectedTypeParameterValues.length, typeParameters.length);
for (int i = 0; i < expectedTypeParameterValues.length; i++) {
String expectedTypeParameterValue = expectedTypeParameterValues[i];
assertTrue(substitutor.substitute(typeParameters[i]).equalsToText(expectedTypeParameterValue));
}
PsiType type = ((PsiExpression)refElement.getParent()).getType();
assertTrue(type.equalsToText(expectedCallType));
}
public void testRawMethod1() throws Exception{
PsiJavaReference ref = (PsiJavaReference) configureByFile();
PsiElement target = ref.resolve();
assertThat(target, instanceOf(PsiMethod.class));
}
public void testDependingParams2() throws Exception{
PsiJavaReference ref = (PsiJavaReference) configureByFile();
final JavaResolveResult result = ref.advancedResolve(true);
assertTrue(result.isValidResult());
}
public void testTypeInference1() throws Exception{
PsiJavaReference ref = (PsiJavaReference) configureByFile();
final JavaResolveResult result = ref.advancedResolve(true);
assertNotNull(result.getElement());
}
public void testRawVsGenericConflict() throws Exception{
PsiJavaReference ref = (PsiJavaReference) configureByFile();
final JavaResolveResult result = ref.advancedResolve(true);
assertResolvesToMethodInClass(result, "A");
}
public void testRawInheritanceConflict() throws Exception {
PsiJavaReference ref = (PsiJavaReference)configureByFile();
final JavaResolveResult[] result = ref.multiResolve(false);
assertEquals("False ambiguity", 1, result.length);
}
public void testRawVsGenericConflictInCaseOfOverride() throws Exception{
PsiJavaReference ref = (PsiJavaReference) configureByFile();
final JavaResolveResult result = ref.advancedResolve(true);
assertResolvesToMethodInClass(result, "B");
}
public void testRawVsGenericConflictInCaseOfOverride2() throws Exception{
PsiJavaReference ref = (PsiJavaReference) configureByFile();
final JavaResolveResult result = ref.advancedResolve(true);
assertResolvesToMethodInClass(result, "TestProcessor");
}
public void testAutoboxingAndWidening() throws Exception{
PsiJavaReference ref = (PsiJavaReference) configureByFile();
final JavaResolveResult result = ref.advancedResolve(true);
assertNotNull(result.getElement());
assertTrue(result.isValidResult());
}
public void testSOE() throws Exception {
PsiReference ref = configureByFile();
ref.resolve();
}
public void testHidingSuperPrivate() throws Exception {
PsiJavaReference ref = (PsiJavaReference)configureByFile();
final JavaResolveResult result = ref.advancedResolve(true);
assertResolvesToMethodInClass(result, "S");
}
public void testNestedTypeParams() throws Exception {
PsiJavaReference ref = (PsiJavaReference)configureByFile();
final JavaResolveResult result = ref.advancedResolve(true);
assertResolvesToMethodInClass(result, "TestImpl");
}
public void testTypeParamBoundConflict() throws Exception {
PsiJavaReference ref = (PsiJavaReference)configureByFile();
final JavaResolveResult result = ref.advancedResolve(true);
assertResolvesToMethodInClass(result, "Testergen");
}
public void testAmbiguousBoxing() throws Exception {
PsiJavaReference ref = (PsiJavaReference)configureByFile();
final JavaResolveResult result = ref.advancedResolve(true);
assertFalse(result.isValidResult());
JavaResolveResult[] results = ref.multiResolve(false);
assertEquals(2, results.length);
assertEquals("f", ((PsiMethod)results[0].getElement()).getName());
assertEquals("f", ((PsiMethod)results[1].getElement()).getName());
}
public void testStaticMethodInSubclass() throws Exception {
PsiJavaReference ref = (PsiJavaReference)configureByFile();
final JavaResolveResult result = ref.advancedResolve(true);
assertNull(result.getElement());
}
private static void assertResolvesToMethodInClass(JavaResolveResult result, @NonNls String name) {
PsiMethod method = (PsiMethod)result.getElement();
assertNotNull(method);
assertTrue(result.isValidResult());
assertEquals(name, method.getContainingClass().getName());
}
}
|
|
/*
* Project.java
*
*
*
* Copyright 2006-2018 James F. Bowring, CIRDLES.org, and Earth-Time.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.earthtime.projects;
import java.awt.Frame;
import java.io.File;
import java.io.IOException;
import java.io.Serializable;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.SortedSet;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.swing.filechooser.FileFilter;
import org.earthtime.Tripoli.dataModels.inputParametersModels.AbstractAcquisitionModel;
import org.earthtime.Tripoli.fractions.TripoliFraction;
import org.earthtime.Tripoli.rawDataFiles.handlers.AbstractRawDataFileHandler;
import org.earthtime.Tripoli.samples.AbstractTripoliSample;
import org.earthtime.Tripoli.sessions.TripoliSessionInterface;
import org.earthtime.UPb_Redux.ReduxConstants;
import org.earthtime.UPb_Redux.aliquots.UPbReduxAliquot;
import org.earthtime.UPb_Redux.filters.ReduxFileFilter;
import org.earthtime.UPb_Redux.fractions.FractionI;
import org.earthtime.UPb_Redux.fractions.UPbReduxFractions.UPbFractionI;
import org.earthtime.UPb_Redux.fractions.UPbReduxFractions.UPbLAICPMSFraction;
import org.earthtime.UPb_Redux.fractions.UPbReduxFractions.UPbSHRIMPFraction;
import org.earthtime.UPb_Redux.samples.Sample;
import org.earthtime.UPb_Redux.user.ReduxPersistentState;
import org.earthtime.UPb_Redux.utilities.ETSerializer;
import org.earthtime.aliquots.AliquotInterface;
import org.earthtime.aliquots.ReduxAliquotInterface;
import org.earthtime.dataDictionaries.DataDictionary;
import org.earthtime.dataDictionaries.SampleAnalysisTypesEnum;
import org.earthtime.dataDictionaries.SampleTypesEnum;
import org.earthtime.exceptions.ETException;
import org.earthtime.samples.SampleInterface;
import org.earthtime.utilities.FileHelper;
/**
* Project is an organizing principle proposed at "ReduxFest 2010" at MIT that
* provides for the assembly of disparate samples and their aliquots. Project
* implementation began in Oct 2011. We will keep the same functionality for
* Samples, with the caveat that now, any Sample will belong to a default
* project of one sample only. Any Sample can be a member of any number of
* Projects. The .redux file extension will do double duty for samples and
* projects.
*
* @author James F. Bowring
*/
public class Project implements
Serializable,
EarthTimeSerializedFileInterface,
ProjectInterface {
// Class variables
private static final long serialVersionUID = 6292924571103425985L;
// instance variables
private String projectName;
private SampleInterface compiledSuperSample;
private ArrayList<SampleInterface> projectSamples;
private TripoliSessionInterface tripoliSession;
private File locationOfProjectReduxFile;
private boolean changed;
private ReduxConstants.ANALYSIS_PURPOSE analysisPurpose;
private AbstractAcquisitionModel acquisitionModel;
private AbstractRawDataFileHandler rawDataFileHandler;
private ReduxPersistentState myState;
// for Legacy projects
private File locationOfDataImportFile;
private SampleAnalysisTypesEnum sampleAnalysisType;
/**
 * Creates an empty project named "Empty Project" with LAICPMS as the default
 * sample analysis type.
 */
public Project() {
this.projectName = "Empty Project";
this.compiledSuperSample = null;
this.projectSamples = new ArrayList<>();
this.tripoliSession = null;
this.locationOfProjectReduxFile = null;
this.acquisitionModel = null;
this.rawDataFileHandler = null;
this.sampleAnalysisType = SampleAnalysisTypesEnum.LAICPMS;
}
/**
*
* @param myState
*/
public Project(ReduxPersistentState myState) {
this();
this.myState = myState;
}
/**
 * Prompts the user for a save location and writes this project as a serialized
 * .redux file.
 *
 * @return the selected file, or null if the user cancelled the dialog
 */
@Override
public File saveProjectFileAs() {
String dialogTitle = "Save Redux file for this Project: *.redux";
final String fileExtension = ".redux";
String projectFileName = projectName + fileExtension;
FileFilter nonMacFileFilter = new ReduxFileFilter();
File selectedFile;
String projectFolderPath;
if (locationOfProjectReduxFile != null) {
projectFolderPath = locationOfProjectReduxFile.getParent();
} else {
projectFolderPath = myState.getMRUProjectFolderPath();
}
selectedFile = FileHelper.AllPlatformSaveAs(
new Frame(),
dialogTitle,
projectFolderPath,
fileExtension,
projectFileName,
nonMacFileFilter);
if (selectedFile != null) {
saveTheProjectAsSerializedReduxFile(selectedFile);
}
return selectedFile;
}
/**
 * Serializes this project to its known .redux file location, asking the user for a
 * location first if none has been set.
 */
@Override
public final void saveTheProjectAsSerializedReduxFile() {
if (locationOfProjectReduxFile == null) {
locationOfProjectReduxFile = saveProjectFileAs();
}
if (locationOfProjectReduxFile != null) {
try {
ETSerializer.SerializeObjectToFile(this, locationOfProjectReduxFile.getCanonicalPath());
} catch (IOException ex) {
Logger.getLogger(Project.class.getName()).log(Level.SEVERE, null, ex);
} catch (ETException etexception) {
// ETExceptions raised during serialization are ignored here
}
}
}
/**
 * Serializes this project to the given file, remembers that location, and updates
 * the most-recently-used project list.
 *
 * @param file the .redux file to write
 */
@Override
public final void saveTheProjectAsSerializedReduxFile(
File file) {
locationOfProjectReduxFile = file;
saveTheProjectAsSerializedReduxFile();
// APRIL 2014 update the project so it knows where it is
setLocationOfProjectReduxFile(locationOfProjectReduxFile);
// update MRU status
myState.updateMRUProjectList(locationOfProjectReduxFile);
}
/**
 * Converts the Tripoli samples of the current session into Redux samples, aliquots,
 * and fractions, and compiles them into the project's super-sample.
 */
@Override
public void prepareSamplesForRedux() {
System.out.println("Preparing Samples for Redux");
// walk the tripolisamples and convert to samples
// Redux will end up with a set of aliquots (aka compiled sample) each named for the sample (1-to-1)
// and a set of fractions each associated with an aliquot
// make a super-sample or projectsample to leverage existing Redux
if (compiledSuperSample == null) {
compiledSuperSample = new Sample( //
projectName, //
SampleTypesEnum.PROJECT.getName(), //
SampleAnalysisTypesEnum.TRIPOLIZED.getName(), //
ReduxConstants.ANALYSIS_PURPOSE.DetritalSpectrum, "UPb", "UPb");
}
ArrayList<AbstractTripoliSample> tripoliSamples = tripoliSession.getTripoliSamples();
for (AbstractTripoliSample tripoliSample : tripoliSamples) {
// check for primary standard and leave it out
if (true) { // oct 2014: include standards now; the previous condition was (!tripoliSample.isPrimaryReferenceMaterial())
// june 2016
// determine if sample already processed
SampleInterface sample = null;
AliquotInterface aliquot = null;
for (int i = 0; i < projectSamples.size(); i++) {
if (projectSamples.get(i).getSampleName().equalsIgnoreCase(tripoliSample.getSampleName())) {
sample = projectSamples.get(i);
aliquot = sample.getAliquots().get(0);
break;
}
}
if (sample == null) {
sample = new Sample( //
tripoliSample.getSampleName(), //
SampleTypesEnum.ANALYSIS.getName(), //
sampleAnalysisType.getName(), //
analysisPurpose, "UPb", "UPb");
projectSamples.add(sample);
try {
aliquot = sample.addNewAliquot(tripoliSample.getSampleName());
aliquot.setAnalysisPurpose(analysisPurpose);
// TODO: Enum of inst methods
aliquot.setAliquotInstrumentalMethod(DataDictionary.AliquotInstrumentalMethod[5]);
} catch (ETException eTException) {
// problems while adding or configuring the aliquot are ignored
}
System.out.println("New Aliquot is # " + ((UPbReduxAliquot) aliquot).getAliquotNumber() + " = " + aliquot.getAliquotName());
}
SortedSet<TripoliFraction> tripoliSampleFractions = tripoliSample.getSampleFractions();
for (Iterator<TripoliFraction> it = tripoliSampleFractions.iterator(); it.hasNext();) {
TripoliFraction tf = it.next();
// june 2016
// determine if fraction already exists
if (!sample.containsFractionByName(tf.getFractionID())) {
// feb 2016
FractionI reduxVersionTripolizedFraction = null;
if (sampleAnalysisType.compareTo(SampleAnalysisTypesEnum.LAICPMS) == 0) {
reduxVersionTripolizedFraction = new UPbLAICPMSFraction(tf.getFractionID());
} else if (sampleAnalysisType.compareTo(SampleAnalysisTypesEnum.SHRIMP) == 0) {
reduxVersionTripolizedFraction = new UPbSHRIMPFraction(tf.getFractionID());
}
reduxVersionTripolizedFraction.setSampleName(tripoliSample.getSampleName());
// add to tripoli fraction so its UPbFraction can be continuously updated
tf.setuPbFraction(reduxVersionTripolizedFraction);
// dec 2015
((UPbFractionI) reduxVersionTripolizedFraction).setTripoliFraction(tf);
reduxVersionTripolizedFraction.setRejected(!tf.isIncluded());
// automatically added to aliquot #1 as we are assuming only one aliquot in this scenario
sample.addFraction(reduxVersionTripolizedFraction);
// feb 2015 in prep for export
((ReduxAliquotInterface) aliquot).getAliquotFractions().add(reduxVersionTripolizedFraction);
}
}
// this forces aliquot fraction population
SampleInterface.copyAliquotIntoSample(compiledSuperSample, sample.getAliquotByName(aliquot.getAliquotName()), new UPbReduxAliquot());
}// if true
}
// first pass without any user interaction
tripoliSession.setEstimatedPlottingPointsCount(1000);
}
/**
 * Serializes each project sample into its own .redux file inside a
 * "<projectName>_Samples" folder next to the project .redux file.
 *
 * @return the path of the folder containing the exported sample files
 * @throws org.earthtime.exceptions.ETException if the folder cannot be prepared
 */
@Override
public Path exportProjectSamples() throws ETException {
File projectSamplesFolder = new File(locationOfProjectReduxFile.getParent() + File.separatorChar + projectName + "_Samples");
boolean jobCompleted = true;
if (projectSamplesFolder.exists()) {
File[] filesFound = projectSamplesFolder.listFiles();
for (File filesFound1 : filesFound) {
jobCompleted = jobCompleted && filesFound1.delete();
}
} else {
jobCompleted = jobCompleted && projectSamplesFolder.mkdir();
}
if (jobCompleted) {
prepareSamplesForExport();
for (int i = 0; i < projectSamples.size(); i++) {
SampleInterface sample = projectSamples.get(i);
File sampleFile = new File(//
projectSamplesFolder.getAbsolutePath() + File.separatorChar + sample.getSampleName());
// first write sample out
SampleInterface.saveSampleAsSerializedReduxFile(sample, sampleFile);
}
} else {
throw new ETException(null, "Unable to process " + projectSamplesFolder.getAbsolutePath());
}
return projectSamplesFolder.toPath();
}
public void prepareSamplesForExport() {
for (int i = 0; i < projectSamples.size(); i++) {
SampleInterface sample = projectSamples.get(i);
System.out.println("Preparing for export Sample: " + sample.getSampleName());
// oct 2014 specify sample types
if (SampleInterface.isAnalysisTypeTripolized(compiledSuperSample.getSampleAnalysisType())) {
sample.setSampleType(SampleTypesEnum.ANALYSIS.getName());
} else {
sample.setSampleType(SampleTypesEnum.LEGACY.getName());
sample.setAnalyzed(true);
}
sample.setLegacyStatusForReportTable();
}
}
/**
* @return the projectSamples
*/
@Override
public ArrayList<SampleInterface> getProjectSamples() {
return projectSamples;
}
/**
* @param projectSamples the projectSamples to set
*/
@Override
public void setProjectSamples(ArrayList<SampleInterface> projectSamples) {
this.projectSamples = projectSamples;
}
/**
* @return the tripoliSession
*/
@Override
public TripoliSessionInterface getTripoliSession() {
return tripoliSession;
}
/**
* @param tripoliSession the tripoliSession to set
*/
@Override
public void setTripoliSession(TripoliSessionInterface tripoliSession) {
this.tripoliSession = tripoliSession;
}
/**
* @return the projectName
*/
@Override
public String getProjectName() {
return projectName;
}
/**
* @param projectName the projectName to set
*/
@Override
public void setProjectName(String projectName) {
if (projectName.length() > 0) {
this.projectName = projectName;
}
}
/**
* @return the locationOfProjectReduxFile
*/
@Override
public File getLocationOfProjectReduxFile() {
return locationOfProjectReduxFile;
}
/**
* @param locationOfProjectReduxFile the locationOfProjectReduxFile to set
*/
@Override
public void setLocationOfProjectReduxFile(File locationOfProjectReduxFile) {
this.locationOfProjectReduxFile = locationOfProjectReduxFile;
}
/**
* @return the compiledSuperSample
*/
@Override
public SampleInterface getSuperSample() {
return compiledSuperSample;
}
/**
* @param superSample
*/
@Override
public void setSuperSample(SampleInterface superSample) {
this.compiledSuperSample = superSample;
}
/**
* @return the changed
*/
public boolean isChanged() {
return changed;
}
/**
* @param changed the changed to set
*/
public void setChanged(boolean changed) {
this.changed = changed;
}
/**
* @return the analysisPurpose
*/
@Override
public ReduxConstants.ANALYSIS_PURPOSE getAnalysisPurpose() {
return analysisPurpose;
}
/**
* @param analysisPurpose the analysisPurpose to set
*/
@Override
public void setAnalysisPurpose(ReduxConstants.ANALYSIS_PURPOSE analysisPurpose) {
this.analysisPurpose = analysisPurpose;
for (int i = 0; i < projectSamples.size(); i ++){
projectSamples.get(i).setAnalysisPurpose(analysisPurpose);
}
}
/**
* @return the acquisitionModel
*/
@Override
public AbstractAcquisitionModel getAcquisitionModel() {
return acquisitionModel;
}
/**
* @param acquisitionModel the acquisitionModel to set
*/
@Override
public void setAcquisitionModel(AbstractAcquisitionModel acquisitionModel) {
this.acquisitionModel = acquisitionModel;
}
/**
* @return the rawDataFileHandler
*/
@Override
public AbstractRawDataFileHandler getRawDataFileHandler() {
return rawDataFileHandler;
}
/**
* @param rawDataFileHandler the rawDataFileHandler to set
*/
@Override
public void setRawDataFileHandler(AbstractRawDataFileHandler rawDataFileHandler) {
this.rawDataFileHandler = rawDataFileHandler;
}
/**
* @return the compiledSuperSample
*/
@Override
public SampleInterface getCompiledSuperSample() {
return compiledSuperSample;
}
/**
* @return the locationOfDataImportFile
*/
@Override
public File getLocationOfDataImportFile() {
if (locationOfDataImportFile == null) {
locationOfDataImportFile = new File(":");
}
return locationOfDataImportFile;
}
/**
* @param locationOfDataImportFile the locationOfDataImportFile to set
*/
@Override
public void setLocationOfDataImportFile(File locationOfDataImportFile) {
this.locationOfDataImportFile = locationOfDataImportFile;
}
/**
* @return the sampleAnalysisType
*/
public SampleAnalysisTypesEnum getSampleAnalysisType() {
if (sampleAnalysisType == null) {
sampleAnalysisType = SampleAnalysisTypesEnum.LAICPMS;
}
return sampleAnalysisType;
}
/**
* @param sampleAnalysisType the sampleAnalysisType to set
*/
public void setSampleAnalysisType(SampleAnalysisTypesEnum sampleAnalysisType) {
this.sampleAnalysisType = sampleAnalysisType;
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.twill.internal.logging;
import ch.qos.logback.classic.spi.ILoggingEvent;
import ch.qos.logback.core.AppenderBase;
import com.google.common.base.Charsets;
import com.google.common.base.Preconditions;
import com.google.common.base.Stopwatch;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.util.concurrent.FutureCallback;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.Service;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import org.apache.twill.api.logging.LogThrowable;
import org.apache.twill.common.Services;
import org.apache.twill.common.Threads;
import org.apache.twill.internal.json.ILoggingEventSerializer;
import org.apache.twill.internal.json.LogThrowableCodec;
import org.apache.twill.internal.json.StackTraceElementCodec;
import org.apache.twill.internal.kafka.client.ZKKafkaClientService;
import org.apache.twill.kafka.client.Compression;
import org.apache.twill.kafka.client.KafkaClientService;
import org.apache.twill.kafka.client.KafkaPublisher;
import org.apache.twill.zookeeper.RetryStrategies;
import org.apache.twill.zookeeper.ZKClientService;
import org.apache.twill.zookeeper.ZKClientServices;
import org.apache.twill.zookeeper.ZKClients;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.nio.ByteBuffer;
import java.util.Collection;
import java.util.List;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
/**
 * Logback appender that converts {@link ILoggingEvent}s to JSON and publishes them to a
 * Kafka topic, buffering entries and flushing them periodically or once the buffer
 * reaches a configured limit.
 */
public final class KafkaAppender extends AppenderBase<ILoggingEvent> {
private static final Logger LOG = LoggerFactory.getLogger(KafkaAppender.class);
private final AtomicReference<KafkaPublisher.Preparer> publisher;
private final Runnable flushTask;
/**
* Rough count of how many entries are being buffered. It's just approximate, not exact.
*/
private final AtomicInteger bufferedSize;
private LogEventConverter eventConverter;
private ZKClientService zkClientService;
private KafkaClientService kafkaClient;
private String zkConnectStr;
private String hostname;
private String runnableName;
private String topic;
private Queue<String> buffer;
private int flushLimit = 20;
private int flushPeriod = 100;
private ScheduledExecutorService scheduler;
public KafkaAppender() {
publisher = new AtomicReference<KafkaPublisher.Preparer>();
flushTask = createFlushTask();
bufferedSize = new AtomicInteger();
buffer = new ConcurrentLinkedQueue<String>();
}
/**
* Sets the zookeeper connection string. Called by slf4j.
*/
@SuppressWarnings("unused")
public void setZookeeper(String zkConnectStr) {
this.zkConnectStr = zkConnectStr;
}
/**
* Sets the hostname. Called by slf4j.
*/
@SuppressWarnings("unused")
public void setHostname(String hostname) {
this.hostname = hostname;
}
/**
* Sets the runnableName.
*/
@SuppressWarnings("unused")
public void setRunnableName(String runnableName) {
this.runnableName = runnableName;
}
/**
* Sets the topic name for publishing logs. Called by slf4j.
*/
@SuppressWarnings("unused")
public void setTopic(String topic) {
this.topic = topic;
}
/**
* Sets the maximum number of cached log entries before forcing a flush. Called by slf4j.
*/
@SuppressWarnings("unused")
public void setFlushLimit(int flushLimit) {
this.flushLimit = flushLimit;
}
/**
* Sets the periodic flush time in milliseconds. Called by slf4j.
*/
@SuppressWarnings("unused")
public void setFlushPeriod(int flushPeriod) {
this.flushPeriod = flushPeriod;
}
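/*
 * The setters above are bean properties picked up from the logging configuration.
 * A hypothetical logback appender configuration (values illustrative only) could look like:
 *
 *   <appender name="KAFKA" class="org.apache.twill.internal.logging.KafkaAppender">
 *     <zookeeper>localhost:2181</zookeeper>
 *     <hostname>${HOSTNAME}</hostname>
 *     <topic>log</topic>
 *     <flushLimit>20</flushLimit>
 *     <flushPeriod>100</flushPeriod>
 *   </appender>
 */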
@Override
public void start() {
Preconditions.checkNotNull(zkConnectStr);
eventConverter = new LogEventConverter(hostname, runnableName);
scheduler = Executors.newSingleThreadScheduledExecutor(Threads.createDaemonThreadFactory("kafka-logger"));
zkClientService = ZKClientServices.delegate(
ZKClients.reWatchOnExpire(
ZKClients.retryOnFailure(ZKClientService.Builder.of(zkConnectStr).build(),
RetryStrategies.fixDelay(1, TimeUnit.SECONDS))));
kafkaClient = new ZKKafkaClientService(zkClientService);
Futures.addCallback(Services.chainStart(zkClientService, kafkaClient),
new FutureCallback<List<ListenableFuture<Service.State>>>() {
@Override
public void onSuccess(List<ListenableFuture<Service.State>> result) {
for (ListenableFuture<Service.State> future : result) {
Preconditions.checkState(Futures.getUnchecked(future) == Service.State.RUNNING,
"Service is not running.");
}
LOG.info("Kafka client started: " + zkConnectStr);
scheduler.scheduleWithFixedDelay(flushTask, 0, flushPeriod, TimeUnit.MILLISECONDS);
}
@Override
public void onFailure(Throwable t) {
// Failed to talk to Kafka. Other than logging, what can be done?
LOG.error("Failed to start kafka appender.", t);
}
}, Threads.SAME_THREAD_EXECUTOR);
super.start();
}
@Override
public void stop() {
super.stop();
scheduler.shutdownNow();
Futures.getUnchecked(Services.chainStop(kafkaClient, zkClientService));
}
public void forceFlush() {
try {
publishLogs(2, TimeUnit.SECONDS);
} catch (Exception e) {
LOG.error("Failed to publish last batch of log.", e);
}
}
@Override
protected void append(ILoggingEvent eventObject) {
buffer.offer(eventConverter.convert(eventObject));
if (bufferedSize.incrementAndGet() >= flushLimit && publisher.get() != null) {
// Try to do an extra flush
scheduler.submit(flushTask);
}
}
/**
* Publishes buffered logs to Kafka, within the given timeout.
*
* @return Number of logs published.
* @throws TimeoutException If timeout reached before publish completed.
*/
private int publishLogs(long timeout, TimeUnit timeoutUnit) throws TimeoutException {
List<ByteBuffer> logs = Lists.newArrayListWithExpectedSize(bufferedSize.get());
for (String json : Iterables.consumingIterable(buffer)) {
logs.add(Charsets.UTF_8.encode(json));
}
long backOffTime = timeoutUnit.toNanos(timeout) / 10;
if (backOffTime <= 0) {
backOffTime = 1;
}
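// Retry loop below: on a publish failure the thread sleeps for roughly one tenth of the
// caller's timeout, then retries with whatever time budget remains, giving up once the
// budget is exhausted.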
try {
Stopwatch stopwatch = new Stopwatch();
stopwatch.start();
long publishTimeout = timeout;
do {
try {
int published = doPublishLogs(logs).get(publishTimeout, timeoutUnit);
bufferedSize.addAndGet(-published);
return published;
} catch (ExecutionException e) {
LOG.error("Failed to publish logs to Kafka.", e);
TimeUnit.NANOSECONDS.sleep(backOffTime);
publishTimeout -= stopwatch.elapsedTime(timeoutUnit);
stopwatch.reset();
stopwatch.start();
}
} while (publishTimeout > 0);
} catch (InterruptedException e) {
LOG.warn("Logs publish to Kafka interrupted.", e);
}
return 0;
}
private ListenableFuture<Integer> doPublishLogs(Collection <ByteBuffer> logs) {
// Nothing to publish, simply returns a completed future.
if (logs.isEmpty()) {
return Futures.immediateFuture(0);
}
// If the publisher is not available, tries to create one.
KafkaPublisher.Preparer publisher = KafkaAppender.this.publisher.get();
if (publisher == null) {
try {
KafkaPublisher.Preparer preparer = kafkaClient.getPublisher(KafkaPublisher.Ack.LEADER_RECEIVED,
Compression.SNAPPY).prepare(topic);
KafkaAppender.this.publisher.compareAndSet(null, preparer);
publisher = KafkaAppender.this.publisher.get();
} catch (Exception e) {
return Futures.immediateFailedFuture(e);
}
}
for (ByteBuffer buffer : logs) {
publisher.add(buffer, 0);
}
return publisher.send();
}
/**
* Creates a {@link Runnable} that writes all logs in the buffer to Kafka.
* @return The Runnable task
*/
private Runnable createFlushTask() {
return new Runnable() {
@Override
public void run() {
try {
int published = publishLogs(2L, TimeUnit.SECONDS);
if (LOG.isDebugEnabled()) {
LOG.info("Published {} log messages to Kafka.", published);
}
} catch (Exception e) {
LOG.error("Failed to push logs to Kafka. Log entries dropped.", e);
}
}
};
}
/**
* Helper class to convert {@link ILoggingEvent} into json string.
*/
private static final class LogEventConverter {
private final Gson gson;
private LogEventConverter(String hostname, String runnableName) {
gson = new GsonBuilder()
.registerTypeAdapter(StackTraceElement.class, new StackTraceElementCodec())
.registerTypeAdapter(LogThrowable.class, new LogThrowableCodec())
.registerTypeAdapter(ILoggingEvent.class, new ILoggingEventSerializer(hostname, runnableName))
.create();
}
private String convert(ILoggingEvent event) {
return gson.toJson(event, ILoggingEvent.class);
}
}
}
|
|
/* ===========================================================
* JFreeChart : a free chart library for the Java(tm) platform
* ===========================================================
*
* (C) Copyright 2000-2007, by Object Refinery Limited and Contributors.
*
* Project Info: http://www.jfree.org/jfreechart/index.html
*
* This library is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation; either version 2.1 of the License, or
* (at your option) any later version.
*
* This library is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
* License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
* USA.
*
* [Java is a trademark or registered trademark of Sun Microsystems, Inc.
* in the United States and other countries.]
*
* ---------------------
* XYLineAnnotation.java
* ---------------------
* (C) Copyright 2003-2007, by Object Refinery Limited.
*
* Original Author: David Gilbert (for Object Refinery Limited);
* Contributor(s): -;
*
* Changes:
* --------
* 02-Apr-2003 : Version 1 (DG);
* 19-Aug-2003 : Added equals method, implemented Cloneable, and applied
* serialization fixes (DG);
* 21-Jan-2004 : Update for renamed method in ValueAxis (DG);
* 14-Apr-2004 : Fixed draw() method to handle plot orientation correctly (DG);
* 29-Sep-2004 : Added support for tool tips and URLS, now extends
* AbstractXYAnnotation (DG);
* 04-Oct-2004 : Renamed ShapeUtils --> ShapeUtilities (DG);
* 08-Jun-2005 : Fixed equals() method to handle GradientPaint() (DG);
*
*/
package org.jfree.chart.annotations;
import java.awt.BasicStroke;
import java.awt.Color;
import java.awt.Graphics2D;
import java.awt.Paint;
import java.awt.Stroke;
import java.awt.geom.Line2D;
import java.awt.geom.Rectangle2D;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import org.jfree.chart.axis.ValueAxis;
import org.jfree.chart.plot.Plot;
import org.jfree.chart.plot.PlotOrientation;
import org.jfree.chart.plot.PlotRenderingInfo;
import org.jfree.chart.plot.XYPlot;
import org.jfree.io.SerialUtilities;
import org.jfree.ui.RectangleEdge;
import org.jfree.util.ObjectUtilities;
import org.jfree.util.PaintUtilities;
import org.jfree.util.PublicCloneable;
import org.jfree.util.ShapeUtilities;
/**
* A simple line annotation that can be placed on an {@link XYPlot}.
*/
public class XYLineAnnotation extends AbstractXYAnnotation
implements Cloneable, PublicCloneable,
Serializable {
/** For serialization. */
private static final long serialVersionUID = -80535465244091334L;
/** The x-coordinate of the line's start point. */
private double x1;
/** The y-coordinate of the line's start point. */
private double y1;
/** The x-coordinate of the line's end point. */
private double x2;
/** The y-coordinate of the line's end point. */
private double y2;
/** The line stroke. */
private transient Stroke stroke;
/** The line color. */
private transient Paint paint;
/**
* Creates a new annotation that draws a line from (x1, y1) to (x2, y2)
* where the coordinates are measured in data space (that is, against the
* plot's axes).
*
* @param x1 the x-coordinate for the start of the line.
* @param y1 the y-coordinate for the start of the line.
* @param x2 the x-coordinate for the end of the line.
* @param y2 the y-coordinate for the end of the line.
*/
public XYLineAnnotation(double x1, double y1, double x2, double y2) {
this(x1, y1, x2, y2, new BasicStroke(1.0f), Color.black);
}
/**
* Creates a new annotation that draws a line from (x1, y1) to (x2, y2)
* where the coordinates are measured in data space (that is, against the
* plot's axes).
*
* @param x1 the x-coordinate for the start of the line.
* @param y1 the y-coordinate for the start of the line.
* @param x2 the x-coordinate for the end of the line.
* @param y2 the y-coordinate for the end of the line.
* @param stroke the line stroke (<code>null</code> not permitted).
* @param paint the line color (<code>null</code> not permitted).
*/
public XYLineAnnotation(double x1, double y1, double x2, double y2,
Stroke stroke, Paint paint) {
if (stroke == null) {
throw new IllegalArgumentException("Null 'stroke' argument.");
}
if (paint == null) {
throw new IllegalArgumentException("Null 'paint' argument.");
}
this.x1 = x1;
this.y1 = y1;
this.x2 = x2;
this.y2 = y2;
this.stroke = stroke;
this.paint = paint;
}
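// Usage sketch (illustrative, not part of the original file): the annotation is typically
// added to a chart through an existing XYPlot, e.g.
//
//   XYLineAnnotation line = new XYLineAnnotation(
//       1.0, 2.0, 8.0, 9.0, new BasicStroke(2.0f), Color.red);
//   plot.addAnnotation(line);
//
// where "plot" is assumed to be an already configured XYPlot instance.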
/**
* Draws the annotation. This method is called by the {@link XYPlot}
* class, so you won't normally need to call it yourself.
*
* @param g2 the graphics device.
* @param plot the plot.
* @param dataArea the data area.
* @param domainAxis the domain axis.
* @param rangeAxis the range axis.
* @param rendererIndex the renderer index.
* @param info if supplied, this info object will be populated with
* entity information.
*/
public void draw(Graphics2D g2, XYPlot plot, Rectangle2D dataArea,
ValueAxis domainAxis, ValueAxis rangeAxis,
int rendererIndex,
PlotRenderingInfo info) {
PlotOrientation orientation = plot.getOrientation();
RectangleEdge domainEdge = Plot.resolveDomainAxisLocation(
plot.getDomainAxisLocation(), orientation);
RectangleEdge rangeEdge = Plot.resolveRangeAxisLocation(
plot.getRangeAxisLocation(), orientation);
float j2DX1 = 0.0f;
float j2DX2 = 0.0f;
float j2DY1 = 0.0f;
float j2DY2 = 0.0f;
if (orientation == PlotOrientation.VERTICAL) {
j2DX1 = (float) domainAxis.valueToJava2D(this.x1, dataArea,
domainEdge);
j2DY1 = (float) rangeAxis.valueToJava2D(this.y1, dataArea,
rangeEdge);
j2DX2 = (float) domainAxis.valueToJava2D(this.x2, dataArea,
domainEdge);
j2DY2 = (float) rangeAxis.valueToJava2D(this.y2, dataArea,
rangeEdge);
}
else if (orientation == PlotOrientation.HORIZONTAL) {
j2DY1 = (float) domainAxis.valueToJava2D(this.x1, dataArea,
domainEdge);
j2DX1 = (float) rangeAxis.valueToJava2D(this.y1, dataArea,
rangeEdge);
j2DY2 = (float) domainAxis.valueToJava2D(this.x2, dataArea,
domainEdge);
j2DX2 = (float) rangeAxis.valueToJava2D(this.y2, dataArea,
rangeEdge);
}
g2.setPaint(this.paint);
g2.setStroke(this.stroke);
Line2D line = new Line2D.Float(j2DX1, j2DY1, j2DX2, j2DY2);
g2.draw(line);
String toolTip = getToolTipText();
String url = getURL();
if (toolTip != null || url != null) {
addEntity(info, ShapeUtilities.createLineRegion(line, 1.0f),
rendererIndex, toolTip, url);
}
}
/**
* Tests this object for equality with an arbitrary object.
*
* @param obj the object to test against (<code>null</code> permitted).
*
* @return <code>true</code> or <code>false</code>.
*/
public boolean equals(Object obj) {
if (obj == this) {
return true;
}
if (!super.equals(obj)) {
return false;
}
if (!(obj instanceof XYLineAnnotation)) {
return false;
}
XYLineAnnotation that = (XYLineAnnotation) obj;
if (this.x1 != that.x1) {
return false;
}
if (this.y1 != that.y1) {
return false;
}
if (this.x2 != that.x2) {
return false;
}
if (this.y2 != that.y2) {
return false;
}
if (!PaintUtilities.equal(this.paint, that.paint)) {
return false;
}
if (!ObjectUtilities.equal(this.stroke, that.stroke)) {
return false;
}
// seems to be the same...
return true;
}
/**
* Returns a hash code.
*
* @return A hash code.
*/
public int hashCode() {
int result;
long temp;
temp = Double.doubleToLongBits(this.x1);
result = (int) (temp ^ (temp >>> 32));
temp = Double.doubleToLongBits(this.x2);
result = 29 * result + (int) (temp ^ (temp >>> 32));
temp = Double.doubleToLongBits(this.y1);
result = 29 * result + (int) (temp ^ (temp >>> 32));
temp = Double.doubleToLongBits(this.y2);
result = 29 * result + (int) (temp ^ (temp >>> 32));
return result;
}
/**
* Returns a clone of the annotation.
*
* @return A clone.
*
* @throws CloneNotSupportedException if the annotation can't be cloned.
*/
public Object clone() throws CloneNotSupportedException {
return super.clone();
}
/**
* Provides serialization support.
*
* @param stream the output stream.
*
* @throws IOException if there is an I/O error.
*/
private void writeObject(ObjectOutputStream stream) throws IOException {
stream.defaultWriteObject();
SerialUtilities.writePaint(this.paint, stream);
SerialUtilities.writeStroke(this.stroke, stream);
}
/**
* Provides serialization support.
*
* @param stream the input stream.
*
* @throws IOException if there is an I/O error.
* @throws ClassNotFoundException if there is a classpath problem.
*/
private void readObject(ObjectInputStream stream)
throws IOException, ClassNotFoundException {
stream.defaultReadObject();
this.paint = SerialUtilities.readPaint(stream);
this.stroke = SerialUtilities.readStroke(stream);
}
}
|
|
/*
* Copyright (c) 2012-14, salesforce.com, inc.
* All rights reserved.
* Redistribution and use of this software in source and binary forms, with or
* without modification, are permitted provided that the following conditions
* are met:
* - Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* - Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* - Neither the name of salesforce.com, inc. nor the names of its contributors
* may be used to endorse or promote products derived from this software without
* specific prior written permission of salesforce.com, inc.
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
package com.salesforce.androidsdk.smartstore.store;
import java.util.ArrayList;
import java.util.List;
import net.sqlcipher.database.SQLiteDatabase;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import android.content.ContentValues;
import android.database.Cursor;
import android.text.TextUtils;
import com.salesforce.androidsdk.smartstore.store.QuerySpec.QueryType;
/**
* Smart store
*
* Provides a secure means for SalesforceMobileSDK Container-based applications to store objects in a persistent
* and searchable manner. Similar in some ways to CouchDB, SmartStore stores documents as JSON values.
* SmartStore is inspired by the Apple Newton OS Soup/Store model.
* The main challenge here is how to effectively store documents with dynamic fields, and still allow indexing and searching.
*/
public class SmartStore {
// Default page size used when querying
public static final int DEFAULT_PAGE_SIZE = 10;
// Table to keep track of soup names
protected static final String SOUP_NAMES_TABLE = "soup_names";
// Table to keep track of soup's index specs
protected static final String SOUP_INDEX_MAP_TABLE = "soup_index_map";
// Columns of the soup index map table
protected static final String SOUP_NAME_COL = "soupName";
protected static final String PATH_COL = "path";
protected static final String COLUMN_NAME_COL = "columnName";
protected static final String COLUMN_TYPE_COL = "columnType";
// Columns of a soup table
protected static final String ID_COL = "id";
protected static final String CREATED_COL = "created";
protected static final String LAST_MODIFIED_COL = "lastModified";
protected static final String SOUP_COL = "soup";
// JSON fields added to soup element on insert/update
public static final String SOUP_ENTRY_ID = "_soupEntryId";
public static final String SOUP_LAST_MODIFIED_DATE = "_soupLastModifiedDate";
// Predicates
protected static final String SOUP_NAME_PREDICATE = SOUP_NAME_COL + " = ?";
protected static final String PATH_PREDICATE = PATH_COL + " = ?";
protected static final String ID_PREDICATE = ID_COL + " = ?";
// Backing database
protected SQLiteDatabase db;
/**
* Changes the encryption key on the smartstore.
*
* @param db Database object.
* @param newKey New encryption key.
*/
public static synchronized void changeKey(SQLiteDatabase db, String newKey) {
synchronized(SmartStore.class) {
if (newKey != null && !newKey.trim().equals("")) {
db.execSQL("PRAGMA rekey = '" + newKey + "'");
}
}
}
/**
* Create soup index map table to keep track of soups' index specs
* Create soup name map table to keep track of soup name to table name mappings
* Called when the database is first created
*
* @param db
*/
public static void createMetaTables(SQLiteDatabase db) {
synchronized(SmartStore.class) {
// Create soup_index_map table
StringBuilder sb = new StringBuilder();
sb.append("CREATE TABLE ").append(SOUP_INDEX_MAP_TABLE).append(" (")
.append(SOUP_NAME_COL).append(" TEXT")
.append(",").append(PATH_COL).append(" TEXT")
.append(",").append(COLUMN_NAME_COL).append(" TEXT")
.append(",").append(COLUMN_TYPE_COL).append(" TEXT")
.append(")");
db.execSQL(sb.toString());
// Add index on soup_name column
db.execSQL(String.format("CREATE INDEX %s on %s ( %s )", SOUP_INDEX_MAP_TABLE + "_0", SOUP_INDEX_MAP_TABLE, SOUP_NAME_COL));
// Create soup_names table
// The table name for the soup will simply be TABLE_<soupId>
sb = new StringBuilder();
sb.append("CREATE TABLE ").append(SOUP_NAMES_TABLE).append(" (")
.append(ID_COL).append(" INTEGER PRIMARY KEY AUTOINCREMENT")
.append(",").append(SOUP_NAME_COL).append(" TEXT")
.append(")");
db.execSQL(sb.toString());
// Add index on soup_name column
db.execSQL(String.format("CREATE INDEX %s on %s ( %s )", SOUP_NAMES_TABLE + "_0", SOUP_NAMES_TABLE, SOUP_NAME_COL));
}
}
/**
* @param db
*/
public SmartStore(SQLiteDatabase db) {
this.db = db;
}
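// Usage sketch (illustrative, not from the original source; assumes IndexSpec exposes a
// (path, type) constructor as in the SmartStore SDK):
//
//   SmartStore store = new SmartStore(db);
//   store.registerSoup("accounts", new IndexSpec[] {
//       new IndexSpec("name", SmartStore.Type.string) });
//   JSONObject saved = store.upsert("accounts",
//       new JSONObject("{\"name\":\"Acme\"}"));
//
// The returned element carries the generated _soupEntryId and _soupLastModifiedDate fields.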
/**
* Start transaction
*/
public void beginTransaction() {
db.beginTransaction();
}
/**
* End transaction (commit or rollback)
*/
public void endTransaction() {
db.endTransaction();
}
/**
* Mark transaction as successful (next call to endTransaction will be a commit)
*/
public void setTransactionSuccessful() {
db.setTransactionSuccessful();
}
/**
* Register a soup
*
* Create table for soupName with a column for the soup itself and columns for paths specified in indexSpecs
* Create indexes on the new table to make lookup faster
* Create rows in soup index map table for indexSpecs
* @param soupName
* @param indexSpecs
*/
public void registerSoup(String soupName, IndexSpec[] indexSpecs) {
synchronized(SmartStore.class) {
if (soupName == null) throw new SmartStoreException("Bogus soup name: " + soupName);
if (indexSpecs.length == 0) throw new SmartStoreException("No indexSpecs specified for soup: " + soupName);
if (hasSoup(soupName)) return; // soup already exists - do nothing
// First get a table name
String soupTableName = null;
ContentValues soupMapValues = new ContentValues();
soupMapValues.put(SOUP_NAME_COL, soupName);
try {
db.beginTransaction();
long soupId = DBHelper.INSTANCE.insert(db, SOUP_NAMES_TABLE, soupMapValues);
soupTableName = getSoupTableName(soupId);
db.setTransactionSuccessful();
}
finally {
db.endTransaction();
}
// Do the rest - create table / indexes
registerSoupUsingTableName(soupName, indexSpecs, soupTableName);
}
}
/**
* Helper method for registerSoup
*
* @param soupName
* @param indexSpecs
* @param soupTableName
*/
protected void registerSoupUsingTableName(String soupName, IndexSpec[] indexSpecs, String soupTableName) {
// Prepare SQL for creating soup table and its indices
StringBuilder createTableStmt = new StringBuilder(); // to create new soup table
List<String> createIndexStmts = new ArrayList<String>(); // to create indices on new soup table
List<ContentValues> soupIndexMapInserts = new ArrayList<ContentValues>(); // to be inserted in soup index map table
createTableStmt.append("CREATE TABLE ").append(soupTableName).append(" (")
.append(ID_COL).append(" INTEGER PRIMARY KEY AUTOINCREMENT")
.append(", ").append(SOUP_COL).append(" TEXT")
.append(", ").append(CREATED_COL).append(" INTEGER")
.append(", ").append(LAST_MODIFIED_COL).append(" INTEGER");
int i = 0;
IndexSpec[] indexSpecsToCache = new IndexSpec[indexSpecs.length];
for (IndexSpec indexSpec : indexSpecs) {
// for create table
String columnName = soupTableName + "_" + i;
String columnType = indexSpec.type.getColumnType();
createTableStmt.append(", ").append(columnName).append(" ").append(columnType);
// for insert
ContentValues values = new ContentValues();
values.put(SOUP_NAME_COL, soupName);
values.put(PATH_COL, indexSpec.path);
values.put(COLUMN_NAME_COL, columnName);
values.put(COLUMN_TYPE_COL, indexSpec.type.toString());
soupIndexMapInserts.add(values);
// for create index
String indexName = soupTableName + "_" + i + "_idx";
createIndexStmts.add(String.format("CREATE INDEX %s on %s ( %s )", indexName, soupTableName, columnName));
// for the cache
indexSpecsToCache[i] = new IndexSpec(indexSpec.path, indexSpec.type, columnName);
i++;
}
createTableStmt.append(")");
// Run SQL for creating soup table and its indices
db.execSQL(createTableStmt.toString());
for (String createIndexStmt : createIndexStmts) {
db.execSQL(createIndexStmt.toString());
}
try {
db.beginTransaction();
for (ContentValues values : soupIndexMapInserts) {
DBHelper.INSTANCE.insert(db, SOUP_INDEX_MAP_TABLE, values);
}
db.setTransactionSuccessful();
// Add to soupNameToTableNamesMap
DBHelper.INSTANCE.cacheTableName(soupName, soupTableName);
// Add to soupNameToIndexSpecsMap
DBHelper.INSTANCE.cacheIndexSpecs(soupName, indexSpecsToCache);
}
finally {
db.endTransaction();
}
}
/**
* Check if soup exists
*
* @param soupName
* @return true if soup exists, false otherwise
*/
public boolean hasSoup(String soupName) {
return DBHelper.INSTANCE.getSoupTableName(db, soupName) != null;
}
/**
* Destroy a soup
*
* Drop table for soupName
* Cleanup entries in soup index map table
* @param soupName
*/
public void dropSoup(String soupName) {
synchronized(SmartStore.class) {
String soupTableName = DBHelper.INSTANCE.getSoupTableName(db, soupName);
if (soupTableName != null) {
db.execSQL("DROP TABLE IF EXISTS " + soupTableName);
try {
db.beginTransaction();
DBHelper.INSTANCE.delete(db, SOUP_NAMES_TABLE, SOUP_NAME_PREDICATE, soupName);
DBHelper.INSTANCE.delete(db, SOUP_INDEX_MAP_TABLE, SOUP_NAME_PREDICATE, soupName);
db.setTransactionSuccessful();
// Remove from cache
DBHelper.INSTANCE.removeFromCache(soupName);
}
finally {
db.endTransaction();
}
}
}
}
/**
* Destroy all the soups in the smartstore
*/
public void dropAllSoups() {
synchronized(SmartStore.class) {
List<String> soupNames = getAllSoupNames();
for(String soupName : soupNames) {
dropSoup(soupName);
}
}
}
/**
* @return all soup names in the smartstore
*/
public List<String> getAllSoupNames() {
synchronized(SmartStore.class) {
List<String> soupNames = new ArrayList<String>();
Cursor cursor = null;
try {
cursor = DBHelper.INSTANCE.query(db, SOUP_NAMES_TABLE, new String[] {SOUP_NAME_COL}, null, null, null);
if (cursor.moveToFirst()) {
do {
soupNames.add(cursor.getString(0));
}
while (cursor.moveToNext());
}
}
finally {
safeClose(cursor);
}
return soupNames;
}
}
/**
* Run the query given by its QuerySpec, returning only the results from the selected page
* @param querySpec
* @param pageIndex
* @return results for the requested page as a JSONArray
* @throws JSONException
*/
public JSONArray query(QuerySpec querySpec, int pageIndex) throws JSONException {
synchronized(SmartStore.class) {
QueryType qt = querySpec.queryType;
String sql = convertSmartSql(querySpec.smartSql);
// Page
int offsetRows = querySpec.pageSize * pageIndex;
int numberRows = querySpec.pageSize;
String limit = offsetRows + "," + numberRows;
Cursor cursor = null;
try {
cursor = DBHelper.INSTANCE.limitRawQuery(db, sql, limit, querySpec.getArgs());
JSONArray results = new JSONArray();
if (cursor.moveToFirst()) {
do {
// Smart queries
if (qt == QueryType.smart) {
results.put(getDataFromRow(cursor));
}
// Exact/like/range queries
else {
results.put(new JSONObject(cursor.getString(0)));
}
}
while (cursor.moveToNext());
}
return results;
}
finally {
safeClose(cursor);
}
}
}
/**
* Return a JSONArray for one row of data from the cursor
* @param cursor
* @return the row values as a JSONArray
* @throws JSONException
*/
private JSONArray getDataFromRow(Cursor cursor) throws JSONException {
JSONArray row = new JSONArray();
int columnCount = cursor.getColumnCount();
for (int i=0; i<columnCount; i++) {
String raw = cursor.getString(i);
// Is this column holding a serialized json object?
if (cursor.getColumnName(i).endsWith(SOUP_COL)) {
row.put(new JSONObject(raw));
// Note: we could end up returning a string if you aliased the column
}
else {
// TODO Leverage cursor.getType once our min api is 11 or above
// For now, we do our best to guess
// Is it holding an integer?
try {
Long n = Long.parseLong(raw);
row.put(n);
// Note: we could end up returning an integer for a string column if you have a string value that contains just an integer
}
// Is it holding a floating point number?
catch (NumberFormatException e) {
try {
Double d = Double.parseDouble(raw);
// No exception, let's get the value straight from the cursor
// XXX Double.parseDouble(cursor.getString(i)) is sometimes different from cursor.getDouble(i) !!!
d = cursor.getDouble(i);
row.put(d);
// Note: we could end up returning a floating point number for a string column if you have a string value that contains just a number
}
// It must be holding a string then
catch (NumberFormatException ne) {
row.put(raw);
}
}
}
}
return row;
}
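// For example (illustrative): a raw column value of "42" comes back as a Long, "3.14" is
// detected as floating and read via cursor.getDouble(), and "Acme" falls through and is
// returned as a plain String.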
/**
* @param querySpec
* @return count of results for a "smart" query
*/
public int countQuery(QuerySpec querySpec) {
synchronized(SmartStore.class) {
String countSql = convertSmartSql(querySpec.countSmartSql);
return DBHelper.INSTANCE.countRawCountQuery(db, countSql, querySpec.getArgs());
}
}
/**
* @param smartSql
* @return the smart SQL converted to plain SQL
*/
public String convertSmartSql(String smartSql) {
synchronized(SmartStore.class) {
return SmartSqlHelper.INSTANCE.convertSmartSql(db, smartSql);
}
}
/**
* Create (and commits)
* Note: Passed soupElt is modified (last modified date and soup entry id fields)
* @param soupName
* @param soupElt
* @return soupElt created or null if creation failed
* @throws JSONException
*/
public JSONObject create(String soupName, JSONObject soupElt) throws JSONException {
synchronized(SmartStore.class) {
return create(soupName, soupElt, true);
}
}
/**
* Create
* Note: Passed soupElt is modified (last modified date and soup entry id fields)
* @param soupName
* @param soupElt
* @return soupElt created or null if creation failed
* @throws JSONException
*/
public JSONObject create(String soupName, JSONObject soupElt, boolean handleTx) throws JSONException {
synchronized(SmartStore.class) {
String soupTableName = DBHelper.INSTANCE.getSoupTableName(db, soupName);
if (soupTableName == null) throw new SmartStoreException("Soup: " + soupName + " does not exist");
IndexSpec[] indexSpecs = DBHelper.INSTANCE.getIndexSpecs(db, soupName);
try {
if (handleTx) {
db.beginTransaction();
}
long now = System.currentTimeMillis();
long soupEntryId = DBHelper.INSTANCE.getNextId(db, soupTableName);
// Adding fields to soup element
soupElt.put(SOUP_ENTRY_ID, soupEntryId);
soupElt.put(SOUP_LAST_MODIFIED_DATE, now);
ContentValues contentValues = new ContentValues();
contentValues.put(ID_COL, soupEntryId);
contentValues.put(SOUP_COL, "");
contentValues.put(CREATED_COL, now);
contentValues.put(LAST_MODIFIED_COL, now);
contentValues.put(SOUP_COL, soupElt.toString());
for (IndexSpec indexSpec : indexSpecs) {
projectIndexedPaths(soupElt, contentValues, indexSpec);
}
// Inserting into database
boolean success = DBHelper.INSTANCE.insert(db, soupTableName, contentValues) == soupEntryId;
// Commit if successful
if (success) {
if (handleTx) {
db.setTransactionSuccessful();
}
return soupElt;
}
else {
return null;
}
}
finally {
if (handleTx) {
db.endTransaction();
}
}
}
}
/**
* @param soupElt
* @param contentValues
* @param indexSpec
*/
private void projectIndexedPaths(JSONObject soupElt, ContentValues contentValues, IndexSpec indexSpec) {
Object value = project(soupElt, indexSpec.path);
switch (indexSpec.type) {
case integer:
contentValues.put(indexSpec.columnName, ((Number) value).longValue()); break;
case string:
contentValues.put(indexSpec.columnName, value != null ? value.toString() : null); break;
case floating:
contentValues.put(indexSpec.columnName, ((Number) value).doubleValue()); break;
}
}
/**
* Retrieve
* @param soupName
* @param soupEntryIds
* @return JSONArray of JSONObject's with the given soupEntryIds
* @throws JSONException
*/
public JSONArray retrieve(String soupName, Long... soupEntryIds) throws JSONException {
synchronized(SmartStore.class) {
String soupTableName = DBHelper.INSTANCE.getSoupTableName(db, soupName);
if (soupTableName == null) throw new SmartStoreException("Soup: " + soupName + " does not exist");
Cursor cursor = null;
try {
JSONArray result = new JSONArray();
cursor = DBHelper.INSTANCE.query(db, soupTableName, new String[] {SOUP_COL}, null, null, getSoupEntryIdsPredicate(soupEntryIds), (String[]) null);
if (!cursor.moveToFirst()) {
return result;
}
do {
String raw = cursor.getString(cursor.getColumnIndex(SOUP_COL));
result.put(new JSONObject(raw));
}
while (cursor.moveToNext());
return result;
}
finally {
safeClose(cursor);
}
}
}
/**
* Update (and commits)
* Note: Passed soupElt is modified (last modified date and soup entry id fields)
* @param soupName
* @param soupElt
* @param soupEntryId
* @return soupElt updated or null if update failed
* @throws JSONException
*/
public JSONObject update(String soupName, JSONObject soupElt, long soupEntryId) throws JSONException {
synchronized(SmartStore.class) {
return update(soupName, soupElt, soupEntryId, true);
}
}
/**
* Update
* Note: Passed soupElt is modified (last modified date and soup entry id fields)
* @param soupName
* @param soupElt
* @param soupEntryId
* @return soupElt updated or null if update failed
* @throws JSONException
*/
public JSONObject update(String soupName, JSONObject soupElt, long soupEntryId, boolean handleTx) throws JSONException {
synchronized(SmartStore.class) {
String soupTableName = DBHelper.INSTANCE.getSoupTableName(db, soupName);
if (soupTableName == null) throw new SmartStoreException("Soup: " + soupName + " does not exist");
IndexSpec[] indexSpecs = DBHelper.INSTANCE.getIndexSpecs(db, soupName);
long now = System.currentTimeMillis();
// In the case of an upsert with external id, _soupEntryId won't be in soupElt
soupElt.put(SOUP_ENTRY_ID, soupEntryId);
// Updating last modified field in soup element
soupElt.put(SOUP_LAST_MODIFIED_DATE, now);
// Preparing data for row
ContentValues contentValues = new ContentValues();
contentValues.put(SOUP_COL, soupElt.toString());
contentValues.put(LAST_MODIFIED_COL, now);
for (IndexSpec indexSpec : indexSpecs) {
projectIndexedPaths(soupElt, contentValues, indexSpec);
}
try {
if (handleTx) {
db.beginTransaction();
}
boolean success = DBHelper.INSTANCE.update(db, soupTableName, contentValues, ID_PREDICATE, soupEntryId + "") == 1;
if (success) {
if (handleTx) {
db.setTransactionSuccessful();
}
return soupElt;
}
else {
return null;
}
}
finally {
if (handleTx) {
db.endTransaction();
}
}
}
}
/**
* Upsert (and commits)
* @param soupName
* @param soupElt
* @param externalIdPath
* @return soupElt upserted or null if upsert failed
* @throws JSONException
*/
public JSONObject upsert(String soupName, JSONObject soupElt, String externalIdPath) throws JSONException {
synchronized(SmartStore.class) {
return upsert(soupName, soupElt, externalIdPath, true);
}
}
/**
* Upsert (and commits) expecting _soupEntryId in soupElt for updates
* @param soupName
* @param soupElt
* @return soupElt upserted or null if upsert failed
* @throws JSONException
*/
public JSONObject upsert(String soupName, JSONObject soupElt) throws JSONException {
synchronized(SmartStore.class) {
return upsert(soupName, soupElt, SOUP_ENTRY_ID);
}
}
/**
* Upsert
* @param soupName
* @param soupElt
* @param externalIdPath
* @param handleTx
* @return soupElt upserted or null if upsert failed
* @throws JSONException
*/
public JSONObject upsert(String soupName, JSONObject soupElt, String externalIdPath, boolean handleTx) throws JSONException {
synchronized(SmartStore.class) {
long entryId = -1;
if (externalIdPath.equals(SOUP_ENTRY_ID)) {
if (soupElt.has(SOUP_ENTRY_ID)) {
entryId = soupElt.getLong(SOUP_ENTRY_ID);
}
}
else {
Object externalIdObj = project(soupElt, externalIdPath);
if (externalIdObj != null) {
entryId = lookupSoupEntryId(soupName, externalIdPath, externalIdObj + "");
}
}
// If we have an entryId, let's do an update, otherwise let's do a create
if (entryId != -1) {
return update(soupName, soupElt, entryId, handleTx);
}
else {
return create(soupName, soupElt, handleTx);
}
}
}
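// For example (illustrative): upsert("accounts", elt, "Id") looks for an existing entry
// whose indexed "Id" field matches elt's "Id" value and updates it; if none is found, a
// new soup entry is created instead.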
/**
* Look for a soup element where fieldPath's value is fieldValue
* Return its soupEntryId
* Return -1 if not found
* Throw an exception if fieldPath is not indexed
* Throw an exception if more than one soup element is found
*
* @param soupName
* @param fieldPath
* @param fieldValue
*/
public long lookupSoupEntryId(String soupName, String fieldPath, String fieldValue) {
synchronized(SmartStore.class) {
String soupTableName = DBHelper.INSTANCE.getSoupTableName(db, soupName);
if (soupTableName == null) throw new SmartStoreException("Soup: " + soupName + " does not exist");
String columnName = DBHelper.INSTANCE.getColumnNameForPath(db, soupName, fieldPath);
Cursor cursor = null;
try {
cursor = db.query(soupTableName, new String[] {ID_COL}, columnName + " = ?", new String[] { fieldValue }, null, null, null);
if (cursor.getCount() > 1) {
throw new SmartStoreException(String.format("There are more than one soup elements where %s is %s", fieldPath, fieldValue));
}
if (cursor.moveToFirst()) {
return cursor.getLong(0);
}
else {
return -1; // not found
}
}
finally {
safeClose(cursor);
}
}
}
/**
* Delete (and commits)
* @param soupName
* @param soupEntryIds
*/
public void delete(String soupName, Long... soupEntryIds) {
synchronized(SmartStore.class) {
delete(soupName, soupEntryIds, true);
}
}
/**
* Delete
* @param soupName
* @param soupEntryIds
* @param handleTx
*/
public void delete(String soupName, Long[] soupEntryIds, boolean handleTx) {
synchronized(SmartStore.class) {
String soupTableName = DBHelper.INSTANCE.getSoupTableName(db, soupName);
if (soupTableName == null) throw new SmartStoreException("Soup: " + soupName + " does not exist");
if (handleTx) {
db.beginTransaction();
}
try {
db.delete(soupTableName, getSoupEntryIdsPredicate(soupEntryIds), (String []) null);
if (handleTx) {
db.setTransactionSuccessful();
}
}
finally {
if (handleTx) {
db.endTransaction();
}
}
}
}
/**
* @return predicate to match soup entries by id
*/
private String getSoupEntryIdsPredicate(Long[] soupEntryIds) {
return ID_COL + " IN (" + TextUtils.join(",", soupEntryIds)+ ")";
}
/**
* @param soupId
* @return name of the database table backing the soup with the given id
*/
public static String getSoupTableName(long soupId) {
return "TABLE_" + soupId;
}
/**
* @param cursor
*/
private void safeClose(Cursor cursor) {
if (cursor != null) {
cursor.close();
}
}
/**
* @param soup
* @param path
* @return object at path in soup
*/
public static Object project(JSONObject soup, String path) {
if (soup == null) {
return null;
}
if (path == null || path.equals("")) {
return soup;
}
String[] pathElements = path.split("[.]");
Object o = soup;
for (String pathElement : pathElements) {
if (o != null) {
o = ((JSONObject) o).opt(pathElement);
}
}
return o;
}
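// For example (illustrative):
//
//   JSONObject soup = new JSONObject("{\"owner\":{\"name\":\"Acme\"}}");
//   Object name = SmartStore.project(soup, "owner.name"); // -> "Acme"
//   Object missing = SmartStore.project(soup, "owner.city"); // -> null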
/**
* Enum for column type
*/
public enum Type {
string("TEXT"), integer("INTEGER"), floating("REAL");
private String columnType;
private Type(String columnType) {
this.columnType = columnType;
}
public String getColumnType() {
return columnType;
}
}
/**
* Exception thrown by smart store
*
*/
public static class SmartStoreException extends RuntimeException {
public SmartStoreException(String message) {
super(message);
}
private static final long serialVersionUID = -6369452803270075464L;
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ambari.server.state.alerts;
import javax.persistence.EntityManager;
import org.apache.ambari.server.H2DatabaseCleaner;
import org.apache.ambari.server.events.AlertReceivedEvent;
import org.apache.ambari.server.events.AlertStateChangeEvent;
import org.apache.ambari.server.events.MockEventListener;
import org.apache.ambari.server.events.listeners.alerts.AlertAggregateListener;
import org.apache.ambari.server.orm.GuiceJpaInitializer;
import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
import org.apache.ambari.server.orm.dao.AlertSummaryDTO;
import org.apache.ambari.server.orm.dao.AlertsDAO;
import org.apache.ambari.server.orm.entities.AlertCurrentEntity;
import org.apache.ambari.server.orm.entities.AlertHistoryEntity;
import org.apache.ambari.server.state.Alert;
import org.apache.ambari.server.state.AlertFirmness;
import org.apache.ambari.server.state.alert.AggregateDefinitionMapping;
import org.apache.ambari.server.state.alert.AggregateSource;
import org.apache.ambari.server.state.alert.AlertDefinition;
import org.apache.ambari.server.state.alert.Reporting;
import org.apache.ambari.server.state.alert.Reporting.ReportTemplate;
import org.apache.ambari.server.utils.EventBusSynchronizer;
import org.easymock.EasyMock;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import com.google.inject.Binder;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.Module;
import com.google.inject.util.Modules;
import junit.framework.Assert;
/**
* Tests the {@link AlertAggregateListener}.
*/
@Category({ category.AlertTest.class})
public class AggregateAlertListenerTest {
private Injector m_injector;
private MockEventListener m_listener;
private AlertsDAO m_alertsDao;
private AggregateDefinitionMapping m_aggregateMapping;
/**
* Sets up the injector, DAO mocks, and a synchronous event bus before each test.
*/
@Before
public void setup() throws Exception {
m_injector = Guice.createInjector(Modules.override(
new InMemoryDefaultTestModule()).with(new MockModule()));
m_injector.getInstance(GuiceJpaInitializer.class);
m_listener = m_injector.getInstance(MockEventListener.class);
m_alertsDao = m_injector.getInstance(AlertsDAO.class);
// !!! need a synchronous op for testing
EventBusSynchronizer.synchronizeAlertEventPublisher(m_injector).register(m_listener);
EventBusSynchronizer.synchronizeAmbariEventPublisher(m_injector).register(m_listener);
}
/**
* @throws Exception
*/
@After
public void teardown() throws Exception {
H2DatabaseCleaner.clearDatabase(m_injector.getProvider(EntityManager.class).get());
m_injector = null;
}
/**
* Tests that the {@link AlertAggregateListener} caches values of the
* aggregates and only triggers events when needed.
*
* @throws Exception
*/
@Test
public void testAlertNoticeCreationFromEvent() throws Exception {
AlertDefinition aggregateDefinition = getAggregateAlertDefinition();
AlertCurrentEntity currentEntityMock = EasyMock.createNiceMock(AlertCurrentEntity.class);
AlertHistoryEntity historyEntityMock = EasyMock.createNiceMock(AlertHistoryEntity.class);
EasyMock.expect(currentEntityMock.getAlertHistory()).andReturn(historyEntityMock).atLeastOnce();
EasyMock.expect(
m_aggregateMapping.getAggregateDefinition(EasyMock.anyLong(), EasyMock.eq("mock-alert"))).andReturn(
aggregateDefinition).atLeastOnce();
AlertSummaryDTO summaryDTO = new AlertSummaryDTO(5, 0, 0, 0, 0);
EasyMock.expect(
m_alertsDao.findAggregateCounts(EasyMock.anyLong(), EasyMock.eq("mock-aggregate-alert"))).andReturn(
summaryDTO).atLeastOnce();
EasyMock.replay(m_alertsDao, m_aggregateMapping, currentEntityMock);
// check that we're starting at 0
Assert.assertEquals(0, m_listener.getAlertEventReceivedCount(AlertReceivedEvent.class));
// trigger an alert which will trigger the aggregate
Alert alert = new Alert("mock-alert", null, null, null, null, null);
AlertAggregateListener aggregateListener = m_injector.getInstance(AlertAggregateListener.class);
AlertStateChangeEvent event = new AlertStateChangeEvent(0, alert, currentEntityMock, null,
AlertFirmness.HARD);
aggregateListener.onAlertStateChangeEvent(event);
// verify that one AlertReceivedEvent was fired (it's the one the listener
// creates for the aggregate)
Assert.assertEquals(1, m_listener.getAlertEventReceivedCount(AlertReceivedEvent.class));
// fire the same alert event again; the cache in the aggregate listener
// should prevent it from firing a new alert received event of its own
aggregateListener.onAlertStateChangeEvent(event);
// check that we're still at 1
Assert.assertEquals(1, m_listener.getAlertEventReceivedCount(AlertReceivedEvent.class));
// now change the returned summary DTO so that a new alert will get generated
summaryDTO.setOkCount(0);
summaryDTO.setCriticalCount(5);
aggregateListener.onAlertStateChangeEvent(event);
Assert.assertEquals(2, m_listener.getAlertEventReceivedCount(AlertReceivedEvent.class));
}
/**
* Tests that the {@link AlertAggregateListener} disregards
* {@link AlertFirmness#SOFT} alerts.
*
* @throws Exception
*/
@Test
public void testNoAggregateCalculationOnSoftAlert() throws Exception {
AlertDefinition aggregateDefinition = getAggregateAlertDefinition();
AlertCurrentEntity currentEntityMock = EasyMock.createNiceMock(AlertCurrentEntity.class);
AlertHistoryEntity historyEntityMock = EasyMock.createNiceMock(AlertHistoryEntity.class);
EasyMock.expect(currentEntityMock.getAlertHistory()).andReturn(historyEntityMock).atLeastOnce();
EasyMock.expect(currentEntityMock.getFirmness()).andReturn(AlertFirmness.SOFT).atLeastOnce();
EasyMock.expect(m_aggregateMapping.getAggregateDefinition(EasyMock.anyLong(),
EasyMock.eq("mock-alert"))).andReturn(aggregateDefinition).atLeastOnce();
AlertSummaryDTO summaryDTO = new AlertSummaryDTO(5, 0, 0, 0, 0);
EasyMock.expect(m_alertsDao.findAggregateCounts(EasyMock.anyLong(),
EasyMock.eq("mock-aggregate-alert"))).andReturn(summaryDTO).atLeastOnce();
EasyMock.replay(m_alertsDao, m_aggregateMapping, currentEntityMock);
// check that we're starting at 0
Assert.assertEquals(0, m_listener.getAlertEventReceivedCount(AlertReceivedEvent.class));
// trigger an alert which would normally trigger the aggregate, except that
// the alert will be SOFT and should not cause a recalculation
Alert alert = new Alert("mock-alert", null, null, null, null, null);
AlertAggregateListener aggregateListener = m_injector.getInstance(AlertAggregateListener.class);
AlertStateChangeEvent event = new AlertStateChangeEvent(0, alert, currentEntityMock, null,
AlertFirmness.HARD);
aggregateListener.onAlertStateChangeEvent(event);
// ensure that the aggregate listener did not trigger an alert in response
// to the SOFT alert
Assert.assertEquals(0, m_listener.getAlertEventReceivedCount(AlertReceivedEvent.class));
}
/**
* Gets a mocked {@link AlertDefinition}.
*
* @return an aggregate alert definition with reporting thresholds populated
*/
private AlertDefinition getAggregateAlertDefinition() {
// setup the mocks for the aggregate definition to avoid NPEs
AlertDefinition aggregateDefinition = new AlertDefinition();
aggregateDefinition.setName("mock-aggregate-alert");
AggregateSource aggregateSource = new AggregateSource();
aggregateSource.setAlertName("mock-aggregate-alert");
Reporting reporting = new Reporting();
ReportTemplate criticalTemplate = new ReportTemplate();
ReportTemplate okTemplate = new ReportTemplate();
criticalTemplate.setValue(.05);
criticalTemplate.setText("CRITICAL");
okTemplate.setText("OK");
reporting.setCritical(criticalTemplate);
reporting.setWarning(criticalTemplate);
reporting.setOk(okTemplate);
aggregateSource.setReporting(reporting);
aggregateDefinition.setSource(aggregateSource);
return aggregateDefinition;
}
/**
* Guice module that binds mock instances of the alerts DAO and aggregate mapping.
*/
private class MockModule implements Module {
/**
* {@inheritDoc}
*/
@Override
public void configure(Binder binder) {
m_alertsDao = EasyMock.createMock(AlertsDAO.class);
m_aggregateMapping = EasyMock.createMock(AggregateDefinitionMapping.class);
binder.bind(AlertsDAO.class).toInstance(m_alertsDao);
binder.bind(AggregateDefinitionMapping.class).toInstance(m_aggregateMapping);
}
}
}
|
|
//====================================================================
//
//File: $RCSfile: T.java,v $
//Version: $Revision: 1.1.20.1 $
//Modified: $Date: 2013/01/02 18:02:12 $
//
//====================================================================
package lib;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.text.CharacterIterator;
import java.text.StringCharacterIterator;
import java.util.regex.MatchResult;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.variables.VariablesPlugin;
import org.xtuml.bp.core.AttributeValue_c;
import org.xtuml.bp.core.Attribute_c;
import org.xtuml.bp.core.BlockInStackFrame_c;
import org.xtuml.bp.core.ComponentPackage_c;
import org.xtuml.bp.core.Component_c;
import org.xtuml.bp.core.CorePlugin;
import org.xtuml.bp.core.DataType_c;
import org.xtuml.bp.core.DomainAsComponent_c;
import org.xtuml.bp.core.Domain_c;
import org.xtuml.bp.core.InstanceHandle_c;
import org.xtuml.bp.core.InstanceInReference_c;
import org.xtuml.bp.core.InstanceReferenceValue_c;
import org.xtuml.bp.core.Instance_c;
import org.xtuml.bp.core.LocalReference_c;
import org.xtuml.bp.core.LocalValue_c;
import org.xtuml.bp.core.Local_c;
import org.xtuml.bp.core.ModelClass_c;
import org.xtuml.bp.core.Ooaofooa;
import org.xtuml.bp.core.RuntimeValue_c;
import org.xtuml.bp.core.SimpleValue_c;
import org.xtuml.bp.core.StackFrame_c;
import org.xtuml.bp.core.Subsystem_c;
import org.xtuml.bp.core.SystemModel_c;
import org.xtuml.bp.core.TransientVar_c;
import org.xtuml.bp.core.Variable_c;
import org.xtuml.bp.core.common.ClassQueryInterface_c;
public class T {
private static String outputBuffer = new String();
private static String templatePath = "";
public static void t() {
Ooaofooa ooaofooa = Ooaofooa.getDefaultInstance();
SystemModel_c mdl = SystemModel_c.SystemModelInstance(ooaofooa, new ClassQueryInterface_c() {
@Override
public boolean evaluate(Object arg0) {
CorePlugin.out.println(((SystemModel_c)arg0).getName());
return ((SystemModel_c)arg0).getName().toLowerCase().contains("watch");
}
});
Attribute_c[] attrs = Attribute_c.getManyO_ATTRsOnR102(
ModelClass_c.getManyO_OBJsOnR2(
Subsystem_c.getManyS_SSsOnR1(
Domain_c.getManyS_DOMsOnR4204(
DomainAsComponent_c.getManyCN_DCsOnR4204(
Component_c.getManyC_CsOnR4604(
ComponentPackage_c.getManyCP_CPsOnR4602(mdl)))))));
for (Attribute_c attr : attrs) {
DataType_c data = DataType_c.getOneS_DTOnR114(attr);
CorePlugin.out.println(data == null ? attr.getName() + " has no datatype" : attr.getName() + ":" + data.getName());
}
}
public static void include(StackFrame_c stackFrame, String templateFileName) {
// Obtain the set of local values in the calling context
Object persistenceResource = stackFrame.getModelRoot().getPersistenceFile();
if (persistenceResource instanceof IFile) {
templatePath = ((IFile)persistenceResource).getProject().getLocation().append("gen\\").toOSString();
}
// Open the file
File templateFile = new File(templatePath + templateFileName);
try {
BufferedReader br = new BufferedReader(new FileReader(templateFile));
// Get ready to read and match
String inputBuffer = "";
while (inputBuffer != null) {
inputBuffer = br.readLine();
if (inputBuffer != null && inputBuffer.length() > 0) {
char lastChar = inputBuffer.charAt(inputBuffer.length() - 1);
outputBuffer += process(stackFrame, inputBuffer.substring(0, inputBuffer.length() - 1)
+ (lastChar == '\\' ? "" : lastChar + "\n"));
}
}
br.close();
}
catch (FileNotFoundException fne) {
CorePlugin.logError("File not found.", fne);
}
catch (IOException ioe) {
CorePlugin.logError("Unable to read file.", ioe);
}
}
public static String template(StackFrame_c stackFrame, String templateFileName, boolean indent) {
// Obtain the set of local values in the calling context
String outputBuffer = "";
Object persistenceResource = stackFrame.getModelRoot().getPersistenceFile();
if (persistenceResource instanceof IFile) {
templatePath = ((IFile)persistenceResource).getProject().
getLocation().append("gen\\").toOSString();
}
// Open the file
File templateFile = new File(templatePath + templateFileName);
try {
BufferedReader br = new BufferedReader(new FileReader(templateFile));
// Get ready to read and match
String inputBuffer = "";
while (inputBuffer != null) {
inputBuffer = br.readLine();
if (inputBuffer != null && inputBuffer.length() > 0) {
char lastChar = inputBuffer.charAt(inputBuffer.length() - 1);
outputBuffer += process(stackFrame, inputBuffer.substring(0, inputBuffer.length() - 1)
+ (lastChar == '\\' ? "" : lastChar + "\n"));
}
}
// post process string
if (indent) {
outputBuffer = " " + outputBuffer.replaceAll("\\n", "\n ");
if (" ".equals(outputBuffer.substring(outputBuffer.length() - 4))) {
outputBuffer = outputBuffer.substring(0, outputBuffer.length() - 4);
}
}
br.close();
}
catch (FileNotFoundException fne) {
CorePlugin.logError("File not found.", fne);
}
catch (IOException ioe) {
CorePlugin.logError("Unable to read file.", ioe);
}
return outputBuffer;
}
public static void write(StackFrame_c stackFrame, String content) {
outputBuffer += process(stackFrame, content);
}
public static void print(String s){
CorePlugin.out.print(s);
}
public static void b (String s){
outputBuffer+=s;
}
public static String c(String s){
for (int i = 0; i < s.length(); i++) {
if (i == 0) {
s = String.format( "%s%s",
Character.toUpperCase(s.charAt(0)),
s.substring(1) );
}
if (!Character.isLetterOrDigit(s.charAt(i))) {
if (i + 1 < s.length()) {
s = String.format( "%s%s%s",
s.subSequence(0, i+1),
Character.toUpperCase(s.charAt(i + 1)),
s.substring(i+2) );
}
}
}
return s;
}
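// For example (illustrative): c("dog house") returns "Dog House"; the first character and
// every character following a non-alphanumeric one are upper-cased, nothing is removed.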
public static String l(String s){
return s.toLowerCase();
}
public static String r (String s){
return s.replaceAll(" ","");
}
public static String s(int i){
debugLog("Converting integer " + i + " to string");
return ""+i;
}
public static String u(String s){
return s.toUpperCase();
}
public static String u_(String s){
return s.toUpperCase().replaceAll(" ","_");
}
public static String underscore(String s){
return s.replaceAll(" ","_");
}
public static String process(StackFrame_c stackFrame, String input) {
// Set up the matching expression
// Pattern below matches:
// "$<up to 2 format characters>{<a sequence of alpha characters, possibly containing a '.'>}"
// The optional 'eclipse:' prefix to the name inside the braces allows
// specification of eclipse variable values in the output.
// Also sets up 2 to 4 groups with the useful string values.
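// For example (illustrative): given a variable obj whose Name attribute holds "dog house",
// the input "class $C{obj.Name};" expands to "class Dog House;" and "$U_{obj.Name}"
// expands to "DOG_HOUSE".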
String pattern = "\\$([UuCcLl_RrOoXx]{0,2})\\{(eclipse:)?([\\w]+)(\\.[\\w]+)?\\}";
Pattern pat = Pattern.compile(pattern);
Matcher mat = pat.matcher(input);
String result = "";
int posn = 0;
boolean eclipseVar = false;
while (mat.find(posn)) {
MatchResult res = mat.toMatchResult();
String plainText = input.substring(posn, res.start());
posn = res.end();
String formatChars = mat.group(1);
if (mat.group(2) != null) {
eclipseVar = true;
}
String instRefName = mat.group(3);
String attrName = "";
if (mat.group(4) != null) {
attrName = mat.group(4).substring(1); // lose a leading '.'
}
if (eclipseVar) {
result += plainText + format(formatChars, getEclipseVar(instRefName));
}
else {
result += plainText + format(formatChars,
getValue(stackFrame,
instRefName, attrName));
}
}
// Copy out trailing plain text
return result + input.substring(posn);
}
private static String getEclipseVar(String instRefName) {
try {
return VariablesPlugin.getDefault().getStringVariableManager().
performStringSubstitution("${" + instRefName + "}");
}
catch (CoreException ce){
CorePlugin.logError(
"Error obtaining the value of eclipse variable, '" +
instRefName +"'.", ce);
}
return null;
}
private static String format(String formatChars, String substitute) {
String toFormat = "";
String lcFormatChars = formatChars.toLowerCase();
if (substitute != null) {
toFormat = substitute;
if (lcFormatChars.contains("l")) {
toFormat = toFormat.toLowerCase();
}
else {
if (lcFormatChars.contains("u")) {
toFormat = toFormat.toUpperCase();
}
}
String [] nonWS = toFormat.split(" ", -1);
String sep = "";
toFormat = "";
for (int i = 0; i < nonWS.length; i++) {
if (lcFormatChars.contains("c")) {
// Capitalize the first letter
nonWS[i] = nonWS[i].substring(0, 1).toUpperCase() +
nonWS[i].substring(1);
}
toFormat += sep + nonWS[i];
if (lcFormatChars.contains("_")) {
sep = "_";
}
else if (!lcFormatChars.toLowerCase().contains("r")) {
// If r is _not_ present, separator is a space,
sep = " ";
}
// else it's the empty string because we want to concatenate.
}
}
if (lcFormatChars.contains("x")) {
return xmlify(toFormat);
}
else {
return toFormat;
}
}
public static String getValue(StackFrame_c stackFrame,
final String instRefName, String optionalAttrName) {
Local_c[] locals = Local_c.getManyL_LCLsOnR3000(
BlockInStackFrame_c.getManyI_BSFsOnR2923(stackFrame));
if (!optionalAttrName.isEmpty()) {
// select many variables related by locals->L_LCR[R3001]->
// V_INT[R3004]->V_VAR[R814] where selected.Name == instRefName;
class VariableTest implements ClassQueryInterface_c {
public boolean evaluate(Object candidate) {
Variable_c selected = (Variable_c) candidate;
return selected.getName().equals(instRefName);
}
}
Variable_c [] vars = Variable_c.getManyV_VARsOnR814(
InstanceHandle_c.getManyV_INTsOnR3004(
LocalReference_c.getManyL_LCRsOnR3001(locals)),
new VariableTest());
// if cardinality vars == 1
if (vars.length == 1) {
// There is exactly one variable of that name in scope
// select many filteredLocals related by vars->V_INT[R814]->
// L_LCR[R3004]->L_LCL[R3001];
Local_c [] filteredLocals = Local_c.getManyL_LCLsOnR3001(
LocalReference_c.getManyL_LCRsOnR3004(
InstanceHandle_c.getManyV_INTsOnR814(vars[0])));
// for each local in filteredLocals
boolean localFound = false;
Local_c local = null;
for (int i = 0; i < filteredLocals.length; i++) {
local = filteredLocals[i];
// for each passedLocal in locals
for (int j = 0; j < locals.length; j++) {
Local_c passedLocal = locals[j];
// if local == passedLocal
if (local.equals(passedLocal)) {
localFound = true;
break;
}
}
if (localFound) {
break;
}
}
if (localFound) {
// select many instances related by local->RV_RVL[R3306]->
// RV_SMV[R3300]->RV_IRV[R3308]->L_IIR[R3311]->I_INS[R3013];
Instance_c [] instances = Instance_c.getManyI_INSsOnR3013(
InstanceInReference_c.getManyL_IIRsOnR3311(
InstanceReferenceValue_c.getOneRV_IRVOnR3308(
SimpleValue_c.getOneRV_SMVOnR3300(
RuntimeValue_c.getOneRV_RVLOnR3306(
local)))));
// if cardinality instances == 1
if (instances.length == 1) {
// select many attribute values related by instances->
// I_AVL[R2909];
AttributeValue_c [] avls =
AttributeValue_c.getManyI_AVLsOnR2909(instances[0]);
// for each avl in avls
for (int i = 0; i < avls.length; i++) {
AttributeValue_c avl = avls[i];
// select one attr related by avl->O_ATTR[R2910];
Attribute_c attr = Attribute_c.getOneO_ATTROnR2910(avl);
// if attr.Name == attrName
if (attr.getName().equals(optionalAttrName)) {
return avl.getValue();
}
}
// If we get here, the attribute name was not found
CorePlugin.logError(
"Cannot find attribute value for " + instRefName + "."
+ optionalAttrName +
". The attribute value must have been written by Verifier," +
" even if the desired value is empty.", null);
}
else {
CorePlugin.logError(
"Cannot reference a set in a template file.", null);
}
}
else {
if (vars.length == 0) {
CorePlugin.logError("Variable " + instRefName +
" not found.", null);
}
else {
CorePlugin.logError("More than one variable " +
instRefName + " found.", null);
}
}
}
}
else {
// select many variables related by locals->L_LVL[R3001]->
// V_TRN[R3005]->V_VAR[R814] where selected.Name == instRefName;
class VariableTest implements ClassQueryInterface_c {
public boolean evaluate(Object candidate) {
Variable_c selected = (Variable_c) candidate;
return selected.getName().equals(instRefName);
}
}
Variable_c [] vars = Variable_c.getManyV_VARsOnR814(
TransientVar_c.getManyV_TRNsOnR3005(
LocalValue_c.getManyL_LVLsOnR3001(locals)),
new VariableTest());
// if cardinality vars == 1
if (vars.length == 1) {
// There is exactly one variable of that name in scope
// select many filteredLocals related by vars->V_TRN[R814]->
// L_LVL[R3005]->L_LCL[R3001];
Local_c [] filteredLocals = Local_c.getManyL_LCLsOnR3001(
LocalValue_c.getManyL_LVLsOnR3005(
TransientVar_c.getManyV_TRNsOnR814(vars[0])));
// for each local in filteredLocals
boolean localFound = false;
Local_c local = null;
for (int i = 0; i < filteredLocals.length; i++) {
local = filteredLocals[i];
// for each passedLocal in locals
for (int j = 0; j < locals.length; j++) {
Local_c passedLocal = locals[j];
// if local == passedLocal
if (local.equals(passedLocal)) {
localFound = true;
break;
}
}
if (localFound) {
break;
}
}
if (localFound) {
RuntimeValue_c rv = RuntimeValue_c.getOneRV_RVLOnR3306(local);
Object value = rv.Getvalue();
if (value instanceof String || value instanceof Integer ||
value instanceof Float) {
return rv.Getvalue().toString();
}
else {
CorePlugin.logError("Substitution variable: " +
instRefName +
" must be a string, integer or real value.", null);
}
}
else {
CorePlugin.logError("Substitution variable: " +
instRefName + " not found.", null);
}
}
else {
if (vars.length == 0) {
CorePlugin.logError("Variable " + instRefName +
" not found.", null);
}
else {
CorePlugin.logError("More than one variable " +
instRefName + " found.", null);
}
}
}
return null;
}
private static String xmlify(String s) {
debugLog("xmlify-ing string " + s);
String result = null;
if (s != null) {
result = new String();
StringCharacterIterator sci = new StringCharacterIterator(s);
char character = sci.current();
while (character != CharacterIterator.DONE) {
debugLog(" - " + character);
if (character == '<') {
result += "&lt;";
}
else if (character == '>') {
result += "&gt;";
}
else if (character == '&') {
result += "&amp;";
}
else if (character == '\"') {
result += "&quot;";
}
else if (character == '\'') {
result += "&apos;";
}
else {
result += character;
}
character = sci.next();
}
}
debugLog(" ** returning string " + result);
return result;
}
public static void emit(StackFrame_c stackFrame, String outputFileName) {
Object persistenceResource = stackFrame.getModelRoot().getPersistenceFile();
if (persistenceResource instanceof IFile) {
templatePath = ((IFile)persistenceResource).getProject().
getLocation().append("gen\\").toOSString();
}
File outputFile = new File(templatePath + outputFileName);
debugLog("Emitting to file: " + outputFile.toString());
//debugLog("-----");
//debugLog(outputBuffer);
///debugLog("-----");
try (BufferedWriter bw = new BufferedWriter(new FileWriter(outputFile))) {
bw.append(outputBuffer);
bw.flush();
}
catch (FileNotFoundException fne) {
CorePlugin.logError("File not found.", fne);
}
catch (IOException ioe) {
CorePlugin.logError("Unable to open file for writing.", ioe);
}
// Docgen assumes that files support appending;
// otherwise there would be a call to clear() here.
}
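// Usage sketch (the caller shown here is an assumption, not part of this file):
//   clear();                               // start with an empty buffer
//   // ... template expansion appends to outputBuffer ...
//   emit(stackFrame, "report.xml");        // write the buffer into the project's gen folder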
public static void clear() {
outputBuffer = "";
}
private static void debugLog(String msg) {
boolean debugOn = false;
if (debugOn) {
LOG.LogInfo(msg);
}
}
}
|
|
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.android.apis.view;
import android.app.ActionBar;
import android.app.Activity;
import android.app.FragmentTransaction;
import android.app.ActionBar.Tab;
import android.content.Context;
import android.content.Intent;
import android.net.Uri;
import android.os.Bundle;
import android.util.AttributeSet;
import android.util.DisplayMetrics;
import android.view.ActionMode;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;
import android.widget.CheckBox;
import android.widget.CompoundButton;
import android.widget.ImageView;
import android.widget.SearchView;
import android.widget.ShareActionProvider;
import android.widget.TextView;
import android.widget.Toast;
import android.widget.SearchView.OnQueryTextListener;
import com.example.android.apis.R;
/**
* This activity demonstrates some of the available ways to reduce the size or visual contrast of
* the system decor, in order to better focus the user's attention or use available screen real
* estate on the task at hand.
*/
public class SystemUIModes extends Activity
implements OnQueryTextListener, ActionBar.TabListener {
public static class IV extends ImageView implements View.OnSystemUiVisibilityChangeListener {
private SystemUIModes mActivity;
private ActionMode mActionMode;
public IV(Context context) {
super(context);
}
public IV(Context context, AttributeSet attrs) {
super(context, attrs);
}
public void setActivity(SystemUIModes act) {
setOnSystemUiVisibilityChangeListener(this);
mActivity = act;
}
@Override
public void onSizeChanged(int w, int h, int oldw, int oldh) {
mActivity.refreshSizes();
}
@Override
public void onSystemUiVisibilityChange(int visibility) {
mActivity.updateCheckControls();
mActivity.refreshSizes();
}
private class MyActionModeCallback implements ActionMode.Callback {
@Override public boolean onCreateActionMode(ActionMode mode, Menu menu) {
mode.setTitle("My Action Mode!");
mode.setSubtitle(null);
mode.setTitleOptionalHint(false);
menu.add("Sort By Size").setIcon(android.R.drawable.ic_menu_sort_by_size);
menu.add("Sort By Alpha").setIcon(android.R.drawable.ic_menu_sort_alphabetically);
return true;
}
@Override public boolean onPrepareActionMode(ActionMode mode, Menu menu) {
return true;
}
@Override public boolean onActionItemClicked(ActionMode mode, MenuItem item) {
return true;
}
@Override public void onDestroyActionMode(ActionMode mode) {
mActionMode = null;
mActivity.clearActionMode();
}
}
public void startActionMode() {
if (mActionMode == null) {
ActionMode.Callback cb = new MyActionModeCallback();
mActionMode = startActionMode(cb);
}
}
public void stopActionMode() {
if (mActionMode != null) {
mActionMode.finish();
}
}
}
private void setFullscreen(boolean on) {
Window win = getWindow();
WindowManager.LayoutParams winParams = win.getAttributes();
final int bits = WindowManager.LayoutParams.FLAG_FULLSCREEN;
if (on) {
winParams.flags |= bits;
} else {
winParams.flags &= ~bits;
}
win.setAttributes(winParams);
}
private void setOverscan(boolean on) {
Window win = getWindow();
WindowManager.LayoutParams winParams = win.getAttributes();
final int bits = WindowManager.LayoutParams.FLAG_LAYOUT_IN_OVERSCAN;
if (on) {
winParams.flags |= bits;
} else {
winParams.flags &= ~bits;
}
win.setAttributes(winParams);
}
private void setTranslucentStatus(boolean on) {
Window win = getWindow();
WindowManager.LayoutParams winParams = win.getAttributes();
final int bits = WindowManager.LayoutParams.FLAG_TRANSLUCENT_STATUS;
if (on) {
winParams.flags |= bits;
} else {
winParams.flags &= ~bits;
}
win.setAttributes(winParams);
}
private void setTranslucentNavigation(boolean on) {
Window win = getWindow();
WindowManager.LayoutParams winParams = win.getAttributes();
final int bits = WindowManager.LayoutParams.FLAG_TRANSLUCENT_NAVIGATION;
if (on) {
winParams.flags |= bits;
} else {
winParams.flags &= ~bits;
}
win.setAttributes(winParams);
}
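// The four window helpers above share one pattern: read the current
// WindowManager.LayoutParams, set or clear a single FLAG_* bit, and write the
// attributes back. A generic form would look like this (sketch, not part of the
// original sample):
//   private void setWindowFlag(int bits, boolean on) {
//       Window win = getWindow();
//       WindowManager.LayoutParams lp = win.getAttributes();
//       if (on) { lp.flags |= bits; } else { lp.flags &= ~bits; }
//       win.setAttributes(lp);
//   }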
private String getDisplaySize() {
DisplayMetrics dm = getResources().getDisplayMetrics();
return String.format("DisplayMetrics = (%d x %d)", dm.widthPixels, dm.heightPixels);
}
private String getViewSize() {
return String.format("View = (%d,%d - %d,%d)",
mImage.getLeft(), mImage.getTop(),
mImage.getRight(), mImage.getBottom());
}
void refreshSizes() {
mMetricsText.setText(getDisplaySize() + " " + getViewSize());
}
static int TOAST_LENGTH = 500;
IV mImage;
CheckBox[] mCheckControls = new CheckBox[8];
int[] mCheckFlags = new int[] { View.SYSTEM_UI_FLAG_LOW_PROFILE,
View.SYSTEM_UI_FLAG_FULLSCREEN, View.SYSTEM_UI_FLAG_HIDE_NAVIGATION,
View.SYSTEM_UI_FLAG_IMMERSIVE, View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY,
View.SYSTEM_UI_FLAG_LAYOUT_STABLE, View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN,
View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION
};
TextView mMetricsText;
public SystemUIModes() {
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.system_ui_modes);
mImage = (IV) findViewById(R.id.image);
mImage.setActivity(this);
CompoundButton.OnCheckedChangeListener checkChangeListener
= new CompoundButton.OnCheckedChangeListener() {
@Override public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
updateSystemUi();
}
};
mCheckControls[0] = (CheckBox) findViewById(R.id.modeLowProfile);
mCheckControls[1] = (CheckBox) findViewById(R.id.modeFullscreen);
mCheckControls[2] = (CheckBox) findViewById(R.id.modeHideNavigation);
mCheckControls[3] = (CheckBox) findViewById(R.id.modeImmersive);
mCheckControls[4] = (CheckBox) findViewById(R.id.modeImmersiveSticky);
mCheckControls[5] = (CheckBox) findViewById(R.id.layoutStable);
mCheckControls[6] = (CheckBox) findViewById(R.id.layoutFullscreen);
mCheckControls[7] = (CheckBox) findViewById(R.id.layoutHideNavigation);
for (int i=0; i<mCheckControls.length; i++) {
mCheckControls[i].setOnCheckedChangeListener(checkChangeListener);
}
((CheckBox) findViewById(R.id.windowFullscreen)).setOnCheckedChangeListener(
new CompoundButton.OnCheckedChangeListener() {
@Override
public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
setFullscreen(isChecked);
}
}
);
((CheckBox) findViewById(R.id.windowOverscan)).setOnCheckedChangeListener(
new CompoundButton.OnCheckedChangeListener() {
@Override
public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
setOverscan(isChecked);
}
}
);
((CheckBox) findViewById(R.id.windowTranslucentStatus)).setOnCheckedChangeListener(
new CompoundButton.OnCheckedChangeListener() {
@Override
public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
setTranslucentStatus(isChecked);
}
}
);
((CheckBox) findViewById(R.id.windowTranslucentNav)).setOnCheckedChangeListener(
new CompoundButton.OnCheckedChangeListener() {
@Override
public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
setTranslucentNavigation(isChecked);
}
}
);
((CheckBox) findViewById(R.id.windowHideActionBar)).setOnCheckedChangeListener(
new CompoundButton.OnCheckedChangeListener() {
@Override
public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
if (isChecked) {
getActionBar().hide();
} else {
getActionBar().show();
}
}
}
);
((CheckBox) findViewById(R.id.windowActionMode)).setOnCheckedChangeListener(
new CompoundButton.OnCheckedChangeListener() {
@Override
public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
if (isChecked) {
mImage.startActionMode();
} else {
mImage.stopActionMode();
}
}
}
);
mMetricsText = (TextView) findViewById(R.id.metricsText);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
MenuInflater inflater = getMenuInflater();
inflater.inflate(R.menu.content_actions, menu);
SearchView searchView = (SearchView) menu.findItem(R.id.action_search).getActionView();
searchView.setOnQueryTextListener(this);
// Set file with share history to the provider and set the share intent.
MenuItem actionItem = menu.findItem(R.id.menu_item_share_action_provider_action_bar);
ShareActionProvider actionProvider = (ShareActionProvider) actionItem.getActionProvider();
actionProvider.setShareHistoryFileName(ShareActionProvider.DEFAULT_SHARE_HISTORY_FILE_NAME);
// Note that you can set/change the intent any time,
// say when the user has selected an image.
Intent shareIntent = new Intent(Intent.ACTION_SEND);
shareIntent.setType("image/*");
Uri uri = Uri.fromFile(getFileStreamPath("shared.png"));
shareIntent.putExtra(Intent.EXTRA_STREAM, uri);
actionProvider.setShareIntent(shareIntent);
return true;
}
@Override
public void onAttachedToWindow() {
updateCheckControls();
}
@Override
protected void onResume() {
super.onResume();
}
public void onSort(MenuItem item) {
}
@Override
public boolean onQueryTextChange(String newText) {
return true;
}
@Override
public boolean onQueryTextSubmit(String query) {
Toast.makeText(this, "Searching for: " + query + "...", Toast.LENGTH_SHORT).show();
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case R.id.show_tabs:
getActionBar().setNavigationMode(ActionBar.NAVIGATION_MODE_TABS);
item.setChecked(true);
return true;
case R.id.hide_tabs:
getActionBar().setNavigationMode(ActionBar.NAVIGATION_MODE_STANDARD);
item.setChecked(true);
return true;
}
return false;
}
@Override
public void onTabSelected(Tab tab, FragmentTransaction ft) {
}
@Override
public void onTabUnselected(Tab tab, FragmentTransaction ft) {
}
@Override
public void onTabReselected(Tab tab, FragmentTransaction ft) {
}
public void updateCheckControls() {
int visibility = mImage.getSystemUiVisibility();
for (int i=0; i<mCheckControls.length; i++) {
mCheckControls[i].setChecked((visibility&mCheckFlags[i]) != 0);
}
}
public void updateSystemUi() {
int visibility = 0;
for (int i=0; i<mCheckControls.length; i++) {
if (mCheckControls[i].isChecked()) {
visibility |= mCheckFlags[i];
}
}
mImage.setSystemUiVisibility(visibility);
}
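// Note: mCheckControls[i] and mCheckFlags[i] are parallel arrays, so
// updateCheckControls() reads the current visibility bitmask into the checkboxes
// and updateSystemUi() rebuilds the bitmask from them. For example, checking only
// "modeFullscreen" and "layoutStable" applies
// View.SYSTEM_UI_FLAG_FULLSCREEN | View.SYSTEM_UI_FLAG_LAYOUT_STABLE.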
public void clearActionMode() {
((CheckBox) findViewById(R.id.windowActionMode)).setChecked(false);
}
}
|
|
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.fileEditor;
import com.intellij.ide.ui.UISettings;
import com.intellij.ide.ui.UISettingsState;
import com.intellij.mock.Mock;
import com.intellij.openapi.editor.*;
import com.intellij.openapi.fileEditor.impl.EditorWindow;
import com.intellij.openapi.fileEditor.impl.EditorsSplitters;
import com.intellij.openapi.project.DumbAware;
import com.intellij.openapi.project.DumbServiceImpl;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.pom.Navigatable;
import com.intellij.testFramework.EditorTestUtil;
import com.intellij.testFramework.FileEditorManagerTestCase;
import com.intellij.testFramework.PlatformTestUtil;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;
import javax.swing.*;
import java.util.Arrays;
import java.util.List;
public class FileEditorManagerTest extends FileEditorManagerTestCase {
public void testTabOrder() throws Exception {
openFiles(STRING);
assertOpenFiles("1.txt", "foo.xml", "2.txt", "3.txt");
}
@Override
protected void tearDown() throws Exception {
try {
UISettingsState template = new UISettingsState();
UISettingsState uiSettings = UISettings.getInstance().getState();
uiSettings.setEditorTabLimit(template.getEditorTabLimit());
uiSettings.setReuseNotModifiedTabs(template.getReuseNotModifiedTabs());
uiSettings.setEditorTabPlacement(template.getEditorTabPlacement());
}
catch (Throwable e) {
addSuppressedException(e);
}
finally {
super.tearDown();
}
}
public void testTabLimit() throws Exception {
UISettings.getInstance().getState().setEditorTabLimit(2);
openFiles(STRING);
// note that foo.xml is pinned
assertOpenFiles("foo.xml", "3.txt");
}
public void testSingleTabLimit() throws Exception {
UISettings.getInstance().getState().setEditorTabLimit(1);
openFiles(STRING.replace("pinned=\"true\"", "pinned=\"false\""));
assertOpenFiles("3.txt");
myManager.closeAllFiles();
openFiles(STRING);
// note that foo.xml is pinned
assertOpenFiles("foo.xml");
myManager.openFile(getFile("/src/3.txt"), true);
assertOpenFiles("3.txt", "foo.xml");//limit is still 1 but pinned prevent closing tab and actual tab number may exceed the limit
myManager.closeAllFiles();
myManager.openFile(getFile("/src/3.txt"), true);
myManager.openFile(getFile("/src/foo.xml"), true);
assertOpenFiles("foo.xml");
callTrimToSize();
assertOpenFiles("foo.xml");
}
public void testReuseNotModifiedTabs() {
UISettingsState uiSettings = UISettings.getInstance().getState();
uiSettings.setEditorTabLimit(2);
uiSettings.setReuseNotModifiedTabs(false);
myManager.openFile(getFile("/src/3.txt"), true);
myManager.openFile(getFile("/src/foo.xml"), true);
assertOpenFiles("3.txt", "foo.xml");
uiSettings.setEditorTabLimit(1);
callTrimToSize();
assertOpenFiles("foo.xml");
uiSettings.setEditorTabLimit(2);
myManager.closeAllFiles();
uiSettings.setReuseNotModifiedTabs(true);
myManager.openFile(getFile("/src/3.txt"), true);
assertOpenFiles("3.txt");
myManager.openFile(getFile("/src/foo.xml"), true);
assertOpenFiles("foo.xml");
}
private void callTrimToSize() {
for (EditorsSplitters each : myManager.getAllSplitters()) {
each.trimToSize();
}
}
public void testOpenRecentEditorTab() throws Exception {
FileEditorProvider.EP_FILE_EDITOR_PROVIDER
.getPoint(null).registerExtension(new MyFileEditorProvider(), myFixture.getTestRootDisposable());
openFiles(" <component name=\"FileEditorManager\">\n" +
" <leaf>\n" +
" <file pinned=\"false\" current=\"true\" current-in-tab=\"true\">\n" +
" <entry selected=\"true\" file=\"file://$PROJECT_DIR$/src/1.txt\">\n" +
" <provider editor-type-id=\"mock\" selected=\"true\">\n" +
" <state />\n" +
" </provider>\n" +
" <provider editor-type-id=\"text-editor\">\n" +
" <state/>\n" +
" </provider>\n" +
" </entry>\n" +
" </file>\n" +
" </leaf>\n" +
" </component>\n");
FileEditor[] selectedEditors = myManager.getSelectedEditors();
assertEquals(1, selectedEditors.length);
assertEquals("mockEditor", selectedEditors[0].getName());
}
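// The "mock" provider registered above ends up selected because its <provider>
// entry in the persisted state is marked selected="true", so reopening the file
// restores that editor rather than the default text editor.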
public void testTrackSelectedEditor() {
FileEditorProvider.EP_FILE_EDITOR_PROVIDER
.getPoint(null).registerExtension(new MyFileEditorProvider(), myFixture.getTestRootDisposable());
VirtualFile file = getFile("/src/1.txt");
assertNotNull(file);
FileEditor[] editors = myManager.openFile(file, true);
assertEquals(2, editors.length);
assertEquals("Text", myManager.getSelectedEditor(file).getName());
myManager.setSelectedEditor(file, "mock");
assertEquals("mockEditor", myManager.getSelectedEditor(file).getName());
VirtualFile file1 = getFile("/src/2.txt");
myManager.openFile(file1, true);
assertEquals("mockEditor", myManager.getSelectedEditor(file).getName());
}
public void testWindowClosingRetainsOtherWindows() {
VirtualFile file = getFile("/src/1.txt");
assertNotNull(file);
myManager.openFile(file, false);
EditorWindow primaryWindow = myManager.getCurrentWindow();
assertNotNull(primaryWindow);
myManager.createSplitter(SwingConstants.VERTICAL, primaryWindow);
EditorWindow secondaryWindow = myManager.getNextWindow(primaryWindow);
assertNotNull(secondaryWindow);
myManager.createSplitter(SwingConstants.VERTICAL, secondaryWindow);
myManager.closeFile(file, primaryWindow);
assertEquals(2, myManager.getWindows().length);
}
public void testOpenFileInTablessSplitter() {
VirtualFile file1 = getFile("/src/1.txt");
assertNotNull(file1);
file1.putUserData(EditorWindow.INITIAL_INDEX_KEY, null);
myManager.openFile(file1, false);
VirtualFile file2 = getFile("/src/2.txt");
file2.putUserData(EditorWindow.INITIAL_INDEX_KEY, null);
assertNotNull(file2);
myManager.openFile(file2, true);
EditorWindow primaryWindow = myManager.getCurrentWindow();//1.txt and selected 2.txt
assertNotNull(primaryWindow);
myManager.createSplitter(SwingConstants.VERTICAL, primaryWindow);
EditorWindow secondaryWindow = myManager.getNextWindow(primaryWindow);//2.txt only, selected and focused
assertNotNull(secondaryWindow);
UISettings.getInstance().setEditorTabPlacement(UISettings.TABS_NONE);
myManager.openFileWithProviders(file1, true, true);//Here we have to ignore 'searchForSplitter'
assertEquals(2, primaryWindow.getTabCount());
assertEquals(2, secondaryWindow.getTabCount());
assertOrderedEquals(primaryWindow.getFiles(), file1, file2);
assertOrderedEquals(secondaryWindow.getFiles(), file2, file1);
}
public void testStoringCaretStateForFileWithFoldingsWithNoTabs() {
UISettings.getInstance().setEditorTabPlacement(UISettings.TABS_NONE);
VirtualFile file = getFile("/src/Test.java");
assertNotNull(file);
FileEditor[] editors = myManager.openFile(file, false);
assertEquals(1, editors.length);
assertTrue(editors[0] instanceof TextEditor);
Editor editor = ((TextEditor)editors[0]).getEditor();
EditorTestUtil.waitForLoading(editor);
final FoldingModel foldingModel = editor.getFoldingModel();
assertEquals(2, foldingModel.getAllFoldRegions().length);
foldingModel.runBatchFoldingOperation(() -> {
for (FoldRegion region : foldingModel.getAllFoldRegions()) {
region.setExpanded(false);
}
});
int textLength = editor.getDocument().getTextLength();
editor.getCaretModel().moveToOffset(textLength);
editor.getSelectionModel().setSelection(textLength - 1, textLength);
myManager.openFile(getFile("/src/1.txt"), false);
assertEquals(1, myManager.getEditors(file).length);
editors = myManager.openFile(file, false);
assertEquals(1, editors.length);
assertTrue(editors[0] instanceof TextEditor);
editor = ((TextEditor)editors[0]).getEditor();
EditorTestUtil.waitForLoading(editor);
assertEquals(textLength, editor.getCaretModel().getOffset());
assertEquals(textLength - 1, editor.getSelectionModel().getSelectionStart());
assertEquals(textLength, editor.getSelectionModel().getSelectionEnd());
}
public void testOpenInDumbMode() {
FileEditorProvider.EP_FILE_EDITOR_PROVIDER
.getPoint(null).registerExtension(new MyFileEditorProvider(), myFixture.getTestRootDisposable());
FileEditorProvider.EP_FILE_EDITOR_PROVIDER.getPoint(null).registerExtension(new DumbAwareProvider(), myFixture.getTestRootDisposable());
try {
DumbServiceImpl.getInstance(getProject()).setDumb(true);
VirtualFile file = getFile("/src/foo.bar");
assertEquals(1, myManager.openFile(file, false).length);
DumbServiceImpl.getInstance(getProject()).setDumb(false);
UIUtil.dispatchAllInvocationEvents();
assertEquals(2, myManager.getAllEditors(file).length);
//assertFalse(FileEditorManagerImpl.isDumbAware(editors[0]));
}
finally {
DumbServiceImpl.getInstance(getProject()).setDumb(false);
}
}
public void testOpenSpecificTextEditor() {
FileEditorProvider.EP_FILE_EDITOR_PROVIDER.getPoint(null)
.registerExtension(new MyTextEditorProvider("one", 1), myFixture.getTestRootDisposable());
FileEditorProvider.EP_FILE_EDITOR_PROVIDER.getPoint(null)
.registerExtension(new MyTextEditorProvider("two", 2), myFixture.getTestRootDisposable());
Project project = getProject();
VirtualFile file = getFile("/src/Test.java");
myManager.openTextEditor(new OpenFileDescriptor(project, file, 1), true);
assertEquals("one", myManager.getSelectedEditor(file).getName());
myManager.openTextEditor(new OpenFileDescriptor(project, file, 2), true);
assertEquals("two", myManager.getSelectedEditor(file).getName());
}
private static final String STRING = "<component name=\"FileEditorManager\">\n" +
" <leaf>\n" +
" <file pinned=\"false\" current=\"false\" current-in-tab=\"false\">\n" +
" <entry file=\"file://$PROJECT_DIR$/src/1.txt\">\n" +
" <provider selected=\"true\" editor-type-id=\"text-editor\">\n" +
" <state line=\"0\" column=\"0\" selection-start=\"0\" selection-end=\"0\" vertical-scroll-proportion=\"0.0\">\n" +
" </state>\n" +
" </provider>\n" +
" </entry>\n" +
" </file>\n" +
" <file pinned=\"true\" current=\"false\" current-in-tab=\"false\">\n" +
" <entry file=\"file://$PROJECT_DIR$/src/foo.xml\">\n" +
" <provider selected=\"true\" editor-type-id=\"text-editor\">\n" +
" <state line=\"0\" column=\"0\" selection-start=\"0\" selection-end=\"0\" vertical-scroll-proportion=\"0.0\">\n" +
" </state>\n" +
" </provider>\n" +
" </entry>\n" +
" </file>\n" +
" <file pinned=\"false\" current=\"true\" current-in-tab=\"true\">\n" +
" <entry file=\"file://$PROJECT_DIR$/src/2.txt\">\n" +
" <provider selected=\"true\" editor-type-id=\"text-editor\">\n" +
" <state line=\"0\" column=\"0\" selection-start=\"0\" selection-end=\"0\" vertical-scroll-proportion=\"0.0\">\n" +
" </state>\n" +
" </provider>\n" +
" </entry>\n" +
" </file>\n" +
" <file pinned=\"false\" current=\"false\" current-in-tab=\"false\">\n" +
" <entry file=\"file://$PROJECT_DIR$/src/3.txt\">\n" +
" <provider selected=\"true\" editor-type-id=\"text-editor\">\n" +
" <state line=\"0\" column=\"0\" selection-start=\"0\" selection-end=\"0\" vertical-scroll-proportion=\"0.0\">\n" +
" </state>\n" +
" </provider>\n" +
" </entry>\n" +
" </file>\n" +
" </leaf>\n" +
" </component>\n";
private void assertOpenFiles(String... fileNames) {
List<String> names = ContainerUtil.map(myManager.getSplitters().getEditorComposites(), composite -> composite.getFile().getName());
assertEquals(Arrays.asList(fileNames), names);
}
@Override
protected String getTestDataPath() {
return PlatformTestUtil.getPlatformTestDataPath() + "fileEditorManager";
}
static class MyFileEditorProvider implements FileEditorProvider {
@NotNull
@Override
public String getEditorTypeId() {
return "mock";
}
@Override
public boolean accept(@NotNull Project project, @NotNull VirtualFile file) {
return true;
}
@NotNull
@Override
public FileEditor createEditor(@NotNull Project project, @NotNull VirtualFile file) {
return new Mock.MyFileEditor() {
@Override
public boolean isValid() {
return true;
}
@NotNull
@Override
public JComponent getComponent() {
return new JLabel();
}
@NotNull
@Override
public String getName() {
return "mockEditor";
}
};
}
@Override
public void disposeEditor(@NotNull FileEditor editor) {
}
@NotNull
@Override
public FileEditorPolicy getPolicy() {
return FileEditorPolicy.PLACE_AFTER_DEFAULT_EDITOR;
}
}
private static class DumbAwareProvider extends MyFileEditorProvider implements DumbAware {
@NotNull
@Override
public String getEditorTypeId() {
return "dumbAware";
}
}
private static class MyTextEditorProvider implements FileEditorProvider, DumbAware {
private final String myId;
private final int myTargetOffset;
private MyTextEditorProvider(String id, int targetOffset) {
myId = id;
myTargetOffset = targetOffset;
}
@Override
public boolean accept(@NotNull Project project, @NotNull VirtualFile file) {
return true;
}
@NotNull
@Override
public FileEditor createEditor(@NotNull Project project, @NotNull VirtualFile file) {
return new MyTextEditor(FileDocumentManager.getInstance().getDocument(file), myId, myTargetOffset);
}
@NotNull
@Override
public String getEditorTypeId() {
return myId;
}
@NotNull
@Override
public FileEditorPolicy getPolicy() {
return FileEditorPolicy.HIDE_DEFAULT_EDITOR;
}
}
private static class MyTextEditor extends Mock.MyFileEditor implements TextEditor {
private final Editor myEditor;
private final String myName;
private final int myTargetOffset;
private MyTextEditor(Document document, String name, int targetOffset) {
myEditor = EditorFactory.getInstance().createEditor(document);
myName = name;
myTargetOffset = targetOffset;
}
@Override
public void dispose() {
try {
EditorFactory.getInstance().releaseEditor(myEditor);
}
finally {
super.dispose();
}
}
@NotNull
@Override
public JComponent getComponent() {
return new JLabel();
}
@NotNull
@Override
public String getName() {
return myName;
}
@NotNull
@Override
public Editor getEditor() {
return myEditor;
}
@Override
public boolean canNavigateTo(@NotNull Navigatable navigatable) {
return navigatable instanceof OpenFileDescriptor && ((OpenFileDescriptor)navigatable).getOffset() == myTargetOffset;
}
@Override
public void navigateTo(@NotNull Navigatable navigatable) {}
}
}
|
|
/*
* Copyright (c) 2008-2015, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.simulator.utils;
import com.hazelcast.simulator.test.annotations.InjectProbe;
import java.lang.annotation.Annotation;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.LinkedList;
import java.util.List;
import static java.lang.String.format;
public final class AnnotationReflectionUtils {
static final AnnotationFilter.AlwaysFilter ALWAYS_FILTER = new AnnotationFilter.AlwaysFilter();
private AnnotationReflectionUtils() {
}
public static String getProbeName(Field field) {
if (field == null) {
return null;
}
InjectProbe probeAnnotation = field.getAnnotation(InjectProbe.class);
if (probeAnnotation != null && !InjectProbe.NULL.equals(probeAnnotation.name())) {
return probeAnnotation.name();
}
return field.getName();
}
public static boolean isThroughputProbe(Field field) {
if (field == null) {
return false;
}
InjectProbe probeAnnotation = field.getAnnotation(InjectProbe.class);
if (probeAnnotation != null) {
return probeAnnotation.useForThroughput();
}
return false;
}
/**
* Searches for an optional void method of the given annotation type and skips the arguments check.
*
* @param classType Class to scan
* @param annotationType Type of the annotation
* @return the found method or <tt>null</tt> if no method was found
*/
public static Method getAtMostOneVoidMethodSkipArgsCheck(Class classType, Class<? extends Annotation> annotationType) {
return getAtMostOneMethod(classType, annotationType, ALWAYS_FILTER, null, true);
}
/**
* Searches for an optional void method without arguments of the given annotation type.
*
* @param classType Class to scan
* @param annotationType Type of the annotation
* @return the found method or <tt>null</tt> if no method was found
*/
public static Method getAtMostOneVoidMethodWithoutArgs(Class classType, Class<? extends Annotation> annotationType) {
return getAtMostOneMethod(classType, annotationType, ALWAYS_FILTER, null, false);
}
/**
* Searches for an optional void method without arguments of the given annotation type and custom annotation filter.
*
* @param classType Class to scan
* @param annotationType Type of the annotation
* @param filter {@link AnnotationFilter} to filter by annotation values
* @return the found method or <tt>null</tt> if no method was found
*/
public static Method getAtMostOneVoidMethodWithoutArgs(Class classType, Class<? extends Annotation> annotationType,
AnnotationFilter filter) {
return getAtMostOneMethod(classType, annotationType, filter, null, false);
}
/**
* Searches for an optional method without arguments of the given annotation type with a custom return type.
*
* @param classType Class to scan
* @param annotationType Type of the annotation
* @param returnType Assert the return type of the method, use <tt>null</tt> for void methods
* @return the found method or <tt>null</tt> if no method was found
*/
public static Method getAtMostOneMethodWithoutArgs(Class classType, Class<? extends Annotation> annotationType,
Class returnType) {
return getAtMostOneMethod(classType, annotationType, ALWAYS_FILTER, returnType, false);
}
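// Usage sketch (the test class, annotation and instance names are illustrative,
// not defined in this file):
//   Method setup = AnnotationReflectionUtils
//           .getAtMostOneVoidMethodWithoutArgs(MyTest.class, Setup.class);
//   if (setup != null) {
//       setup.invoke(testInstance);
//   }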
/**
* Searches for an optional method of the given annotation type.
*
* @param classType Class to scan
* @param annotationType Type of the annotation
* @param filter {@link AnnotationFilter} to filter by annotation values
* @param returnType Assert the return type of the method, use <tt>null</tt> for void methods
* @param skipArgsCheck set <tt>true</tt> if assertNoArgs should be skipped
* @return the found method or <tt>null</tt> if no method was found
*/
private static Method getAtMostOneMethod(Class classType, Class<? extends Annotation> annotationType, AnnotationFilter filter,
Class returnType, boolean skipArgsCheck) {
List<Method> methods = findMethod(classType, annotationType, filter);
if (methods == null) {
return null;
}
assertAtMostOne(methods, classType, annotationType);
Method method = methods.get(0);
method.setAccessible(true);
assertNotStatic(method);
if (returnType == null) {
assertVoidReturnType(classType, method);
} else {
assertReturnType(classType, method, returnType);
}
if (!skipArgsCheck) {
assertNoArgs(method);
}
return method;
}
/**
* Returns a list of annotated methods in a class hierarchy.
*
* Returns more than one method only if they are declared in the same class.
* As soon as at least one method has been found, no super class will be searched.
* So a child class will always overwrite the annotated methods from its superclass.
*
* @param annotation Type of the annotation to search for
* @param filter Filter to filter search result by annotation values
* @return List of found methods with this annotation or <tt>null</tt> if no methods were found
*/
private static List<Method> findMethod(Class classType, Class<? extends Annotation> annotation, AnnotationFilter filter) {
List<Method> methods = new LinkedList<Method>();
do {
findMethod(classType, annotation, filter, methods);
if (!methods.isEmpty()) {
return methods;
}
classType = classType.getSuperclass();
} while (classType != null);
return null;
}
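// Example of the hierarchy rule described above (types are hypothetical): if
// Child extends Parent and both declare a @Teardown-annotated method, only the
// method declared in Child is returned, because the search stops at the first
// class level that produces a match.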
@SuppressWarnings("unchecked")
private static void findMethod(Class searchClass, Class<? extends Annotation> annotation, AnnotationFilter filter,
List<Method> methods) {
for (Method method : searchClass.getDeclaredMethods()) {
Annotation found = method.getAnnotation(annotation);
if (found != null && filter.allowed(found)) {
methods.add(method);
}
}
}
private static void assertAtMostOne(List<Method> methods, Class classType, Class<? extends Annotation> annotation) {
if (methods.size() > 1) {
throw new ReflectionException(format("Too many methods on class %s with annotation %s", classType.getName(),
annotation.getName()));
}
}
private static void assertNotStatic(Method method) {
if (Modifier.isStatic(method.getModifiers())) {
throw new ReflectionException(format("Method %s can't be static", method.getName()));
}
}
private static void assertVoidReturnType(Class classType, Method method) {
assertReturnType(classType, method, Void.TYPE);
}
private static void assertReturnType(Class classType, Method method, Class<?> returnType) {
if (returnType.isAssignableFrom(method.getReturnType())) {
return;
}
throw new ReflectionException(format("Method %s.%s should have returnType %s", classType, method, returnType));
}
private static void assertNoArgs(Method method) {
if (method.getParameterTypes().length == 0) {
return;
}
throw new ReflectionException(format("Method '%s' can't have any args", method));
}
}
|
|
/*
* Copyright 2004-2014 H2 Group. Multiple-Licensed under the MPL 2.0,
* and the EPL 1.0 (http://h2database.com/html/license.html).
* Initial Developer: H2 Group
*/
package org.h2.table;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
import java.util.Set;
import org.h2.api.DatabaseEventListener;
import org.h2.api.ErrorCode;
import org.h2.command.ddl.Analyze;
import org.h2.command.ddl.CreateTableData;
import org.h2.constraint.Constraint;
import org.h2.constraint.ConstraintReferential;
import org.h2.engine.Constants;
import org.h2.engine.DbObject;
import org.h2.engine.Session;
import org.h2.engine.SysProperties;
import org.h2.index.Cursor;
import org.h2.index.HashIndex;
import org.h2.index.Index;
import org.h2.index.IndexType;
import org.h2.index.MultiVersionIndex;
import org.h2.index.NonUniqueHashIndex;
import org.h2.index.PageBtreeIndex;
import org.h2.index.PageDataIndex;
import org.h2.index.PageDelegateIndex;
import org.h2.index.ScanIndex;
import org.h2.index.SpatialTreeIndex;
import org.h2.index.TreeIndex;
import org.h2.message.DbException;
import org.h2.message.Trace;
import org.h2.result.Row;
import org.h2.result.SortOrder;
import org.h2.schema.SchemaObject;
import org.h2.util.MathUtils;
import org.h2.util.New;
import org.h2.value.CompareMode;
import org.h2.value.DataType;
import org.h2.value.Value;
/**
* Most tables are an instance of this class. For this table, the data is stored
* in the database. The actual data is not kept here, instead it is kept in the
* indexes. There is at least one index, the scan index.
*/
public class RegularTable extends TableBase {
private Index scanIndex;
private long rowCount;
private volatile Session lockExclusiveSession;
private HashSet<Session> lockSharedSessions = New.hashSet();
/**
* The queue of sessions waiting to lock the table. It is a FIFO queue to
* prevent starvation, since Java's synchronized locking is biased.
*/
private final ArrayDeque<Session> waitingSessions = new ArrayDeque<Session>();
private final Trace traceLock;
private final ArrayList<Index> indexes = New.arrayList();
private long lastModificationId;
private boolean containsLargeObject;
private final PageDataIndex mainIndex;
private int changesSinceAnalyze;
private int nextAnalyze;
private Column rowIdColumn;
public RegularTable(CreateTableData data) {
super(data);
nextAnalyze = database.getSettings().analyzeAuto;
this.isHidden = data.isHidden;
for (Column col : getColumns()) {
if (DataType.isLargeObject(col.getType())) {
containsLargeObject = true;
}
}
if (data.persistData && database.isPersistent()) {
mainIndex = new PageDataIndex(this, data.id,
IndexColumn.wrap(getColumns()),
IndexType.createScan(data.persistData),
data.create, data.session);
scanIndex = mainIndex;
} else {
mainIndex = null;
scanIndex = new ScanIndex(this, data.id,
IndexColumn.wrap(getColumns()), IndexType.createScan(data.persistData));
}
indexes.add(scanIndex);
traceLock = database.getTrace(Trace.LOCK);
}
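// Note: the scan index added above is always indexes.get(0); getScanIndex()
// below relies on that ordering.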
@Override
public void close(Session session) {
for (Index index : indexes) {
index.close(session);
}
}
@Override
public Row getRow(Session session, long key) {
return scanIndex.getRow(session, key);
}
@Override
public void addRow(Session session, Row row) {
lastModificationId = database.getNextModificationDataId();
if (database.isMultiVersion()) {
row.setSessionId(session.getId());
}
int i = 0;
try {
for (int size = indexes.size(); i < size; i++) {
Index index = indexes.get(i);
index.add(session, row);
checkRowCount(session, index, 1);
}
rowCount++;
} catch (Throwable e) {
try {
while (--i >= 0) {
Index index = indexes.get(i);
index.remove(session, row);
checkRowCount(session, index, 0);
}
} catch (DbException e2) {
// this could happen, for example on failure in the storage
// but if that is not the case it means there is something wrong
// with the database
trace.error(e2, "could not undo operation");
throw e2;
}
DbException de = DbException.convert(e);
if (de.getErrorCode() == ErrorCode.DUPLICATE_KEY_1) {
for (int j = 0; j < indexes.size(); j++) {
Index index = indexes.get(j);
if (index.getIndexType().isUnique() && index instanceof MultiVersionIndex) {
MultiVersionIndex mv = (MultiVersionIndex) index;
if (mv.isUncommittedFromOtherSession(session, row)) {
throw DbException.get(
ErrorCode.CONCURRENT_UPDATE_1, index.getName());
}
}
}
}
throw de;
}
analyzeIfRequired(session);
}
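// Note: if adding the row to one of the indexes fails, the loop above removes
// the row from every index that was already updated before rethrowing, so all
// indexes stay consistent with rowCount; removeRow() below mirrors the same
// pattern in the opposite direction.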
@Override
public void commit(short operation, Row row) {
lastModificationId = database.getNextModificationDataId();
for (int i = 0, size = indexes.size(); i < size; i++) {
Index index = indexes.get(i);
index.commit(operation, row);
}
}
private void checkRowCount(Session session, Index index, int offset) {
if (SysProperties.CHECK && !database.isMultiVersion()) {
if (!(index instanceof PageDelegateIndex)) {
long rc = index.getRowCount(session);
if (rc != rowCount + offset) {
DbException.throwInternalError(
"rowCount expected " + (rowCount + offset) +
" got " + rc + " " + getName() + "." + index.getName());
}
}
}
}
@Override
public Index getScanIndex(Session session) {
return indexes.get(0);
}
@Override
public Index getUniqueIndex() {
for (Index idx : indexes) {
if (idx.getIndexType().isUnique()) {
return idx;
}
}
return null;
}
@Override
public ArrayList<Index> getIndexes() {
return indexes;
}
@Override
public Index addIndex(Session session, String indexName, int indexId,
IndexColumn[] cols, IndexType indexType, boolean create,
String indexComment) {
if (indexType.isPrimaryKey()) {
for (IndexColumn c : cols) {
Column column = c.column;
if (column.isNullable()) {
throw DbException.get(
ErrorCode.COLUMN_MUST_NOT_BE_NULLABLE_1, column.getName());
}
column.setPrimaryKey(true);
}
}
boolean isSessionTemporary = isTemporary() && !isGlobalTemporary();
if (!isSessionTemporary) {
database.lockMeta(session);
}
Index index;
if (isPersistIndexes() && indexType.isPersistent()) {
int mainIndexColumn;
if (database.isStarting() &&
database.getPageStore().getRootPageId(indexId) != 0) {
mainIndexColumn = -1;
} else if (!database.isStarting() && mainIndex.getRowCount(session) != 0) {
mainIndexColumn = -1;
} else {
mainIndexColumn = getMainIndexColumn(indexType, cols);
}
if (mainIndexColumn != -1) {
mainIndex.setMainIndexColumn(mainIndexColumn);
index = new PageDelegateIndex(this, indexId, indexName,
indexType, mainIndex, create, session);
} else if (indexType.isSpatial()) {
index = new SpatialTreeIndex(this, indexId, indexName, cols,
indexType, true, create, session);
} else {
index = new PageBtreeIndex(this, indexId, indexName, cols,
indexType, create, session);
}
} else {
if (indexType.isHash()) {
if (cols.length != 1) {
throw DbException.getUnsupportedException(
"hash indexes may index only one column");
}
if (indexType.isUnique()) {
index = new HashIndex(this, indexId, indexName, cols,
indexType);
} else {
index = new NonUniqueHashIndex(this, indexId, indexName,
cols, indexType);
}
} else if (indexType.isSpatial()) {
index = new SpatialTreeIndex(this, indexId, indexName, cols,
indexType, false, true, session);
} else {
index = new TreeIndex(this, indexId, indexName, cols, indexType);
}
}
if (database.isMultiVersion()) {
index = new MultiVersionIndex(index, this);
}
if (index.needRebuild() && rowCount > 0) {
try {
Index scan = getScanIndex(session);
long remaining = scan.getRowCount(session);
long total = remaining;
Cursor cursor = scan.find(session, null, null);
long i = 0;
int bufferSize = (int) Math.min(rowCount, database.getMaxMemoryRows());
ArrayList<Row> buffer = New.arrayList(bufferSize);
String n = getName() + ":" + index.getName();
int t = MathUtils.convertLongToInt(total);
while (cursor.next()) {
database.setProgress(DatabaseEventListener.STATE_CREATE_INDEX, n,
MathUtils.convertLongToInt(i++), t);
Row row = cursor.get();
buffer.add(row);
if (buffer.size() >= bufferSize) {
addRowsToIndex(session, buffer, index);
}
remaining--;
}
addRowsToIndex(session, buffer, index);
if (SysProperties.CHECK && remaining != 0) {
DbException.throwInternalError("rowcount remaining=" +
remaining + " " + getName());
}
} catch (DbException e) {
getSchema().freeUniqueName(indexName);
try {
index.remove(session);
} catch (DbException e2) {
// this could happen, for example on failure in the storage
// but if that is not the case it means
// there is something wrong with the database
trace.error(e2, "could not remove index");
throw e2;
}
throw e;
}
}
index.setTemporary(isTemporary());
if (index.getCreateSQL() != null) {
index.setComment(indexComment);
if (isSessionTemporary) {
session.addLocalTempTableIndex(index);
} else {
database.addSchemaObject(session, index);
}
}
indexes.add(index);
setModified();
return index;
}
private int getMainIndexColumn(IndexType indexType, IndexColumn[] cols) {
if (mainIndex.getMainIndexColumn() != -1) {
return -1;
}
if (!indexType.isPrimaryKey() || cols.length != 1) {
return -1;
}
IndexColumn first = cols[0];
if (first.sortType != SortOrder.ASCENDING) {
return -1;
}
switch (first.column.getType()) {
case Value.BYTE:
case Value.SHORT:
case Value.INT:
case Value.LONG:
break;
default:
return -1;
}
return first.column.getColumnId();
}
@Override
public boolean canGetRowCount() {
return true;
}
private static void addRowsToIndex(Session session, ArrayList<Row> list,
Index index) {
final Index idx = index;
Collections.sort(list, new Comparator<Row>() {
@Override
public int compare(Row r1, Row r2) {
return idx.compareRows(r1, r2);
}
});
for (Row row : list) {
index.add(session, row);
}
list.clear();
}
@Override
public boolean canDrop() {
return true;
}
@Override
public long getRowCount(Session session) {
if (database.isMultiVersion()) {
return getScanIndex(session).getRowCount(session);
}
return rowCount;
}
@Override
public void removeRow(Session session, Row row) {
if (database.isMultiVersion()) {
if (row.isDeleted()) {
throw DbException.get(ErrorCode.CONCURRENT_UPDATE_1, getName());
}
int old = row.getSessionId();
int newId = session.getId();
if (old == 0) {
row.setSessionId(newId);
} else if (old != newId) {
throw DbException.get(ErrorCode.CONCURRENT_UPDATE_1, getName());
}
}
lastModificationId = database.getNextModificationDataId();
int i = indexes.size() - 1;
try {
for (; i >= 0; i--) {
Index index = indexes.get(i);
index.remove(session, row);
checkRowCount(session, index, -1);
}
rowCount--;
} catch (Throwable e) {
try {
while (++i < indexes.size()) {
Index index = indexes.get(i);
index.add(session, row);
checkRowCount(session, index, 0);
}
} catch (DbException e2) {
// this could happen, for example on failure in the storage
// but if that is not the case it means there is something wrong
// with the database
trace.error(e2, "could not undo operation");
throw e2;
}
throw DbException.convert(e);
}
analyzeIfRequired(session);
}
@Override
public void truncate(Session session) {
lastModificationId = database.getNextModificationDataId();
for (int i = indexes.size() - 1; i >= 0; i--) {
Index index = indexes.get(i);
index.truncate(session);
}
rowCount = 0;
changesSinceAnalyze = 0;
}
private void analyzeIfRequired(Session session) {
if (nextAnalyze == 0 || nextAnalyze > changesSinceAnalyze++) {
return;
}
changesSinceAnalyze = 0;
int n = 2 * nextAnalyze;
if (n > 0) {
nextAnalyze = n;
}
int rows = session.getDatabase().getSettings().analyzeSample / 10;
Analyze.analyzeTable(session, this, rows, false);
}
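// Note: nextAnalyze doubles after every automatic analysis (with an overflow
// guard), so re-analysis of a frequently changing table becomes progressively
// less frequent.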
@Override
public boolean isLockedExclusivelyBy(Session session) {
return lockExclusiveSession == session;
}
@Override
public boolean lock(Session session, boolean exclusive,
boolean forceLockEvenInMvcc) {
int lockMode = database.getLockMode();
if (lockMode == Constants.LOCK_MODE_OFF) {
return lockExclusiveSession != null;
}
if (!forceLockEvenInMvcc && database.isMultiVersion()) {
// MVCC: update, delete, and insert use a shared lock.
// Select doesn't lock except when using FOR UPDATE
if (exclusive) {
exclusive = false;
} else {
if (lockExclusiveSession == null) {
return false;
}
}
}
if (lockExclusiveSession == session) {
return true;
}
synchronized (database) {
if (lockExclusiveSession == session) {
return true;
}
if (!exclusive && lockSharedSessions.contains(session)) {
return true;
}
session.setWaitForLock(this, Thread.currentThread());
waitingSessions.addLast(session);
try {
doLock1(session, lockMode, exclusive);
} finally {
session.setWaitForLock(null, null);
waitingSessions.remove(session);
}
}
return false;
}
private void doLock1(Session session, int lockMode, boolean exclusive) {
traceLock(session, exclusive, "requesting for");
// don't get the current time unless necessary
long max = 0;
boolean checkDeadlock = false;
while (true) {
// if I'm the next one in the queue
if (waitingSessions.getFirst() == session) {
if (doLock2(session, lockMode, exclusive)) {
return;
}
}
if (checkDeadlock) {
ArrayList<Session> sessions = checkDeadlock(session, null, null);
if (sessions != null) {
throw DbException.get(ErrorCode.DEADLOCK_1,
getDeadlockDetails(sessions, exclusive));
}
} else {
// check for deadlocks from now on
checkDeadlock = true;
}
long now = System.currentTimeMillis();
if (max == 0) {
// try at least one more time
max = now + session.getLockTimeout();
} else if (now >= max) {
traceLock(session, exclusive, "timeout after " + session.getLockTimeout());
throw DbException.get(ErrorCode.LOCK_TIMEOUT_1, getName());
}
try {
traceLock(session, exclusive, "waiting for");
if (database.getLockMode() == Constants.LOCK_MODE_TABLE_GC) {
for (int i = 0; i < 20; i++) {
long free = Runtime.getRuntime().freeMemory();
System.gc();
long free2 = Runtime.getRuntime().freeMemory();
if (free == free2) {
break;
}
}
}
// don't wait too long so that deadlocks are detected early
long sleep = Math.min(Constants.DEADLOCK_CHECK, max - now);
if (sleep == 0) {
sleep = 1;
}
database.wait(sleep);
} catch (InterruptedException e) {
// ignore
}
}
}
private boolean doLock2(Session session, int lockMode, boolean exclusive) {
if (exclusive) {
if (lockExclusiveSession == null) {
if (lockSharedSessions.isEmpty()) {
traceLock(session, exclusive, "added for");
session.addLock(this);
lockExclusiveSession = session;
return true;
} else if (lockSharedSessions.size() == 1 &&
lockSharedSessions.contains(session)) {
traceLock(session, exclusive, "add (upgraded) for ");
lockExclusiveSession = session;
return true;
}
}
} else {
if (lockExclusiveSession == null) {
if (lockMode == Constants.LOCK_MODE_READ_COMMITTED) {
if (!database.isMultiThreaded() && !database.isMultiVersion()) {
// READ_COMMITTED: a read lock is acquired,
// but released immediately after the operation
// is complete.
// When allowing only one thread, no lock is
// required.
// Row level locks work like read committed.
return true;
}
}
if (!lockSharedSessions.contains(session)) {
traceLock(session, exclusive, "ok");
session.addLock(this);
lockSharedSessions.add(session);
}
return true;
}
}
return false;
}
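// Summary of the locking protocol above: lock() enqueues the session in
// waitingSessions, doLock1() waits in a loop, attempting doLock2() whenever the
// session is at the head of the queue, re-checking for deadlocks after the first
// iteration, and giving up with LOCK_TIMEOUT_1 once the session's lock timeout
// has elapsed.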
private static String getDeadlockDetails(ArrayList<Session> sessions, boolean exclusive) {
// We add the thread details here to make it easier for customers to
// match up these error messages with their own logs.
StringBuilder buff = new StringBuilder();
for (Session s : sessions) {
Table lock = s.getWaitForLock();
Thread thread = s.getWaitForLockThread();
buff.append("\nSession ").
append(s.toString()).
append(" on thread ").
append(thread.getName()).
append(" is waiting to lock ").
append(lock.toString()).
append(exclusive ? " (exclusive)" : " (shared)").
append(" while locking ");
int i = 0;
for (Table t : s.getLocks()) {
if (i++ > 0) {
buff.append(", ");
}
buff.append(t.toString());
if (t instanceof RegularTable) {
if (((RegularTable) t).lockExclusiveSession == s) {
buff.append(" (exclusive)");
} else {
buff.append(" (shared)");
}
}
}
buff.append('.');
}
return buff.toString();
}
@Override
public ArrayList<Session> checkDeadlock(Session session, Session clash,
Set<Session> visited) {
// only one deadlock check at any given time
synchronized (RegularTable.class) {
if (clash == null) {
// verification is started
clash = session;
visited = New.hashSet();
} else if (clash == session) {
// we found a circle where this session is involved
return New.arrayList();
} else if (visited.contains(session)) {
// we have already checked this session.
// there is a circle, but the sessions in the circle need to
// find it out themselves
return null;
}
visited.add(session);
ArrayList<Session> error = null;
for (Session s : lockSharedSessions) {
if (s == session) {
// it doesn't matter if we have locked the object already
continue;
}
Table t = s.getWaitForLock();
if (t != null) {
error = t.checkDeadlock(s, clash, visited);
if (error != null) {
error.add(session);
break;
}
}
}
if (error == null && lockExclusiveSession != null) {
Table t = lockExclusiveSession.getWaitForLock();
if (t != null) {
error = t.checkDeadlock(lockExclusiveSession, clash, visited);
if (error != null) {
error.add(session);
}
}
}
return error;
}
}
private void traceLock(Session session, boolean exclusive, String s) {
if (traceLock.isDebugEnabled()) {
traceLock.debug("{0} {1} {2} {3}", session.getId(),
exclusive ? "exclusive write lock" : "shared read lock", s, getName());
}
}
@Override
public boolean isLockedExclusively() {
return lockExclusiveSession != null;
}
@Override
public void unlock(Session s) {
if (database != null) {
traceLock(s, lockExclusiveSession == s, "unlock");
if (lockExclusiveSession == s) {
lockExclusiveSession = null;
}
if (lockSharedSessions.size() > 0) {
lockSharedSessions.remove(s);
}
synchronized (database) {
if (!waitingSessions.isEmpty()) {
database.notifyAll();
}
}
}
}
/**
* Set the row count of this table.
*
* @param count the row count
*/
public void setRowCount(long count) {
this.rowCount = count;
}
@Override
public void removeChildrenAndResources(Session session) {
if (containsLargeObject) {
// unfortunately, the data is gone on rollback
truncate(session);
database.getLobStorage().removeAllForTable(getId());
database.lockMeta(session);
}
super.removeChildrenAndResources(session);
// go backwards because database.removeIndex will call table.removeIndex
while (indexes.size() > 1) {
Index index = indexes.get(1);
if (index.getName() != null) {
database.removeSchemaObject(session, index);
}
// needed for session temporary indexes
indexes.remove(index);
}
if (SysProperties.CHECK) {
for (SchemaObject obj : database.getAllSchemaObjects(DbObject.INDEX)) {
Index index = (Index) obj;
if (index.getTable() == this) {
DbException.throwInternalError("index not dropped: " + index.getName());
}
}
}
scanIndex.remove(session);
database.removeMeta(session, getId());
scanIndex = null;
lockExclusiveSession = null;
lockSharedSessions = null;
invalidate();
}
@Override
public String toString() {
return getSQL();
}
@Override
public void checkRename() {
// ok
}
@Override
public void checkSupportAlter() {
// ok
}
@Override
public boolean canTruncate() {
if (getCheckForeignKeyConstraints() && database.getReferentialIntegrity()) {
ArrayList<Constraint> constraints = getConstraints();
if (constraints != null) {
for (int i = 0, size = constraints.size(); i < size; i++) {
Constraint c = constraints.get(i);
if (!(c.getConstraintType().equals(Constraint.REFERENTIAL))) {
continue;
}
ConstraintReferential ref = (ConstraintReferential) c;
if (ref.getRefTable() == this) {
return false;
}
}
}
}
return true;
}
@Override
public String getTableType() {
return Table.TABLE;
}
@Override
public long getMaxDataModificationId() {
return lastModificationId;
}
public boolean getContainsLargeObject() {
return containsLargeObject;
}
@Override
public long getRowCountApproximation() {
return scanIndex.getRowCountApproximation();
}
@Override
public long getDiskSpaceUsed() {
return scanIndex.getDiskSpaceUsed();
}
public void setCompareMode(CompareMode compareMode) {
this.compareMode = compareMode;
}
@Override
public boolean isDeterministic() {
return true;
}
@Override
public Column getRowIdColumn() {
if (rowIdColumn == null) {
rowIdColumn = new Column(Column.ROWID, Value.LONG);
rowIdColumn.setTable(this, -1);
}
return rowIdColumn;
}
}
|
|
/**
* Copyright (c) 2008-2016, Massachusetts Institute of Technology (MIT)
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* 3. Neither the name of the copyright holder nor the names of its contributors
* may be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package edu.mit.ll.em.api.util;
public final class SADisplayConstants {
//CONTACT TYPES
public static final String EMAIL_TYPE = "email";
public static final String PHONE_HOME_TYPE = "phone_home";
public static final String PHONE_CELL_TYPE = "phone_cell";
public static final String PHONE_OTHER_TYPE = "phone_other";
public static final String PHONE_OFFICE_TYPE = "phone_office";
public static final String RADIO_NUMBER_TYPE = "radio_number";
public static final String TYPE = "type";
public static final String CONTACT_TYPE_ID = "contacttypeid";
public static final int EMAIL_TYPE_ID=0;
//USER ROLES
public static final String ADMIN_ROLE = "admin";
public static final String SUPER_ROLE = "super";
public static final String USER_ROLE = "user";
public static final String READ_ONLY_ROLE = "readOnly";
//USER ROLES
public static final int SUPER_ROLE_ID = 0;
public static final int USER_ROLE_ID = 1;
public static final int READ_ONLY_ROLE_ID = 2;
public static final int GIS_ROLE_ID = 3;
public static final int ADMIN_ROLE_ID = 4;
//TABLE NAMES
public static final String USERSESSION_TABLE = "Usersession";
public static final String CURRENT_USERSESSION_TABLE = "CurrentUserSession";
public static final String COLLAB_ROOM_TABLE = "CollabRoom";
public static final String COLLABROOM_FEATURE = "CollabroomFeature";
public static final String INCIDENT_TABLE = "Incident";
public static final String MESSAGE_ARCHIVE_TABLE = "MessageArchive";
public static final String SEAM_USER_TABLE = "SeamUser";
public static final String USER_ORG_TABLE = "UserOrg";
public static final String ORG_ORGTYPE_TABLE = "org_orgtype";
public static final String ORG_TYPE_TABLE = "OrgType";
public static final String FEATURE = "Feature";
public static final String ROOT_FOLDER_TABLE = "Rootfolder";
public static final String FOLDER_TABLE = "Folder";
public static final String CONTACT_TYPE_TABLE = "ContactType";
public static final String CONTACT_TABLE = "contact";
public static final String DATASOURCE_TABLE = "Datasource";;
public static final String DATALAYER_TABLE = "Datalayer";
public static final String DATASOURCE_TYPE_TABLE = "Datasourcetype";
public static final String SYSTEM_ROLE_TABLE = "SystemRole";
public static final String COLLAB_ROOM_PERMISSION_TABLE = "CollabroomPermission";
public static final String DATALAYER_FOLDER_TABLE = "Datalayerfolder";
public static final String FORM_TABLE = "Form";
public static final String FORM_TYPE_TABLE = "FormType";
public static final String ORG_TABLE = "Org";
public static final String LOG_TABLE = "Log";
public static final String ORG_FOLDER_TABLE = "OrgFolder";
public static final String MESSAGE_PERMISSIONS_TABLE = "MessagePermissions";
public static final String DATALAYER_SOURCE_TABLE = "Datalayersource";
public static final String USER_FEATURE = "UserFeature";
public static final String INCIDENT_TYPE_TABLE = "incidenttype";
public static final String INCIDENT_INCIDENTTYPE_TABLE = "incident_incidenttype";
//COMMON COLUMN NAMES
public static final String CREATED = "created";
//COLLAB ROOM COLUMNS
public static final String COLLAB_ROOM_ID = "collabroomid";
public static final String COLLAB_ROOM_NAME = "name";
//INCIDENT COLUMNS
public static final String INCIDENT_NAME = "incidentName";
public static final String INCIDENT_ID = "incidentId";
public static final String PARENT_INCIDENT_ID = "parentincidentid";
public static final String ACTIVE = "active";
public static final String FOLDER = "folder"; // LDDRS-648
//USER COLUMNS
public static final String USER_ID = "userId";
public static final String USER_NAME = "username";
public static final String ENABLED = "enabled";
//MESSAGE ARCHIVE COLUMNS
public static final String MESSAGE_TYPE = "messageType";
public static final String TOPIC = "topic";
public static final String INSERTED_TIME_STAMP = "insertedTimestamp";
//CURRENT USERSESSION CONSTANTS
public static final String DISPLAY_NAME = "displayname";
public static final String LOGGED_IN = "loggedin";
//USERSESSION CONSTANTS
public static final String USERSESSION_ID ="usersessionid";
public static final String SESSION_ID = "sessionid";
//DATASOURCE TYPE CONSTANTS
public static final String DATASOURCE_TYPE_NAME = "typename";
public static final String DATASOURCE_INTERNAL_URL = "internalurl";
//ORGANIZATION CONSTANTS
public static final String ORG_ID = "orgId";
//FOLDER COLUMN NAMES
public static final String FOLDER_ID = "folderid";
public static final String PARENT_FOLDER_ID = "parentfolderid";
//DATALAYER COLUMN NAMES
public static final String DATALAYER_ID = "datalayerid";
//SYSTEM ROLE COLUMN NAMES
public static final String SYSTEM_ROLE_NAME = "rolename";
public static final String SYSTEM_ROLE_ID = "systemroleid";
//DATALAYER FOLDER COLUMN NAMES
public static final String INDEX = "index";
public static final String DATALAYER_FOLDER_ID = "datalayerfolderid";
//FORM COLUMN NAMES
public static final String FORM_TYPE_NAME = "formtypename";
public static final String FORM_TYPE_ID = "formtypeid";
public static final String SEQ_TIME = "seqtime";
// LDDRS-648
// FORM TYPE CONSTANTS
public static final String FORM_TYPE_ROC = "ROC";
public static final String FORM_TYPE_RESC = "RESC";
public static final String FORM_TYPE_FDNY = "FDNY";
public static final String FORM_TYPE_SITREP = "SITREP";
// END LDDRS-648
//LOG COLUMN NAMES
public static final String LOG_TYPE_ID = "logtypeid";
public static final String MESSAGE = "message";
public static final String STATUS = "status";
public static final String LOG_ID = "logid";
//CONTACT COLUMN NAMES
public static final String VALUE = "value";
public static final String ORG_TYPE_NAME = "orgtypename";
public static final String LAYERNAME = "layername";
public static final String MASTER_MAP = "IncidentMap";
public static final String MASTER_MAP_ROLE = "masterMapRole";
public static final String WORKING_MAP = "WorkingMap";
public static final String NICS_TOPIC = "NICS.ws.1.";
public static final String NEW_USER_TOPIC = NICS_TOPIC + "user.new";
public static final String NEW_USER_MSG_TYPE = "newuser";
public static final String DATALAYER_SOURCE_ID = "datalayersourceid";
public static final String DATASOURCE_ID = "datasourceid";
public static final String DATASOURCE_TYPE_ID = "datasourcetypeid";
public static final String INCIDENT_TYPE_ID = "incidenttypeid";
public static final String USER_ESCAPED = "\"user\"";
public static final String FEATURE_ID = "featureid";
public static final String USER_ORG_ORG_ID = "userorg.orgid";
public static final String ID = "id";
/**Properties returned to the UI for displaying the datalayers*/
public static final String TABNAME = "tabname";
public static final String TITLE = "title";
public static final String LAYER = "layer";
public static final String URL = "url";
public static final String LAYER_TYPE = "layerType";
public static final String CRS = "crs";
public static final String BASE_LAYER = "baselayer";
public static final String FORMAT = "format";
public static final String STYLE = "style";
public static final String OPACITY = "opacity";
public static final String REFRESH_RATE ="refreshrate";
public static final String STYLE_PATH = "stylepath";
public static final String ATTRIBUTES = "attributes";
public static final String WMTS = "WMTS";
public static final String PASSWORD_HASH = "passwordhash";
public static final String PASSWORD_ENCRYPTED = "passwordencrypted";
public static final String FOLDER_MANAGEMENT_FEATURE = "folderManagement";
public static final int ALERT_LOGTYPE_ID = 1;
public static final String CURRENT_USERSESSION_ID = "currentusersessionid";
public static final String LAST_SEEN = "lastseen";
public static final String USER_SESSION_TABLE = "usersession";
public static final String USER_ORG_ID = "userorgid";
public static final String CURRENT_USERSESSION_SEQ = "current_user_session_seq";
public static final String USERSESSION_SEQ = "user_session_seq";
public static final String USER_ORG_WORKSPACE_ENABLED = "userorg_workspace.enabled";
public static final String OTHER_ENABLED = "other_enabled";
public static final String PRODUCTION = "production";
public static final String TRAINING = "training";
public static final String ARCHIVED = "/Archived/";
public static final String EMPTY_STRING = "";
public static final String ORG_NAME = "name";
public static final String ORG_TYPE_ID = "orgtypeid";
public static final String PREFIX = "prefix";
public static final String COLLABROOM_AND_NAME = "collabroom.name";
public static final String FIRSTNAME = "firstname";
public static final String LASTNAME = "lastname";
public static final String SECURE_INCIDENT_MAP = "secureIncidentMap";
public static final String MESSAGE_SEQUENCE = "message_sequence";
public static final String TYPE_NAME = "typename";
public static final String FOLDER_NAME = "foldername";
public static final String ORG_FOLDER_ID = "orgfolderid";
public static final String LOG_CREATED = "log.created";
public static final int ANNOUNCEMENTS_LOG_TYPE = 0;
public static final String CONTACT_SEQUENCE = "contact_seq";
public static final String CONTACT_ID = "contactid";
public static final String CONTACT_ENABLED = "contact.enabled";
public static final String CURRENT_USERSESSION_USER_ID = "currentusersession.userid";
public static final String ORG_FORM_TYPE_TABLE = "orgformtype";
public static final String DISTRIBUTED = "distributed";
public static final String FORM_ID = "formid";
public static final String SEQ_NUM = "seqnum";
public static final String DOCUMENT_TABLE = "document";
public static final String DOCUMENT_INCIDENT_TABLE = "document_incident";
public static final String DOCUMENT_ID = "documentid";
public static final String FILENAME = "filename";
public static final String FILETYPE = "filetype";
public static final String GLOBAL_VIEW = "globalview";
public static final String DISTRIBUTION = "distribution";
public static final String LONGITUDE = "lon";
public static final String LATITUDE = "lat";
public static final String INCIDENT_SEQUENCE_TABLE = "incident_seq";
public static final String INCIDENT_INCIDENTTYPE_ID = "incident_incidenttypeid";
public static final String HIBERNATE_SEQUENCE_TABLE = "hibernate_sequence";
public static final String COLLAB_ROOM_SEQUENCE_TABLE = "collab_room_seq";
public static final String LAST_UPDATED = "lastupdated";
public static final String USER_SEQUENCE_TABLE = "user_seq";
public static final String CONTACT_SEQUENCE_TABLE = "contact_seq";
public static final String USER_ORG_SEQUENCE_TABLE = "user_org_seq";
public static final String COLLABROOM_FEATURE_TABLE = "collabroomfeature";
public static final String COLLAB_ROOM_FEATURE_ID = "collabroomfeature.collabroomid";
public static final String USER_FEATURE_TABLE = "userfeature";
public static final String COLLAB_ROOM_PERMISSION_ID = "collabroompermissionid";
public static String EXISTING_SESSION_MSG = "User has an existing session. Removing...";
public static String REMOVE_SESSION_SUCCESS = "Removed pre-existing session(s)";
public static String REMOVE_SESSION_FAILURE = "No pre-existing sessions to remove";
public static String DEFAULT_REMOVE_MESSAGE = "Your account has been logged into from another location.";
public static int DEFAULT_SESSION_ID = -1;
public static final String COLLAB_ROOM_DAO = "collabroomDao";
public static final String DATA_LAYER_DAO = "datalayerDao";
public static final String DOCUMENT_DAO = "documentDao";
public static final String FEATURE_DAO = "featureDao";
public static final String FOLDER_DAO = "folderDao";
public static final String FORM_DAO = "formDao";
public static final String INCIDENT_DAO = "incidentDao";
public static final String LOG_DAO = "logDao";
public static final String MESSAGE_ARCHIVE_DAO = "messageArchiveDao";
public static final String ORG_DAO = "orgDao";
public static final String SYSTEM_ROLE_DAO = "systemroleDao";
public static final String USER_DAO = "userDao";
public static final String USER_ORG_DAO = "userOrgDao";
public static final String USER_SESSION_DAO = "userSessionDao";
public static final String TASKING_DAO = "taskingDao";
public static final String FEATURES = "features";
public static final String DEFAULT_LAT = "defaultlatitude";
public static final String DEFAULT_LON = "defaultlongitude";
public static final String INCIDENT_TYPE_NAME = "incidenttypename";
public static final String STATE = "state";
public static final String IMAGE_FORMAT = "imageformat";
public static final String NATIVE_PROJECTION = "nativeprojection";
public static final String TILE_GRID_SET = "tilegridset";
public static final String TILE_SIZE = "tilesize";
public static final String EXTERNAL_URL = "externalurl";
public static final String INTERNAL_URL = "internalurl";
public static final String ROOT_FOLDER_ID = "rootid";
public static final String USER_FEATURE_ID = "userfeatureid";
public static final String LAST_UPDATE = "lastupdate";
public static final String DESCRIPTION = "description";
public static final String RANK = "rank";
public static final String JOB_TITLE = "jobtitle";
public static final String WORKSPACE_TABLE = "workspace";
public static final String WORKSPACE_ID = "workspaceid";
public static final String WORKSPACE_NAME = "workspacename";
public static final String WORKSPACE_DAO ="workspaceDao";
public static final String LOG_WORKSPACE_TABLE = "log_workspace";
public static final String LOG_SEQ = "log_seq";
public static final String LOG_WORKSPACE_ID = "log_workspace_id";
public static final String USER_ORG_WORKSPACE_TABLE = "userorg_workspace";
public static final String USER_ORG_WORKSPACE_ID = "userorg_workspace_id";
public static final String SYSTEM_ROLE_WORKSPACE_TABLE = "systemrole_workspace";
public static final String ARCGIS_CACHE = "ARCGISCACHE";
public static final String DATALAYERFOLDER_INDEX = "datalayerfolder.index";
public static final String ENABLE_LOGIN = "enablelogin";
public static final String USER_ENABLED = USER_ESCAPED + ".enabled";
public static final String USER_ENABLED_PARAM = "user_enabled";
public static final String PASSWORD_CHANGED = "passwordchanged";
public static final String INCIDENT_MAP = "Incident Map";
public static final String PASSWORD = "password";
}
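// A minimal usage sketch (not part of the original source): shows how the table/column and DAO
// bean-name constants above are typically combined when composing SQL fragments or looking up
// beans. The class name, the WHERE-clause shape and the printed output are assumptions made for
// illustration only.
class SADisplayConstantsUsageSketch {
    public static void main(String[] args) {
        // Column-qualified predicate built from the table and column constants.
        String whereClause = SADisplayConstants.USER_ORG_TABLE + "." + SADisplayConstants.ORG_ID + " = ?";
        System.out.println("WHERE " + whereClause); // WHERE UserOrg.orgId = ?
        // DAO bean names such as USER_DAO would normally be resolved from the application context.
        System.out.println(SADisplayConstants.USER_DAO);
    }
}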
|
|
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.activiti.form.engine.impl.el;
import java.beans.FeatureDescriptor;
import java.math.BigDecimal;
import java.util.Iterator;
import javax.el.CompositeELResolver;
import javax.el.ELContext;
import javax.el.ELException;
import javax.el.ELResolver;
import javax.el.PropertyNotWritableException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
/**
* Defines property resolution behavior on JsonNodes.
*
* @see CompositeELResolver
* @see ELResolver
*/
public class JsonNodeELResolver extends ELResolver {
private final boolean readOnly;
/**
* Creates a new read/write JsonNodeELResolver.
*/
public JsonNodeELResolver() {
this(false);
}
/**
* Creates a new JsonNodeELResolver whose read-only status is determined by the given parameter.
*/
public JsonNodeELResolver(boolean readOnly) {
this.readOnly = readOnly;
}
/**
* If the base object is a JsonNode, returns the most general type that this resolver accepts for
* the property argument. Otherwise, returns null. Assuming the base is a JsonNode, this method
* will always return Object.class, because any object is accepted as a property name and is
* coerced into a string.
*
* @param context
* The context of this evaluation.
* @param base
* The JsonNode to analyze.
* @return null if base is not a JsonNode; otherwise Object.class.
*/
@Override
public Class<?> getCommonPropertyType(ELContext context, Object base) {
return isResolvable(base) ? Object.class : null;
}
/**
* If the base object is a JsonNode, returns an Iterator over the field names of the given node,
* wrapped as java.beans.FeatureDescriptor instances. Otherwise, returns null. Each
* FeatureDescriptor describes one JSON field, with the required named attributes "type" and
* "resolvableAtDesignTime" set as follows:
* <ul>
* <li>{@link ELResolver#TYPE} - The runtime type of the field name (a String).</li>
* <li>{@link ELResolver#RESOLVABLE_AT_DESIGN_TIME} - true.</li>
* </ul>
*
* @param context
* The context of this evaluation.
* @param base
* The JsonNode to analyze.
* @return An Iterator containing zero or more FeatureDescriptor objects, each representing a
* field on this node, or null if the base object is not a JsonNode.
*/
@Override
public Iterator<FeatureDescriptor> getFeatureDescriptors(ELContext context, Object base) {
if (isResolvable(base)) {
JsonNode node = (JsonNode) base;
final Iterator<String> keys = node.fieldNames();
return new Iterator<FeatureDescriptor>() {
public boolean hasNext() {
return keys.hasNext();
}
public FeatureDescriptor next() {
Object key = keys.next();
FeatureDescriptor feature = new FeatureDescriptor();
feature.setDisplayName(key == null ? "null" : key.toString());
feature.setName(feature.getDisplayName());
feature.setShortDescription("");
feature.setExpert(true);
feature.setHidden(false);
feature.setPreferred(true);
feature.setValue(TYPE, key == null ? "null" : key.getClass());
feature.setValue(RESOLVABLE_AT_DESIGN_TIME, true);
return feature;
}
public void remove() {
throw new UnsupportedOperationException("cannot remove");
}
};
}
return null;
}
/**
* If the base object is a JsonNode, returns the most general acceptable type for a value of one
* of its properties. If the base is a JsonNode, the propertyResolved property of the ELContext
* object must be set to true by this resolver before returning. If this property is not true
* after this method is called, the caller should ignore the return value. Assuming the base is
* a JsonNode, this method will always return Object.class, because any object is accepted as a
* property value.
*
* @param context
* The context of this evaluation.
* @param base
* The JsonNode to analyze. Only bases of type JsonNode are handled by this resolver.
* @param property
* The key to return the acceptable type for. Ignored by this resolver.
* @return If the propertyResolved property of ELContext was set to true, then the most general
* acceptable type; otherwise undefined.
* @throws NullPointerException
* if context is null
* @throws ELException
* if an exception was thrown while performing the property or variable resolution.
* The thrown exception must be included as the cause property of this exception, if
* available.
*/
@Override
public Class<?> getType(ELContext context, Object base, Object property) {
if (context == null) {
throw new NullPointerException("context is null");
}
Class<?> result = null;
if (isResolvable(base)) {
result = Object.class;
context.setPropertyResolved(true);
}
return result;
}
/**
* If the base object is a JsonNode, returns the value of the property named by the property
* argument. If the property was not found, null is returned. Value nodes are coerced to their
* Java counterparts (Boolean, Long, Double or String); container nodes are returned as-is.
* If the base is a JsonNode, the propertyResolved property of the ELContext object must be set
* to true by this resolver before returning. If this property is not true after this method is
* called, the caller should ignore the return value. Just as with java.util.Map.get(Object), a
* null return does not necessarily mean the property is absent; the node may also map the
* property to null explicitly.
*
* @param context
* The context of this evaluation.
* @param base
* The JsonNode to analyze. Only bases of type JsonNode are handled by this resolver.
* @param property
* The name of the property whose value is returned. Coerced to a String.
* @return If the propertyResolved property of ELContext was set to true, then the value
* associated with the given key or null if the key was not found. Otherwise, undefined.
* @throws ClassCastException
* if the key is of an inappropriate type for this map (optionally thrown by the
* underlying Map).
* @throws NullPointerException
* if context is null, or if the key is null and this map does not permit null keys
* (the latter is optionally thrown by the underlying Map).
* @throws ELException
* if an exception was thrown while performing the property or variable resolution.
* The thrown exception must be included as the cause property of this exception, if
* available.
*/
@Override
public Object getValue(ELContext context, Object base, Object property) {
if (context == null) {
throw new NullPointerException("context is null");
}
Object result = null;
if (isResolvable(base)) {
JsonNode resultNode = ((JsonNode) base).get(property.toString());
if (resultNode != null && resultNode.isValueNode()) {
if (resultNode.isBoolean()) {
result = resultNode.asBoolean();
} else if (resultNode.isLong()) {
result = resultNode.asLong();
} else if (resultNode.isBigDecimal() || resultNode.isDouble()) {
result = resultNode.asDouble();
} else if (resultNode.isTextual()) {
result = resultNode.asText();
} else {
result = resultNode.toString();
}
} else {
result = resultNode;
}
context.setPropertyResolved(true);
}
return result;
}
/**
* If the base object is a JsonNode, returns whether a call to
* {@link #setValue(ELContext, Object, Object, Object)} will always fail. If the base is a
* JsonNode, the propertyResolved property of the ELContext object must be set to true by this
* resolver before returning. If this property is not true after this method is called, the
* caller should ignore the return value. If this resolver was constructed in read-only mode,
* this method always returns true; otherwise it returns false.
*
* @param context
* The context of this evaluation.
* @param base
* The JsonNode to analyze. Only bases of type JsonNode are handled by this resolver.
* @param property
* The key to return the acceptable type for. Ignored by this resolver.
* @return If the propertyResolved property of ELContext was set to true, then true if calling
* the setValue method will always fail or false if it is possible that such a call may
* succeed; otherwise undefined.
* @throws NullPointerException
* if context is null.
* @throws ELException
* if an exception was thrown while performing the property or variable resolution.
* The thrown exception must be included as the cause property of this exception, if
* available.
*/
@Override
public boolean isReadOnly(ELContext context, Object base, Object property) {
if (context == null) {
throw new NullPointerException("context is null");
}
if (isResolvable(base)) {
context.setPropertyResolved(true);
}
return readOnly;
}
/**
* If the base object is an ObjectNode, attempts to set the value of the property named by the
* property argument. If the base is an ObjectNode, the propertyResolved property of the
* ELContext object must be set to true by this resolver before returning. If this property is
* not true after this method is called, the caller can safely assume no value was set. If this
* resolver was constructed in read-only mode, this method always throws
* PropertyNotWritableException. BigDecimal, Boolean, Long and Double values are stored with
* their native JSON types; any other non-null value is stored as its string representation, and
* null is stored as a JSON null.
*
* @param context
* The context of this evaluation.
* @param base
* The node to modify. Only bases of type ObjectNode are handled by this resolver.
* @param property
* The name of the property to set. Coerced to a String.
* @param value
* The value to be associated with the specified key.
* @throws ClassCastException
* if the class of the specified key or value prevents it from being stored in this
* map.
* @throws NullPointerException
* if context is null, or if this map does not permit null keys or values, and the
* specified key or value is null.
* @throws IllegalArgumentException
* if some aspect of this key or value prevents it from being stored in this map.
* @throws PropertyNotWritableException
* if this resolver was constructed in read-only mode, or if the put operation is
* not supported by the underlying map.
* @throws ELException
* if an exception was thrown while performing the property or variable resolution.
* The thrown exception must be included as the cause property of this exception, if
* available.
*/
@Override
public void setValue(ELContext context, Object base, Object property, Object value) {
if (context == null) {
throw new NullPointerException("context is null");
}
if (base instanceof ObjectNode) {
if (readOnly) {
throw new PropertyNotWritableException("resolver is read-only");
}
ObjectNode node = (ObjectNode) base;
if (value instanceof BigDecimal) {
node.put(property.toString(), (BigDecimal) value);
} else if (value instanceof Boolean) {
node.put(property.toString(), (Boolean) value);
} else if (value instanceof Long) {
node.put(property.toString(), (Long) value);
} else if (value instanceof Double) {
node.put(property.toString(), (Double) value);
} else if (value != null) {
node.put(property.toString(), value.toString());
} else {
node.putNull(property.toString());
}
context.setPropertyResolved(true);
}
}
/**
* Tests whether the given base should be resolved by this ELResolver.
*
* @param base
* The object to analyze.
* @return true if base is a non-null JsonNode
*/
private boolean isResolvable(Object base) {
return base instanceof JsonNode;
}
}
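// A minimal usage sketch (not part of the original source): registers the resolver in a
// CompositeELResolver and reads two properties of an ObjectNode directly. The bare-bones
// ELContext subclass below exists only to drive getValue(); real callers receive a context from
// their EL environment. The class name and the sample field values are made up for illustration.
class JsonNodeELResolverUsageSketch {
    public static void main(String[] args) {
        ObjectNode node = new com.fasterxml.jackson.databind.ObjectMapper().createObjectNode();
        node.put("amount", 42L);
        node.put("approved", true);

        final CompositeELResolver composite = new CompositeELResolver();
        composite.add(new JsonNodeELResolver());

        ELContext context = new ELContext() {
            @Override public ELResolver getELResolver() { return composite; }
            @Override public javax.el.FunctionMapper getFunctionMapper() { return null; } // unused here
            @Override public javax.el.VariableMapper getVariableMapper() { return null; } // unused here
        };

        // Value nodes are coerced by getValue(): LongNode -> Long, BooleanNode -> Boolean.
        System.out.println(composite.getValue(context, node, "amount"));   // 42
        System.out.println(composite.getValue(context, node, "approved")); // true
    }
}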
|
|
/*
* Copyright (c) JForum Team
* All rights reserved.
*
* Redistribution and use in source and binary forms,
* with or without modification, are permitted provided
* that the following conditions are met:
*
* 1) Redistributions of source code must retain the above
* copyright notice, this list of conditions and the
* following disclaimer.
* 2) Redistributions in binary form must reproduce the
* above copyright notice, this list of conditions and
* the following disclaimer in the documentation and/or
* other materials provided with the distribution.
* 3) Neither the name of "Rafael Steil" nor
* the names of its contributors may be used to endorse
* or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT
* HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING,
* BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
* THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
* OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
* IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE
*
* Created on Jan 11, 2005 11:22:19 PM
* The JForum Project
* http://www.jforum.net
*/
package net.jforum.dao.generic;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import net.jforum.JForumExecutionContext;
import net.jforum.entities.Karma;
import net.jforum.entities.KarmaStatus;
import net.jforum.entities.User;
import net.jforum.exceptions.DatabaseException;
import net.jforum.util.DbUtils;
import net.jforum.util.preferences.SystemGlobals;
/**
* @author Rafael Steil
* @version $Id$
*/
public class GenericKarmaDAO implements net.jforum.dao.KarmaDAO
{
/**
* @see net.jforum.dao.KarmaDAO#addKarma(net.jforum.entities.Karma)
*/
public void addKarma(final Karma karma)
{
PreparedStatement pstmt = null;
try {
pstmt = JForumExecutionContext.getConnection().prepareStatement(SystemGlobals.getSql("KarmaModel.add"));
pstmt.setInt(1, karma.getPostId());
pstmt.setInt(2, karma.getPostUserId());
pstmt.setInt(3, karma.getFromUserId());
pstmt.setInt(4, karma.getPoints());
pstmt.setInt(5, karma.getTopicId());
pstmt.setTimestamp(6, new Timestamp(System.currentTimeMillis()));
pstmt.executeUpdate();
this.updateUserKarma(karma.getPostUserId());
}
catch (SQLException e) {
throw new DatabaseException(e);
}
finally {
DbUtils.close(pstmt);
}
}
/**
* @see net.jforum.dao.KarmaDAO#getUserKarma(int)
*/
public KarmaStatus getUserKarma(final int userId)
{
final KarmaStatus status = new KarmaStatus();
PreparedStatement pstmt = null;
ResultSet resultSet = null;
try {
pstmt = JForumExecutionContext.getConnection()
.prepareStatement(SystemGlobals.getSql("KarmaModel.getUserKarma"));
pstmt.setInt(1, userId);
resultSet = pstmt.executeQuery();
if (resultSet.next()) {
status.setKarmaPoints(Math.round(resultSet.getDouble("user_karma")));
}
return status;
}
catch (SQLException e) {
throw new DatabaseException(e);
}
finally {
DbUtils.close(resultSet, pstmt);
}
}
/**
* @see net.jforum.dao.KarmaDAO#updateUserKarma(int)
*/
public void updateUserKarma(final int userId)
{
PreparedStatement pstmt = null;
ResultSet resultSet = null;
try {
pstmt = JForumExecutionContext.getConnection().prepareStatement(
SystemGlobals.getSql("KarmaModel.getUserKarmaPoints"));
pstmt.setInt(1, userId);
int totalRecords = 0;
double totalPoints = 0;
resultSet = pstmt.executeQuery();
while (resultSet.next()) {
final int points = resultSet.getInt("points");
final int votes = resultSet.getInt("votes");
totalPoints += ((double) points / votes);
totalRecords++;
}
resultSet.close();
pstmt.close();
pstmt = JForumExecutionContext.getConnection().prepareStatement(
SystemGlobals.getSql("KarmaModel.updateUserKarma"));
double karmaPoints = totalPoints / totalRecords;
if (Double.isNaN(karmaPoints)) {
karmaPoints = 0;
}
pstmt.setDouble(1, karmaPoints);
pstmt.setInt(2, userId);
pstmt.executeUpdate();
}
catch (SQLException e) {
throw new DatabaseException(e);
}
finally {
DbUtils.close(resultSet, pstmt);
}
}
/**
* @see net.jforum.dao.KarmaDAO#update(net.jforum.entities.Karma)
*/
public void update(final Karma karma)
{
PreparedStatement pstmt = null;
try {
pstmt = JForumExecutionContext.getConnection().prepareStatement(SystemGlobals.getSql("KarmaModel.update"));
pstmt.setInt(1, karma.getPoints());
pstmt.setInt(2, karma.getId());
pstmt.executeUpdate();
}
catch (SQLException e) {
throw new DatabaseException(e);
}
finally {
DbUtils.close(pstmt);
}
}
/**
* @see net.jforum.dao.KarmaDAO#getPostKarma(int)
*/
public KarmaStatus getPostKarma(final int postId)
{
final KarmaStatus karma = new KarmaStatus();
PreparedStatement pstmt = null;
ResultSet resultSet = null;
try {
pstmt = JForumExecutionContext.getConnection()
.prepareStatement(SystemGlobals.getSql("KarmaModel.getPostKarma"));
pstmt.setInt(1, postId);
resultSet = pstmt.executeQuery();
if (resultSet.next()) {
karma.setKarmaPoints(Math.round(resultSet.getDouble(1)));
}
return karma;
}
catch (SQLException e) {
throw new DatabaseException(e);
}
finally {
DbUtils.close(resultSet, pstmt);
}
}
/**
* @see net.jforum.dao.KarmaDAO#deletePostKarma(int)
*/
public void deletePostKarma(final int postId)
{
PreparedStatement pstmt = null;
try {
pstmt = JForumExecutionContext.getConnection()
.prepareStatement(SystemGlobals.getSql("KarmaModel.deletePostKarma"));
pstmt.setInt(1, postId);
pstmt.executeUpdate();
}
catch (SQLException e) {
throw new DatabaseException(e);
}
finally {
DbUtils.close(pstmt);
}
}
/**
* @see net.jforum.dao.KarmaDAO#userCanAddKarma(int, int)
*/
public boolean userCanAddKarma(final int userId, final int postId)
{
boolean status = true;
PreparedStatement pstmt = null;
ResultSet resultSet = null;
try {
pstmt = JForumExecutionContext.getConnection().prepareStatement(
SystemGlobals.getSql("KarmaModel.userCanAddKarma"));
pstmt.setInt(1, postId);
pstmt.setInt(2, userId);
resultSet = pstmt.executeQuery();
if (resultSet.next()) {
status = resultSet.getInt(1) < 1;
}
return status;
}
catch (SQLException e) {
throw new DatabaseException(e);
}
finally {
DbUtils.close(resultSet, pstmt);
}
}
/**
* @see net.jforum.dao.KarmaDAO#getUserVotes(int, int)
*/
public Map<Integer, Integer> getUserVotes(final int topicId, final int userId)
{
final Map<Integer, Integer> map = new ConcurrentHashMap<Integer, Integer>();
PreparedStatement pstmt = null;
ResultSet resultSet = null;
try {
pstmt = JForumExecutionContext.getConnection()
.prepareStatement(SystemGlobals.getSql("KarmaModel.getUserVotes"));
pstmt.setInt(1, topicId);
pstmt.setInt(2, userId);
resultSet = pstmt.executeQuery();
while (resultSet.next()) {
map.put(Integer.valueOf(resultSet.getInt("post_id")), Integer.valueOf(resultSet.getInt("points")));
}
return map;
}
catch (SQLException e) {
throw new DatabaseException(e);
}
finally {
DbUtils.close(resultSet, pstmt);
}
}
public void getUserTotalKarma(final User user)
{
PreparedStatement pstmt = null;
ResultSet resultSet = null;
try {
pstmt = JForumExecutionContext.getConnection().prepareStatement(
SystemGlobals.getSql("KarmaModel.getUserTotalVotes"));
pstmt.setInt(1, user.getId());
resultSet = pstmt.executeQuery();
user.setKarma(new KarmaStatus());
if (resultSet.next()) {
user.getKarma().setTotalPoints(resultSet.getInt("points"));
user.getKarma().setVotesReceived(resultSet.getInt("votes"));
}
if (user.getKarma().getVotesReceived() != 0) {
// prevents division by zero.
user.getKarma().setKarmaPoints(user.getKarma().getTotalPoints() / (double)user.getKarma().getVotesReceived());
}
this.getVotesGiven(user);
}
catch (SQLException e) {
throw new DatabaseException(e);
}
finally {
DbUtils.close(resultSet, pstmt);
}
}
private void getVotesGiven(final User user)
{
PreparedStatement pstmt = null;
ResultSet resultSet = null;
try {
pstmt = JForumExecutionContext.getConnection().prepareStatement(
SystemGlobals.getSql("KarmaModel.getUserGivenVotes"));
pstmt.setInt(1, user.getId());
resultSet = pstmt.executeQuery();
if (resultSet.next()) {
user.getKarma().setVotesGiven(resultSet.getInt("votes"));
}
}
catch (SQLException e) {
throw new DatabaseException(e);
}
finally {
DbUtils.close(resultSet, pstmt);
}
}
/**
* @see net.jforum.dao.KarmaDAO#getMostRatedUserByPeriod(int, java.util.Date, java.util.Date,
* String)
*/
public List<User> getMostRatedUserByPeriod(final int start, final Date firstPeriod, final Date lastPeriod, final String orderField)
{
String sql = SystemGlobals.getSql("KarmaModel.getMostRatedUserByPeriod");
sql = new StringBuilder(sql).append(" ORDER BY ").append(orderField).append(" DESC").toString();
return this.getMostRatedUserByPeriod(sql, firstPeriod, lastPeriod);
}
/**
*
* @param sql String
* @param firstPeriod Date
* @param lastPeriod Date
* @return List
*/
protected List<User> getMostRatedUserByPeriod(final String sql, final Date firstPeriod, final Date lastPeriod)
{
if (firstPeriod.after(lastPeriod)) {
throw new DatabaseException("First Date needs to be before the Last Date");
}
PreparedStatement pstmt = null;
ResultSet resultSet = null;
try {
pstmt = JForumExecutionContext.getConnection().prepareStatement(sql);
pstmt.setTimestamp(1, new Timestamp(firstPeriod.getTime()));
pstmt.setTimestamp(2, new Timestamp(lastPeriod.getTime()));
resultSet = pstmt.executeQuery();
return this.fillUser(resultSet);
}
catch (SQLException e) {
throw new DatabaseException(e);
}
finally {
DbUtils.close(resultSet, pstmt);
}
}
protected List<User> fillUser(final ResultSet resultSet) throws SQLException
{
final List<User> usersAndPoints = new ArrayList<User>();
KarmaStatus karma = null;
while (resultSet.next()) {
final User user = new User();
karma = new KarmaStatus();
karma.setTotalPoints(resultSet.getInt("total"));
karma.setVotesReceived(resultSet.getInt("votes_received"));
karma.setKarmaPoints(resultSet.getDouble("user_karma"));
karma.setVotesGiven(resultSet.getInt("votes_given"));
user.setUsername(resultSet.getString("username"));
user.setId(resultSet.getInt("user_id"));
user.setKarma(karma);
usersAndPoints.add(user);
}
return usersAndPoints;
}
}
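// A minimal sketch (not part of the original source) of the arithmetic performed by
// updateUserKarma(): each rated post contributes points/votes, the user's karma is the average
// of those per-post values, and a NaN result (no rated posts) is clamped to zero. The class
// name and the sample (points, votes) pairs are made up for illustration.
class KarmaAverageSketch {
    public static void main(String[] args) {
        int[][] postVotes = { { 10, 2 }, { 3, 3 }, { 8, 4 } }; // {points, votes} per rated post
        double totalPoints = 0;
        int totalRecords = 0;
        for (int[] row : postVotes) {
            totalPoints += (double) row[0] / row[1]; // same per-post average as updateUserKarma()
            totalRecords++;
        }
        double karmaPoints = totalPoints / totalRecords;
        if (Double.isNaN(karmaPoints)) {
            karmaPoints = 0; // no rated posts at all
        }
        System.out.println(karmaPoints); // (5.0 + 1.0 + 2.0) / 3 = 2.666...
    }
}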
|
|
package io.digdag.core.log;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.IOException;
import java.io.FileNotFoundException;
import java.time.Instant;
import java.nio.file.FileSystems;
import java.nio.file.Path;
import java.nio.file.Files;
import java.nio.file.DirectoryStream;
import com.google.inject.Inject;
import com.google.common.base.Optional;
import com.google.common.io.ByteStreams;
import io.digdag.commons.ThrowablesUtil;
import io.digdag.core.agent.AgentId;
import io.digdag.spi.LogServer;
import io.digdag.spi.LogServerFactory;
import io.digdag.spi.LogFilePrefix;
import io.digdag.spi.DirectUploadHandle;
import io.digdag.spi.StorageFileNotFoundException;
import io.digdag.client.config.Config;
import static java.nio.charset.StandardCharsets.UTF_8;
public class LocalFileLogServerFactory
implements LogServerFactory
{
private static final String LOG_GZ_FILE_SUFFIX = ".log.gz";
private final Path logPath;
private final long logSplitSize;
private final AgentId agentId;
@Inject
public LocalFileLogServerFactory(Config systemConfig, AgentId agentId)
{
this.logPath = FileSystems.getDefault().getPath(systemConfig.get("log-server.local.path", String.class, "digdag.log"))
.toAbsolutePath()
.normalize();
this.agentId = agentId;
this.logSplitSize = systemConfig.get("log-server.local.split_size", Long.class, 0L);
}
@Override
public String getType()
{
return "local";
}
@Override
public LogServer getLogServer()
{
try {
return new LocalFileLogServer(logPath);
}
catch (IOException ex) {
throw ThrowablesUtil.propagate(ex);
}
}
class LocalFileLogServer
extends AbstractFileLogServer
{
private final Path logPath;
private final ReentrantReadWriteLock lock;
private final ReentrantReadWriteLock.ReadLock logAppendLock;
public LocalFileLogServer(Path logPath)
throws IOException
{
this.logPath = logPath;
this.lock = new ReentrantReadWriteLock();
this.logAppendLock = lock.readLock();
}
@Override
public Optional<DirectUploadHandle> getDirectUploadHandle(String dateDir, String attemptDir, String fileName)
{
return Optional.absent();
}
@Override
protected void putFile(String dateDir, String attemptDir, String fileName, byte[] gzData)
{
Path dir = getPrefixDir(dateDir, attemptDir);
try {
Files.createDirectories(dir);
Path path = dir.resolve(fileName);
try (OutputStream out = Files.newOutputStream(path)) {
out.write(gzData);
}
}
catch (IOException ex) {
throw ThrowablesUtil.propagate(ex);
}
}
@Override
protected void listFiles(String dateDir, String attemptDir, boolean enableDirectDownload, FileMetadataConsumer consumer)
{
Path dir = getPrefixDir(dateDir, attemptDir);
if (!Files.exists(dir)) {
return;
}
try (DirectoryStream<Path> ds = Files.newDirectoryStream(dir)) {
for (Path path : ds) {
consumer.accept(
path.getFileName().toString(),
Files.size(path),
null);
}
}
catch (IOException ex) {
throw ThrowablesUtil.propagate(ex);
}
}
@Override
protected byte[] getFile(String dateDir, String attemptDir, String fileName)
throws StorageFileNotFoundException
{
Path path = getPrefixDir(dateDir, attemptDir).resolve(fileName);
try (InputStream in = Files.newInputStream(path)) {
return ByteStreams.toByteArray(in);
}
catch (FileNotFoundException ex) {
throw new StorageFileNotFoundException(ex);
}
catch (IOException ex) {
throw ThrowablesUtil.propagate(ex);
}
}
private Path getPrefixDir(String dateDir, String attemptDir)
{
return logPath.resolve(dateDir).resolve(attemptDir);
}
public LocalFileDirectTaskLogger newDirectTaskLogger(LogFilePrefix prefix, String taskName)
{
try {
return new LocalFileDirectTaskLogger(prefix, taskName, logSplitSize);
}
catch (IOException ex) {
throw ThrowablesUtil.propagate(ex);
}
}
class LocalFileDirectTaskLogger
implements TaskLogger
{
private CountingLogOutputStream output;
private final long splitSize;
private final Path dir;
private final String taskName;
public LocalFileDirectTaskLogger(LogFilePrefix prefix, String taskName, Long splitSize)
throws IOException
{
String dateDir = LogFiles.formatDataDir(prefix);
String attemptDir = LogFiles.formatSessionAttemptDir(prefix);
this.dir = getPrefixDir(dateDir, attemptDir);
this.taskName = taskName;
this.splitSize = splitSize;
this.output = openNewFile();
}
private CountingLogOutputStream openNewFile()
throws IOException
{
String fileName = LogFiles.formatFileName(taskName, Instant.now(), agentId.toString());
Files.createDirectories(dir);
Path path = dir.resolve(fileName);
return new CountingLogOutputStream(path);
}
@Override
public void log(LogLevel level, long timestamp, String message)
{
byte[] data = message.getBytes(UTF_8);
log(data, 0, data.length);
}
@Override
public synchronized void log(byte[] data, int off, int len)
{
try {
if (output == null) {
output = openNewFile();
}
else if (splitSize > 0 && output.getUncompressedSize() > splitSize) {
output.close();
// Drop the reference first so that a failure in openNewFile() leaves the next
// log() call able to reopen a fresh file instead of writing to a closed stream.
output = null;
output = openNewFile();
}
output.write(data, off, len);
}
catch (IOException ex) {
// We can do almost nothing here; routing this failure through the logger itself would cause an infinite loop.
throw ThrowablesUtil.propagate(ex);
}
}
@Override
public synchronized void close()
{
try {
// output may be null if a previous rollover failed before a new file could be opened
if (output != null) {
    output.close();
}
}
catch (IOException ex) {
throw ThrowablesUtil.propagate(ex);
}
}
}
}
}
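// A minimal sketch (not part of the original source) isolating the rollover rule used by
// LocalFileDirectTaskLogger.log(): when log-server.local.split_size is greater than zero and the
// current file's uncompressed size exceeds it, the logger closes the file and opens a new one.
// The helper name and the sample byte counts are made up for illustration.
class LogSplitSketch {
    /** True when a new log file should be opened before writing the next chunk. */
    static boolean shouldRoll(long splitSize, long uncompressedBytesWritten) {
        return splitSize > 0 && uncompressedBytesWritten > splitSize;
    }
    public static void main(String[] args) {
        System.out.println(shouldRoll(0L, 10_000_000L));        // false: splitting disabled (default split_size = 0)
        System.out.println(shouldRoll(1_000_000L, 500_000L));   // false: still under the threshold
        System.out.println(shouldRoll(1_000_000L, 1_500_000L)); // true: roll over to a new file
    }
}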
|
|
/*
* Copyright Ekagra and SemanticBits, LLC
*
* Distributed under the OSI-approved BSD 3-Clause License.
* See http://ncip.github.com/clinical-connector/LICENSE.txt for details.
*/
package gov.nih.nci.cdmsconnector.util;
import gov.nih.nci.cdmsconnector.util.StringEncrypter.EncryptionException;
import java.io.UnsupportedEncodingException;
import java.security.InvalidKeyException;
import java.security.NoSuchAlgorithmException;
import java.security.spec.KeySpec;
import javax.crypto.Cipher;
import javax.crypto.NoSuchPaddingException;
import javax.crypto.SecretKey;
import javax.crypto.SecretKeyFactory;
import javax.crypto.spec.DESKeySpec;
import javax.crypto.spec.DESedeKeySpec;
import sun.misc.BASE64Decoder;
import sun.misc.BASE64Encoder;
import java.sql.DriverManager;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import oracle.jdbc.driver.OracleDriver;
public class StringEncrypter {
public static final String DESEDE_ENCRYPTION_SCHEME = "DESede";
public static final String DES_ENCRYPTION_SCHEME = "DES";
public static final String DEFAULT_ENCRYPTION_KEY = "12C3PR34567890ENCRYPTIONC3PR4KEY5678901234567890";
private KeySpec keySpec;
private SecretKeyFactory keyFactory;
private Cipher cipher;
private static final String UNICODE_FORMAT = "UTF8";
protected static final String ENCRYPT =
"SELECT getEncrypt(?,?,?,?) OUTSTRING from dual";
protected static final String DECRYPT =
"SELECT getDecrypt(?,?,?,?) OUTSTRING from dual";
public StringEncrypter() throws EncryptionException {
this(DES_ENCRYPTION_SCHEME, DEFAULT_ENCRYPTION_KEY);
}
public StringEncrypter(String encryptionScheme) throws EncryptionException {
this(encryptionScheme, DEFAULT_ENCRYPTION_KEY);
}
public StringEncrypter(String encryptionScheme, String encryptionKey) throws EncryptionException {
if (encryptionKey == null)
throw new IllegalArgumentException("encryption key was null");
if (encryptionKey.trim().length() < 24)
throw new IllegalArgumentException("encryption key was less than 24 characters");
try {
byte[] keyAsBytes = encryptionKey.getBytes(UNICODE_FORMAT);
if (encryptionScheme.equals(DESEDE_ENCRYPTION_SCHEME)) {
keySpec = new DESedeKeySpec(keyAsBytes);
} else
if (encryptionScheme.equals(DES_ENCRYPTION_SCHEME)) {
keySpec = new DESKeySpec(keyAsBytes);
} else {
throw new IllegalArgumentException("Encryption scheme not supported: " + encryptionScheme);
}
keyFactory = SecretKeyFactory.getInstance(encryptionScheme);
cipher = Cipher.getInstance(encryptionScheme);
} catch (InvalidKeyException e) {
throw new EncryptionException(e);
} catch (UnsupportedEncodingException e) {
throw new EncryptionException(e);
} catch (NoSuchAlgorithmException e) {
throw new EncryptionException(e);
} catch (NoSuchPaddingException e) {
throw new EncryptionException(e);
}
}
public String jEncrypt(String unencryptedString) throws EncryptionException {
if (unencryptedString == null || unencryptedString.trim().length() == 0)
throw new IllegalArgumentException("unencrypted string was null or empty");
try {
SecretKey key = keyFactory.generateSecret(keySpec);
cipher.init(Cipher.ENCRYPT_MODE, key);
byte[] cleartext = unencryptedString.getBytes(UNICODE_FORMAT);
byte[] ciphertext = cipher.doFinal(cleartext);
BASE64Encoder base64encoder = new BASE64Encoder();
return base64encoder.encode(ciphertext);
} catch (Exception e) {
throw new EncryptionException(e);
}
}
public String jDecrypt(String encryptedString) throws EncryptionException {
if (encryptedString == null || encryptedString.trim().length() <= 0)
throw new IllegalArgumentException("encrypted string was null or empty");
try {
SecretKey key = keyFactory.generateSecret(keySpec);
cipher.init(Cipher.DECRYPT_MODE, key);
BASE64Decoder base64decoder = new BASE64Decoder();
byte[] cleartext = base64decoder.decodeBuffer(encryptedString);
byte[] ciphertext = cipher.doFinal(cleartext);
return bytes2String(ciphertext);
} catch (Exception e) {
throw new EncryptionException(e);
}
}
private static String bytes2String(byte[] bytes) {
StringBuffer stringBuffer = new StringBuffer();
for (int i = 0; i < bytes.length; i++) {
stringBuffer.append((char) bytes[i]);
}
return stringBuffer.toString();
}
public static class EncryptionException extends Exception {
public EncryptionException(Throwable t) {
super(t);
}
}
public String oEncrypt(String unencryptedString) throws EncryptionException {
PreparedStatement stmt1 = null;
Connection cn = null;
String encryptedString = null;
// Encrypt Password
try {
Class.forName("oracle.jdbc.driver.OracleDriver");
cn = gov.nih.nci.clinicalconnector.dao.BaseJDBCDAO.getConnection();
// Call the database getEncrypt function
stmt1 = cn.prepareStatement(ENCRYPT);
stmt1.setString(1, unencryptedString);
stmt1.setString(2, DEFAULT_ENCRYPTION_KEY);
stmt1.setString(3, DES_ENCRYPTION_SCHEME);
stmt1.setString(4, "1");
ResultSet rs1 = stmt1.executeQuery();
while (rs1.next()) {
encryptedString = rs1.getString("OUTSTRING");
}
stmt1.close();
} catch (Exception ex) {
ex.printStackTrace();
}
try {
cn.close();
} catch (Exception ex){
ex.printStackTrace();
}
return encryptedString;
}
public String oEncrypt(String unencryptedString, String dUser, String dPass, String dDB) throws EncryptionException {
PreparedStatement stmt1 = null;
Connection cn = null;
String encryptedString = null;
// Encrypt Password
try {
Class.forName("oracle.jdbc.driver.OracleDriver");
cn = DriverManager.getConnection(dDB,dUser, dPass);
// Call the database getEncrypt function
stmt1 = cn.prepareStatement(ENCRYPT);
stmt1.setString(1, unencryptedString);
stmt1.setString(2, DEFAULT_ENCRYPTION_KEY);
stmt1.setString(3, DES_ENCRYPTION_SCHEME);
stmt1.setString(4, "1");
ResultSet rs1 = stmt1.executeQuery();
while (rs1.next()) {
encryptedString = rs1.getString("OUTSTRING");
}
stmt1.close();
} catch (Exception ex) {
ex.printStackTrace();
}
try {
cn.close();
} catch (Exception ex){
ex.printStackTrace();
}
return encryptedString;
}
public String oDecrypt(String encryptedString) throws EncryptionException {
PreparedStatement stmt1 = null;
Connection cn = null;
String unencryptedString = null;
// Decrypt Password
try {
Class.forName("oracle.jdbc.driver.OracleDriver");
cn = gov.nih.nci.clinicalconnector.dao.BaseJDBCDAO.getConnection();
// Call the database getDecrypt function
stmt1 = cn.prepareStatement(DECRYPT);
stmt1.setString(1, encryptedString);
stmt1.setString(2, DEFAULT_ENCRYPTION_KEY);
stmt1.setString(3, DES_ENCRYPTION_SCHEME);
stmt1.setString(4, "1");
ResultSet rs1 = stmt1.executeQuery();
while (rs1.next()) {
unencryptedString = rs1.getString("OUTSTRING");
}
stmt1.close();
} catch (Exception ex) {
ex.printStackTrace();
}
try {
cn.close();
} catch (Exception ex){
ex.printStackTrace();
}
return unencryptedString;
}
public String oDecrypt(String encryptedString, String dUser, String dPass, String dDB) throws EncryptionException {
PreparedStatement stmt1 = null;
Connection cn = null;
String unencryptedString = null;
// Decrypt Password
try {
Class.forName("oracle.jdbc.driver.OracleDriver");
cn = DriverManager.getConnection(dDB,dUser, dPass);
// Call the database getDecrypt function
stmt1 = cn.prepareStatement(DECRYPT);
stmt1.setString(1, encryptedString);
stmt1.setString(2, DEFAULT_ENCRYPTION_KEY);
stmt1.setString(3, DES_ENCRYPTION_SCHEME);
stmt1.setString(4, "1");
ResultSet rs1 = stmt1.executeQuery();
while (rs1.next()) {
unencryptedString = rs1.getString("OUTSTRING");
}
stmt1.close();
} catch (Exception ex) {
ex.printStackTrace();
}
try {
cn.close();
} catch (Exception ex){
ex.printStackTrace();
}
return unencryptedString;
}
public static void helpMessage() {
System.out.println("String Encrypter");
System.out.println(" ");
System.out.println(" Used to Encrypt or Decrypt a character string.");
System.out.println(" Usage: java StringEncrypter ");
System.out.println(" jEncrypt [string] - Returns encrypted string using javax.crypto DES" );
System.out.println(" jDecrypt [string] - Returns decrypted string using javax.crypto DES" );
System.out.println(" oEncrypt [string] [user] [pass] [db] - Returns encrypted string using Oracle DES3" );
System.out.println(" oDecrypt [string] [user] [pass] [db] - Returns decrypted string using Oracle DES3" );
System.out.println(" ");
}
public static void main(String args[]){
if (args.length == 0) {
helpMessage();
System.exit(0);
}
if (args[0] == null || "".equals(args[0]) ||
"HELP".equals(args[0].toUpperCase()) ||
"/H".equals(args[0].toUpperCase() )) {
helpMessage();
System.exit(0);
}
if ( ("JENCRYPT".equals(args[0].toUpperCase()) ||
"JDECRYPT".equals(args[0].toUpperCase()) ) && args.length < 2) {
System.out.println("Two parameter are require when using jEncrypt or jDecrypt.");
System.exit(0);
}
if ( ("OENCRYPT".equals(args[0].toUpperCase()) ||
"ODECRYPT".equals(args[0].toUpperCase()) ) && args.length < 5) {
System.out.println("Five parameter are require when using oEncrypt or oDecrypt.");
System.exit(0);
}
try{
StringEncrypter se = new StringEncrypter();
if ( "JENCRYPT".equals(args[0].toUpperCase())) {
if (args[1] == null || "".equals(args[1])) {
System.out.println("No string specified. Use 'help' for more information.");
} else {
System.out.println( se.jEncrypt(args[1]) );
}
}
if ( "JDECRYPT".equals(args[0].toUpperCase()) ) {
if (args[1] == null || "".equals(args[1])) {
System.out.println("No string specified. Use 'help' for more information.");
} else {
System.out.println( se.jDecrypt(args[1]) );
}
}
if ( "OENCRYPT".equals(args[0].toUpperCase()) ) {
if (args[1] == null || "".equals(args[1])) {
System.out.println("No string specified. Use 'help' for more information.");
} else {
System.out.println( se.oEncrypt(args[1],args[2],args[3],args[4]) );
}
}
if ( "ODECRYPT".equals(args[0].toUpperCase()) ) {
if (args[1] == null || "".equals(args[1])) {
System.out.println("No string specified. Use 'help' for more information.");
} else {
System.out.println( se.oDecrypt(args[1],args[2],args[3],args[4]) );
}
}
if ( "OHELP".equals(args[0].toUpperCase()) ) {
System.out.println("Sample jdbc thin - jdbc:oracle:thin:@cbiodb2.nci.nih.gov:1521:ocdev");
} else {
helpMessage();
}
}catch(Throwable e){
e.printStackTrace();
}
}
}
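// A minimal usage sketch (not part of the original source): a jEncrypt/jDecrypt round trip using
// the default DES scheme and DEFAULT_ENCRYPTION_KEY. Because the class relies on
// sun.misc.BASE64Encoder/BASE64Decoder, this only runs on JDKs that still ship those internal
// classes; the class name and the sample plaintext are made up for illustration.
class StringEncrypterRoundTripSketch {
    public static void main(String[] args) throws StringEncrypter.EncryptionException {
        StringEncrypter encrypter = new StringEncrypter(); // DES with the default key
        String cipherText = encrypter.jEncrypt("s3cret-password");
        String plainText = encrypter.jDecrypt(cipherText);
        System.out.println(cipherText);
        System.out.println(plainText); // prints "s3cret-password" again
    }
}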
|
|
/*
* Copyright 2010-2012 Luca Garulli (l.garulli--at--orientechnologies.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.orientechnologies.orient.test.database.auto;
import com.orientechnologies.common.collection.OMultiValue;
import com.orientechnologies.orient.core.db.document.ODatabaseDocument;
import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
import com.orientechnologies.orient.core.db.record.OIdentifiable;
import com.orientechnologies.orient.core.id.ORID;
import com.orientechnologies.orient.core.record.ORecordInternal;
import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.core.record.impl.ODocumentHelper;
import com.orientechnologies.orient.core.sql.OCommandSQLParsingException;
import com.orientechnologies.orient.core.sql.query.OSQLSynchQuery;
import com.orientechnologies.orient.enterprise.channel.binary.OResponseProcessingException;
import com.orientechnologies.orient.object.db.OObjectDatabaseTx;
import org.testng.Assert;
import org.testng.annotations.Optional;
import org.testng.annotations.Parameters;
import org.testng.annotations.Test;
import java.util.List;
@Test(groups = "sql-select")
public class SQLSelectProjectionsTest extends DocumentDBBaseTest {
@Parameters(value = "url")
public SQLSelectProjectionsTest(@Optional String url) {
super(url);
}
@Test
public void queryProjectionOk() {
List<ODocument> result = database
.command(
new OSQLSynchQuery<ODocument>(
" select nick, followings, followers from Profile where nick is defined and followings is defined and followers is defined"))
.execute();
Assert.assertTrue(result.size() != 0);
for (ODocument d : result) {
String[] colNames = d.fieldNames();
Assert.assertEquals(colNames.length, 3);
Assert.assertEquals(colNames[0], "nick");
Assert.assertEquals(colNames[1], "followings");
Assert.assertEquals(colNames[2], "followers");
Assert.assertNull(d.getClassName());
Assert.assertEquals(ORecordInternal.getRecordType(d), ODocument.RECORD_TYPE);
}
}
@Test
public void queryProjectionObjectLevel() {
OObjectDatabaseTx db = new OObjectDatabaseTx(url);
db.open("admin", "admin");
List<ODocument> result = db.getUnderlying().query(
new OSQLSynchQuery<ODocument>(" select nick, followings, followers from Profile "));
Assert.assertTrue(result.size() != 0);
for (ODocument d : result) {
Assert.assertTrue(d.fieldNames().length <= 3);
Assert.assertNull(d.getClassName());
Assert.assertEquals(ORecordInternal.getRecordType(d), ODocument.RECORD_TYPE);
}
db.close();
}
@Test
public void queryProjectionLinkedAndFunction() {
List<ODocument> result = database.command(
new OSQLSynchQuery<ODocument>("select name.toUppercase(), address.city.country.name from Profile")).execute();
Assert.assertTrue(result.size() != 0);
for (ODocument d : result) {
Assert.assertTrue(d.fieldNames().length <= 2);
if (d.field("name") != null)
Assert.assertTrue(d.field("name").equals(((String) d.field("name")).toUpperCase()));
Assert.assertNull(d.getClassName());
Assert.assertEquals(ORecordInternal.getRecordType(d), ODocument.RECORD_TYPE);
}
}
@Test
public void queryProjectionSameFieldTwice() {
List<ODocument> result = database.command(
new OSQLSynchQuery<ODocument>("select name, name.toUppercase() from Profile where name is not null")).execute();
Assert.assertTrue(result.size() != 0);
for (ODocument d : result) {
Assert.assertTrue(d.fieldNames().length <= 2);
Assert.assertNotNull(d.field("name"));
Assert.assertNotNull(d.field("name2"));
Assert.assertNull(d.getClassName());
Assert.assertEquals(ORecordInternal.getRecordType(d), ODocument.RECORD_TYPE);
}
}
@Test
public void queryProjectionStaticValues() {
List<ODocument> result = database
.command(
new OSQLSynchQuery<ODocument>(
"select location.city.country.name, address.city.country.name from Profile where location.city.country.name is not null"))
.execute();
Assert.assertTrue(result.size() != 0);
for (ODocument d : result) {
Assert.assertNotNull(d.field("location"));
Assert.assertNull(d.field("address"));
Assert.assertNull(d.getClassName());
Assert.assertEquals(ORecordInternal.getRecordType(d), ODocument.RECORD_TYPE);
}
}
@Test
public void queryProjectionPrefixAndAppend() {
List<ODocument> result = database.command(
new OSQLSynchQuery<ODocument>(
"select *, name.prefix('Mr. ').append(' ').append(surname).append('!') as test from Profile where name is not null"))
.execute();
Assert.assertTrue(result.size() != 0);
for (ODocument d : result) {
Assert.assertEquals(d.field("test").toString(), "Mr. " + d.field("name") + " " + d.field("surname") + "!");
Assert.assertEquals(ORecordInternal.getRecordType(d), ODocument.RECORD_TYPE);
}
}
@Test
public void queryProjectionFunctionsAndFieldOperators() {
List<ODocument> result = database.command(
new OSQLSynchQuery<ODocument>("select name.append('.').prefix('Mr. ') as name from Profile where name is not null"))
.execute();
Assert.assertTrue(result.size() != 0);
for (ODocument d : result) {
Assert.assertTrue(d.fieldNames().length <= 1);
Assert.assertTrue(d.field("name").toString().startsWith("Mr. "));
Assert.assertTrue(d.field("name").toString().endsWith("."));
Assert.assertNull(d.getClassName());
Assert.assertEquals(ORecordInternal.getRecordType(d), ODocument.RECORD_TYPE);
}
}
@Test
public void queryProjectionAliases() {
List<ODocument> result = database.command(
new OSQLSynchQuery<ODocument>(
"select name.append('!') as 1, surname as 2 from Profile where name is not null and surname is not null")).execute();
Assert.assertTrue(result.size() != 0);
for (ODocument d : result) {
Assert.assertTrue(d.fieldNames().length <= 2);
Assert.assertTrue(d.field("1").toString().endsWith("!"));
Assert.assertNotNull(d.field("2"));
Assert.assertNull(d.getClassName());
Assert.assertEquals(ORecordInternal.getRecordType(d), ODocument.RECORD_TYPE);
}
}
@Test
public void queryProjectionSimpleValues() {
List<ODocument> result = database.command(new OSQLSynchQuery<ODocument>("select 10, 'ciao' from Profile LIMIT 1")).execute();
Assert.assertTrue(result.size() != 0);
for (ODocument d : result) {
Assert.assertTrue(d.fieldNames().length <= 2);
Assert.assertEquals(((Integer) d.field("10")).intValue(), 10l);
Assert.assertEquals(d.field("ciao"), "ciao");
Assert.assertNull(d.getClassName());
Assert.assertEquals(ORecordInternal.getRecordType(d), ODocument.RECORD_TYPE);
}
}
@Test
public void queryProjectionJSON() {
List<ODocument> result = database.command(new OSQLSynchQuery<ODocument>("select @this.toJson() as json from Profile"))
.execute();
Assert.assertTrue(result.size() != 0);
for (ODocument d : result) {
Assert.assertTrue(d.fieldNames().length <= 1);
Assert.assertNotNull(d.field("json"));
new ODocument().fromJSON((String) d.field("json"));
}
}
@Test
public void queryProjectionContentCollection() {
List<ODocument> result = database.command(
new OSQLSynchQuery<ODocument>("SELECT FLATTEN( outE() ) FROM V WHERE outE() TRAVERSE(1,1) (@class = 'E')")).execute();
Assert.assertTrue(result.size() != 0);
for (ODocument d : result) {
Assert.assertTrue(d.getSchemaClass().isSubClassOf("E"));
Assert.assertEquals(ORecordInternal.getRecordType(d), ODocument.RECORD_TYPE);
}
}
@Test
public void queryProjectionFlattenError() {
try {
database.command(new OSQLSynchQuery<ODocument>("SELECT FLATTEN( out_ ), in_ FROM V WHERE out_ TRAVERSE(1,1) (@class = 'E')"))
.execute();
Assert.fail();
} catch (OCommandSQLParsingException e) {
} catch (OResponseProcessingException e) {
Assert.assertTrue(e.getCause() instanceof OCommandSQLParsingException);
}
}
public void queryProjectionRid() {
List<ODocument> result = database.command(new OSQLSynchQuery<ODocument>("select @rid FROM V")).execute();
Assert.assertTrue(result.size() != 0);
for (ODocument d : result) {
Assert.assertTrue(d.fieldNames().length <= 1);
Assert.assertNotNull(d.field("rid"));
final ORID rid = d.field("rid", ORID.class);
Assert.assertTrue(rid.isValid());
}
}
public void queryProjectionOrigin() {
List<ODocument> result = database.command(new OSQLSynchQuery<ODocument>("select @raw FROM V")).execute();
Assert.assertTrue(result.size() != 0);
for (ODocument d : result) {
Assert.assertTrue(d.fieldNames().length <= 1);
Assert.assertNotNull(d.field("raw"));
}
}
public void queryProjectionEval() {
List<ODocument> result = database.command(new OSQLSynchQuery<ODocument>("select eval('1 + 4') as result")).execute();
Assert.assertEquals(result.size(), 1);
for (ODocument d : result)
Assert.assertEquals(d.field("result"), 5);
}
@SuppressWarnings("unchecked")
public void queryProjectionContextArray() {
List<ODocument> result = database.command(
new OSQLSynchQuery<ODocument>("select $a[0] as a0, $a as a from V let $a = outE() where outE().size() > 0")).execute();
Assert.assertFalse(result.isEmpty());
for (ODocument d : result) {
Assert.assertTrue(d.containsField("a"));
Assert.assertTrue(d.containsField("a0"));
final ODocument a0doc = d.field("a0");
final ODocument firstADoc = (ODocument) d.<Iterable<OIdentifiable>> field("a").iterator().next();
Assert.assertTrue(ODocumentHelper.hasSameContentOf(a0doc, database, firstADoc, database, null));
}
}
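// The assertions below document ifnull(): with two arguments it returns the first value
// when it is not null and the second otherwise; with three arguments the third value is
// returned when the first is not null.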
public void ifNullFunction() {
List<ODocument> result = database.command(new OSQLSynchQuery<ODocument>("SELECT ifnull('a', 'b')")).execute();
Assert.assertFalse(result.isEmpty());
Assert.assertEquals(result.get(0).field("ifnull"), "a");
result = database.command(new OSQLSynchQuery<ODocument>("SELECT ifnull('a', 'b', 'c')")).execute();
Assert.assertFalse(result.isEmpty());
Assert.assertEquals(result.get(0).field("ifnull"), "c");
result = database.command(new OSQLSynchQuery<ODocument>("SELECT ifnull(null, 'b')")).execute();
Assert.assertFalse(result.isEmpty());
Assert.assertEquals(result.get(0).field("ifnull"), "b");
}
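// The next test exercises bracket filters on a collection projection: set(name)[0-1]
// keeps an index range and set(name)[0,1] keeps an explicit list of indexes (both remain
// multi-values), while set(name)[0] extracts a single, non-collection element.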
public void filteringArrayInChain() {
List<ODocument> result = database.command(new OSQLSynchQuery<ODocument>("SELECT set(name)[0-1] as set from OUser")).execute();
Assert.assertEquals(result.size(), 1);
for (ODocument d : result) {
Assert.assertTrue(OMultiValue.isMultiValue(d.field("set")));
Assert.assertTrue(OMultiValue.getSize(d.field("set")) <= 2);
}
result = database.command(new OSQLSynchQuery<ODocument>("SELECT set(name)[0,1] as set from OUser")).execute();
Assert.assertEquals(result.size(), 1);
for (ODocument d : result) {
Assert.assertTrue(OMultiValue.isMultiValue(d.field("set")));
Assert.assertTrue(OMultiValue.getSize(d.field("set")) <= 2);
}
result = database.command(new OSQLSynchQuery<ODocument>("SELECT set(name)[0] as unique from OUser")).execute();
Assert.assertEquals(result.size(), 1);
for (ODocument d : result) {
Assert.assertFalse(OMultiValue.isMultiValue(d.field("unique")));
}
}
public void projectionWithNoTarget() {
List<ODocument> result = database.command(new OSQLSynchQuery<ODocument>("select 'Ay' as a , 'bEE'")).execute();
Assert.assertEquals(result.size(), 1);
for (ODocument d : result) {
Assert.assertTrue(d.field("a").equals("Ay"));
Assert.assertTrue(d.field("bEE").equals("bEE"));
}
result = database.command(new OSQLSynchQuery<ODocument>("select 'Ay' as a , 'bEE' as b")).execute();
Assert.assertEquals(result.size(), 1);
for (ODocument d : result) {
Assert.assertTrue(d.field("a").equals("Ay"));
Assert.assertTrue(d.field("b").equals("bEE"));
}
result = database.command(new OSQLSynchQuery<ODocument>("select 'Ay' as a , 'bEE' as b fetchplan *:1")).execute();
Assert.assertEquals(result.size(), 1);
for (ODocument d : result) {
Assert.assertTrue(d.field("a").equals("Ay"));
Assert.assertTrue(d.field("b").equals("bEE"));
}
result = database.command(new OSQLSynchQuery<ODocument>("select 'Ay' as a , 'bEE' fetchplan *:1")).execute();
Assert.assertEquals(result.size(), 1);
for (ODocument d : result) {
Assert.assertTrue(d.field("a").equals("Ay"));
Assert.assertTrue(d.field("bEE").equals("bEE"));
}
}
}
|
|
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
* Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.elasticache.model;
import java.io.Serializable;
/**
* <p>
* Represents the output of a <i>DescribeCacheSecurityGroups</i> action.
* </p>
*/
public class DescribeCacheSecurityGroupsResult implements Serializable,
Cloneable {
/**
* <p>
* Provides an identifier to allow retrieval of paginated results.
* </p>
*/
private String marker;
/**
* <p>
* A list of cache security groups. Each element in the list contains
* detailed information about one group.
* </p>
*/
private com.amazonaws.internal.SdkInternalList<CacheSecurityGroup> cacheSecurityGroups;
/**
* <p>
* Provides an identifier to allow retrieval of paginated results.
* </p>
*
* @param marker
* Provides an identifier to allow retrieval of paginated results.
*/
public void setMarker(String marker) {
this.marker = marker;
}
/**
* <p>
* Provides an identifier to allow retrieval of paginated results.
* </p>
*
* @return Provides an identifier to allow retrieval of paginated results.
*/
public String getMarker() {
return this.marker;
}
/**
* <p>
* Provides an identifier to allow retrieval of paginated results.
* </p>
*
* @param marker
* Provides an identifier to allow retrieval of paginated results.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public DescribeCacheSecurityGroupsResult withMarker(String marker) {
setMarker(marker);
return this;
}
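// A minimal pagination sketch (assuming an AmazonElastiCache client named "elastiCache"
// and a DescribeCacheSecurityGroupsRequest named "request" are in scope; "process" is a
// placeholder for application code):
//
//   DescribeCacheSecurityGroupsResult page;
//   do {
//       page = elastiCache.describeCacheSecurityGroups(request);
//       page.getCacheSecurityGroups().forEach(group -> process(group));
//       request.setMarker(page.getMarker());
//   } while (page.getMarker() != null);
//
// The marker returned with the last page is null, which ends the loop.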
/**
* <p>
* A list of cache security groups. Each element in the list contains
* detailed information about one group.
* </p>
*
* @return A list of cache security groups. Each element in the list
* contains detailed information about one group.
*/
public java.util.List<CacheSecurityGroup> getCacheSecurityGroups() {
if (cacheSecurityGroups == null) {
cacheSecurityGroups = new com.amazonaws.internal.SdkInternalList<CacheSecurityGroup>();
}
return cacheSecurityGroups;
}
/**
* <p>
* A list of cache security groups. Each element in the list contains
* detailed information about one group.
* </p>
*
* @param cacheSecurityGroups
* A list of cache security groups. Each element in the list contains
* detailed information about one group.
*/
public void setCacheSecurityGroups(
java.util.Collection<CacheSecurityGroup> cacheSecurityGroups) {
if (cacheSecurityGroups == null) {
this.cacheSecurityGroups = null;
return;
}
this.cacheSecurityGroups = new com.amazonaws.internal.SdkInternalList<CacheSecurityGroup>(
cacheSecurityGroups);
}
/**
* <p>
* A list of cache security groups. Each element in the list contains
* detailed information about one group.
* </p>
* <p>
* <b>NOTE:</b> This method appends the values to the existing list (if
* any). Use {@link #setCacheSecurityGroups(java.util.Collection)} or
* {@link #withCacheSecurityGroups(java.util.Collection)} if you want to
* override the existing values.
* </p>
*
* @param cacheSecurityGroups
* A list of cache security groups. Each element in the list contains
* detailed information about one group.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public DescribeCacheSecurityGroupsResult withCacheSecurityGroups(
CacheSecurityGroup... cacheSecurityGroups) {
if (this.cacheSecurityGroups == null) {
setCacheSecurityGroups(new com.amazonaws.internal.SdkInternalList<CacheSecurityGroup>(
cacheSecurityGroups.length));
}
for (CacheSecurityGroup ele : cacheSecurityGroups) {
this.cacheSecurityGroups.add(ele);
}
return this;
}
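// Append-vs-replace semantics in a nutshell (groupA, groupB and groupC are illustrative):
//
//   result.withCacheSecurityGroups(groupA);                         // list is [groupA]
//   result.withCacheSecurityGroups(groupB);                         // varargs form appends: [groupA, groupB]
//   result.setCacheSecurityGroups(java.util.Arrays.asList(groupC)); // collection setter replaces: [groupC]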
/**
* <p>
* A list of cache security groups. Each element in the list contains
* detailed information about one group.
* </p>
*
* @param cacheSecurityGroups
* A list of cache security groups. Each element in the list contains
* detailed information about one group.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public DescribeCacheSecurityGroupsResult withCacheSecurityGroups(
java.util.Collection<CacheSecurityGroup> cacheSecurityGroups) {
setCacheSecurityGroups(cacheSecurityGroups);
return this;
}
/**
* Returns a string representation of this object; useful for testing and
* debugging.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getMarker() != null)
sb.append("Marker: " + getMarker() + ",");
if (getCacheSecurityGroups() != null)
sb.append("CacheSecurityGroups: " + getCacheSecurityGroups());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof DescribeCacheSecurityGroupsResult == false)
return false;
DescribeCacheSecurityGroupsResult other = (DescribeCacheSecurityGroupsResult) obj;
if (other.getMarker() == null ^ this.getMarker() == null)
return false;
if (other.getMarker() != null
&& other.getMarker().equals(this.getMarker()) == false)
return false;
if (other.getCacheSecurityGroups() == null
^ this.getCacheSecurityGroups() == null)
return false;
if (other.getCacheSecurityGroups() != null
&& other.getCacheSecurityGroups().equals(
this.getCacheSecurityGroups()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode
+ ((getMarker() == null) ? 0 : getMarker().hashCode());
hashCode = prime
* hashCode
+ ((getCacheSecurityGroups() == null) ? 0
: getCacheSecurityGroups().hashCode());
return hashCode;
}
@Override
public DescribeCacheSecurityGroupsResult clone() {
try {
return (DescribeCacheSecurityGroupsResult) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException(
"Got a CloneNotSupportedException from Object.clone() "
+ "even though we're Cloneable!", e);
}
}
}
|
|
package dev.gigaherz.enderthing.blocks;
import dev.gigaherz.enderthing.Enderthing;
import dev.gigaherz.enderthing.KeyUtils;
import dev.gigaherz.enderthing.gui.Containers;
import net.minecraft.client.gui.screens.Screen;
import net.minecraft.core.BlockPos;
import net.minecraft.core.Direction;
import net.minecraft.core.particles.ParticleTypes;
import net.minecraft.server.level.ServerPlayer;
import net.minecraft.util.Mth;
import net.minecraft.world.InteractionHand;
import net.minecraft.world.InteractionResult;
import net.minecraft.world.entity.LivingEntity;
import net.minecraft.world.entity.player.Player;
import net.minecraft.world.item.ItemStack;
import net.minecraft.world.item.context.BlockPlaceContext;
import net.minecraft.world.level.BlockGetter;
import net.minecraft.world.level.Level;
import net.minecraft.world.level.LevelAccessor;
import net.minecraft.world.level.block.*;
import net.minecraft.world.level.block.entity.BlockEntity;
import net.minecraft.world.level.block.entity.BlockEntityTicker;
import net.minecraft.world.level.block.entity.BlockEntityType;
import net.minecraft.world.level.block.entity.ChestBlockEntity;
import net.minecraft.world.level.block.state.BlockState;
import net.minecraft.world.level.block.state.StateDefinition;
import net.minecraft.world.level.block.state.properties.BlockStateProperties;
import net.minecraft.world.level.block.state.properties.BooleanProperty;
import net.minecraft.world.level.block.state.properties.DirectionProperty;
import net.minecraft.world.level.material.FluidState;
import net.minecraft.world.level.material.Fluids;
import net.minecraft.world.level.pathfinder.PathComputationType;
import net.minecraft.world.phys.BlockHitResult;
import net.minecraft.world.phys.HitResult;
import net.minecraft.world.phys.shapes.CollisionContext;
import net.minecraft.world.phys.shapes.VoxelShape;
import net.minecraftforge.api.distmarker.Dist;
import net.minecraftforge.api.distmarker.OnlyIn;
import net.minecraftforge.items.IItemHandler;
import net.minecraftforge.items.ItemHandlerHelper;
import javax.annotation.Nullable;
import java.util.Random;
public class EnderKeyChestBlock extends AbstractChestBlock<EnderKeyChestBlockEntity> implements SimpleWaterloggedBlock
{
public static final DirectionProperty FACING = EnderChestBlock.FACING;
public static final BooleanProperty WATERLOGGED = BlockStateProperties.WATERLOGGED;
protected static final VoxelShape SHAPE = Block.box(1.0D, 0.0D, 1.0D, 15.0D, 14.0D, 15.0D);
public EnderKeyChestBlock(Properties properties)
{
super(properties, () -> EnderKeyChestBlockEntity.TYPE); // Material.ROCK
registerDefaultState(this.getStateDefinition().any()
.setValue(WATERLOGGED, false)
.setValue(FACING, Direction.NORTH));
}
@Override
@OnlyIn(Dist.CLIENT)
public DoubleBlockCombiner.NeighborCombineResult<? extends ChestBlockEntity> combine(BlockState p_225536_1_, Level p_225536_2_, BlockPos p_225536_3_, boolean p_225536_4_)
{
return DoubleBlockCombiner.Combiner::acceptNone;
}
@Override
protected void createBlockStateDefinition(StateDefinition.Builder<Block, BlockState> builder)
{
builder.add(FACING, WATERLOGGED);
}
@Override
public VoxelShape getShape(BlockState state, BlockGetter worldIn, BlockPos pos, CollisionContext context)
{
return SHAPE;
}
@Nullable
@Override
public <T extends BlockEntity> BlockEntityTicker<T> getTicker(Level level, BlockState state, BlockEntityType<T> blockEntityType)
{
return level.isClientSide ? createTickerHelper(blockEntityType, this.blockEntityType(), EnderKeyChestBlockEntity::lidAnimationTick) : null;
}
public BlockEntityType<? extends EnderKeyChestBlockEntity> blockEntityType()
{
return this.blockEntityType.get();
}
@Nullable
@Override
public BlockEntity newBlockEntity(BlockPos blockPos, BlockState blockState)
{
return new EnderKeyChestBlockEntity(blockPos, blockState);
}
@Deprecated
@Override
public RenderShape getRenderShape(BlockState state)
{
return RenderShape.ENTITYBLOCK_ANIMATED;
}
@Override
public ItemStack getCloneItemStack(BlockState state, HitResult target, BlockGetter world, BlockPos pos, Player player)
{
return getItem(world, pos, Screen.hasShiftDown() || (player.getAbilities().instabuild && Screen.hasControlDown()));
}
@Override
public FluidState getFluidState(BlockState state)
{
return state.getValue(WATERLOGGED) ? Fluids.WATER.defaultFluidState() : Fluids.EMPTY.defaultFluidState();
}
@Override
public InteractionResult use(BlockState state, Level worldIn, BlockPos pos, Player player, InteractionHand handIn, BlockHitResult hit)
{
BlockEntity te = worldIn.getBlockEntity(pos);
if (!(te instanceof EnderKeyChestBlockEntity))
return InteractionResult.PASS;
if (worldIn.getBlockState(pos.above()).isRedstoneConductor(worldIn, pos.above()))
return InteractionResult.FAIL;
if (worldIn.isClientSide)
return InteractionResult.SUCCESS;
if (player.isShiftKeyDown())
{
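// Sneak-use dismantles the chest: hand the player the matching lock item (getItem with
// asChest == false) and restore a vanilla ender chest that keeps this block's facing and
// waterlogged state.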
ItemHandlerHelper.giveItemToPlayer(player, getItem(worldIn, pos, false));
worldIn.setBlockAndUpdate(pos, Blocks.ENDER_CHEST.defaultBlockState()
.setValue(EnderKeyChestBlock.WATERLOGGED, state.getValue(EnderChestBlock.WATERLOGGED))
.setValue(EnderKeyChestBlock.FACING, state.getValue(EnderChestBlock.FACING)));
return InteractionResult.SUCCESS;
}
EnderKeyChestBlockEntity chest = (EnderKeyChestBlockEntity) te;
if (player instanceof ServerPlayer)
Containers.openBlockGui((ServerPlayer) player, chest);
return InteractionResult.SUCCESS;
}
@Nullable
@Override
public BlockState getStateForPlacement(BlockPlaceContext context)
{
FluidState fluidState = context.getLevel().getFluidState(context.getClickedPos());
return this.defaultBlockState().setValue(FACING, context.getPlayer().getDirection().getOpposite())
.setValue(WATERLOGGED, fluidState.getType() == Fluids.WATER);
}
@Override
public void setPlacedBy(Level worldIn, BlockPos pos, BlockState state, LivingEntity placer, ItemStack stack)
{
worldIn.setBlockAndUpdate(pos, state.setValue(FACING, placer.getDirection().getOpposite()));
}
@Override
@OnlyIn(Dist.CLIENT)
public void animateTick(BlockState stateIn, Level worldIn, BlockPos pos, Random rand)
{
for (int i = 0; i < 3; ++i)
{
int xOffset = rand.nextInt(2) * 2 - 1;
int zOffset = rand.nextInt(2) * 2 - 1;
double xPos = pos.getX() + 0.5 + xOffset * 0.25;
double yPos = pos.getY() + rand.nextFloat();
double zPos = pos.getZ() + 0.5 + zOffset * 0.25;
double xSpeed = rand.nextFloat() * xOffset;
double ySpeed = rand.nextFloat() * 0.125 - 0.0625;
double zSpeed = rand.nextFloat() * zOffset;
worldIn.addParticle(ParticleTypes.PORTAL, xPos, yPos, zPos, xSpeed, ySpeed, zSpeed);
}
}
@Deprecated
@Override
public BlockState updateShape(BlockState stateIn, Direction facing, BlockState facingState, LevelAccessor worldIn, BlockPos currentPos, BlockPos facingPos)
{
if (stateIn.getValue(WATERLOGGED))
{
worldIn.scheduleTick(currentPos, Fluids.WATER, Fluids.WATER.getTickDelay(worldIn));
}
return super.updateShape(stateIn, facing, facingState, worldIn, currentPos, facingPos);
}
@Deprecated
@Override
public boolean isPathfindable(BlockState state, BlockGetter worldIn, BlockPos pos, PathComputationType type)
{
return false;
}
@Deprecated
@Override
public boolean triggerEvent(BlockState state, Level worldIn, BlockPos pos, int id, int param)
{
super.triggerEvent(state, worldIn, pos, id, param);
BlockEntity tileentity = worldIn.getBlockEntity(pos);
return tileentity != null && tileentity.triggerEvent(id, param);
}
@Deprecated
@Override
public boolean hasAnalogOutputSignal(BlockState state)
{
return true;
}
@Deprecated
@Override
public int getAnalogOutputSignal(BlockState state, Level level, BlockPos pos)
{
if (level.getBlockEntity(pos) instanceof EnderKeyChestBlockEntity be && be.hasInventory())
return getRedstoneSignalFromContainer(be.getInventory());
return 0;
}
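/**
 * Comparator-style signal for an item handler: averages how full each slot is relative to
 * its effective stack limit, scales the average to 0..14, and adds 1 when at least one
 * slot is non-empty, so any contents yield a signal of at least 1 and a full inventory
 * yields 15.
 */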
public static int getRedstoneSignalFromContainer(@Nullable IItemHandler handler) {
if (handler == null) {
return 0;
}
int nonEmptyStacks = 0;
float fullness = 0.0F;
for(int j = 0; j < handler.getSlots(); ++j) {
ItemStack itemstack = handler.getStackInSlot(j);
if (!itemstack.isEmpty())
{
fullness += (float)itemstack.getCount() / (float)Math.min(handler.getSlotLimit(j), itemstack.getMaxStackSize());
++nonEmptyStacks;
}
}
fullness /= (float)handler.getSlots();
return Mth.floor(fullness * 14.0F) + (nonEmptyStacks > 0 ? 1 : 0);
}
@Deprecated
@Override
public BlockState rotate(BlockState state, Rotation rotation) {
return state.setValue(FACING, rotation.rotate(state.getValue(FACING)));
}
@Deprecated
@Override
public BlockState mirror(BlockState state, Mirror mirror) {
return state.rotate(mirror.getRotation(state.getValue(FACING)));
}
private static ItemStack getItem(BlockGetter world, BlockPos pos, boolean asChest)
{
BlockEntity te = world.getBlockEntity(pos);
if (te instanceof EnderKeyChestBlockEntity te1)
{
long id = te1.getKey();
boolean priv = te1.isPrivate();
return asChest ? KeyUtils.getKeyChest(id, priv, te1.getPlayerBound()) : KeyUtils.getLock(id, priv);
}
return asChest ? new ItemStack(Enderthing.KEY_CHEST) : new ItemStack(Enderthing.LOCK);
}
}
|
|
package apple.uikit;
import apple.NSObject;
import apple.foundation.NSArray;
import apple.foundation.NSAttributedString;
import apple.foundation.NSCoder;
import apple.foundation.NSMethodSignature;
import apple.foundation.NSSet;
import apple.foundation.protocol.NSSecureCoding;
import apple.uikit.protocol.UIConfigurationState;
import apple.uikit.protocol.UIContentConfiguration;
import apple.uikit.struct.NSDirectionalEdgeInsets;
import org.moe.natj.c.ann.FunctionPtr;
import org.moe.natj.general.NatJ;
import org.moe.natj.general.Pointer;
import org.moe.natj.general.ann.ByValue;
import org.moe.natj.general.ann.Generated;
import org.moe.natj.general.ann.Library;
import org.moe.natj.general.ann.Mapped;
import org.moe.natj.general.ann.MappedReturn;
import org.moe.natj.general.ann.NFloat;
import org.moe.natj.general.ann.NInt;
import org.moe.natj.general.ann.NUInt;
import org.moe.natj.general.ann.Owned;
import org.moe.natj.general.ann.Runtime;
import org.moe.natj.general.ptr.VoidPtr;
import org.moe.natj.objc.Class;
import org.moe.natj.objc.ObjCRuntime;
import org.moe.natj.objc.SEL;
import org.moe.natj.objc.ann.ObjCClassBinding;
import org.moe.natj.objc.ann.ProtocolClassMethod;
import org.moe.natj.objc.ann.Selector;
import org.moe.natj.objc.map.ObjCObjectMapper;
@Generated
@Library("UIKit")
@Runtime(ObjCRuntime.class)
@ObjCClassBinding
public class UIListContentConfiguration extends NSObject implements UIContentConfiguration, NSSecureCoding {
static {
NatJ.register();
}
@Generated
protected UIListContentConfiguration(Pointer peer) {
super(peer);
}
@Generated
@Selector("accessInstanceVariablesDirectly")
public static native boolean accessInstanceVariablesDirectly();
/**
* Returns the default configuration for an accompanied sidebar list cell.
*/
@Generated
@Selector("accompaniedSidebarCellConfiguration")
public static native UIListContentConfiguration accompaniedSidebarCellConfiguration();
/**
* Returns the default configuration for an accompanied sidebar list cell with subtitle text.
*/
@Generated
@Selector("accompaniedSidebarSubtitleCellConfiguration")
public static native UIListContentConfiguration accompaniedSidebarSubtitleCellConfiguration();
@Generated
@Owned
@Selector("alloc")
public static native UIListContentConfiguration alloc();
@Owned
@Generated
@Selector("allocWithZone:")
public static native UIListContentConfiguration allocWithZone(VoidPtr zone);
/**
* An attributed variant of the primary text, which supersedes the `text` and some properties of the `textProperties` if set.
*/
@Generated
@Selector("attributedText")
public native NSAttributedString attributedText();
@Generated
@Selector("automaticallyNotifiesObserversForKey:")
public static native boolean automaticallyNotifiesObserversForKey(String key);
/**
* Whether the content view will preserve inherited layout margins from its superview on the horizontal and/or vertical axes.
*/
@Generated
@Selector("axesPreservingSuperviewLayoutMargins")
@NUInt
public native long axesPreservingSuperviewLayoutMargins();
@Generated
@Selector("cancelPreviousPerformRequestsWithTarget:")
public static native void cancelPreviousPerformRequestsWithTarget(@Mapped(ObjCObjectMapper.class) Object aTarget);
@Generated
@Selector("cancelPreviousPerformRequestsWithTarget:selector:object:")
public static native void cancelPreviousPerformRequestsWithTargetSelectorObject(
@Mapped(ObjCObjectMapper.class) Object aTarget, SEL aSelector,
@Mapped(ObjCObjectMapper.class) Object anArgument);
/**
* Returns the default configuration for a list cell.
*/
@Generated
@Selector("cellConfiguration")
public static native UIListContentConfiguration cellConfiguration();
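// A minimal usage sketch (values are illustrative; handing the configuration to a cell's
// contentConfiguration is framework code that is not shown here):
//
//   UIListContentConfiguration config = UIListContentConfiguration.cellConfiguration();
//   config.setText("Title");
//   config.setSecondaryText("Detail");
//   config.setImageToTextPadding(8.0);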
@Generated
@Selector("classFallbacksForKeyedArchiver")
public static native NSArray<String> classFallbacksForKeyedArchiver();
@Generated
@Selector("classForKeyedUnarchiver")
public static native Class classForKeyedUnarchiver();
@Generated
@Owned
@Selector("copyWithZone:")
@MappedReturn(ObjCObjectMapper.class)
public native Object copyWithZone(VoidPtr zone);
@Generated
@Selector("debugDescription")
public static native String debugDescription_static();
@Generated
@Selector("description")
public static native String description_static();
/**
* The margins for the content to the edges of the content view. (When preserving superview layout margins on one or both axes, these are just minimum margins, as inherited margins may be larger.)
*/
@Generated
@Selector("directionalLayoutMargins")
@ByValue
public native NSDirectionalEdgeInsets directionalLayoutMargins();
@Generated
@Selector("encodeWithCoder:")
public native void encodeWithCoder(NSCoder coder);
/**
* Returns the default configuration for a grouped list footer.
*/
@Generated
@Selector("groupedFooterConfiguration")
public static native UIListContentConfiguration groupedFooterConfiguration();
/**
* Returns the default configuration for a grouped list header.
*/
@Generated
@Selector("groupedHeaderConfiguration")
public static native UIListContentConfiguration groupedHeaderConfiguration();
@Generated
@Selector("hash")
@NUInt
public static native long hash_static();
/**
* The image to display.
*/
@Generated
@Selector("image")
public native UIImage image();
/**
* Additional properties to configure the image.
*/
@Generated
@Selector("imageProperties")
public native UIListContentImageProperties imageProperties();
/**
* Padding between the image and text. Only applies when there is both an image and text.
*/
@Generated
@Selector("imageToTextPadding")
@NFloat
public native double imageToTextPadding();
@Generated
@Selector("init")
public native UIListContentConfiguration init();
@Generated
@Selector("initWithCoder:")
public native UIListContentConfiguration initWithCoder(NSCoder coder);
@Generated
@Selector("instanceMethodForSelector:")
@FunctionPtr(name = "call_instanceMethodForSelector_ret")
public static native NSObject.Function_instanceMethodForSelector_ret instanceMethodForSelector(SEL aSelector);
@Generated
@Selector("instanceMethodSignatureForSelector:")
public static native NSMethodSignature instanceMethodSignatureForSelector(SEL aSelector);
@Generated
@Selector("instancesRespondToSelector:")
public static native boolean instancesRespondToSelector(SEL aSelector);
@Generated
@Selector("isSubclassOfClass:")
public static native boolean isSubclassOfClass(Class aClass);
@Generated
@Selector("keyPathsForValuesAffectingValueForKey:")
public static native NSSet<String> keyPathsForValuesAffectingValueForKey(String key);
@Generated
@Selector("makeContentView")
public native UIView makeContentView();
@Generated
@Owned
@Selector("new")
public static native UIListContentConfiguration new_objc();
/**
* Returns the default configuration for a plain list footer.
*/
@Generated
@Selector("plainFooterConfiguration")
public static native UIListContentConfiguration plainFooterConfiguration();
/**
* Returns the default configuration for a plain list header.
*/
@Generated
@Selector("plainHeaderConfiguration")
public static native UIListContentConfiguration plainHeaderConfiguration();
/**
* When YES, the text and secondary text will be positioned side-by-side if there is sufficient space. Otherwise, the text will be stacked in a vertical layout.
*/
@Generated
@Selector("prefersSideBySideTextAndSecondaryText")
public native boolean prefersSideBySideTextAndSecondaryText();
@Generated
@Selector("resolveClassMethod:")
public static native boolean resolveClassMethod(SEL sel);
@Generated
@Selector("resolveInstanceMethod:")
public static native boolean resolveInstanceMethod(SEL sel);
/**
* An attributed variant of the secondary text, which supersedes the `secondaryText` and some properties of the `secondaryTextProperties` if set.
*/
@Generated
@Selector("secondaryAttributedText")
public native NSAttributedString secondaryAttributedText();
/**
* The secondary text.
*/
@Generated
@Selector("secondaryText")
public native String secondaryText();
/**
* Additional properties to configure the secondary text.
*/
@Generated
@Selector("secondaryTextProperties")
public native UIListContentTextProperties secondaryTextProperties();
/**
* An attributed variant of the primary text, which supersedes the `text` and some properties of the `textProperties` if set.
*/
@Generated
@Selector("setAttributedText:")
public native void setAttributedText(NSAttributedString value);
/**
* Whether the content view will preserve inherited layout margins from its superview on the horizontal and/or vertical axes.
*/
@Generated
@Selector("setAxesPreservingSuperviewLayoutMargins:")
public native void setAxesPreservingSuperviewLayoutMargins(@NUInt long value);
/**
* The margins for the content to the edges of the content view. (When preserving superview layout margins on one or both axes, these are just minimum margins, as inherited margins may be larger.)
*/
@Generated
@Selector("setDirectionalLayoutMargins:")
public native void setDirectionalLayoutMargins(@ByValue NSDirectionalEdgeInsets value);
/**
* The image to display.
*/
@Generated
@Selector("setImage:")
public native void setImage(UIImage value);
/**
* Padding between the image and text. Only applies when there is both an image and text.
*/
@Generated
@Selector("setImageToTextPadding:")
public native void setImageToTextPadding(@NFloat double value);
/**
* When YES, the text and secondary text will be positioned side-by-side if there is sufficient space. Otherwise, the text will be stacked in a vertical layout.
*/
@Generated
@Selector("setPrefersSideBySideTextAndSecondaryText:")
public native void setPrefersSideBySideTextAndSecondaryText(boolean value);
/**
* An attributed variant of the secondary text, which supersedes the `secondaryText` and some properties of the `secondaryTextProperties` if set.
*/
@Generated
@Selector("setSecondaryAttributedText:")
public native void setSecondaryAttributedText(NSAttributedString value);
/**
* The secondary text.
*/
@Generated
@Selector("setSecondaryText:")
public native void setSecondaryText(String value);
/**
* The primary text.
*/
@Generated
@Selector("setText:")
public native void setText(String value);
/**
* Horizontal (minimum) padding between the text and secondary text. Only applies when there is both text and secondary text, and they are in a side-by-side layout.
*/
@Generated
@Selector("setTextToSecondaryTextHorizontalPadding:")
public native void setTextToSecondaryTextHorizontalPadding(@NFloat double value);
/**
* Vertical padding between the text and secondary text. Only applies when there is both text and secondary text, and they are in a stacked layout.
*/
@Generated
@Selector("setTextToSecondaryTextVerticalPadding:")
public native void setTextToSecondaryTextVerticalPadding(@NFloat double value);
@Generated
@Selector("setVersion:")
public static native void setVersion_static(@NInt long aVersion);
/**
* Returns the default configuration for a sidebar list cell.
*/
@Generated
@Selector("sidebarCellConfiguration")
public static native UIListContentConfiguration sidebarCellConfiguration();
/**
* Returns the default configuration for a sidebar list header.
*/
@Generated
@Selector("sidebarHeaderConfiguration")
public static native UIListContentConfiguration sidebarHeaderConfiguration();
/**
* Returns the default configuration for a sidebar list cell with subtitle text.
*/
@Generated
@Selector("sidebarSubtitleCellConfiguration")
public static native UIListContentConfiguration sidebarSubtitleCellConfiguration();
/**
* Returns the default configuration for a list cell with subtitle text.
*/
@Generated
@Selector("subtitleCellConfiguration")
public static native UIListContentConfiguration subtitleCellConfiguration();
@Generated
@Selector("superclass")
public static native Class superclass_static();
@Generated
@Selector("supportsSecureCoding")
public static native boolean supportsSecureCoding();
@Generated
@ProtocolClassMethod("supportsSecureCoding")
public boolean _supportsSecureCoding() {
return supportsSecureCoding();
}
/**
* The primary text.
*/
@Generated
@Selector("text")
public native String text();
/**
* Additional properties to configure the primary text.
*/
@Generated
@Selector("textProperties")
public native UIListContentTextProperties textProperties();
/**
* Horizontal (minimum) padding between the text and secondary text. Only applies when there is both text and secondary text, and they are in a side-by-side layout.
*/
@Generated
@Selector("textToSecondaryTextHorizontalPadding")
@NFloat
public native double textToSecondaryTextHorizontalPadding();
/**
* Vertical padding between the text and secondary text. Only applies when there is both text and secondary text, and they are in a stacked layout.
*/
@Generated
@Selector("textToSecondaryTextVerticalPadding")
@NFloat
public native double textToSecondaryTextVerticalPadding();
@Generated
@Selector("updatedConfigurationForState:")
public native UIListContentConfiguration updatedConfigurationForState(
@Mapped(ObjCObjectMapper.class) UIConfigurationState state);
/**
* Returns the default configuration for a list cell with side-by-side value text.
*/
@Generated
@Selector("valueCellConfiguration")
public static native UIListContentConfiguration valueCellConfiguration();
@Generated
@Selector("version")
@NInt
public static native long version_static();
/**
* Returns the default configuration for an extra prominent inset grouped list header.
*/
@Generated
@Selector("extraProminentInsetGroupedHeaderConfiguration")
public static native UIListContentConfiguration extraProminentInsetGroupedHeaderConfiguration();
/**
* Returns the default configuration for a prominent inset grouped list header.
*/
@Generated
@Selector("prominentInsetGroupedHeaderConfiguration")
public static native UIListContentConfiguration prominentInsetGroupedHeaderConfiguration();
}
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.federation.router;
import static org.apache.hadoop.hdfs.server.federation.FederationTestUtils.createFile;
import static org.apache.hadoop.hdfs.server.federation.FederationTestUtils.verifyFileExists;
import static org.apache.hadoop.test.GenericTestUtils.getMethodName;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.verify;
import java.io.IOException;
import java.util.List;
import java.util.Arrays;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.hdfs.protocol.ClientProtocol;
import org.apache.hadoop.hdfs.server.federation.MiniRouterDFSCluster;
import org.apache.hadoop.hdfs.server.federation.MiniRouterDFSCluster.RouterContext;
import org.apache.hadoop.hdfs.server.federation.resolver.RemoteLocation;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.security.GroupMappingServiceProvider;
import org.apache.hadoop.test.LambdaTestUtils;
import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableSet;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.mockito.Mockito;
/**
* Basic tests of router federation rename. Rename across namespaces.
*/
public class TestRouterFederationRename extends TestRouterFederationRenameBase {
public static class MockGroupsMapping implements
GroupMappingServiceProvider {
@Override
public List<String> getGroups(String user) {
return Arrays.asList(user+"_group");
}
@Override
public void cacheGroupsRefresh() {
}
@Override
public void cacheGroupsAdd(List<String> groups) {
}
@Override
public Set<String> getGroupsSet(String user) {
return ImmutableSet.of(user+"_group");
}
}
private RouterContext router;
private FileSystem routerFS;
private MiniRouterDFSCluster cluster;
@BeforeClass
public static void before() throws Exception {
globalSetUp();
}
@AfterClass
public static void after() {
tearDown();
}
@Before
public void testSetup() throws Exception {
setup();
router = getRouterContext();
routerFS = getRouterFileSystem();
cluster = getCluster();
}
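// Creates "path" (with a file inside), runs the supplied rename call, and checks that the
// tree either moved to "renamedPath" or, when the call throws, stayed at "path"; the
// exceptionExpected flag says which of the two outcomes the caller requires.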
private void testRenameDir(RouterContext testRouter, String path,
String renamedPath, boolean exceptionExpected, Callable<Object> call)
throws IOException {
createDir(testRouter.getFileSystem(), path);
// rename
boolean exceptionThrown = false;
try {
call.call();
assertFalse(verifyFileExists(testRouter.getFileSystem(), path));
assertTrue(
verifyFileExists(testRouter.getFileSystem(), renamedPath + "/file"));
} catch (Exception ex) {
exceptionThrown = true;
assertTrue(verifyFileExists(testRouter.getFileSystem(), path + "/file"));
assertFalse(verifyFileExists(testRouter.getFileSystem(), renamedPath));
} finally {
FileContext fileContext = testRouter.getFileContext();
fileContext.delete(new Path(path), true);
fileContext.delete(new Path(renamedPath), true);
}
if (exceptionExpected) {
// Error was expected.
assertTrue(exceptionThrown);
} else {
// No error was expected.
assertFalse(exceptionThrown);
}
}
@Test
public void testSuccessfulRbfRename() throws Exception {
List<String> nss = cluster.getNameservices();
String ns0 = nss.get(0);
String ns1 = nss.get(1);
// Test successfully rename a dir to a destination that is in a different
// namespace.
String dir =
cluster.getFederatedTestDirectoryForNS(ns0) + "/" + getMethodName();
String renamedDir =
cluster.getFederatedTestDirectoryForNS(ns1) + "/" + getMethodName();
testRenameDir(router, dir, renamedDir, false, () -> {
DFSClient client = router.getClient();
ClientProtocol clientProtocol = client.getNamenode();
clientProtocol.rename(dir, renamedDir);
return null;
});
testRenameDir(router, dir, renamedDir, false, () -> {
DFSClient client = router.getClient();
ClientProtocol clientProtocol = client.getNamenode();
clientProtocol.rename2(dir, renamedDir);
return null;
});
}
@Test
public void testRbfRenameFile() throws Exception {
List<String> nss = cluster.getNameservices();
String ns0 = nss.get(0);
String ns1 = nss.get(1);
// Test router federation rename a file.
String file =
cluster.getFederatedTestDirectoryForNS(ns0) + "/" + getMethodName();
String renamedFile =
cluster.getFederatedTestDirectoryForNS(ns1) + "/" + getMethodName();
createFile(routerFS, file, 32);
getRouterFileSystem().mkdirs(new Path(renamedFile));
LambdaTestUtils.intercept(RemoteException.class, "should be a directory",
"Expect RemoteException.", () -> {
DFSClient client = router.getClient();
ClientProtocol clientProtocol = client.getNamenode();
clientProtocol.rename(file, renamedFile);
return null;
});
LambdaTestUtils.intercept(RemoteException.class, "should be a directory",
"Expect RemoteException.", () -> {
DFSClient client = router.getClient();
ClientProtocol clientProtocol = client.getNamenode();
clientProtocol.rename2(file, renamedFile);
return null;
});
getRouterFileSystem().delete(new Path(file), true);
getRouterFileSystem().delete(new Path(renamedFile), true);
}
@Test
public void testRbfRenameWhenDstAlreadyExists() throws Exception {
List<String> nss = cluster.getNameservices();
String ns0 = nss.get(0);
String ns1 = nss.get(1);
// Test router federation rename a path to a destination that is in a
// different namespace and already exists.
String dir =
cluster.getFederatedTestDirectoryForNS(ns0) + "/" + getMethodName();
String renamedDir =
cluster.getFederatedTestDirectoryForNS(ns1) + "/" + getMethodName();
createDir(routerFS, dir);
getRouterFileSystem().mkdirs(new Path(renamedDir));
LambdaTestUtils.intercept(RemoteException.class, "already exists",
"Expect RemoteException.", () -> {
DFSClient client = router.getClient();
ClientProtocol clientProtocol = client.getNamenode();
clientProtocol.rename(dir, renamedDir);
return null;
});
LambdaTestUtils.intercept(RemoteException.class, "already exists",
"Expect RemoteException.", () -> {
DFSClient client = router.getClient();
ClientProtocol clientProtocol = client.getNamenode();
clientProtocol.rename2(dir, renamedDir);
return null;
});
getRouterFileSystem().delete(new Path(dir), true);
getRouterFileSystem().delete(new Path(renamedDir), true);
}
@Test
public void testRbfRenameWhenSrcNotExists() throws Exception {
List<String> nss = cluster.getNameservices();
String ns0 = nss.get(0);
String ns1 = nss.get(1);
// Test router federation rename un-existed path.
String dir =
cluster.getFederatedTestDirectoryForNS(ns0) + "/" + getMethodName();
String renamedDir =
cluster.getFederatedTestDirectoryForNS(ns1) + "/" + getMethodName();
LambdaTestUtils.intercept(RemoteException.class, "File does not exist",
"Expect RemoteException.", () -> {
DFSClient client = router.getClient();
ClientProtocol clientProtocol = client.getNamenode();
clientProtocol.rename(dir, renamedDir);
return null;
});
LambdaTestUtils.intercept(RemoteException.class, "File does not exist",
"Expect RemoteException.", () -> {
DFSClient client = router.getClient();
ClientProtocol clientProtocol = client.getNamenode();
clientProtocol.rename2(dir, renamedDir);
return null;
});
}
@Test
public void testRbfRenameOfMountPoint() throws Exception {
List<String> nss = cluster.getNameservices();
String ns0 = nss.get(0);
String ns1 = nss.get(1);
// Test router federation rename a mount point.
String dir = cluster.getFederatedPathForNS(ns0);
String renamedDir = cluster.getFederatedPathForNS(ns1);
LambdaTestUtils.intercept(RemoteException.class, "is a mount point",
"Expect RemoteException.", () -> {
DFSClient client = router.getClient();
ClientProtocol clientProtocol = client.getNamenode();
clientProtocol.rename(dir, renamedDir);
return null;
});
LambdaTestUtils.intercept(RemoteException.class, "is a mount point",
"Expect RemoteException.", () -> {
DFSClient client = router.getClient();
ClientProtocol clientProtocol = client.getNamenode();
clientProtocol.rename2(dir, renamedDir);
return null;
});
}
@Test
public void testRbfRenameWithMultiDestination() throws Exception {
List<String> nss = cluster.getNameservices();
String ns1 = nss.get(1);
FileSystem rfs = getRouterFileSystem();
// Test router federation rename a path with multi-destination.
String dir = "/same/" + getMethodName();
String renamedDir = cluster.getFederatedTestDirectoryForNS(ns1) + "/"
+ getMethodName();
createDir(rfs, dir);
getRouterFileSystem().mkdirs(new Path(renamedDir));
LambdaTestUtils.intercept(RemoteException.class,
"The remote location should be exactly one", "Expect RemoteException.",
() -> {
DFSClient client = router.getClient();
ClientProtocol clientProtocol = client.getNamenode();
clientProtocol.rename(dir, renamedDir);
return null;
});
LambdaTestUtils.intercept(RemoteException.class,
"The remote location should be exactly one", "Expect RemoteException.",
() -> {
DFSClient client = router.getClient();
ClientProtocol clientProtocol = client.getNamenode();
clientProtocol.rename2(dir, renamedDir);
return null;
});
getRouterFileSystem().delete(new Path(dir), true);
getRouterFileSystem().delete(new Path(renamedDir), true);
}
@Test(timeout = 20000)
public void testCounter() throws Exception {
final RouterRpcServer rpcServer = router.getRouter().getRpcServer();
List<String> nss = cluster.getNameservices();
String ns0 = nss.get(0);
String ns1 = nss.get(1);
RouterFederationRename rbfRename =
Mockito.spy(new RouterFederationRename(rpcServer, router.getConf()));
String path = "/src";
createDir(cluster.getCluster().getFileSystem(0), path);
// Watch the scheduler job count.
int expectedSchedulerCount = rpcServer.getSchedulerJobCount() + 1;
AtomicInteger maxSchedulerCount = new AtomicInteger();
AtomicBoolean watch = new AtomicBoolean(true);
Thread watcher = new Thread(() -> {
while (watch.get()) {
int schedulerCount = rpcServer.getSchedulerJobCount();
if (schedulerCount > maxSchedulerCount.get()) {
maxSchedulerCount.set(schedulerCount);
}
try {
Thread.sleep(1);
} catch (InterruptedException e) {
}
}
});
watcher.start();
// Trigger rename.
rbfRename.routerFedRename("/src", "/dst",
Arrays.asList(new RemoteLocation(ns0, path, null)),
Arrays.asList(new RemoteLocation(ns1, path, null)));
// Verify count.
verify(rbfRename).countIncrement();
verify(rbfRename).countDecrement();
watch.set(false);
watcher.interrupt();
watcher.join();
assertEquals(expectedSchedulerCount, maxSchedulerCount.get());
// Clean up.
assertFalse(cluster.getCluster().getFileSystem(0).exists(new Path(path)));
assertTrue(
cluster.getCluster().getFileSystem(1).delete(new Path(path), true));
}
}
|
|
package com.lab.epam.dao.imp;
import com.lab.epam.dao.AbstractJDBCDao;
import com.lab.epam.dao.PersistException;
import com.lab.epam.entity.Way;
import com.lab.epam.helper.ClassName;
import com.lab.epam.persistant.ConnectionManager;
import com.lab.epam.persistant.ConnectionPool;
import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
import java.sql.Connection;
import java.sql.Date;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.util.ArrayList;
import java.util.List;
/**
* Created by Admin on 10.06.2015.
*/
public class MySqlWayDao extends AbstractJDBCDao<Way, Integer> {
ConnectionPool connection = ConnectionManager.getConnection();
private static final Logger loger = LogManager.getLogger(ClassName.getCurrentClassName());
private static final String GET_WAY_BY_USER_ID = "SELECT w.id, w.rating, w.name, w.visible, w.way_days, w.way_time, w.date_begin, w.date_end, w.deleted, w.recomended, w.is_recommend FROM way AS w JOIN user_way AS uw JOIN user AS u WHERE uw.user_id = u.id AND uw.way_id = w.id AND uw.deleted='false' AND u.id = ?";
private static final String GET_WAY_BY_WAY_ID = "SELECT w.id, w.rating, w.name, w.visible, w.way_days, w.way_time, w.date_begin, w.date_end, w.deleted, w.recomended, w.is_recommend FROM way AS w JOIN user_way AS uw JOIN user AS u WHERE uw.user_id = u.id AND uw.way_id = w.id AND uw.deleted='false' AND uw.way_id = ?";
private static final String DELETE_WAY_BY_USER_ID_WAY_ID = "UPDATE user_way SET deleted = true WHERE user_id = ? AND way_id = ?";
private static final String GET_LAST_ADDED = "SELECT * FROM way ORDER BY id DESC LIMIT 0,1";
private static final String CREATE_USER_WAY = "INSERT INTO user_way (user_id, way_id, way_days) VALUES (?,?,?);";
private static final String UPDATE_WAY_DAY = "UPDATE user_way SET way_days = ? WHERE user_id = ? AND way_id = ?";
private static final String UPDATE_WAY_BEGIN_DATE = "UPDATE way SET date_begin = ? WHERE id = ?";
private static final String UPDATE_WAY_END_DATE = "UPDATE way SET date_end = ? WHERE id = ?";
private static final String UPDATE_WAY_RATING = "UPDATE way SET rating = ? WHERE id = ?";
private static final String UPDATE_WAY_ISRECOMENDED = "UPDATE way SET is_recommend = false WHERE id = ?";
private static final String DELETE_WAY_ISRECOMENDED = "UPDATE way SET recomended = false WHERE id = ?";
private static final String UPDATE_CONFIRM_WAY_ISRECOMENDED = "UPDATE way SET is_recommend = false, recomended = true WHERE id = ?";
private static final String GET_WAY_RECOMENDED = "SELECT * FROM way WHERE recomended=true AND deleted=false AND visible=true";
private static final String SET_WAY_IS_RECOMMENDED = "UPDATE way SET is_recommend = true WHERE id = ?";
private static final String GET_ALL_CONFIRM_RECOMMENDED_WAY = "SELECT * FROM way AS w WHERE w.deleted=false AND w.visible=true AND w.recomended=false AND w.is_recommend=true";
private class PersistGroup extends Way {
public void setId(int id) {
super.setId(id);
}
}
public MySqlWayDao() {
}
public Class getClassModel() {
return Way.class;
}
public void updateConfirmWayRecommended(Integer way_id) throws PersistException {
Connection conn = connection.retrieve();
try (PreparedStatement statement = conn.prepareStatement(UPDATE_CONFIRM_WAY_ISRECOMENDED)) {
statement.setInt(1, way_id);
int count = statement.executeUpdate();
if (count != 1) {
throw new PersistException("On persist modify more then 1 record: " + count);
} else {
}
loger.info("updateConfirmWayRecommended method");
} catch (Exception e) {
throw new PersistException(e);
} finally {
connection.putback(conn);
}
}
public void updateWayIsRecommended(Integer way_id) throws PersistException {
Connection conn = connection.retrieve();
try (PreparedStatement statement = conn.prepareStatement(UPDATE_WAY_ISRECOMENDED)) {
statement.setInt(1, way_id);
int count = statement.executeUpdate();
if (count != 1) {
throw new PersistException("On persist modify more then 1 record: " + count);
} else {
}
loger.info("updateWayIsRecommended method");
} catch (Exception e) {
throw new PersistException(e);
} finally {
connection.putback(conn);
}
}
public void deleteWayIsRecommended(Integer way_id) throws PersistException {
Connection conn = connection.retrieve();
try (PreparedStatement statement = conn.prepareStatement(DELETE_WAY_ISRECOMENDED)) {
statement.setInt(1, way_id);
int count = statement.executeUpdate();
if (count != 1) {
throw new PersistException("On persist modify more then 1 record: " + count);
} else {
}
loger.info("deleteWayIsRecommended method");
} catch (Exception e) {
throw new PersistException(e);
} finally {
connection.putback(conn);
}
}
public List<Way> getWaysByUserId(Integer user_id) throws PersistException {
List<Way> list;
Connection conn = connection.retrieve();
try (PreparedStatement statement = conn.prepareStatement(GET_WAY_BY_USER_ID)) {
statement.setInt(1, user_id);
ResultSet rs = statement.executeQuery();
//loger.info("Get ways from user with id" + user_id + " is succesfull " + rs);
list = parseResultSet(rs);
//loger.info("Parse result with Transformer is succesfull list = " + list);
if (list.size() <= 0) {
loger.info("DB has any ways from user with " + user_id + " user_id");
return null;
}
} catch (Exception e) {
loger.warn("Cant get ways from user with " + user_id + " user_id");
throw new PersistException(e);
} finally {
connection.putback(conn);
}
loger.info("getWaysByUserId method");
return list;
}
public List<Way> getAllConfirmRecommendedWay() throws PersistException {
List<Way> wayList = new ArrayList<>();
Connection conn = connection.retrieve();
try (PreparedStatement statement = conn.prepareStatement(GET_ALL_CONFIRM_RECOMMENDED_WAY)) {
ResultSet rs = statement.executeQuery();
// loger.info("Get last way is succesfull ");
wayList = parseResultSet(rs);
//loger.info("Parse result with Transformer is succesfull");
if (wayList.size() <= 0) {
loger.info("DB has any ways");
return null;
}
if (wayList.size() > 1) {
loger.info("DB has more than one last way");
}
} catch (Exception e) {
loger.warn("Can not get all confirm recommended way.");
throw new PersistException(e);
}
loger.info("getAllConfirmRecommendedWay method");
return wayList;
}
public List<Way> getWaysByWayId(Integer way_id) throws PersistException {
List<Way> list;
Connection conn = connection.retrieve();
try (PreparedStatement statement = conn.prepareStatement(GET_WAY_BY_WAY_ID)) {
statement.setInt(1, way_id);
ResultSet rs = statement.executeQuery();
list = parseResultSet(rs);
if (list.size() <= 0) {
loger.info("DB has any ways from way with " + way_id + " way_id");
return null;
}
} catch (Exception e) {
loger.warn("Cant get ways from way with " + way_id + " way_id");
throw new PersistException(e);
} finally {
connection.putback(conn);
}
loger.info("getWaysByWayId method");
return list;
}
public void create(Connection conn, Way way) throws PersistException {
String sql = prepareStatementForInsert(way);//getCreateQuery();
try (PreparedStatement statement = conn.prepareStatement(sql)) {
// prepareStatementForInsert(statement, object);
int count = statement.executeUpdate();
if (count != 1) {
throw new PersistException("On persist modify more then 1 record: " + count);
} else {
loger.info("Create is succesfule");
}
loger.info("create method");
} catch (Exception e) {
throw new PersistException(e);
}
}
public void deleteWaysByUserIdWayId(Integer user_id, Integer way_id) throws PersistException {
Connection conn = connection.retrieve();
try (PreparedStatement statement = conn.prepareStatement(DELETE_WAY_BY_USER_ID_WAY_ID)) {
try {
statement.setObject(1, user_id);
statement.setObject(2, way_id);
} catch (Exception e) {
throw new PersistException(e);
}
int count = statement.executeUpdate();
if (count != 1) {
throw new PersistException("On delete modify more then 1 record: " + count);
}
loger.info("deleteWaysByUserIdWayId method");
} catch (Exception e) {
loger.warn("Cant delete way from user with " + user_id + " user_id and " + way_id + " way_id");
throw new PersistException(e);
} finally {
connection.putback(conn);
}
}
public Way getLastAdded() throws PersistException {
List<Way> list;
Connection conn = connection.retrieve();
try (PreparedStatement statement = conn.prepareStatement(GET_LAST_ADDED)) {
ResultSet rs = statement.executeQuery();
loger.info("Get last way is succesfull ");
list = parseResultSet(rs);
loger.info("Parse result with Transformer is succesfull");
if (list.size() <= 0) {
loger.info("DB has any ways");
return null;
}
if (list.size() > 1) {
loger.info("DB has more than one last way");
}
} catch (Exception e) {
loger.warn("Cant last way");
throw new PersistException(e);
} finally {
connection.putback(conn);
}
loger.info("getLastAdded method");
return list.iterator().next();
}
public Way getLastAdded(Connection conn) throws PersistException {
List<Way> list;
try (PreparedStatement statement = conn.prepareStatement(GET_LAST_ADDED)) {
ResultSet rs = statement.executeQuery();
// loger.info("Get last way is succesfull ");
list = parseResultSet(rs);
//loger.info("Parse result with Transformer is succesfull");
if (list.size() <= 0) {
loger.info("DB has any ways");
return null;
}
if (list.size() > 1) {
loger.info("DB has more than one last way");
}
} catch (Exception e) {
loger.warn("Cant last way");
throw new PersistException(e);
}
loger.info("getLastAdded method");
return list.iterator().next();
}
public void createUserWay(Integer user_id, Integer way_id, Integer day) throws PersistException {
Connection conn = connection.retrieve();
try (PreparedStatement statement = conn.prepareStatement(CREATE_USER_WAY)) {
statement.setInt(1, user_id);
statement.setInt(2, way_id);
statement.setInt(3, day);
int count = statement.executeUpdate();
if (count != 1) {
throw new PersistException("On persist modify more then 1 record: " + count);
} else {
loger.info("Create is successful");
}
loger.info("createUserWay method");
} catch (Exception e) {
throw new PersistException(e);
} finally {
connection.putback(conn);
}
}
public void createUserWay(Connection conn, Integer user_id, Integer way_id, Integer day) throws PersistException {
try (PreparedStatement statement = conn.prepareStatement(CREATE_USER_WAY)) {
statement.setInt(1, user_id);
statement.setInt(2, way_id);
statement.setInt(3, day);
int count = statement.executeUpdate();
if (count != 1) {
throw new PersistException("On persist modify more then 1 record: " + count);
} else {
loger.info("Create is successful");
}
loger.info("createUserWay method");
} catch (Exception e) {
throw new PersistException(e);
}
}
public void updateWayDay(Integer user_id, Integer way_id, Integer day) throws PersistException {
Connection conn = connection.retrieve();
try (PreparedStatement statement = conn.prepareStatement(UPDATE_WAY_DAY)) {
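// Bind in the order of the UPDATE_WAY_DAY placeholders: 1 = way_days, 2 = user_id, 3 = way_id.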
statement.setInt(2, user_id);
statement.setInt(3, way_id);
statement.setInt(1, day);
int count = statement.executeUpdate();
if (count != 1) {
throw new PersistException("On persist modify more then 1 record: " + count);
} else {
}
loger.info("updateWayDay method");
} catch (Exception e) {
throw new PersistException(e);
} finally {
connection.putback(conn);
}
}
public void updateWayBeginDate(Integer way_id, Date beginDate) throws PersistException {
Connection conn = connection.retrieve();
try (PreparedStatement statement = conn.prepareStatement(UPDATE_WAY_BEGIN_DATE)) {
statement.setDate(1, beginDate);
statement.setInt(2, way_id);
int count = statement.executeUpdate();
if (count != 1) {
throw new PersistException("On persist modify more then 1 record: " + count);
} else {
}
loger.info("updateWayBeginDate method");
} catch (Exception e) {
throw new PersistException(e);
} finally {
connection.putback(conn);
}
}
public void updateWayEndDate(Integer way_id, Date endDate) throws PersistException {
Connection conn = connection.retrieve();
try (PreparedStatement statement = conn.prepareStatement(UPDATE_WAY_END_DATE)) {
statement.setDate(1, endDate);
statement.setInt(2, way_id);
int count = statement.executeUpdate();
if (count != 1) {
throw new PersistException("On persist modified more than 1 record: " + count);
}
loger.info("updateWayEndDate method");
} catch (Exception e) {
throw new PersistException(e);
} finally {
connection.putback(conn);
}
}
public void updateWayRating(Integer way_id, Integer rating) throws PersistException {
Connection conn = connection.retrieve();
try (PreparedStatement statement = conn.prepareStatement(UPDATE_WAY_RATING)) {
statement.setInt(1, rating);
statement.setInt(2, way_id);
int count = statement.executeUpdate();
if (count != 1) {
throw new PersistException("On persist modified more than 1 record: " + count);
}
loger.info("updateWayRating method");
} catch (Exception e) {
throw new PersistException(e);
} finally {
connection.putback(conn);
}
}
public List<Way> getAllWayRecomended() throws PersistException {
List<Way> list;
Connection conn = connection.retrieve();
try (PreparedStatement statement = conn.prepareStatement(GET_WAY_RECOMENDED)) {
ResultSet rs = statement.executeQuery();
list = parseResultSet(rs);
} catch (Exception e) {
throw new PersistException(e);
} finally {
connection.putback(conn);
}
loger.info("getAllWayRecomended method");
return list;
}
public void setWayIsRecommended(Integer way_id) throws PersistException {
Connection conn = connection.retrieve();
try (PreparedStatement statement = conn.prepareStatement(SET_WAY_IS_RECOMMENDED)) {
try {
statement.setInt(1, way_id);
} catch (Exception e) {
throw new PersistException(e);
}
int count = statement.executeUpdate();
if (count != 1) {
throw new PersistException("On insert modify more then 1 record: " + count);
}
loger.info("setWayIsRecommended method");
} catch (Exception e) {
loger.warn("Cant set is recommended from " + way_id + " way_id");
throw new PersistException(e);
} finally {
connection.putback(conn);
}
}
}
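// A minimal usage sketch (assumptions: the enclosing DAO class name and its
// construction are not shown in this excerpt, so "wayDao" stands for an
// already-constructed instance of it, and Way#getId() is assumed to exist):
//
//   Way last = wayDao.getLastAdded();                    // null when the DB has no ways
//   if (last != null) {
//       wayDao.createUserWay(userId, last.getId(), 1);   // attach the way to a user, day 1
//       wayDao.updateWayRating(last.getId(), 5);
//   }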
|
|
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.sagemaker.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
* <p>
* A structure describing the source of a context.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/sagemaker-2017-07-24/ContextSource" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ContextSource implements Serializable, Cloneable, StructuredPojo {
/**
* <p>
* The URI of the source.
* </p>
*/
private String sourceUri;
/**
* <p>
* The type of the source.
* </p>
*/
private String sourceType;
/**
* <p>
* The ID of the source.
* </p>
*/
private String sourceId;
/**
* <p>
* The URI of the source.
* </p>
*
* @param sourceUri
* The URI of the source.
*/
public void setSourceUri(String sourceUri) {
this.sourceUri = sourceUri;
}
/**
* <p>
* The URI of the source.
* </p>
*
* @return The URI of the source.
*/
public String getSourceUri() {
return this.sourceUri;
}
/**
* <p>
* The URI of the source.
* </p>
*
* @param sourceUri
* The URI of the source.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ContextSource withSourceUri(String sourceUri) {
setSourceUri(sourceUri);
return this;
}
/**
* <p>
* The type of the source.
* </p>
*
* @param sourceType
* The type of the source.
*/
public void setSourceType(String sourceType) {
this.sourceType = sourceType;
}
/**
* <p>
* The type of the source.
* </p>
*
* @return The type of the source.
*/
public String getSourceType() {
return this.sourceType;
}
/**
* <p>
* The type of the source.
* </p>
*
* @param sourceType
* The type of the source.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ContextSource withSourceType(String sourceType) {
setSourceType(sourceType);
return this;
}
/**
* <p>
* The ID of the source.
* </p>
*
* @param sourceId
* The ID of the source.
*/
public void setSourceId(String sourceId) {
this.sourceId = sourceId;
}
/**
* <p>
* The ID of the source.
* </p>
*
* @return The ID of the source.
*/
public String getSourceId() {
return this.sourceId;
}
/**
* <p>
* The ID of the source.
* </p>
*
* @param sourceId
* The ID of the source.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ContextSource withSourceId(String sourceId) {
setSourceId(sourceId);
return this;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getSourceUri() != null)
sb.append("SourceUri: ").append(getSourceUri()).append(",");
if (getSourceType() != null)
sb.append("SourceType: ").append(getSourceType()).append(",");
if (getSourceId() != null)
sb.append("SourceId: ").append(getSourceId());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof ContextSource == false)
return false;
ContextSource other = (ContextSource) obj;
if (other.getSourceUri() == null ^ this.getSourceUri() == null)
return false;
if (other.getSourceUri() != null && other.getSourceUri().equals(this.getSourceUri()) == false)
return false;
if (other.getSourceType() == null ^ this.getSourceType() == null)
return false;
if (other.getSourceType() != null && other.getSourceType().equals(this.getSourceType()) == false)
return false;
if (other.getSourceId() == null ^ this.getSourceId() == null)
return false;
if (other.getSourceId() != null && other.getSourceId().equals(this.getSourceId()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getSourceUri() == null) ? 0 : getSourceUri().hashCode());
hashCode = prime * hashCode + ((getSourceType() == null) ? 0 : getSourceType().hashCode());
hashCode = prime * hashCode + ((getSourceId() == null) ? 0 : getSourceId().hashCode());
return hashCode;
}
@Override
public ContextSource clone() {
try {
return (ContextSource) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
}
}
@com.amazonaws.annotation.SdkInternalApi
@Override
public void marshall(ProtocolMarshaller protocolMarshaller) {
com.amazonaws.services.sagemaker.model.transform.ContextSourceMarshaller.getInstance().marshall(this, protocolMarshaller);
}
}
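// A minimal usage sketch (not part of the generated SDK source; the URI, type
// and id values below are hypothetical): the fluent "with" methods above can be
// chained to populate a ContextSource in a single expression.
//
//   ContextSource source = new ContextSource()
//           .withSourceUri("s3://example-bucket/context")
//           .withSourceType("DataSet")
//           .withSourceId("context-123");
//   System.out.println(source);   // toString() prints the populated fields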
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/* -------------------------------- Package --------------------------------- */
package com.github.jessemull.microflex.stat.statbigdecimal;
/* ------------------------------ Dependencies ------------------------------ */
import static org.junit.Assert.*;
import java.io.OutputStream;
import java.io.PrintStream;
import java.math.BigDecimal;
import java.math.MathContext;
import java.math.RoundingMode;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.TreeMap;
import java.util.Map;
import java.util.Random;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.FixMethodOrder;
import org.junit.Test;
import org.junit.runners.MethodSorters;
import com.github.jessemull.microflex.bigdecimalflex.plate.PlateBigDecimal;
import com.github.jessemull.microflex.bigdecimalflex.plate.WellBigDecimal;
import com.github.jessemull.microflex.bigdecimalflex.plate.WellSetBigDecimal;
import com.github.jessemull.microflex.bigdecimalflex.stat.SumOfSquaresBigDecimal;
import com.github.jessemull.microflex.util.RandomUtil;
/**
* This class tests the methods in the sum of squares big decimal class.
*
* @author Jesse L. Mull
* @update Updated Oct 18, 2016
* @address http://www.jessemull.com
* @email [email protected]
*/
@FixMethodOrder(MethodSorters.NAME_ASCENDING)
public class SumOfSquaresBigDecimalWeightsTest {
/* ---------------------------- Local Fields -----------------------------*/
/* Minimum and maximum values for random well and lists */
private static BigDecimal minValue = new BigDecimal(0); // Minimum big decimal value for wells
private static BigDecimal maxValue = new BigDecimal(10); // Maximum big decimal value for wells
private static Random random = new Random(); // Generates random integers
private static MathContext mc = new MathContext(10, RoundingMode.HALF_DOWN); // The math context for input values
private static MathContext contextWeights = new MathContext(2, RoundingMode.HALF_DOWN); // The math context for weights
/* The addition operation */
private static SumOfSquaresBigDecimal sum = new SumOfSquaresBigDecimal();
/* Random objects and numbers for testing */
private static int rows = 5;
private static int columns = 4;
private static int length = 5;
private static int lengthIndices = 10;
private static int plateNumber = 10;
private static int plateNumberIndices = 5;
private static PlateBigDecimal[] array = new PlateBigDecimal[plateNumber];
private static PlateBigDecimal[] arrayIndices = new PlateBigDecimal[plateNumberIndices];
private static double[] weights = new double[length];
private static double[] weightsIndices = new double[lengthIndices];
/* Value of true redirects System.err */
private static boolean error = true;
private static PrintStream originalErr = System.err;
/**
* Generates random objects and numbers for testing.
*/
@BeforeClass
public static void setUp() {
if(error) {
System.setErr(new PrintStream(new OutputStream() {
public void write(int x) {}
}));
}
for(int j = 0; j < array.length; j++) {
PlateBigDecimal plate = RandomUtil.randomPlateBigDecimal(
rows, columns, minValue, maxValue, length, "Plate1-" + j);
array[j] = plate;
}
for(int j = 0; j < arrayIndices.length; j++) {
PlateBigDecimal plateIndices = RandomUtil.randomPlateBigDecimal(
rows, columns, minValue, maxValue, lengthIndices, "Plate1-" + j);
arrayIndices[j] = plateIndices;
}
for(int i = 0; i < weights.length; i++) {
double randomDouble = random.nextDouble();
weights[i] = new BigDecimal(randomDouble + "", contextWeights).doubleValue();
}
for(int i = 0; i < weightsIndices.length; i++) {
double randomDouble = random.nextDouble();
weightsIndices[i] = new BigDecimal(randomDouble + "", contextWeights).doubleValue();
}
}
/**
* Restores System.err.
*/
@AfterClass
public static void restoreErrorOut() {
System.setErr(originalErr);
}
/* ---------------- Well statistics for all plate wells ----------------- */
/**
* Tests the plate statistics method.
*/
@Test
public void testPlate() {
for(PlateBigDecimal plate : array) {
Map<WellBigDecimal, BigDecimal> resultMap = new TreeMap<WellBigDecimal, BigDecimal>();
Map<WellBigDecimal, BigDecimal> returnedMap = sum.plate(plate, weights, mc);
for(WellBigDecimal well : plate) {
double[] input = new double[well.size()];
int index = 0;
for(BigDecimal bd : well) {
input[index] = bd.doubleValue() * weights[index];
index++;
}
DescriptiveStatistics stat = new DescriptiveStatistics(input);
double resultDouble = stat.getSumsq();
BigDecimal result = new BigDecimal(resultDouble, mc);
resultMap.put(well, result);
}
for(WellBigDecimal well : plate) {
BigDecimal result = resultMap.get(well);
BigDecimal returned = returnedMap.get(well);
BigDecimal[] corrected = correctRoundingErrors(result, returned);
assertEquals(corrected[0], corrected[1]);
}
}
}
/**
* Tests the plate statistics method using the values between the indices.
*/
@Test
public void testPlateIndices() {
for(PlateBigDecimal plate : arrayIndices) {
int begin = random.nextInt(plate.first().size() - 4);
int end = begin + random.nextInt(3) + 3;
Map<WellBigDecimal, BigDecimal> resultMap = new TreeMap<WellBigDecimal, BigDecimal>();
Map<WellBigDecimal, BigDecimal> returnedMap = sum.plate(plate, ArrayUtils.subarray(weightsIndices, begin, end), begin, end - begin, mc);
for(WellBigDecimal well : plate) {
double[] input = new double[well.size()];
int index = 0;
for(BigDecimal bd : well) {
input[index] = bd.doubleValue() * weightsIndices[index];
index++;
}
DescriptiveStatistics stat = new DescriptiveStatistics(ArrayUtils.subarray(input, begin, end));
double resultDouble = stat.getSumsq();
BigDecimal result = new BigDecimal(resultDouble, mc);
resultMap.put(well, result);
}
for(WellBigDecimal well : plate) {
BigDecimal result = resultMap.get(well);
BigDecimal returned = returnedMap.get(well);
BigDecimal[] corrected = correctRoundingErrors(result, returned);
assertEquals(corrected[0], corrected[1]);
}
}
}
/* --------------------- Aggregated plate statistics ------------------- */
/**
* Tests the aggregated plate statistics method.
*/
@Test
public void testAggregatedPlate() {
for(PlateBigDecimal plate : array) {
List<BigDecimal> resultList = new ArrayList<BigDecimal>();
BigDecimal aggregatedReturned = sum.platesAggregated(plate, weights, mc);
for (WellBigDecimal well : plate) {
List<BigDecimal> input = well.data();
for(int i = 0; i < input.size(); i++) {
resultList.add(input.get(i).multiply(new BigDecimal(weights[i])));
}
}
double[] inputAggregated = new double[resultList.size()];
for(int i = 0; i < resultList.size(); i++) {
inputAggregated[i] = resultList.get(i).doubleValue();
}
DescriptiveStatistics statAggregated = new DescriptiveStatistics(inputAggregated);
double resultAggregatedDouble = statAggregated.getSumsq();
BigDecimal aggregatedResult = new BigDecimal(resultAggregatedDouble, mc);
BigDecimal[] corrected = correctRoundingErrors(aggregatedResult, aggregatedReturned);
assertEquals(corrected[0], corrected[1]);
}
}
/**
* Tests the aggregated plate statistics method using a collection.
*/
@Test
public void testAggregatedPlateCollection() {
List<PlateBigDecimal> collection = Arrays.asList(array);
Map<PlateBigDecimal, BigDecimal> aggregatedReturnedMap = sum.platesAggregated(collection, weights, mc);
Map<PlateBigDecimal, BigDecimal> aggregatedResultMap = new TreeMap<PlateBigDecimal, BigDecimal>();
for(PlateBigDecimal plate : collection) {
List<BigDecimal> resultList = new ArrayList<BigDecimal>();
for (WellBigDecimal well : plate) {
List<BigDecimal> input = well.data();
for(int i = 0; i < input.size(); i++) {
resultList.add(input.get(i).multiply(new BigDecimal(weights[i])));
}
}
double[] inputAggregated = new double[resultList.size()];
for(int i = 0; i < resultList.size(); i++) {
inputAggregated[i] = resultList.get(i).doubleValue();
}
DescriptiveStatistics statAggregated = new DescriptiveStatistics(inputAggregated);
double resultAggregatedDouble = statAggregated.getSumsq();
BigDecimal aggregatedResult = new BigDecimal(resultAggregatedDouble, mc);
aggregatedResultMap.put(plate, aggregatedResult);
}
for(PlateBigDecimal plate : collection) {
BigDecimal result = aggregatedResultMap.get(plate);
BigDecimal returned = aggregatedReturnedMap.get(plate);
BigDecimal[] corrected = correctRoundingErrors(result, returned);
assertEquals(corrected[0], corrected[1]);
}
}
/**
* Tests the aggregated plate statistics method using an array.
*/
@Test
public void testAggregatedPlateArray() {
Map<PlateBigDecimal, BigDecimal> aggregatedReturnedMap = sum.platesAggregated(array, weights, mc);
Map<PlateBigDecimal, BigDecimal> aggregatedResultMap = new TreeMap<PlateBigDecimal, BigDecimal>();
for(PlateBigDecimal plate : array) {
List<BigDecimal> resultList = new ArrayList<BigDecimal>();
for (WellBigDecimal well : plate) {
List<BigDecimal> input = well.data();
for(int i = 0; i < input.size(); i++) {
resultList.add(input.get(i).multiply(new BigDecimal(weights[i])));
}
}
double[] inputAggregated = new double[resultList.size()];
for(int i = 0; i < resultList.size(); i++) {
inputAggregated[i] = resultList.get(i).doubleValue();
}
DescriptiveStatistics statAggregated = new DescriptiveStatistics(inputAggregated);
double resultAggregatedDouble = statAggregated.getSumsq();
BigDecimal aggregatedResult = new BigDecimal(resultAggregatedDouble, mc);
aggregatedResultMap.put(plate, aggregatedResult);
}
for(PlateBigDecimal plate : array) {
BigDecimal result = aggregatedResultMap.get(plate);
BigDecimal returned = aggregatedReturnedMap.get(plate);
BigDecimal[] corrected = correctRoundingErrors(result, returned);
assertEquals(corrected[0], corrected[1]);
}
}
/**
* Tests the aggregated plate statistics method using the values between the indices.
*/
@Test
public void testAggregatedPlateIndices() {
for(PlateBigDecimal plate : arrayIndices) {
int begin = random.nextInt(plate.first().size() - 4);
int end = begin + random.nextInt(3) + 3;
List<BigDecimal> resultList = new ArrayList<BigDecimal>();
BigDecimal aggregatedReturned = sum.platesAggregated(plate, weightsIndices, begin, end - begin, mc);
for (WellBigDecimal well : plate) {
List<BigDecimal> input = well.data().subList(begin, end);
for(int i = 0; i < input.size(); i++) {
resultList.add(input.get(i).multiply(new BigDecimal(weightsIndices[i])));
}
}
double[] inputAggregated = new double[resultList.size()];
for(int i = 0; i < resultList.size(); i++) {
inputAggregated[i] = resultList.get(i).doubleValue();
}
DescriptiveStatistics statAggregated = new DescriptiveStatistics(inputAggregated);
double resultAggregatedDouble = statAggregated.getSumsq();
BigDecimal aggregatedResult = new BigDecimal(resultAggregatedDouble, mc);
BigDecimal[] corrected = correctRoundingErrors(aggregatedResult, aggregatedReturned);
assertEquals(corrected[0], corrected[1]);
}
}
/**
* Tests the aggregated plate statistics method using the values between the indices of
* the collection.
*/
@Test
public void testAggregatedPlateCollectionIndices() {
int begin = random.nextInt(arrayIndices[0].first().size() - 4);
int end = begin + random.nextInt(3) + 3;
List<PlateBigDecimal> collection = Arrays.asList(arrayIndices);
Map<PlateBigDecimal, BigDecimal> aggregatedReturnedMap = sum.platesAggregated(collection, weightsIndices, begin, end - begin, mc);
Map<PlateBigDecimal, BigDecimal> aggregatedResultMap = new TreeMap<PlateBigDecimal, BigDecimal>();
for(PlateBigDecimal plate : collection) {
List<BigDecimal> resultList = new ArrayList<BigDecimal>();
for (WellBigDecimal well : plate) {
List<BigDecimal> input = well.data().subList(begin, end);
for(int i = 0; i < input.size(); i++) {
resultList.add(input.get(i).multiply(new BigDecimal(weightsIndices[i])));
}
}
double[] inputAggregated = new double[resultList.size()];
for(int i = 0; i < resultList.size(); i++) {
inputAggregated[i] = resultList.get(i).doubleValue();
}
DescriptiveStatistics statAggregated = new DescriptiveStatistics(inputAggregated);
double resultAggregatedDouble = statAggregated.getSumsq();
BigDecimal aggregatedResult = new BigDecimal(resultAggregatedDouble, mc);
aggregatedResultMap.put(plate, aggregatedResult);
}
for(PlateBigDecimal plate : collection) {
BigDecimal result = aggregatedResultMap.get(plate);
BigDecimal returned = aggregatedReturnedMap.get(plate);
BigDecimal[] corrected = correctRoundingErrors(result, returned);
assertEquals(corrected[0], corrected[1]);
}
}
/**
* Tests the aggregated plate statistics method using the values between the indices of
* the array.
*/
@Test
public void testAggregatedPlateArrayIndices() {
int begin = random.nextInt(arrayIndices[0].first().size() - 4);
int end = begin + random.nextInt(3) + 3;
Map<PlateBigDecimal, BigDecimal> aggregatedReturnedMap = sum.platesAggregated(arrayIndices, weightsIndices, begin, end - begin, mc);
Map<PlateBigDecimal, BigDecimal> aggregatedResultMap = new TreeMap<PlateBigDecimal, BigDecimal>();
for(PlateBigDecimal plate : arrayIndices) {
List<BigDecimal> resultList = new ArrayList<BigDecimal>();
for (WellBigDecimal well : plate) {
List<BigDecimal> input = well.data().subList(begin, end);
for(int i = 0; i < input.size(); i++) {
resultList.add(input.get(i).multiply(new BigDecimal(weightsIndices[i])));
}
}
double[] inputAggregated = new double[resultList.size()];
for(int i = 0; i < resultList.size(); i++) {
inputAggregated[i] = resultList.get(i).doubleValue();
}
DescriptiveStatistics statAggregated = new DescriptiveStatistics(inputAggregated);
double resultAggregatedDouble = statAggregated.getSumsq();
BigDecimal aggregatedResult = new BigDecimal(resultAggregatedDouble, mc);
aggregatedResultMap.put(plate, aggregatedResult);
}
for(PlateBigDecimal plate : arrayIndices) {
BigDecimal result = aggregatedResultMap.get(plate);
BigDecimal returned = aggregatedReturnedMap.get(plate);
BigDecimal[] corrected = correctRoundingErrors(result, returned);
assertEquals(corrected[0], corrected[1]);
}
}
/* --------------- Well statistics for all wells in a set -------------- */
/**
* Tests set calculation.
*/
@Test
public void testSet() {
for(PlateBigDecimal plate : array) {
Map<WellBigDecimal, BigDecimal> resultMap = new TreeMap<WellBigDecimal, BigDecimal>();
Map<WellBigDecimal, BigDecimal> returnedMap = sum.set(plate.dataSet(), weights, mc);
for(WellBigDecimal well : plate) {
double[] input = new double[well.size()];
int index = 0;
for(BigDecimal bd : well) {
input[index] = bd.doubleValue() * weights[index];
index++;
}
DescriptiveStatistics stat = new DescriptiveStatistics(input);
double resultDouble = stat.getSumsq();
BigDecimal result = new BigDecimal(resultDouble, mc);
resultMap.put(well, result);
}
for(WellBigDecimal well : plate) {
BigDecimal result = resultMap.get(well);
BigDecimal returned = returnedMap.get(well);
BigDecimal[] corrected = correctRoundingErrors(result, returned);
assertEquals(corrected[0], corrected[1]);
}
}
}
/**
* Tests set calculation using indices.
*/
@Test
public void testSetIndices() {
for(PlateBigDecimal plate : arrayIndices) {
int begin = random.nextInt(plate.first().size() - 4);
int end = begin + random.nextInt(3) + 3;
Map<WellBigDecimal, BigDecimal> resultMap = new TreeMap<WellBigDecimal, BigDecimal>();
Map<WellBigDecimal, BigDecimal> returnedMap = sum.set(plate.dataSet(), ArrayUtils.subarray(weightsIndices, begin, end), begin, end - begin, mc);
for(WellBigDecimal well : plate) {
double[] input = new double[well.size()];
int index = 0;
for(BigDecimal bd : well) {
input[index] = bd.doubleValue() * weightsIndices[index];
index++;
}
DescriptiveStatistics stat = new DescriptiveStatistics(ArrayUtils.subarray(input, begin, end));
double resultDouble = stat.getSumsq();
BigDecimal result = new BigDecimal(resultDouble, mc);
resultMap.put(well, result);
}
for(WellBigDecimal well : plate) {
BigDecimal result = resultMap.get(well);
BigDecimal returned = returnedMap.get(well);
BigDecimal[] corrected = correctRoundingErrors(result, returned);
assertEquals(corrected[0], corrected[1]);
}
}
}
/* ---------------------- Aggregated set statistics -------------------- */
/**
* Tests the aggregated set statistics method.
*/
@Test
public void testAggregatedSet() {
for(PlateBigDecimal plate : array) {
List<BigDecimal> resultList = new ArrayList<BigDecimal>();
BigDecimal aggregatedReturned = sum.setsAggregated(plate.dataSet(), weights, mc);
for (WellBigDecimal well : plate) {
List<BigDecimal> input = well.data();
for(int i = 0; i < input.size(); i++) {
resultList.add(input.get(i).multiply(new BigDecimal(weights[i])));
}
}
double[] inputAggregated = new double[resultList.size()];
for(int i = 0; i < resultList.size(); i++) {
inputAggregated[i] = resultList.get(i).doubleValue();
}
DescriptiveStatistics statAggregated = new DescriptiveStatistics(inputAggregated);
double resultAggregatedDouble = statAggregated.getSumsq();
BigDecimal aggregatedResult = new BigDecimal(resultAggregatedDouble, mc);
BigDecimal[] corrected = correctRoundingErrors(aggregatedResult, aggregatedReturned);
assertEquals(corrected[0], corrected[1]);
}
}
/**
* Tests the aggregated set statistics method using a collection.
*/
@Test
public void testAggregatedSetCollection() {
List<WellSetBigDecimal> collection = new ArrayList<WellSetBigDecimal>();
for(PlateBigDecimal plate : array) {
collection.add(plate.dataSet());
}
Map<WellSetBigDecimal, BigDecimal> aggregatedReturnedMap = sum.setsAggregated(collection, weights, mc);
Map<WellSetBigDecimal, BigDecimal> aggregatedResultMap = new TreeMap<WellSetBigDecimal, BigDecimal>();
for(WellSetBigDecimal set : collection) {
List<BigDecimal> resultList = new ArrayList<BigDecimal>();
for (WellBigDecimal well : set) {
List<BigDecimal> input = well.data();
for(int i = 0; i < input.size(); i++) {
resultList.add(input.get(i).multiply(new BigDecimal(weights[i])));
}
}
double[] inputAggregated = new double[resultList.size()];
for(int i = 0; i < resultList.size(); i++) {
inputAggregated[i] = resultList.get(i).doubleValue();
}
DescriptiveStatistics statAggregated = new DescriptiveStatistics(inputAggregated);
double resultAggregatedDouble = statAggregated.getSumsq();
BigDecimal aggregatedResult = new BigDecimal(resultAggregatedDouble, mc);
aggregatedResultMap.put(set, aggregatedResult);
}
for(WellSetBigDecimal set : collection) {
BigDecimal result = aggregatedResultMap.get(set);
BigDecimal returned = aggregatedReturnedMap.get(set);
BigDecimal[] corrected = correctRoundingErrors(result, returned);
assertEquals(corrected[0], corrected[1]);
}
}
/**
* Tests the aggregated set statistics method using an array.
*/
@Test
public void testAggregatedSetArray() {
WellSetBigDecimal[] setArray = new WellSetBigDecimal[array.length];
for(int i = 0; i < setArray.length; i++) {
setArray[i] = array[i].dataSet();
}
Map<WellSetBigDecimal, BigDecimal> aggregatedReturnedMap = sum.setsAggregated(setArray, weights, mc);
Map<WellSetBigDecimal, BigDecimal> aggregatedResultMap = new TreeMap<WellSetBigDecimal, BigDecimal>();
for(WellSetBigDecimal set : setArray) {
List<BigDecimal> resultList = new ArrayList<BigDecimal>();
for (WellBigDecimal well : set) {
List<BigDecimal> input = well.data();
for(int i = 0; i < input.size(); i++) {
resultList.add(input.get(i).multiply(new BigDecimal(weights[i])));
}
}
double[] inputAggregated = new double[resultList.size()];
for(int i = 0; i < resultList.size(); i++) {
inputAggregated[i] = resultList.get(i).doubleValue();
}
DescriptiveStatistics statAggregated = new DescriptiveStatistics(inputAggregated);
double resultAggregatedDouble = statAggregated.getSumsq();
BigDecimal aggregatedResult = new BigDecimal(resultAggregatedDouble, mc);
aggregatedResultMap.put(set, aggregatedResult);
}
for(WellSetBigDecimal set : setArray) {
BigDecimal result = aggregatedResultMap.get(set);
BigDecimal returned = aggregatedReturnedMap.get(set);
BigDecimal[] corrected = correctRoundingErrors(result, returned);
assertEquals(corrected[0], corrected[1]);
}
}
/**
* Tests the aggregated set statistics method using the values between the indices.
*/
@Test
public void testAggregatedSetIndices() {
for(PlateBigDecimal plate : arrayIndices) {
int begin = random.nextInt(plate.first().size() - 4);
int end = begin + random.nextInt(3) + 3;
List<BigDecimal> resultList = new ArrayList<BigDecimal>();
BigDecimal aggregatedReturned = sum.setsAggregated(plate.dataSet(), weightsIndices, begin, end - begin, mc);
for (WellBigDecimal well : plate) {
List<BigDecimal> input = well.data().subList(begin, end);
for(int i = 0; i < input.size(); i++) {
resultList.add(input.get(i).multiply(new BigDecimal(weightsIndices[i])));
}
}
double[] inputAggregated = new double[resultList.size()];
for(int i = 0; i < resultList.size(); i++) {
inputAggregated[i] = resultList.get(i).doubleValue();
}
DescriptiveStatistics statAggregated = new DescriptiveStatistics(inputAggregated);
double resultAggregatedDouble = statAggregated.getSumsq();
BigDecimal aggregatedResult = new BigDecimal(resultAggregatedDouble, mc);
BigDecimal[] corrected = correctRoundingErrors(aggregatedResult, aggregatedReturned);
assertEquals(corrected[0], corrected[1]);
}
}
/**
* Tests the aggregated set statistics method using the values between the indices of
* the collection.
*/
@Test
public void testAggregatedSetCollectionIndices() {
int begin = random.nextInt(arrayIndices[0].first().size() - 4);
int end = begin + random.nextInt(3) + 3;
List<WellSetBigDecimal> collection = new ArrayList<WellSetBigDecimal>();
for(PlateBigDecimal plate : arrayIndices) {
collection.add(plate.dataSet());
}
Map<WellSetBigDecimal, BigDecimal> aggregatedReturnedMap = sum.setsAggregated(collection, weightsIndices, begin, end - begin, mc);
Map<WellSetBigDecimal, BigDecimal> aggregatedResultMap = new TreeMap<WellSetBigDecimal, BigDecimal>();
for(WellSetBigDecimal set : collection) {
List<BigDecimal> resultList = new ArrayList<BigDecimal>();
for (WellBigDecimal well : set) {
List<BigDecimal> input = well.data().subList(begin, end);
for(int i = 0; i < input.size(); i++) {
resultList.add(input.get(i).multiply(new BigDecimal(weightsIndices[i])));
}
}
double[] inputAggregated = new double[resultList.size()];
for(int i = 0; i < resultList.size(); i++) {
inputAggregated[i] = resultList.get(i).doubleValue();
}
DescriptiveStatistics statAggregated = new DescriptiveStatistics(inputAggregated);
double resultAggregatedDouble = statAggregated.getSumsq();
BigDecimal aggregatedResult = new BigDecimal(resultAggregatedDouble, mc);
aggregatedResultMap.put(set, aggregatedResult);
}
for(WellSetBigDecimal set : collection) {
BigDecimal result = aggregatedResultMap.get(set);
BigDecimal returned = aggregatedReturnedMap.get(set);
BigDecimal[] corrected = correctRoundingErrors(result, returned);
assertEquals(corrected[0], corrected[1]);
}
}
/**
* Tests the aggregated set statistics method using the values between the indices of
* the array.
*/
@Test
public void testAggregatedSetArrayIndices() {
int begin = random.nextInt(arrayIndices[0].first().size() - 4);
int end = begin + random.nextInt(3) + 3;
WellSetBigDecimal[] setArrayIndices = new WellSetBigDecimal[arrayIndices.length];
for(int i = 0; i < setArrayIndices.length; i++) {
setArrayIndices[i] = arrayIndices[i].dataSet();
}
Map<WellSetBigDecimal, BigDecimal> aggregatedReturnedMap = sum.setsAggregated(setArrayIndices, weightsIndices, begin, end - begin, mc);
Map<WellSetBigDecimal, BigDecimal> aggregatedResultMap = new TreeMap<WellSetBigDecimal, BigDecimal>();
for(WellSetBigDecimal set : setArrayIndices) {
List<BigDecimal> resultList = new ArrayList<BigDecimal>();
for (WellBigDecimal well : set) {
List<BigDecimal> input = well.data().subList(begin, end);
for(int i = 0; i < input.size(); i++) {
resultList.add(input.get(i).multiply(new BigDecimal(weightsIndices[i])));
}
}
double[] inputAggregated = new double[resultList.size()];
for(int i = 0; i < resultList.size(); i++) {
inputAggregated[i] = resultList.get(i).doubleValue();
}
DescriptiveStatistics statAggregated = new DescriptiveStatistics(inputAggregated);
double resultAggregatedDouble = statAggregated.getSumsq();
BigDecimal aggregatedResult = new BigDecimal(resultAggregatedDouble, mc);
aggregatedResultMap.put(set, aggregatedResult);
}
for(WellSetBigDecimal set : setArrayIndices) {
BigDecimal result = aggregatedResultMap.get(set);
BigDecimal returned = aggregatedReturnedMap.get(set);
BigDecimal[] corrected = correctRoundingErrors(result, returned);
assertEquals(corrected[0], corrected[1]);
}
}
/* -------------------------- Well statistics -------------------------- */
/**
* Tests well calculation.
*/
@Test
public void testWell() {
for(PlateBigDecimal plate : array) {
for(WellBigDecimal well : plate) {
double[] input = new double[well.size()];
int index = 0;
for(BigDecimal bd : well) {
input[index] = bd.doubleValue() * weights[index];
index++;
}
DescriptiveStatistics stat = new DescriptiveStatistics(input);
double resultDouble = stat.getSumsq();
BigDecimal returned = sum.well(well, weights, mc);
BigDecimal result = new BigDecimal(resultDouble, mc);
BigDecimal[] corrected = correctRoundingErrors(returned, result);
assertEquals(corrected[0], corrected[1]);
}
}
}
/**
* Tests well calculation using indices.
*/
@Test
public void testWellIndices() {
for(PlateBigDecimal plate : arrayIndices) {
for(WellBigDecimal well : plate) {
double[] input = new double[well.size()];
int index = 0;
for(BigDecimal bd : well) {
input[index] = bd.doubleValue() * weightsIndices[index];
index++;
}
int begin = random.nextInt(well.size() - 4);
int end = begin + random.nextInt(3) + 3;
DescriptiveStatistics stat = new DescriptiveStatistics(ArrayUtils.subarray(input, begin, end));
double resultDouble = stat.getSumsq();
BigDecimal returned = sum.well(well, ArrayUtils.subarray(weightsIndices, begin, end), begin, end - begin, mc);
BigDecimal result = new BigDecimal(resultDouble, mc);
BigDecimal[] corrected = correctRoundingErrors(returned, result);
assertEquals(corrected[0], corrected[1]);
}
}
}
/*---------------------------- Helper Methods ----------------------------*/
/**
* Corrects any rounding errors due to differences in the implementation of
* the statistic between the Apache and MicroFlex libraries.
* @param bd1 the first result
* @param bd2 the second result
* @return corrected results
*/
private static BigDecimal[] correctRoundingErrors(BigDecimal bd1, BigDecimal bd2) {
BigDecimal[] array = new BigDecimal[2];
int scale = mc.getPrecision();
while(!bd1.equals(bd2) && scale > mc.getPrecision() / 4) {
bd1 = bd1.setScale(scale, RoundingMode.HALF_DOWN);
bd2 = bd2.setScale(scale, RoundingMode.HALF_DOWN);
if(bd1.subtract(bd1.ulp()).equals(bd2)) {
bd1 = bd1.subtract(bd1.ulp());
}
if(bd1.add(bd1.ulp()).equals(bd2)) {
bd1 = bd1.add(bd1.ulp());
}
scale--;
}
array[0] = bd1;
array[1] = bd2;
return array;
}
}
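// Reference-computation sketch (mirrors the pattern used throughout the tests
// above; "values" and "weights" are hypothetical inputs): each value is scaled
// by its weight, the Apache Commons sum of squares is taken, and the result is
// wrapped in a BigDecimal using the test math context (mc above).
//
//   double[] weighted = new double[values.length];
//   for (int i = 0; i < values.length; i++) {
//       weighted[i] = values[i].doubleValue() * weights[i];
//   }
//   BigDecimal expected = new BigDecimal(
//           new DescriptiveStatistics(weighted).getSumsq(), mc);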
|
|
package markpeng.kaggle.mmc;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.SortedSet;
import java.util.TreeMap;
import java.util.TreeSet;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.shingle.ShingleFilter;
import org.apache.lucene.analysis.standard.StandardTokenizer;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.util.Version;
public class CmdNgramFeatureGenerator {
private static final int BUFFER_LENGTH = 1000;
private static final String newLine = System.getProperty("line.separator");
public List<String> readFeature(String... featureFiles) throws Exception {
List<String> features = new ArrayList<String>();
for (String featureFile : featureFiles) {
BufferedReader in = new BufferedReader(new InputStreamReader(
new FileInputStream(featureFile), "UTF-8"));
try {
String aLine = null;
while ((aLine = in.readLine()) != null) {
String tmp = aLine.toLowerCase().trim();
if (tmp.length() > 0 && !features.contains(tmp))
features.add(tmp);
}
} finally {
in.close();
}
}
// extra features
if (!features.contains("db"))
features.add("db");
if (!features.contains("dd"))
features.add("dd");
return features;
}
public void generate(String trainFolder, String outputTxt, String fileType,
int minDF, int ngram, String... featureFiles) throws Exception {
List<String> features = readFeature(featureFiles);
TreeMap<String, Integer> output = new TreeMap<String, Integer>();
StringBuffer resultStr = new StringBuffer();
BufferedWriter out = new BufferedWriter(new OutputStreamWriter(
new FileOutputStream(outputTxt, false), "UTF-8"));
try {
List<String> fileList = new ArrayList<String>();
for (final File fileEntry : (new File(trainFolder)).listFiles()) {
if (fileEntry.getName().contains("." + fileType)) {
String tmp = fileEntry.getName().substring(0,
fileEntry.getName().lastIndexOf("."));
fileList.add(tmp);
}
}
for (String file : fileList) {
File f = new File(trainFolder + "/" + file + "." + fileType);
System.out.println("Loading " + f.getAbsolutePath());
if (f.exists()) {
List<String> lineCmds = new ArrayList<String>();
String aLine = null;
BufferedReader in = new BufferedReader(
new InputStreamReader(new FileInputStream(
f.getAbsolutePath()), "UTF-8"));
while ((aLine = in.readLine()) != null) {
String tmp = aLine.toLowerCase().trim();
String[] sp = tmp.split("\\t{2,}\\s{2,}");
if (sp.length > 1) {
String cmd = sp[1].trim();
if (features.contains(cmd)) {
lineCmds.add(cmd);
}
}
}
in.close();
// create ngrams
HashSet<String> ngrams = getNgramFreqByLucene(lineCmds,
ngram);
// count DF
for (String n : ngrams) {
if (output.containsKey(n))
output.put(n, output.get(n) + 1);
else
output.put(n, 1);
}
System.out.println("Completed filtering file: " + file);
}
} // end of file loop
// check if each feature exists
SortedSet<Map.Entry<String, Integer>> sortedFeatures = entriesSortedByValues(output);
int validN = 0;
for (Map.Entry<String, Integer> m : sortedFeatures) {
String feature = m.getKey();
int df = m.getValue();
if (df >= minDF) {
resultStr.append(feature.replace(" ", "_") + "," + df
+ newLine);
if (resultStr.length() >= BUFFER_LENGTH) {
out.write(resultStr.toString());
out.flush();
resultStr.setLength(0);
}
validN++;
}
} // end of feature loop
System.out.println("Total # of features (DF >= " + minDF + "): "
+ validN);
} finally {
out.write(resultStr.toString());
out.flush();
out.close();
resultStr.setLength(0);
}
}
public static <K, V extends Comparable<? super V>> SortedSet<Map.Entry<K, V>> entriesSortedByValues(
Map<K, V> map) {
SortedSet<Map.Entry<K, V>> sortedEntries = new TreeSet<Map.Entry<K, V>>(
new Comparator<Map.Entry<K, V>>() {
@Override
public int compare(Map.Entry<K, V> e1, Map.Entry<K, V> e2) {
int res = e1.getValue().compareTo(e2.getValue());
if (res > 0)
return -1;
if (res < 0)
return 1;
else
return res;
}
});
sortedEntries.addAll(map.entrySet());
return sortedEntries;
}
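// Usage sketch (hypothetical map): the comparator above returns -1 when the
// first entry's value is larger, so entries come back in descending value order.
//
//   Map<String, Integer> df = new TreeMap<String, Integer>();
//   df.put("push", 3);
//   df.put("pop", 1);
//   for (Map.Entry<String, Integer> e : entriesSortedByValues(df)) {
//       System.out.println(e.getKey() + "=" + e.getValue());  // push=3, then pop=1
//   }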
private HashSet<String> getNgramFreqByLucene(List<String> lineCmds,
int ngram) throws IOException {
HashSet<String> result = new HashSet<String>();
StringBuffer text = new StringBuffer();
for (String l : lineCmds)
text.append(l + " ");
TokenStream ts = new StandardTokenizer(Version.LUCENE_46,
new StringReader(text.toString()));
ts = new ShingleFilter(ts, ngram, ngram);
try {
CharTermAttribute termAtt = ts
.addAttribute(CharTermAttribute.class);
ts.reset();
while (ts.incrementToken()) {
if (termAtt.length() > 0) {
String word = termAtt.toString();
if (word.split("\\s").length == ngram) {
System.out.println(word);
result.add(word);
}
}
}
} finally {
// close the TokenStream to release analyzer resources
ts.end();
ts.close();
}
return result;
}
public static void main(String[] args) throws Exception {
// List<String> lineCmds = new ArrayList<String>();
// lineCmds.add("push");
// lineCmds.add("push");
// lineCmds.add("pop");
// lineCmds.add("push");
// CmdNgramFeatureGenerator worker = new CmdNgramFeatureGenerator();
// worker.getNgramFreqByLucene(lineCmds, 2);
if (args.length < 6) {
System.out
.println("Arguments: [train folder] [featureFiles] [output txt] [file type] [minDF] [ngram] ");
return;
}
String trainFolder = args[0];
String[] featureFiles = args[1].split("\\|");
String outputTxt = args[2];
String fileType = args[3];
int minDF = Integer.parseInt(args[4]);
int ngram = Integer.parseInt(args[5]);
CmdNgramFeatureGenerator worker = new CmdNgramFeatureGenerator();
worker.generate(trainFolder, outputTxt, fileType, minDF, ngram,
featureFiles);
}
}
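// Example invocation (the paths and the "asm" file type are hypothetical; the
// argument order matches the usage message printed by main): feature files are
// '|'-separated, minDF is the minimum document frequency, ngram is the shingle size.
//
//   java markpeng.kaggle.mmc.CmdNgramFeatureGenerator \
//       /data/train "features_a.txt|features_b.txt" cmd_ngrams.csv asm 5 2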
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.metrics.util;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.MetricOptions;
import org.apache.flink.metrics.Gauge;
import org.apache.flink.metrics.MetricGroup;
import org.apache.flink.runtime.akka.AkkaUtils;
import org.apache.flink.runtime.clusterframework.types.AllocationID;
import org.apache.flink.runtime.memory.MemoryAllocationException;
import org.apache.flink.runtime.memory.MemoryManager;
import org.apache.flink.runtime.metrics.MetricNames;
import org.apache.flink.runtime.rpc.RpcService;
import org.apache.flink.runtime.rpc.akka.AkkaRpcService;
import org.apache.flink.runtime.taskexecutor.TaskManagerServices;
import org.apache.flink.runtime.taskexecutor.TaskManagerServicesBuilder;
import org.apache.flink.runtime.taskexecutor.slot.TestingTaskSlotTable;
import org.apache.flink.runtime.taskmanager.Task;
import org.apache.flink.util.TestLogger;
import org.apache.flink.shaded.guava18.com.google.common.collect.Sets;
import akka.actor.ActorSystem;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import static org.apache.flink.runtime.metrics.util.MetricUtils.METRIC_GROUP_FLINK;
import static org.apache.flink.runtime.metrics.util.MetricUtils.METRIC_GROUP_MANAGED_MEMORY;
import static org.apache.flink.runtime.metrics.util.MetricUtils.METRIC_GROUP_MEMORY;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertThat;
/**
* Tests for the {@link MetricUtils} class.
*/
public class MetricUtilsTest extends TestLogger {
private static final Logger LOG = LoggerFactory.getLogger(MetricUtilsTest.class);
/**
* Tests that the {@link MetricUtils#startRemoteMetricsRpcService(Configuration, String)} respects
* the given {@link MetricOptions#QUERY_SERVICE_THREAD_PRIORITY}.
*/
@Test
public void testStartMetricActorSystemRespectsThreadPriority() throws Exception {
final Configuration configuration = new Configuration();
final int expectedThreadPriority = 3;
configuration.setInteger(MetricOptions.QUERY_SERVICE_THREAD_PRIORITY, expectedThreadPriority);
final RpcService rpcService = MetricUtils.startRemoteMetricsRpcService(configuration, "localhost");
assertThat(rpcService, instanceOf(AkkaRpcService.class));
final ActorSystem actorSystem = ((AkkaRpcService) rpcService).getActorSystem();
try {
final int threadPriority = actorSystem.settings().config().getInt("akka.actor.default-dispatcher.thread-priority");
assertThat(threadPriority, is(expectedThreadPriority));
} finally {
AkkaUtils.terminateActorSystem(actorSystem).get();
}
}
@Test
public void testNonHeapMetricsCompleteness() {
final InterceptingOperatorMetricGroup nonHeapMetrics = new InterceptingOperatorMetricGroup();
MetricUtils.instantiateNonHeapMemoryMetrics(nonHeapMetrics);
Assert.assertNotNull(nonHeapMetrics.get(MetricNames.MEMORY_USED));
Assert.assertNotNull(nonHeapMetrics.get(MetricNames.MEMORY_COMMITTED));
Assert.assertNotNull(nonHeapMetrics.get(MetricNames.MEMORY_MAX));
}
@Test
public void testMetaspaceCompleteness() {
final InterceptingOperatorMetricGroup metaspaceMetrics = new InterceptingOperatorMetricGroup() {
@Override
public MetricGroup addGroup(String name) {
return this;
}
};
MetricUtils.instantiateMetaspaceMemoryMetrics(metaspaceMetrics);
Assert.assertNotNull(metaspaceMetrics.get(MetricNames.MEMORY_USED));
Assert.assertNotNull(metaspaceMetrics.get(MetricNames.MEMORY_COMMITTED));
Assert.assertNotNull(metaspaceMetrics.get(MetricNames.MEMORY_MAX));
}
@Test
public void testHeapMetricsCompleteness() {
final InterceptingOperatorMetricGroup heapMetrics = new InterceptingOperatorMetricGroup();
MetricUtils.instantiateHeapMemoryMetrics(heapMetrics);
Assert.assertNotNull(heapMetrics.get(MetricNames.MEMORY_USED));
Assert.assertNotNull(heapMetrics.get(MetricNames.MEMORY_COMMITTED));
Assert.assertNotNull(heapMetrics.get(MetricNames.MEMORY_MAX));
}
/**
* Tests that heap/non-heap metrics do not rely on a static MemoryUsage instance.
*
* <p>We can only check this easily for the currently used heap memory, so we use this as a proxy for testing
* the functionality in general.
*/
@Test
public void testHeapMetricUsageNotStatic() throws Exception {
final InterceptingOperatorMetricGroup heapMetrics = new InterceptingOperatorMetricGroup();
MetricUtils.instantiateHeapMemoryMetrics(heapMetrics);
@SuppressWarnings("unchecked")
final Gauge<Long> used = (Gauge<Long>) heapMetrics.get(MetricNames.MEMORY_USED);
final long usedHeapInitially = used.getValue();
// check memory usage difference multiple times since other tests may affect memory usage as well
for (int x = 0; x < 10; x++) {
final byte[] array = new byte[1024 * 1024 * 8];
final long usedHeapAfterAllocation = used.getValue();
if (usedHeapInitially != usedHeapAfterAllocation) {
return;
}
Thread.sleep(50);
}
Assert.fail("Heap usage metric never changed it's value.");
}
@Test
public void testMetaspaceMetricUsageNotStatic() throws InterruptedException {
final InterceptingOperatorMetricGroup metaspaceMetrics = new InterceptingOperatorMetricGroup() {
@Override
public MetricGroup addGroup(String name) {
return this;
}
};
MetricUtils.instantiateMetaspaceMemoryMetrics(metaspaceMetrics);
@SuppressWarnings("unchecked")
final Gauge<Long> used = (Gauge<Long>) metaspaceMetrics.get(MetricNames.MEMORY_USED);
final long usedMetaspaceInitially = used.getValue();
// check memory usage difference multiple times since other tests may affect memory usage as well
for (int x = 0; x < 10; x++) {
List<Runnable> consumerList = new ArrayList<>();
for (int i = 0; i < 10; i++) {
consumerList.add(() -> {});
}
final long usedMetaspaceAfterAllocation = used.getValue();
if (usedMetaspaceInitially != usedMetaspaceAfterAllocation) {
return;
}
Thread.sleep(50);
}
Assert.fail("Metaspace usage metric never changed it's value.");
}
@Test
public void testManagedMemoryMetricsInitialization() throws MemoryAllocationException {
final int maxMemorySize = 16284;
final int numberOfAllocatedPages = 2;
final int pageSize = 4096;
final Object owner = new Object();
final MemoryManager memoryManager = MemoryManager.create(maxMemorySize, pageSize);
memoryManager.allocatePages(owner, numberOfAllocatedPages);
final TaskManagerServices taskManagerServices = new TaskManagerServicesBuilder()
.setTaskSlotTable(new TestingTaskSlotTable.TestingTaskSlotTableBuilder<Task>()
.memoryManagerGetterReturns(memoryManager)
.allActiveSlotAllocationIds(() -> Sets.newHashSet(new AllocationID()))
.build())
.setManagedMemorySize(maxMemorySize)
.build();
List<String> actualSubGroupPath = new ArrayList<>();
final InterceptingOperatorMetricGroup metricGroup = new InterceptingOperatorMetricGroup() {
@Override
public MetricGroup addGroup(String name) {
actualSubGroupPath.add(name);
return this;
}
};
MetricUtils.instantiateFlinkMemoryMetricGroup(
metricGroup,
taskManagerServices.getTaskSlotTable(),
taskManagerServices::getManagedMemorySize);
Gauge<Number> usedMetric = (Gauge<Number>) metricGroup.get("Used");
Gauge<Number> maxMetric = (Gauge<Number>) metricGroup.get("Total");
assertThat(usedMetric.getValue().intValue(), is(numberOfAllocatedPages * pageSize));
assertThat(maxMetric.getValue().intValue(), is(maxMemorySize));
assertThat(actualSubGroupPath, is(Arrays.asList(METRIC_GROUP_FLINK, METRIC_GROUP_MEMORY, METRIC_GROUP_MANAGED_MEMORY)));
}
}
|
|
/*
* Copyright (c) 2015 LingoChamp Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.liulishuo.filedownloader.util;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Message;
import com.liulishuo.filedownloader.BaseDownloadTask;
import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
/**
* The serial queue, which is used to dynamically enqueue tasks; tasks in the queue will
* automatically start downloading one by one.
*/
public class FileDownloadSerialQueue {
private final Object operationLock = new Object();
private final BlockingQueue<BaseDownloadTask> mTasks = new LinkedBlockingQueue<>();
private final List<BaseDownloadTask> pausedList = new ArrayList<>();
private final HandlerThread mHandlerThread;
private final Handler mHandler;
private static final int WHAT_NEXT = 1;
public static final int ID_INVALID = 0;
volatile BaseDownloadTask workingTask;
final SerialFinishCallback finishCallback;
volatile boolean paused = false;
public FileDownloadSerialQueue() {
mHandlerThread = new HandlerThread(
FileDownloadUtils.getThreadPoolName("SerialDownloadManager"));
mHandlerThread.start();
mHandler = new Handler(mHandlerThread.getLooper(), new SerialLoop());
finishCallback = new SerialFinishCallback(new WeakReference<>(this));
sendNext();
}
/**
* Enqueues the given task at the tail of the serial queue. If the {@code task} reaches the head of
* the serial queue, it will be started automatically.
*/
public void enqueue(BaseDownloadTask task) {
synchronized (finishCallback) {
if (paused) {
pausedList.add(task);
return;
}
try {
mTasks.put(task);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
/**
* Pause the queue.
*
* @see #resume()
*/
public void pause() {
synchronized (finishCallback) {
if (paused) {
FileDownloadLog.w(this, "require pause this queue(remain %d), but "
+ "it has already been paused", mTasks.size());
return;
}
paused = true;
mTasks.drainTo(pausedList);
if (workingTask != null) {
workingTask.removeFinishListener(finishCallback);
workingTask.pause();
}
}
}
/**
* Resume the queue if the queue is paused.
*
* @see #pause()
*/
public void resume() {
synchronized (finishCallback) {
if (!paused) {
FileDownloadLog.w(this, "require resume this queue(remain %d), but it is"
+ " still running", mTasks.size());
return;
}
paused = false;
mTasks.addAll(pausedList);
pausedList.clear();
if (workingTask == null) {
sendNext();
} else {
workingTask.addFinishListener(finishCallback);
workingTask.start();
}
}
}
/**
* Returns the identifier of the working task; if no task is working, you will receive
* {@link #ID_INVALID}.
*
* @return the identifier of the working task
*/
public int getWorkingTaskId() {
return workingTask != null ? workingTask.getId() : ID_INVALID;
}
/**
* Get the count of tasks which are waiting on this queue.
*
* @return the count of waiting tasks on this queue.
*/
public int getWaitingTaskCount() {
return mTasks.size() + pausedList.size();
}
/**
* Attempts to stop the working task, halts the processing of waiting tasks, and returns a list
* of the tasks that were awaiting execution. These tasks are drained (removed) from the task
* queue upon return from this method.
*/
public List<BaseDownloadTask> shutdown() {
synchronized (finishCallback) {
if (workingTask != null) {
pause();
}
final List<BaseDownloadTask> unDealTaskList = new ArrayList<>(pausedList);
pausedList.clear();
mHandler.removeMessages(WHAT_NEXT);
mHandlerThread.interrupt();
mHandlerThread.quit();
return unDealTaskList;
}
}
private class SerialLoop implements Handler.Callback {
@Override
public boolean handleMessage(Message msg) {
switch (msg.what) {
case WHAT_NEXT:
try {
if (paused) break;
workingTask = mTasks.take();
workingTask.addFinishListener(finishCallback)
.start();
} catch (InterruptedException ignored) { }
break;
default:
//ignored
}
return false;
}
}
private static class SerialFinishCallback implements BaseDownloadTask.FinishListener {
private final WeakReference<FileDownloadSerialQueue> mQueueWeakReference;
SerialFinishCallback(WeakReference<FileDownloadSerialQueue> queueWeakReference) {
this.mQueueWeakReference = queueWeakReference;
}
@Override
public synchronized void over(BaseDownloadTask task) {
task.removeFinishListener(this);
if (mQueueWeakReference == null) {
return;
}
final FileDownloadSerialQueue queue = mQueueWeakReference.get();
if (queue == null) {
return;
}
queue.workingTask = null;
if (queue.paused) {
return;
}
queue.sendNext();
}
}
private void sendNext() {
mHandler.sendEmptyMessage(WHAT_NEXT);
}
}
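// Usage sketch (assumption: BaseDownloadTask instances are normally obtained
// from the FileDownloader API, which is not shown in this file; "task1" and
// "task2" stand for such tasks):
//
//   FileDownloadSerialQueue queue = new FileDownloadSerialQueue();
//   queue.enqueue(task1);          // started automatically when it reaches the head
//   queue.enqueue(task2);
//   queue.pause();                 // pauses the working task and holds waiting ones
//   queue.resume();                // continues with the paused/waiting tasks
//   List<BaseDownloadTask> remaining = queue.shutdown();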
|
|
/*
* Copyright 2000-2013 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.appengine.actions;
import com.intellij.CommonBundle;
import com.intellij.appengine.cloud.AppEngineAuthData;
import com.intellij.appengine.cloud.AppEngineServerConfiguration;
import com.intellij.appengine.descriptor.dom.AppEngineWebApp;
import com.intellij.appengine.facet.AppEngineAccountDialog;
import com.intellij.appengine.facet.AppEngineFacet;
import com.intellij.appengine.sdk.AppEngineSdk;
import com.intellij.appengine.util.AppEngineUtil;
import com.intellij.execution.ExecutionException;
import com.intellij.execution.configurations.CommandLineBuilder;
import com.intellij.execution.configurations.GeneralCommandLine;
import com.intellij.execution.configurations.JavaParameters;
import com.intellij.execution.configurations.ParametersList;
import com.intellij.execution.process.*;
import com.intellij.execution.ui.ConsoleView;
import com.intellij.execution.ui.ConsoleViewContentType;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.Result;
import com.intellij.openapi.command.WriteCommandAction;
import com.intellij.openapi.compiler.CompileContext;
import com.intellij.openapi.compiler.CompileScope;
import com.intellij.openapi.compiler.CompileStatusNotification;
import com.intellij.openapi.compiler.CompilerManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.progress.Task;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.KeyValue;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.packaging.artifacts.Artifact;
import com.intellij.packaging.artifacts.ArtifactManager;
import com.intellij.packaging.elements.PackagingElementResolvingContext;
import com.intellij.packaging.impl.artifacts.ArtifactUtil;
import com.intellij.packaging.impl.compiler.ArtifactCompileScope;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiFile;
import com.intellij.remoteServer.runtime.deployment.DeploymentRuntime;
import com.intellij.remoteServer.runtime.deployment.ServerRuntimeInstance;
import com.intellij.remoteServer.runtime.log.LoggingHandler;
import com.intellij.util.net.HttpConfigurable;
import com.intellij.util.xml.GenericDomValue;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.util.Collections;
import java.util.List;
/**
* @author nik
*/
public class AppEngineUploader {
private static final Logger LOG = Logger.getInstance("#com.intellij.appengine.actions.AppEngineUploader");
private final Project myProject;
private final Artifact myArtifact;
private final AppEngineFacet myAppEngineFacet;
private final AppEngineSdk mySdk;
private final AppEngineAuthData myAuthData;
private final ServerRuntimeInstance.DeploymentOperationCallback myCallback;
private final LoggingHandler myLoggingHandler;
private AppEngineUploader(Project project, Artifact artifact, AppEngineFacet appEngineFacet, AppEngineSdk sdk, AppEngineAuthData authData,
ServerRuntimeInstance.DeploymentOperationCallback callback, @NotNull LoggingHandler loggingHandler) {
myProject = project;
myArtifact = artifact;
myAppEngineFacet = appEngineFacet;
mySdk = sdk;
myAuthData = authData;
myCallback = callback;
myLoggingHandler = loggingHandler;
}
@Nullable
public static AppEngineUploader createUploader(@NotNull Project project,
@NotNull Artifact artifact,
@NotNull AppEngineServerConfiguration configuration,
@NotNull ServerRuntimeInstance.DeploymentOperationCallback callback, @NotNull LoggingHandler loggingHandler) {
final String explodedPath = artifact.getOutputPath();
if (explodedPath == null) {
callback.errorOccurred("Output path isn't specified for '" + artifact.getName() + "' artifact");
return null;
}
final AppEngineFacet appEngineFacet = AppEngineUtil.findAppEngineFacet(project, artifact);
if (appEngineFacet == null) {
callback.errorOccurred("App Engine facet not found in '" + artifact.getName() + "' artifact");
return null;
}
final AppEngineSdk sdk = appEngineFacet.getSdk();
if (!sdk.getAppCfgFile().exists()) {
callback.errorOccurred("Path to App Engine SDK isn't specified correctly in App Engine Facet settings");
return null;
}
PackagingElementResolvingContext context = ArtifactManager.getInstance(project).getResolvingContext();
VirtualFile descriptorFile = ArtifactUtil.findSourceFileByOutputPath(artifact, "WEB-INF/appengine-web.xml", context);
final AppEngineWebApp root = AppEngineFacet.getDescriptorRoot(descriptorFile, appEngineFacet.getModule().getProject());
if (root != null) {
final GenericDomValue<String> application = root.getApplication();
if (StringUtil.isEmptyOrSpaces(application.getValue())) {
final String name = Messages.showInputDialog(project, "<html>Application name is not specified in appengine-web.xml.<br>" +
"Enter application name (see your <a href=\"http://appengine.google.com\">AppEngine account</a>):</html>", CommonBundle.getErrorTitle(), null, "", null);
if (name == null) return null;
final PsiFile file = application.getXmlTag().getContainingFile();
new WriteCommandAction(project, file) {
protected void run(final Result result) {
application.setStringValue(name);
}
}.execute();
final Document document = PsiDocumentManager.getInstance(project).getDocument(file);
if (document != null) {
FileDocumentManager.getInstance().saveDocument(document);
}
}
}
AppEngineAuthData authData = AppEngineAccountDialog.createAuthData(project, configuration);
if (authData == null) return null;
return new AppEngineUploader(project, artifact, appEngineFacet, sdk, authData, callback, loggingHandler);
}
public void startUploading() {
FileDocumentManager.getInstance().saveAllDocuments();
ProgressManager.getInstance().run(new Task.Backgroundable(myProject, "Uploading application", true, null) {
public void run(@NotNull ProgressIndicator indicator) {
compileAndUpload();
}
});
}
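// Compiles the artifact's module first; uploading starts only when the build finishes without errors and was not aborted.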
private void compileAndUpload() {
final Runnable startUploading = new Runnable() {
public void run() {
ApplicationManager.getApplication().invokeLater(new Runnable() {
public void run() {
startUploadingProcess();
}
});
}
};
final CompilerManager compilerManager = CompilerManager.getInstance(myProject);
final CompileScope moduleScope = compilerManager.createModuleCompileScope(myAppEngineFacet.getModule(), true);
final CompileScope compileScope = ArtifactCompileScope.createScopeWithArtifacts(moduleScope, Collections.singletonList(myArtifact));
ApplicationManager.getApplication().invokeLater(new Runnable() {
public void run() {
compilerManager.make(compileScope, new CompileStatusNotification() {
public void finished(boolean aborted, int errors, int warnings, CompileContext compileContext) {
if (!aborted && errors == 0) {
startUploading.run();
}
}
});
}
});
}
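// Launches AppCfg ("update") in a separate Java process and streams its output through the logging handler.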
private void startUploadingProcess() {
final Process process;
final GeneralCommandLine commandLine;
try {
JavaParameters parameters = new JavaParameters();
parameters.configureByModule(myAppEngineFacet.getModule(), JavaParameters.JDK_ONLY);
parameters.setMainClass("com.google.appengine.tools.admin.AppCfg");
parameters.getClassPath().add(mySdk.getToolsApiJarFile().getAbsolutePath());
final List<KeyValue<String,String>> list = HttpConfigurable.getJvmPropertiesList(false, null);
if (! list.isEmpty()) {
final ParametersList parametersList = parameters.getVMParametersList();
for (KeyValue<String, String> value : list) {
parametersList.defineProperty(value.getKey(), value.getValue());
}
}
final ParametersList programParameters = parameters.getProgramParametersList();
if (myAuthData.isOAuth2()) {
programParameters.add("--oauth2");
}
else {
programParameters.add("--email=" + myAuthData.getEmail());
programParameters.add("--passin");
}
programParameters.add("--no_cookies");
programParameters.add("update");
programParameters.add(FileUtil.toSystemDependentName(myArtifact.getOutputPath()));
commandLine = CommandLineBuilder.createFromJavaParameters(parameters);
process = commandLine.createProcess();
}
catch (ExecutionException e) {
myCallback.errorOccurred("Cannot start uploading: " + e.getMessage());
return;
}
final ProcessHandler processHandler = new OSProcessHandler(process, commandLine.getCommandLineString());
processHandler.addProcessListener(new MyProcessListener(processHandler, null, myLoggingHandler));
myLoggingHandler.attachToProcess(processHandler);
processHandler.startNotify();
}
private class MyProcessListener extends ProcessAdapter {
private boolean myPasswordEntered;
private final ProcessHandler myProcessHandler;
@Nullable private final ConsoleView myConsole;
@Nullable private final LoggingHandler myLoggingHandler;
public MyProcessListener(ProcessHandler processHandler, @Nullable ConsoleView console, @Nullable LoggingHandler loggingHandler) {
myProcessHandler = processHandler;
myConsole = console;
myLoggingHandler = loggingHandler;
}
@Override
public void onTextAvailable(ProcessEvent event, Key outputType) {
if (!myAuthData.isOAuth2() && !myPasswordEntered && !outputType.equals(ProcessOutputTypes.SYSTEM) && event.getText().contains(myAuthData.getEmail())) {
myPasswordEntered = true;
final OutputStream processInput = myProcessHandler.getProcessInput();
if (processInput != null) {
//noinspection IOResourceOpenedButNotSafelyClosed
final PrintWriter input = new PrintWriter(processInput);
input.println(myAuthData.getPassword());
input.flush();
String message = StringUtil.repeatSymbol('*', myAuthData.getPassword().length()) + "\n";
if (myConsole != null) {
myConsole.print(message, ConsoleViewContentType.USER_INPUT);
}
else if (myLoggingHandler != null) {
myLoggingHandler.print(message);
}
}
}
}
@Override
public void processTerminated(ProcessEvent event) {
int exitCode = event.getExitCode();
if (exitCode == 0) {
myCallback.succeeded(new DeploymentRuntime() {
@Override
public boolean isUndeploySupported() {
return false;
}
@Override
public void undeploy(@NotNull UndeploymentTaskCallback callback) {
}
});
}
else {
myCallback.errorOccurred("Process terminated with exit code " + exitCode);
}
}
}
}
|
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.server;
import com.facebook.presto.execution.QueryId;
import com.facebook.presto.execution.QueryInfo;
import com.facebook.presto.execution.QueryManager;
import com.facebook.presto.execution.StageInfo;
import com.facebook.presto.execution.TaskId;
import com.facebook.presto.execution.TaskInfo;
import com.facebook.presto.execution.TaskStatus;
import com.facebook.presto.execution.buffer.BufferInfo;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.io.Resources;
import io.airlift.units.DataSize;
import javax.inject.Inject;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Optional;
import java.util.concurrent.TimeUnit;
import static com.google.common.io.Resources.getResource;
import static java.util.Objects.requireNonNull;
@Path("/")
public class QueryExecutionResource
{
// synthetic task id used by the output buffer of the top task
private static final TaskId OUTPUT_TASK_ID = new TaskId("output", "buffer", 0);
private final QueryManager manager;
@Inject
public QueryExecutionResource(QueryManager manager)
{
requireNonNull(manager, "manager is null");
this.manager = manager;
}
@GET
@Path("/ui/plan")
@Produces(MediaType.TEXT_HTML)
public String getPlanUi()
throws IOException
{
return Resources.toString(getResource(getClass(), "plan.html"), StandardCharsets.UTF_8);
}
@GET
@Path("/ui/query-execution")
@Produces(MediaType.TEXT_HTML)
public String getUi()
throws IOException
{
return Resources.toString(getResource(getClass(), "query-execution.html"), StandardCharsets.UTF_8);
}
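// Builds a JSON view of the query: one Task entry per task in every stage, plus Flow entries for inter-task output buffers.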
@GET
@Path("/v1/query-execution/{queryId}")
@Produces(MediaType.APPLICATION_JSON)
public Response getTaskInfo(@PathParam("queryId") String queryId)
{
QueryInfo query;
try {
query = manager.getQueryInfo(QueryId.valueOf(queryId));
}
catch (NoSuchElementException e) {
return Response.status(Response.Status.NOT_FOUND).build();
}
List<StageInfo> stages = collectStages(query.getOutputStage());
List<Task> tasks = new ArrayList<>();
List<Flow> flows = new ArrayList<>();
for (StageInfo stage : stages) {
for (TaskInfo task : stage.getTasks()) {
int bufferedPages = 0;
TaskStatus taskStatus = task.getTaskStatus();
for (BufferInfo bufferInfo : task.getOutputBuffers().getBuffers()) {
bufferedPages += bufferInfo.getBufferedPages();
if (!bufferInfo.getBufferId().equals(OUTPUT_TASK_ID)) {
flows.add(new Flow(
taskStatus.getTaskId().toString(),
bufferInfo.getBufferId().toString(),
bufferInfo.getPageBufferInfo().getPagesAdded(),
bufferInfo.getBufferedPages(),
bufferInfo.isFinished()));
}
}
long last = TimeUnit.NANOSECONDS.toMillis(System.nanoTime());
if (task.getStats().getEndTime() != null) {
last = task.getStats().getEndTime().getMillis();
}
tasks.add(new Task(taskStatus.getTaskId().toString(),
taskStatus.getState().toString(),
taskStatus.getSelf().getHost(),
last - task.getStats().getCreateTime().getMillis(),
task.getStats().getTotalCpuTime().roundTo(TimeUnit.MILLISECONDS),
task.getStats().getTotalBlockedTime().roundTo(TimeUnit.MILLISECONDS),
task.getStats().getRawInputDataSize().roundTo(DataSize.Unit.BYTE),
task.getStats().getRawInputPositions(),
task.getStats().getOutputDataSize().roundTo(DataSize.Unit.BYTE),
task.getStats().getOutputPositions(),
task.getStats().getMemoryReservation().roundTo(DataSize.Unit.BYTE),
task.getStats().getQueuedDrivers(),
task.getStats().getRunningDrivers(),
task.getStats().getCompletedDrivers(),
bufferedPages));
}
}
Map<String, Object> result = ImmutableMap.<String, Object>builder()
.put("tasks", tasks)
.put("flows", flows)
.build();
return Response.ok(result).build();
}
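// Flattens the stage tree (root stage plus all sub-stages) into a single list.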
private static List<StageInfo> collectStages(Optional<StageInfo> stage)
{
ImmutableList.Builder<StageInfo> result = ImmutableList.builder();
collectStages(stage, result);
return result.build();
}
private static void collectStages(Optional<StageInfo> stageInfo, ImmutableList.Builder<StageInfo> result)
{
stageInfo.ifPresent(stage -> {
result.add(stage);
stage.getSubStages().stream()
.forEach(subStage -> collectStages(Optional.ofNullable(subStage), result));
});
}
public static class Flow
{
private final String from;
private final String to;
private final long pagesSent;
private final int bufferedPages;
private final boolean finished;
public Flow(String from, String to, long pagesSent, int bufferedPages, boolean finished)
{
this.from = from;
this.to = to;
this.pagesSent = pagesSent;
this.bufferedPages = bufferedPages;
this.finished = finished;
}
@JsonProperty
public String getFrom()
{
return from;
}
@JsonProperty
public String getTo()
{
return to;
}
@JsonProperty
public long getPagesSent()
{
return pagesSent;
}
@JsonProperty
public int getBufferedPages()
{
return bufferedPages;
}
@JsonProperty
public boolean isFinished()
{
return finished;
}
}
public static class Task
{
private final String taskId;
private final String state;
private final String host;
private final long uptime;
private final long cpuMillis;
private final long blockedMillis;
private final long inputBytes;
private final long inputRows;
private final long outputBytes;
private final long outputRows;
private final long usedMemoryBytes;
private final int queuedSplits;
private final int runningSplits;
private final int completedSplits;
private final int bufferedPages;
public Task(
String taskId,
String state,
String host,
long uptimeMillis,
long cpuMillis,
long blockedMillis,
long inputBytes,
long inputRows,
long outputBytes,
long outputRows,
long usedMemoryBytes,
int queuedSplits,
int runningSplits,
int completedSplits,
int bufferedPages)
{
this.taskId = taskId;
this.state = state;
this.host = host;
this.uptime = uptimeMillis;
this.cpuMillis = cpuMillis;
this.blockedMillis = blockedMillis;
this.inputBytes = inputBytes;
this.inputRows = inputRows;
this.outputBytes = outputBytes;
this.outputRows = outputRows;
this.usedMemoryBytes = usedMemoryBytes;
this.queuedSplits = queuedSplits;
this.runningSplits = runningSplits;
this.completedSplits = completedSplits;
this.bufferedPages = bufferedPages;
}
@JsonProperty
public String getTaskId()
{
return taskId;
}
@JsonProperty
public String getState()
{
return state;
}
@JsonProperty
public String getHost()
{
return host;
}
@JsonProperty
public long getUptime()
{
return uptime;
}
@JsonProperty
public long getCpuMillis()
{
return cpuMillis;
}
@JsonProperty
public long getBlockedMillis()
{
return blockedMillis;
}
@JsonProperty
public long getInputBytes()
{
return inputBytes;
}
@JsonProperty
public long getInputRows()
{
return inputRows;
}
@JsonProperty
public long getOutputBytes()
{
return outputBytes;
}
@JsonProperty
public long getOutputRows()
{
return outputRows;
}
@JsonProperty
public long getUsedMemoryBytes()
{
return usedMemoryBytes;
}
@JsonProperty
public int getQueuedSplits()
{
return queuedSplits;
}
@JsonProperty
public int getRunningSplits()
{
return runningSplits;
}
@JsonProperty
public int getCompletedSplits()
{
return completedSplits;
}
@JsonProperty
public int getBufferedPages()
{
return bufferedPages;
}
}
}
|
|
// -*- mode:java; encoding:utf-8 -*-
// vim:set fileencoding=utf-8:
// @homepage@
package example;
import java.awt.*;
import java.util.Arrays;
import java.util.EventListener;
import java.util.EventObject;
import java.util.List;
import java.util.Objects;
import java.util.stream.Stream;
import javax.swing.*;
// import javax.swing.event.EventListenerList;
public final class MainPanel extends JPanel {
private MainPanel() {
super(new BorderLayout());
Box northBox = Box.createVerticalBox();
Box centerBox = Box.createVerticalBox();
Box southBox = Box.createVerticalBox();
List<? extends AbstractExpansionPanel> panelList = makeList();
ExpansionListener rl = e -> {
setVisible(false);
Component source = (Component) e.getSource();
centerBox.removeAll();
northBox.removeAll();
southBox.removeAll();
boolean insertSouth = false;
for (AbstractExpansionPanel exp : panelList) {
if (source.equals(exp) && exp.isExpanded()) {
centerBox.add(exp);
insertSouth = true;
continue;
}
exp.setExpanded(false);
if (insertSouth) {
southBox.add(exp);
} else {
northBox.add(exp);
}
}
setVisible(true);
};
panelList.forEach(exp -> {
northBox.add(exp);
exp.addExpansionListener(rl);
});
JPanel panel = new JPanel(new BorderLayout()) {
@Override public Dimension getMinimumSize() {
Dimension d = super.getMinimumSize();
d.width = 120;
return d;
}
};
panel.add(northBox, BorderLayout.NORTH);
panel.add(centerBox);
panel.add(southBox, BorderLayout.SOUTH);
JSplitPane sp = new JSplitPane();
sp.setLeftComponent(panel);
sp.setRightComponent(new JScrollPane(new JTree()));
add(sp);
setPreferredSize(new Dimension(320, 240));
}
private List<? extends AbstractExpansionPanel> makeList() {
return Arrays.asList(
new AbstractExpansionPanel("Panel1") {
@Override public Container makePanel() {
Box p = Box.createVerticalBox();
p.setBorder(BorderFactory.createEmptyBorder(5, 15, 5, 15));
p.add(new JCheckBox("11111"));
p.add(new JCheckBox("2222222222"));
return p;
}
},
new AbstractExpansionPanel("Panel2") {
@Override public Container makePanel() {
Box p = Box.createVerticalBox();
p.setBorder(BorderFactory.createEmptyBorder(5, 15, 5, 15));
for (int i = 0; i < 16; i++) {
p.add(makeLabel(i));
}
return p;
}
private JLabel makeLabel(int i) {
return new JLabel(String.format("%02d", i));
}
},
new AbstractExpansionPanel("Panel3") {
@Override public Container makePanel() {
Box p = Box.createVerticalBox();
p.setBorder(BorderFactory.createEmptyBorder(5, 15, 5, 15));
ButtonGroup bg = new ButtonGroup();
Stream.of(
new JRadioButton("aa"),
new JRadioButton("bb"),
new JRadioButton("cc", true)
).forEach(b -> {
p.add(b);
bg.add(b);
});
return p;
}
});
}
public static void main(String[] args) {
EventQueue.invokeLater(MainPanel::createAndShowGui);
}
private static void createAndShowGui() {
try {
UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());
} catch (ClassNotFoundException | InstantiationException | IllegalAccessException | UnsupportedLookAndFeelException ex) {
ex.printStackTrace();
Toolkit.getDefaultToolkit().beep();
}
JFrame frame = new JFrame("@title@");
frame.setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE);
frame.getContentPane().add(new MainPanel());
frame.pack();
frame.setLocationRelativeTo(null);
frame.setVisible(true);
}
}
abstract class AbstractExpansionPanel extends JPanel {
// OvershadowingSubclassFields:
// JComponent: private final EventListenerList listenerList = new EventListenerList();
private ExpansionEvent expansionEvent;
private boolean openFlag;
private final JScrollPane scroll = new JScrollPane();
private final JButton button;
protected AbstractExpansionPanel(String title) {
super(new BorderLayout());
button = new JButton(title);
init();
}
private void init() {
scroll.setViewportView(makePanel());
scroll.getVerticalScrollBar().setUnitIncrement(25);
button.addActionListener(e -> {
setExpanded(!isExpanded());
fireExpansionEvent();
});
add(button, BorderLayout.NORTH);
}
public abstract Container makePanel();
public boolean isExpanded() {
return openFlag;
}
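// Attaches or detaches the scroll pane holding the content created by makePanel().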
public void setExpanded(boolean flg) {
openFlag = flg;
if (openFlag) {
add(scroll);
} else {
remove(scroll);
}
}
public void addExpansionListener(ExpansionListener l) {
listenerList.add(ExpansionListener.class, l);
}
// public void removeExpansionListener(ExpansionListener l) {
// listenerList.remove(ExpansionListener.class, l);
// }
// Notify all listeners that have registered interest in
// notification on this event type. The event instance
// is lazily created using the parameters passed into
// the fire method.
@SuppressWarnings("PMD.AvoidInstantiatingObjectsInLoops")
protected void fireExpansionEvent() {
// Guaranteed to return a non-null array
Object[] listeners = listenerList.getListenerList();
// Process the listeners last to first, notifying
// those that are interested in this event
for (int i = listeners.length - 2; i >= 0; i -= 2) {
if (listeners[i] == ExpansionListener.class) {
// Lazily create the event:
if (Objects.isNull(expansionEvent)) {
expansionEvent = new ExpansionEvent(this);
}
((ExpansionListener) listeners[i + 1]).expansionStateChanged(expansionEvent);
}
}
}
}
class ExpansionEvent extends EventObject {
private static final long serialVersionUID = 1L;
protected ExpansionEvent(Object source) {
super(source);
}
}
interface ExpansionListener extends EventListener {
void expansionStateChanged(ExpansionEvent e);
}
|
|
package com.logimethods.connector.nats.to_spark;
import static org.junit.Assert.assertEquals;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import org.apache.log4j.Level;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.VoidFunction;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaPairDStream;
import org.apache.spark.streaming.api.java.JavaReceiverInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.apache.spark.util.LongAccumulator;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.logimethods.connector.nats.spark.test.NatsPublisher;
import com.logimethods.connector.nats.spark.test.NatsToSparkValidator;
import com.logimethods.connector.nats.spark.test.TestClient;
import com.logimethods.connector.nats.spark.test.UnitTestUtilities;
import com.logimethods.connector.nats.to_spark.api.StandardNatsToSparkConnectorTest;
public abstract class AbstractNatsToSparkTest {
protected static String DEFAULT_SUBJECT_ROOT = "nats2sparkSubject";
protected static int DEFAULT_SUBJECT_INR = 0;
protected static String DEFAULT_SUBJECT;
protected static JavaSparkContext sc;
// protected static AtomicInteger TOTAL_COUNT = new AtomicInteger();
protected static Logger logger = null;
protected static Boolean rightNumber = true;
protected static Boolean atLeastSomeData = false;
protected static String payload = null;
/**
* @throws java.lang.Exception
*/
@BeforeClass
public static void setUpBeforeClass() throws Exception {
// Enable tracing for debugging as necessary.
Level level = Level.WARN;
UnitTestUtilities.setLogLevel(NatsToSparkConnector.class, level);
UnitTestUtilities.setLogLevel(StandardNatsToSparkConnectorTest.class, level);
UnitTestUtilities.setLogLevel(TestClient.class, level);
UnitTestUtilities.setLogLevel("org.apache.spark", level);
UnitTestUtilities.setLogLevel("org.spark-project", level);
logger = LoggerFactory.getLogger(StandardNatsToSparkConnectorTest.class);
UnitTestUtilities.startDefaultServer();
}
/**
* @throws java.lang.Exception
*/
@AfterClass
public static void tearDownAfterClass() throws Exception {
UnitTestUtilities.stopDefaultServer();
}
/**
* @throws java.lang.Exception
*/
@Before
public void setUp() throws Exception {
// assertTrue(logger.isDebugEnabled());
// assertTrue(LoggerFactory.getLogger(NatsToSparkConnector.class).isTraceEnabled());
// To avoid "Only one StreamingContext may be started in this JVM. Currently running StreamingContext was started at .../..."
Thread.sleep(500);
DEFAULT_SUBJECT = DEFAULT_SUBJECT_ROOT + (DEFAULT_SUBJECT_INR++);
NatsToSparkValidator.TOTAL_COUNT.set(0);
rightNumber = true;
atLeastSomeData = false;
// https://stackoverflow.com/questions/41864985/hadoop-ioexception-failure-to-login
// UserGroupInformation.setLoginUser(UserGroupInformation.createRemoteUser("sparkuser"));
SparkConf sparkConf =
UnitTestUtilities.newSparkConf()
.setAppName("My Spark Job");
sc = new JavaSparkContext(sparkConf);
}
/**
* @throws java.lang.Exception
*/
@After
public void tearDown() throws Exception {
if (sc != null)
sc.stop();
}
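// Publishes a fixed number of NATS messages and checks that each non-empty batch carries exactly that many of them.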
protected void validateTheReceptionOfMessages(JavaStreamingContext ssc,
JavaReceiverInputDStream<String> stream) throws InterruptedException {
JavaDStream<String> messages = stream.repartition(3);
ExecutorService executor = Executors.newFixedThreadPool(6);
final int nbOfMessages = 5;
NatsPublisher np = getNatsPublisher(nbOfMessages);
if (logger.isDebugEnabled()) {
messages.print();
}
messages.foreachRDD(new VoidFunction<JavaRDD<String>>() {
private static final long serialVersionUID = 1L;
@Override
public void call(JavaRDD<String> rdd) throws Exception {
logger.debug("RDD received: {}", rdd.collect());
final long count = rdd.count();
if ((count != 0) && (count != nbOfMessages)) {
rightNumber = false;
logger.error("The number of messages received should have been {} instead of {}.", nbOfMessages, count);
}
NatsToSparkValidator.TOTAL_COUNT.getAndAdd((int) count);
atLeastSomeData = atLeastSomeData || (count > 0);
for (String str : rdd.collect()) {
if (! str.startsWith(NatsPublisher.NATS_PAYLOAD)) {
payload = str;
}
}
}
});
closeTheValidation(ssc, executor, nbOfMessages, np);
}
protected void validateTheReceptionOfIntegerMessages(JavaStreamingContext ssc,
JavaReceiverInputDStream<Integer> stream) throws InterruptedException {
JavaDStream<Integer> messages = stream.repartition(3);
ExecutorService executor = Executors.newFixedThreadPool(6);
final int nbOfMessages = 5;
NatsPublisher np = getNatsPublisher(nbOfMessages);
// if (logger.isDebugEnabled()) {
messages.print();
// }
/* messages.foreachRDD(new VoidFunction<JavaRDD<Integer>>() {
private static final long serialVersionUID = 1L;
@Override
public void call(JavaRDD<Integer> rdd) throws Exception {
logger.debug("RDD received: {}", rdd.collect());
System.out.println("RDD received: " + rdd.collect());
final long count = rdd.count();
if ((count != 0) && (count != nbOfMessages)) {
rightNumber = false;
logger.error("The number of messages received should have been {} instead of {}.", nbOfMessages, count);
}
NatsToSparkValidator.TOTAL_COUNT.getAndAdd((int) count);
atLeastSomeData = atLeastSomeData || (count > 0);
for (Integer value :rdd.collect()) {
if (value < NatsPublisher.NATS_PAYLOAD_INT) {
payload = value.toString();
}
}
}
});*/
final LongAccumulator count = ssc.sparkContext().sc().longAccumulator();
NatsToSparkValidator.validateTheReceptionOfIntegerMessages(messages, count);
closeTheValidation(ssc, executor, nbOfMessages, np);
assertEquals(nbOfMessages, count.sum());
}
protected void validateTheReceptionOfMessages(final JavaStreamingContext ssc,
final JavaPairDStream<String, String> messages) throws InterruptedException {
ExecutorService executor = Executors.newFixedThreadPool(6);
final int nbOfMessages = 5;
NatsPublisher np = getNatsPublisher(nbOfMessages);
if (logger.isDebugEnabled()) {
messages.print();
}
final LongAccumulator count = ssc.sparkContext().sc().longAccumulator();
NatsToSparkValidator.validateTheReceptionOfMessages(messages, count);
closeTheValidation(ssc, executor, nbOfMessages, np);
assertEquals(nbOfMessages, count.sum());
}
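// Starts the streaming context, runs the publisher on a worker thread, and waits long enough for the batches to be processed before closing.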
protected void closeTheValidation(JavaStreamingContext ssc, ExecutorService executor, final int nbOfMessages,
NatsPublisher np) throws InterruptedException {
ssc.start();
Thread.sleep(1000);
// start the publisher
executor.execute(np);
np.waitUntilReady();
Thread.sleep(2000);
ssc.close();
Thread.sleep(2000);
}
protected abstract NatsPublisher getNatsPublisher(final int nbOfMessages);
}
|
|
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jetbrains.python.sdk.skeletons;
import com.google.common.base.Joiner;
import com.google.common.collect.Lists;
import com.intellij.codeInsight.daemon.DaemonCodeAnalyzer;
import com.intellij.execution.ExecutionException;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.roots.OrderRootType;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.Consumer;
import com.intellij.util.Function;
import com.intellij.util.SmartList;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.io.ZipUtil;
import com.jetbrains.python.PyBundle;
import com.jetbrains.python.PyNames;
import com.jetbrains.python.codeInsight.userSkeletons.PyUserSkeletonsUtil;
import com.jetbrains.python.psi.resolve.PythonSdkPathCache;
import com.jetbrains.python.remote.PythonRemoteInterpreterManager;
import com.jetbrains.python.sdk.InvalidSdkException;
import com.jetbrains.python.sdk.PySdkUtil;
import com.jetbrains.python.sdk.PythonSdkType;
import consulo.container.boot.ContainerPathManager;
import consulo.python.buildout.module.extension.BuildoutModuleExtension;
import consulo.vfs.util.ArchiveVfsUtil;
import org.jetbrains.annotations.NonNls;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.awt.*;
import java.io.*;
import java.util.List;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import static com.jetbrains.python.sdk.skeletons.SkeletonVersionChecker.fromVersionString;
/**
* Handles a refresh of an SDK's skeletons.
* Does all the heavy lifting: calling the skeleton generator, managing blacklists, etc.
* One-time, non-reusable instances.
* <br/>
* User: dcheryasov
* Date: 4/15/11 5:38 PM
*/
public class PySkeletonRefresher
{
private static final Logger LOG = Logger.getInstance("#" + PySkeletonRefresher.class.getName());
@Nullable
private Project myProject;
@Nullable
private final ProgressIndicator myIndicator;
@Nonnull
private final Sdk mySdk;
private String mySkeletonsPath;
@NonNls
public static final String BLACKLIST_FILE_NAME = ".blacklist";
private final static Pattern BLACKLIST_LINE = Pattern.compile("^([^=]+) = (\\d+\\.\\d+) (\\d+)\\s*$");
// we use the equals sign after the filename so that the filename may freely include spaces
// Path (the first component) may contain spaces; this header spec is deprecated
private static final Pattern VERSION_LINE_V1 = Pattern.compile("# from (\\S+) by generator (\\S+)\\s*");
// Skeleton header spec v2
private static final Pattern FROM_LINE_V2 = Pattern.compile("# from (.*)$");
private static final Pattern BY_LINE_V2 = Pattern.compile("# by generator (.*)$");
private static int ourGeneratingCount = 0;
private String myExtraSyspath;
private VirtualFile myPregeneratedSkeletons;
private int myGeneratorVersion;
private Map<String, Pair<Integer, Long>> myBlacklist;
private SkeletonVersionChecker myVersionChecker;
private PySkeletonGenerator mySkeletonsGenerator;
public static synchronized boolean isGeneratingSkeletons()
{
return ourGeneratingCount > 0;
}
private static synchronized void changeGeneratingSkeletons(int increment)
{
ourGeneratingCount += increment;
}
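// Entry point for refreshing a single SDK: regenerates its skeletons and logs a summary of per-module failures.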
public static void refreshSkeletonsOfSdk(@Nullable Project project, Component ownerComponent, String skeletonsPath, @Nonnull Sdk sdk) throws InvalidSdkException
{
final Map<String, List<String>> errors = new TreeMap<>();
final List<String> failedSdks = new SmartList<>();
final ProgressIndicator indicator = ProgressManager.getInstance().getProgressIndicator();
final String homePath = sdk.getHomePath();
if(skeletonsPath == null)
{
LOG.info("Could not find skeletons path for SDK path " + homePath);
}
else
{
LOG.info("Refreshing skeletons for " + homePath);
SkeletonVersionChecker checker = new SkeletonVersionChecker(0); // this default version won't be used
final PySkeletonRefresher refresher = new PySkeletonRefresher(project, ownerComponent, sdk, skeletonsPath, indicator, null);
changeGeneratingSkeletons(1);
try
{
List<String> sdkErrors = refresher.regenerateSkeletons(checker);
if(sdkErrors.size() > 0)
{
String sdkName = sdk.getName();
List<String> knownErrors = errors.get(sdkName);
if(knownErrors == null)
{
errors.put(sdkName, sdkErrors);
}
else
{
knownErrors.addAll(sdkErrors);
}
}
}
finally
{
changeGeneratingSkeletons(-1);
}
}
if(failedSdks.size() > 0 || errors.size() > 0)
{
int module_errors = 0;
for(String sdk_name : errors.keySet())
{
module_errors += errors.get(sdk_name).size();
}
String message;
if(failedSdks.size() > 0)
{
message = PyBundle.message("sdk.errorlog.$0.mods.fail.in.$1.sdks.$2.completely", module_errors, errors.size(), failedSdks.size());
}
else
{
message = PyBundle.message("sdk.errorlog.$0.mods.fail.in.$1.sdks", module_errors, errors.size());
}
logErrors(errors, failedSdks, message);
}
}
private static void logErrors(@Nonnull final Map<String, List<String>> errors, @Nonnull final List<String> failedSdks, @Nonnull final String message)
{
LOG.warn(PyBundle.message("sdk.some.skeletons.failed"));
LOG.warn(message);
if(failedSdks.size() > 0)
{
LOG.warn(PyBundle.message("sdk.error.dialog.failed.sdks"));
LOG.warn(StringUtil.join(failedSdks, ", "));
}
if(errors.size() > 0)
{
LOG.warn(PyBundle.message("sdk.error.dialog.failed.modules"));
for(String sdkName : errors.keySet())
{
for(String moduleName : errors.get(sdkName))
{
LOG.warn(moduleName);
}
}
}
}
/**
* Creates a new object that refreshes skeletons of the given SDK.
*
* @param sdk a Python SDK
* @param skeletonsPath if known; null means 'determine and create as needed'.
* @param indicator to report progress of long operations
*/
public PySkeletonRefresher(@Nullable Project project,
@Nullable Component ownerComponent,
@Nonnull Sdk sdk,
@Nullable String skeletonsPath,
@Nullable ProgressIndicator indicator,
@Nullable String folder) throws InvalidSdkException
{
myProject = project;
myIndicator = indicator;
mySdk = sdk;
mySkeletonsPath = skeletonsPath;
final PythonRemoteInterpreterManager remoteInterpreterManager = PythonRemoteInterpreterManager.getInstance();
if(PySdkUtil.isRemote(sdk) && remoteInterpreterManager != null)
{
try
{
mySkeletonsGenerator = remoteInterpreterManager.createRemoteSkeletonGenerator(myProject, ownerComponent, sdk, getSkeletonsPath());
}
catch(ExecutionException e)
{
throw new InvalidSdkException(e.getMessage(), e.getCause());
}
}
else
{
mySkeletonsGenerator = new PySkeletonGenerator(getSkeletonsPath(), mySdk, folder);
}
}
private void indicate(String msg)
{
if(myIndicator != null)
{
myIndicator.checkCanceled();
myIndicator.setText(msg);
myIndicator.setText2("");
}
}
private void indicateMinor(String msg)
{
if(myIndicator != null)
{
myIndicator.setText2(msg);
}
}
private void checkCanceled()
{
if(myIndicator != null)
{
myIndicator.checkCanceled();
}
}
private static String calculateExtraSysPath(@Nonnull final Sdk sdk, @Nullable final String skeletonsPath)
{
final File skeletons = skeletonsPath != null ? new File(skeletonsPath) : null;
final VirtualFile userSkeletonsDir = PyUserSkeletonsUtil.getUserSkeletonsDirectory();
final File userSkeletons = userSkeletonsDir != null ? new File(userSkeletonsDir.getPath()) : null;
final VirtualFile remoteSourcesDir = PySdkUtil.findAnyRemoteLibrary(sdk);
final File remoteSources = remoteSourcesDir != null ? new File(remoteSourcesDir.getPath()) : null;
final List<VirtualFile> paths = new ArrayList<>();
paths.addAll(Arrays.asList(sdk.getRootProvider().getFiles(OrderRootType.CLASSES)));
paths.addAll(BuildoutModuleExtension.getExtraPathForAllOpenModules());
return Joiner.on(File.pathSeparator).join(ContainerUtil.mapNotNull(paths, (Function<VirtualFile, Object>) file -> {
if(file.isInLocalFileSystem())
{
// We compare canonical files, not strings because "c:/some/folder" equals "c:\\some\\bin\\..\\folder\\"
final File canonicalFile = new File(file.getPath());
if(canonicalFile.exists() &&
!FileUtil.filesEqual(canonicalFile, skeletons) &&
!FileUtil.filesEqual(canonicalFile, userSkeletons) &&
!FileUtil.filesEqual(canonicalFile, remoteSources))
{
return file.getPath();
}
}
return null;
}));
}
/**
* Creates, if needed, the path(s) used to store the skeletons of its SDK.
*
* @return path name of skeleton dir for the SDK, guaranteed to be already created.
*/
@Nonnull
public String getSkeletonsPath() throws InvalidSdkException
{
if(mySkeletonsPath == null)
{
mySkeletonsPath = PythonSdkType.getSkeletonsPath(ContainerPathManager.get().getSystemPath(), mySdk.getHomePath());
final File skeletonsDir = new File(mySkeletonsPath);
if(!skeletonsDir.exists() && !skeletonsDir.mkdirs())
{
throw new InvalidSdkException("Can't create skeleton dir " + String.valueOf(mySkeletonsPath));
}
}
return mySkeletonsPath;
}
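// Full refresh pass: query the generator for binary modules, unpack or copy base skeletons when missing, update builtins, regenerate stale module skeletons, and maintain the blacklist of failures.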
public List<String> regenerateSkeletons(@Nullable SkeletonVersionChecker cachedChecker) throws InvalidSdkException
{
final List<String> errorList = new SmartList<>();
final String homePath = mySdk.getHomePath();
final String skeletonsPath = getSkeletonsPath();
final File skeletonsDir = new File(skeletonsPath);
if(!skeletonsDir.exists())
{
//noinspection ResultOfMethodCallIgnored
skeletonsDir.mkdirs();
}
final String readablePath = FileUtil.getLocationRelativeToUserHome(homePath);
mySkeletonsGenerator.prepare();
myBlacklist = loadBlacklist();
indicate(PyBundle.message("sdk.gen.querying.$0", readablePath));
// get generator version and binary libs list in one go
final String extraSysPath = calculateExtraSysPath(mySdk, getSkeletonsPath());
final PySkeletonGenerator.ListBinariesResult binaries = mySkeletonsGenerator.listBinaries(mySdk, extraSysPath);
myGeneratorVersion = binaries.generatorVersion;
myPregeneratedSkeletons = findPregeneratedSkeletons();
indicate(PyBundle.message("sdk.gen.reading.versions.file"));
if(cachedChecker != null)
{
myVersionChecker = cachedChecker.withDefaultVersionIfUnknown(myGeneratorVersion);
}
else
{
myVersionChecker = new SkeletonVersionChecker(myGeneratorVersion);
}
// check builtins
final String builtinsFileName = PythonSdkType.getBuiltinsFileName(mySdk);
final File builtinsFile = new File(skeletonsPath, builtinsFileName);
final SkeletonHeader oldHeader = readSkeletonHeader(builtinsFile);
final boolean oldOrNonExisting = oldHeader == null || oldHeader.getVersion() == 0;
if(myPregeneratedSkeletons != null && oldOrNonExisting)
{
unpackPreGeneratedSkeletons();
}
if(oldOrNonExisting)
{
copyBaseSdkSkeletonsToVirtualEnv(skeletonsPath, binaries);
}
final boolean builtinsUpdated = updateSkeletonsForBuiltins(readablePath, builtinsFile);
if(!binaries.modules.isEmpty())
{
indicate(PyBundle.message("sdk.gen.updating.$0", readablePath));
final List<UpdateResult> updateErrors = updateOrCreateSkeletons(binaries.modules);
if(updateErrors.size() > 0)
{
indicateMinor(BLACKLIST_FILE_NAME);
for(UpdateResult error : updateErrors)
{
if(error.isFresh())
{
errorList.add(error.getName());
}
myBlacklist.put(error.getPath(), new Pair<>(myGeneratorVersion, error.getTimestamp()));
}
storeBlacklist(skeletonsDir, myBlacklist);
}
else
{
removeBlacklist(skeletonsDir);
}
}
indicate(PyBundle.message("sdk.gen.reloading"));
mySkeletonsGenerator.refreshGeneratedSkeletons();
if(!oldOrNonExisting)
{
indicate(PyBundle.message("sdk.gen.cleaning.$0", readablePath));
cleanUpSkeletons(skeletonsDir);
}
if((builtinsUpdated || PySdkUtil.isRemote(mySdk)) && myProject != null)
{
ApplicationManager.getApplication().invokeLater(() -> DaemonCodeAnalyzer.getInstance(myProject).restart(), myProject.getDisposed());
}
return errorList;
}
private boolean updateSkeletonsForBuiltins(String readablePath, File builtinsFile) throws InvalidSdkException
{
final SkeletonHeader newHeader = readSkeletonHeader(builtinsFile);
final boolean mustUpdateBuiltins = myPregeneratedSkeletons == null && (newHeader == null || newHeader.getVersion() < myVersionChecker.getBuiltinVersion());
if(mustUpdateBuiltins)
{
indicate(PyBundle.message("sdk.gen.updating.builtins.$0", readablePath));
mySkeletonsGenerator.generateBuiltinSkeletons(mySdk);
if(myProject != null)
{
PythonSdkPathCache.getInstance(myProject, mySdk).clearBuiltins();
}
}
return mustUpdateBuiltins;
}
private void copyBaseSdkSkeletonsToVirtualEnv(String skeletonsPath, PySkeletonGenerator.ListBinariesResult binaries) throws InvalidSdkException
{
final Sdk base = PythonSdkType.getInstance().getVirtualEnvBaseSdk(mySdk);
if(base != null)
{
indicate("Copying base SDK skeletons for virtualenv...");
final String baseSkeletonsPath = PythonSdkType.getSkeletonsPath(ContainerPathManager.get().getSystemPath(), base.getHomePath());
final PySkeletonGenerator.ListBinariesResult baseBinaries = mySkeletonsGenerator.listBinaries(base, calculateExtraSysPath(base, baseSkeletonsPath));
for(Map.Entry<String, PyBinaryItem> entry : binaries.modules.entrySet())
{
final String module = entry.getKey();
final PyBinaryItem binary = entry.getValue();
final PyBinaryItem baseBinary = baseBinaries.modules.get(module);
final File fromFile = getSkeleton(module, baseSkeletonsPath);
if(baseBinaries.modules.containsKey(module) &&
fromFile.exists() &&
binary.length() == baseBinary.length())
{ // Weak binary modules equality check
final File toFile = fromFile.isDirectory() ? getPackageSkeleton(module, skeletonsPath) : getModuleSkeleton(module, skeletonsPath);
try
{
FileUtil.copy(fromFile, toFile);
}
catch(IOException e)
{
LOG.info("Error copying base virtualenv SDK skeleton for " + module, e);
}
}
}
}
}
private void unpackPreGeneratedSkeletons() throws InvalidSdkException
{
indicate("Unpacking pregenerated skeletons...");
try
{
final VirtualFile jar = ArchiveVfsUtil.getVirtualFileForJar(myPregeneratedSkeletons);
if(jar != null)
{
ZipUtil.extract(new File(jar.getPath()), new File(getSkeletonsPath()), null);
}
}
catch(IOException e)
{
LOG.info("Error unpacking pregenerated skeletons", e);
}
}
@Nullable
public static SkeletonHeader readSkeletonHeader(@Nonnull File file)
{
try
{
final LineNumberReader reader = new LineNumberReader(new FileReader(file));
try
{
String line = null;
// Read 3 lines, skip first 2: encoding, module name
for(int i = 0; i < 3; i++)
{
line = reader.readLine();
if(line == null)
{
return null;
}
}
// Try the old whitespace-unsafe header format v1 first
final Matcher v1Matcher = VERSION_LINE_V1.matcher(line);
if(v1Matcher.matches())
{
return new SkeletonHeader(v1Matcher.group(1), fromVersionString(v1Matcher.group(2)));
}
final Matcher fromMatcher = FROM_LINE_V2.matcher(line);
if(fromMatcher.matches())
{
final String binaryFile = fromMatcher.group(1);
line = reader.readLine();
if(line != null)
{
final Matcher byMatcher = BY_LINE_V2.matcher(line);
if(byMatcher.matches())
{
final int version = fromVersionString(byMatcher.group(1));
return new SkeletonHeader(binaryFile, version);
}
}
}
}
finally
{
reader.close();
}
}
catch(IOException ignored)
{
}
return null;
}
public static class SkeletonHeader
{
@Nonnull
private final String myFile;
private final int myVersion;
public SkeletonHeader(@Nonnull String binaryFile, int version)
{
myFile = binaryFile;
myVersion = version;
}
@Nonnull
public String getBinaryFile()
{
return myFile;
}
public int getVersion()
{
return myVersion;
}
}
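// Parses the .blacklist file; each non-comment line has the form 'path = generatorVersion timestamp'.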
private Map<String, Pair<Integer, Long>> loadBlacklist()
{
Map<String, Pair<Integer, Long>> ret = new HashMap<>();
File blacklistFile = new File(mySkeletonsPath, BLACKLIST_FILE_NAME);
if(blacklistFile.exists() && blacklistFile.canRead())
{
Reader input;
try
{
input = new FileReader(blacklistFile);
LineNumberReader lines = new LineNumberReader(input);
try
{
String line;
do
{
line = lines.readLine();
if(line != null && line.length() > 0 && line.charAt(0) != '#')
{ // '#' begins a comment
Matcher matcher = BLACKLIST_LINE.matcher(line);
boolean notParsed = true;
if(matcher.matches())
{
final int version = fromVersionString(matcher.group(2));
if(version > 0)
{
try
{
final long timestamp = Long.parseLong(matcher.group(3));
final String filename = matcher.group(1);
ret.put(filename, new Pair<>(version, timestamp));
notParsed = false;
}
catch(NumberFormatException ignore)
{
}
}
}
if(notParsed)
{
LOG.warn("In blacklist at " + mySkeletonsPath + " strange line '" + line + "'");
}
}
}
while(line != null);
}
catch(IOException ex)
{
LOG.warn("Failed to read blacklist in " + mySkeletonsPath, ex);
}
finally
{
lines.close();
}
}
catch(IOException ignore)
{
}
}
return ret;
}
private static void storeBlacklist(File skeletonDir, Map<String, Pair<Integer, Long>> blacklist)
{
File blacklistFile = new File(skeletonDir, BLACKLIST_FILE_NAME);
PrintWriter output;
try
{
output = new PrintWriter(blacklistFile);
try
{
output.println("# PyCharm failed to generate skeletons for these modules.");
output.println("# These skeletons will be re-generated automatically");
output.println("# when a newer module version or an updated generator becomes available.");
// each line: filename = version.string timestamp
for(String fname : blacklist.keySet())
{
Pair<Integer, Long> data = blacklist.get(fname);
output.print(fname);
output.print(" = ");
output.print(SkeletonVersionChecker.toVersionString(data.getFirst()));
output.print(" ");
output.print(data.getSecond());
output.println();
}
}
finally
{
output.close();
}
}
catch(IOException ex)
{
LOG.warn("Failed to store blacklist in " + skeletonDir.getPath(), ex);
}
}
private static void removeBlacklist(File skeletonDir)
{
File blacklistFile = new File(skeletonDir, BLACKLIST_FILE_NAME);
if(blacklistFile.exists())
{
boolean okay = blacklistFile.delete();
if(!okay)
{
LOG.warn("Could not delete blacklist file in " + skeletonDir.getPath());
}
}
}
/**
* For every existing skeleton file, take its module file name,
* and remove the skeleton if the module file does not exist.
* Works recursively starting from dir. Removes dirs that become empty.
*/
private void cleanUpSkeletons(final File dir)
{
indicateMinor(dir.getPath());
final File[] files = dir.listFiles();
if(files == null)
{
return;
}
for(File item : files)
{
if(item.isDirectory())
{
cleanUpSkeletons(item);
// was the dir emptied?
File[] remaining = item.listFiles();
if(remaining != null && remaining.length == 0)
{
mySkeletonsGenerator.deleteOrLog(item);
}
else if(remaining != null && remaining.length == 1)
{ // clean up also if the directory contains only an empty __init__.py
File lastFile = remaining[0];
if(PyNames.INIT_DOT_PY.equals(lastFile.getName()) && lastFile.length() == 0)
{
boolean deleted = mySkeletonsGenerator.deleteOrLog(lastFile);
if(deleted)
{
mySkeletonsGenerator.deleteOrLog(item);
}
}
}
}
else if(item.isFile())
{
// clean up an individual file
final String itemName = item.getName();
if(PyNames.INIT_DOT_PY.equals(itemName) && item.length() == 0)
{
continue; // these are versionless
}
if(BLACKLIST_FILE_NAME.equals(itemName))
{
continue; // don't touch the blacklist
}
if(PythonSdkType.getBuiltinsFileName(mySdk).equals(itemName))
{
continue;
}
final SkeletonHeader header = readSkeletonHeader(item);
boolean canLive = header != null;
if(canLive)
{
final String binaryFile = header.getBinaryFile();
canLive = SkeletonVersionChecker.BUILTIN_NAME.equals(binaryFile) || mySkeletonsGenerator.exists(binaryFile);
}
if(!canLive)
{
mySkeletonsGenerator.deleteOrLog(item);
}
}
}
}
private static class UpdateResult
{
private final String myPath;
private final String myName;
private final long myTimestamp;
public boolean isFresh()
{
return myIsFresh;
}
private final boolean myIsFresh;
private UpdateResult(String name, String path, long timestamp, boolean fresh)
{
myName = name;
myPath = path;
myTimestamp = timestamp;
myIsFresh = fresh;
}
public String getName()
{
return myName;
}
public String getPath()
{
return myPath;
}
public Long getTimestamp()
{
return myTimestamp;
}
}
/**
* (Re-)generates skeletons for all binary python modules. Up-to-date skeletons are not regenerated.
* Does one module at a time: slower, but avoids certain conflicts.
*
* @param modules output of generator3 -L
* @return blacklist data; whatever was not generated successfully is put here.
*/
private List<UpdateResult> updateOrCreateSkeletons(Map<String, PyBinaryItem> modules) throws InvalidSdkException
{
long startTime = System.currentTimeMillis();
final List<String> names = Lists.newArrayList(modules.keySet());
Collections.sort(names);
final List<UpdateResult> results = new ArrayList<>();
final int count = names.size();
for(int i = 0; i < count; i++)
{
checkCanceled();
if(myIndicator != null)
{
myIndicator.setFraction((double) i / count);
}
final String name = names.get(i);
final PyBinaryItem module = modules.get(name);
if(module != null)
{
updateOrCreateSkeleton(module, results);
}
}
finishSkeletonsGeneration();
long doneInMs = System.currentTimeMillis() - startTime;
LOG.info("Rebuilding skeletons for binaries took " + doneInMs + " ms");
return results;
}
private void finishSkeletonsGeneration()
{
mySkeletonsGenerator.finishSkeletonsGeneration();
}
private static File getSkeleton(String moduleName, String skeletonsPath)
{
final File module = getModuleSkeleton(moduleName, skeletonsPath);
return module.exists() ? module : getPackageSkeleton(moduleName, skeletonsPath);
}
private static File getModuleSkeleton(String module, String skeletonsPath)
{
final String modulePath = module.replace('.', '/');
return new File(skeletonsPath, modulePath + ".py");
}
private static File getPackageSkeleton(String pkg, String skeletonsPath)
{
final String packagePath = pkg.replace('.', '/');
return new File(new File(skeletonsPath, packagePath), PyNames.INIT_DOT_PY);
}
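// Regenerates a single skeleton when its header is older than the required generator version, the binary is newer than the existing skeleton, or its blacklist entry has become stale.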
private boolean updateOrCreateSkeleton(final PyBinaryItem binaryItem, final List<UpdateResult> errorList) throws InvalidSdkException
{
final String moduleName = binaryItem.getModule();
final File skeleton = getSkeleton(moduleName, getSkeletonsPath());
final SkeletonHeader header = readSkeletonHeader(skeleton);
boolean mustRebuild = true; // guilty unless proven fresh enough
if(header != null)
{
int requiredVersion = myVersionChecker.getRequiredVersion(moduleName);
mustRebuild = header.getVersion() < requiredVersion;
}
if(!mustRebuild)
{ // ...but what if the lib was updated?
mustRebuild = (skeleton.exists() && binaryItem.lastModified() > skeleton.lastModified());
// really we can omit both exists() calls but I keep these to make the logic clear
}
if(myBlacklist != null)
{
Pair<Integer, Long> versionInfo = myBlacklist.get(binaryItem.getPath());
if(versionInfo != null)
{
int failedGeneratorVersion = versionInfo.getFirst();
long failedTimestamp = versionInfo.getSecond();
mustRebuild &= failedGeneratorVersion < myGeneratorVersion || failedTimestamp < binaryItem.lastModified();
if(!mustRebuild)
{ // we're still failing to rebuild it; keep it in the blacklist
errorList.add(new UpdateResult(moduleName, binaryItem.getPath(), binaryItem.lastModified(), false));
}
}
}
if(mustRebuild)
{
indicateMinor(moduleName);
if(myPregeneratedSkeletons != null && copyPregeneratedSkeleton(moduleName))
{
return true;
}
LOG.info("Skeleton for " + moduleName);
generateSkeleton(moduleName, binaryItem.getPath(), null, generated -> {
if(!generated)
{
errorList.add(new UpdateResult(moduleName, binaryItem.getPath(), binaryItem.lastModified(), true));
}
});
}
return false;
}
public static class PyBinaryItem
{
private String myPath;
private String myModule;
private long myLength;
private long myLastModified;
PyBinaryItem(String module, String path, long length, long lastModified)
{
myPath = path;
myModule = module;
myLength = length;
myLastModified = lastModified * 1000;
}
public String getPath()
{
return myPath;
}
public String getModule()
{
return myModule;
}
public long length()
{
return myLength;
}
public long lastModified()
{
return myLastModified;
}
}
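// Copies one module skeleton out of the pregenerated archive, creating the target package directory if necessary.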
private boolean copyPregeneratedSkeleton(String moduleName) throws InvalidSdkException
{
File targetDir;
final String modulePath = moduleName.replace('.', '/');
File skeletonsDir = new File(getSkeletonsPath());
VirtualFile pregenerated = myPregeneratedSkeletons.findFileByRelativePath(modulePath + ".py");
if(pregenerated == null)
{
pregenerated = myPregeneratedSkeletons.findFileByRelativePath(modulePath + "/" + PyNames.INIT_DOT_PY);
targetDir = new File(skeletonsDir, modulePath);
}
else
{
int pos = modulePath.lastIndexOf('/');
if(pos < 0)
{
targetDir = skeletonsDir;
}
else
{
final String moduleParentPath = modulePath.substring(0, pos);
targetDir = new File(skeletonsDir, moduleParentPath);
}
}
if(pregenerated != null && (targetDir.exists() || targetDir.mkdirs()))
{
LOG.info("Pregenerated skeleton for " + moduleName);
File target = new File(targetDir, pregenerated.getName());
try
{
FileOutputStream fos = new FileOutputStream(target);
try
{
FileUtil.copy(pregenerated.getInputStream(), fos);
}
finally
{
fos.close();
}
}
catch(IOException e)
{
LOG.info("Error copying pregenerated skeleton", e);
return false;
}
return true;
}
return false;
}
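// Looks for a platform- and version-specific skeletons-*.zip for a local SDK and returns the archive root if it exists.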
@Nullable
private VirtualFile findPregeneratedSkeletons()
{
final File root = findPregeneratedSkeletonsRoot();
if(root == null)
{
return null;
}
LOG.info("Pregenerated skeletons root is " + root);
@NonNls final String versionString = mySdk.getVersionString();
if(versionString == null)
{
return null;
}
if(PySdkUtil.isRemote(mySdk))
{
return null;
}
String version = versionString.toLowerCase().replace(" ", "-");
File f;
if(SystemInfo.isMac)
{
String osVersion = SystemInfo.OS_VERSION;
int dot = osVersion.indexOf('.');
if(dot >= 0)
{
int secondDot = osVersion.indexOf('.', dot + 1);
if(secondDot >= 0)
{
osVersion = osVersion.substring(0, secondDot);
}
}
f = new File(root, "skeletons-mac-" + myGeneratorVersion + "-" + osVersion + "-" + version + ".zip");
}
else
{
String os = SystemInfo.isWindows ? "win" : "nix";
f = new File(root, "skeletons-" + os + "-" + myGeneratorVersion + "-" + version + ".zip");
}
if(f.exists())
{
LOG.info("Found pregenerated skeletons at " + f.getPath());
final VirtualFile virtualFile = LocalFileSystem.getInstance().refreshAndFindFileByIoFile(f);
if(virtualFile == null)
{
LOG.info("Could not find pregenerated skeletons in VFS");
return null;
}
return ArchiveVfsUtil.getJarRootForLocalFile(virtualFile);
}
else
{
LOG.info("Not found pregenerated skeletons at " + f.getPath());
return null;
}
}
@Nullable
private static File findPregeneratedSkeletonsRoot()
{
final String path = ContainerPathManager.get().getHomePath();
LOG.info("Home path is " + path);
File f = new File(path, "python/skeletons"); // from sources
if(f.exists())
{
return f;
}
f = new File(path, "skeletons"); // compiled binary
if(f.exists())
{
return f;
}
return null;
}
/**
* Generates a skeleton for a particular binary module.
*
* @param modname name of the binary module as known to Python (e.g. 'foo.bar')
* @param modfilename name of file which defines the module, null for built-in modules
* @param assemblyRefs refs that generator wants to know in .net environment, if applicable
* @param resultConsumer accepts true if generation completed successfully
*/
public void generateSkeleton(@Nonnull String modname, @Nullable String modfilename, @Nullable List<String> assemblyRefs, Consumer<Boolean> resultConsumer) throws InvalidSdkException
{
mySkeletonsGenerator.generateSkeleton(modname, modfilename, assemblyRefs, getExtraSyspath(), mySdk.getHomePath(), resultConsumer);
}
private String getExtraSyspath()
{
if(myExtraSyspath == null)
{
myExtraSyspath = calculateExtraSysPath(mySdk, mySkeletonsPath);
}
return myExtraSyspath;
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.maven.packaging;
import java.io.File;
import java.io.FileFilter;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.channels.FileChannel;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import java.util.TreeSet;
import javax.xml.XMLConstants;
import javax.xml.parsers.DocumentBuilderFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import static org.w3c.dom.Node.ELEMENT_NODE;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.plugins.annotations.Component;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;
import org.apache.maven.project.MavenProject;
import org.apache.maven.project.MavenProjectHelper;
import static org.apache.camel.maven.packaging.PackageHelper.loadText;
/**
* Prepares the Karaf provider Camel catalog to include the components it supports
*/
@Mojo(name = "prepare-catalog-karaf", threadSafe = true)
public class PrepareCatalogKarafMojo extends AbstractMojo {
public static final int BUFFER_SIZE = 128 * 1024;
/**
* The maven project.
*/
@Parameter(property = "project", required = true, readonly = true)
protected MavenProject project;
/**
* The output directory for components catalog
*/
@Parameter(defaultValue = "${project.build.directory}/classes/org/apache/camel/catalog/karaf/components")
protected File componentsOutDir;
/**
* The output directory for dataformats catalog
*/
@Parameter(defaultValue = "${project.build.directory}/classes/org/apache/camel/catalog/karaf/dataformats")
protected File dataFormatsOutDir;
/**
* The output directory for languages catalog
*/
@Parameter(defaultValue = "${project.build.directory}/classes/org/apache/camel/catalog/karaf/languages")
protected File languagesOutDir;
/**
* The output directory for others catalog
*/
@Parameter(defaultValue = "${project.build.directory}/classes/org/apache/camel/catalog/karaf/others")
protected File othersOutDir;
/**
* The karaf features directory
*/
@Parameter(defaultValue = "${project.build.directory}/../../../platforms/karaf/features/src/main/resources/")
protected File featuresDir;
/**
* The components directory where all the Apache Camel components are
*/
@Parameter(defaultValue = "${project.build.directory}/../../../components")
protected File componentsDir;
/**
* The camel-core directory
*/
@Parameter(defaultValue = "${project.build.directory}/../../../core/camel-core")
protected File coreDir;
/**
* The camel-base directory
*/
@Parameter(defaultValue = "${project.build.directory}/../../../core/camel-base")
protected File baseDir;
/**
* Maven ProjectHelper.
*/
@Component
private MavenProjectHelper projectHelper;
/**
* Execute goal.
*
* @throws MojoExecutionException execution of the main class or one of the
* threads it generated failed.
* @throws MojoFailureException something bad happened...
*/
public void execute() throws MojoExecutionException, MojoFailureException {
Set<String> features = findKarafFeatures();
executeComponents(features);
executeDataFormats(features);
executeLanguages(features);
executeOthers(features);
}
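// Hedged configuration sketch: how this goal is typically bound in a pom.xml. The plugin
// coordinates below are an assumption (not taken from this file); all directory parameters
// fall back to the @Parameter defaults declared above.
//
//   <plugin>
//     <groupId>org.apache.camel</groupId>
//     <artifactId>camel-package-maven-plugin</artifactId>
//     <executions>
//       <execution>
//         <goals>
//           <goal>prepare-catalog-karaf</goal>
//         </goals>
//       </execution>
//     </executions>
//   </plugin>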
protected void executeComponents(Set<String> features) throws MojoExecutionException, MojoFailureException {
getLog().info("Copying all Camel component json descriptors");
// lets use sorted set/maps
Set<File> jsonFiles = new TreeSet<>();
Set<File> componentFiles = new TreeSet<>();
// find all json files in components and camel-core
if (componentsDir != null && componentsDir.isDirectory()) {
File[] components = componentsDir.listFiles();
if (components != null) {
for (File dir : components) {
if (dir.isDirectory() && !"target".equals(dir.getName())) {
File target = new File(dir, "target/classes");
// the directory must be in the list of known features
if (!features.contains(dir.getName())) {
continue;
}
// special case for some components which live in a sub directory
if ("camel-as2".equals(dir.getName())) {
target = new File(dir, "camel-as2-component/target/classes");
} else if ("camel-box".equals(dir.getName())) {
target = new File(dir, "camel-box-component/target/classes");
} else if ("camel-salesforce".equals(dir.getName())) {
target = new File(dir, "camel-salesforce-component/target/classes");
} else if ("camel-linkedin".equals(dir.getName())) {
target = new File(dir, "camel-linkedin-component/target/classes");
} else if ("camel-servicenow".equals(dir.getName())) {
target = new File(dir, "camel-servicenow-component/target/classes");
} else {
// this module must be active with a source folder
File src = new File(dir, "src");
boolean active = src.isDirectory() && src.exists();
if (!active) {
continue;
}
}
findComponentFilesRecursive(target, jsonFiles, componentFiles, new CamelComponentsFileFilter());
}
}
}
}
if (coreDir != null && coreDir.isDirectory()) {
File target = new File(coreDir, "target/classes");
findComponentFilesRecursive(target, jsonFiles, componentFiles, new CamelComponentsFileFilter());
}
getLog().info("Found " + componentFiles.size() + " component.properties files");
getLog().info("Found " + jsonFiles.size() + " component json files");
// make sure to create out dir
componentsOutDir.mkdirs();
for (File file : jsonFiles) {
File to = new File(componentsOutDir, file.getName());
try {
copyFile(file, to);
} catch (IOException e) {
throw new MojoFailureException("Cannot copy file from " + file + " -> " + to, e);
}
}
File all = new File(componentsOutDir, "../components.properties");
try {
FileOutputStream fos = new FileOutputStream(all, false);
String[] names = componentsOutDir.list();
List<String> components = new ArrayList<>();
// sort the names
for (String name : names) {
if (name.endsWith(".json")) {
// strip out .json from the name
String componentName = name.substring(0, name.length() - 5);
components.add(componentName);
}
}
Collections.sort(components);
for (String name : components) {
fos.write(name.getBytes());
fos.write("\n".getBytes());
}
fos.close();
} catch (IOException e) {
throw new MojoFailureException("Error writing to file " + all);
}
}
protected void executeDataFormats(Set<String> features) throws MojoExecutionException, MojoFailureException {
getLog().info("Copying all Camel dataformat json descriptors");
// lets use sorted set/maps
Set<File> jsonFiles = new TreeSet<>();
Set<File> dataFormatFiles = new TreeSet<>();
// find all data formats from the components directory
if (componentsDir != null && componentsDir.isDirectory()) {
File[] dataFormats = componentsDir.listFiles();
if (dataFormats != null) {
for (File dir : dataFormats) {
if (dir.isDirectory() && !"target".equals(dir.getName())) {
// the directory must be in the list of known features
if (!features.contains(dir.getName())) {
continue;
}
// this module must be active with a source folder
File src = new File(dir, "src");
boolean active = src.isDirectory() && src.exists();
if (!active) {
continue;
}
File target = new File(dir, "target/classes");
findDataFormatFilesRecursive(target, jsonFiles, dataFormatFiles, new CamelDataFormatsFileFilter());
}
}
}
}
if (coreDir != null && coreDir.isDirectory()) {
File target = new File(coreDir, "target/classes");
findDataFormatFilesRecursive(target, jsonFiles, dataFormatFiles, new CamelDataFormatsFileFilter());
}
getLog().info("Found " + dataFormatFiles.size() + " dataformat.properties files");
getLog().info("Found " + jsonFiles.size() + " dataformat json files");
// make sure to create out dir
dataFormatsOutDir.mkdirs();
for (File file : jsonFiles) {
File to = new File(dataFormatsOutDir, file.getName());
try {
copyFile(file, to);
} catch (IOException e) {
throw new MojoFailureException("Cannot copy file from " + file + " -> " + to, e);
}
}
File all = new File(dataFormatsOutDir, "../dataformats.properties");
try {
FileOutputStream fos = new FileOutputStream(all, false);
String[] names = dataFormatsOutDir.list();
List<String> dataFormats = new ArrayList<>();
// sort the names
for (String name : names) {
if (name.endsWith(".json")) {
// strip out .json from the name
String dataFormatName = name.substring(0, name.length() - 5);
dataFormats.add(dataFormatName);
}
}
Collections.sort(dataFormats);
for (String name : dataFormats) {
fos.write(name.getBytes());
fos.write("\n".getBytes());
}
fos.close();
} catch (IOException e) {
throw new MojoFailureException("Error writing to file " + all);
}
}
protected void executeLanguages(Set<String> features) throws MojoExecutionException, MojoFailureException {
getLog().info("Copying all Camel language json descriptors");
// lets use sorted set/maps
Set<File> jsonFiles = new TreeSet<>();
Set<File> languageFiles = new TreeSet<>();
// find all languages from the components directory
if (componentsDir != null && componentsDir.isDirectory()) {
File[] languages = componentsDir.listFiles();
if (languages != null) {
for (File dir : languages) {
// the directory must be in the list of known features (or known languages)
if (!features.contains(dir.getName())
&& !dir.getName().equals("camel-bean")
&& !dir.getName().equals("camel-xpath")) {
continue;
}
// this module must be active with a source folder
File src = new File(dir, "src");
boolean active = src.isDirectory() && src.exists();
if (!active) {
continue;
}
if (dir.isDirectory() && !"target".equals(dir.getName())) {
File target = new File(dir, "target/classes");
findLanguageFilesRecursive(target, jsonFiles, languageFiles, new CamelLanguagesFileFilter());
}
}
}
}
if (baseDir != null && baseDir.isDirectory()) {
File target = new File(baseDir, "target/classes");
findLanguageFilesRecursive(target, jsonFiles, languageFiles, new CamelLanguagesFileFilter());
// also look in camel-jaxp
target = new File(baseDir, "../camel-jaxp/target/classes");
findLanguageFilesRecursive(target, jsonFiles, languageFiles, new CamelLanguagesFileFilter());
}
getLog().info("Found " + languageFiles.size() + " language.properties files");
getLog().info("Found " + jsonFiles.size() + " language json files");
// make sure to create out dir
languagesOutDir.mkdirs();
for (File file : jsonFiles) {
File to = new File(languagesOutDir, file.getName());
try {
copyFile(file, to);
} catch (IOException e) {
throw new MojoFailureException("Cannot copy file from " + file + " -> " + to, e);
}
}
File all = new File(languagesOutDir, "../languages.properties");
try {
FileOutputStream fos = new FileOutputStream(all, false);
String[] names = languagesOutDir.list();
List<String> languages = new ArrayList<>();
// sort the names
for (String name : names) {
if (name.endsWith(".json")) {
// strip out .json from the name
String languageName = name.substring(0, name.length() - 5);
languages.add(languageName);
}
}
Collections.sort(languages);
for (String name : languages) {
fos.write(name.getBytes());
fos.write("\n".getBytes());
}
fos.close();
} catch (IOException e) {
throw new MojoFailureException("Error writing to file " + all);
}
}
protected void executeOthers(Set<String> features) throws MojoExecutionException, MojoFailureException {
getLog().info("Copying all Camel other json descriptors");
// lets use sorted set/maps
Set<File> jsonFiles = new TreeSet<>();
Set<File> otherFiles = new TreeSet<>();
// find all languages from the components directory
if (componentsDir != null && componentsDir.isDirectory()) {
File[] others = componentsDir.listFiles();
if (others != null) {
for (File dir : others) {
// the directory must be in the list of known features
if (!features.contains(dir.getName())) {
continue;
}
// skip these special cases
boolean special = "camel-core-osgi".equals(dir.getName())
|| "camel-core-xml".equals(dir.getName())
|| "camel-http-common".equals(dir.getName())
|| "camel-jetty-common".equals(dir.getName());
boolean special2 = "camel-as2".equals(dir.getName())
|| "camel-box".equals(dir.getName())
|| "camel-linkedin".equals(dir.getName())
|| "camel-olingo2".equals(dir.getName())
|| "camel-olingo4".equals(dir.getName())
|| "camel-servicenow".equals(dir.getName())
|| "camel-salesforce".equals(dir.getName());
if (special || special2) {
continue;
}
// this module must be active with a source folder
File src = new File(dir, "src");
boolean active = src.isDirectory() && src.exists();
if (!active) {
continue;
}
if (dir.isDirectory() && !"target".equals(dir.getName())) {
File target = new File(dir, "target/classes");
findOtherFilesRecursive(target, jsonFiles, otherFiles, new CamelOthersFileFilter());
}
}
}
}
getLog().info("Found " + otherFiles.size() + " other.properties files");
getLog().info("Found " + jsonFiles.size() + " other json files");
// make sure to create out dir
othersOutDir.mkdirs();
for (File file : jsonFiles) {
File to = new File(othersOutDir, file.getName());
try {
copyFile(file, to);
} catch (IOException e) {
throw new MojoFailureException("Cannot copy file from " + file + " -> " + to, e);
}
}
File all = new File(othersOutDir, "../others.properties");
try {
FileOutputStream fos = new FileOutputStream(all, false);
String[] names = othersOutDir.list();
List<String> others = new ArrayList<>();
// sort the names
for (String name : names) {
if (name.endsWith(".json")) {
// strip out .json from the name
String otherName = name.substring(0, name.length() - 5);
others.add(otherName);
}
}
Collections.sort(others);
for (String name : others) {
fos.write(name.getBytes());
fos.write("\n".getBytes());
}
fos.close();
} catch (IOException e) {
throw new MojoFailureException("Error writing to file " + all);
}
}
private void findComponentFilesRecursive(File dir, Set<File> found, Set<File> components, FileFilter filter) {
File[] files = dir.listFiles(filter);
if (files != null) {
for (File file : files) {
// skip files in root dirs as Camel does not store information there but others may do
boolean rootDir = "classes".equals(dir.getName()) || "META-INF".equals(dir.getName());
boolean jsonFile = !rootDir && file.isFile() && file.getName().endsWith(".json");
boolean componentFile = !rootDir && file.isFile() && file.getName().equals("component.properties");
if (jsonFile) {
found.add(file);
} else if (componentFile) {
components.add(file);
} else if (file.isDirectory()) {
findComponentFilesRecursive(file, found, components, filter);
}
}
}
}
private void findDataFormatFilesRecursive(File dir, Set<File> found, Set<File> dataFormats, FileFilter filter) {
File[] files = dir.listFiles(filter);
if (files != null) {
for (File file : files) {
// skip files in root dirs as Camel does not store information there but others may do
boolean rootDir = "classes".equals(dir.getName()) || "META-INF".equals(dir.getName());
boolean jsonFile = !rootDir && file.isFile() && file.getName().endsWith(".json");
boolean dataFormatFile = !rootDir && file.isFile() && file.getName().equals("dataformat.properties");
if (jsonFile) {
found.add(file);
} else if (dataFormatFile) {
dataFormats.add(file);
} else if (file.isDirectory()) {
findDataFormatFilesRecursive(file, found, dataFormats, filter);
}
}
}
}
private void findLanguageFilesRecursive(File dir, Set<File> found, Set<File> languages, FileFilter filter) {
File[] files = dir.listFiles(filter);
if (files != null) {
for (File file : files) {
// skip files in root dirs as Camel does not store information there but others may do
boolean rootDir = "classes".equals(dir.getName()) || "META-INF".equals(dir.getName());
boolean jsonFile = !rootDir && file.isFile() && file.getName().endsWith(".json");
boolean languageFile = !rootDir && file.isFile() && file.getName().equals("language.properties");
if (jsonFile) {
found.add(file);
} else if (languageFile) {
languages.add(file);
} else if (file.isDirectory()) {
findLanguageFilesRecursive(file, found, languages, filter);
}
}
}
}
private void findOtherFilesRecursive(File dir, Set<File> found, Set<File> others, FileFilter filter) {
File[] files = dir.listFiles(filter);
if (files != null) {
for (File file : files) {
// the "other" json descriptor is stored in the root dir (unlike component/dataformat/language descriptors), so only json files there are picked up
boolean rootDir = "classes".equals(dir.getName()) || "META-INF".equals(dir.getName());
boolean jsonFile = rootDir && file.isFile() && file.getName().endsWith(".json");
boolean otherFile = !rootDir && file.isFile() && file.getName().equals("other.properties");
if (jsonFile) {
found.add(file);
} else if (otherFile) {
others.add(file);
} else if (file.isDirectory()) {
findOtherFilesRecursive(file, found, others, filter);
}
}
}
}
private class CamelComponentsFileFilter implements FileFilter {
@Override
public boolean accept(File pathname) {
if (pathname.isDirectory() && pathname.getName().equals("model")) {
// do not check the camel-core model packages as there are no components there
return false;
}
if (pathname.isFile() && pathname.getName().endsWith(".json")) {
// must be a component json file
try {
String json = loadText(new FileInputStream(pathname));
return json != null && json.contains("\"kind\": \"component\"");
} catch (IOException e) {
// ignore
}
}
return pathname.isDirectory() || (pathname.isFile() && pathname.getName().equals("component.properties"));
}
}
private class CamelDataFormatsFileFilter implements FileFilter {
@Override
public boolean accept(File pathname) {
if (pathname.isDirectory() && pathname.getName().equals("model")) {
// do not check the camel-core model packages as there are no dataformats there
return false;
}
if (pathname.isFile() && pathname.getName().endsWith(".json")) {
// must be a dataformat json file
try {
String json = loadText(new FileInputStream(pathname));
return json != null && json.contains("\"kind\": \"dataformat\"");
} catch (IOException e) {
// ignore
}
}
return pathname.isDirectory() || (pathname.isFile() && pathname.getName().equals("dataformat.properties"));
}
}
private class CamelLanguagesFileFilter implements FileFilter {
@Override
public boolean accept(File pathname) {
if (pathname.isDirectory() && pathname.getName().equals("model")) {
// do not check the camel-core model packages as there are no languages there
return false;
}
if (pathname.isFile() && pathname.getName().endsWith(".json")) {
// must be a language json file
try {
String json = loadText(new FileInputStream(pathname));
return json != null && json.contains("\"kind\": \"language\"");
} catch (IOException e) {
// ignore
}
}
return pathname.isDirectory() || (pathname.isFile() && pathname.getName().equals("language.properties"));
}
}
private class CamelOthersFileFilter implements FileFilter {
@Override
public boolean accept(File pathname) {
if (pathname.isFile() && pathname.getName().endsWith(".json")) {
// must be an "other" json file
try {
String json = loadText(new FileInputStream(pathname));
return json != null && json.contains("\"kind\": \"other\"");
} catch (IOException e) {
// ignore
}
}
return pathname.isDirectory() || (pathname.isFile() && pathname.getName().equals("other.properties"));
}
}
public static void copyFile(File from, File to) throws IOException {
FileChannel in = null;
FileChannel out = null;
try (FileInputStream fis = new FileInputStream(from); FileOutputStream fos = new FileOutputStream(to)) {
try {
in = fis.getChannel();
out = fos.getChannel();
long size = in.size();
long position = 0;
while (position < size) {
position += in.transferTo(position, BUFFER_SIZE, out);
}
} finally {
if (in != null) {
in.close();
}
if (out != null) {
out.close();
}
}
}
}
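// Design note: on Java 7+ an equivalent copy would be
//   java.nio.file.Files.copy(from.toPath(), to.toPath(), java.nio.file.StandardCopyOption.REPLACE_EXISTING);
// the explicit channel loop above is kept as-is; it transfers the file in BUFFER_SIZE chunks.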
private Set<String> findKarafFeatures() throws MojoExecutionException, MojoFailureException {
// load features.xml file and parse it
Set<String> answer = new LinkedHashSet<>();
try {
File file = new File(featuresDir, "features.xml");
try (InputStream is = new FileInputStream(file)) {
DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
dbf.setIgnoringComments(true);
dbf.setIgnoringElementContentWhitespace(true);
dbf.setNamespaceAware(false);
dbf.setValidating(false);
dbf.setXIncludeAware(false);
dbf.setFeature(XMLConstants.FEATURE_SECURE_PROCESSING, Boolean.TRUE);
Document dom = dbf.newDocumentBuilder().parse(is);
NodeList children = dom.getElementsByTagName("features");
for (int i = 0; i < children.getLength(); i++) {
Node child = children.item(i);
if (child.getNodeType() == ELEMENT_NODE) {
NodeList children2 = child.getChildNodes();
for (int j = 0; j < children2.getLength(); j++) {
Node child2 = children2.item(j);
if ("feature".equals(child2.getNodeName())) {
String artifactId = child2.getAttributes().getNamedItem("name").getTextContent();
if (artifactId != null && artifactId.startsWith("camel-")) {
answer.add(artifactId);
}
}
}
}
}
getLog().info("Found " + answer.size() + " Camel features in file: " + file);
} catch (Exception e) {
throw new MojoExecutionException("Error reading features.xml file", e);
}
return answer;
}
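// Illustrative shape of the parsed features.xml (content assumed, not copied from the build):
//
//   <features name="camel-x.y.z" xmlns="...">
//     <feature name="camel-core" version="x.y.z"> ... </feature>
//     <feature name="camel-ftp" version="x.y.z"> ... </feature>
//   </features>
//
// Only <feature> names starting with "camel-" end up in the returned set.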
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.cql3;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.junit.Assert;
import org.junit.Test;
import org.apache.cassandra.cache.KeyCacheKey;
import org.apache.cassandra.config.Schema;
import org.apache.cassandra.db.Keyspace;
import org.apache.cassandra.metrics.CacheMetrics;
import org.apache.cassandra.metrics.CassandraMetricsRegistry;
import org.apache.cassandra.service.CacheService;
import org.apache.cassandra.service.StorageService;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertNull;
import org.apache.cassandra.utils.Pair;
public class KeyCacheCqlTest extends CQLTester
{
static final String commonColumnsDef =
"part_key_a int," +
"part_key_b text," +
"clust_key_a int," +
"clust_key_b text," +
"clust_key_c frozen<list<text>>," + // to make it really big
"col_text text," +
"col_int int," +
"col_long bigint," +
"col_blob blob,";
static final String commonColumns =
"part_key_a," +
"part_key_b," +
"clust_key_a," +
"clust_key_b," +
"clust_key_c," + // to make it really big
"col_text," +
"col_int," +
"col_long";
// 1200 chars
static final String longString = "0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789" +
"0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789" +
"0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789" +
"0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789" +
"0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789" +
"0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789" +
"0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789" +
"0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789" +
"0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789" +
"0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789" +
"0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789" +
"0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789";
@Test
public void testSliceQueries() throws Throwable
{
createTable("CREATE TABLE %s (pk text, ck1 int, ck2 int, val text, vpk text, vck1 int, vck2 int, PRIMARY KEY (pk, ck1, ck2))");
for (int pkInt = 0; pkInt < 20; pkInt++)
{
String pk = Integer.toString(pkInt);
for (int ck1 = 0; ck1 < 10; ck1++)
{
for (int ck2 = 0; ck2 < 10; ck2++)
{
execute("INSERT INTO %s (pk, ck1, ck2, val, vpk, vck1, vck2) VALUES (?, ?, ?, ?, ?, ?, ?)",
pk, ck1, ck2, makeStringValue(pk, ck1, ck2), pk, ck1, ck2);
}
}
}
StorageService.instance.forceKeyspaceFlush(KEYSPACE);
for (int pkInt = 0; pkInt < 20; pkInt++)
{
String pk = Integer.toString(pkInt);
assertClusterRows(execute("SELECT val, vpk, vck1, vck2 FROM %s WHERE pk=?", pk),
pk, 0, 10, 0, 10);
for (int ck1 = 0; ck1 < 10; ck1++)
{
assertClusterRows(execute("SELECT val, vpk, vck1, vck2 FROM %s WHERE pk=? AND ck1=?", pk, ck1),
pk, ck1, ck1+1, 0, 10);
assertClusterRows(execute("SELECT val, vpk, vck1, vck2 FROM %s WHERE pk=? AND ck1<?", pk, ck1),
pk, 0, ck1, 0, 10);
assertClusterRows(execute("SELECT val, vpk, vck1, vck2 FROM %s WHERE pk=? AND ck1>?", pk, ck1),
pk, ck1+1, 10, 0, 10);
assertClusterRows(execute("SELECT val, vpk, vck1, vck2 FROM %s WHERE pk=? AND ck1<=?", pk, ck1),
pk, 0, ck1+1, 0, 10);
assertClusterRows(execute("SELECT val, vpk, vck1, vck2 FROM %s WHERE pk=? AND ck1>=?", pk, ck1),
pk, ck1, 10, 0, 10);
for (int ck2 = 0; ck2 < 10; ck2++)
{
assertRows(execute("SELECT val, vpk, vck1, vck2 FROM %s WHERE pk=? AND ck1=? AND ck2=?", pk, ck1, ck2),
new Object[]{ makeStringValue(pk, ck1, ck2), pk, ck1, ck2 });
assertClusterRows(execute("SELECT val, vpk, vck1, vck2 FROM %s WHERE pk=? AND ck1=? AND ck2<?", pk, ck1, ck2),
pk, ck1, ck1+1, 0, ck2);
assertClusterRows(execute("SELECT val, vpk, vck1, vck2 FROM %s WHERE pk=? AND ck1=? AND ck2>?", pk, ck1, ck2),
pk, ck1, ck1+1, ck2+1, 10);
assertClusterRows(execute("SELECT val, vpk, vck1, vck2 FROM %s WHERE pk=? AND ck1=? AND ck2<=?", pk, ck1, ck2),
pk, ck1, ck1+1, 0, ck2+1);
assertClusterRows(execute("SELECT val, vpk, vck1, vck2 FROM %s WHERE pk=? AND ck1=? AND ck2>=?", pk, ck1, ck2),
pk, ck1, ck1+1, ck2, 10);
}
}
}
}
private static void assertClusterRows(UntypedResultSet rows, String pk, int ck1from, int ck1to, int ck2from, int ck2to)
{
String info = "pk=" + pk + ", ck1from=" + ck1from + ", ck1to=" + ck1to + ", ck2from=" + ck2from + ", ck2to=" + ck2to;
Iterator<UntypedResultSet.Row> iter = rows.iterator();
int cnt = 0;
int expect = (ck1to - ck1from) * (ck2to - ck2from);
for (int ck1 = ck1from; ck1 < ck1to; ck1++)
{
for (int ck2 = ck2from; ck2 < ck2to; ck2++)
{
assertTrue("expected " + expect + " (already got " + cnt + ") rows, but more rows are available for " + info, iter.hasNext());
UntypedResultSet.Row row = iter.next();
cnt++;
assertEquals(makeStringValue(pk, ck1, ck2), row.getString("val"));
assertEquals(pk, row.getString("vpk"));
assertEquals(ck1, row.getInt("vck1"));
assertEquals(ck2, row.getInt("vck2"));
}
}
assertFalse("expected " + expect + " (already got " + cnt + ") rows, but more rows are available for " + info, iter.hasNext());
}
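// For example, the full-partition query in testSliceQueries (ck1from=0, ck1to=10, ck2from=0,
// ck2to=10) expects (10 - 0) * (10 - 0) = 100 rows per partition key.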
private static String makeStringValue(String pk, int ck1, int ck2)
{
return longString + ',' + pk + ',' + ck1 + ',' + ck2;
}
@Test
public void test2iKeyCachePaths() throws Throwable
{
String table = createTable("CREATE TABLE %s ("
+ commonColumnsDef
+ "PRIMARY KEY ((part_key_a, part_key_b),clust_key_a,clust_key_b,clust_key_c))");
createIndex("CREATE INDEX some_index ON %s (col_int)");
insertData(table, "some_index", true);
clearCache();
CacheMetrics metrics = CacheService.instance.keyCache.getMetrics();
for (int i = 0; i < 10; i++)
{
UntypedResultSet result = execute("SELECT part_key_a FROM %s WHERE col_int = ?", i);
assertEquals(500, result.size());
}
long hits = metrics.hits.getCount();
long requests = metrics.requests.getCount();
assertEquals(0, hits);
assertEquals(210, requests);
for (int i = 0; i < 10; i++)
{
UntypedResultSet result = execute("SELECT part_key_a FROM %s WHERE col_int = ?", i);
// 100 part-keys * 50 clust-keys
// indexed on part-key % 10 = 10 index partitions
// (50 clust-keys * 100-part-keys / 10 possible index-values) = 500
assertEquals(500, result.size());
}
metrics = CacheService.instance.keyCache.getMetrics();
hits = metrics.hits.getCount();
requests = metrics.requests.getCount();
assertEquals(200, hits);
assertEquals(420, requests);
CacheService.instance.keyCache.submitWrite(Integer.MAX_VALUE).get();
int beforeSize = CacheService.instance.keyCache.size();
CacheService.instance.keyCache.clear();
Assert.assertEquals(0, CacheService.instance.keyCache.size());
// then load saved
CacheService.instance.keyCache.loadSaved();
assertEquals(beforeSize, CacheService.instance.keyCache.size());
for (int i = 0; i < 10; i++)
{
UntypedResultSet result = execute("SELECT part_key_a FROM %s WHERE col_int = ?", i);
// 100 part-keys * 50 clust-keys
// indexed on part-key % 10 = 10 index partitions
// (50 clust-keys * 100-part-keys / 10 possible index-values) = 500
assertEquals(500, result.size());
}
// Test Schema.getColumnFamilyStoreIncludingIndexes; several null-check paths
// are defensive and unreachable
assertNull(Schema.instance.getColumnFamilyStoreIncludingIndexes(Pair.create("foo", "bar")));
assertNull(Schema.instance.getColumnFamilyStoreIncludingIndexes(Pair.create(KEYSPACE, "bar")));
dropTable("DROP TABLE %s");
//Test loading for a dropped 2i/table
CacheService.instance.keyCache.clear();
// then load saved
CacheService.instance.keyCache.loadSaved();
assertEquals(0, CacheService.instance.keyCache.size());
}
@Test
public void test2iKeyCachePathsSaveKeysForDroppedTable() throws Throwable
{
String table = createTable("CREATE TABLE %s ("
+ commonColumnsDef
+ "PRIMARY KEY ((part_key_a, part_key_b),clust_key_a,clust_key_b,clust_key_c))");
createIndex("CREATE INDEX some_index ON %s (col_int)");
insertData(table, "some_index", true);
clearCache();
CacheMetrics metrics = CacheService.instance.keyCache.getMetrics();
for (int i = 0; i < 10; i++)
{
UntypedResultSet result = execute("SELECT part_key_a FROM %s WHERE col_int = ?", i);
assertEquals(500, result.size());
}
long hits = metrics.hits.getCount();
long requests = metrics.requests.getCount();
assertEquals(0, hits);
assertEquals(210, requests);
//
for (int i = 0; i < 10; i++)
{
UntypedResultSet result = execute("SELECT part_key_a FROM %s WHERE col_int = ?", i);
// 100 part-keys * 50 clust-keys
// indexed on part-key % 10 = 10 index partitions
// (50 clust-keys * 100-part-keys / 10 possible index-values) = 500
assertEquals(500, result.size());
}
metrics = CacheService.instance.keyCache.getMetrics();
hits = metrics.hits.getCount();
requests = metrics.requests.getCount();
assertEquals(200, hits);
assertEquals(420, requests);
dropTable("DROP TABLE %s");
CacheService.instance.keyCache.submitWrite(Integer.MAX_VALUE).get();
CacheService.instance.keyCache.clear();
Assert.assertEquals(0, CacheService.instance.keyCache.size());
// then load saved
CacheService.instance.keyCache.loadSaved();
Iterator<KeyCacheKey> iter = CacheService.instance.keyCache.keyIterator();
while(iter.hasNext())
{
KeyCacheKey key = iter.next();
Assert.assertFalse(key.ksAndCFName.left.equals(KEYSPACE));
Assert.assertFalse(key.ksAndCFName.right.startsWith(table));
}
}
@Test
public void testKeyCacheNonClustered() throws Throwable
{
String table = createTable("CREATE TABLE %s ("
+ commonColumnsDef
+ "PRIMARY KEY ((part_key_a, part_key_b)))");
insertData(table, null, false);
clearCache();
for (int i = 0; i < 10; i++)
{
assertRows(execute("SELECT col_text FROM %s WHERE part_key_a = ? AND part_key_b = ?", i, Integer.toOctalString(i)),
new Object[]{ String.valueOf(i) + '-' + String.valueOf(0) });
}
CacheMetrics metrics = CacheService.instance.keyCache.getMetrics();
long hits = metrics.hits.getCount();
long requests = metrics.requests.getCount();
assertEquals(0, hits);
assertEquals(10, requests);
for (int i = 0; i < 100; i++)
{
assertRows(execute("SELECT col_text FROM %s WHERE part_key_a = ? AND part_key_b = ?", i, Integer.toOctalString(i)),
new Object[]{ String.valueOf(i) + '-' + String.valueOf(0) });
}
hits = metrics.hits.getCount();
requests = metrics.requests.getCount();
assertEquals(10, hits);
assertEquals(120, requests);
}
@Test
public void testKeyCacheClustered() throws Throwable
{
String table = createTable("CREATE TABLE %s ("
+ commonColumnsDef
+ "PRIMARY KEY ((part_key_a, part_key_b),clust_key_a,clust_key_b,clust_key_c))");
insertData(table, null, true);
clearCache();
// query on partition key
// 10 queries, each 50 result rows
for (int i = 0; i < 10; i++)
{
assertEquals(50, execute("SELECT col_text FROM %s WHERE part_key_a = ? AND part_key_b = ?", i, Integer.toOctalString(i)).size());
}
CacheMetrics metrics = CacheService.instance.keyCache.getMetrics();
long hits = metrics.hits.getCount();
long requests = metrics.requests.getCount();
assertEquals(0, hits);
assertEquals(10, requests);
// 10 queries, each 50 result rows
for (int i = 0; i < 10; i++)
{
assertEquals(50, execute("SELECT col_text FROM %s WHERE part_key_a = ? AND part_key_b = ?", i, Integer.toOctalString(i)).size());
}
metrics = CacheService.instance.keyCache.getMetrics();
hits = metrics.hits.getCount();
requests = metrics.requests.getCount();
assertEquals(10, hits);
assertEquals(10 + 10, requests);
// 100 queries - must get a hit in key-cache
for (int i = 0; i < 10; i++)
{
for (int c = 0; c < 10; c++)
{
assertRows(execute("SELECT col_text, col_long FROM %s WHERE part_key_a = ? AND part_key_b = ? and clust_key_a = ?", i, Integer.toOctalString(i), c),
new Object[]{ String.valueOf(i) + '-' + String.valueOf(c), (long) c });
}
}
metrics = CacheService.instance.keyCache.getMetrics();
hits = metrics.hits.getCount();
requests = metrics.requests.getCount();
assertEquals(10 + 100, hits);
assertEquals(20 + 100, requests);
// 5000 queries - first 10 partitions already in key cache
for (int i = 0; i < 100; i++)
{
for (int c = 0; c < 50; c++)
{
assertRows(execute("SELECT col_text, col_long FROM %s WHERE part_key_a = ? AND part_key_b = ? and clust_key_a = ?", i, Integer.toOctalString(i), c),
new Object[]{ String.valueOf(i) + '-' + String.valueOf(c), (long) c });
}
}
hits = metrics.hits.getCount();
requests = metrics.requests.getCount();
assertEquals(110 + 4910, hits);
assertEquals(120 + 5500, requests);
}
// Inserts 100 partitions split over 10 sstables (flush after 10 partitions).
// Clustered tables receive 50 CQL rows per partition.
private void insertData(String table, String index, boolean withClustering) throws Throwable
{
prepareTable(table);
if (index != null)
{
StorageService.instance.disableAutoCompaction(KEYSPACE, table + '.' + index);
Keyspace.open(KEYSPACE).getColumnFamilyStore(table).indexManager.getIndexByName(index).getBlockingFlushTask().call();
}
for (int i = 0; i < 100; i++)
{
int partKeyA = i;
String partKeyB = Integer.toOctalString(i);
for (int c = 0; c < (withClustering ? 50 : 1); c++)
{
int clustKeyA = c;
String clustKeyB = Integer.toOctalString(c);
List<String> clustKeyC = makeList(clustKeyB);
String colText = String.valueOf(i) + '-' + String.valueOf(c);
int colInt = i % 10;
long colLong = c;
execute("INSERT INTO %s (" + commonColumns + ") VALUES (?, ?, ?, ?, ?, ?, ?, ?)",
partKeyA, partKeyB,
clustKeyA, clustKeyB, clustKeyC,
colText, colInt, colLong);
}
if (i % 10 == 9)
{
Keyspace.open(KEYSPACE).getColumnFamilyStore(table).forceFlush().get();
if (index != null)
Keyspace.open(KEYSPACE).getColumnFamilyStore(table).indexManager.getIndexByName(index).getBlockingFlushTask().call();
}
}
}
private static void prepareTable(String table) throws IOException, InterruptedException, java.util.concurrent.ExecutionException
{
StorageService.instance.disableAutoCompaction(KEYSPACE, table);
Keyspace.open(KEYSPACE).getColumnFamilyStore(table).forceFlush().get();
Keyspace.open(KEYSPACE).getColumnFamilyStore(table).truncateBlocking();
}
private static List<String> makeList(String value)
{
List<String> list = new ArrayList<>(50);
for (int i = 0; i < 50; i++)
{
list.add(value + i);
}
return list;
}
private static void clearCache()
{
CassandraMetricsRegistry.Metrics.getNames().forEach(CassandraMetricsRegistry.Metrics::remove);
CacheService.instance.keyCache.clear();
CacheMetrics metrics = CacheService.instance.keyCache.getMetrics();
Assert.assertEquals(0, metrics.entries.getValue().intValue());
Assert.assertEquals(0L, metrics.hits.getCount());
Assert.assertEquals(0L, metrics.requests.getCount());
Assert.assertEquals(0L, metrics.size.getValue().longValue());
}
}
|
|
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.resiliencehub.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/resiliencehub-2020-04-30/StartAppAssessment" target="_top">AWS
* API Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class StartAppAssessmentRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {
/**
* <p>
* The Amazon Resource Name (ARN) of the application. The format for this ARN is: arn:<code>partition</code>:dcps:
* <code>region</code>:<code>account</code>:app/<code>app-id</code>. For more information about ARNs, see <a
* href="https://docs.aws.amazon.com/general/latest/gr/aws-arns-and-namespaces.html"> Amazon Resource Names
* (ARNs)</a> in the <i>AWS General Reference</i>.
* </p>
*/
private String appArn;
/**
* <p>
* The version of the application.
* </p>
*/
private String appVersion;
/**
* <p>
* The name for the assessment.
* </p>
*/
private String assessmentName;
/**
* <p>
* Used for an idempotency token. A client token is a unique, case-sensitive string of up to 64 ASCII characters.
* You should not reuse the same client token for other API requests.
* </p>
*/
private String clientToken;
/**
* <p>
* The tags assigned to the resource. A tag is a label that you assign to an Amazon Web Services resource. Each tag
* consists of a key/value pair.
* </p>
*/
private java.util.Map<String, String> tags;
/**
* <p>
* The Amazon Resource Name (ARN) of the application. The format for this ARN is: arn:<code>partition</code>:dcps:
* <code>region</code>:<code>account</code>:app/<code>app-id</code>. For more information about ARNs, see <a
* href="https://docs.aws.amazon.com/general/latest/gr/aws-arns-and-namespaces.html"> Amazon Resource Names
* (ARNs)</a> in the <i>AWS General Reference</i>.
* </p>
*
* @param appArn
* The Amazon Resource Name (ARN) of the application. The format for this ARN is: arn:<code>partition</code>
* :dcps:<code>region</code>:<code>account</code>:app/<code>app-id</code>. For more information about ARNs,
* see <a href="https://docs.aws.amazon.com/general/latest/gr/aws-arns-and-namespaces.html"> Amazon Resource
* Names (ARNs)</a> in the <i>AWS General Reference</i>.
*/
public void setAppArn(String appArn) {
this.appArn = appArn;
}
/**
* <p>
* The Amazon Resource Name (ARN) of the application. The format for this ARN is: arn:<code>partition</code>:dcps:
* <code>region</code>:<code>account</code>:app/<code>app-id</code>. For more information about ARNs, see <a
* href="https://docs.aws.amazon.com/general/latest/gr/aws-arns-and-namespaces.html"> Amazon Resource Names
* (ARNs)</a> in the <i>AWS General Reference</i>.
* </p>
*
* @return The Amazon Resource Name (ARN) of the application. The format for this ARN is: arn:<code>partition</code>
* :dcps:<code>region</code>:<code>account</code>:app/<code>app-id</code>. For more information about ARNs,
* see <a href="https://docs.aws.amazon.com/general/latest/gr/aws-arns-and-namespaces.html"> Amazon Resource
* Names (ARNs)</a> in the <i>AWS General Reference</i>.
*/
public String getAppArn() {
return this.appArn;
}
/**
* <p>
* The Amazon Resource Name (ARN) of the application. The format for this ARN is: arn:<code>partition</code>:dcps:
* <code>region</code>:<code>account</code>:app/<code>app-id</code>. For more information about ARNs, see <a
* href="https://docs.aws.amazon.com/general/latest/gr/aws-arns-and-namespaces.html"> Amazon Resource Names
* (ARNs)</a> in the <i>AWS General Reference</i>.
* </p>
*
* @param appArn
* The Amazon Resource Name (ARN) of the application. The format for this ARN is: arn:<code>partition</code>
* :dcps:<code>region</code>:<code>account</code>:app/<code>app-id</code>. For more information about ARNs,
* see <a href="https://docs.aws.amazon.com/general/latest/gr/aws-arns-and-namespaces.html"> Amazon Resource
* Names (ARNs)</a> in the <i>AWS General Reference</i>.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public StartAppAssessmentRequest withAppArn(String appArn) {
setAppArn(appArn);
return this;
}
/**
* <p>
* The version of the application.
* </p>
*
* @param appVersion
* The version of the application.
*/
public void setAppVersion(String appVersion) {
this.appVersion = appVersion;
}
/**
* <p>
* The version of the application.
* </p>
*
* @return The version of the application.
*/
public String getAppVersion() {
return this.appVersion;
}
/**
* <p>
* The version of the application.
* </p>
*
* @param appVersion
* The version of the application.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public StartAppAssessmentRequest withAppVersion(String appVersion) {
setAppVersion(appVersion);
return this;
}
/**
* <p>
* The name for the assessment.
* </p>
*
* @param assessmentName
* The name for the assessment.
*/
public void setAssessmentName(String assessmentName) {
this.assessmentName = assessmentName;
}
/**
* <p>
* The name for the assessment.
* </p>
*
* @return The name for the assessment.
*/
public String getAssessmentName() {
return this.assessmentName;
}
/**
* <p>
* The name for the assessment.
* </p>
*
* @param assessmentName
* The name for the assessment.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public StartAppAssessmentRequest withAssessmentName(String assessmentName) {
setAssessmentName(assessmentName);
return this;
}
/**
* <p>
* Used for an idempotency token. A client token is a unique, case-sensitive string of up to 64 ASCII characters.
* You should not reuse the same client token for other API requests.
* </p>
*
* @param clientToken
* Used for an idempotency token. A client token is a unique, case-sensitive string of up to 64 ASCII
* characters. You should not reuse the same client token for other API requests.
*/
public void setClientToken(String clientToken) {
this.clientToken = clientToken;
}
/**
* <p>
* Used for an idempotency token. A client token is a unique, case-sensitive string of up to 64 ASCII characters.
* You should not reuse the same client token for other API requests.
* </p>
*
* @return Used for an idempotency token. A client token is a unique, case-sensitive string of up to 64 ASCII
* characters. You should not reuse the same client token for other API requests.
*/
public String getClientToken() {
return this.clientToken;
}
/**
* <p>
* Used for an idempotency token. A client token is a unique, case-sensitive string of up to 64 ASCII characters.
* You should not reuse the same client token for other API requests.
* </p>
*
* @param clientToken
* Used for an idempotency token. A client token is a unique, case-sensitive string of up to 64 ASCII
* characters. You should not reuse the same client token for other API requests.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public StartAppAssessmentRequest withClientToken(String clientToken) {
setClientToken(clientToken);
return this;
}
/**
* <p>
* The tags assigned to the resource. A tag is a label that you assign to an Amazon Web Services resource. Each tag
* consists of a key/value pair.
* </p>
*
* @return The tags assigned to the resource. A tag is a label that you assign to an Amazon Web Services resource.
* Each tag consists of a key/value pair.
*/
public java.util.Map<String, String> getTags() {
return tags;
}
/**
* <p>
* The tags assigned to the resource. A tag is a label that you assign to an Amazon Web Services resource. Each tag
* consists of a key/value pair.
* </p>
*
* @param tags
* The tags assigned to the resource. A tag is a label that you assign to an Amazon Web Services resource.
* Each tag consists of a key/value pair.
*/
public void setTags(java.util.Map<String, String> tags) {
this.tags = tags;
}
/**
* <p>
* The tags assigned to the resource. A tag is a label that you assign to an Amazon Web Services resource. Each tag
* consists of a key/value pair.
* </p>
*
* @param tags
* The tags assigned to the resource. A tag is a label that you assign to an Amazon Web Services resource.
* Each tag consists of a key/value pair.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public StartAppAssessmentRequest withTags(java.util.Map<String, String> tags) {
setTags(tags);
return this;
}
/**
* Add a single Tags entry
*
* @see StartAppAssessmentRequest#withTags
* @return Returns a reference to this object so that method calls can be chained together.
*/
public StartAppAssessmentRequest addTagsEntry(String key, String value) {
if (null == this.tags) {
this.tags = new java.util.HashMap<String, String>();
}
if (this.tags.containsKey(key))
throw new IllegalArgumentException("Duplicated keys (" + key.toString() + ") are provided.");
this.tags.put(key, value);
return this;
}
/**
* Removes all the entries added into Tags.
*
* @return Returns a reference to this object so that method calls can be chained together.
*/
public StartAppAssessmentRequest clearTagsEntries() {
this.tags = null;
return this;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getAppArn() != null)
sb.append("AppArn: ").append(getAppArn()).append(",");
if (getAppVersion() != null)
sb.append("AppVersion: ").append(getAppVersion()).append(",");
if (getAssessmentName() != null)
sb.append("AssessmentName: ").append(getAssessmentName()).append(",");
if (getClientToken() != null)
sb.append("ClientToken: ").append(getClientToken()).append(",");
if (getTags() != null)
sb.append("Tags: ").append("***Sensitive Data Redacted***");
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof StartAppAssessmentRequest == false)
return false;
StartAppAssessmentRequest other = (StartAppAssessmentRequest) obj;
if (other.getAppArn() == null ^ this.getAppArn() == null)
return false;
if (other.getAppArn() != null && other.getAppArn().equals(this.getAppArn()) == false)
return false;
if (other.getAppVersion() == null ^ this.getAppVersion() == null)
return false;
if (other.getAppVersion() != null && other.getAppVersion().equals(this.getAppVersion()) == false)
return false;
if (other.getAssessmentName() == null ^ this.getAssessmentName() == null)
return false;
if (other.getAssessmentName() != null && other.getAssessmentName().equals(this.getAssessmentName()) == false)
return false;
if (other.getClientToken() == null ^ this.getClientToken() == null)
return false;
if (other.getClientToken() != null && other.getClientToken().equals(this.getClientToken()) == false)
return false;
if (other.getTags() == null ^ this.getTags() == null)
return false;
if (other.getTags() != null && other.getTags().equals(this.getTags()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getAppArn() == null) ? 0 : getAppArn().hashCode());
hashCode = prime * hashCode + ((getAppVersion() == null) ? 0 : getAppVersion().hashCode());
hashCode = prime * hashCode + ((getAssessmentName() == null) ? 0 : getAssessmentName().hashCode());
hashCode = prime * hashCode + ((getClientToken() == null) ? 0 : getClientToken().hashCode());
hashCode = prime * hashCode + ((getTags() == null) ? 0 : getTags().hashCode());
return hashCode;
}
@Override
public StartAppAssessmentRequest clone() {
return (StartAppAssessmentRequest) super.clone();
}
}
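/*
 * Hedged construction sketch (ARN, names and tag values are placeholders, following the format
 * documented on appArn above); the fluent with* methods delegate to the plain setters and return
 * this, so calls can be chained:
 *
 *   StartAppAssessmentRequest request = new StartAppAssessmentRequest()
 *           .withAppArn("arn:aws:dcps:us-east-1:123456789012:app/example-app-id")
 *           .withAppVersion("release")
 *           .withAssessmentName("nightly-assessment")
 *           .withClientToken(java.util.UUID.randomUUID().toString())
 *           .addTagsEntry("team", "resilience");
 */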
|
|
package org.utilities.reports;
/*
* #%L
* reports
* $Id:$
* $HeadURL:$
* %%
* Copyright (C) 2015 ElGuille
* %%
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* 3. Neither the name of the ElGuille nor the names of its contributors
* may be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
* #L%
*/
import java.util.Map;
import java.util.HashMap;
import java.util.List;
import java.util.ArrayList;
import java.io.File;
import java.sql.Connection;
public class JReport extends AbstractJasperReports
{
/***
* Global and final variables
*/
public static final String PATHORIGIN = "path";
public static final String FILEORIGIN = "file";
public static final String CONNECTIONDATASOURCE = "connection";
public static final String COLLECTIONDATASOURCE = "collection";
public static final String CSVDATASOURCE = "csv";
// ---------------------------------------------------------
/***
* Properties of the model
*/
private String name = "";
private String department = "";
private String path = "";
private File file = new File( "" );
private Connection connection = null;
private List<?> collection = new ArrayList<>();
private File csvFile = new File( "" );
private Map<String, Object> parameters = new HashMap<>();
private String reportOrigin = ""; // path or file
private String reportDataSource = ""; // connection or collection
// ---------------------------------------------------------
/***
* Constructors
*/
public JReport() {}
public JReport( String name, String department, String reportOrigin, Object origin )
{
this.name = name;
this.department = department;
this.reportOrigin = reportOrigin;
configureOrigin( origin );
}
public JReport( String name, String department, Map parameters, String reportOrigin, Object origin )
{
this( name, department, reportOrigin, origin );
this.parameters = parameters;
}
public JReport( String name, String department, String reportOrigin, Object origin, String reportDataSource, Object dataSource )
{
this( name, department, reportOrigin, origin );
this.reportDataSource = reportDataSource;
configureDataSource( dataSource );
}
public JReport( String name, String department, Map parameters, String reportOrigin, Object origin, String reportDataSource, Object dataSource )
{
this( name, department, reportOrigin, origin, reportDataSource, dataSource );
this.parameters = parameters;
}
// ----------------------------------------------------
private void configureOrigin( Object origin )
{
if( origin != null )
{
switch( reportOrigin )
{
case "path":
this.path = String.valueOf( origin );
break;
case "file":
this.file = (File) origin;
break;
}
}
}
private void configureDataSource( Object dataSource )
{
if( ( !reportDataSource.equals( "" ) ) && ( dataSource != null ) )
{
switch( reportDataSource )
{
case "connection":
this.connection = (Connection) dataSource;
break;
case "collection":
this.collection = (List<?>) dataSource;
break;
case "csv":
this.csvFile = (File) dataSource;
break;
}
}
}
// ----------------------------------------------------
public void setParameter( String key, Object value )
{
if( this.parameters == null )
this.parameters = new HashMap<String, Object>();
this.parameters.put( key, value );
}
public String getPath() {
return this.path;
}
public void setPath(String path)
{
reportOrigin = PATHORIGIN;
this.path = path;
}
public File getFile() {
return file;
}
public void setFile(File file)
{
reportOrigin = FILEORIGIN;
this.file = file;
}
public Connection getConnection() {
return this.connection;
}
public void setConnection(Connection connection)
{
this.reportDataSource = CONNECTIONDATASOURCE;
this.connection = connection;
}
public List<?> getCollection() {
return this.collection;
}
public void setCollection(List<?> collection)
{
this.reportDataSource = COLLECTIONDATASOURCE;
this.collection = collection;
}
public File getCSVFile() {
return this.csvFile;
}
public void setCSVFile( File csvFile )
{
this.reportDataSource = CSVDATASOURCE;
this.csvFile = csvFile;
}
public Map<String, Object> getParameters() {
return parameters;
}
public void setParameters(Map<String, Object> parameters) {
this.parameters = parameters;
}
public String getReportOrigin() {
return this.reportOrigin;
}
public void setReportOrigin( String reportOrigin ) {
this.reportOrigin = reportOrigin;
}
public String getReportDataSource() {
return this.reportDataSource;
}
public void setReportDataSource(String reportDataSource) {
this.reportDataSource = reportDataSource;
}
}
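/*
 * Hedged usage sketch (report name, department and path are placeholders): a report definition
 * loaded from a path and filled from a JDBC connection, using the origin/data-source keys
 * defined at the top of the class.
 *
 *   java.sql.Connection conn = ...; // obtained elsewhere
 *   JReport report = new JReport("monthly-sales", "finance",
 *           JReport.PATHORIGIN, "/reports/monthly-sales.jrxml",
 *           JReport.CONNECTIONDATASOURCE, conn);
 *   report.setParameter("MONTH", 3);
 */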
|
|
/*
* This file is part of Sponge, licensed under the MIT License (MIT).
*
* Copyright (c) SpongePowered <https://www.spongepowered.org>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.spongepowered.common.registry.type.entity;
import static com.google.common.base.Preconditions.checkNotNull;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Maps;
import net.minecraft.entity.Entity;
import net.minecraft.entity.EntityList;
import net.minecraft.entity.boss.EntityDragonPart;
import net.minecraft.entity.effect.EntityLightningBolt;
import net.minecraft.entity.effect.EntityWeatherEffect;
import net.minecraft.entity.player.EntityPlayerMP;
import net.minecraft.entity.projectile.EntityEgg;
import net.minecraft.entity.projectile.EntityFishHook;
import org.spongepowered.api.data.type.HorseColors;
import org.spongepowered.api.data.type.HorseStyles;
import org.spongepowered.api.data.type.HorseVariants;
import org.spongepowered.api.data.type.OcelotTypes;
import org.spongepowered.api.data.type.RabbitTypes;
import org.spongepowered.api.data.type.SkeletonTypes;
import org.spongepowered.api.entity.EntityType;
import org.spongepowered.api.entity.EntityTypes;
import org.spongepowered.api.registry.ExtraClassCatalogRegistryModule;
import org.spongepowered.api.registry.util.CustomCatalogRegistration;
import org.spongepowered.api.registry.util.RegisterCatalog;
import org.spongepowered.api.text.translation.Translation;
import org.spongepowered.common.SpongeImpl;
import org.spongepowered.common.entity.SpongeEntityConstants;
import org.spongepowered.common.entity.SpongeEntityType;
import org.spongepowered.common.entity.living.human.EntityHuman;
import org.spongepowered.common.registry.RegistryHelper;
import org.spongepowered.common.registry.SpongeAdditionalCatalogRegistryModule;
import org.spongepowered.common.text.translation.SpongeTranslation;
import java.util.Collection;
import java.util.Locale;
import java.util.Map;
import java.util.Optional;
public final class EntityTypeRegistryModule implements ExtraClassCatalogRegistryModule<EntityType, Entity>, SpongeAdditionalCatalogRegistryModule<EntityType> {
@RegisterCatalog(EntityTypes.class)
protected final Map<String, EntityType> entityTypeMappings = Maps.newHashMap();
public final Map<Class<? extends Entity>, EntityType> entityClassToTypeMappings = Maps.newHashMap();
public static EntityTypeRegistryModule getInstance() {
return Holder.INSTANCE;
}
public void registerEntityType(EntityType type) {
this.entityTypeMappings.put(type.getId(), type);
this.entityClassToTypeMappings.put(((SpongeEntityType) type).entityClass, type);
}
@Override
public Optional<EntityType> getById(String id) {
if (!checkNotNull(id).contains(":")) {
id = "minecraft:" + id;
}
return Optional.ofNullable(this.entityTypeMappings.get(id.toLowerCase(Locale.ENGLISH)));
}
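// Illustrative note (not from the original source): getById() normalises bare ids by
// prefixing "minecraft:", so once registerCatalogs() has re-keyed the map by catalog id,
// a call such as getById("creeper") is assumed to resolve the same entry as
// getById("minecraft:creeper"), provided the SpongeEntityType ids follow that convention.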
@Override
public Collection<EntityType> getAll() {
return ImmutableList.copyOf(this.entityTypeMappings.values());
}
@Override
public void registerDefaults() {
this.entityTypeMappings.put("item", newEntityTypeFromName("Item"));
this.entityTypeMappings.put("experience_orb", newEntityTypeFromName("XPOrb"));
this.entityTypeMappings.put("leash_hitch", newEntityTypeFromName("LeashKnot"));
this.entityTypeMappings.put("painting", newEntityTypeFromName("Painting"));
this.entityTypeMappings.put("arrow", newEntityTypeFromName("Arrow"));
this.entityTypeMappings.put("snowball", newEntityTypeFromName("Snowball"));
this.entityTypeMappings.put("fireball", newEntityTypeFromName("LargeFireball", "Fireball"));
this.entityTypeMappings.put("small_fireball", newEntityTypeFromName("SmallFireball"));
this.entityTypeMappings.put("ender_pearl", newEntityTypeFromName("ThrownEnderpearl"));
this.entityTypeMappings.put("eye_of_ender", newEntityTypeFromName("EyeOfEnderSignal"));
this.entityTypeMappings.put("splash_potion", newEntityTypeFromName("ThrownPotion"));
this.entityTypeMappings.put("thrown_exp_bottle", newEntityTypeFromName("ThrownExpBottle"));
this.entityTypeMappings.put("item_frame", newEntityTypeFromName("ItemFrame"));
this.entityTypeMappings.put("wither_skull", newEntityTypeFromName("WitherSkull"));
this.entityTypeMappings.put("primed_tnt", newEntityTypeFromName("PrimedTnt"));
this.entityTypeMappings.put("falling_block", newEntityTypeFromName("FallingSand"));
this.entityTypeMappings.put("firework", newEntityTypeFromName("FireworksRocketEntity"));
this.entityTypeMappings.put("armor_stand", newEntityTypeFromName("ArmorStand"));
this.entityTypeMappings.put("boat", newEntityTypeFromName("Boat"));
this.entityTypeMappings.put("rideable_minecart", newEntityTypeFromName("MinecartRideable"));
this.entityTypeMappings.put("chested_minecart", newEntityTypeFromName("MinecartChest"));
this.entityTypeMappings.put("furnace_minecart", newEntityTypeFromName("MinecartFurnace"));
this.entityTypeMappings.put("tnt_minecart", newEntityTypeFromName("MinecartTnt", "MinecartTNT"));
this.entityTypeMappings.put("hopper_minecart", newEntityTypeFromName("MinecartHopper"));
this.entityTypeMappings.put("mob_spawner_minecart", newEntityTypeFromName("MinecartSpawner"));
this.entityTypeMappings.put("commandblock_minecart", newEntityTypeFromName("MinecartCommandBlock"));
this.entityTypeMappings.put("creeper", newEntityTypeFromName("Creeper"));
this.entityTypeMappings.put("skeleton", newEntityTypeFromName("Skeleton"));
this.entityTypeMappings.put("spider", newEntityTypeFromName("Spider"));
this.entityTypeMappings.put("giant", newEntityTypeFromName("Giant"));
this.entityTypeMappings.put("zombie", newEntityTypeFromName("Zombie"));
this.entityTypeMappings.put("slime", newEntityTypeFromName("Slime"));
this.entityTypeMappings.put("ghast", newEntityTypeFromName("Ghast"));
this.entityTypeMappings.put("pig_zombie", newEntityTypeFromName("PigZombie"));
this.entityTypeMappings.put("enderman", newEntityTypeFromName("Enderman"));
this.entityTypeMappings.put("cave_spider", newEntityTypeFromName("CaveSpider"));
this.entityTypeMappings.put("silverfish", newEntityTypeFromName("Silverfish"));
this.entityTypeMappings.put("blaze", newEntityTypeFromName("Blaze"));
this.entityTypeMappings.put("magma_cube", newEntityTypeFromName("LavaSlime"));
this.entityTypeMappings.put("ender_dragon", newEntityTypeFromName("EnderDragon"));
this.entityTypeMappings.put("wither", newEntityTypeFromName("WitherBoss"));
this.entityTypeMappings.put("bat", newEntityTypeFromName("Bat"));
this.entityTypeMappings.put("witch", newEntityTypeFromName("Witch"));
this.entityTypeMappings.put("endermite", newEntityTypeFromName("Endermite"));
this.entityTypeMappings.put("guardian", newEntityTypeFromName("Guardian"));
this.entityTypeMappings.put("pig", newEntityTypeFromName("Pig"));
this.entityTypeMappings.put("sheep", newEntityTypeFromName("Sheep"));
this.entityTypeMappings.put("cow", newEntityTypeFromName("Cow"));
this.entityTypeMappings.put("chicken", newEntityTypeFromName("Chicken"));
this.entityTypeMappings.put("squid", newEntityTypeFromName("Squid"));
this.entityTypeMappings.put("wolf", newEntityTypeFromName("Wolf"));
this.entityTypeMappings.put("mushroom_cow", newEntityTypeFromName("MushroomCow"));
this.entityTypeMappings.put("snowman", newEntityTypeFromName("SnowMan"));
this.entityTypeMappings.put("ocelot", newEntityTypeFromName("Ocelot", "Ozelot"));
this.entityTypeMappings.put("iron_golem", newEntityTypeFromName("VillagerGolem"));
this.entityTypeMappings.put("horse", newEntityTypeFromName("EntityHorse"));
this.entityTypeMappings.put("rabbit", newEntityTypeFromName("Rabbit"));
this.entityTypeMappings.put("villager", newEntityTypeFromName("Villager"));
this.entityTypeMappings.put("ender_crystal", newEntityTypeFromName("EnderCrystal"));
this.entityTypeMappings.put("egg", new SpongeEntityType(-1, "Egg", EntityEgg.class, new SpongeTranslation("item.egg.name")));
this.entityTypeMappings.put("fishing_hook", new SpongeEntityType(-2, "FishingHook", EntityFishHook.class, new SpongeTranslation("item.fishingRod.name")));
this.entityTypeMappings.put("lightning", new SpongeEntityType(-3, "Lightning", EntityLightningBolt.class, null));
this.entityTypeMappings.put("weather", new SpongeEntityType(-4, "Weather", EntityWeatherEffect.class, new SpongeTranslation("soundCategory.weather")));
this.entityTypeMappings.put("player", new SpongeEntityType(-5, "Player", EntityPlayerMP.class, new SpongeTranslation("soundCategory.player")));
this.entityTypeMappings.put("complex_part", new SpongeEntityType(-6, "ComplexPart", EntityDragonPart.class, null));
this.entityTypeMappings.put("human", registerCustomEntity(EntityHuman.class, "Human", -7, null));
}
@SuppressWarnings("unchecked")
private SpongeEntityType newEntityTypeFromName(String spongeName, String mcName) {
return new SpongeEntityType(EntityList.stringToIDMapping.get(mcName), spongeName,
EntityList.stringToClassMapping.get(mcName),
new SpongeTranslation("entity." + mcName + ".name"));
}
private SpongeEntityType newEntityTypeFromName(String name) {
return newEntityTypeFromName(name, name);
}
@SuppressWarnings("unchecked")
private SpongeEntityType registerCustomEntity(Class<? extends Entity> entityClass, String entityName, int entityId, Translation translation) {
String entityFullName = String.format("%s.%s", SpongeImpl.ECOSYSTEM_NAME, entityName);
EntityList.classToStringMapping.put(entityClass, entityFullName);
EntityList.stringToClassMapping.put(entityFullName, entityClass);
return new SpongeEntityType(entityId, entityName, SpongeImpl.ECOSYSTEM_NAME, entityClass, translation);
}
@CustomCatalogRegistration
public void registerCatalogs() {
registerDefaults();
RegistryHelper.mapFields(EntityTypes.class, fieldName -> {
if (fieldName.equals("UNKNOWN")) {
return SpongeEntityType.UNKNOWN;
}
EntityType entityType = this.entityTypeMappings.get(fieldName.toLowerCase(Locale.ENGLISH));
this.entityClassToTypeMappings.put(((SpongeEntityType) entityType).entityClass, entityType);
// remove old mapping
this.entityTypeMappings.remove(fieldName.toLowerCase(Locale.ENGLISH));
// add new mapping with minecraft id
this.entityTypeMappings.put(entityType.getId(), entityType);
return entityType;
});
this.entityTypeMappings.put("minecraft:ozelot", this.entityTypeMappings.get("minecraft:ocelot"));
RegistryHelper.mapFields(SkeletonTypes.class, SpongeEntityConstants.SKELETON_TYPES);
RegistryHelper.mapFields(HorseColors.class, SpongeEntityConstants.HORSE_COLORS);
RegistryHelper.mapFields(HorseVariants.class, SpongeEntityConstants.HORSE_VARIANTS);
RegistryHelper.mapFields(HorseStyles.class, SpongeEntityConstants.HORSE_STYLES);
RegistryHelper.mapFields(OcelotTypes.class, SpongeEntityConstants.OCELOT_TYPES);
RegistryHelper.mapFields(RabbitTypes.class, SpongeEntityConstants.RABBIT_TYPES);
}
@Override
public boolean allowsApiRegistration() {
return false;
}
@Override
public void registerAdditionalCatalog(EntityType extraCatalog) {
this.entityTypeMappings.put(extraCatalog.getId(), extraCatalog);
this.entityClassToTypeMappings.put(((SpongeEntityType) extraCatalog).entityClass, extraCatalog);
}
@Override
public boolean hasRegistrationFor(Class<? extends Entity> mappedClass) {
return false;
}
@Override
public EntityType getForClass(Class<? extends Entity> clazz) {
EntityType type = this.entityClassToTypeMappings.get(clazz);
if (type == null) {
type = EntityTypes.UNKNOWN;
}
return type;
}
EntityTypeRegistryModule() {
}
private static final class Holder {
static final EntityTypeRegistryModule INSTANCE = new EntityTypeRegistryModule();
}
public Optional<EntityType> getEntity(Class<? extends org.spongepowered.api.entity.Entity> entityClass) {
for (EntityType type : this.entityTypeMappings.values()) {
if (entityClass.isAssignableFrom(type.getEntityClass())) {
return Optional.of(type);
}
}
return Optional.empty();
}
}
|
|
package com.ryactiv.ryactivsdk.sqlite;
import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.database.SQLException;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteOpenHelper;
import android.database.sqlite.SQLiteStatement;
import android.util.Log;
import java.text.ParseException;
import java.util.ArrayList;
public abstract class SQLiteManager extends SQLiteOpenHelper implements SQLiteConstants {
public static final String TAG_ERROR = "DB ERROR";
public SQLiteDatabase database;
public Cursor cursor;
protected String[] tables;
protected String[] createTables;
public SQLiteManager(Context context, String name, int version) {
super(context, name, null, version);
initialize();
}
public abstract void initialize();
@Override
public void onCreate(SQLiteDatabase db) {
createTables(db);
}
@Override
public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) {
dropTables(db);
createTables(db);
}
protected void createTables(SQLiteDatabase db) {
for (String query: createTables) {
db.execSQL(query);
}
}
protected void dropTables(SQLiteDatabase db) {
for (int i = tables.length; --i >= 0;) {
db.execSQL("DROP TABLE IF EXISTS " + tables[i]);
}
}
public void connect(int option) {
database = (option == READABLE) ? getReadableDatabase() : getWritableDatabase();
}
public void disconnect() {
database.close();
}
public Cursor query(String query) {
return database.rawQuery(query, null);
}
public Cursor query(String query, String[] whereArgs) {
return database.rawQuery(query, whereArgs);
}
public Cursor query(String table, String[] columns, String orderBy) {
return database.query(table, columns, null, null, null, null, orderBy);
}
public Cursor query(String table, String[] columns, String where, String[] whereArgs) {
return database.query(table, columns, where, whereArgs, null, null, null);
}
public Cursor query(String table, String[] columns, String where, String[] whereArgs, String orderBy) {
return database.query(table, columns, where, whereArgs, null, null, orderBy);
}
public Cursor query(String table, String[] columns, String where, String[] whereArgs,
String groupBy, String having, String orderBy) {
return database.query(table, columns, where, whereArgs, groupBy, having, orderBy);
}
public Cursor query(String table, String[] columns, String where, String[] whereArgs,
String groupBy, String having, String orderBy, String limit) {
return database.query(table, columns, where, whereArgs, groupBy, having, orderBy, limit);
}
public Cursor query(boolean distinct, String table, String[] columns, String where, String[] whereArgs,
String groupBy, String having, String orderBy, String limit) {
return database.query(distinct, table, columns, where, whereArgs, groupBy, having, orderBy, limit);
}
public long insert(String table, String[] columns, String[] values) {
connect(WRITABLE);
ContentValues contentValues = new ContentValues();
for (int i = 0; i < columns.length; i++) {
contentValues.put(columns[i], values[i]);
}
long id = database.insertWithOnConflict(table, null, contentValues, SQLiteDatabase.CONFLICT_IGNORE);
disconnect();
return id;
}
public int update(String table, String[] columns, String[] values) {
return update(table, columns, values, null, null);
}
public int update(String table, String[] columns, String[] values, String where) {
return update(table, columns, values, where, null);
}
public int update(String table, String[] columns, String[] values, String where, String[] whereArgs) {
connect(WRITABLE);
ContentValues contentValues = new ContentValues();
for (int i = 0; i < columns.length; i++) {
contentValues.put(columns[i], values[i]);
}
int rowsAffected = database.update(table, contentValues, where, whereArgs);
disconnect();
return rowsAffected;
}
public int delete(String table) {
return delete(table, null, null);
}
public int delete(String table, String where) {
return delete(table, where, null);
}
public int delete(String table, String where, String[] whereArgs) {
connect(WRITABLE);
int rowsAffected = database.delete(table, where, whereArgs);
disconnect();
return rowsAffected;
}
public boolean transaction(String[] sentences, String[][] whereArgs) {
boolean success;
connect(WRITABLE);
try {
database.beginTransaction();
for (int i = 0; i < sentences.length; i++) {
String sqlQuery = sentences[i];
SQLiteStatement query = database.compileStatement(sqlQuery);
query.bindAllArgsAsStrings(whereArgs[i]);
query.execute();
query.close();
}
database.setTransactionSuccessful();
success = true;
} catch(SQLException e) {
success = false;
Log.i(TAG_ERROR, e.getMessage());
} finally {
database.endTransaction();
}
disconnect();
return success;
}
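// Usage sketch (illustrative; the table and column names below are assumptions): each
// SQL sentence is compiled, its string arguments bound, and the whole batch committed
// atomically, e.g.
//
// boolean ok = db.transaction(
//     new String[] { "INSERT INTO notes(title) VALUES (?)", "DELETE FROM notes WHERE _id = ?" },
//     new String[][] { { "Groceries" }, { "42" } });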
public ArrayList<ArrayList<Object>> execQuery(String query, int... types) {
return execQuery(query, null, types);
}
public ArrayList<ArrayList<Object>> execQuery(String query, String[] whereArgs, int... types) {
ArrayList<ArrayList<Object>> table = new ArrayList<>();
connect(READABLE);
cursor = (whereArgs != null) ? query(query, whereArgs) : query(query);
if (cursor.moveToFirst()) {
do {
ArrayList<Object> record = new ArrayList<>();
for (int i = 0; i < types.length; i++) {
record.add(getValue(types[i], i));
}
table.add(record);
} while(cursor.moveToNext());
}
cursor.close();
disconnect();
return table;
}
private Object getValue(int tipo, int indice) {
switch (tipo) {
case BOOLEAN:
return cursor.getInt(indice) == 1;
case BYTES:
return cursor.getBlob(indice);
case SHORT:
return cursor.getShort(indice);
case INT:
return cursor.getInt(indice);
case LONG:
return cursor.getLong(indice);
case FLOAT:
return cursor.getFloat(indice);
case DOUBLE:
return cursor.getDouble(indice);
case CHAR:
return cursor.getString(indice).charAt(0);
case STRING:
return cursor.getString(indice);
case DATE:
try {
return FORMAT_DATE.parse(cursor.getString(indice));
} catch (ParseException e) {
return null; // unparseable DATE value; avoid falling through to the next case
}
case TIME:
try {
return FORMAT_TIME.parse(cursor.getString(indice));
} catch (ParseException e) {
return null; // unparseable TIME value
}
case DATETIME:
try {
return FORMAT_DATETIME.parse(cursor.getString(indice));
} catch (ParseException e) {
return null; // unparseable DATETIME value
}
default:
return null;
}
}
public String[] getTables() {
return tables;
}
public String[] getCreateTables() {
return createTables;
}
}
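// Usage sketch (illustrative, not part of the original file): a concrete subclass is
// expected to populate the protected `tables` and `createTables` fields in initialize(),
// after which the generic query/insert/update/delete helpers can be used directly. The
// class, database and table names below are assumptions.
//
// public class NotesDatabase extends SQLiteManager {
//     public NotesDatabase(Context context) {
//         super(context, "notes.db", 1);
//     }
//     @Override
//     public void initialize() {
//         tables = new String[] { "notes" };
//         createTables = new String[] {
//             "CREATE TABLE notes (_id INTEGER PRIMARY KEY AUTOINCREMENT, title TEXT, body TEXT)" };
//     }
// }
//
// long rowId = new NotesDatabase(context)
//     .insert("notes", new String[] { "title", "body" }, new String[] { "Groceries", "Milk" });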
|
|
/**
* Copyright 2005-2014 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl2.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.rice.kew.engine.node.service.impl;
import org.apache.commons.collections.ComparatorUtils;
import org.kuali.rice.kew.doctype.bo.DocumentType;
import org.kuali.rice.kew.engine.RouteHelper;
import org.kuali.rice.kew.engine.node.Branch;
import org.kuali.rice.kew.engine.node.BranchState;
import org.kuali.rice.kew.engine.node.NodeGraphContext;
import org.kuali.rice.kew.engine.node.NodeGraphSearchCriteria;
import org.kuali.rice.kew.engine.node.NodeGraphSearchResult;
import org.kuali.rice.kew.engine.node.NodeMatcher;
import org.kuali.rice.kew.engine.node.NodeState;
import org.kuali.rice.kew.engine.node.ProcessDefinitionBo;
import org.kuali.rice.kew.engine.node.RouteNode;
import org.kuali.rice.kew.engine.node.RouteNodeInstance;
import org.kuali.rice.kew.engine.node.RouteNodeUtils;
import org.kuali.rice.kew.engine.node.dao.RouteNodeDAO;
import org.kuali.rice.kew.engine.node.service.RouteNodeService;
import org.kuali.rice.kew.routeheader.DocumentRouteHeaderValue;
import org.kuali.rice.kew.service.KEWServiceLocator;
import org.kuali.rice.krad.data.DataObjectService;
import org.kuali.rice.krad.data.PersistenceOption;
import org.springframework.beans.factory.annotation.Required;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
public class RouteNodeServiceImpl implements RouteNodeService {
protected final org.apache.log4j.Logger LOG = org.apache.log4j.Logger.getLogger(getClass());
public static final String REVOKED_NODE_INSTANCES_STATE_KEY = "NodeInstances.Revoked";
private static final Comparator NODE_INSTANCE_FORWARD_SORT = new NodeInstanceIdSorter();
private static final Comparator NODE_INSTANCE_BACKWARD_SORT =
ComparatorUtils.reversedComparator(NODE_INSTANCE_FORWARD_SORT);
private RouteHelper helper = new RouteHelper();
private RouteNodeDAO routeNodeDAO;
private DataObjectService dataObjectService;
public RouteNode save(RouteNode node) {
return dataObjectService.save(node);
}
public RouteNodeInstance save(RouteNodeInstance nodeInstance) {
return dataObjectService.save(nodeInstance);
}
public void save(NodeState nodeState) {
dataObjectService.save(nodeState);
}
public Branch save(Branch branch) {
return dataObjectService.save(branch);
}
public RouteNode findRouteNodeById(String nodeId) {
return dataObjectService.find(RouteNode.class,nodeId);
}
public RouteNodeInstance findRouteNodeInstanceById(String nodeInstanceId) {
return routeNodeDAO.findRouteNodeInstanceById(nodeInstanceId);
}
public RouteNodeInstance findRouteNodeInstanceById(String nodeInstanceId, DocumentRouteHeaderValue document) {
return RouteNodeUtils.findRouteNodeInstanceById(nodeInstanceId, document);
}
public List<RouteNodeInstance> getCurrentNodeInstances(String documentId) {
List<RouteNodeInstance> currentNodeInstances = getActiveNodeInstances(documentId);
if (currentNodeInstances.isEmpty()) {
currentNodeInstances = getTerminalNodeInstances(documentId);
}
return currentNodeInstances;
}
public List<RouteNodeInstance> getActiveNodeInstances(String documentId) {
return routeNodeDAO.getActiveNodeInstances(documentId);
}
public List<RouteNodeInstance> getActiveNodeInstances(DocumentRouteHeaderValue document) {
List<RouteNodeInstance> flattenedNodeInstances = getFlattenedNodeInstances(document, true);
List<RouteNodeInstance> activeNodeInstances = new ArrayList<RouteNodeInstance>();
for (RouteNodeInstance nodeInstance : flattenedNodeInstances) {
if (nodeInstance.isActive()) {
activeNodeInstances.add(nodeInstance);
}
}
return activeNodeInstances;
}
@Override
public List<String> getCurrentRouteNodeNames(String documentId) {
return routeNodeDAO.getCurrentRouteNodeNames(documentId);
}
@Override
public List<String> getActiveRouteNodeNames(String documentId) {
return routeNodeDAO.getActiveRouteNodeNames(documentId);
}
public List<RouteNodeInstance> getTerminalNodeInstances(String documentId) {
return routeNodeDAO.getTerminalNodeInstances(documentId);
}
@Override
public List<String> getTerminalRouteNodeNames(String documentId) {
return routeNodeDAO.getTerminalRouteNodeNames(documentId);
}
public List getInitialNodeInstances(String documentId) {
return routeNodeDAO.getInitialNodeInstances(documentId);
}
public NodeState findNodeState(Long nodeInstanceId, String key) {
return routeNodeDAO.findNodeState(nodeInstanceId, key);
}
public RouteNode findRouteNodeByName(String documentTypeId, String name) {
return routeNodeDAO.findRouteNodeByName(documentTypeId, name);
}
public List<RouteNode> findFinalApprovalRouteNodes(String documentTypeId) {
DocumentType documentType = KEWServiceLocator.getDocumentTypeService().findById(documentTypeId);
documentType = documentType.getRouteDefiningDocumentType();
return routeNodeDAO.findFinalApprovalRouteNodes(documentType.getDocumentTypeId());
}
public List findNextRouteNodesInPath(RouteNodeInstance nodeInstance, String nodeName) {
List<RouteNode> nodesInPath = new ArrayList<RouteNode>();
for (Iterator<RouteNode> iterator = nodeInstance.getRouteNode().getNextNodes().iterator(); iterator.hasNext();) {
RouteNode nextNode = iterator.next();
nodesInPath.addAll(findNextRouteNodesInPath(nodeName, nextNode, new HashSet<String>()));
}
return nodesInPath;
}
private List<RouteNode> findNextRouteNodesInPath(String nodeName, RouteNode node, Set<String> inspected) {
List<RouteNode> nextNodesInPath = new ArrayList<RouteNode>();
if (inspected.contains(node.getRouteNodeId())) {
return nextNodesInPath;
}
inspected.add(node.getRouteNodeId());
if (node.getRouteNodeName().equals(nodeName)) {
nextNodesInPath.add(node);
} else {
if (helper.isSubProcessNode(node)) {
ProcessDefinitionBo subProcess = node.getDocumentType().getNamedProcess(node.getRouteNodeName());
RouteNode subNode = subProcess.getInitialRouteNode();
if (subNode != null) {
nextNodesInPath.addAll(findNextRouteNodesInPath(nodeName, subNode, inspected));
}
}
for (Iterator<RouteNode> iterator = node.getNextNodes().iterator(); iterator.hasNext();) {
RouteNode nextNode = iterator.next();
nextNodesInPath.addAll(findNextRouteNodesInPath(nodeName, nextNode, inspected));
}
}
return nextNodesInPath;
}
public boolean isNodeInPath(DocumentRouteHeaderValue document, String nodeName) {
boolean isInPath = false;
Collection<RouteNodeInstance> activeNodes = getActiveNodeInstances(document.getDocumentId());
for (Iterator<RouteNodeInstance> iterator = activeNodes.iterator(); iterator.hasNext();) {
RouteNodeInstance nodeInstance = iterator.next();
List nextNodesInPath = findNextRouteNodesInPath(nodeInstance, nodeName);
isInPath = isInPath || !nextNodesInPath.isEmpty();
}
return isInPath;
}
public List findRouteNodeInstances(String documentId) {
return this.routeNodeDAO.findRouteNodeInstances(documentId);
}
public void setRouteNodeDAO(RouteNodeDAO dao) {
this.routeNodeDAO = dao;
}
public List findProcessNodeInstances(RouteNodeInstance process) {
return this.routeNodeDAO.findProcessNodeInstances(process);
}
public List<String> findPreviousNodeNames(String documentId) {
DocumentRouteHeaderValue document = KEWServiceLocator.getRouteHeaderService().getRouteHeader(documentId);
List<String> revokedIds = Collections.emptyList();
List<String> nodeNames = new ArrayList<String>();
if(document.getRootBranch() != null) {
String revoked = document.getRootBranch().getBranchState(REVOKED_NODE_INSTANCES_STATE_KEY) == null ? null : document.getRootBranch().getBranchState(REVOKED_NODE_INSTANCES_STATE_KEY).getValue();
if (revoked != null) {
revokedIds = Arrays.asList(revoked.split(","));
}
List <RouteNodeInstance> currentNodeInstances = KEWServiceLocator.getRouteNodeService().getCurrentNodeInstances(documentId);
List<RouteNodeInstance> nodeInstances = new ArrayList<RouteNodeInstance>();
for (RouteNodeInstance nodeInstance : currentNodeInstances) {
nodeInstances.addAll(nodeInstance.getPreviousNodeInstances());
}
while (!nodeInstances.isEmpty()) {
RouteNodeInstance nodeInstance = nodeInstances.remove(0);
if (!revokedIds.contains(nodeInstance.getRouteNodeInstanceId())) {
nodeNames.add(nodeInstance.getName());
}
nodeInstances.addAll(nodeInstance.getPreviousNodeInstances());
}
//reverse the order, because it was built last to first
Collections.reverse(nodeNames);
}
return nodeNames;
}
public List<String> findFutureNodeNames(String documentId) {
List currentNodeInstances = KEWServiceLocator.getRouteNodeService().getCurrentNodeInstances(documentId);
List<RouteNode> nodes = new ArrayList<RouteNode>();
for (Iterator iterator = currentNodeInstances.iterator(); iterator.hasNext();) {
RouteNodeInstance nodeInstance = (RouteNodeInstance) iterator.next();
nodes.addAll(nodeInstance.getRouteNode().getNextNodes());
}
List<String> nodeNames = new ArrayList<String>();
while (!nodes.isEmpty()) {
RouteNode node = nodes.remove(0);
if (!nodeNames.contains(node.getRouteNodeName())) {
nodeNames.add(node.getRouteNodeName());
}
nodes.addAll(node.getNextNodes());
}
return nodeNames;
}
public List<RouteNode> getFlattenedNodes(DocumentType documentType, boolean climbHierarchy) {
List<RouteNode> nodes = new ArrayList<RouteNode>();
if (!documentType.isRouteInherited() || climbHierarchy) {
for (Iterator iterator = documentType.getProcesses().iterator(); iterator.hasNext();) {
ProcessDefinitionBo process = (ProcessDefinitionBo) iterator.next();
nodes.addAll(getFlattenedNodes(process));
}
}
Collections.sort(nodes, new RouteNodeSorter());
return nodes;
}
public List<RouteNode> getFlattenedNodes(ProcessDefinitionBo process) {
Map<String, RouteNode> nodesMap = new HashMap<String, RouteNode>();
if (process.getInitialRouteNode() != null) {
flattenNodeGraph(nodesMap, process.getInitialRouteNode());
List<RouteNode> nodes = new ArrayList<RouteNode>(nodesMap.values());
Collections.sort(nodes, new RouteNodeSorter());
return nodes;
} else {
List<RouteNode> nodes = new ArrayList<RouteNode>();
nodes.add(new RouteNode());
return nodes;
}
}
/**
* Recursively walks the node graph and builds up the map. Uses a map because we will
* end up walking through duplicates, as is the case with Join nodes.
*/
private void flattenNodeGraph(Map<String, RouteNode> nodes, RouteNode node) {
if (node != null) {
if (nodes.containsKey(node.getRouteNodeName())) {
return;
}
nodes.put(node.getRouteNodeName(), node);
for (Iterator<RouteNode> iterator = node.getNextNodes().iterator(); iterator.hasNext();) {
RouteNode nextNode = iterator.next();
flattenNodeGraph(nodes, nextNode);
}
} else {
return;
}
}
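// Illustrative note (not from the original source): for a graph shaped
// Initial -> Split -> {BranchA, BranchB} -> Join -> Final, the Join node is reached once
// through each branch; because the map is keyed by route node name, the second visit
// returns early and each node appears in the flattened result exactly once.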
public List<RouteNodeInstance> getFlattenedNodeInstances(DocumentRouteHeaderValue document, boolean includeProcesses) {
List<RouteNodeInstance> nodeInstances = new ArrayList<RouteNodeInstance>();
Set<String> visitedNodeInstanceIds = new HashSet<String>();
for (Iterator<RouteNodeInstance> iterator = document.getInitialRouteNodeInstances().iterator(); iterator.hasNext();) {
RouteNodeInstance initialNodeInstance = iterator.next();
flattenNodeInstanceGraph(nodeInstances, visitedNodeInstanceIds, initialNodeInstance, includeProcesses);
}
return nodeInstances;
}
private void flattenNodeInstanceGraph(List<RouteNodeInstance> nodeInstances, Set<String> visitedNodeInstanceIds, RouteNodeInstance nodeInstance, boolean includeProcesses) {
if (nodeInstance != null) {
if (visitedNodeInstanceIds.contains(nodeInstance.getRouteNodeInstanceId())) {
return;
}
if (includeProcesses && nodeInstance.getProcess() != null) {
flattenNodeInstanceGraph(nodeInstances, visitedNodeInstanceIds, nodeInstance.getProcess(), includeProcesses);
}
visitedNodeInstanceIds.add(nodeInstance.getRouteNodeInstanceId());
nodeInstances.add(nodeInstance);
for (Iterator<RouteNodeInstance> iterator = nodeInstance.getNextNodeInstances().iterator(); iterator.hasNext();) {
RouteNodeInstance nextNodeInstance = iterator.next();
flattenNodeInstanceGraph(nodeInstances, visitedNodeInstanceIds, nextNodeInstance, includeProcesses);
}
}
}
public NodeGraphSearchResult searchNodeGraph(NodeGraphSearchCriteria criteria) {
NodeGraphContext context = new NodeGraphContext();
if (criteria.getSearchDirection() == NodeGraphSearchCriteria.SEARCH_DIRECTION_BACKWARD) {
searchNodeGraphBackward(context, criteria.getMatcher(), null, criteria.getStartingNodeInstances());
} else {
throw new UnsupportedOperationException("Search feature can only search backward currently.");
}
List exactPath = determineExactPath(context, criteria.getSearchDirection(), criteria.getStartingNodeInstances());
return new NodeGraphSearchResult(context.getCurrentNodeInstance(), exactPath);
}
private void searchNodeGraphBackward(NodeGraphContext context, NodeMatcher matcher, RouteNodeInstance previousNodeInstance, Collection nodeInstances) {
if (nodeInstances == null) {
return;
}
for (Iterator iterator = nodeInstances.iterator(); iterator.hasNext();) {
RouteNodeInstance nodeInstance = (RouteNodeInstance) iterator.next();
context.setPreviousNodeInstance(previousNodeInstance);
context.setCurrentNodeInstance(nodeInstance);
searchNodeGraphBackward(context, matcher);
if (context.getResultNodeInstance() != null) {
// we've located the node instance we're searching for, we're done
break;
}
}
}
private void searchNodeGraphBackward(NodeGraphContext context, NodeMatcher matcher) {
RouteNodeInstance current = context.getCurrentNodeInstance();
int numBranches = current.getNextNodeInstances().size();
// if this is a split node, we want to wait here, until all branches join back to us
if (numBranches > 1) {
// determine the number of branches that have joined back to the split thus far
Integer joinCount = (Integer)context.getSplitState().get(current.getRouteNodeInstanceId());
if (joinCount == null) {
joinCount = new Integer(0);
}
// if this split is not a leaf node we increment the count
if (context.getPreviousNodeInstance() != null) {
joinCount = new Integer(joinCount.intValue()+1);
}
context.getSplitState().put(current.getRouteNodeInstanceId(), joinCount);
// if not all branches have joined, stop and wait for other branches to join
if (joinCount.intValue() != numBranches) {
return;
}
}
if (matcher.isMatch(context)) {
context.setResultNodeInstance(current);
} else {
context.getVisited().put(current.getRouteNodeInstanceId(), current);
searchNodeGraphBackward(context, matcher, current, current.getPreviousNodeInstances());
}
}
public List<RouteNodeInstance> getActiveNodeInstances(DocumentRouteHeaderValue document, String nodeName) {
Collection<RouteNodeInstance> activeNodes = getActiveNodeInstances(document.getDocumentId());
List<RouteNodeInstance> foundNodes = new ArrayList<RouteNodeInstance>();
for (Iterator<RouteNodeInstance> iterator = activeNodes.iterator(); iterator.hasNext();) {
RouteNodeInstance nodeInstance = iterator.next();
if (nodeInstance.getName().equals(nodeName)) {
foundNodes.add(nodeInstance);
}
}
return foundNodes;
}
private List determineExactPath(NodeGraphContext context, int searchDirection, Collection<RouteNodeInstance> startingNodeInstances) {
List<RouteNodeInstance> exactPath = new ArrayList<RouteNodeInstance>();
if (context.getResultNodeInstance() == null) {
exactPath.addAll(context.getVisited().values());
} else {
determineExactPath(exactPath, new HashMap<String, RouteNodeInstance>(), startingNodeInstances, context.getResultNodeInstance());
}
if (NodeGraphSearchCriteria.SEARCH_DIRECTION_FORWARD == searchDirection) {
Collections.sort(exactPath, NODE_INSTANCE_BACKWARD_SORT);
} else {
Collections.sort(exactPath, NODE_INSTANCE_FORWARD_SORT);
}
return exactPath;
}
private void determineExactPath(List<RouteNodeInstance> exactPath, Map<String, RouteNodeInstance> visited, Collection<RouteNodeInstance> startingNodeInstances, RouteNodeInstance nodeInstance) {
if (nodeInstance == null) {
return;
}
if (visited.containsKey(nodeInstance.getRouteNodeInstanceId())) {
return;
}
visited.put(nodeInstance.getRouteNodeInstanceId(), nodeInstance);
exactPath.add(nodeInstance);
for (RouteNodeInstance startingNode : startingNodeInstances) {
if (startingNode.getRouteNodeInstanceId().equals(nodeInstance.getRouteNodeInstanceId())) {
return;
}
}
for (Iterator<RouteNodeInstance> iterator = nodeInstance.getNextNodeInstances().iterator(); iterator.hasNext(); ) {
RouteNodeInstance nextNodeInstance = iterator.next();
determineExactPath(exactPath, visited, startingNodeInstances, nextNodeInstance);
}
}
/**
* Sorts by RouteNodeId, which *roughly* corresponds to the order in which the nodes
* will be evaluated. This is for display purposes when rendering a flattened list of nodes.
*
* @author Kuali Rice Team ([email protected])
*/
private static class RouteNodeSorter implements Comparator {
public int compare(Object arg0, Object arg1) {
RouteNode rn1 = (RouteNode)arg0;
RouteNode rn2 = (RouteNode)arg1;
return rn1.getRouteNodeId().compareTo(rn2.getRouteNodeId());
}
}
private static class NodeInstanceIdSorter implements Comparator {
public int compare(Object arg0, Object arg1) {
RouteNodeInstance nodeInstance1 = (RouteNodeInstance)arg0;
RouteNodeInstance nodeInstance2 = (RouteNodeInstance)arg1;
return nodeInstance1.getRouteNodeInstanceId().compareTo(nodeInstance2.getRouteNodeInstanceId());
}
}
public void deleteByRouteNodeInstance(RouteNodeInstance routeNodeInstance){
//update the route node instance link table to cancel the relationship between the to-be-deleted instance and the previous node instances
routeNodeDAO.deleteLinksToPreNodeInstances(routeNodeInstance);
//delete the routeNodeInstance and its next node instances
routeNodeDAO.deleteRouteNodeInstancesHereAfter(routeNodeInstance);
}
public void deleteNodeStateById(Long nodeStateId){
routeNodeDAO.deleteNodeStateById(nodeStateId);
}
public void deleteNodeStates(List statesToBeDeleted){
routeNodeDAO.deleteNodeStates(statesToBeDeleted);
}
/**
* Records the revocation in the root BranchState of the document.
*/
public void revokeNodeInstance(DocumentRouteHeaderValue document, RouteNodeInstance nodeInstance) {
if (document == null) {
throw new IllegalArgumentException("Document must not be null.");
}
if (nodeInstance == null || nodeInstance.getRouteNodeInstanceId() == null) {
throw new IllegalArgumentException("In order to revoke a final approval node the node instance must be persisent and have an id.");
}
// get the initial node instance, the root branch is where we will store the state
Branch rootBranch = document.getRootBranch();
BranchState state = null;
if (rootBranch != null) {
state = rootBranch.getBranchState(REVOKED_NODE_INSTANCES_STATE_KEY);
}
if (state == null) {
state = new BranchState();
state.setKey(REVOKED_NODE_INSTANCES_STATE_KEY);
state.setValue("");
rootBranch.addBranchState(state);
}
if (state.getValue() == null) {
state.setValue("");
}
state.setValue(state.getValue() + nodeInstance.getRouteNodeInstanceId() + ",");
save(rootBranch);
}
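// Illustrative note (not from the original source): revocations accumulate in the root
// branch state as a trailing-comma separated list of node instance ids (for example
// "2001,2002," after two calls); getRevokedNodeInstances() below splits that value on
// "," to rebuild the corresponding RouteNodeInstance list.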
/**
* Queries the list of revoked node instances from the root BranchState of the Document
* and returns a List of revoked RouteNodeInstances.
*/
public List getRevokedNodeInstances(DocumentRouteHeaderValue document) {
if (document == null) {
throw new IllegalArgumentException("Document must not be null.");
}
List<RouteNodeInstance> revokedNodeInstances = new ArrayList<RouteNodeInstance>();
Branch rootBranch = document.getRootBranch();
BranchState state = null;
if (rootBranch != null) {
state = rootBranch.getBranchState(REVOKED_NODE_INSTANCES_STATE_KEY);
}
if (state == null || org.apache.commons.lang.StringUtils.isEmpty(state.getValue())) {
return revokedNodeInstances;
}
String[] revokedNodes = state.getValue().split(",");
for (int index = 0; index < revokedNodes.length; index++) {
String revokedNodeInstanceId = revokedNodes[index];
RouteNodeInstance revokedNodeInstance = findRouteNodeInstanceById(revokedNodeInstanceId);
if (revokedNodeInstance == null) {
LOG.warn("Could not locate revoked RouteNodeInstance with the given id: " + revokedNodeInstanceId);
} else {
revokedNodeInstances.add(revokedNodeInstance);
}
}
return revokedNodeInstances;
}
public DataObjectService getDataObjectService() {
return dataObjectService;
}
@Required
public void setDataObjectService(DataObjectService dataObjectService) {
this.dataObjectService = dataObjectService;
}
}
|
|
package org.knowm.xchange.ccex;
import java.math.BigDecimal;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.TimeZone;
import org.knowm.xchange.ccex.dto.account.CCEXBalance;
import org.knowm.xchange.ccex.dto.marketdata.CCEXBuySellData;
import org.knowm.xchange.ccex.dto.marketdata.CCEXGetorderbook;
import org.knowm.xchange.ccex.dto.marketdata.CCEXMarket;
import org.knowm.xchange.ccex.dto.marketdata.CCEXTrade;
import org.knowm.xchange.ccex.dto.marketdata.CCEXTrades;
import org.knowm.xchange.ccex.dto.ticker.CCEXPriceResponse;
import org.knowm.xchange.ccex.dto.trade.CCEXOpenorder;
import org.knowm.xchange.ccex.dto.trade.CCEXOrderhistory;
import org.knowm.xchange.currency.Currency;
import org.knowm.xchange.currency.CurrencyPair;
import org.knowm.xchange.dto.Order;
import org.knowm.xchange.dto.Order.OrderType;
import org.knowm.xchange.dto.account.Balance;
import org.knowm.xchange.dto.account.Wallet;
import org.knowm.xchange.dto.marketdata.OrderBook;
import org.knowm.xchange.dto.marketdata.Ticker;
import org.knowm.xchange.dto.marketdata.Trade;
import org.knowm.xchange.dto.marketdata.Trades;
import org.knowm.xchange.dto.marketdata.Trades.TradeSortType;
import org.knowm.xchange.dto.meta.CurrencyMetaData;
import org.knowm.xchange.dto.meta.CurrencyPairMetaData;
import org.knowm.xchange.dto.meta.ExchangeMetaData;
import org.knowm.xchange.dto.trade.LimitOrder;
import org.knowm.xchange.dto.trade.UserTrade;
public class CCEXAdapters {
private CCEXAdapters() {}
public static Trades adaptTrades(CCEXTrades cCEXTrades, CurrencyPair currencyPair) {
List<Trade> trades = new ArrayList<>();
List<CCEXTrade> cCEXTradestmp = cCEXTrades.getResult();
for (CCEXTrade cCEXTrade : cCEXTradestmp) {
trades.add(adaptCCEXPublicTrade(cCEXTrade, currencyPair));
}
return new Trades(trades, TradeSortType.SortByTimestamp);
}
public static Trade adaptCCEXPublicTrade(CCEXTrade cCEXTrade, CurrencyPair currencyPair) {
OrderType type =
cCEXTrade.getOrderType().equalsIgnoreCase("BUY") ? OrderType.BID : OrderType.ASK;
Date timestamp = stringToDate(cCEXTrade.getTimestamp());
Trade trade =
new Trade(
type,
cCEXTrade.getQuantity(),
currencyPair,
cCEXTrade.getPrice(),
timestamp,
cCEXTrade.getId());
return trade;
}
/**
* Adapts a CCEXGetorderbook to an XChange OrderBook object.
*
* @param ccexOrderBook the raw C-Cex order book
* @param currencyPair the currency pair (e.g. BTC/USD)
* @return the adapted OrderBook
*/
public static OrderBook adaptOrderBook(
CCEXGetorderbook ccexOrderBook, CurrencyPair currencyPair) {
List<LimitOrder> asks =
createOrders(currencyPair, Order.OrderType.ASK, ccexOrderBook.getAsks());
List<LimitOrder> bids =
createOrders(currencyPair, Order.OrderType.BID, ccexOrderBook.getBids());
Date date = new Date();
return new OrderBook(date, asks, bids);
}
public static List<LimitOrder> createOrders(
CurrencyPair currencyPair, Order.OrderType orderType, List<CCEXBuySellData> orders) {
List<LimitOrder> limitOrders = new ArrayList<>();
if (orders == null) {
return new ArrayList<>();
}
for (CCEXBuySellData ask : orders) {
limitOrders.add(createOrder(currencyPair, ask, orderType));
}
return limitOrders;
}
public static LimitOrder createOrder(
CurrencyPair currencyPair, CCEXBuySellData priceAndAmount, Order.OrderType orderType) {
return new LimitOrder(
orderType, priceAndAmount.getQuantity(), currencyPair, "", null, priceAndAmount.getRate());
}
public static CurrencyPair adaptCurrencyPair(CCEXMarket product) {
return new CurrencyPair(product.getBaseCurrency(), product.getMarketCurrency());
}
public static ExchangeMetaData adaptToExchangeMetaData(
ExchangeMetaData exchangeMetaData, List<CCEXMarket> products) {
Map<CurrencyPair, CurrencyPairMetaData> currencyPairs = new HashMap<>();
Map<Currency, CurrencyMetaData> currencies = new HashMap<>();
for (CCEXMarket product : products) {
BigDecimal minSize = product.getMinTradeSize();
CurrencyPairMetaData cpmd = new CurrencyPairMetaData(null, minSize, null, 0);
CurrencyPair pair = adaptCurrencyPair(product);
currencyPairs.put(pair, cpmd);
currencies.put(pair.base, null);
currencies.put(pair.counter, null);
}
return new ExchangeMetaData(currencyPairs, currencies, null, null, true);
}
public static CurrencyPair adaptCurrencyPair(String pair) {
final String[] currencies = pair.toUpperCase().split("-");
return new CurrencyPair(currencies[0].toUpperCase(), currencies[1].toUpperCase());
}
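// Illustrative note (not from the original source): a pair string such as "btc-usd" is
// upper-cased and split on "-", yielding new CurrencyPair("BTC", "USD").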
public static Date stringToDate(String dateString) {
try {
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
sdf.setTimeZone(TimeZone.getTimeZone("UTC"));
return sdf.parse(dateString);
} catch (ParseException e) {
return new Date(0);
}
}
public static Wallet adaptWallet(List<CCEXBalance> balances) {
List<Balance> wallets = new ArrayList<>(balances.size());
for (CCEXBalance balance : balances) {
wallets.add(
new Balance(
Currency.getInstance(balance.getCurrency().toUpperCase()),
balance.getBalance(),
balance.getAvailable(),
balance.getBalance().subtract(balance.getAvailable()).subtract(balance.getPending()),
BigDecimal.ZERO,
BigDecimal.ZERO,
BigDecimal.ZERO,
balance.getPending()));
}
return new Wallet(wallets);
}
public static List<LimitOrder> adaptOpenOrders(List<CCEXOpenorder> cCexOpenOrders) {
List<LimitOrder> openOrders = new ArrayList<>();
for (CCEXOpenorder order : cCexOpenOrders) {
openOrders.add(adaptOpenOrder(order));
}
return openOrders;
}
public static LimitOrder adaptOpenOrder(CCEXOpenorder cCEXOpenOrder) {
OrderType type =
cCEXOpenOrder.getOrderType().equalsIgnoreCase("LIMIT_SELL") ? OrderType.ASK : OrderType.BID;
String[] currencies = cCEXOpenOrder.getExchange().split("-");
CurrencyPair pair = new CurrencyPair(currencies[1], currencies[0]);
return new LimitOrder(
type,
cCEXOpenOrder.getQuantityRemaining(),
pair,
cCEXOpenOrder.getOrderUuid(),
null,
cCEXOpenOrder.getLimit());
}
public static List<UserTrade> adaptUserTrades(List<CCEXOrderhistory> cCEXOrderhistory) {
List<UserTrade> trades = new ArrayList<>();
for (CCEXOrderhistory cCEXTrade : cCEXOrderhistory) {
trades.add(adaptUserTrade(cCEXTrade));
}
return trades;
}
public static UserTrade adaptUserTrade(CCEXOrderhistory trade) {
String[] currencies = trade.getExchange().split("-");
CurrencyPair currencyPair = new CurrencyPair(currencies[1], currencies[0]);
OrderType orderType =
trade.getOrderType().equalsIgnoreCase("LIMIT_BUY") ? OrderType.BID : OrderType.ASK;
BigDecimal amount = trade.getQuantity().subtract(trade.getQuantityRemaining());
Date date = CCEXUtils.toDate(trade.getTimeStamp());
String orderId = String.valueOf(trade.getOrderUuid());
BigDecimal price = trade.getPricePerUnit();
if (price == null) {
price = trade.getLimit();
}
return new UserTrade(
orderType,
amount,
currencyPair,
price,
date,
orderId,
orderId,
trade.getCommission(),
currencyPair.counter);
}
public static Ticker adaptTicker(CCEXPriceResponse cCEXTicker, CurrencyPair currencyPair) {
BigDecimal last = cCEXTicker.getLastbuy();
BigDecimal bid = cCEXTicker.getBuy();
BigDecimal ask = cCEXTicker.getSell();
BigDecimal high = cCEXTicker.getHigh();
BigDecimal low = cCEXTicker.getLow();
BigDecimal volume = cCEXTicker.getBuysupport();
Date timestamp = new Date(cCEXTicker.getUpdated());
return new Ticker.Builder()
.currencyPair(currencyPair)
.last(last)
.bid(bid)
.ask(ask)
.high(high)
.low(low)
.volume(volume)
.timestamp(timestamp)
.build();
}
}
|
|
/*
*
* Copyright 2014 Jules White
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.magnum.dataup;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicLong;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.magnum.dataup.model.Video;
import org.magnum.dataup.model.VideoStatus;
import org.magnum.dataup.model.VideoStatus.VideoState;
import org.springframework.data.rest.webmvc.ResourceNotFoundException;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestPart;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.context.request.RequestContextHolder;
import org.springframework.web.context.request.ServletRequestAttributes;
import org.springframework.web.multipart.MultipartFile;
import retrofit.http.Multipart;
@Controller
public class VideoSvcController {
private Map<Long, Video> videos = new HashMap<Long, Video>();
private static final AtomicLong currentId = new AtomicLong(0L);
private VideoFileManager videoDataMgr;
@RequestMapping(value = VideoSvcApi.VIDEO_SVC_PATH, method = RequestMethod.GET)
public @ResponseBody Collection<Video> GetVideoList() {
System.out.println("GetVideoList");
List<Video> videoList = new ArrayList<Video>(videos.values());
return videoList;
}
@RequestMapping(value = VideoSvcApi.VIDEO_SVC_PATH, method = RequestMethod.POST)
public @ResponseBody Video AddVideo(@RequestBody Video video) {
System.out.println("AddVideo: video.title - " + video.getTitle());
if (video.getId() == 0) {
long id = getNextId();
String dataUrl = getDataUrl(id);
video.setId(id);
video.setDataUrl(dataUrl);
}
videos.put(video.getId(), video);
return video;
}
@Multipart
@RequestMapping(value = VideoSvcApi.VIDEO_DATA_PATH, method = RequestMethod.POST)
public @ResponseBody VideoStatus AddVideoData(
@PathVariable(VideoSvcApi.ID_PARAMETER) long videoId,
@RequestPart(VideoSvcApi.DATA_PARAMETER) MultipartFile videoData,
HttpServletResponse response) throws IOException {
System.out.println("AddVideoData: videoId - " + videoId);
Video video = videos.get(videoId);
if (video != null) {
videoDataMgr = VideoFileManager.get();
videoDataMgr.saveVideoData(video, videoData.getInputStream());
VideoStatus status = new VideoStatus(VideoState.READY);
return status;
} else {
throw new ResourceNotFoundException();
}
}
@RequestMapping(value = VideoSvcApi.VIDEO_DATA_PATH, method = RequestMethod.GET)
public @ResponseBody void GetVideoData(
@PathVariable(VideoSvcApi.ID_PARAMETER) long videoId,
HttpServletResponse response) throws IOException {
System.out.println("GetVideoData: videoId - " + videoId);
Video video = videos.get(videoId);
if (video == null) {
throw new ResourceNotFoundException();
}
videoDataMgr = VideoFileManager.get();
if (videoDataMgr.hasVideoData(video)) {
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
videoDataMgr.copyVideoData(video, outputStream);
response.getOutputStream().write(outputStream.toByteArray());
} else {
throw new ResourceNotFoundException();
}
}
@RequestMapping(value = VideoSvcApi.VIDEO_RATING_PATH, method = RequestMethod.POST)
public @ResponseBody Video SetVideoRating(
@PathVariable(VideoSvcApi.ID_PARAMETER) long videoId,
@RequestBody float rating, HttpServletResponse response)
throws IOException {
System.out.println("SetVideoRating: videoId - " + videoId
+ " rating - " + rating);
Video video = videos.get(videoId);
if (video == null) {
throw new ResourceNotFoundException();
}
videoDataMgr = VideoFileManager.get();
if (videoDataMgr.hasVideoData(video)) {
video.setRating(rating);
return video;
} else {
throw new ResourceNotFoundException();
}
}
@RequestMapping(value = VideoSvcApi.VIDEO_DOWNLOAD_PATH, method = RequestMethod.GET)
public @ResponseBody void DownloadVideo(
@PathVariable(VideoSvcApi.ID_PARAMETER) long videoId,
HttpServletResponse response) throws IOException {
System.out.println("DownloadVideo: videoId - " + videoId);
Video video = videos.get(videoId);
if (video == null) {
throw new ResourceNotFoundException();
}
videoDataMgr = VideoFileManager.get();
if (videoDataMgr.hasVideoData(video)) {
ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
videoDataMgr.copyVideoData(video, byteArrayOutputStream);
response.setContentType("application/octet-stream");
response.setContentLength(byteArrayOutputStream.size());
response.setHeader(
"Content-Disposition",
String.format("attachment; filename=\"%s.mp4\"",
video.getTitle()));
OutputStream outStream = response.getOutputStream();
byteArrayOutputStream.writeTo(outStream);
outStream.close();
} else {
throw new ResourceNotFoundException();
}
}
private String getUrlBaseForLocalServer() {
HttpServletRequest request = ((ServletRequestAttributes) RequestContextHolder
.getRequestAttributes()).getRequest();
String base = "http://"
+ request.getServerName()
+ ((request.getServerPort() != 80) ? ":"
+ request.getServerPort() : "");
return base;
}
private String getDataUrl(long videoId) {
String url = getUrlBaseForLocalServer() + "/video/" + videoId + "/data";
return url;
}
private Long getNextId() {
return currentId.incrementAndGet();
}
}
|
|
package com.emerchantpay.gateway;
import java.io.UnsupportedEncodingException;
import java.net.MalformedURLException;
import java.security.NoSuchAlgorithmException;
import java.util.HashMap;
import java.util.Map;
import com.emerchantpay.gateway.api.RequestBuilder;
import com.emerchantpay.gateway.api.constants.Endpoints;
import com.emerchantpay.gateway.api.constants.Environments;
import com.emerchantpay.gateway.api.constants.ErrorCodes;
import com.emerchantpay.gateway.api.constants.TransactionTypes;
import com.emerchantpay.gateway.api.exceptions.GenesisException;
import com.emerchantpay.gateway.model.Notification;
import com.emerchantpay.gateway.util.Configuration;
import com.emerchantpay.gateway.util.SHA1Hasher;
import com.emerchantpay.gateway.util.StringUtils;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.isA;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
public class NotificationTest {
private NotificationGateway notificationGtw;
private Notification notification;
private Configuration configuration;
private Map<String, String> notificationParams;
private RequestBuilder expectedResponse;
@Before
public void createNotification() throws MalformedURLException, UnsupportedEncodingException,
NoSuchAlgorithmException {
notificationGtw = mock(NotificationGateway.class);
notification = mock(Notification.class);
expectedResponse = mock(RequestBuilder.class);
doNothing().when(notificationGtw).parseNotification(isA(Map.class));
doNothing().when(notificationGtw).initReconciliation();
doNothing().when(notificationGtw).generateResponse();
//Create test configuration
configuration = new Configuration(Environments.STAGING, Endpoints.EMERCHANTPAY);
configuration.setUsername("test");
configuration.setPassword("test");
configuration.setToken("test");
//Create test notification params
notificationParams = new HashMap<String, String>();
notificationParams.put("transaction_type", TransactionTypes.SALE_3D);
notificationParams.put("terminal_token", configuration.getToken());
notificationParams.put("status", "approved");
notificationParams.put("amount", "10");
notificationParams.put("eci", "05");
notificationParams.put("avs_response_code", "5I");
notificationParams.put("avs_response_text", "Response+provided+by+issuer+processor%3B+ Address+information+not+verified");
}
@Test
public void testAPINotification() throws UnsupportedEncodingException, NoSuchAlgorithmException {
when(notificationGtw.getNotification()).thenReturn(notification);
when(notificationGtw.isAuthentic()).thenReturn(true);
when(notificationGtw.isApiNotification()).thenReturn(true);
when(notificationGtw.isWPFNotification()).thenReturn(false);
when(notificationGtw.getResponse()).thenReturn(expectedResponse);
assertTrue(notificationGtw.isAuthentic());
assertTrue(notificationGtw.isApiNotification());
assertFalse(notificationGtw.isWPFNotification());
assertEquals(notificationGtw.getResponse(), expectedResponse);
verify(notificationGtw).isAuthentic();
verify(notificationGtw).isApiNotification();
verify(notificationGtw).isWPFNotification();
verify(notificationGtw).getResponse();
verifyNoMoreInteractions(notificationGtw);
}
@Test
public void testAPINotificationParams() throws UnsupportedEncodingException, NoSuchAlgorithmException {
String uniqueId = new StringUtils().generateUID();
String transactionId = new StringUtils().generateUID();
notificationParams.put("unique_id", uniqueId);
notificationParams.put("signature", SHA1Hasher.SHA1(uniqueId + configuration.getPassword()));
NotificationGateway notification = new NotificationGateway(configuration, notificationParams);
assertTrue(notification.isAuthentic());
assertTrue(notification.isApiNotification());
}
@Test
public void testWPFNotification() throws UnsupportedEncodingException, NoSuchAlgorithmException {
when(notificationGtw.getNotification()).thenReturn(notification);
when(notificationGtw.isAuthentic()).thenReturn(true);
when(notificationGtw.isApiNotification()).thenReturn(false);
when(notificationGtw.isWPFNotification()).thenReturn(true);
when(notificationGtw.getResponse()).thenReturn(expectedResponse);
assertEquals(notificationGtw.getNotification(), notification);
assertTrue(notificationGtw.isAuthentic());
assertFalse(notificationGtw.isApiNotification());
assertTrue(notificationGtw.isWPFNotification());
assertEquals(notificationGtw.getResponse(), expectedResponse);
verify(notificationGtw).getNotification();
verify(notificationGtw).isAuthentic();
verify(notificationGtw).isApiNotification();
verify(notificationGtw).isWPFNotification();
verify(notificationGtw).getResponse();
verifyNoMoreInteractions(notificationGtw);
}
@Test
public void testWPFNotificationParams() throws UnsupportedEncodingException, NoSuchAlgorithmException {
String uniqueId = new StringUtils().generateUID();
String transactionId = new StringUtils().generateUID();
notificationParams.put("wpf_unique_id", uniqueId);
notificationParams.put("signature", SHA1Hasher.SHA1(uniqueId + configuration.getPassword()));
NotificationGateway notification = new NotificationGateway(configuration, notificationParams);
assertTrue(notification.isAuthentic());
assertTrue(notification.isWPFNotification());
}
@Test(expected = GenesisException.class)
public void testNonAuthenticNotification() throws UnsupportedEncodingException, NoSuchAlgorithmException {
NotificationGateway notification = new NotificationGateway(configuration, notificationParams);
}
@Test(expected = GenesisException.class)
public void testInvalidNotification() throws UnsupportedEncodingException, NoSuchAlgorithmException {
Integer errorCode = ErrorCodes.INPUT_DATA_ERROR.getCode();
GenesisException exception = new GenesisException(errorCode, "Invalid Genesis Notification!", new Throwable());
when(notificationGtw.getNotification()).thenReturn(notification);
// Stub isAuthentic() to throw, so that the expected GenesisException is raised when it is queried below.
when(notificationGtw.isAuthentic()).thenThrow(exception);
when(notificationGtw.isApiNotification()).thenReturn(true);
when(notificationGtw.isWPFNotification()).thenReturn(false);
when(notificationGtw.getResponse()).thenReturn(expectedResponse);
assertEquals(notificationGtw.getNotification(), notification);
assertFalse(notificationGtw.isAuthentic());
assertTrue(notificationGtw.isApiNotification());
assertFalse(notificationGtw.isWPFNotification());
assertEquals(notificationGtw.getResponse(), expectedResponse.toXML());
verify(notificationGtw).getNotification();
verify(notificationGtw).isAuthentic();
verify(notificationGtw).isApiNotification();
verify(notificationGtw).isWPFNotification();
verify(notificationGtw).getResponse();
verifyNoMoreInteractions(notificationGtw);
}
@Test(expected = GenesisException.class)
public void testNotificationWithMissingParams() throws UnsupportedEncodingException, NoSuchAlgorithmException {
Integer errorCode = ErrorCodes.INPUT_DATA_MISSING_ERROR.getCode();
GenesisException exception = new GenesisException(errorCode, ErrorCodes.getErrorDescription(errorCode), new Throwable());
when(notificationGtw.getNotification()).thenReturn(notification);
when(notificationGtw.getNotification().getUniqueId()).thenReturn(null);
when(notificationGtw.getNotification().getSignature()).thenReturn(null);
// Stub isAuthentic() to throw when the mandatory unique_id/signature parameters are missing,
// so that the expected GenesisException is raised when it is queried below.
when(notificationGtw.isAuthentic()).thenThrow(exception);
when(notificationGtw.isApiNotification()).thenReturn(true);
when(notificationGtw.isWPFNotification()).thenReturn(false);
when(notificationGtw.getResponse()).thenReturn(expectedResponse);
assertEquals(notificationGtw.getNotification(), notification);
assertNull(notificationGtw.getNotification().getUniqueId());
assertNull(notificationGtw.getNotification().getSignature());
assertFalse(notificationGtw.isAuthentic());
assertTrue(notificationGtw.isApiNotification());
assertFalse(notificationGtw.isWPFNotification());
assertEquals(notificationGtw.getResponse(), expectedResponse);
verify(notificationGtw).getNotification();
verify(notification).getUniqueId();
verify(notification).getSignature();
verify(notificationGtw).isAuthentic();
verify(notificationGtw).isApiNotification();
verify(notificationGtw).isWPFNotification();
verify(notificationGtw).getResponse();
verifyNoMoreInteractions(notificationGtw);
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache.persistence.checkpoint;
import java.util.Collection;
import java.util.concurrent.TimeUnit;
import java.util.function.Function;
import java.util.function.Supplier;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.IgniteException;
import org.apache.ignite.IgniteLogger;
import org.apache.ignite.failure.FailureContext;
import org.apache.ignite.internal.IgniteFutureTimeoutCheckedException;
import org.apache.ignite.internal.NodeStoppingException;
import org.apache.ignite.internal.processors.cache.persistence.DataRegion;
import org.apache.ignite.internal.processors.cache.persistence.pagemem.PageMemoryEx;
import org.apache.ignite.internal.processors.failure.FailureProcessor;
import org.apache.ignite.internal.util.typedef.internal.U;
import static org.apache.ignite.failure.FailureType.SYSTEM_CRITICAL_OPERATION_TIMEOUT;
import static org.apache.ignite.internal.processors.cache.persistence.CheckpointState.LOCK_RELEASED;
/**
* Checkpoint lock for external use; it should be held to protect data while it is being written to memory. It contains the
* logic needed to correctly acquire the internal checkpoint lock (timeout handling, forced checkpoint, etc.).
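*
* <p>A minimal usage sketch (illustrative only; the variable name is an assumption, not taken from Ignite documentation):
* <pre>{@code
* checkpointTimeoutLock.checkpointReadLock();
* try {
*     // update page memory / data regions here
* }
* finally {
*     checkpointTimeoutLock.checkpointReadUnlock();
* }
* }</pre>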
*/
public class CheckpointTimeoutLock {
/** Ignite logger. */
protected final IgniteLogger log;
/** Failure processor. */
private final FailureProcessor failureProcessor;
/** Data regions which should be covered by this lock. */
private final Supplier<Collection<DataRegion>> dataRegions;
/** Internal checkpoint lock. */
private final CheckpointReadWriteLock checkpointReadWriteLock;
/** Service for triggering the checkpoint. */
private final Checkpointer checkpointer;
/** Timeout for checkpoint read lock acquisition in milliseconds. */
private volatile long checkpointReadLockTimeout;
/** Stop flag. */
private boolean stop;
/**
* @param logger Logger.
* @param processor Failure processor.
* @param regions Data regions.
* @param lock Checkpoint read-write lock.
* @param checkpointer Checkpointer.
* @param checkpointReadLockTimeout Checkpoint lock timeout.
*/
CheckpointTimeoutLock(
Function<Class<?>, IgniteLogger> logger,
FailureProcessor processor,
Supplier<Collection<DataRegion>> regions,
CheckpointReadWriteLock lock,
Checkpointer checkpointer,
long checkpointReadLockTimeout
) {
this.log = logger.apply(getClass());
failureProcessor = processor;
dataRegions = regions;
checkpointReadWriteLock = lock;
this.checkpointer = checkpointer;
this.checkpointReadLockTimeout = checkpointReadLockTimeout;
}
/**
* Acquires the checkpoint read lock. While this lock is held, the checkpoint thread will not capture the memory
* state.
*
* @throws IgniteException If failed.
*/
public void checkpointReadLock() {
if (checkpointReadWriteLock.isWriteLockHeldByCurrentThread())
return;
long timeout = checkpointReadLockTimeout;
long start = U.currentTimeMillis();
boolean interrupted = false;
try {
for (; ; ) {
try {
if (timeout > 0 && (U.currentTimeMillis() - start) >= timeout)
failCheckpointReadLock();
try {
if (timeout > 0) {
if (!checkpointReadWriteLock.tryReadLock(timeout - (U.currentTimeMillis() - start),
TimeUnit.MILLISECONDS))
failCheckpointReadLock();
}
else
checkpointReadWriteLock.readLock();
}
catch (InterruptedException e) {
interrupted = true;
continue;
}
if (stop) {
checkpointReadWriteLock.readUnlock();
throw new IgniteException(new NodeStoppingException("Failed to perform cache update: node is stopping."));
}
if (checkpointReadWriteLock.getReadHoldCount() > 1 || safeToUpdatePageMemories() || checkpointer.runner() == null)
break;
else {
CheckpointProgress pages = checkpointer.scheduleCheckpoint(0, "too many dirty pages");
checkpointReadWriteLock.readUnlock();
if (timeout > 0 && U.currentTimeMillis() - start >= timeout)
failCheckpointReadLock();
try {
pages
.futureFor(LOCK_RELEASED)
.getUninterruptibly();
}
catch (IgniteFutureTimeoutCheckedException e) {
failCheckpointReadLock();
}
catch (IgniteCheckedException e) {
throw new IgniteException("Failed to wait for checkpoint begin.", e);
}
}
}
catch (CheckpointReadLockTimeoutException e) {
log.error(e.getMessage(), e);
timeout = 0;
}
}
}
finally {
if (interrupted)
Thread.currentThread().interrupt();
}
}
/**
* @return {@code true} if all PageMemory instances are safe to update.
*/
private boolean safeToUpdatePageMemories() {
Collection<DataRegion> memPlcs = dataRegions.get();
if (memPlcs == null)
return true;
for (DataRegion memPlc : memPlcs) {
if (!memPlc.config().isPersistenceEnabled())
continue;
PageMemoryEx pageMemEx = (PageMemoryEx)memPlc.pageMemory();
if (!pageMemEx.safeToUpdate())
return false;
}
return true;
}
/**
* Releases the checkpoint read lock.
*/
public void checkpointReadUnlock() {
checkpointReadWriteLock.readUnlock();
}
/**
* Invokes critical failure processing. Always throws.
*
* @throws CheckpointReadLockTimeoutException If node was not invalidated as result of handling.
* @throws IgniteException If node was invalidated as result of handling.
*/
private void failCheckpointReadLock() throws CheckpointReadLockTimeoutException, IgniteException {
String msg = "Checkpoint read lock acquisition has been timed out.";
IgniteException e = new IgniteException(msg);
if (failureProcessor.process(new FailureContext(SYSTEM_CRITICAL_OPERATION_TIMEOUT, e)))
throw e;
throw new CheckpointReadLockTimeoutException(msg);
}
/**
* Timeout for checkpoint read lock acquisition.
*
* @return Timeout for checkpoint read lock acquisition in milliseconds.
*/
public long checkpointReadLockTimeout() {
return checkpointReadLockTimeout;
}
/**
* Sets timeout for checkpoint read lock acquisition.
*
* @param val New timeout in milliseconds, non-positive value denotes infinite timeout.
*/
public void checkpointReadLockTimeout(long val) {
checkpointReadLockTimeout = val;
}
/**
* @return true if checkpoint lock is held by current thread
*/
public boolean checkpointLockIsHeldByThread() {
return checkpointReadWriteLock.checkpointLockIsHeldByThread();
}
/**
* Forbids any further acquisition of this lock.
*/
public void stop() {
checkpointReadWriteLock.writeLock();
try {
stop = true;
}
finally {
checkpointReadWriteLock.writeUnlock();
}
}
/**
* Prepares the lock for further usage.
*/
public void start() {
stop = false;
}
/** Indicates checkpoint read lock acquisition failure which did not lead to node invalidation. */
private static class CheckpointReadLockTimeoutException extends IgniteCheckedException {
/** Serial version UID. */
private static final long serialVersionUID = 0L;
/** @param msg Detail message describing the timeout. */
private CheckpointReadLockTimeoutException(String msg) {
super(msg);
}
}
}
|
|
/*******************************************************************************
* The MIT License (MIT)
*
* Copyright (c) 2015 Neustar Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*******************************************************************************/
package com.neulevel.epp.ext;
import java.util.*;
import org.w3c.dom.*;
import com.neulevel.epp.core.*;
/**
* This <code>EppSecDnsDsData</code> class implements DS data specified in the
* EPP DNS Security extension, defined by IETF Draft:
* <A HREF="http://www.ietf.org/internet-drafts/draft-hollenbeck-epp-secdns-04.txt">
* draft-hollenbeck-epp-secdns-04.txt</A>, with the following modifications:
*
* <UL>
* <LI>The XML schema file has been modified to handle DS data only
* <LI>The XML schema file has been modified to allow additions of new DS data
* </UL>
*
* @author Ning Zhang [email protected]
* @version $Revision: 1.6 $ $Date: 2012/06/26 12:09:39 $
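*
* <p>A minimal usage sketch (all values below are illustrative, not taken from the draft):
* <pre>
* EppSecDnsDsData ds = new EppSecDnsDsData();
* ds.setKeyTag(12345);
* ds.setAlgorithm(5);
* ds.setDigestType(1);
* ds.setDigestString("49FD46E6C4B45C55D4AC69CBD3CD34AC1AFE51DE");
* Element elm = ds.toXML(doc, "dsData"); // doc is an existing org.w3c.dom.Document
* </pre>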
*/
public class EppSecDnsDsData extends EppEntity
{
private int keyTag;
private int alg;
private int digestType;
private String digest;
private boolean keyDataPresent;
private EppSecDnsKeyData secKeyData;
/*
private Calendar sDate;
private Calendar eDate;
private String vInterval;
*/
private static final String[] hex = {"0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "a", "b", "c", "d", "e", "f"};
/**
* Creates an <code>EppSecDnsDsData</code> object
*/
public EppSecDnsDsData()
{
this.keyTag = 0;
this.alg = 0;
this.digestType = 0;
this.digest = null;
this.secKeyData = null;
this.keyDataPresent = false;
/*
this.sDate = null;
this.eDate = null;
this.vInterval = null;
*/
}
/**
* Gets the key tag value
*/
public int getKeyTag()
{
return this.keyTag;
}
/**
* Sets the key tag value
*/
public void setKeyTag( int keyTag )
{
this.keyTag = keyTag ;
}
/**
* Gets the algorithm value
*/
public int getAlgorithm()
{
return this.alg;
}
/**
* Sets the algorithm value
*/
public void setAlgorithm( int alg )
{
this.alg = alg ;
}
/**
* Gets the digest type
*/
public int getDigestType()
{
return this.digestType;
}
/**
* Sets the digest type
*/
public void setDigestType( int digestType )
{
this.digestType = digestType ;
}
/**
* Gets the digest value
*/
public String getDigest()
{
return this.digest;
}
/**
* Sets the digest value, given a byte array
*/
private void setDigest( byte [] digestBytes )
{
StringBuffer str = new StringBuffer(digestBytes.length * 2);
for( int i = 0; i < digestBytes.length; i++ )
{
str.append(hex[(digestBytes[i] & 0xF0) >> 4]);
str.append(hex[ digestBytes[i] & 0x0F]);
}
this.digest = str.toString();
}
/**
* Sets the digest value from a hex binary string. No validation is performed; this method always returns true.
*/
public boolean setDigestString( String digestString )
{
this.digest = digestString;
return true;
}
public EppSecDnsKeyData getKeyData() {
return this.secKeyData;
}
public void setKeyData(EppSecDnsKeyData kd) {
if( null == kd )
return;
this.keyDataPresent = true;
this.secKeyData = kd;
}
public boolean isKeyDataPresent() {
return this.keyDataPresent;
}
/**
* Gets the optional start date for using the DS data
*/
/*
public Calendar getStartDate()
{
return this.sDate;
}
*/
/**
* Sets the optional start date for using the DS data
*/
/*
public void setStartDate( Calendar sDate )
{
this.sDate = sDate;
}
*/
/**
* Gets the optional end date for using the DS data
*/
/*
public Calendar getEndDate()
{
return this.eDate;
}
*/
/**
* Sets the optional end date for using the DS data
*/
/*
public void setEndDate( Calendar eDate )
{
this.eDate = eDate;
}
*/
/**
* Gets the optional validation interval, must be in XML duration format
*/
/*
public String getValidationInterval()
{
return this.vInterval;
}
*/
/**
* Sets the optional validation interval, in the XML duration format
*/
/*
public void setValidationInterval( String vInterval )
{
this.vInterval = vInterval;
}
*/
/**
* Converts an XML element into an <code>EppSecDnsDsData</code> object.
* The caller of this method must make sure that the root node is of
* EPP SECDNS dsDataType
*
* @param root root node for an <code>EppSecDnsDsData</code> object in XML format
*
* @return an <code>EppSecDnsDsData</code> object, or null if the node is invalid
*/
public static EppEntity fromXML( Node root )
{
String value = null;
Calendar date = null;
EppSecDnsDsData data = new EppSecDnsDsData();
NodeList list = root.getChildNodes();
for( int i = 0; i < list.getLength(); i++ )
{
Node node = list.item(i);
String name = node.getLocalName();
if( name == null )
{
continue;
}
if( name.equals("keyTag") )
{
value = EppUtil.getText(node);
if( (value != null) && (value.trim().length() > 0) )
{
data.setKeyTag(Integer.parseInt(value));
}
}
else if( name.equals("alg") )
{
value = EppUtil.getText(node);
if( (value != null) && (value.trim().length() > 0) )
{
data.setAlgorithm(Integer.parseInt(value));
}
}
else if( name.equals("digestType") )
{
value = EppUtil.getText(node);
if( (value != null) && (value.trim().length() > 0) )
{
data.setDigestType(Integer.parseInt(value));
}
}
else if( name.equals("digest") )
{
value = EppUtil.getText(node);
if( (value != null) && (value.trim().length() > 0) )
{
data.setDigestString(value);
}
} else if( name.equals("keyData") ) {
EppSecDnsKeyData kd = (EppSecDnsKeyData)EppSecDnsKeyData.fromXML(node);
if( null != kd ) {
data.keyDataPresent = true; // redundant: setKeyData() below also sets this flag
data.setKeyData( kd );
}
}
/*
else if( name.equals("sDate") )
{
date = EppUtil.getDate(node);
if( date != null )
{
data.setStartDate(date);
}
}
else if( name.equals("eDate") )
{
date = EppUtil.getDate(node);
if( date != null )
{
data.setEndDate(date);
}
}
else if( name.equals("vInterval") )
{
value = EppUtil.getText(node);
if( (value != null) && (value.trim().length() > 0) )
{
data.setValidationInterval(value);
}
}
*/
}
return data;
}
/**
* Converts the <code>EppSecDnsDsData</code> object into an XML element
*
* @param doc the XML <code>Document</code> object
* @param tag the tag/element name for the <code>EppSecDnsDsData</code> object
*
* @return an <code>Element</code> object
*/
public Element toXML( Document doc, String tag )
{
Element elm;
Element body = doc.createElement(tag);
elm = doc.createElement("keyTag");
elm.appendChild(doc.createTextNode(Integer.toString(this.keyTag)));
body.appendChild(elm);
elm = doc.createElement("alg");
elm.appendChild(doc.createTextNode(Integer.toString(this.alg)));
body.appendChild(elm);
elm = doc.createElement("digestType");
elm.appendChild(doc.createTextNode(Integer.toString(this.digestType)));
body.appendChild(elm);
if( this.digest != null )
{
elm = doc.createElement("digest");
elm.appendChild(doc.createTextNode(this.digest));
body.appendChild(elm);
}
if( true == this.keyDataPresent && null != this.secKeyData ) {
Element element = this.secKeyData.toXML(doc, "keyData");
body.appendChild(element);
}
/*
if( this.sDate != null )
{
elm = doc.createElement("sDate");
elm.appendChild(EppUtil.createTextNode(doc, this.sDate));
body.appendChild(elm);
}
if( this.eDate != null )
{
elm = doc.createElement("eDate");
elm.appendChild(EppUtil.createTextNode(doc, this.eDate));
body.appendChild(elm);
}
if( this.vInterval != null )
{
elm = doc.createElement("vInterval");
elm.appendChild(doc.createTextNode(this.vInterval));
body.appendChild(elm);
}
*/
return body;
}
public String toString()
{
return toString("dsData");
}
}
|
|
/*
* Copyright 2006-2021 Prowide
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.prowidesoftware.swift.model.field;
import com.prowidesoftware.swift.model.Tag;
import com.prowidesoftware.Generated;
import com.prowidesoftware.deprecation.ProwideDeprecated;
import com.prowidesoftware.deprecation.TargetYear;
import java.io.Serializable;
import java.util.Locale;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import org.apache.commons.lang3.StringUtils;
import com.prowidesoftware.swift.model.field.SwiftParseUtils;
import com.prowidesoftware.swift.model.field.Field;
import com.prowidesoftware.swift.model.*;
import com.prowidesoftware.swift.utils.SwiftFormatUtils;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
/**
* SWIFT MT Field 12D.
* <p>
* Model and parser for field 12D of a SWIFT MT message.
*
* <p>Subfields (components) Data types
* <ol>
* <li><code>String</code></li>
* </ol>
*
* <p>Structure definition
* <ul>
* <li>validation pattern: <code>4!c</code></li>
* <li>parser pattern: <code>S</code></li>
* <li>components pattern: <code>S</code></li>
* </ul>
*
* <p>
* This class complies with standard release <strong>SRU2021</strong>
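*
* <p>A minimal usage sketch (the value {@code "AMER"} is illustrative only, not prescribed here):
* <pre>{@code
* Field12D field = new Field12D("AMER");
* String optionType = field.getOptionType(); // "AMER"
* Tag tag = Field12D.tag("AMER");            // shorthand for new Tag("12D", "AMER")
* }</pre>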
*/
@SuppressWarnings("unused")
@Generated
public class Field12D extends Field implements Serializable {
/**
* Constant identifying the SRU to which this class belongs to.
*/
public static final int SRU = 2021;
private static final long serialVersionUID = 1L;
/**
* Constant with the field name 12D.
*/
public static final String NAME = "12D";
/**
* Same as NAME, intended to be clear when using static imports.
*/
public static final String F_12D = "12D";
/**
* @deprecated use {@link #parserPattern()} method instead.
*/
@Deprecated
@ProwideDeprecated(phase2 = TargetYear.SRU2022)
public static final String PARSER_PATTERN = "S";
/**
* @deprecated use {@link #typesPattern()} method instead.
*/
@Deprecated
@ProwideDeprecated(phase2 = TargetYear.SRU2022)
public static final String COMPONENTS_PATTERN = "S";
/**
* @deprecated use {@link #typesPattern()} method instead.
*/
@Deprecated
@ProwideDeprecated(phase2 = TargetYear.SRU2022)
public static final String TYPES_PATTERN = "S";
/**
* Component number for the Option Type subfield.
*/
public static final Integer OPTION_TYPE = 1;
/**
* Default constructor. Creates a new field setting all components to null.
*/
public Field12D() {
super(1);
}
/**
* Creates a new field and initializes its components with content from the parameter value.
* @param value complete field value including separators and CRLF
*/
public Field12D(final String value) {
super(value);
}
/**
* Creates a new field and initializes its components with content from the parameter tag.
* The value is parsed with {@link #parse(String)}
* @throws IllegalArgumentException if the parameter tag is null or its tagname does not match the field name
* @since 7.8
*/
public Field12D(final Tag tag) {
this();
if (tag == null) {
throw new IllegalArgumentException("tag cannot be null.");
}
if (!StringUtils.equals(tag.getName(), "12D")) {
throw new IllegalArgumentException("cannot create field 12D from tag "+tag.getName()+", tagname must match the name of the field.");
}
parse(tag.getValue());
}
/**
* Copy constructor.
* Initializes the components list with a deep copy of the source components list.
* @param source a field instance to copy
* @since 7.7
*/
public static Field12D newInstance(Field12D source) {
Field12D cp = new Field12D();
cp.setComponents(new ArrayList<>(source.getComponents()));
return cp;
}
/**
* Create a Tag with this field name and the given value.
* Shorthand for <code>new Tag(NAME, value)</code>
* @see #NAME
* @since 7.5
*/
public static Tag tag(final String value) {
return new Tag(NAME, value);
}
/**
* Create a Tag with this field name and an empty string as value.
* Shorthand for <code>new Tag(NAME, "")</code>
* @see #NAME
* @since 7.5
*/
public static Tag emptyTag() {
return new Tag(NAME, "");
}
/**
* Parses the parameter value into the internal components structure.
*
* <p>Used to update all components from a full new value, as an alternative
* to setting individual components. Previous component values are overwritten.
*
* @param value complete field value including separators and CRLF
* @since 7.8
*/
@Override
public void parse(final String value) {
init(1);
setComponent1(value);
}
/**
* Serializes the field's components into a single string value (SWIFT format).
*/
@Override
public String getValue() {
final StringBuilder result = new StringBuilder();
append(result, 1);
return result.toString();
}
/**
* Returns a localized, human-readable string of a field component.<br>
*
* @param component number of the component to display
* @param locale optional locale to format date and amounts, if null, the default locale is used
* @return formatted component value or null if component number is invalid or not present
* @throws IllegalArgumentException if component number is invalid for the field
* @since 7.8
*/
@Override
public String getValueDisplay(int component, Locale locale) {
if (component < 1 || component > 1) {
throw new IllegalArgumentException("invalid component number " + component + " for field 12D");
}
if (component == 1) {
//default format (as is)
return getComponent(1);
}
return null;
}
/**
* @deprecated use {@link #typesPattern()} instead.
*/
@Override
@Deprecated
@ProwideDeprecated(phase2 = TargetYear.SRU2022)
public String componentsPattern() {
return "S";
}
/**
* Returns the field component types pattern.
*
* This method returns a letter representing the type for each component in the Field. It supersedes
* the Components Pattern because it distinguishes between N (Number) and I (BigDecimal).
* @since 9.2.7
*/
@Override
public String typesPattern() {
return "S";
}
/**
* Returns the field parser pattern.
*/
@Override
public String parserPattern() {
return "S";
}
/**
* Returns the field validator pattern
*/
@Override
public String validatorPattern() {
return "4!c";
}
/**
* Given a component number, returns true if the component is optional,
* regardless of whether the field itself is mandatory in a particular message.<br>
* Since the field's value is composed of one or several internal component
* values, the field may be present in a message with a proper value while
* some of its internal components are not set.
*
* @param component component number, first component of a field is referenced as 1
* @return true if the component is optional for this field, false otherwise
*/
@Override
public boolean isOptional(int component) {
return false;
}
/**
* Returns true if the field is a GENERIC FIELD as specified by the standard.
* @return true if the field is generic, false otherwise
*/
@Override
public boolean isGeneric() {
return false;
}
/**
* Returns the defined amount of components.<br>
* This is not the amount of components present in the field instance, but the total amount of components
* that this field accepts as defined.
* @since 7.7
*/
@Override
public int componentsSize() {
return 1;
}
/**
* Returns english label for components.
* <br>
* The index in the list is in sync with specific field component structure.
* @see #getComponentLabel(int)
* @since 7.8.4
*/
@Override
public List<String> getComponentLabels() {
List<String> result = new ArrayList<>();
result.add("Option Type");
return result;
}
/**
* Returns a mapping between component numbers and their label in camel case format.
* @since 7.10.3
*/
@Override
protected Map<Integer, String> getComponentMap() {
Map<Integer, String> result = new HashMap<>();
result.put(1, "optionType");
return result;
}
/**
* Gets the component 1 (Option Type).
* @return the component 1
*/
public String getComponent1() {
return getComponent(1);
}
/**
* Gets the Option Type (component 1).
* @return the Option Type from component 1
*/
public String getOptionType() {
return getComponent1();
}
/**
* Set the component 1 (Option Type).
*
* @param component1 the Option Type to set
* @return the field object to enable build pattern
*/
public Field12D setComponent1(String component1) {
setComponent(1, component1);
return this;
}
/**
* Set the Option Type (component 1).
*
* @param component1 the Option Type to set
* @return the field object to enable build pattern
*/
public Field12D setOptionType(String component1) {
return setComponent1(component1);
}
/**
* Returns the field's name composed by the field number and the letter option (if any).
* @return the static value of Field12D.NAME
*/
@Override
public String getName() {
return NAME;
}
/**
* Gets the first occurrence from the tag list, or null if not found.
* @return null if not found, or if the block is null or empty
* @param block may be null or empty
*/
public static Field12D get(final SwiftTagListBlock block) {
if (block == null || block.isEmpty()) {
return null;
}
final Tag t = block.getTagByName(NAME);
if (t == null) {
return null;
}
return new Field12D(t);
}
/**
* Gets the first instance of Field12D in the given message.
* @param msg may be empty or null
* @return null if not found or msg is empty or null
* @see #get(SwiftTagListBlock)
*/
public static Field12D get(final SwiftMessage msg) {
if (msg == null || msg.getBlock4() == null || msg.getBlock4().isEmpty()) {
return null;
}
return get(msg.getBlock4());
}
/**
* Gets a list of all occurrences of the field Field12D in the given message;
* an empty list is returned if none are found.
* @param msg may be empty or null in which case an empty list is returned
* @see #getAll(SwiftTagListBlock)
*/
public static List<Field12D> getAll(final SwiftMessage msg) {
if (msg == null || msg.getBlock4() == null || msg.getBlock4().isEmpty()) {
return java.util.Collections.emptyList();
}
return getAll(msg.getBlock4());
}
/**
* Gets a list of all occurrences of the field Field12D from the given block;
* an empty list is returned if none are found.
*
* @param block may be empty or null in which case an empty list is returned
*/
public static List<Field12D> getAll(final SwiftTagListBlock block) {
final List<Field12D> result = new ArrayList<>();
if (block == null || block.isEmpty()) {
return result;
}
final Tag[] arr = block.getTagsByName(NAME);
if (arr != null && arr.length > 0) {
for (final Tag f : arr) {
result.add(new Field12D(f));
}
}
return result;
}
/**
* This method deserializes the JSON data into a Field12D object.
* @param json JSON structure including tuples with label and value for all field components
* @return a new field instance with the JSON data parsed into field components, or an empty field if the JSON is invalid
* @since 7.10.3
* @see Field#fromJson(String)
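*
* <p>For example (hypothetical input), passing the JSON string <code>{"optionType":"AMER"}</code> yields a field whose component 1 is "AMER".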
*/
public static Field12D fromJson(final String json) {
final Field12D field = new Field12D();
final JsonObject jsonObject = JsonParser.parseString(json).getAsJsonObject();
// **** COMPONENT 1 - Option Type
if (jsonObject.get("optionType") != null) {
field.setComponent1(jsonObject.get("optionType").getAsString());
}
return field;
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ranger.security.context;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import org.apache.commons.collections.CollectionUtils;
import org.springframework.stereotype.Component;
@Component
public class RangerAPIMapping {
/**
* @NOTE When adding a new tab here, please don't forget to update the function:
* org.apache.ranger.security.context.RangerAPIMapping.getAvailableUITabs()
*/
public static final String TAB_RESOURCE_BASED_POLICIES = "Resource Based Policies";
public static final String TAB_AUDIT = "Audit";
public static final String TAB_USERS_GROUPS = "Users/Groups";
public static final String TAB_PERMISSIONS = "Permissions";
public static final String TAB_KEY_MANAGER = "Key Manager";
public static final String TAB_TAG_BASED_POLICIES = "Tag Based Policies";
public static final String TAB_REPORTS = "Reports";
private static HashMap<String, Set<String>> rangerAPIMappingWithUI = null;
private static Set<String> tabList = new HashSet<String>();
private static Map<String, Set<String>> mapApiToTabs = null;
public RangerAPIMapping() {
init();
}
private void init() {
if (rangerAPIMappingWithUI == null) {
rangerAPIMappingWithUI = new HashMap<String, Set<String>>();
}
if (mapApiToTabs == null) {
mapApiToTabs = new HashMap<String, Set<String>>();
}
mapResourceBasedPoliciesWithAPIs();
mapAuditWithAPIs();
mapUGWithAPIs();
mapPermissionsWithAPIs();
mapKeyManagerWithAPIs();
mapTagBasedPoliciesWithAPIs();
mapReportsWithAPIs();
if (CollectionUtils.isEmpty(tabList)) {
populateAvailableUITabs();
}
}
private void populateAvailableUITabs() {
tabList = new HashSet<String>();
tabList.add(TAB_RESOURCE_BASED_POLICIES);
tabList.add(TAB_TAG_BASED_POLICIES);
tabList.add(TAB_AUDIT);
tabList.add(TAB_REPORTS);
tabList.add(TAB_KEY_MANAGER);
tabList.add(TAB_PERMISSIONS);
tabList.add(TAB_USERS_GROUPS);
}
private void mapReportsWithAPIs() {
Set<String> apiAssociatedWithReports = new HashSet<String>();
apiAssociatedWithReports.add(RangerAPIList.COUNT_X_ASSETS);
apiAssociatedWithReports.add(RangerAPIList.GET_X_ASSET);
apiAssociatedWithReports.add(RangerAPIList.SEARCH_X_ASSETS);
apiAssociatedWithReports.add(RangerAPIList.COUNT_SERVICES);
apiAssociatedWithReports.add(RangerAPIList.GET_POLICY_FOR_VERSION_NO);
apiAssociatedWithReports.add(RangerAPIList.GET_POLICY_FROM_EVENT_TIME);
apiAssociatedWithReports.add(RangerAPIList.GET_POLICY_VERSION_LIST);
apiAssociatedWithReports.add(RangerAPIList.GET_SERVICE);
apiAssociatedWithReports.add(RangerAPIList.GET_SERVICE_BY_NAME);
apiAssociatedWithReports.add(RangerAPIList.GET_SERVICE_DEF);
apiAssociatedWithReports.add(RangerAPIList.GET_SERVICE_DEF_BY_NAME);
apiAssociatedWithReports.add(RangerAPIList.GET_SERVICE_DEFS);
apiAssociatedWithReports.add(RangerAPIList.GET_SERVICES);
apiAssociatedWithReports.add(RangerAPIList.LOOKUP_RESOURCE);
apiAssociatedWithReports.add(RangerAPIList.GET_USER_PROFILE_FOR_USER);
apiAssociatedWithReports.add(RangerAPIList.SEARCH_USERS);
apiAssociatedWithReports.add(RangerAPIList.COUNT_X_AUDIT_MAPS);
apiAssociatedWithReports.add(RangerAPIList.COUNT_X_GROUP_GROUPS);
apiAssociatedWithReports.add(RangerAPIList.COUNT_X_GROUPS);
apiAssociatedWithReports.add(RangerAPIList.COUNT_X_GROUP_USERS);
apiAssociatedWithReports.add(RangerAPIList.COUNT_X_PERM_MAPS);
apiAssociatedWithReports.add(RangerAPIList.COUNT_X_USERS);
apiAssociatedWithReports.add(RangerAPIList.GET_X_AUDIT_MAP);
apiAssociatedWithReports.add(RangerAPIList.GET_X_GROUP);
apiAssociatedWithReports.add(RangerAPIList.GET_X_GROUP_BY_GROUP_NAME);
apiAssociatedWithReports.add(RangerAPIList.GET_X_GROUP_GROUP);
apiAssociatedWithReports.add(RangerAPIList.GET_X_GROUP_USER);
apiAssociatedWithReports.add(RangerAPIList.GET_X_GROUP_USERS);
apiAssociatedWithReports.add(RangerAPIList.GET_X_PERM_MAP);
apiAssociatedWithReports.add(RangerAPIList.GET_X_USER);
apiAssociatedWithReports.add(RangerAPIList.GET_X_USER_BY_USER_NAME);
apiAssociatedWithReports.add(RangerAPIList.GET_X_USER_GROUPS);
apiAssociatedWithReports.add(RangerAPIList.SEARCH_X_AUDIT_MAPS);
apiAssociatedWithReports.add(RangerAPIList.SEARCH_X_GROUP_GROUPS);
apiAssociatedWithReports.add(RangerAPIList.SEARCH_X_GROUPS);
apiAssociatedWithReports.add(RangerAPIList.SEARCH_X_GROUP_USERS);
apiAssociatedWithReports.add(RangerAPIList.SEARCH_X_PERM_MAPS);
apiAssociatedWithReports.add(RangerAPIList.SEARCH_X_USERS);
apiAssociatedWithReports.add(RangerAPIList.SECURE_GET_X_GROUP);
apiAssociatedWithReports.add(RangerAPIList.SECURE_GET_X_USER);
rangerAPIMappingWithUI.put(TAB_REPORTS, apiAssociatedWithReports);
for (String api : apiAssociatedWithReports) {
if (mapApiToTabs.get(api) == null) {
mapApiToTabs.put(api, new HashSet<String>());
}
mapApiToTabs.get(api).add(TAB_REPORTS);
}
}
private void mapTagBasedPoliciesWithAPIs() {
Set<String> apiAssociatedWithTagBasedPolicy = new HashSet<String>();
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.COUNT_X_ASSETS);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.CREATE_X_ASSET);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.DELETE_X_ASSET);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.GET_X_ASSET);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.SEARCH_X_ASSETS);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.TEST_CONFIG);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.UPDATE_X_ASSET);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.COUNT_SERVICES);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.CREATE_SERVICE);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.CREATE_SERVICE_DEF);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.DELETE_SERVICE);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.DELETE_SERVICE_DEF);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.GET_POLICY_FOR_VERSION_NO);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.GET_POLICY_FROM_EVENT_TIME);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.GET_POLICY_VERSION_LIST);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.GET_SERVICE);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.GET_SERVICE_BY_NAME);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.GET_SERVICE_DEF);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.GET_SERVICE_DEF_BY_NAME);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.GET_SERVICE_DEFS);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.GET_SERVICES);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.LOOKUP_RESOURCE);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.UPDATE_SERVICE);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.UPDATE_SERVICE_DEF);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.VALIDATE_CONFIG);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.GET_USER_PROFILE_FOR_USER);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.SEARCH_USERS);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.COUNT_X_AUDIT_MAPS);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.COUNT_X_GROUP_GROUPS);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.COUNT_X_GROUPS);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.COUNT_X_GROUP_USERS);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.COUNT_X_PERM_MAPS);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.COUNT_X_USERS);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.CREATE_X_AUDIT_MAP);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.CREATE_X_PERM_MAP);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.DELETE_X_AUDIT_MAP);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.DELETE_X_PERM_MAP);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.GET_X_AUDIT_MAP);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.GET_X_GROUP);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.GET_X_GROUP_BY_GROUP_NAME);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.GET_X_GROUP_GROUP);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.GET_X_GROUP_USER);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.GET_X_GROUP_USERS);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.GET_X_PERM_MAP);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.GET_X_USER);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.GET_X_USER_BY_USER_NAME);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.GET_X_USER_GROUPS);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.MODIFY_GROUPS_VISIBILITY);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.MODIFY_USER_ACTIVE_STATUS);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.MODIFY_USER_VISIBILITY);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.SEARCH_X_AUDIT_MAPS);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.SEARCH_X_GROUP_GROUPS);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.SEARCH_X_GROUPS);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.SEARCH_X_GROUP_USERS);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.SEARCH_X_PERM_MAPS);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.SEARCH_X_USERS);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.SECURE_GET_X_GROUP);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.SECURE_GET_X_USER);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.UPDATE_X_AUDIT_MAP);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.UPDATE_X_PERM_MAP);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.CREATE);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.CREATE_DEFAULT_ACCOUNT_USER);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.UPDATE);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.SET_USER_ROLES);
apiAssociatedWithTagBasedPolicy.add(RangerAPIList.DEACTIVATE_USER);
rangerAPIMappingWithUI.put(TAB_TAG_BASED_POLICIES, apiAssociatedWithTagBasedPolicy);
for (String api : apiAssociatedWithTagBasedPolicy) {
if (mapApiToTabs.get(api) == null) {
mapApiToTabs.put(api, new HashSet<String>());
}
mapApiToTabs.get(api).add(TAB_TAG_BASED_POLICIES);
}
}
private void mapKeyManagerWithAPIs() {
Set<String> apiAssociatedWithKeyManager = new HashSet<String>();
apiAssociatedWithKeyManager.add(RangerAPIList.COUNT_X_ASSETS);
apiAssociatedWithKeyManager.add(RangerAPIList.CREATE_X_ASSET);
apiAssociatedWithKeyManager.add(RangerAPIList.DELETE_X_ASSET);
apiAssociatedWithKeyManager.add(RangerAPIList.GET_X_ASSET);
apiAssociatedWithKeyManager.add(RangerAPIList.SEARCH_X_ASSETS);
apiAssociatedWithKeyManager.add(RangerAPIList.TEST_CONFIG);
apiAssociatedWithKeyManager.add(RangerAPIList.UPDATE_X_ASSET);
apiAssociatedWithKeyManager.add(RangerAPIList.COUNT_SERVICES);
apiAssociatedWithKeyManager.add(RangerAPIList.CREATE_SERVICE);
apiAssociatedWithKeyManager.add(RangerAPIList.CREATE_SERVICE_DEF);
apiAssociatedWithKeyManager.add(RangerAPIList.DELETE_SERVICE);
apiAssociatedWithKeyManager.add(RangerAPIList.DELETE_SERVICE_DEF);
apiAssociatedWithKeyManager.add(RangerAPIList.GET_POLICY_FOR_VERSION_NO);
apiAssociatedWithKeyManager.add(RangerAPIList.GET_POLICY_FROM_EVENT_TIME);
apiAssociatedWithKeyManager.add(RangerAPIList.GET_POLICY_VERSION_LIST);
apiAssociatedWithKeyManager.add(RangerAPIList.GET_SERVICE);
apiAssociatedWithKeyManager.add(RangerAPIList.GET_SERVICE_BY_NAME);
apiAssociatedWithKeyManager.add(RangerAPIList.GET_SERVICE_DEF);
apiAssociatedWithKeyManager.add(RangerAPIList.GET_SERVICE_DEF_BY_NAME);
apiAssociatedWithKeyManager.add(RangerAPIList.GET_SERVICE_DEFS);
apiAssociatedWithKeyManager.add(RangerAPIList.GET_SERVICES);
apiAssociatedWithKeyManager.add(RangerAPIList.LOOKUP_RESOURCE);
apiAssociatedWithKeyManager.add(RangerAPIList.UPDATE_SERVICE);
apiAssociatedWithKeyManager.add(RangerAPIList.UPDATE_SERVICE_DEF);
apiAssociatedWithKeyManager.add(RangerAPIList.VALIDATE_CONFIG);
apiAssociatedWithKeyManager.add(RangerAPIList.CREATE_KEY);
apiAssociatedWithKeyManager.add(RangerAPIList.DELETE_KEY);
apiAssociatedWithKeyManager.add(RangerAPIList.GET_KEY);
apiAssociatedWithKeyManager.add(RangerAPIList.ROLLOVER_KEYS);
apiAssociatedWithKeyManager.add(RangerAPIList.SEARCH_KEYS);
rangerAPIMappingWithUI.put(TAB_KEY_MANAGER, apiAssociatedWithKeyManager);
for (String api : apiAssociatedWithKeyManager) {
if (mapApiToTabs.get(api) == null) {
mapApiToTabs.put(api, new HashSet<String>());
}
mapApiToTabs.get(api).add(TAB_KEY_MANAGER);
}
}
private void mapPermissionsWithAPIs() {
Set<String> apiAssociatedWithPermissions = new HashSet<String>();
apiAssociatedWithPermissions.add(RangerAPIList.COUNT_X_GROUP_PERMISSION);
apiAssociatedWithPermissions.add(RangerAPIList.COUNT_X_MODULE_DEF);
apiAssociatedWithPermissions.add(RangerAPIList.COUNT_X_USER_PERMISSION);
apiAssociatedWithPermissions.add(RangerAPIList.CREATE_X_GROUP_PERMISSION);
apiAssociatedWithPermissions.add(RangerAPIList.CREATE_X_MODULE_DEF_PERMISSION);
apiAssociatedWithPermissions.add(RangerAPIList.CREATE_X_USER_PERMISSION);
apiAssociatedWithPermissions.add(RangerAPIList.DELETE_X_GROUP_PERMISSION);
apiAssociatedWithPermissions.add(RangerAPIList.DELETE_X_MODULE_DEF_PERMISSION);
apiAssociatedWithPermissions.add(RangerAPIList.DELETE_X_USER_PERMISSION);
apiAssociatedWithPermissions.add(RangerAPIList.GET_X_GROUP_PERMISSION);
apiAssociatedWithPermissions.add(RangerAPIList.GET_X_MODULE_DEF_PERMISSION);
apiAssociatedWithPermissions.add(RangerAPIList.GET_X_USER_PERMISSION);
apiAssociatedWithPermissions.add(RangerAPIList.SEARCH_X_GROUP_PERMISSION);
apiAssociatedWithPermissions.add(RangerAPIList.SEARCH_X_MODULE_DEF);
apiAssociatedWithPermissions.add(RangerAPIList.SEARCH_X_USER_PERMISSION);
apiAssociatedWithPermissions.add(RangerAPIList.UPDATE_X_GROUP_PERMISSION);
apiAssociatedWithPermissions.add(RangerAPIList.UPDATE_X_MODULE_DEF_PERMISSION);
apiAssociatedWithPermissions.add(RangerAPIList.UPDATE_X_USER_PERMISSION);
rangerAPIMappingWithUI.put(TAB_PERMISSIONS, apiAssociatedWithPermissions);
for (String api : apiAssociatedWithPermissions) {
if (mapApiToTabs.get(api) == null) {
mapApiToTabs.put(api, new HashSet<String>());
}
mapApiToTabs.get(api).add(TAB_PERMISSIONS);
}
}
private void mapUGWithAPIs() {
Set<String> apiAssociatedWithUserAndGroups = new HashSet<String>();
apiAssociatedWithUserAndGroups.add(RangerAPIList.GET_USER_PROFILE_FOR_USER);
apiAssociatedWithUserAndGroups.add(RangerAPIList.SEARCH_USERS);
apiAssociatedWithUserAndGroups.add(RangerAPIList.COUNT_X_AUDIT_MAPS);
apiAssociatedWithUserAndGroups.add(RangerAPIList.COUNT_X_GROUP_GROUPS);
apiAssociatedWithUserAndGroups.add(RangerAPIList.COUNT_X_GROUPS);
apiAssociatedWithUserAndGroups.add(RangerAPIList.COUNT_X_GROUP_USERS);
apiAssociatedWithUserAndGroups.add(RangerAPIList.COUNT_X_PERM_MAPS);
apiAssociatedWithUserAndGroups.add(RangerAPIList.COUNT_X_USERS);
apiAssociatedWithUserAndGroups.add(RangerAPIList.CREATE_X_AUDIT_MAP);
apiAssociatedWithUserAndGroups.add(RangerAPIList.CREATE_X_PERM_MAP);
apiAssociatedWithUserAndGroups.add(RangerAPIList.DELETE_X_AUDIT_MAP);
apiAssociatedWithUserAndGroups.add(RangerAPIList.DELETE_X_PERM_MAP);
apiAssociatedWithUserAndGroups.add(RangerAPIList.GET_X_AUDIT_MAP);
apiAssociatedWithUserAndGroups.add(RangerAPIList.GET_X_GROUP);
apiAssociatedWithUserAndGroups.add(RangerAPIList.GET_X_GROUP_BY_GROUP_NAME);
apiAssociatedWithUserAndGroups.add(RangerAPIList.GET_X_GROUP_GROUP);
apiAssociatedWithUserAndGroups.add(RangerAPIList.GET_X_GROUP_USER);
apiAssociatedWithUserAndGroups.add(RangerAPIList.GET_X_GROUP_USERS);
apiAssociatedWithUserAndGroups.add(RangerAPIList.GET_X_PERM_MAP);
apiAssociatedWithUserAndGroups.add(RangerAPIList.GET_X_USER);
apiAssociatedWithUserAndGroups.add(RangerAPIList.GET_X_USER_BY_USER_NAME);
apiAssociatedWithUserAndGroups.add(RangerAPIList.GET_X_USER_GROUPS);
apiAssociatedWithUserAndGroups.add(RangerAPIList.MODIFY_GROUPS_VISIBILITY);
apiAssociatedWithUserAndGroups.add(RangerAPIList.MODIFY_USER_ACTIVE_STATUS);
apiAssociatedWithUserAndGroups.add(RangerAPIList.MODIFY_USER_VISIBILITY);
apiAssociatedWithUserAndGroups.add(RangerAPIList.SEARCH_X_AUDIT_MAPS);
apiAssociatedWithUserAndGroups.add(RangerAPIList.SEARCH_X_GROUP_GROUPS);
apiAssociatedWithUserAndGroups.add(RangerAPIList.SEARCH_X_GROUPS);
apiAssociatedWithUserAndGroups.add(RangerAPIList.SEARCH_X_GROUP_USERS);
apiAssociatedWithUserAndGroups.add(RangerAPIList.SEARCH_X_PERM_MAPS);
apiAssociatedWithUserAndGroups.add(RangerAPIList.SEARCH_X_USERS);
apiAssociatedWithUserAndGroups.add(RangerAPIList.SECURE_GET_X_GROUP);
apiAssociatedWithUserAndGroups.add(RangerAPIList.SECURE_GET_X_USER);
apiAssociatedWithUserAndGroups.add(RangerAPIList.UPDATE_X_AUDIT_MAP);
apiAssociatedWithUserAndGroups.add(RangerAPIList.UPDATE_X_PERM_MAP);
apiAssociatedWithUserAndGroups.add(RangerAPIList.CREATE);
apiAssociatedWithUserAndGroups.add(RangerAPIList.CREATE_DEFAULT_ACCOUNT_USER);
apiAssociatedWithUserAndGroups.add(RangerAPIList.UPDATE);
apiAssociatedWithUserAndGroups.add(RangerAPIList.SET_USER_ROLES);
apiAssociatedWithUserAndGroups.add(RangerAPIList.DEACTIVATE_USER);
apiAssociatedWithUserAndGroups.add(RangerAPIList.SET_USER_ROLES_BY_ID);
apiAssociatedWithUserAndGroups.add(RangerAPIList.SET_USER_ROLES_BY_NAME);
apiAssociatedWithUserAndGroups.add(RangerAPIList.GET_USER_ROLES_BY_ID);
apiAssociatedWithUserAndGroups.add(RangerAPIList.GET_USER_ROLES_BY_NAME);
rangerAPIMappingWithUI.put(TAB_USERS_GROUPS, apiAssociatedWithUserAndGroups);
for (String api : apiAssociatedWithUserAndGroups) {
if (mapApiToTabs.get(api) == null) {
mapApiToTabs.put(api, new HashSet<String>());
}
mapApiToTabs.get(api).add(TAB_USERS_GROUPS);
}
}
private void mapAuditWithAPIs() {
Set<String> apiAssociatedWithAudit = new HashSet<String>();
apiAssociatedWithAudit.add(RangerAPIList.COUNT_X_ASSETS);
apiAssociatedWithAudit.add(RangerAPIList.GET_X_ASSET);
apiAssociatedWithAudit.add(RangerAPIList.SEARCH_X_ASSETS);
apiAssociatedWithAudit.add(RangerAPIList.COUNT_SERVICES);
apiAssociatedWithAudit.add(RangerAPIList.GET_POLICY_FOR_VERSION_NO);
apiAssociatedWithAudit.add(RangerAPIList.GET_POLICY_FROM_EVENT_TIME);
apiAssociatedWithAudit.add(RangerAPIList.GET_POLICY_VERSION_LIST);
apiAssociatedWithAudit.add(RangerAPIList.GET_PLUGINS_INFO);
apiAssociatedWithAudit.add(RangerAPIList.GET_SERVICE);
apiAssociatedWithAudit.add(RangerAPIList.GET_SERVICE_BY_NAME);
apiAssociatedWithAudit.add(RangerAPIList.GET_SERVICE_DEF);
apiAssociatedWithAudit.add(RangerAPIList.GET_SERVICE_DEF_BY_NAME);
apiAssociatedWithAudit.add(RangerAPIList.GET_SERVICE_DEFS);
apiAssociatedWithAudit.add(RangerAPIList.GET_SERVICES);
apiAssociatedWithAudit.add(RangerAPIList.LOOKUP_RESOURCE);
apiAssociatedWithAudit.add(RangerAPIList.GET_USER_PROFILE_FOR_USER);
apiAssociatedWithAudit.add(RangerAPIList.SEARCH_USERS);
apiAssociatedWithAudit.add(RangerAPIList.COUNT_X_AUDIT_MAPS);
apiAssociatedWithAudit.add(RangerAPIList.COUNT_X_GROUP_GROUPS);
apiAssociatedWithAudit.add(RangerAPIList.COUNT_X_GROUPS);
apiAssociatedWithAudit.add(RangerAPIList.COUNT_X_GROUP_USERS);
apiAssociatedWithAudit.add(RangerAPIList.COUNT_X_PERM_MAPS);
apiAssociatedWithAudit.add(RangerAPIList.COUNT_X_USERS);
apiAssociatedWithAudit.add(RangerAPIList.GET_X_AUDIT_MAP);
apiAssociatedWithAudit.add(RangerAPIList.GET_X_GROUP);
apiAssociatedWithAudit.add(RangerAPIList.GET_X_GROUP_BY_GROUP_NAME);
apiAssociatedWithAudit.add(RangerAPIList.GET_X_GROUP_GROUP);
apiAssociatedWithAudit.add(RangerAPIList.GET_X_GROUP_USER);
apiAssociatedWithAudit.add(RangerAPIList.GET_X_GROUP_USERS);
apiAssociatedWithAudit.add(RangerAPIList.GET_X_PERM_MAP);
apiAssociatedWithAudit.add(RangerAPIList.GET_X_USER);
apiAssociatedWithAudit.add(RangerAPIList.GET_X_USER_BY_USER_NAME);
apiAssociatedWithAudit.add(RangerAPIList.GET_X_USER_GROUPS);
apiAssociatedWithAudit.add(RangerAPIList.SEARCH_X_AUDIT_MAPS);
apiAssociatedWithAudit.add(RangerAPIList.SEARCH_X_GROUP_GROUPS);
apiAssociatedWithAudit.add(RangerAPIList.SEARCH_X_GROUPS);
apiAssociatedWithAudit.add(RangerAPIList.SEARCH_X_GROUP_USERS);
apiAssociatedWithAudit.add(RangerAPIList.SEARCH_X_PERM_MAPS);
apiAssociatedWithAudit.add(RangerAPIList.SEARCH_X_USERS);
apiAssociatedWithAudit.add(RangerAPIList.SECURE_GET_X_GROUP);
apiAssociatedWithAudit.add(RangerAPIList.SECURE_GET_X_USER);
apiAssociatedWithAudit.add(RangerAPIList.GET_X_TRX_LOG);
apiAssociatedWithAudit.add(RangerAPIList.CREATE_X_TRX_LOG);
apiAssociatedWithAudit.add(RangerAPIList.UPDATE_X_TRX_LOG);
apiAssociatedWithAudit.add(RangerAPIList.DELETE_X_TRX_LOG);
apiAssociatedWithAudit.add(RangerAPIList.SEARCH_X_TRX_LOG);
apiAssociatedWithAudit.add(RangerAPIList.COUNT_X_TRX_LOGS);
apiAssociatedWithAudit.add(RangerAPIList.SEARCH_X_ACCESS_AUDITS);
apiAssociatedWithAudit.add(RangerAPIList.COUNT_X_ACCESS_AUDITS);
apiAssociatedWithAudit.add(RangerAPIList.SEARCH_X_POLICY_EXPORT_AUDITS);
apiAssociatedWithAudit.add(RangerAPIList.GET_REPORT_LOGS);
apiAssociatedWithAudit.add(RangerAPIList.GET_TRANSACTION_REPORT);
apiAssociatedWithAudit.add(RangerAPIList.GET_ACCESS_LOGS);
apiAssociatedWithAudit.add(RangerAPIList.GET_AUTH_SESSION);
apiAssociatedWithAudit.add(RangerAPIList.GET_AUTH_SESSIONS);
rangerAPIMappingWithUI.put(TAB_AUDIT, apiAssociatedWithAudit);
for (String api : apiAssociatedWithAudit) {
if (mapApiToTabs.get(api) == null) {
mapApiToTabs.put(api, new HashSet<String>());
}
mapApiToTabs.get(api).add(TAB_AUDIT);
}
}
private void mapResourceBasedPoliciesWithAPIs() {
Set<String> apiAssociatedWithRBPolicies = new HashSet<String>();
apiAssociatedWithRBPolicies.add(RangerAPIList.COUNT_X_ASSETS);
apiAssociatedWithRBPolicies.add(RangerAPIList.CREATE_X_ASSET);
apiAssociatedWithRBPolicies.add(RangerAPIList.DELETE_X_ASSET);
apiAssociatedWithRBPolicies.add(RangerAPIList.GET_X_ASSET);
apiAssociatedWithRBPolicies.add(RangerAPIList.SEARCH_X_ASSETS);
apiAssociatedWithRBPolicies.add(RangerAPIList.TEST_CONFIG);
apiAssociatedWithRBPolicies.add(RangerAPIList.UPDATE_X_ASSET);
apiAssociatedWithRBPolicies.add(RangerAPIList.COUNT_SERVICES);
apiAssociatedWithRBPolicies.add(RangerAPIList.CREATE_SERVICE);
apiAssociatedWithRBPolicies.add(RangerAPIList.CREATE_SERVICE_DEF);
apiAssociatedWithRBPolicies.add(RangerAPIList.DELETE_SERVICE);
apiAssociatedWithRBPolicies.add(RangerAPIList.DELETE_SERVICE_DEF);
apiAssociatedWithRBPolicies.add(RangerAPIList.GET_POLICY_FOR_VERSION_NO);
apiAssociatedWithRBPolicies.add(RangerAPIList.GET_POLICY_FROM_EVENT_TIME);
apiAssociatedWithRBPolicies.add(RangerAPIList.GET_POLICY_VERSION_LIST);
apiAssociatedWithRBPolicies.add(RangerAPIList.GET_SERVICE);
apiAssociatedWithRBPolicies.add(RangerAPIList.GET_SERVICE_BY_NAME);
apiAssociatedWithRBPolicies.add(RangerAPIList.GET_SERVICE_DEF);
apiAssociatedWithRBPolicies.add(RangerAPIList.GET_SERVICE_DEF_BY_NAME);
apiAssociatedWithRBPolicies.add(RangerAPIList.GET_SERVICE_DEFS);
apiAssociatedWithRBPolicies.add(RangerAPIList.GET_SERVICES);
apiAssociatedWithRBPolicies.add(RangerAPIList.LOOKUP_RESOURCE);
apiAssociatedWithRBPolicies.add(RangerAPIList.UPDATE_SERVICE);
apiAssociatedWithRBPolicies.add(RangerAPIList.UPDATE_SERVICE_DEF);
apiAssociatedWithRBPolicies.add(RangerAPIList.VALIDATE_CONFIG);
apiAssociatedWithRBPolicies.add(RangerAPIList.GET_USER_PROFILE_FOR_USER);
apiAssociatedWithRBPolicies.add(RangerAPIList.SEARCH_USERS);
apiAssociatedWithRBPolicies.add(RangerAPIList.COUNT_X_AUDIT_MAPS);
apiAssociatedWithRBPolicies.add(RangerAPIList.COUNT_X_GROUP_GROUPS);
apiAssociatedWithRBPolicies.add(RangerAPIList.COUNT_X_GROUPS);
apiAssociatedWithRBPolicies.add(RangerAPIList.COUNT_X_GROUP_USERS);
apiAssociatedWithRBPolicies.add(RangerAPIList.COUNT_X_PERM_MAPS);
apiAssociatedWithRBPolicies.add(RangerAPIList.COUNT_X_USERS);
apiAssociatedWithRBPolicies.add(RangerAPIList.CREATE_X_AUDIT_MAP);
apiAssociatedWithRBPolicies.add(RangerAPIList.CREATE_X_PERM_MAP);
apiAssociatedWithRBPolicies.add(RangerAPIList.DELETE_X_AUDIT_MAP);
apiAssociatedWithRBPolicies.add(RangerAPIList.DELETE_X_PERM_MAP);
apiAssociatedWithRBPolicies.add(RangerAPIList.GET_X_AUDIT_MAP);
apiAssociatedWithRBPolicies.add(RangerAPIList.GET_X_GROUP);
apiAssociatedWithRBPolicies.add(RangerAPIList.GET_X_GROUP_BY_GROUP_NAME);
apiAssociatedWithRBPolicies.add(RangerAPIList.GET_X_GROUP_GROUP);
apiAssociatedWithRBPolicies.add(RangerAPIList.GET_X_GROUP_USER);
apiAssociatedWithRBPolicies.add(RangerAPIList.GET_X_GROUP_USERS);
apiAssociatedWithRBPolicies.add(RangerAPIList.GET_X_PERM_MAP);
apiAssociatedWithRBPolicies.add(RangerAPIList.GET_X_USER);
apiAssociatedWithRBPolicies.add(RangerAPIList.GET_X_USER_BY_USER_NAME);
apiAssociatedWithRBPolicies.add(RangerAPIList.GET_X_USER_GROUPS);
apiAssociatedWithRBPolicies.add(RangerAPIList.MODIFY_GROUPS_VISIBILITY);
apiAssociatedWithRBPolicies.add(RangerAPIList.MODIFY_USER_ACTIVE_STATUS);
apiAssociatedWithRBPolicies.add(RangerAPIList.MODIFY_USER_VISIBILITY);
apiAssociatedWithRBPolicies.add(RangerAPIList.SEARCH_X_AUDIT_MAPS);
apiAssociatedWithRBPolicies.add(RangerAPIList.SEARCH_X_GROUP_GROUPS);
apiAssociatedWithRBPolicies.add(RangerAPIList.SEARCH_X_GROUPS);
apiAssociatedWithRBPolicies.add(RangerAPIList.SEARCH_X_GROUP_USERS);
apiAssociatedWithRBPolicies.add(RangerAPIList.SEARCH_X_PERM_MAPS);
apiAssociatedWithRBPolicies.add(RangerAPIList.SEARCH_X_USERS);
apiAssociatedWithRBPolicies.add(RangerAPIList.SECURE_GET_X_GROUP);
apiAssociatedWithRBPolicies.add(RangerAPIList.SECURE_GET_X_USER);
apiAssociatedWithRBPolicies.add(RangerAPIList.UPDATE_X_AUDIT_MAP);
apiAssociatedWithRBPolicies.add(RangerAPIList.UPDATE_X_PERM_MAP);
apiAssociatedWithRBPolicies.add(RangerAPIList.CREATE);
apiAssociatedWithRBPolicies.add(RangerAPIList.CREATE_DEFAULT_ACCOUNT_USER);
apiAssociatedWithRBPolicies.add(RangerAPIList.UPDATE);
apiAssociatedWithRBPolicies.add(RangerAPIList.SET_USER_ROLES);
apiAssociatedWithRBPolicies.add(RangerAPIList.DEACTIVATE_USER);
rangerAPIMappingWithUI.put(TAB_RESOURCE_BASED_POLICIES, apiAssociatedWithRBPolicies);
for (String api : apiAssociatedWithRBPolicies) {
if (mapApiToTabs.get(api) == null) {
mapApiToTabs.put(api, new HashSet<String>());
}
mapApiToTabs.get(api).add(TAB_RESOURCE_BASED_POLICIES);
}
}
// * Utility methods start here, for retrieving API-to-UI-tab mapping information *
public Set<String> getAvailableUITabs() {
if (CollectionUtils.isEmpty(tabList)) {
populateAvailableUITabs();
}
return tabList;
}
/**
* @param apiName API name in the format {ClassName}.{apiMethodName}
* @return the set of UI tabs associated with the given API, or null if the API is not mapped
*
* @Note: the apiName passed to this function must strictly follow the format {ClassName}.{apiMethodName}; the API should also be listed in
* RangerAPIList and mapped properly to UI tabs in this class.
*/
public Set<String> getAssociatedTabsWithAPI(String apiName) {
return mapApiToTabs.get(apiName);
}
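// Illustrative usage sketch (not part of the original class): how a caller holding an
// instance of this mapping class might resolve the tabs guarding an API. The API name
// below is only an example of the required {ClassName}.{apiMethodName} format.
//
//   Set<String> tabs = mapping.getAssociatedTabsWithAPI("XUserREST.getXUser");
//   if (tabs != null && tabs.contains(TAB_RESOURCE_BASED_POLICIES)) {
//       // the API is reachable from the Resource Based Policies tab
//   }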
}
|
|
/*******************************************************************************
* Copyright 2016, 2017 vanilladb.org contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package org.vanilladb.core.query.planner.index;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.vanilladb.core.query.algebra.Plan;
import org.vanilladb.core.query.algebra.SelectPlan;
import org.vanilladb.core.query.algebra.TablePlan;
import org.vanilladb.core.query.algebra.UpdateScan;
import org.vanilladb.core.query.parse.CreateIndexData;
import org.vanilladb.core.query.parse.CreateTableData;
import org.vanilladb.core.query.parse.CreateViewData;
import org.vanilladb.core.query.parse.DeleteData;
import org.vanilladb.core.query.parse.DropIndexData;
import org.vanilladb.core.query.parse.DropTableData;
import org.vanilladb.core.query.parse.DropViewData;
import org.vanilladb.core.query.parse.InsertData;
import org.vanilladb.core.query.parse.ModifyData;
import org.vanilladb.core.query.planner.UpdatePlanner;
import org.vanilladb.core.server.VanillaDb;
import org.vanilladb.core.sql.Constant;
import org.vanilladb.core.storage.index.Index;
import org.vanilladb.core.storage.index.SearchKey;
import org.vanilladb.core.storage.metadata.index.IndexInfo;
import org.vanilladb.core.storage.record.RecordId;
import org.vanilladb.core.storage.tx.Transaction;
/**
* A modification of the basic update planner. It dispatches each update
* statement to the corresponding index planner.
*/
public class IndexUpdatePlanner implements UpdatePlanner {
@Override
public int executeInsert(InsertData data, Transaction tx) {
String tblname = data.tableName();
Plan p = new TablePlan(tblname, tx);
// Construct a map from field names to values
Map<String, Constant> fldValMap = new HashMap<String, Constant>();
Iterator<Constant> valIter = data.vals().iterator();
for (String fldname : data.fields()) {
Constant val = valIter.next();
fldValMap.put(fldname, val);
}
// Insert the record into the record file
UpdateScan s = (UpdateScan) p.open();
s.insert();
for (Map.Entry<String, Constant> fldValPair : fldValMap.entrySet()) {
s.setVal(fldValPair.getKey(), fldValPair.getValue());
}
RecordId rid = s.getRecordId();
s.close();
// Insert the record to all corresponding indexes
Set<IndexInfo> indexes = new HashSet<IndexInfo>();
for (String fldname : data.fields()) {
List<IndexInfo> iis = VanillaDb.catalogMgr().getIndexInfo(tblname, fldname, tx);
indexes.addAll(iis);
}
for (IndexInfo ii : indexes) {
Index idx = ii.open(tx);
idx.insert(new SearchKey(ii.fieldNames(), fldValMap), rid, true);
idx.close();
}
VanillaDb.statMgr().countRecordUpdates(data.tableName(), 1);
return 1;
}
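// Illustrative sketch of the insert flow above (not part of the original source;
// "insertData" and "tx" are assumed to come from the VanillaDb parser and the
// transaction manager respectively):
//
//   UpdatePlanner planner = new IndexUpdatePlanner();
//   int inserted = planner.executeInsert(insertData, tx);  // returns 1
//   // The record is written to the record file, and a SearchKey built from the
//   // inserted field values is added to every index covering those fields.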
@Override
public int executeDelete(DeleteData data, Transaction tx) {
String tblName = data.tableName();
TablePlan tp = new TablePlan(tblName, tx);
Plan selectPlan = null;
// Create an IndexSelectPlan if there is a matching index in the predicate
boolean usingIndex = false;
selectPlan = IndexSelector.selectByBestMatchedIndex(tblName, tp, data.pred(), tx);
if (selectPlan == null)
selectPlan = new SelectPlan(tp, data.pred());
else {
selectPlan = new SelectPlan(selectPlan, data.pred());
usingIndex = true;
}
// Retrieve all indexes
List<IndexInfo> allIndexes = new LinkedList<IndexInfo>();
Set<String> indexedFlds = VanillaDb.catalogMgr().getIndexedFields(tblName, tx);
for (String indexedFld : indexedFlds) {
List<IndexInfo> iis = VanillaDb.catalogMgr().getIndexInfo(tblName, indexedFld, tx);
allIndexes.addAll(iis);
}
// Open the scan
UpdateScan s = (UpdateScan) selectPlan.open();
int count = 0;
s.beforeFirst();
while (s.next()) {
RecordId rid = s.getRecordId();
// Delete the record from every index
for (IndexInfo ii : allIndexes) {
// Construct a key-value map
Map<String, Constant> fldValMap = new HashMap<String, Constant>();
for (String fldName : ii.fieldNames())
fldValMap.put(fldName, s.getVal(fldName));
SearchKey key = new SearchKey(ii.fieldNames(), fldValMap);
// Delete from the index
Index index = ii.open(tx);
index.delete(key, rid, true);
index.close();
}
// Delete the record from the record file
s.delete();
/*
 * Re-open the index select scan to ensure the correctness of next().
 * E.g., the index block before deleting the current slot ^ is [^5,5,6].
 * After the deletion it becomes [^5,6]. When next() of the index select
 * scan is called, the current slot pointer moves forward to [5,^6].
 */
if (usingIndex) {
s.close();
s = (UpdateScan) selectPlan.open();
s.beforeFirst();
}
count++;
}
s.close();
VanillaDb.statMgr().countRecordUpdates(data.tableName(), count);
return count;
}
@Override
public int executeModify(ModifyData data, Transaction tx) {
String tblName = data.tableName();
TablePlan tp = new TablePlan(tblName, tx);
Plan selectPlan = null;
// Create an IndexSelectPlan if there is a matching index in the predicate
selectPlan = IndexSelector.selectByBestMatchedIndex(tblName, tp, data.pred(), tx, data.targetFields());
if (selectPlan == null)
selectPlan = new SelectPlan(tp, data.pred());
else
selectPlan = new SelectPlan(selectPlan, data.pred());
// Open all indexes associated with the target fields
Set<Index> modifiedIndexes = new HashSet<Index>();
for (String fieldName : data.targetFields()) {
List<IndexInfo> iiList = VanillaDb.catalogMgr().getIndexInfo(tblName, fieldName, tx);
for (IndexInfo ii : iiList)
modifiedIndexes.add(ii.open(tx));
}
// Open the scan
UpdateScan s = (UpdateScan) selectPlan.open();
s.beforeFirst();
int count = 0;
while (s.next()) {
// Construct a mapping from field names to values
Map<String, Constant> oldValMap = new HashMap<String, Constant>();
Map<String, Constant> newValMap = new HashMap<String, Constant>();
for (String fieldName : data.targetFields()) {
Constant oldVal = s.getVal(fieldName);
Constant newVal = data.newValue(fieldName).evaluate(s);
oldValMap.put(fieldName, oldVal);
newValMap.put(fieldName, newVal);
s.setVal(fieldName, newVal);
}
RecordId rid = s.getRecordId();
// Update the indexes
for (Index index : modifiedIndexes) {
// Construct a SearchKey for the old value
Map<String, Constant> fldValMap = new HashMap<String, Constant>();
for (String fldName : index.getIndexInfo().fieldNames()) {
Constant oldVal = oldValMap.get(fldName);
if (oldVal == null)
oldVal = s.getVal(fldName);
fldValMap.put(fldName, oldVal);
}
SearchKey oldKey = new SearchKey(index.getIndexInfo().fieldNames(), fldValMap);
// Delete the old value from the index
index.delete(oldKey, rid, true);
// Construct a SearchKey for the new value
fldValMap = new HashMap<String, Constant>();
for (String fldName : index.getIndexInfo().fieldNames()) {
Constant newVal = newValMap.get(fldName);
if (newVal == null)
newVal = s.getVal(fldName);
fldValMap.put(fldName, newVal);
}
SearchKey newKey = new SearchKey(index.getIndexInfo().fieldNames(), fldValMap);
// Insert the new value to the index
index.insert(newKey, rid, true);
index.close();
}
count++;
}
// Close opened indexes and the record file
for (Index index : modifiedIndexes)
index.close();
s.close();
VanillaDb.statMgr().countRecordUpdates(data.tableName(), count);
return count;
}
@Override
public int executeCreateTable(CreateTableData data, Transaction tx) {
VanillaDb.catalogMgr().createTable(data.tableName(), data.newSchema(),
tx);
return 0;
}
@Override
public int executeCreateView(CreateViewData data, Transaction tx) {
VanillaDb.catalogMgr().createView(data.viewName(), data.viewDef(), tx);
return 0;
}
@Override
public int executeCreateIndex(CreateIndexData data, Transaction tx) {
VanillaDb.catalogMgr().createIndex(data.indexName(), data.tableName(),
data.fieldNames(), data.indexType(), tx);
return 0;
}
@Override
public int executeDropTable(DropTableData data, Transaction tx) {
VanillaDb.catalogMgr().dropTable(data.tableName(), tx);
return 0;
}
@Override
public int executeDropView(DropViewData data, Transaction tx) {
VanillaDb.catalogMgr().dropView(data.viewName(), tx);
return 0;
}
@Override
public int executeDropIndex(DropIndexData data, Transaction tx) {
VanillaDb.catalogMgr().dropIndex(data.indexName(), tx);
return 0;
}
}
|
|
/*
* Copyright (c) 2008-2018, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.client;
import com.hazelcast.client.test.TestHazelcastFactory;
import com.hazelcast.config.Config;
import com.hazelcast.core.Client;
import com.hazelcast.core.ClientListener;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.HazelcastInstanceNotActiveException;
import com.hazelcast.core.IMap;
import com.hazelcast.core.IQueue;
import com.hazelcast.core.Message;
import com.hazelcast.spi.impl.NodeEngineImpl;
import com.hazelcast.spi.impl.operationparker.impl.OperationParkerImpl;
import com.hazelcast.spi.impl.operationservice.impl.InvocationRegistry;
import com.hazelcast.spi.impl.operationservice.impl.OperationServiceImpl;
import com.hazelcast.spi.properties.GroupProperty;
import com.hazelcast.test.AssertTask;
import com.hazelcast.test.HazelcastParallelClassRunner;
import com.hazelcast.test.HazelcastTestSupport;
import com.hazelcast.test.annotation.ParallelTest;
import com.hazelcast.test.annotation.QuickTest;
import com.hazelcast.topic.ReliableMessageListener;
import org.junit.After;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import java.util.concurrent.CountDownLatch;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
@RunWith(HazelcastParallelClassRunner.class)
@Category({QuickTest.class, ParallelTest.class})
public class ClientDisconnectTest extends HazelcastTestSupport {
private final TestHazelcastFactory hazelcastFactory = new TestHazelcastFactory();
@After
public void cleanup() {
hazelcastFactory.terminateAll();
}
@Test
public void testClientOperationCancelled_whenDisconnected() throws Exception {
Config config = new Config();
config.setProperty(GroupProperty.CLIENT_ENDPOINT_REMOVE_DELAY_SECONDS.getName(), String.valueOf(Integer.MAX_VALUE));
HazelcastInstance hazelcastInstance = hazelcastFactory.newHazelcastInstance(config);
final String queueName = "q";
final HazelcastInstance clientInstance = hazelcastFactory.newHazelcastClient();
final String uuid = clientInstance.getLocalEndpoint().getUuid();
final CountDownLatch clientDisconnectedFromNode = new CountDownLatch(1);
hazelcastInstance.getClientService().addClientListener(new ClientListener() {
@Override
public void clientConnected(Client client) {
}
@Override
public void clientDisconnected(Client client) {
if (client.getUuid().equals(uuid)) {
clientDisconnectedFromNode.countDown();
}
}
});
new Thread(new Runnable() {
@Override
public void run() {
IQueue<Integer> queue = clientInstance.getQueue(queueName);
try {
queue.take();
} catch (InterruptedException e) {
e.printStackTrace();
} catch (HazelcastInstanceNotActiveException e) {
ignore(e);
}
}
}).start();
SECONDS.sleep(2);
clientInstance.shutdown();
assertOpenEventually(clientDisconnectedFromNode);
final IQueue<Integer> queue = hazelcastInstance.getQueue(queueName);
queue.add(1);
// a dead client should not be able to consume an item from the queue
assertTrueAllTheTime(new AssertTask() {
@Override
public void run() throws Exception {
assertEquals(1, queue.size());
}
}, 3);
}
@Test
public void testClientOperationCancelled_whenDisconnected_lock() throws Exception {
Config config = new Config();
config.setProperty(GroupProperty.CLIENT_ENDPOINT_REMOVE_DELAY_SECONDS.getName(), String.valueOf(Integer.MAX_VALUE));
HazelcastInstance hazelcastInstance = hazelcastFactory.newHazelcastInstance(config);
final String name = "m";
final IMap<Object, Object> map = hazelcastInstance.getMap(name);
final String key = "key";
map.lock(key);
final HazelcastInstance clientInstance = hazelcastFactory.newHazelcastClient();
final CountDownLatch clientDisconnectedFromNode = new CountDownLatch(1);
final String uuid = clientInstance.getLocalEndpoint().getUuid();
hazelcastInstance.getClientService().addClientListener(new ClientListener() {
@Override
public void clientConnected(Client client) {
}
@Override
public void clientDisconnected(Client client) {
if (client.getUuid().equals(uuid)) {
clientDisconnectedFromNode.countDown();
}
}
});
new Thread(new Runnable() {
@Override
public void run() {
IMap<Object, Object> clientMap = clientInstance.getMap(name);
try {
clientMap.lock(key);
} catch (Exception e) {
ignore(e);
}
}
}).start();
SECONDS.sleep(2);
clientInstance.shutdown();
assertOpenEventually(clientDisconnectedFromNode);
map.unlock(key);
// a dead client should not be able to acquire the lock
assertTrueAllTheTime(new AssertTask() {
@Override
public void run() throws Exception {
assertFalse(map.isLocked(key));
}
}, 3);
}
@Test
public void testPendingInvocationAndWaitEntryCancelled_whenDisconnected_withLock() {
Config config = new Config();
HazelcastInstance server = hazelcastFactory.newHazelcastInstance(config);
final String name = randomName();
server.getLock(name).lock();
final HazelcastInstance client = hazelcastFactory.newHazelcastClient();
spawn(new Runnable() {
@Override
public void run() {
try {
client.getLock(name).lock();
} catch (Throwable ignored) {
}
}
});
assertNonEmptyPendingInvocationAndWaitSet(server);
client.shutdown();
assertEmptyPendingInvocationAndWaitSet(server);
}
@Test
public void testPendingInvocationAndWaitEntryCancelled_whenDisconnected_withReliableTopic() {
Config config = new Config();
HazelcastInstance server = hazelcastFactory.newHazelcastInstance(config);
HazelcastInstance client = hazelcastFactory.newHazelcastClient();
// ReliableTopic listener registers a blocking invocation
client.getReliableTopic(randomName()).addMessageListener(new NopReliableMessageListener());
assertNonEmptyPendingInvocationAndWaitSet(server);
client.shutdown();
assertEmptyPendingInvocationAndWaitSet(server);
}
private void assertNonEmptyPendingInvocationAndWaitSet(HazelcastInstance server) {
NodeEngineImpl nodeEngine = getNodeEngineImpl(server);
OperationServiceImpl operationService = (OperationServiceImpl) nodeEngine.getOperationService();
final InvocationRegistry invocationRegistry = operationService.getInvocationRegistry();
final OperationParkerImpl operationParker = (OperationParkerImpl) nodeEngine.getOperationParker();
assertTrueEventually(new AssertTask() {
@Override
public void run() {
assertFalse(invocationRegistry.entrySet().isEmpty());
}
});
assertTrueEventually(new AssertTask() {
@Override
public void run() {
assertTrue(operationParker.getTotalParkedOperationCount() > 0);
}
});
}
private void assertEmptyPendingInvocationAndWaitSet(HazelcastInstance server) {
NodeEngineImpl nodeEngine = getNodeEngineImpl(server);
OperationServiceImpl operationService = (OperationServiceImpl) nodeEngine.getOperationService();
final InvocationRegistry invocationRegistry = operationService.getInvocationRegistry();
final OperationParkerImpl operationParker = (OperationParkerImpl) nodeEngine.getOperationParker();
assertTrueEventually(new AssertTask() {
@Override
public void run() {
assertTrue(invocationRegistry.entrySet().isEmpty());
}
});
assertTrueEventually(new AssertTask() {
@Override
public void run() {
assertEquals(0, operationParker.getTotalParkedOperationCount());
}
});
}
private static class NopReliableMessageListener implements ReliableMessageListener<Object> {
@Override
public long retrieveInitialSequence() {
return 0;
}
@Override
public void storeSequence(long sequence) {
}
@Override
public boolean isLossTolerant() {
return false;
}
@Override
public boolean isTerminal(Throwable failure) {
return false;
}
@Override
public void onMessage(Message<Object> message) {
}
}
}
|
|
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.diff.tools.util.side;
import com.intellij.diff.DiffContext;
import com.intellij.diff.actions.ProxyUndoRedoAction;
import com.intellij.diff.actions.impl.FocusOppositePaneAction;
import com.intellij.diff.actions.impl.OpenInEditorWithMouseAction;
import com.intellij.diff.actions.impl.SetEditorSettingsAction;
import com.intellij.diff.contents.DocumentContent;
import com.intellij.diff.requests.ContentDiffRequest;
import com.intellij.diff.requests.DiffRequest;
import com.intellij.diff.tools.holders.EditorHolderFactory;
import com.intellij.diff.tools.holders.TextEditorHolder;
import com.intellij.diff.tools.util.DiffDataKeys;
import com.intellij.diff.tools.util.SyncScrollSupport;
import com.intellij.diff.tools.util.SyncScrollSupport.TwosideSyncScrollSupport;
import com.intellij.diff.tools.util.base.InitialScrollPositionSupport;
import com.intellij.diff.tools.util.base.TextDiffSettingsHolder;
import com.intellij.diff.tools.util.base.TextDiffViewerUtil;
import com.intellij.diff.util.DiffUtil;
import com.intellij.diff.util.LineCol;
import com.intellij.diff.util.Side;
import com.intellij.openapi.actionSystem.AnAction;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.ScrollType;
import com.intellij.openapi.editor.event.DocumentEvent;
import com.intellij.openapi.editor.event.VisibleAreaEvent;
import com.intellij.openapi.editor.event.VisibleAreaListener;
import com.intellij.openapi.editor.ex.EditorEx;
import consulo.util.dataholder.Key;
import com.intellij.pom.Navigatable;
import com.intellij.util.containers.ContainerUtil;
import consulo.logging.Logger;
import consulo.ui.annotation.RequiredUIAccess;
import org.jetbrains.annotations.NonNls;
import javax.annotation.Nonnull;
import javax.swing.*;
import java.util.List;
public abstract class TwosideTextDiffViewer extends TwosideDiffViewer<TextEditorHolder> {
public static final Logger LOG = Logger.getInstance(TwosideTextDiffViewer.class);
@Nonnull
private final List<? extends EditorEx> myEditableEditors;
@javax.annotation.Nullable
private List<? extends EditorEx> myEditors;
@Nonnull
protected final SetEditorSettingsAction myEditorSettingsAction;
@Nonnull
private final MyVisibleAreaListener myVisibleAreaListener = new MyVisibleAreaListener();
@javax.annotation.Nullable
private TwosideSyncScrollSupport mySyncScrollSupport;
public TwosideTextDiffViewer(@Nonnull DiffContext context, @Nonnull ContentDiffRequest request) {
super(context, request, TextEditorHolder.TextEditorHolderFactory.INSTANCE);
new MyFocusOppositePaneAction(true).install(myPanel);
new MyFocusOppositePaneAction(false).install(myPanel);
myEditorSettingsAction = new SetEditorSettingsAction(getTextSettings(), getEditors());
myEditorSettingsAction.applyDefaults();
new MyOpenInEditorWithMouseAction().install(getEditors());
myEditableEditors = TextDiffViewerUtil.getEditableEditors(getEditors());
TextDiffViewerUtil.checkDifferentDocuments(myRequest);
boolean editable1 = DiffUtil.canMakeWritable(getContent1().getDocument());
boolean editable2 = DiffUtil.canMakeWritable(getContent2().getDocument());
if (editable1 ^ editable2) {
ProxyUndoRedoAction.register(getProject(), editable1 ? getEditor1() : getEditor2(), myPanel);
}
}
@Override
@RequiredUIAccess
protected void onInit() {
super.onInit();
installEditorListeners();
}
@Override
@RequiredUIAccess
protected void onDispose() {
destroyEditorListeners();
super.onDispose();
}
@Nonnull
@Override
protected List<TextEditorHolder> createEditorHolders(@Nonnull EditorHolderFactory<TextEditorHolder> factory) {
List<TextEditorHolder> holders = super.createEditorHolders(factory);
boolean[] forceReadOnly = TextDiffViewerUtil.checkForceReadOnly(myContext, myRequest);
for (int i = 0; i < 2; i++) {
if (forceReadOnly[i]) holders.get(i).getEditor().setViewer(true);
}
Side.LEFT.select(holders).getEditor().setVerticalScrollbarOrientation(EditorEx.VERTICAL_SCROLLBAR_LEFT);
for (TextEditorHolder holder : holders) {
DiffUtil.disableBlitting(holder.getEditor());
}
return holders;
}
@Nonnull
@Override
protected List<JComponent> createTitles() {
return DiffUtil.createSyncHeightComponents(DiffUtil.createTextTitles(myRequest, getEditors()));
}
//
// Diff
//
@Nonnull
public TextDiffSettingsHolder.TextDiffSettings getTextSettings() {
return TextDiffViewerUtil.getTextSettings(myContext);
}
@Nonnull
protected List<AnAction> createEditorPopupActions() {
return TextDiffViewerUtil.createEditorPopupActions();
}
@Override
protected void onDocumentChange(@Nonnull DocumentEvent event) {
super.onDocumentChange(event);
myContentPanel.repaintDivider();
}
//
// Listeners
//
@RequiredUIAccess
protected void installEditorListeners() {
new TextDiffViewerUtil.EditorActionsPopup(createEditorPopupActions()).install(getEditors(), myPanel);
new TextDiffViewerUtil.EditorFontSizeSynchronizer(getEditors()).install(this);
getEditor(Side.LEFT).getScrollingModel().addVisibleAreaListener(myVisibleAreaListener);
getEditor(Side.RIGHT).getScrollingModel().addVisibleAreaListener(myVisibleAreaListener);
SyncScrollSupport.SyncScrollable scrollable = getSyncScrollable();
if (scrollable != null) {
mySyncScrollSupport = new TwosideSyncScrollSupport(getEditors(), scrollable);
myEditorSettingsAction.setSyncScrollSupport(mySyncScrollSupport);
}
}
@RequiredUIAccess
protected void destroyEditorListeners() {
getEditor(Side.LEFT).getScrollingModel().removeVisibleAreaListener(myVisibleAreaListener);
getEditor(Side.RIGHT).getScrollingModel().removeVisibleAreaListener(myVisibleAreaListener);
mySyncScrollSupport = null;
}
protected void disableSyncScrollSupport(boolean disable) {
if (mySyncScrollSupport != null) {
if (disable) {
mySyncScrollSupport.enterDisableScrollSection();
}
else {
mySyncScrollSupport.exitDisableScrollSection();
}
}
}
//
// Getters
//
@Nonnull
protected List<? extends DocumentContent> getContents() {
//noinspection unchecked
return (List)myRequest.getContents();
}
@Nonnull
public List<? extends EditorEx> getEditors() {
if (myEditors == null) {
myEditors = ContainerUtil.map(getEditorHolders(), holder -> holder.getEditor());
}
return myEditors;
}
@Nonnull
protected List<? extends EditorEx> getEditableEditors() {
return myEditableEditors;
}
@Nonnull
public EditorEx getCurrentEditor() {
return getEditor(getCurrentSide());
}
@Nonnull
public DocumentContent getCurrentContent() {
return getContent(getCurrentSide());
}
@Nonnull
public EditorEx getEditor1() {
return getEditor(Side.LEFT);
}
@Nonnull
public EditorEx getEditor2() {
return getEditor(Side.RIGHT);
}
@Nonnull
public EditorEx getEditor(@Nonnull Side side) {
return side.select(getEditors());
}
@Nonnull
public DocumentContent getContent(@Nonnull Side side) {
return side.select(getContents());
}
@Nonnull
public DocumentContent getContent1() {
return getContent(Side.LEFT);
}
@Nonnull
public DocumentContent getContent2() {
return getContent(Side.RIGHT);
}
@javax.annotation.Nullable
public TwosideSyncScrollSupport getSyncScrollSupport() {
return mySyncScrollSupport;
}
//
// Abstract
//
@RequiredUIAccess
@Nonnull
protected LineCol transferPosition(@Nonnull Side baseSide, @Nonnull LineCol position) {
if (mySyncScrollSupport == null) return position;
int line = mySyncScrollSupport.getScrollable().transfer(baseSide, position.line);
return new LineCol(line, position.column);
}
@RequiredUIAccess
protected void scrollToLine(@Nonnull Side side, int line) {
DiffUtil.scrollEditor(getEditor(side), line, false);
setCurrentSide(side);
}
@javax.annotation.Nullable
protected abstract SyncScrollSupport.SyncScrollable getSyncScrollable();
//
// Misc
//
@javax.annotation.Nullable
@Override
protected Navigatable getNavigatable() {
Side side = getCurrentSide();
LineCol position = LineCol.fromCaret(getEditor(side));
Navigatable navigatable = getContent(side).getNavigatable(position);
if (navigatable != null) return navigatable;
LineCol otherPosition = transferPosition(side, position);
return getContent(side.other()).getNavigatable(otherPosition);
}
public static boolean canShowRequest(@Nonnull DiffContext context, @Nonnull DiffRequest request) {
return TwosideDiffViewer.canShowRequest(context, request, TextEditorHolder.TextEditorHolderFactory.INSTANCE);
}
//
// Actions
//
private class MyFocusOppositePaneAction extends FocusOppositePaneAction {
public MyFocusOppositePaneAction(boolean scrollToPosition) {
super(scrollToPosition);
}
@Override
public void actionPerformed(@Nonnull AnActionEvent e) {
Side currentSide = getCurrentSide();
Side targetSide = currentSide.other();
EditorEx currentEditor = getEditor(currentSide);
EditorEx targetEditor = getEditor(targetSide);
if (myScrollToPosition) {
LineCol position = transferPosition(currentSide, LineCol.fromCaret(currentEditor));
targetEditor.getCaretModel().moveToOffset(position.toOffset(targetEditor));
}
setCurrentSide(targetSide);
targetEditor.getScrollingModel().scrollToCaret(ScrollType.MAKE_VISIBLE);
DiffUtil.requestFocus(getProject(), getPreferredFocusedComponent());
}
}
private class MyOpenInEditorWithMouseAction extends OpenInEditorWithMouseAction {
@Override
protected Navigatable getNavigatable(@Nonnull Editor editor, int line) {
Side side = Side.fromValue(getEditors(), editor);
if (side == null) return null;
return getContent(side).getNavigatable(new LineCol(line));
}
}
protected class MyToggleAutoScrollAction extends TextDiffViewerUtil.ToggleAutoScrollAction {
public MyToggleAutoScrollAction() {
super(getTextSettings());
}
}
//
// Helpers
//
@javax.annotation.Nullable
@Override
public Object getData(@Nonnull @NonNls Key<?> dataId) {
if (DiffDataKeys.CURRENT_EDITOR == dataId) {
return getCurrentEditor();
}
return super.getData(dataId);
}
private class MyVisibleAreaListener implements VisibleAreaListener {
@Override
public void visibleAreaChanged(VisibleAreaEvent e) {
if (mySyncScrollSupport != null) mySyncScrollSupport.visibleAreaChanged(e);
myContentPanel.repaint();
}
}
protected abstract class MyInitialScrollPositionHelper extends InitialScrollPositionSupport.TwosideInitialScrollHelper {
@Nonnull
@Override
protected List<? extends Editor> getEditors() {
return TwosideTextDiffViewer.this.getEditors();
}
@Override
protected void disableSyncScroll(boolean value) {
disableSyncScrollSupport(value);
}
@Override
protected boolean doScrollToLine() {
if (myScrollToLine == null) return false;
scrollToLine(myScrollToLine.first, myScrollToLine.second);
return true;
}
}
}
|
|
package se.culvertsoft.mgen.javapack.serialization;
import java.io.IOException;
import se.culvertsoft.mgen.api.model.*;
import se.culvertsoft.mgen.javapack.classes.ClassRegistryBase;
import se.culvertsoft.mgen.javapack.classes.MGenBase;
import se.culvertsoft.mgen.javapack.exceptions.SerializationException;
/**
* Warning: EXPERIMENTAL. The API of this class may change significantly.
*
* A command line parser generator. It takes an MGen object class and generates
* a statically typed command line argument parser from it.
*
* It works by converting standard formatted command line arguments to Json, and
* then handing off the parsing to the mgen JsonReader.
*/
public class CommandLineArgParser<T extends MGenBase> {
private final Class<T> m_cls;
private final ClassRegistryBase m_classRegistry;
private final Field[] m_fields;
private final JsonReader m_jsonReader;
private final StringBuilder m_builder = new StringBuilder();
private int m_n = 0;
/**
* Creates a new command line argument parser.
*
* @param cls
* The MGen object class to create a parser for
*
* @param classRegistry
* The class registry in which the class provided is registered
*/
public CommandLineArgParser(final Class<T> cls, final ClassRegistryBase classRegistry) {
m_cls = cls;
m_classRegistry = classRegistry;
m_fields = newInstance()._fields();
m_jsonReader = new JsonReader(m_classRegistry);
}
/**
* Parses command line arguments into an MGen object of previously (in
* constructor) specified type. It works by converting standard formatted
* command line arguments to Json, and then handing off the parsing to the
* mgen JsonReader.
*
* @param args
* The command line arguments
*
* @return The parsed object
*
* @throws IOException
* Compatibility layer with the underlying JsonReader, which may
* throw IOExceptions when the underlying data input stream
* behaves unexpectedly (e.g. reaches EOF before expected).
*/
public T parse(final String[] args) throws IOException {
m_builder.setLength(0);
m_builder.append("{");
m_n = 0;
for (int i = 0; i < args.length; i++) {
final Field field = findField(prune(args[i]));
final String value = i + 1 < args.length ? prune(args[i + 1]) : null;
if (field.typ() == BoolType.INSTANCE) {
addField(field, "true");
} else {
if (value == null)
throw new SerializationException("No value provided for field " + field);
addField(field, value);
i++;
}
}
m_builder.append("}");
return m_jsonReader.readObject(m_builder.toString(), m_cls);
}
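// Illustrative usage sketch (not part of the original file; MyConfig and MyClassRegistry
// stand in for a generated MGen object class and its generated class registry):
//
//   CommandLineArgParser<MyConfig> parser =
//       new CommandLineArgParser<MyConfig>(MyConfig.class, new MyClassRegistry());
//   MyConfig cfg = parser.parse(new String[] { "-host", "localhost", "-verbose" });
//   // "-verbose" matches a bool field and consumes no value; "-host" consumes "localhost".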
/**
* Convenience method for instantiating MGen object classes without checked
* exceptions.
*
* @return The instantiated MGen object
*/
private T newInstance() {
try {
return m_cls.newInstance();
} catch (final Exception e) {
throw new SerializationException("Unable to create instance of class " + m_cls, e);
}
}
/**
* Convenience method for removing quotes around a string.
*
* @param txtIn
* The text to remove quotes from.
*
* @return The unquoted string.
*/
private String unQuote(final String txtIn) {
String src = txtIn.trim();
if (src.startsWith("\"")) {
src = src.substring(1);
}
if (src.endsWith("\"")) {
src = src.substring(0, src.length() - 1);
}
return src;
}
/**
* Internal helper method for pruning strings. Removes any leading '-'s and
* quotes.
*
* @param txt
* The string to prune
*
* @return The pruned string
*/
private String prune(final String txt) {
return removeMinuses(unQuote(removeMinuses(txt.trim())).trim());
}
/**
* Internal helper method for removing leading '-'s from a string
*
* @param txt
* The string to prune
*
* @return The pruned string
*/
private String removeMinuses(String txt) {
while (txt.startsWith("-")) {
txt = txt.substring(1);
}
return txt;
}
/**
* Internal helper method used while building the JSON representation of the
* command line arguments.
*
* @param field
* The field to add
*
* @param value
* The value of the field
*/
private void addField(final Field field, final String value) {
if (m_n > 0)
m_builder.append(", ");
m_builder.append(quote(field.name()) + ": ");
switch (field.typ().typeEnum()) {
case STRING:
case ENUM:
m_builder.append(quote(value));
break;
default:
m_builder.append(value);
break;
}
m_n++;
}
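// Worked example of the JSON produced by parse()/addField() (illustrative; assumes a
// type with a STRING field "host" and an integer field "port"):
//
//   args:  -host localhost -port 8080
//   JSON:  {"host": "localhost", "port": 8080}
//
// String and enum values are quoted; all other values are appended verbatim.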
/**
* Internal helper function for finding a field in a class.
*
* @param txt
* The field name or short name (first letter of the field name)
*
* @return The field
*
* @throws SerializationException
* If the field was not found
*/
private Field findField(final String txt) {
for (final Field f : m_fields) {
if (f.name().startsWith(txt)) {
return f;
}
}
throw new SerializationException("Could not find any field by name " + txt + " in type "
+ m_cls);
}
/**
* Internal helper method for quoting a string
*
* @param txt
* The string to quote
*
* @return The quoted string
*/
private String quote(final String txt) {
return '"' + txt + '"';
}
}
|
|
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package mobemu.trace;
import java.util.ArrayList;
import java.util.List;
/**
* Class containing various statistics about a mobility trace.
*
* @author Radu
*/
public class TraceStats {
/**
* Average contact time.
*/
public double averageContactTime;
/**
* Average inter-contact time.
*/
public double averageInterContactTime;
/**
* Average any-contact time.
*/
public double averageAnyContactTime;
/**
* Average inter-any-contact time.
*/
public double averageInterAnyContactTime;
/**
* Trace the stats are computed for.
*/
private final Trace trace;
/**
* Number of nodes in the trace.
*/
private final int nodesNumber;
/**
* Constructs a {@code TraceStats} object.
*
* @param parser parser that these stats are computed for
*/
public TraceStats(Parser parser) {
trace = parser.getTraceData();
nodesNumber = parser.getNodesNumber();
computeStats();
}
/**
* Computes various statistics for the given trace.
*/
private void computeStats() {
computeAverageContactTime();
computeAverageInterContactTime();
computeAverageAnyContactTime();
computeAverageInterAnyContactTime();
}
/**
* Computes and prints the average contact time.
*/
private void computeAverageContactTime() {
int[][] contacts = new int[nodesNumber][nodesNumber];
double[][] contactTimes = new double[nodesNumber][nodesNumber];
int traceSize = trace.getContactsCount();
for (int i = 0; i < traceSize; i++) {
Contact contact = trace.getContactAt(i);
contacts[contact.getObserver()][contact.getObserved()]++;
contactTimes[contact.getObserver()][contact.getObserved()] += (contact.getEnd() - contact.getStart());
}
int count = 0;
averageContactTime = 0;
for (int i = 0; i < nodesNumber; i++) {
for (int j = 0; j < nodesNumber; j++) {
if (contacts[i][j] > 0) {
count++;
averageContactTime += contactTimes[i][j] / contacts[i][j];
}
}
}
averageContactTime /= count;
System.out.println("Average contact time: " + averageContactTime);
}
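// Worked example (illustrative): if node pair (0,1) has contacts lasting 10 and 20 time
// units and pair (0,2) has a single contact lasting 30, the per-pair averages are 15 and
// 30, so averageContactTime = (15 + 30) / 2 = 22.5. The result is the mean of per-pair
// averages, not the mean over all individual contacts.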
/**
* Computes and prints the average inter-contact time.
*/
private void computeAverageInterContactTime() {
// finish time of the last contact between two nodes
long[][] finishTimes = new long[nodesNumber][nodesNumber];
int[][] contacts = new int[nodesNumber][nodesNumber];
double[][] interContactTimes = new double[nodesNumber][nodesNumber];
int traceSize = trace.getContactsCount();
for (int i = 0; i < traceSize; i++) {
Contact contact = trace.getContactAt(i);
int observer = contact.getObserver();
int observed = contact.getObserved();
if (finishTimes[observer][observed] != 0) {
contacts[observer][observed]++;
interContactTimes[observer][observed] += contact.getStart() - finishTimes[observer][observed];
}
finishTimes[observer][observed] = contact.getEnd();
}
int count = 0;
averageInterContactTime = 0;
for (int i = 0; i < nodesNumber; i++) {
for (int j = 0; j < nodesNumber; j++) {
if (contacts[i][j] > 0) {
count++;
averageInterContactTime += interContactTimes[i][j] / contacts[i][j];
}
}
}
averageInterContactTime /= count;
System.out.println("Average inter-contact time: " + averageInterContactTime);
}
/**
* Computes and prints the average any-contact time.
*/
private void computeAverageAnyContactTime() {
averageAnyContactTime = 0.0;
int count = 0;
for (int i = 0; i < nodesNumber; i++) {
List<Contact> contacts = new ArrayList<>();
int traceSize = trace.getContactsCount();
for (int j = 0; j < traceSize; j++) {
Contact contact = trace.getContactAt(j);
if (contact.getObserver() == i) {
contacts.add(contact);
}
}
if (contacts.isEmpty()) {
continue;
}
long start = contacts.get(0).getStart();
long end = contacts.get(0).getEnd();
int localCount = 0;
double localAnyContactTime = 0;
for (Contact contact : contacts) {
if (contact.getStart() >= start && contact.getStart() <= end) {
end = Math.max(end, contact.getEnd());
} else {
// contact just finished, log it and reset
localCount++;
localAnyContactTime += end - start;
start = contact.getStart();
end = contact.getEnd();
}
}
localCount++;
localAnyContactTime += end - start;
localAnyContactTime /= localCount;
averageAnyContactTime += localAnyContactTime;
count++;
}
averageAnyContactTime /= count;
System.out.println("Average any contact time: " + averageAnyContactTime);
}
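// Worked example (illustrative): for an observer with contacts [0,10], [5,20] and [30,40],
// the overlapping intervals merge into [0,20] and [30,40], giving any-contact durations of
// 20 and 10 and a per-node average of 15. averageAnyContactTime is the mean of these
// per-node averages over all nodes that have at least one contact.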
/**
* Computes and prints the average inter-any-contact time.
*/
private void computeAverageInterAnyContactTime() {
averageInterAnyContactTime = 0.0;
int count = 0;
for (int i = 0; i < nodesNumber; i++) {
List<Contact> contacts = new ArrayList<>();
int traceSize = trace.getContactsCount();
for (int j = 0; j < traceSize; j++) {
Contact contact = trace.getContactAt(j);
if (contact.getObserver() == i) {
contacts.add(contact);
}
}
if (contacts.size() <= 1) {
continue;
}
long end = contacts.get(0).getEnd();
int localCount = 0;
double localInterAnyContactTime = 0;
contacts.remove(0);
for (Contact contact : contacts) {
localCount++;
localInterAnyContactTime += contact.getStart() - end;
end = contact.getEnd();
}
localInterAnyContactTime /= localCount;
averageInterAnyContactTime += localInterAnyContactTime;
count++;
}
averageInterAnyContactTime /= count;
System.out.println("Average inter-any contact time: " + averageInterAnyContactTime);
}
}
|
|
/*
Copyright 1995-2013 Esri
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
For additional information, contact:
Environmental Systems Research Institute, Inc.
Attn: Contracts Dept
380 New York Street
Redlands, California, USA 92373
email: [email protected]
*/
package com.esri.core.geometry;
import java.util.ArrayList;
class OperatorIntersectionCursor extends GeometryCursor {
GeometryCursor m_inputGeoms;
GeometryCursor m_smallCursor;
ProgressTracker m_progress_tracker;
SpatialReference m_spatial_reference;
Geometry m_geomIntersector;
Geometry m_geomIntersectorEmptyGeom; // holds an empty geometry of the intersector type
int m_geomIntersectorType;
int m_currentGeomType;
int m_index;
int m_dimensionMask;
boolean m_bEmpty;
OperatorIntersectionCursor(GeometryCursor inputGeoms,
GeometryCursor geomIntersector, SpatialReference sr,
ProgressTracker progress_tracker, int dimensionMask) {
m_bEmpty = geomIntersector == null;
m_index = -1;
m_inputGeoms = inputGeoms;
m_spatial_reference = sr;
m_geomIntersector = geomIntersector.next();
m_geomIntersectorType = m_geomIntersector.getType().value();
m_currentGeomType = Geometry.Type.Unknown.value();
m_progress_tracker = progress_tracker;
m_dimensionMask = dimensionMask;
// The dimension mask can be -1, for the default behavior, or a value between 1 and 7.
if (m_dimensionMask != -1
&& (m_dimensionMask <= 0 || m_dimensionMask > 7))
throw new IllegalArgumentException("bad dimension mask");
}
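// Dimension-mask note (derived from prepareVector_ below, added for illustration): bit 0
// (value 1) requests point output, bit 1 (value 2) polyline output and bit 2 (value 4)
// polygon output. For example, a mask of 6 asks the cursor to return only the polyline
// and polygon parts of each intersection, while -1 keeps the default behavior of a single
// result geometry per input geometry.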
@Override
public Geometry next() {
if (m_bEmpty)
return null;
Geometry geom;
if (m_smallCursor != null) { // when a dimension mask is used, we produce a small cursor per input geometry
geom = m_smallCursor.next();
if (geom != null)
return geom;
else
m_smallCursor = null;// done with the small cursor
}
while ((geom = m_inputGeoms.next()) != null) {
m_index = m_inputGeoms.getGeometryID();
if (m_dimensionMask == -1) {
Geometry resGeom = intersect(geom);
assert (resGeom != null);
return resGeom;
} else {
m_smallCursor = intersectEx(geom);
Geometry resGeom = m_smallCursor.next();
assert (resGeom != null);
return resGeom;
}
}
return null;
}
@Override
public int getGeometryID() {
return m_index;
}
Geometry intersect(Geometry input_geom) {
Geometry dst_geom = tryNativeImplementation_(input_geom);
if (dst_geom != null)
return dst_geom;
Envelope2D commonExtent = InternalUtils.getMergedExtent(
m_geomIntersector, input_geom);
// return Topological_operations::intersection(input_geom,
// m_geomIntersector, m_spatial_reference, m_progress_tracker);
// Preprocess geometries to be clipped to the extent of intersection to
// get rid of extra segments.
double tol = 0;
Envelope2D env = new Envelope2D();
m_geomIntersector.queryEnvelope2D(env);
Envelope2D env1 = new Envelope2D();
input_geom.queryEnvelope2D(env1);
env.intersect(env1);
assert (!env.isEmpty());
double t = InternalUtils.calculateToleranceFromGeometry(
m_spatial_reference, commonExtent, true) * 10;
env.inflate(10 * t, 10 * t);
Geometry clippedIntersector = Clipper.clip(m_geomIntersector, env, tol,
0.0);
Geometry clippedInputGeom = Clipper.clip(input_geom, env, tol, 0.0);
// perform the clip
return TopologicalOperations.intersection(clippedInputGeom,
clippedIntersector, m_spatial_reference, m_progress_tracker);
}
// Processes the result vector to ensure the output contains only geometries
// of the dimensions indicated by the dimensionMask
GeometryCursor prepareVector_(VertexDescription descr, int dimensionMask,
Geometry[] res_vec) {
int inext = 0;
if ((dimensionMask & 1) != 0) {
if (res_vec[0] == null)
res_vec[0] = new MultiPoint(descr);
inext++;
} else {
for (int i = 0; i < res_vec.length - 1; i++)
res_vec[i] = res_vec[i + 1];
}
if ((dimensionMask & 2) != 0) {
if (res_vec[inext] == null)
res_vec[inext] = new Polyline(descr);
inext++;
} else {
for (int i = inext; i < res_vec.length - 1; i++)
res_vec[i] = res_vec[i + 1];
}
if ((dimensionMask & 4) != 0) {
if (res_vec[inext] == null)
res_vec[inext] = new Polygon(descr);
inext++;
} else {
for (int i = inext; i < res_vec.length - 1; i++)
res_vec[i] = res_vec[i + 1];
}
if (inext != 3) {
Geometry[] r = new Geometry[inext];
for (int i = 0; i < inext; i++)
r[i] = res_vec[i];
return new SimpleGeometryCursor(r);
} else {
return new SimpleGeometryCursor(res_vec);
}
}
GeometryCursor intersectEx(Geometry input_geom) {
assert (m_dimensionMask != -1);
Geometry dst_geom = tryNativeImplementation_(input_geom);
if (dst_geom != null) {
Geometry[] res_vec = new Geometry[3];
res_vec[dst_geom.getDimension()] = dst_geom;
return prepareVector_(input_geom.getDescription(), m_dimensionMask,
res_vec);
}
Envelope2D commonExtent = InternalUtils.getMergedExtent(
m_geomIntersector, input_geom);
// Preprocess geometries to be clipped to the extent of intersection to
// get rid of extra segments.
double tol = 0;
Envelope2D env = new Envelope2D();
m_geomIntersector.queryEnvelope2D(env);
Envelope2D env1 = new Envelope2D();
input_geom.queryEnvelope2D(env1);
env.intersect(env1);
assert (!env.isEmpty());
double t = InternalUtils.calculateToleranceFromGeometry(
m_spatial_reference, commonExtent, true) * 10;
env.inflate(10 * t, 10 * t);
Geometry clippedIntersector = Clipper.clip(m_geomIntersector, env, tol,
0.0);
Geometry clippedInputGeom = Clipper.clip(input_geom, env, tol, 0.0);
// perform the clip
Geometry[] res_vec;
res_vec = TopologicalOperations.intersectionEx(clippedInputGeom,
clippedIntersector, m_spatial_reference, m_progress_tracker);
return prepareVector_(input_geom.getDescription(), m_dimensionMask,
res_vec);
}
Geometry tryNativeImplementation_(Geometry input_geom) {
// A note on attributes:
// 1. The geometry with lower dimension wins in regard to the
// attributes.
// 2. If the dimensions are the same, the input_geometry attributes win.
// 3. The exception to the 2. is when the input is an Envelope, and the
// intersector is a polygon, then the intersector wins.
// A note on the tolerance:
// This operator performs a simple intersection operation. Should it use
// the tolerance?
// Example: Point is intersected by the envelope.
// If it is slightly outside of the envelope, should we still return it
// if it is closer than the tolerance?
// Should we do crack and cluster and snap the point coordinates to the
// envelope boundary?
//
// Consider floating point arithmetics approach. When you compare
// doubles, you should use an epsilon (equals means ::fabs(a - b) <
// eps), however when you add/subtract, etc them, you do not use
// epsilon.
// Shouldn't we do same here? Relational operators use tolerance, but
// the action operators don't.
Envelope2D mergedExtent = InternalUtils.getMergedExtent(input_geom,
m_geomIntersector);
double tolerance = InternalUtils.calculateToleranceFromGeometry(
m_spatial_reference, mergedExtent, false);
int gtInput = input_geom.getType().value();
boolean bInputEmpty = input_geom.isEmpty();
boolean bGeomIntersectorEmpty = m_geomIntersector.isEmpty();
boolean bResultIsEmpty = bInputEmpty || bGeomIntersectorEmpty;
if (!bResultIsEmpty) {// test envelopes
Envelope2D env2D1 = new Envelope2D();
input_geom.queryEnvelope2D(env2D1);
Envelope2D env2D2 = new Envelope2D();
m_geomIntersector.queryEnvelope2D(env2D2);
bResultIsEmpty = !env2D1.isIntersecting(env2D2);
}
if (!bResultIsEmpty) {// try accelerated test
int res = OperatorInternalRelationUtils
.quickTest2D_Accelerated_DisjointOrContains(
m_geomIntersector, input_geom, tolerance);
if (res == OperatorInternalRelationUtils.Relation.Disjoint) {
// disjoint
bResultIsEmpty = true;
} else if ((res & OperatorInternalRelationUtils.Relation.Within) != 0) {
// the intersector is within the input_geom
// TODO: assign input_geom attributes first
return m_geomIntersector;
} else if ((res & OperatorInternalRelationUtils.Relation.Contains) != 0) {
// the intersector contains input_geom
return input_geom;
}
}
if (bResultIsEmpty) {
// When one or both geometries are empty, we need to return an empty geometry.
// Here we do that and also ensure the type is correct: the geometry of lower
// dimension needs to be returned. Also, for Point vs MultiPoint, an empty
// Point needs to be returned.
int dim1 = Geometry.getDimensionFromType(gtInput);
int dim2 = Geometry.getDimensionFromType(m_geomIntersectorType);
if (dim1 < dim2)
return returnEmpty_(input_geom, bInputEmpty);
else if (dim1 > dim2)
return returnEmptyIntersector_();
else if (dim1 == 0) {
if (gtInput == Geometry.GeometryType.MultiPoint
&& m_geomIntersectorType == Geometry.GeometryType.Point) {
// Point vs MultiPoint needs special treatment to ensure a Point is always returned.
return returnEmptyIntersector_();
} else
// Both input and intersector have the same gtype, or the input is a Point.
return returnEmpty_(input_geom, bInputEmpty);
} else
return returnEmpty_(input_geom, bInputEmpty);
}
// Note: No empty geometries after this point!
// Warning: Do not try clip for polylines and polygons.
// Try clip of Envelope with Envelope.
if ((m_dimensionMask == -1 || m_dimensionMask == (1 << 2))
&& gtInput == Geometry.GeometryType.Envelope
&& m_geomIntersectorType == Geometry.GeometryType.Envelope) {
Envelope env1 = (Envelope) input_geom;
Envelope env2 = (Envelope) m_geomIntersector;
Envelope2D env2D_1 = new Envelope2D();
env1.queryEnvelope2D(env2D_1);
Envelope2D env2D_2 = new Envelope2D();
env2.queryEnvelope2D(env2D_2);
env2D_1.intersect(env2D_2);
Envelope result_env = new Envelope();
env1.copyTo(result_env);
result_env.setEnvelope2D(env2D_1);
return result_env;
}
// Use clip for Point and Multi_point with Envelope
if ((gtInput == Geometry.GeometryType.Envelope && Geometry
.getDimensionFromType(m_geomIntersectorType) == 0)
|| (m_geomIntersectorType == Geometry.GeometryType.Envelope && Geometry
.getDimensionFromType(gtInput) == 0)) {
Envelope env = gtInput == Geometry.GeometryType.Envelope ? (Envelope) input_geom
: (Envelope) m_geomIntersector;
Geometry other = gtInput == Geometry.GeometryType.Envelope ? m_geomIntersector
: input_geom;
Envelope2D env_2D = new Envelope2D();
env.queryEnvelope2D(env_2D);
return Clipper.clip(other, env_2D, tolerance, 0);
}
if ((Geometry.getDimensionFromType(gtInput) == 0 && Geometry
.getDimensionFromType(m_geomIntersectorType) > 0)
|| (Geometry.getDimensionFromType(gtInput) > 0 && Geometry
.getDimensionFromType(m_geomIntersectorType) == 0)) {
// multipoint intersection
double tolerance1 = InternalUtils.calculateToleranceFromGeometry(
m_spatial_reference, input_geom, false);
if (gtInput == Geometry.GeometryType.MultiPoint)
return TopologicalOperations.intersection(
(MultiPoint) input_geom, m_geomIntersector, tolerance1);
if (gtInput == Geometry.GeometryType.Point)
return TopologicalOperations.intersection((Point) input_geom,
m_geomIntersector, tolerance1);
if (m_geomIntersectorType == Geometry.GeometryType.MultiPoint)
return TopologicalOperations.intersection(
(MultiPoint) m_geomIntersector, input_geom, tolerance1);
if (m_geomIntersectorType == Geometry.GeometryType.Point)
return TopologicalOperations.intersection(
(Point) m_geomIntersector, input_geom, tolerance1);
throw new GeometryException("internal error");
}
// Try Polyline vs Polygon
if ((m_dimensionMask == -1 || m_dimensionMask == (1 << 1))
&& (gtInput == Geometry.GeometryType.Polyline)
&& (m_geomIntersectorType == Geometry.GeometryType.Polygon)) {
return tryFastIntersectPolylinePolygon_((Polyline) (input_geom),
(Polygon) (m_geomIntersector));
}
// Try Polygon vs Polyline
if ((m_dimensionMask == -1 || m_dimensionMask == (1 << 1))
&& (gtInput == Geometry.GeometryType.Polygon)
&& (m_geomIntersectorType == Geometry.GeometryType.Polyline)) {
return tryFastIntersectPolylinePolygon_(
(Polyline) (m_geomIntersector), (Polygon) (input_geom));
}
return null;
}
Geometry tryFastIntersectPolylinePolygon_(Polyline polyline, Polygon polygon) {
MultiPathImpl polylineImpl = (MultiPathImpl) polyline._getImpl();
MultiPathImpl polygonImpl = (MultiPathImpl) polygon._getImpl();
double tolerance = InternalUtils.calculateToleranceFromGeometry(
m_spatial_reference, polygon, false);
Envelope2D clipEnvelope = new Envelope2D();
{
polygonImpl.queryEnvelope2D(clipEnvelope);
Envelope2D env1 = new Envelope2D();
polylineImpl.queryEnvelope2D(env1);
clipEnvelope.intersect(env1);
assert (!clipEnvelope.isEmpty());
}
clipEnvelope.inflate(10 * tolerance, 10 * tolerance);
if (true) {
double tol = 0;
Geometry clippedPolyline = Clipper.clip(polyline, clipEnvelope,
tol, 0.0);
polyline = (Polyline) clippedPolyline;
polylineImpl = (MultiPathImpl) polyline._getImpl();
}
AttributeStreamOfInt32 clipResult = new AttributeStreamOfInt32(0);
int unresolvedSegments = -1;
GeometryAccelerators accel = polygonImpl._getAccelerators();
if (accel != null) {
RasterizedGeometry2D rgeom = accel.getRasterizedGeometry();
if (rgeom != null) {
unresolvedSegments = 0;
clipResult.reserve(polylineImpl.getPointCount()
+ polylineImpl.getPathCount());
Envelope2D seg_env = new Envelope2D();
SegmentIteratorImpl iter = polylineImpl.querySegmentIterator();
while (iter.nextPath()) {
while (iter.hasNextSegment()) {
Segment seg = iter.nextSegment();
seg.queryEnvelope2D(seg_env);
RasterizedGeometry2D.HitType hit = rgeom
.queryEnvelopeInGeometry(seg_env);
if (hit == RasterizedGeometry2D.HitType.Inside) {
clipResult.add(1);
} else if (hit == RasterizedGeometry2D.HitType.Outside) {
clipResult.add(0);
} else {
clipResult.add(-1);
unresolvedSegments++;
}
}
}
}
}
if (polygon.getPointCount() > 5) {
double tol = 0;
Geometry clippedPolygon = Clipper.clip(polygon, clipEnvelope, tol,
0.0);
polygon = (Polygon) clippedPolygon;
polygonImpl = (MultiPathImpl) polygon._getImpl();
}
if (unresolvedSegments < 0) {
unresolvedSegments = polylineImpl.getSegmentCount();
}
// Some heuristics to decide if it makes sense to go with fast intersect
// vs going with the regular planesweep.
double totalPoints = (double) (polylineImpl.getPointCount() + polygonImpl
.getPointCount());
double thisAlgorithmComplexity = ((double) unresolvedSegments * polygonImpl
.getPointCount());// assume the worst case.
double planesweepComplexity = Math.log(totalPoints) * totalPoints;
double empiricConstantFactorPlaneSweep = 4;
if (thisAlgorithmComplexity > planesweepComplexity
* empiricConstantFactorPlaneSweep) {
// Based on the number of input points, we deduced that the
// planesweep performance should be better than the brute force
// performance.
return null; // resort to planesweep if quadtree does not help
}
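// Worked example of the heuristic above (illustrative numbers): roughly 1,000 unresolved
// segments from a 1,000-point polyline against a 500-point polygon gives
// thisAlgorithmComplexity = 1,000 * 500 = 500,000, while totalPoints = 1,500 gives
// planesweepComplexity ~= ln(1500) * 1500 ~= 10,970 and a threshold of ~43,900 after the
// factor of 4. Since 500,000 exceeds the threshold, the check above returns null and the
// caller falls back to the generic plane-sweep intersection.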
QuadTreeImpl polygonQuadTree = null;
SegmentIteratorImpl polygonIter = polygonImpl.querySegmentIterator();
// Some logic to decide if it makes sense to build a quadtree on the
// polygon segments
if (accel != null && accel.getQuadTree() != null)
polygonQuadTree = accel.getQuadTree();
if (polygonQuadTree == null && polygonImpl.getPointCount() > 20) {
polygonQuadTree = InternalUtils.buildQuadTree(polygonImpl);
}
Polyline result_polyline = (Polyline) polyline.createInstance();
MultiPathImpl resultPolylineImpl = (MultiPathImpl) result_polyline
._getImpl();
QuadTreeImpl.QuadTreeIteratorImpl qIter = null;
SegmentIteratorImpl polylineIter = polylineImpl.querySegmentIterator();
double[] params = new double[9];
AttributeStreamOfDbl intersections = new AttributeStreamOfDbl(0);
SegmentBuffer segmentBuffer = new SegmentBuffer();
int start_index = -1;
int inCount = 0;
int segIndex = 0;
boolean bOptimized = clipResult.size() > 0;
// The algorithm works as follows:
// Loop through all the segments of the polyline.
// For each polyline segment, intersect it with each of the polygon segments.
// If no intersections are found, then:
//   if the polyline segment is completely inside, it is added to the result polyline;
//   if it is outside, it is thrown out.
// If it intersects, cut the polyline segment into pieces and test each part of the
// intersected result. The cut pieces will either have one point inside, one point
// outside, or the middle point inside/outside.
//
int polylinePathIndex = -1;
while (polylineIter.nextPath()) {
polylinePathIndex = polylineIter.getPathIndex();
int stateNewPath = 0;
int stateAddSegment = 1;
int stateManySegments = 2;
int stateManySegmentsContinuePath = 2;
int stateManySegmentsNewPath = 3;
int state = stateNewPath;
start_index = -1;
inCount = 0;
while (polylineIter.hasNextSegment()) {
int clipStatus = bOptimized ? (int) clipResult.get(segIndex)
: -1;
segIndex++;
Segment polylineSeg = polylineIter.nextSegment();
if (clipStatus < 0) {
assert (clipStatus == -1);
// Analyse polyline segment for intersection with the
// polygon.
if (polygonQuadTree != null) {
if (qIter == null) {
qIter = polygonQuadTree.getIterator(polylineSeg,
tolerance);
} else {
qIter.resetIterator(polylineSeg, tolerance);
}
int path_index = -1;
for (int ind = qIter.next(); ind != -1; ind = qIter
.next()) {
polygonIter.resetToVertex(polygonQuadTree
.getElement(ind)); // path_index
path_index = polygonIter.getPathIndex();
Segment polygonSeg = polygonIter.nextSegment();
// intersect polylineSeg and polygonSeg.
int count = polylineSeg.intersect(polygonSeg, null,
params, null, tolerance);
for (int i = 0; i < count; i++)
intersections.add(params[i]);
}
} else {// no quadtree built
polygonIter.resetToFirstPath();
while (polygonIter.nextPath()) {
while (polygonIter.hasNextSegment()) {
Segment polygonSeg = polygonIter.nextSegment();
// intersect polylineSeg and polygonSeg.
int count = polylineSeg.intersect(polygonSeg,
null, params, null, tolerance);
for (int i = 0; i < count; i++)
intersections.add(params[i]);
}
}
}
if (intersections.size() > 0) { // intersections detected
intersections.sort(0, intersections.size()); // std::sort(intersections.begin(), intersections.end());
double t0 = 0;
intersections.add(1.0);
int status = -1;
for (int i = 0, n = intersections.size(); i < n; i++) {
double t = intersections.get(i);
if (t == t0) {
continue;
}
boolean bWholeSegment = false;
Segment resSeg;
if (t0 != 0 || t != 1.0) {
polylineSeg.cut(t0, t, segmentBuffer);
resSeg = segmentBuffer.get();
} else {
resSeg = polylineSeg;
bWholeSegment = true;
}
if (state >= stateManySegments) {
resultPolylineImpl.addSegmentsFromPath(
polylineImpl, polylinePathIndex,
start_index, inCount,
state == stateManySegmentsNewPath);
if (analyseClipSegment_(polygon,
resSeg.getStartXY(), tolerance) != 1) {
if (analyseClipSegment_(polygon, resSeg,
tolerance) != 1) {
assert (false);// something went wrong.
return null;
}
}
resultPolylineImpl.addSegment(resSeg, false);
state = stateAddSegment;
inCount = 0;
} else {
status = analyseClipSegment_(polygon, resSeg,
tolerance);
switch (status) {
case 1:
if (!bWholeSegment) {
resultPolylineImpl.addSegment(resSeg,
state == stateNewPath);
state = stateAddSegment;
} else {
if (state < stateManySegments) {
start_index = polylineIter
.getStartPointIndex()
- polylineImpl
.getPathStart(polylinePathIndex);
inCount = 1;
if (state == stateNewPath)
state = stateManySegmentsNewPath;
else {
assert (state == stateAddSegment);
state = stateManySegmentsContinuePath;
}
} else
inCount++;
}
break;
case 0:
state = stateNewPath;
start_index = -1;
inCount = 0;
break;
default:
return null; // may happen if a segment coincides with the border.
}
}
t0 = t;
}
} else {
// Simple case: no intersection. Both points must be inside.
clipStatus = analyseClipSegment_(polygon,
polylineSeg.getStartXY(), tolerance);
if (clipStatus < 0) {
// E-mail the repro case to the Geometry team to investigate.
assert (clipStatus >= 0);
return null; // something went wrong; resort to planesweep
}
assert (analyseClipSegment_(polygon,
polylineSeg.getEndXY(), tolerance) == clipStatus);
if (clipStatus == 1) {// the whole segment inside
if (state < stateManySegments) {
assert (inCount == 0);
start_index = polylineIter.getStartPointIndex()
- polylineImpl
.getPathStart(polylinePathIndex);
if (state == stateNewPath)
state = stateManySegmentsNewPath;
else {
assert (state == stateAddSegment);
state = stateManySegmentsContinuePath;
}
}
inCount++;
} else {
assert (state < stateManySegments);
start_index = -1;
inCount = 0;
}
}
intersections.clear(false);
} else {// clip status is determined by other means
if (clipStatus == 0) {// outside
assert (analyseClipSegment_(polygon, polylineSeg,
tolerance) == 0);
assert (start_index < 0);
assert (inCount == 0);
continue;
}
if (clipStatus == 1) {
assert (analyseClipSegment_(polygon, polylineSeg,
tolerance) == 1);
if (state == stateNewPath) {
state = stateManySegmentsNewPath;
start_index = polylineIter.getStartPointIndex()
- polylineImpl
.getPathStart(polylinePathIndex);
} else if (state == stateAddSegment) {
state = stateManySegmentsContinuePath;
start_index = polylineIter.getStartPointIndex()
- polylineImpl
.getPathStart(polylinePathIndex);
} else
assert (state >= stateManySegments);
inCount++;
continue;
}
}
}
if (state >= stateManySegments) {
resultPolylineImpl.addSegmentsFromPath(polylineImpl,
polylinePathIndex, start_index, inCount,
state == stateManySegmentsNewPath);
start_index = -1;
}
}
return result_polyline;
}
int analyseClipSegment_(Polygon polygon, Point2D pt, double tol) {
int v = PointInPolygonHelper.isPointInPolygon(polygon, pt, tol);
return v;
}
int analyseClipSegment_(Polygon polygon, Segment seg, double tol) {
Point2D pt_1 = seg.getStartXY();
Point2D pt_2 = seg.getEndXY();
int v_1 = PointInPolygonHelper.isPointInPolygon(polygon, pt_1, tol);
int v_2 = PointInPolygonHelper.isPointInPolygon(polygon, pt_2, tol);
if ((v_1 == 1 && v_2 == 0) || (v_1 == 0 && v_2 == 1)) {
// Operator_factory_local::SaveJSONToTextFileDbg("c:/temp/badPointInPolygon.json",
// polygon, m_spatial_reference);
assert (false);// if happens
return -1;// something went wrong. One point is inside, the other is
// outside. Should not happen. We'll resort to
// planesweep.
}
if (v_1 == 0 || v_2 == 0)
return 0;
if (v_1 == 1 || v_2 == 1)
return 1;
Point2D midPt = new Point2D();
midPt.add(pt_1, pt_2);
midPt.scale(0.5);// calculate midpoint
int v = PointInPolygonHelper.isPointInPolygon(polygon, midPt, tol);
if (v == 0) {
return 0;
}
if (v == 1) {
return 1;
}
return -1;
}
Geometry normalizeIntersectionOutput(Geometry geom, int GT_1, int GT_2) {
if (GT_1 == Geometry.GeometryType.Point
|| GT_2 == Geometry.GeometryType.Point) {
assert (geom.getType().value() == Geometry.GeometryType.Point);
}
if (GT_1 == Geometry.GeometryType.MultiPoint) {
if (geom.getType().value() == Geometry.GeometryType.Point) {
MultiPoint mp = new MultiPoint(geom.getDescription());
if (!geom.isEmpty())
mp.add((Point) geom);
return mp;
}
}
return geom;
}
static Geometry returnEmpty_(Geometry geom, boolean bEmpty) {
return bEmpty ? geom : geom.createInstance();
}
Geometry returnEmptyIntersector_() {
if (m_geomIntersectorEmptyGeom == null)
m_geomIntersectorEmptyGeom = m_geomIntersector.createInstance();
return m_geomIntersectorEmptyGeom;
}
// virtual boolean IsRecycling() OVERRIDE { return false; }
}
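/*
 * A minimal standalone sketch of the endpoint/midpoint classification used by
 * analyseClipSegment_ above, assuming a hypothetical containment test
 * (containsPoint) standing in for PointInPolygonHelper.isPointInPolygon; it
 * illustrates the decision logic only and is not part of the library.
 */
class SegmentClassificationSketch {
    // Hypothetical containment test: 1 = inside, 0 = outside,
    // any other value = on the boundary (within tolerance).
    interface Contains {
        int containsPoint(double x, double y);
    }

    // Classifies a segment by its endpoints: 1 = keep (inside),
    // 0 = discard (outside), -1 = undecided (fall back to plane sweep).
    static int classify(Contains polygon, double x1, double y1, double x2, double y2) {
        int v1 = polygon.containsPoint(x1, y1);
        int v2 = polygon.containsPoint(x2, y2);
        if ((v1 == 1 && v2 == 0) || (v1 == 0 && v2 == 1))
            return -1; // endpoints disagree: should not happen for an uncut segment
        if (v1 == 0 || v2 == 0)
            return 0;
        if (v1 == 1 || v2 == 1)
            return 1;
        // Both endpoints are on the boundary: decide by the midpoint.
        int v = polygon.containsPoint((x1 + x2) * 0.5, (y1 + y2) * 0.5);
        return v == 0 ? 0 : (v == 1 ? 1 : -1);
    }
}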
|
|
package actions;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.servlet.ServletContext;
import businessLogic.StudentService;
import org.apache.commons.lang.StringUtils;
import org.apache.struts2.ServletActionContext;
import org.apache.struts2.util.ServletContextAware;
import models.SearchFields;
import com.entity.Student;
import models.WinningResult;
import com.business.DataBeanRemote;
import com.client.EJBClient;
import com.opensymphony.xwork2.ActionSupport;
import com.opensymphony.xwork2.ModelDriven;
/**
* @author Jack Young
* @date February 07, 2014
* SWE 645 HW1
*
* Class Purpose: The Driver class is the model driven action class.
* It is the controller for this web application. It contains the appropriate
* references to the model objects (Student & WinningResult). Furthermore,
* this action class provides multiple methods to support different actions
* from the users. This action class also overrides the validate method
* for field-specific error checking. In conjunction with the standard validate
* method, this class provides a number of specific input error checking
* helpers (e.g. for format validation).
*
*/
@SuppressWarnings("rawtypes")
public class Driver extends ActionSupport implements ModelDriven, ServletContextAware {
//default serial id added to get rid of the warning message
private static final long serialVersionUID = 1L;
private Student student;
private ServletContext servletContext;
private List<Student> allTakenSurveys;
private WinningResult winningResult;
private SearchFields searchFields;
/**
* @return the searchFields
*/
public SearchFields getSearchFields() {
return searchFields;
}
/**
* @param searchFields the searchFields to set
*/
public void setSearchFields(SearchFields searchFields) {
this.searchFields = searchFields;
}
public String search() {
System.out.println("Search method in Driver was called");
System.out.println(searchFields);
return "success";
}
public String home() {
System.out.println("Home page view");
return "home";
}
/**
* This method is called when the user submits the survey form.
* Depending on the entered raffle numbers, it will redirect the user to
* another JSP page: if their average is greater than 90 they
* will go to the WinnerAcknowledgement.jsp page, otherwise they will go
* to the SimpleAcknowledgement.jsp page.
*
* @return string message
*/
public String submit() {
System.out.println(student);
StudentService.FILENAME = servletContext.getRealPath("submitted_surveys.txt");
System.out.println("The path shall be: " + servletContext.getRealPath("submitted_surveys.txt"));
//Writing the students information to the text file.
StudentService.writeOut(student);
//inserting the student into the Database.
DataBeanRemote bean = EJBClient.doLookup();
bean.insertStudentSurveyRecord(student);
setWinningResult(StudentService.calculate(student.getRaffle()));
servletContext.setAttribute("compMean", getWinningResult().getMean());
servletContext.setAttribute("compStdv", getWinningResult().getStdv());
if (getWinningResult().getMean() > 90) {
return "winner";
}
return "success";
}
/**
* This method is called when the user clicks the list surveys
* view on the home page. It redirects the user
* to the ListSurvey.jsp page.
*
* @return success message
*/
public String gotoListView() {
System.out.println("goToListView method was called");
//Manually setting the servletContext to avoid NPE
setServletContext(ServletActionContext.getServletContext());
StudentService.FILENAME = servletContext.getRealPath("submitted_surveys.txt");
if (servletContext == null) {
System.out.println("The servlet Context has been found to be null");
}
DataBeanRemote bean = EJBClient.doLookup();
// if (bean.ableToConnect()) {
// System.out.println("EJB was able to connect to the database.");
// System.out.println(bean.retrieveAllSurveys());
// } else {
// System.out.println("Error: the EJB was not able to connect to the database");
// }
//retrieving all the records from the database via the EJB
allTakenSurveys = bean.retrieveAllSurveys();
return "success";
}
/**
* This method is called when the user clicks the button
* on the home page; this will bring the user to the
* survey.jsp page, where the user will be able to fill out
* the survey form.
*
* @return success message
*/
public String gotoSurveyView() {
System.out.println("gotoSurveyView method was called");
return "success";
}
public String gotoSearchRecordsView() {
System.out.println("gotoSearchRecordsView method was called");
return "success";
}
/**
* This method is called when the user clicks the cancel
* button in the survey form on the survey.jsp page. By
* pressing the cancel button, the user is redirected back
* to the home page.
*
* @return success message
*/
public String cancelSurvey() {
System.out.println("cancelSurvey method was called, user will be return to the Homepage");
return "success";
}
/* (non-Javadoc)
* @see com.opensymphony.xwork2.ActionSupport#execute()
*/
public String execute() {
System.out.println("Driver Execute Method was called");
return "success";
}
/*************************************************************************************/
/************************ Getter/Setter Methods ********************************/
/*************************************************************************************/
/**
* @return the winningResult
*/
public WinningResult getWinningResult() {
return winningResult;
}
/**
* @param winningResult the winningResult to set
*/
public void setWinningResult(WinningResult winningResult) {
this.winningResult = winningResult;
}
/**
* @return the allSurveys
*/
public List<Student> getAllTakenSurveys() {
return allTakenSurveys;
}
/**
* @param allSurveys the allSurveys to set
*/
public void setAllTakenSurveys(List<Student> allSurveys) {
this.allTakenSurveys = allSurveys;
}
/**
* @return the student
*/
public Student getStudent() {
return student;
}
/**
* @param student the student to set
*/
public void setStudent(Student student) {
this.student = student;
}
@Override
public Object getModel() {
student = new Student();
return student;
}
@Override
public void setServletContext(ServletContext servletContext) {
this.servletContext = servletContext;
}
/*************************************************************************************/
/************************ Validation Methods ***********************************/
/*************************************************************************************/
public void validate() {
String e = " cannot be blank.";
if(StringUtils.isEmpty(student.getOriginOfInterest()) ) {
addFieldError("originOfInterest", "originOfInterest must have a selected a radio option");
}
if (student.getLikedAboutCampus().length < 1) {
addFieldError("likedAboutCampus", "likedAboutCampus must have a selected check box.");
}
System.out.println("Validate Method was called");
if (StringUtils.isEmpty(student.getFirstName())) {
addFieldError("firstName", "First name" + e);
}
if (StringUtils.isEmpty(student.getStreetAddress())) {
String it = "streetAddress";
addFieldError(it, it + e);
}
if (StringUtils.isEmpty(student.getCity())) {
String it = "city";
addFieldError(it, it + e);
}
if (StringUtils.isEmpty(student.getState())) {
String it = "state";
addFieldError(it, it + e);
}
if (StringUtils.isEmpty(student.getZip())) {
String it = "zip";
addFieldError(it, it + e);
} else if(!validateZipCode(student.getZip())) {
addFieldError("zip", "Zip does not appear to be in the correct format. (i.e. xxxxx)");
}
if (StringUtils.isEmpty(student.getTelephoneNumber())) {
String it = "telephoneNumber";
addFieldError(it, it + e);
} else if (!validatePhoneNumber(student.getTelephoneNumber())) {
addFieldError("telephoneNumber", "Telephone number does not appear to be in the correct format. (i.e. xxx-xxx-xxxx)");
}
if (StringUtils.isEmpty(student.getEmail())) {
String it = "email";
addFieldError(it, it + e);
} else if (!emailValidate(student.getEmail())) {
addFieldError("email", "Email does not appear to be in a correct email format.");
}
if (StringUtils.isEmpty(student.getDataOfSurvey())) {
String it = "dataOfSurvey";
addFieldError(it, it + e);
} else if (!validateSurveyDate(student.getDataOfSurvey())) {
addFieldError("dataOfSurvey", "DataOfSurvey format should be mm-dd-yyyy");
}
if (StringUtils.isEmpty(student.getRaffle())) {
addFieldError("raffle", "Raffle cannot be empty");
} else if (!validateRaffleInput(student.getRaffle())) {
addFieldError("raffle", "Raffle does not be the requirements.");
}
}
/**
* This method is used to validate the entered survey
* date.
*
* @param n the survey date
* @return boolean
*/
public static boolean validateSurveyDate(String n) {
// EFFECTS: if n is null return false, if the survey date
// is not in this format mm-dd-yyyy return
// false, else return true.
if (n == null) {
return false;
}
Pattern pattern = Pattern.compile("\\d{2}-\\d{2}-\\d{4}");
Matcher matcher = pattern.matcher(n);
if (matcher.matches()) {
return true;
}
return false;
}
/**
* This method is used to validate the entered zip code
*
* @param n the zip code
* @return boolean
*/
public static boolean validateZipCode(String n) {
// EFFECTS: if n is null return false, if the zip is not
// in this format xxxxx return false, else
// return true.
if (n == null) {
return false;
}
Pattern pattern = Pattern.compile("\\d{5}");
Matcher matcher = pattern.matcher(n);
if (matcher.matches()) {
return true;
}
return false;
}
/**
* This method is used to validate the entered phone number
*
* @param n the phone number
* @return boolean
*/
public static boolean validatePhoneNumber(String n) {
// EFFECTS: if n is null return false, if the number is
// not in this format xxx-xxx-xxxx return
// false, else return true.
if (n == null) {
return false;
}
Pattern pattern = Pattern.compile("\\d{3}-\\d{3}-\\d{4}");
Matcher matcher = pattern.matcher(n);
if (matcher.matches()) {
return true;
}
return false;
}
/**
* This method validates the passed in email address.
* It returns false if, email is: null, less than three characters
* long, missing @ symbol, and if its missing a period. Else
* this method returns true.
*
* @param email
* @return boolean
*/
public static boolean emailValidate(String email) {
if (email == null) {
return false;
}
if ((email.trim().length() < 3) || (email.indexOf("@") == -1)) {
return false;
}
if (email.indexOf(".") == -1){
return false;
}
return true;
}
/**
* This method is used to validate the Raffle Input.
* The input must be 10 numbers separated by commas, each within the
* inclusive range of 1 to 100. If the input is null, does not contain
* 10 values, contains non-numeric values, or contains values outside
* that range, this method returns false; otherwise it is a valid
* raffle selection and true is returned. (A short demo method follows
* this validator.)
*
* @param str
* @return boolean
*/
public static boolean validateRaffleInput(String str) {
if (str == null || str.length() <= 0) {
System.out.println("Error. Either null or its a empty string");
return false;
}
if (str.indexOf(',') == -1) {
System.out.println("Error. This string has no commas at all");
return false;
}
List<String> e = Arrays.asList(str.trim().split("\\s*,\\s*"));
if (e.size() != 10) {
System.out.println("Error. There are not 10 items seperated by 9 commas");
return false;
}
List<Double> d = new ArrayList<Double>();
for (int i = 0; i < e.size(); i++) {
String temp = e.get(i);
if (!isNumeric(temp)) {
System.out.println("Error, a letter was found in your raffle input. They need to be numbers");
return false;
} else {
double tp = Double.parseDouble(temp);
if (tp >= 1 && tp <= 100) {
d.add(tp);
} else {
System.out.println("Error, the raffle numbers need to be in the range of 1 to 100 (Inclusive)");
return false;
}
}
}
return true;
}
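/*
 * A minimal demo, assuming illustrative sample inputs, of the expected
 * behaviour of the static validators in this class; it is a sketch for
 * documentation purposes only.
 */
public static void validatorDemo() {
// true: ten comma-separated numbers, all within 1..100 (inclusive)
System.out.println(validateRaffleInput("1,2,3,4,5,6,7,8,9,100"));
// false: only nine values
System.out.println(validateRaffleInput("1,2,3,4,5,6,7,8,9"));
// false: 0 is outside the inclusive range 1..100
System.out.println(validateRaffleInput("0,2,3,4,5,6,7,8,9,10"));
// true / false: phone numbers must match xxx-xxx-xxxx
System.out.println(validatePhoneNumber("703-555-1234"));
System.out.println(validatePhoneNumber("703-555-123"));
// true / false: zip codes must be exactly five digits
System.out.println(validateZipCode("22030"));
System.out.println(validateZipCode("2203"));
// true / false: survey dates must match mm-dd-yyyy
System.out.println(validateSurveyDate("02-07-2014"));
System.out.println(validateSurveyDate("2014-02-07"));
}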
/**
* This method is used to check whether or not the
* passed in string is in fact a number. If it is, then return
* true, else return false.
* @param str
* @return
*/
public static boolean isNumeric(String str) {
try {
@SuppressWarnings("unused")
double d = Double.parseDouble(str);
} catch (NumberFormatException nfe) {
return false;
}
return true;
}
}
|
|
/*
* Copyright 2004-2006 Stefan Reuter
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.asteriskjava.live;
import java.util.Collection;
import java.util.Date;
import java.util.List;
import java.util.Map;
import org.asteriskjava.config.ConfigFile;
import org.asteriskjava.manager.ManagerConnection;
import org.asteriskjava.manager.ManagerEventListener;
import org.asteriskjava.manager.action.OriginateAction;
/**
* The AsteriskServer is built on top of the
* {@link org.asteriskjava.manager.ManagerConnection} and is an attempt to
* simplify interaction with Asterisk by abstracting the interface. <br>
* You will certainly have less freedom using AsteriskServer but it will make
* life easier for easy things (like originating a call or getting a list of
* open channels). <br>
* AsteriskServer is still in an early state of development. So, when using
* AsteriskServer be aware that it might change in the future.
*
* @author srt
* @version $Id$
*/
public interface AsteriskServer
{
/**
* Returns the underlying ManagerConnection.
* <p>
* Unlike the methods operating on the manager connection this method does
* not implicitly initialize the connection. Thus you can use this method to
* add custom {@linkplain org.asteriskjava.manager.ManagerEventListener
* ManagerEventListeners} before the connection to the Asterisk server is
* established. If you want to ensure that the connection is established
* call {@link #initialize()}.
*
* @return the underlying ManagerConnection.
*/
ManagerConnection getManagerConnection();
/**
* Generates an outgoing channel.
*
* @param originateAction the action that contains parameters for the
* originate
* @return the generated channel
* @throws NoSuchChannelException if the channel is not available on the
* Asterisk server, for example because you used "SIP/1310" and
* 1310 is not a valid SIP user, the SIP channel module hasn't
* been loaded or the SIP or IAX peer is not registered
* currently.
* @throws ManagerCommunicationException if the originate action cannot be
* sent to Asterisk
*/
public AsteriskChannel originate(OriginateAction originateAction)
throws ManagerCommunicationException, NoSuchChannelException;
/**
* Asynchronously generates an outgoing channel.
*
* @param originateAction the action that contains parameters for the
* originate
* @param cb callback to inform about the result
* @throws ManagerCommunicationException if the originate action cannot be
* sent to Asterisk
*/
public void originateAsync(OriginateAction originateAction, OriginateCallback cb) throws ManagerCommunicationException;
/**
* Generates an outgoing channel to a dialplan entry (extension, context,
* priority).
*
* @param channel channel name to call, for example "SIP/1310".
* @param context context to connect to
* @param exten extension to connect to
* @param priority priority to connect to
* @param timeout how long to wait for the channel to be answered before it is
* considered to have failed (in ms)
* @return the generated channel
* @throws NoSuchChannelException if the channel is not available on the
* Asterisk server, for example because you used "SIP/1310" and
* 1310 is not a valid SIP user, the SIP channel module hasn't
* been loaded or the SIP or IAX peer is not registered
* currently.
* @throws ManagerCommunicationException if the originate action cannot be
* sent to Asterisk
*/
AsteriskChannel originateToExtension(String channel, String context, String exten, int priority, long timeout)
throws ManagerCommunicationException, NoSuchChannelException;
/**
* Generates an outgoing channel to a dialplan entry (extension, context,
* priority) and sets an optional map of channel variables.
*
* @param channel channel name to call, for example "SIP/1310".
* @param context context to connect to
* @param exten extension to connect to
* @param priority priority to connect to
* @param timeout how long to wait for the channel to be answered before it is
* considered to have failed (in ms)
* @param callerId callerId to use for the outgoing channel, may be
* <code>null</code>.
* @param variables channel variables to set, may be <code>null</code>.
* @return the generated channel
* @throws NoSuchChannelException if the channel is not available on the
* Asterisk server, for example because you used "SIP/1310" and
* 1310 is not a valid SIP user, the SIP channel module hasn't
* been loaded or the SIP or IAX peer is not registered
* currently.
* @throws ManagerCommunicationException if the originate action cannot be
* sent to Asterisk
*/
AsteriskChannel originateToExtension(String channel, String context, String exten, int priority, long timeout,
CallerId callerId, Map<String, String> variables) throws ManagerCommunicationException, NoSuchChannelException;
/**
* Generates an outgoing channel to an application.
*
* @param channel channel name to call, for example "SIP/1310".
* @param application application to connect to, for example "MeetMe"
* @param data data to pass to the application, for example "1000|d", may be
* <code>null</code>.
* @param timeout how long to wait for the channel to be answered before it is
* considered to have failed (in ms)
* @return the generated channel
* @throws NoSuchChannelException if the channel is not available on the
* Asterisk server, for example because you used "SIP/1310" and
* 1310 is not a valid SIP user, the SIP channel module hasn't
* been loaded or the SIP or IAX peer is not registered
* currently.
* @throws ManagerCommunicationException if the originate action cannot be
* sent to Asterisk
*/
AsteriskChannel originateToApplication(String channel, String application, String data, long timeout)
throws ManagerCommunicationException, NoSuchChannelException;
/**
* Generates an outgoing channel to an application and sets an optional map
* of channel variables.
*
* @param channel channel name to call, for example "SIP/1310".
* @param application application to connect to, for example "MeetMe"
* @param data data to pass to the application, for example "1000|d", may be
* <code>null</code>.
* @param timeout how long to wait for the channel to be answered before it is
* considered to have failed (in ms)
* @param callerId callerId to use for the outgoing channel, may be
* <code>null</code>.
* @param variables channel variables to set, may be <code>null</code>.
* @return the generated channel
* @throws NoSuchChannelException if the channel is not available on the
* Asterisk server, for example because you used "SIP/1310" and
* 1310 is not a valid SIP user, the SIP channel module hasn't
* been loaded or the SIP or IAX peer is not registered
* currently.
* @throws ManagerCommunicationException if the originate action cannot be
* sent to Asterisk
*/
AsteriskChannel originateToApplication(String channel, String application, String data, long timeout, CallerId callerId,
Map<String, String> variables) throws ManagerCommunicationException, NoSuchChannelException;
/**
* Asynchronously generates an outgoing channel to a dialplan entry
* (extension, context, priority).
*
* @param channel channel name to call, for example "SIP/1310".
* @param context context to connect to
* @param exten extension to connect to
* @param priority priority to connect to
* @param timeout how long to wait for the channel to be answered before it is
* considered to have failed (in ms)
* @param callback callback to inform about the result
* @throws ManagerCommunicationException if the originate action cannot be
* sent to Asterisk
*/
void originateToExtensionAsync(String channel, String context, String exten, int priority, long timeout,
OriginateCallback callback) throws ManagerCommunicationException;
/**
* Asynchronously generates an outgoing channel to a dialplan entry
* (extension, context, priority) and sets an optional map of channel
* variables.
*
* @param channel channel name to call, for example "SIP/1310".
* @param context context to connect to
* @param exten extension to connect to
* @param priority priority to connect to
* @param timeout how long to wait for the channel to be answered before it is
* considered to have failed (in ms)
* @param callerId callerId to use for the outgoing channel, may be
* <code>null</code>.
* @param variables channel variables to set, may be <code>null</code>.
* @param callback callback to inform about the result
* @throws ManagerCommunicationException if the originate action cannot be
* sent to Asterisk
*/
void originateToExtensionAsync(String channel, String context, String exten, int priority, long timeout,
CallerId callerId, Map<String, String> variables, OriginateCallback callback)
throws ManagerCommunicationException;
/**
* Asynchronously generates an outgoing channel to an application.
*
* @param channel channel name to call, for example "SIP/1310".
* @param application application to connect to, for example "MeetMe"
* @param data data to pass to the application, for example "1000|d", may be
* <code>null</code>.
* @param timeout how long to wait for the channel to be answered before it is
* considered to have failed (in ms)
* @param callback callback to inform about the result
* @throws ManagerCommunicationException if the originate action cannot be
* sent to Asterisk
*/
void originateToApplicationAsync(String channel, String application, String data, long timeout,
OriginateCallback callback) throws ManagerCommunicationException;
/**
* Asynchronously generates an outgoing channel to an application and sets
* an optional map of channel variables.
*
* @param channel channel name to call, for example "SIP/1310".
* @param application application to connect to, for example "MeetMe"
* @param data data to pass to the application, for example "1000|d", may be
* <code>null</code>.
* @param timeout how long to wait for the channel to be answered before it is
* considered to have failed (in ms)
* @param callerId callerId to use for the outgoing channel, may be
* <code>null</code>.
* @param variables channel variables to set, may be <code>null</code>.
* @param callback callback to inform about the result
* @throws ManagerCommunicationException if the originate action cannot be
* sent to Asterisk
*/
void originateToApplicationAsync(String channel, String application, String data, long timeout, CallerId callerId,
Map<String, String> variables, OriginateCallback callback) throws ManagerCommunicationException;
/**
* Returns the active channels of the Asterisk server.
*
* @return a Collection of active channels.
* @throws ManagerCommunicationException if there is a problem communicating
* with Asterisk
*/
Collection<AsteriskChannel> getChannels() throws ManagerCommunicationException;
/**
* Returns a channel by its name.
*
* @param name name of the channel to return
* @return the channel with the given name or <code>null</code> if there is
* no such channel.
* @throws ManagerCommunicationException if there is a problem communicating
* with Asterisk
*/
AsteriskChannel getChannelByName(String name) throws ManagerCommunicationException;
/**
* Returns a channel by its unique id.
*
* @param id the unique id of the channel to return
* @return the channel with the given unique id or <code>null</code> if
* there is no such channel.
* @throws ManagerCommunicationException if there is a problem communicating
* with Asterisk
*/
AsteriskChannel getChannelById(String id) throws ManagerCommunicationException;
/**
* Returns the active MeetMe rooms on the Asterisk server.
*
* @return a Collection of MeetMeRooms
* @throws ManagerCommunicationException if there is a problem communicating
* with Asterisk
*/
Collection<MeetMeRoom> getMeetMeRooms() throws ManagerCommunicationException;
/**
* Returns the MeetMe room with the given number, if the room does not yet
* exist a new {@link MeetMeRoom} object is created.
*
* @param roomNumber the number of the room to return
* @return the MeetMe room with the given number.
* @throws ManagerCommunicationException if there is a problem communicating
* with Asterisk
*/
MeetMeRoom getMeetMeRoom(String roomNumber) throws ManagerCommunicationException;
/**
* Returns the queues served by the Asterisk server.
*
* @return a Collection of queues.
* @throws ManagerCommunicationException if there is a problem communicating
* with Asterisk
*/
Collection<AsteriskQueue> getQueues() throws ManagerCommunicationException;
/**
* Returns the agents registered at the Asterisk server. (Consider remarks for
* {@link AsteriskAgent})
*
* @return a Collection of agents
* @throws ManagerCommunicationException if there is a problem communicating
* with Asterisk
*/
Collection<AsteriskAgent> getAgents() throws ManagerCommunicationException;
/**
* Returns the exact version string of this Asterisk server. <br>
* This typically looks like "Asterisk 1.2.9.1-BRIstuffed-0.3.0-PRE-1q built
* by root @ pbx0 on a i686 running Linux on 2006-06-20 20:21:30 UTC".
*
* @return the version of this Asterisk server
* @throws ManagerCommunicationException if the version cannot be retrieved
* from Asterisk
* @since 0.2
*/
String getVersion() throws ManagerCommunicationException;
/**
* <<<<<<< HEAD Returns the CVS revision of a given source file of this
* Asterisk server. <br>
* ======= Returns the CVS revision of a given source file of this Asterisk
* server. <br>
* >>>>>>> refs/heads/release-1.1 For example getVersion("app_meetme.c") may
* return {1, 102} for CVS revision "1.102". <br>
* Note that this feature is not available with Asterisk 1.0.x. <br>
* You can use this feature if you need to write applications that behave
* different depending on specific modules being available in a specific
* version or not.
*
* @param file the file for which to get the version like "app_meetme.c"
* @return the CVS revision of the file, or <code>null</code> if that file
* is not part of the Asterisk instance you are connected to (maybe
* because the module that provides it has not been loaded) or if you
* are connected to an Asterisk 1.0.x server
* @throws ManagerCommunicationException if the version cannot be retrieved
* from Asterisk
* @since 0.2
*/
int[] getVersion(String file) throws ManagerCommunicationException;
/**
* Returns the value of the given global variable.
*
* @param variable the name of the global variable to return.
* @return the value of the global variable or <code>null</code> if it is
* not set.
* @throws ManagerCommunicationException if the get variable action cannot
* be sent to Asterisk.
* @since 0.3
*/
String getGlobalVariable(String variable) throws ManagerCommunicationException;
/**
* Sets the value of the given global variable.
*
* @param variable the name of the global variable to set.
* @param value the value of the global variable to set.
* @throws ManagerCommunicationException if the set variable action cannot
* be sent to Asterisk.
* @since 0.3
*/
void setGlobalVariable(String variable, String value) throws ManagerCommunicationException;
/**
* Returns a collection of all voicemailboxes configured for this Asterisk
* server with the number of new and old messages they contain.
*
* @return a collection of all voicemailboxes configured for this Asterisk
* server
* @throws ManagerCommunicationException if the voicemailboxes can't be
* retrieved.
* @since 0.3
*/
Collection<Voicemailbox> getVoicemailboxes() throws ManagerCommunicationException;
/**
* Executes a command line interface (CLI) command.
*
* @param command the command to execute, for example "sip show peers".
* @return a List containing strings representing the lines returned by the
* CLI command.
* @throws ManagerCommunicationException if the command can't be executed.
* @see org.asteriskjava.manager.action.CommandAction
* @since 0.3
*/
List<String> executeCliCommand(String command) throws ManagerCommunicationException;
/**
* Checks whether a module is currently loaded.
* <p>
* Available since Asterisk 1.6
*
* @param module name of the module to check (with or without the ".so"
* extension).
* @return <code>true</code> if the module is currently loaded,
* <code>false</code> otherwise.
* @throws ManagerCommunicationException if the module can't be checked.
*/
boolean isModuleLoaded(String module) throws ManagerCommunicationException;
/**
* Loads a module or subsystem
* <p>
* Available since Asterisk 1.6
*
* @param module name of the module to load (including ".so" extension) or
* subsystem name.
* @throws ManagerCommunicationException if the module cannot be loaded.
* @since 1.0.0
*/
void loadModule(String module) throws ManagerCommunicationException;
/**
* Unloads a module or subsystem.
* <p>
* Available since Asterisk 1.6
*
* @param module name of the module to unload (including ".so" extension) or
* subsystem name.
* @throws ManagerCommunicationException if the module cannot be unloaded.
* @since 1.0.0
*/
void unloadModule(String module) throws ManagerCommunicationException;
/**
* Reloads a module or subsystem.
* <p>
* Available since Asterisk 1.6
*
* @param module name of the module to reload (including ".so" extension) or
* subsystem name.
* @throws ManagerCommunicationException if the module cannot be reloaded.
* @since 1.0.0
*/
void reloadModule(String module) throws ManagerCommunicationException;
/**
* Reloads all currently loaded modules.
* <p>
* Available since Asterisk 1.6
*
* @throws ManagerCommunicationException if the modules cannot be reloaded.
* @since 1.0.0
*/
void reloadAllModules() throws ManagerCommunicationException;
/**
* Reads the given Asterisk configuration file.
*
* @param filename the filename, for example "voicemail.conf".
* @return the configuration file.
* @throws ManagerCommunicationException if the command can't be executed.
*/
ConfigFile getConfig(String filename) throws ManagerCommunicationException;
/**
* Adds a listener to this AsteriskServer.<br>
* If this server is not yet connected it will be implicitly connected.
*
* @param listener the listener to add.
* @throws ManagerCommunicationException if the server is not yet connected
* and the connection or initialization fails.
*/
void addAsteriskServerListener(AsteriskServerListener listener) throws ManagerCommunicationException;
/**
* Removes a listener from this Asterisk server.
*
* @param listener the listener to remove.
*/
void removeAsteriskServerListener(AsteriskServerListener listener);
/**
* Checks whether the listener is already registered with this Asterisk
* server
*
* @param listener the listener to check
* @return true, if the listener is already registered.
*/
boolean isAsteriskServerListening(AsteriskServerListener listener);
/**
* The chain listener allows a listener to receive manager events after they
* have been processed by the AsteriskServer. If the AsteriskServer is
* handling messages asynchronously (asyncEventHandling), these events will
* also be delivered asynchronously. Use the chain listener if you are
* processing raw events while also using the AJ live ChannelManager:
* without it you cannot be certain that a channel name passed in a raw
* event matches the channel name held by the live ChannelManager. By
* chaining events you can be certain that events such as channel Rename
* events have been processed by the live ChannelManager before you receive
* an event, so the names will always match. While name matching is not
* always critical (you should be matching by the channel's unique id), the
* channel name also contains state information (Zombie, Masq); in those
* cases it can be critical to see the same name, otherwise your state
* information will be out of date.
public void addChainListener(ManagerEventListener chainListener);
/**
* Removes the chain listener.
*
* @param chainListener
*/
public void removeChainListener(ManagerEventListener chainListener);
/**
* Closes the connection to this server.
*/
void shutdown();
/**
* Opens the connection to this server.
*
* @throws ManagerCommunicationException if login fails
*/
void initialize() throws ManagerCommunicationException;
/**
* Gets an Asterisk queue by its name.
*
* @author itaqua
* @param queueName name of the queue to retrieve
* @return the queue with the given name
*/
public AsteriskQueue getQueueByName(String queueName);
/**
* Returns the queue objects updated after a certain date.
*
* @author itaqua
* @param date the date after which updates are considered
* @return the list of queues updated after the given date
*/
public List<AsteriskQueue> getQueuesUpdatedAfter(Date date);
/**
* Every time an event for a queue is received, the information about it is
* reloaded from the Asterisk server.
*
* @author itaqua
*/
public void forceQueuesMonitor(boolean force);
/**
* Checks whether the queues monitor is forced.
*
* @author itaqua
* @return true if the queues monitor is forced
*/
public boolean isQueuesMonitorForced();
}
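/*
 * A minimal usage sketch of the AsteriskServer interface above, assuming
 * DefaultAsteriskServer as the implementation class and illustrative
 * connection details ("pbx", "manager", "secret"); exception handling is
 * reduced to the checked exceptions declared on the interface.
 */
class AsteriskServerUsageSketch
{
    public static void main(String[] args) throws ManagerCommunicationException, NoSuchChannelException
    {
        // DefaultAsteriskServer is assumed as the concrete implementation.
        AsteriskServer server = new DefaultAsteriskServer("pbx", "manager", "secret");
        // Open the connection to the server (see initialize()).
        server.initialize();
        // Originate a call to extension 1300 in context "default" with a 30s timeout.
        AsteriskChannel channel = server.originateToExtension("SIP/1310", "default", "1300", 1, 30000L);
        System.out.println("Originated: " + channel);
        // List the currently active channels.
        for (AsteriskChannel c : server.getChannels())
        {
            System.out.println(c);
        }
        server.shutdown();
    }
}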
|
|
/**
* Copyright 2017 The GreyCat Authors. All rights reserved.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package greycat.sparkey;
import com.spotify.sparkey.CompressionType;
import com.spotify.sparkey.Sparkey;
import com.spotify.sparkey.SparkeyReader;
import com.spotify.sparkey.SparkeyWriter;
import greycat.Callback;
import greycat.Constants;
import greycat.Graph;
import greycat.plugin.Storage;
import greycat.struct.Buffer;
import greycat.struct.BufferIterator;
import greycat.utility.Base64;
import greycat.utility.HashHelper;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
public class SparkeyDBStorage implements Storage {
private static final String _connectedError = "PLEASE CONNECT YOUR DATABASE FIRST";
private static final byte[] _prefixKey = "prefix".getBytes();
private Graph _graph;
private boolean _isConnected = false;
private final String _filePath;
private SparkeyWriter _writer;
private SparkeyReader _reader;
private final List<Callback<Buffer>> updates = new ArrayList<Callback<Buffer>>();
public SparkeyDBStorage(String filePath){
_filePath = filePath;
_isConnected = false;
}
@Override
public void get(Buffer keys, Callback<Buffer> callback) {
if (!_isConnected) {
throw new RuntimeException(_connectedError);
}
Buffer result = _graph.newBuffer();
BufferIterator it = keys.iterator();
boolean isFirst = true;
while (it.hasNext()) {
Buffer view = it.next();
try {
if (!isFirst) {
result.write(Constants.BUFFER_SEP);
} else {
isFirst = false;
}
byte[] res = _reader.getAsByteArray(view.data());
if (res != null) {
result.writeAll(res);
}
} catch (Exception e) {
e.printStackTrace();
}
}
if (callback != null) {
callback.on(result);
}
}
@Override
public void put(Buffer stream, Callback<Boolean> callback) {
if (!_isConnected) {
throw new RuntimeException(_connectedError);
}
try {
Buffer result = null;
if (updates.size() != 0) {
result = _graph.newBuffer();
}
BufferIterator it = stream.iterator();
boolean isFirst = true;
while (it.hasNext()) {
Buffer keyView = it.next();
Buffer valueView = it.next();
if (valueView != null) {
_writer.put(keyView.data(), valueView.data());
}
if (result != null) {
if (isFirst) {
isFirst = false;
} else {
result.write(Constants.KEY_SEP);
}
result.writeAll(keyView.data());
result.write(Constants.KEY_SEP);
Base64.encodeLongToBuffer(HashHelper.hashBuffer(valueView, 0, valueView.length()), result);
}
}
for (int i = 0; i < updates.size(); i++) {
final Callback<Buffer> explicit = updates.get(i);
explicit.on(result);
}
if (callback != null) {
callback.on(true);
}
} catch (Exception e) {
e.printStackTrace();
if (callback != null) {
callback.on(false);
}
}
}
@Override
public void putSilent(Buffer stream, Callback<Buffer> callback) {
if (!_isConnected) {
throw new RuntimeException(_connectedError);
}
try {
Buffer result = null;
if (updates.size() != 0) {
result = _graph.newBuffer();
}
BufferIterator it = stream.iterator();
boolean isFirst = true;
while (it.hasNext()) {
Buffer keyView = it.next();
Buffer valueView = it.next();
if (valueView != null) {
_writer.put(keyView.data(), valueView.data());
}
if (result != null) {
if (isFirst) {
isFirst = false;
} else {
result.write(Constants.KEY_SEP);
}
result.writeAll(keyView.data());
result.write(Constants.KEY_SEP);
Base64.encodeLongToBuffer(HashHelper.hashBuffer(valueView, 0, valueView.length()), result);
}
}
for (int i = 0; i < updates.size(); i++) {
final Callback<Buffer> explicit = updates.get(i);
explicit.on(result);
}
callback.on(result);
} catch (Exception e) {
e.printStackTrace();
if (callback != null) {
callback.on(null);
}
}
}
@Override
public void remove(Buffer keys, Callback<Boolean> callback) {
if (!_isConnected) {
throw new RuntimeException(_connectedError);
}
try {
BufferIterator it = keys.iterator();
while (it.hasNext()) {
Buffer view = it.next();
_writer.delete(view.data());
}
if (callback != null) {
callback.on(null);
}
} catch (Exception e) {
e.printStackTrace();
if (callback != null) {
callback.on(false);
}
}
}
@Override
public void connect(Graph graph, Callback<Boolean> callback) {
if(_isConnected){
if (callback != null) {
callback.on(true);
}
return;
}
_graph = graph;
File indexFile = new File(_filePath);
try{
if(!indexFile.exists()){
indexFile.mkdirs();
_writer = Sparkey.createNew(indexFile, CompressionType.SNAPPY, 8000);
_writer.flush();
_writer.writeHash();
_writer.close();
}
_writer = Sparkey.append(indexFile);
_reader = Sparkey.open(indexFile);
_isConnected = true;
if (callback != null) {
callback.on(true);
}
} catch (IOException e){
e.printStackTrace();
if (callback != null) {
callback.on(false);
}
}
}
@Override
public void lock(Callback<Buffer> callback) {
if (!_isConnected) {
throw new RuntimeException(_connectedError);
}
byte[] current = new byte[0];
try {
current = _reader.getAsByteArray(_prefixKey);
if (current == null) {
current = "0".getBytes();
}
Short currentPrefix = Short.parseShort(new String(current));
_writer.put(_prefixKey, ((currentPrefix + 1) + "").getBytes());
if (callback != null) {
Buffer newBuf = _graph.newBuffer();
Base64.encodeIntToBuffer(currentPrefix, newBuf);
callback.on(newBuf);
}
} catch (IOException e) {
e.printStackTrace();
}
}
@Override
public void unlock(Buffer previousLock, Callback<Boolean> callback) {
if (!_isConnected) {
throw new RuntimeException(_connectedError);
}
callback.on(true);
}
@Override
public void disconnect(Callback<Boolean> callback) {
try {
_reader.close();
_reader = null;
_writer.flush();
_writer.writeHash();
_writer.close();
_writer = null;
_isConnected = false;
if (callback != null) {
callback.on(true);
}
} catch (Exception e) {
e.printStackTrace();
if (callback != null) {
callback.on(false);
}
}
}
@Override
public void listen(Callback<Buffer> synCallback) {
updates.add(synCallback);
}
}
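/*
 * A minimal sketch of the buffer layout the Storage methods above expect,
 * assuming an already connected SparkeyDBStorage, an available Graph instance
 * (construction elided), and that views within a buffer are delimited by
 * Constants.BUFFER_SEP, as the get() implementation above suggests.
 */
class SparkeyDBStorageUsageSketch {
    static void demo(SparkeyDBStorage storage, Graph graph) {
        // put(): key followed by its value, one pair after another.
        Buffer putStream = graph.newBuffer();
        putStream.writeAll("key-1".getBytes());
        putStream.write(Constants.BUFFER_SEP);
        putStream.writeAll("value-1".getBytes());
        storage.put(putStream, new Callback<Boolean>() {
            @Override
            public void on(Boolean ok) {
                System.out.println("put succeeded: " + ok);
            }
        });
        // get(): keys separated by Constants.BUFFER_SEP; results come back in
        // the same order, also separated by Constants.BUFFER_SEP.
        Buffer keys = graph.newBuffer();
        keys.writeAll("key-1".getBytes());
        storage.get(keys, new Callback<Buffer>() {
            @Override
            public void on(Buffer result) {
                System.out.println(new String(result.data()));
            }
        });
    }
}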
|
|
/*
* $Id: EditorActions.java,v 1.1 2012/11/15 13:26:46 gaudenz Exp $
* Copyright (c) 2001-2012, JGraph Ltd
*/
package edu.odu.icat.graphicsinterface.editor;
import com.mxgraph.analysis.mxDistanceCostFunction;
import com.mxgraph.analysis.mxGraphAnalysis;
import com.mxgraph.canvas.mxGraphics2DCanvas;
import com.mxgraph.canvas.mxICanvas;
import com.mxgraph.canvas.mxSvgCanvas;
import com.mxgraph.examples.swing.GraphEditor;
import com.mxgraph.io.mxCodec;
import com.mxgraph.io.mxGdCodec;
import com.mxgraph.model.mxCell;
import com.mxgraph.model.mxGraphModel;
import com.mxgraph.model.mxIGraphModel;
import com.mxgraph.shape.mxStencilShape;
import com.mxgraph.swing.handler.mxConnectionHandler;
import com.mxgraph.swing.mxGraphComponent;
import com.mxgraph.swing.mxGraphOutline;
import com.mxgraph.swing.util.mxGraphActions;
import com.mxgraph.swing.view.mxCellEditor;
import com.mxgraph.util.*;
import com.mxgraph.util.mxCellRenderer.CanvasFactory;
import com.mxgraph.util.png.mxPngEncodeParam;
import com.mxgraph.util.png.mxPngImageEncoder;
import com.mxgraph.util.png.mxPngTextDecoder;
import com.mxgraph.view.mxGraph;
import com.mxgraph.view.mxStylesheet;
import edu.odu.icat.controller.Control;
import edu.odu.icat.graphicsinterface.Dashboard;
import edu.odu.icat.model.Entity;
import edu.odu.icat.model.Force;
import edu.odu.icat.model.Location;
import org.w3c.dom.Document;
import javax.imageio.ImageIO;
import javax.swing.*;
import javax.swing.filechooser.FileFilter;
import javax.swing.text.html.HTML;
import javax.swing.text.html.HTMLDocument;
import javax.swing.text.html.HTMLEditorKit;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.image.BufferedImage;
import java.awt.print.PageFormat;
import java.awt.print.Paper;
import java.awt.print.PrinterException;
import java.awt.print.PrinterJob;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.io.*;
import java.lang.reflect.Method;
import java.net.URL;
import java.net.URLDecoder;
import java.net.URLEncoder;
import java.util.HashSet;
import java.util.Hashtable;
import java.util.Map;
/**
*
*/
public class EditorActions
{
/**
*
* @param e
* @return Returns the editor for the given action event, or null if none is found.
*/
public static final BasicGraphEditor getEditor(ActionEvent e)
{
if (e.getSource() instanceof Component)
{
Component component = (Component) e.getSource();
while (component != null
&& !(component instanceof BasicGraphEditor))
{
component = component.getParent();
}
return (BasicGraphEditor) component;
}
return null;
}
public static class NewVertexAction extends AbstractAction
{
private static int defaultIndex = 0;
/**
*
*/
public void actionPerformed(ActionEvent e)
{
defaultIndex++;
mxGraph graph = getEditor(e).getGraphComponent().getGraph();
Object vertex=null;
if (graph != null)
{
mxPoint pt = getEditor(e).mouseClickLoc;
Entity entity = new Entity("Name " + defaultIndex, Control.getInstance().getDefaultEntityClassification());
Control.getInstance().getCurrentProject().addEntity(entity);
// graphComponent.refresh();
entity.setLocation(new Location(pt.getX(), pt.getY()));
vertex=graph.insertVertex(graph.getDefaultParent(), null,entity, pt.getX(), pt.getY(),100,100,"shape=ellipse");
graph.setCellStyles(mxConstants.STYLE_GRADIENTCOLOR, "blue", new Object[]{vertex}); // sets the gradient color to blue
graph.setCellStyles(mxConstants.STYLE_FILLCOLOR, "blue", new Object[]{vertex}); // sets the fill color to blue
graph.setCellStyles(mxConstants.STYLE_FONTCOLOR, "white", new Object[]{vertex});
graph.setCellStyles(mxConstants.STYLE_STROKECOLOR, "black", new Object[]{vertex});
// graph.insertVertex(graph.getDefaultParent(), null, entity, pt.getX(),pt.getY(), 80,
//30);
}
}
}
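/*
 * A minimal sketch of inserting and styling a vertex with the model update
 * wrapped in beginUpdate()/endUpdate(), as NewLegendAction below does;
 * NewVertexAction above applies the same insertVertex/setCellStyles calls
 * without an explicit update transaction. The value object and geometry are
 * illustrative.
 */
static Object insertStyledVertexSketch(mxGraph graph, Object value, double x, double y)
{
Object vertex;
graph.getModel().beginUpdate();
try
{
vertex = graph.insertVertex(graph.getDefaultParent(), null, value, x, y, 100, 100, "shape=ellipse");
graph.setCellStyles(mxConstants.STYLE_FILLCOLOR, "blue", new Object[] { vertex });
graph.setCellStyles(mxConstants.STYLE_FONTCOLOR, "white", new Object[] { vertex });
}
finally
{
graph.getModel().endUpdate();
}
return vertex;
}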
public static class NewLegendAction extends AbstractAction
{
public void actionPerformed(ActionEvent e)
{
mxGraph graph = getEditor(e).getGraphComponent().getGraph();
if (graph != null)
{
mxPoint pt = getEditor(e).mouseClickLoc;
graph.getModel().beginUpdate();
try
{
Object vertex = graph.insertVertex(graph.getDefaultParent(), null, "Legend", pt.getX(), pt.getY(), 145, 154, "shape=image;image=file:src/main/resources/legend.bmp;verticalLabelPosition=bottom;verticalAlign=top");
}
finally
{
graph.getModel().endUpdate();
}
}
}
}
public static class DeleteVertexAction extends AbstractAction
{
public void actionPerformed(ActionEvent e)
{
mxGraph graph = getEditor(e).getGraphComponent().getGraph();
if (graph != null) {
Object[] cells = graph.removeCells();
for (int i = 0; i < cells.length; i++)
{
mxCell cell = (mxCell) cells[i];
if (cell.getValue() instanceof Force) {
Control.getInstance().getCurrentProject().removeForce((Force) cell.getValue());
}
}
for (int i = 0; i < cells.length; i++)
{
mxCell cell = (mxCell) cells[i];
if (cell.getValue() instanceof Entity)
Control.getInstance().getCurrentProject().removeEntity((Entity) cell.getValue());
}
}
}
}
public static class TestAction extends AbstractAction {
public void actionPerformed(ActionEvent e)
{
mxGraph graph = getEditor(e).getGraphComponent().getGraph();
Object model = graph.getModel();
}
}
/**
*
*/
@SuppressWarnings("serial")
public static class ToggleRulersItem extends JCheckBoxMenuItem
{
/**
*
*/
public ToggleRulersItem(final BasicGraphEditor editor, String name)
{
super(name);
setSelected(editor.getGraphComponent().getColumnHeader() != null);
addActionListener(new ActionListener()
{
/**
*
*/
public void actionPerformed(ActionEvent e)
{
mxGraphComponent graphComponent = editor
.getGraphComponent();
if (graphComponent.getColumnHeader() != null)
{
graphComponent.setColumnHeader(null);
graphComponent.setRowHeader(null);
}
else
{
graphComponent.setColumnHeaderView(new EditorRuler(
graphComponent,
EditorRuler.ORIENTATION_HORIZONTAL));
graphComponent.setRowHeaderView(new EditorRuler(
graphComponent,
EditorRuler.ORIENTATION_VERTICAL));
}
}
});
}
}
/**
*
*/
@SuppressWarnings("serial")
public static class ToggleGridItem extends JCheckBoxMenuItem
{
/**
*
*/
public ToggleGridItem(final BasicGraphEditor editor, String name)
{
super(name);
setSelected(true);
addActionListener(new ActionListener()
{
/**
*
*/
public void actionPerformed(ActionEvent e)
{
mxGraphComponent graphComponent = editor
.getGraphComponent();
mxGraph graph = graphComponent.getGraph();
boolean enabled = !graph.isGridEnabled();
graph.setGridEnabled(enabled);
graphComponent.setGridVisible(enabled);
graphComponent.repaint();
setSelected(enabled);
}
});
}
}
/**
*
*/
@SuppressWarnings("serial")
public static class ToggleOutlineItem extends JCheckBoxMenuItem
{
/**
*
*/
public ToggleOutlineItem(final BasicGraphEditor editor, String name)
{
super(name);
setSelected(true);
addActionListener(new ActionListener()
{
/**
*
*/
public void actionPerformed(ActionEvent e)
{
final mxGraphOutline outline = editor.getGraphOutline();
outline.setVisible(!outline.isVisible());
outline.revalidate();
SwingUtilities.invokeLater(new Runnable()
{
/*
* (non-Javadoc)
* @see java.lang.Runnable#run()
*/
public void run()
{
if (outline.getParent() instanceof JSplitPane)
{
if (outline.isVisible())
{
((JSplitPane) outline.getParent())
.setDividerLocation(editor
.getHeight() - 300);
((JSplitPane) outline.getParent())
.setDividerSize(6);
}
else
{
((JSplitPane) outline.getParent())
.setDividerSize(0);
}
}
}
});
}
});
}
}
/**
*
*/
@SuppressWarnings("serial")
public static class ExitAction extends AbstractAction
{
/**
*
*/
public void actionPerformed(ActionEvent e)
{
BasicGraphEditor editor = getEditor(e);
if (editor != null)
{
editor.exit();
}
}
}
/**
*
*/
@SuppressWarnings("serial")
public static class StylesheetAction extends AbstractAction
{
/**
*
*/
protected String stylesheet;
/**
*
*/
public StylesheetAction(String stylesheet)
{
this.stylesheet = stylesheet;
}
/**
*
*/
public void actionPerformed(ActionEvent e)
{
if (e.getSource() instanceof mxGraphComponent)
{
mxGraphComponent graphComponent = (mxGraphComponent) e
.getSource();
mxGraph graph = graphComponent.getGraph();
mxCodec codec = new mxCodec();
Document doc = mxUtils.loadDocument(EditorActions.class
.getResource(stylesheet).toString());
if (doc != null)
{
codec.decode(doc.getDocumentElement(),
graph.getStylesheet());
graph.refresh();
}
}
}
}
/**
*
*/
@SuppressWarnings("serial")
public static class ZoomPolicyAction extends AbstractAction
{
/**
*
*/
protected int zoomPolicy;
/**
*
*/
public ZoomPolicyAction(int zoomPolicy)
{
this.zoomPolicy = zoomPolicy;
}
/**
*
*/
public void actionPerformed(ActionEvent e)
{
if (e.getSource() instanceof mxGraphComponent)
{
mxGraphComponent graphComponent = (mxGraphComponent) e
.getSource();
graphComponent.setPageVisible(true);
graphComponent.setZoomPolicy(zoomPolicy);
}
}
}
/**
*
*/
@SuppressWarnings("serial")
public static class GridStyleAction extends AbstractAction
{
/**
*
*/
protected int style;
/**
*
*/
public GridStyleAction(int style)
{
this.style = style;
}
/**
*
*/
public void actionPerformed(ActionEvent e)
{
if (e.getSource() instanceof mxGraphComponent)
{
mxGraphComponent graphComponent = (mxGraphComponent) e
.getSource();
graphComponent.setGridStyle(style);
graphComponent.repaint();
}
}
}
/**
*
*/
@SuppressWarnings("serial")
public static class GridColorAction extends AbstractAction
{
/**
*
*/
public void actionPerformed(ActionEvent e)
{
if (e.getSource() instanceof mxGraphComponent)
{
mxGraphComponent graphComponent = (mxGraphComponent) e
.getSource();
Color newColor = JColorChooser.showDialog(graphComponent,
mxResources.get("gridColor"),
graphComponent.getGridColor());
if (newColor != null)
{
graphComponent.setGridColor(newColor);
graphComponent.repaint();
}
}
}
}
/**
*
*/
@SuppressWarnings("serial")
public static class ScaleAction extends AbstractAction
{
/**
*
*/
protected double scale;
/**
*
*/
public ScaleAction(double scale)
{
this.scale = scale;
}
/**
*
*/
public void actionPerformed(ActionEvent e)
{
if (e.getSource() instanceof mxGraphComponent)
{
mxGraphComponent graphComponent = (mxGraphComponent) e
.getSource();
double scale = this.scale;
if (scale == 0)
{
String value = (String) JOptionPane.showInputDialog(
graphComponent, mxResources.get("value"),
mxResources.get("scale") + " (%)",
JOptionPane.PLAIN_MESSAGE, null, null, "");
if (value != null)
{
scale = Double.parseDouble(value.replace("%", "")) / 100;
}
}
if (scale > 0)
{
graphComponent.zoomTo(scale, graphComponent.isCenterZoom());
}
}
}
}
/**
*
*/
@SuppressWarnings("serial")
public static class PageSetupAction extends AbstractAction
{
/**
*
*/
public void actionPerformed(ActionEvent e)
{
if (e.getSource() instanceof mxGraphComponent)
{
mxGraphComponent graphComponent = (mxGraphComponent) e
.getSource();
PrinterJob pj = PrinterJob.getPrinterJob();
PageFormat format = pj.pageDialog(graphComponent
.getPageFormat());
if (format != null)
{
graphComponent.setPageFormat(format);
graphComponent.zoomAndCenter();
}
}
}
public static void formatPage(mxGraphComponent gc)
{
PrinterJob pj = PrinterJob.getPrinterJob();
PageFormat format = pj.pageDialog(gc.getPageFormat());
if (format != null)
{
gc.setPageFormat(format);
gc.zoomAndCenter();
}
}
}
/**
*
*/
@SuppressWarnings("serial")
public static class PrintAction extends AbstractAction
{
/**
*
*/
public void actionPerformed(ActionEvent e)
{
if (e.getSource() instanceof mxGraphComponent)
{
mxGraphComponent graphComponent = (mxGraphComponent) e
.getSource();
PrinterJob pj = PrinterJob.getPrinterJob();
if (pj.printDialog())
{
PageFormat pf = graphComponent.getPageFormat();
Paper paper = new Paper();
double margin = 36;
paper.setImageableArea(margin, margin, paper.getWidth()
- margin * 2, paper.getHeight() - margin * 2);
pf.setPaper(paper);
pj.setPrintable(graphComponent, pf);
try
{
pj.print();
}
catch (PrinterException e2)
{
System.out.println(e2);
}
}
}
}
public static void printComp(mxGraphComponent gc)
{
PrinterJob pj = PrinterJob.getPrinterJob();
if (pj.printDialog())
{
PageFormat pf = gc.getPageFormat();
Paper paper = new Paper();
double margin = 18;
paper.setImageableArea(margin, margin, paper.getWidth()
- margin * 2, paper.getHeight() - margin * 2);
pf.setPaper(paper);
pj.setPrintable(gc, pf);
try
{
pj.print();
}
catch (PrinterException e2)
{
System.out.println(e2);
}
}
}
}
/**
*
*/
@SuppressWarnings("serial")
public static class SaveAction extends AbstractAction
{
/**
*
*/
protected boolean showDialog;
/**
*
*/
protected String lastDir = null;
/**
*
*/
public SaveAction(boolean showDialog)
{
this.showDialog = showDialog;
}
/**
* Saves XML+PNG format.
*/
protected void saveXmlPng(BasicGraphEditor editor, String filename,
Color bg) throws IOException
{
mxGraphComponent graphComponent = editor.getGraphComponent();
mxGraph graph = graphComponent.getGraph();
// Creates the image for the PNG file
BufferedImage image = mxCellRenderer.createBufferedImage(graph,
null, 1, bg, graphComponent.isAntiAlias(), null,
graphComponent.getCanvas());
// Creates the URL-encoded XML data
mxCodec codec = new mxCodec();
String xml = URLEncoder.encode(
mxXmlUtils.getXml(codec.encode(graph.getModel())), "UTF-8");
mxPngEncodeParam param = mxPngEncodeParam
.getDefaultEncodeParam(image);
param.setCompressedText(new String[] { "mxGraphModel", xml });
// Saves as a PNG file
FileOutputStream outputStream = new FileOutputStream(new File(
filename));
try
{
mxPngImageEncoder encoder = new mxPngImageEncoder(outputStream,
param);
if (image != null)
{
encoder.encode(image);
editor.setModified(false);
editor.setCurrentFile(new File(filename));
}
else
{
JOptionPane.showMessageDialog(graphComponent,
mxResources.get("noImageData"));
}
}
finally
{
outputStream.close();
}
}
/**
*
*/
public void actionPerformed(ActionEvent e)
{
BasicGraphEditor editor = getEditor(e);
if (editor != null)
{
mxGraphComponent graphComponent = editor.getGraphComponent();
mxGraph graph = graphComponent.getGraph();
FileFilter selectedFilter = null;
DefaultFileFilter xmlPngFilter = new DefaultFileFilter(".png",
"PNG+XML " + mxResources.get("file") + " (.png)");
FileFilter vmlFileFilter = new DefaultFileFilter(".html",
"VML " + mxResources.get("file") + " (.html)");
String filename = null;
boolean dialogShown = false;
if (showDialog || editor.getCurrentFile() == null)
{
String wd;
if (lastDir != null)
{
wd = lastDir;
}
else if (editor.getCurrentFile() != null)
{
wd = editor.getCurrentFile().getParent();
}
else
{
wd = System.getProperty("user.dir");
}
JFileChooser fc = new JFileChooser(wd);
// Adds the default file format
FileFilter defaultFilter = xmlPngFilter;
fc.addChoosableFileFilter(defaultFilter);
// Adds special vector graphics formats and HTML
fc.addChoosableFileFilter(new DefaultFileFilter(".mxe",
"mxGraph Editor " + mxResources.get("file")
+ " (.mxe)"));
fc.addChoosableFileFilter(new DefaultFileFilter(".txt",
"Graph Drawing " + mxResources.get("file")
+ " (.txt)"));
fc.addChoosableFileFilter(new DefaultFileFilter(".svg",
"SVG " + mxResources.get("file") + " (.svg)"));
fc.addChoosableFileFilter(vmlFileFilter);
fc.addChoosableFileFilter(new DefaultFileFilter(".html",
"HTML " + mxResources.get("file") + " (.html)"));
// Adds a filter for each supported image format
Object[] imageFormats = ImageIO.getReaderFormatNames();
// Finds all distinct extensions
HashSet<String> formats = new HashSet<String>();
for (int i = 0; i < imageFormats.length; i++)
{
String ext = imageFormats[i].toString().toLowerCase();
formats.add(ext);
}
imageFormats = formats.toArray();
for (int i = 0; i < imageFormats.length; i++)
{
String ext = imageFormats[i].toString();
fc.addChoosableFileFilter(new DefaultFileFilter("."
+ ext, ext.toUpperCase() + " "
+ mxResources.get("file") + " (." + ext + ")"));
}
// Adds filter that accepts all supported image formats
fc.addChoosableFileFilter(new DefaultFileFilter.ImageFileFilter(
mxResources.get("allImages")));
fc.setFileFilter(defaultFilter);
int rc = fc.showDialog(null, mxResources.get("save"));
dialogShown = true;
if (rc != JFileChooser.APPROVE_OPTION)
{
return;
}
else
{
lastDir = fc.getSelectedFile().getParent();
}
filename = fc.getSelectedFile().getAbsolutePath();
selectedFilter = fc.getFileFilter();
if (selectedFilter instanceof DefaultFileFilter)
{
String ext = ((DefaultFileFilter) selectedFilter)
.getExtension();
if (!filename.toLowerCase().endsWith(ext))
{
filename += ext;
}
}
if (new File(filename).exists()
&& JOptionPane.showConfirmDialog(graphComponent,
mxResources.get("overwriteExistingFile")) != JOptionPane.YES_OPTION)
{
return;
}
}
else
{
filename = editor.getCurrentFile().getAbsolutePath();
}
try
{
String ext = filename
.substring(filename.lastIndexOf('.') + 1);
if (ext.equalsIgnoreCase("svg"))
{
mxSvgCanvas canvas = (mxSvgCanvas) mxCellRenderer
.drawCells(graph, null, 1, null,
new CanvasFactory()
{
public mxICanvas createCanvas(
int width, int height)
{
mxSvgCanvas canvas = new mxSvgCanvas(
mxDomUtils.createSvgDocument(
width, height));
canvas.setEmbedded(true);
return canvas;
}
});
mxUtils.writeFile(mxXmlUtils.getXml(canvas.getDocument()),
filename);
}
else if (selectedFilter == vmlFileFilter)
{
mxUtils.writeFile(mxXmlUtils.getXml(mxCellRenderer
.createVmlDocument(graph, null, 1, null, null)
.getDocumentElement()), filename);
}
else if (ext.equalsIgnoreCase("html"))
{
mxUtils.writeFile(mxXmlUtils.getXml(mxCellRenderer
.createHtmlDocument(graph, null, 1, null, null)
.getDocumentElement()), filename);
}
else if (ext.equalsIgnoreCase("mxe")
|| ext.equalsIgnoreCase("xml"))
{
mxCodec codec = new mxCodec();
String xml = mxXmlUtils.getXml(codec.encode(graph
.getModel()));
mxUtils.writeFile(xml, filename);
editor.setModified(false);
editor.setCurrentFile(new File(filename));
}
else if (ext.equalsIgnoreCase("txt"))
{
String content = mxGdCodec.encode(graph);
mxUtils.writeFile(content, filename);
}
else
{
Color bg = null;
if ((!ext.equalsIgnoreCase("gif") && !ext
.equalsIgnoreCase("png"))
|| JOptionPane.showConfirmDialog(
graphComponent, mxResources
.get("transparentBackground")) != JOptionPane.YES_OPTION)
{
bg = graphComponent.getBackground();
}
if (selectedFilter == xmlPngFilter
|| (editor.getCurrentFile() != null
&& ext.equalsIgnoreCase("png") && !dialogShown))
{
saveXmlPng(editor, filename, bg);
}
else
{
BufferedImage image = mxCellRenderer
.createBufferedImage(graph, null, 1, bg,
graphComponent.isAntiAlias(), null,
graphComponent.getCanvas());
if (image != null)
{
ImageIO.write(image, ext, new File(filename));
}
else
{
JOptionPane.showMessageDialog(graphComponent,
mxResources.get("noImageData"));
}
}
}
}
catch (Throwable ex)
{
ex.printStackTrace();
JOptionPane.showMessageDialog(graphComponent,
ex.toString(), mxResources.get("error"),
JOptionPane.ERROR_MESSAGE);
}
}
}
}
/**
* Selects the shortest path between the first two selected vertices,
* optionally treating edges as directed.
*/
@SuppressWarnings("serial")
public static class SelectShortestPathAction extends AbstractAction
{
/**
*
*/
protected boolean directed;
/**
*
*/
public SelectShortestPathAction(boolean directed)
{
this.directed = directed;
}
/**
*
*/
public void actionPerformed(ActionEvent e)
{
if (e.getSource() instanceof mxGraphComponent)
{
mxGraphComponent graphComponent = (mxGraphComponent) e
.getSource();
mxGraph graph = graphComponent.getGraph();
mxIGraphModel model = graph.getModel();
Object source = null;
Object target = null;
Object[] cells = graph.getSelectionCells();
for (int i = 0; i < cells.length; i++)
{
if (model.isVertex(cells[i]))
{
if (source == null)
{
source = cells[i];
}
else if (target == null)
{
target = cells[i];
}
}
if (source != null && target != null)
{
break;
}
}
if (source != null && target != null)
{
int steps = graph.getChildEdges(graph.getDefaultParent()).length;
Object[] path = mxGraphAnalysis.getInstance()
.getShortestPath(graph, source, target,
new mxDistanceCostFunction(), steps,
directed);
graph.setSelectionCells(path);
}
else
{
JOptionPane.showMessageDialog(graphComponent,
mxResources.get("noSourceAndTargetSelected"));
}
}
}
}
/**
* Selects a minimum spanning tree of the child vertices of the current
* parent (or of the first selected cell that has children).
*/
@SuppressWarnings("serial")
public static class SelectSpanningTreeAction extends AbstractAction
{
/**
*
*/
protected boolean directed;
/**
*
*/
public SelectSpanningTreeAction(boolean directed)
{
this.directed = directed;
}
/**
*
*/
public void actionPerformed(ActionEvent e)
{
if (e.getSource() instanceof mxGraphComponent)
{
mxGraphComponent graphComponent = (mxGraphComponent) e
.getSource();
mxGraph graph = graphComponent.getGraph();
mxIGraphModel model = graph.getModel();
Object parent = graph.getDefaultParent();
Object[] cells = graph.getSelectionCells();
for (int i = 0; i < cells.length; i++)
{
if (model.getChildCount(cells[i]) > 0)
{
parent = cells[i];
break;
}
}
Object[] v = graph.getChildVertices(parent);
Object[] mst = mxGraphAnalysis.getInstance()
.getMinimumSpanningTree(graph, v,
new mxDistanceCostFunction(), directed);
graph.setSelectionCells(mst);
}
}
}
/**
* Toggles the display of dirty (repaint) rectangles in the graph component.
*/
@SuppressWarnings("serial")
public static class ToggleDirtyAction extends AbstractAction
{
/**
*
*/
public void actionPerformed(ActionEvent e)
{
if (e.getSource() instanceof mxGraphComponent)
{
mxGraphComponent graphComponent = (mxGraphComponent) e
.getSource();
graphComponent.showDirtyRectangle = !graphComponent.showDirtyRectangle;
}
}
}
/**
* Toggles whether the connection handler uses a connect handle.
*/
@SuppressWarnings("serial")
public static class ToggleConnectModeAction extends AbstractAction
{
/**
*
*/
public void actionPerformed(ActionEvent e)
{
if (e.getSource() instanceof mxGraphComponent)
{
mxGraphComponent graphComponent = (mxGraphComponent) e
.getSource();
mxConnectionHandler handler = graphComponent
.getConnectionHandler();
handler.setHandleEnabled(!handler.isHandleEnabled());
}
}
}
/**
* Checkbox menu item that toggles whether dragging an edge into empty space
* creates a new target cell.
*/
@SuppressWarnings("serial")
public static class ToggleCreateTargetItem extends JCheckBoxMenuItem
{
/**
*
*/
public ToggleCreateTargetItem(final BasicGraphEditor editor, String name)
{
super(name);
setSelected(true);
addActionListener(new ActionListener()
{
/**
*
*/
public void actionPerformed(ActionEvent e)
{
mxGraphComponent graphComponent = editor
.getGraphComponent();
if (graphComponent != null)
{
mxConnectionHandler handler = graphComponent
.getConnectionHandler();
handler.setCreateTarget(!handler.isCreateTarget());
setSelected(handler.isCreateTarget());
}
}
});
}
}
/**
* Prompts for a new value of an integer bean property on the given target,
* resolved via reflection from the field name (get/set + fieldname).
*/
@SuppressWarnings("serial")
public static class PromptPropertyAction extends AbstractAction
{
/**
*
*/
protected Object target;
/**
*
*/
protected String fieldname, message;
/**
*
*/
public PromptPropertyAction(Object target, String message)
{
this(target, message, message);
}
/**
*
*/
public PromptPropertyAction(Object target, String message,
String fieldname)
{
this.target = target;
this.message = message;
this.fieldname = fieldname;
}
/**
*
*/
public void actionPerformed(ActionEvent e)
{
if (e.getSource() instanceof Component)
{
try
{
Method getter = target.getClass().getMethod(
"get" + fieldname);
Object current = getter.invoke(target);
// TODO: Support other atomic types
if (current instanceof Integer)
{
Method setter = target.getClass().getMethod(
"set" + fieldname, new Class[] { int.class });
String value = (String) JOptionPane.showInputDialog(
(Component) e.getSource(), "Value", message,
JOptionPane.PLAIN_MESSAGE, null, null, current);
if (value != null)
{
setter.invoke(target, Integer.parseInt(value));
}
}
}
catch (Exception ex)
{
ex.printStackTrace();
}
}
// Repaints the graph component
if (e.getSource() instanceof mxGraphComponent)
{
mxGraphComponent graphComponent = (mxGraphComponent) e
.getSource();
graphComponent.repaint();
}
}
}
/**
* Checkbox menu item bound to a boolean bean property (is/set + fieldname)
* of a graph or graph component, kept in sync via property change events.
*/
@SuppressWarnings("serial")
public static class TogglePropertyItem extends JCheckBoxMenuItem
{
/**
*
*/
public TogglePropertyItem(Object target, String name, String fieldname)
{
this(target, name, fieldname, false);
}
/**
*
*/
public TogglePropertyItem(Object target, String name, String fieldname,
boolean refresh)
{
this(target, name, fieldname, refresh, null);
}
/**
*
*/
public TogglePropertyItem(final Object target, String name,
final String fieldname, final boolean refresh,
ActionListener listener)
{
super(name);
// Swing notifies action listeners last-to-first, so the given listener is
// added first and will therefore be notified after the one registered below.
if (listener != null)
{
addActionListener(listener);
}
addActionListener(new ActionListener()
{
/**
*
*/
public void actionPerformed(ActionEvent e)
{
execute(target, fieldname, refresh);
}
});
PropertyChangeListener propertyChangeListener = new PropertyChangeListener()
{
/*
* (non-Javadoc)
* @see java.beans.PropertyChangeListener#propertyChange(java.beans.PropertyChangeEvent)
*/
public void propertyChange(PropertyChangeEvent evt)
{
if (evt.getPropertyName().equalsIgnoreCase(fieldname))
{
update(target, fieldname);
}
}
};
if (target instanceof mxGraphComponent)
{
((mxGraphComponent) target)
.addPropertyChangeListener(propertyChangeListener);
}
else if (target instanceof mxGraph)
{
((mxGraph) target)
.addPropertyChangeListener(propertyChangeListener);
}
update(target, fieldname);
}
/**
*
*/
public void update(Object target, String fieldname)
{
if (target != null && fieldname != null)
{
try
{
Method getter = target.getClass().getMethod(
"is" + fieldname);
if (getter != null)
{
Object current = getter.invoke(target);
if (current instanceof Boolean)
{
setSelected(((Boolean) current).booleanValue());
}
}
}
catch (Exception e)
{
// ignore
}
}
}
/**
*
*/
public void execute(Object target, String fieldname, boolean refresh)
{
if (target != null && fieldname != null)
{
try
{
Method getter = target.getClass().getMethod(
"is" + fieldname);
Method setter = target.getClass().getMethod(
"set" + fieldname, new Class[] { boolean.class });
Object current = getter.invoke(target);
if (current instanceof Boolean)
{
boolean value = !((Boolean) current).booleanValue();
setter.invoke(target, value);
setSelected(value);
}
if (refresh)
{
mxGraph graph = null;
if (target instanceof mxGraph)
{
graph = (mxGraph) target;
}
else if (target instanceof mxGraphComponent)
{
graph = ((mxGraphComponent) target).getGraph();
}
graph.refresh();
}
}
catch (Exception e)
{
// ignore
}
}
}
}
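/**
* Minimal usage sketch (illustrative; not taken from the upstream editor code):
* TogglePropertyItem binds a boolean bean property by name, so a grid toggle
* only needs the property name ("GridVisible" resolves to isGridVisible and
* setGridVisible on mxGraphComponent). The "Grid" label is a placeholder.
*/
static JCheckBoxMenuItem createGridToggleExample(mxGraphComponent graphComponent)
{
// The refresh flag makes execute() call graph.refresh() after flipping the value.
return new TogglePropertyItem(graphComponent, "Grid", "GridVisible", true);
}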
/**
* Performs an undo or redo step on the editor's undo manager.
*/
@SuppressWarnings("serial")
public static class HistoryAction extends AbstractAction
{
/**
*
*/
protected boolean undo;
/**
*
*/
public HistoryAction(boolean undo)
{
this.undo = undo;
}
/**
*
*/
public void actionPerformed(ActionEvent e)
{
BasicGraphEditor editor = getEditor(e);
if (editor != null)
{
if (undo)
{
editor.getUndoManager().undo();
}
else
{
editor.getUndoManager().redo();
}
}
}
}
/**
* Toggles bold or italic, either on the selected text of the in-place HTML
* editor or on the font style of the selected cells.
*/
@SuppressWarnings("serial")
public static class FontStyleAction extends AbstractAction
{
/**
*
*/
protected boolean bold;
/**
*
*/
public FontStyleAction(boolean bold)
{
this.bold = bold;
}
/**
*
*/
public void actionPerformed(ActionEvent e)
{
if (e.getSource() instanceof mxGraphComponent)
{
mxGraphComponent graphComponent = (mxGraphComponent) e
.getSource();
Component editorComponent = null;
if (graphComponent.getCellEditor() instanceof mxCellEditor)
{
editorComponent = ((mxCellEditor) graphComponent
.getCellEditor()).getEditor();
}
if (editorComponent instanceof JEditorPane)
{
JEditorPane editorPane = (JEditorPane) editorComponent;
int start = editorPane.getSelectionStart();
int ende = editorPane.getSelectionEnd();
String text = editorPane.getSelectedText();
if (text == null)
{
text = "";
}
try
{
HTMLEditorKit editorKit = new HTMLEditorKit();
HTMLDocument document = (HTMLDocument) editorPane
.getDocument();
document.remove(start, (ende - start));
editorKit.insertHTML(document, start, ((bold) ? "<b>"
: "<i>") + text + ((bold) ? "</b>" : "</i>"),
0, 0, (bold) ? HTML.Tag.B : HTML.Tag.I);
}
catch (Exception ex)
{
ex.printStackTrace();
}
editorPane.requestFocus();
editorPane.select(start, ende);
}
else
{
mxIGraphModel model = graphComponent.getGraph().getModel();
model.beginUpdate();
try
{
graphComponent.stopEditing(false);
graphComponent.getGraph().toggleCellStyleFlags(
mxConstants.STYLE_FONTSTYLE,
(bold) ? mxConstants.FONT_BOLD
: mxConstants.FONT_ITALIC);
}
finally
{
model.endUpdate();
}
}
}
}
}
/**
* Prompts for a warning message and attaches it to the selected cells.
*/
@SuppressWarnings("serial")
public static class WarningAction extends AbstractAction
{
/**
*
*/
public void actionPerformed(ActionEvent e)
{
if (e.getSource() instanceof mxGraphComponent)
{
mxGraphComponent graphComponent = (mxGraphComponent) e
.getSource();
Object[] cells = graphComponent.getGraph().getSelectionCells();
if (cells != null && cells.length > 0)
{
String warning = JOptionPane.showInputDialog(mxResources
.get("enterWarningMessage"));
for (int i = 0; i < cells.length; i++)
{
graphComponent.setCellWarning(cells[i], warning);
}
}
else
{
JOptionPane.showMessageDialog(graphComponent,
mxResources.get("noCellSelected"));
}
}
}
}
/**
* Replaces the current diagram with an empty one after asking to discard
* unsaved changes.
*/
@SuppressWarnings("serial")
public static class NewAction extends AbstractAction
{
/**
*
*/
public void actionPerformed(ActionEvent e)
{
BasicGraphEditor editor = getEditor(e);
if (editor != null)
{
if (!editor.isModified()
|| JOptionPane.showConfirmDialog(editor,
mxResources.get("loseChanges")) == JOptionPane.YES_OPTION)
{
mxGraph graph = editor.getGraphComponent().getGraph();
// Resets the model to a new, empty root cell
mxCell root = new mxCell();
root.insert(new mxCell());
graph.getModel().setRoot(root);
editor.setModified(false);
editor.setCurrentFile(null);
editor.getGraphComponent().zoomAndCenter();
}
}
}
}
/**
* Imports Dia .shape stencil files (a single file or a whole directory) and
* registers them as new shapes and palette entries.
*/
@SuppressWarnings("serial")
public static class ImportAction extends AbstractAction
{
/**
*
*/
protected String lastDir;
/**
* Loads and registers the shape as a new shape in mxGraphics2DCanvas and
* adds a new entry to use that shape in the specified palette
* @param palette The palette to add the shape to.
* @param nodeXml The raw XML of the shape
* @param path The path to the directory the shape exists in
* @return the string name of the shape
*/
public static String addStencilShape(EditorPalette palette,
String nodeXml, String path)
{
// Some editors place a 3 byte BOM at the start of files
// Ensure the first char is a "<"
int lessthanIndex = nodeXml.indexOf("<");
nodeXml = nodeXml.substring(lessthanIndex);
mxStencilShape newShape = new mxStencilShape(nodeXml);
String name = newShape.getName();
ImageIcon icon = null;
if (path != null)
{
String iconPath = path + newShape.getIconPath();
icon = new ImageIcon(iconPath);
}
// Registers the shape in the canvas shape registry
mxGraphics2DCanvas.putShape(name, newShape);
if (palette != null && icon != null)
{
palette.addTemplate(name, icon, "shape=" + name, 80, 80, "");
}
return name;
}
/**
*
*/
public void actionPerformed(ActionEvent e)
{
BasicGraphEditor editor = getEditor(e);
if (editor != null)
{
String wd = (lastDir != null) ? lastDir : System
.getProperty("user.dir");
JFileChooser fc = new JFileChooser(wd);
fc.setFileSelectionMode(JFileChooser.FILES_AND_DIRECTORIES);
// Adds file filter for Dia shape import
fc.addChoosableFileFilter(new DefaultFileFilter(".shape",
"Dia Shape " + mxResources.get("file") + " (.shape)"));
int rc = fc.showDialog(null, mxResources.get("importStencil"));
if (rc == JFileChooser.APPROVE_OPTION)
{
lastDir = fc.getSelectedFile().getParent();
try
{
if (fc.getSelectedFile().isDirectory())
{
EditorPalette palette = editor.insertPalette(fc
.getSelectedFile().getName());
for (File f : fc.getSelectedFile().listFiles(
new FilenameFilter()
{
public boolean accept(File dir,
String name)
{
return name.toLowerCase().endsWith(
".shape");
}
}))
{
String nodeXml = mxUtils.readFile(f
.getAbsolutePath());
addStencilShape(palette, nodeXml, f.getParent()
+ File.separator);
}
JComponent scrollPane = (JComponent) palette
.getParent().getParent();
editor.getLibraryPane().setSelectedComponent(
scrollPane);
// FIXME: Need to update the size of the palette to force a layout
// update. Re/in/validate of palette or parent does not work.
//editor.getLibraryPane().revalidate();
}
else
{
String nodeXml = mxUtils.readFile(fc
.getSelectedFile().getAbsolutePath());
String name = addStencilShape(null, nodeXml, null);
JOptionPane.showMessageDialog(editor, mxResources
.get("stencilImported",
new String[] { name }));
}
}
catch (IOException e1)
{
e1.printStackTrace();
}
}
}
}
}
/**
* Opens a diagram from one of the supported formats: .mxe/.xml, .png with
* embedded XML, .vdx or .txt (graph drawing).
*/
@SuppressWarnings("serial")
public static class OpenAction extends AbstractAction
{
/**
*
*/
protected String lastDir;
/**
*
*/
protected void resetEditor(BasicGraphEditor editor)
{
editor.setModified(false);
editor.getUndoManager().clear();
editor.getGraphComponent().zoomAndCenter();
}
/**
* Reads XML+PNG format: recovers the URL-encoded model XML that saveXmlPng
* stored in the PNG's compressed "mxGraphModel" text entry.
*/
protected void openXmlPng(BasicGraphEditor editor, File file)
throws IOException
{
Map<String, String> text = mxPngTextDecoder
.decodeCompressedText(new FileInputStream(file));
if (text != null)
{
String value = text.get("mxGraphModel");
if (value != null)
{
Document document = mxXmlUtils.parseXml(URLDecoder.decode(
value, "UTF-8"));
mxCodec codec = new mxCodec(document);
codec.decode(document.getDocumentElement(), editor
.getGraphComponent().getGraph().getModel());
editor.setCurrentFile(file);
resetEditor(editor);
return;
}
}
JOptionPane.showMessageDialog(editor,
mxResources.get("imageContainsNoDiagramData"));
}
/**
* Imports a graph drawing (.txt) file into the current graph and points the
* editor at a matching .mxe file name.
*/
protected void openGD(BasicGraphEditor editor, File file,
String gdText)
{
mxGraph graph = editor.getGraphComponent().getGraph();
// Replaces file extension with .mxe
String filename = file.getName();
filename = filename.substring(0, filename.length() - 4) + ".mxe";
if (new File(filename).exists()
&& JOptionPane.showConfirmDialog(editor,
mxResources.get("overwriteExistingFile")) != JOptionPane.YES_OPTION)
{
return;
}
((mxGraphModel) graph.getModel()).clear();
mxGdCodec.decode(gdText, graph);
editor.getGraphComponent().zoomAndCenter();
editor.setCurrentFile(new File(lastDir + "/" + filename));
}
/**
*
*/
public void actionPerformed(ActionEvent e)
{
BasicGraphEditor editor = getEditor(e);
if (editor != null)
{
if (!editor.isModified()
|| JOptionPane.showConfirmDialog(editor,
mxResources.get("loseChanges")) == JOptionPane.YES_OPTION)
{
mxGraph graph = editor.getGraphComponent().getGraph();
if (graph != null)
{
String wd = (lastDir != null) ? lastDir : System
.getProperty("user.dir");
JFileChooser fc = new JFileChooser(wd);
// Adds file filter for supported file format
DefaultFileFilter defaultFilter = new DefaultFileFilter(
".mxe", mxResources.get("allSupportedFormats")
+ " (.mxe, .png, .vdx)")
{
public boolean accept(File file)
{
String lcase = file.getName().toLowerCase();
return super.accept(file)
|| lcase.endsWith(".png")
|| lcase.endsWith(".vdx");
}
};
fc.addChoosableFileFilter(defaultFilter);
fc.addChoosableFileFilter(new DefaultFileFilter(".mxe",
"mxGraph Editor " + mxResources.get("file")
+ " (.mxe)"));
fc.addChoosableFileFilter(new DefaultFileFilter(".png",
"PNG+XML " + mxResources.get("file")
+ " (.png)"));
// Adds file filter for VDX import
fc.addChoosableFileFilter(new DefaultFileFilter(".vdx",
"XML Drawing " + mxResources.get("file")
+ " (.vdx)"));
// Adds file filter for GD import
fc.addChoosableFileFilter(new DefaultFileFilter(".txt",
"Graph Drawing " + mxResources.get("file")
+ " (.txt)"));
fc.setFileFilter(defaultFilter);
int rc = fc.showDialog(null,
mxResources.get("openFile"));
if (rc == JFileChooser.APPROVE_OPTION)
{
lastDir = fc.getSelectedFile().getParent();
try
{
if (fc.getSelectedFile().getAbsolutePath()
.toLowerCase().endsWith(".png"))
{
openXmlPng(editor, fc.getSelectedFile());
}
else if (fc.getSelectedFile().getAbsolutePath()
.toLowerCase().endsWith(".txt"))
{
openGD(editor, fc.getSelectedFile(),
mxUtils.readFile(fc
.getSelectedFile()
.getAbsolutePath()));
}
else
{
Document document = mxXmlUtils
.parseXml(mxUtils.readFile(fc
.getSelectedFile()
.getAbsolutePath()));
mxCodec codec = new mxCodec(document);
codec.decode(
document.getDocumentElement(),
graph.getModel());
editor.setCurrentFile(fc
.getSelectedFile());
resetEditor(editor);
}
}
catch (IOException ex)
{
ex.printStackTrace();
JOptionPane.showMessageDialog(
editor.getGraphComponent(),
ex.toString(),
mxResources.get("error"),
JOptionPane.ERROR_MESSAGE);
}
}
}
}
}
}
}
/**
* Toggles a boolean style key on the selected cells.
*/
@SuppressWarnings("serial")
public static class ToggleAction extends AbstractAction
{
/**
*
*/
protected String key;
/**
*
*/
protected boolean defaultValue;
/**
*
* @param key
*/
public ToggleAction(String key)
{
this(key, false);
}
/**
*
* @param key
*/
public ToggleAction(String key, boolean defaultValue)
{
this.key = key;
this.defaultValue = defaultValue;
}
/**
*
*/
public void actionPerformed(ActionEvent e)
{
mxGraph graph = mxGraphActions.getGraph(e);
if (graph != null)
{
graph.toggleCellStyles(key, defaultValue);
}
}
}
/**
* Sets the horizontal or vertical label position and alignment of the
* selected cells.
*/
@SuppressWarnings("serial")
public static class SetLabelPositionAction extends AbstractAction
{
/**
*
*/
protected String labelPosition, alignment;
/**
*
* @param key
*/
public SetLabelPositionAction(String labelPosition, String alignment)
{
this.labelPosition = labelPosition;
this.alignment = alignment;
}
/**
*
*/
public void actionPerformed(ActionEvent e)
{
mxGraph graph = mxGraphActions.getGraph(e);
if (graph != null && !graph.isSelectionEmpty())
{
graph.getModel().beginUpdate();
try
{
// Checks the orientation of the alignment to use the correct constants
if (labelPosition.equals(mxConstants.ALIGN_LEFT)
|| labelPosition.equals(mxConstants.ALIGN_CENTER)
|| labelPosition.equals(mxConstants.ALIGN_RIGHT))
{
graph.setCellStyles(mxConstants.STYLE_LABEL_POSITION,
labelPosition);
graph.setCellStyles(mxConstants.STYLE_ALIGN, alignment);
}
else
{
graph.setCellStyles(
mxConstants.STYLE_VERTICAL_LABEL_POSITION,
labelPosition);
graph.setCellStyles(mxConstants.STYLE_VERTICAL_ALIGN,
alignment);
}
}
finally
{
graph.getModel().endUpdate();
}
}
}
}
/**
* Applies a fixed style string to the selected cells.
*/
@SuppressWarnings("serial")
public static class SetStyleAction extends AbstractAction
{
/**
*
*/
protected String value;
/**
*
* @param key
*/
public SetStyleAction(String value)
{
this.value = value;
}
/**
*
*/
public void actionPerformed(ActionEvent e)
{
mxGraph graph = mxGraphActions.getGraph(e);
if (graph != null && !graph.isSelectionEmpty())
{
graph.setCellStyle(value);
}
}
}
/**
* Sets a single style key to a fixed value (or removes the key when the
* value is null) on the selected cells.
*/
@SuppressWarnings("serial")
public static class KeyValueAction extends AbstractAction
{
/**
*
*/
protected String key, value;
/**
*
* @param key
*/
public KeyValueAction(String key)
{
this(key, null);
}
/**
*
* @param key
*/
public KeyValueAction(String key, String value)
{
this.key = key;
this.value = value;
}
/**
*
*/
public void actionPerformed(ActionEvent e)
{
mxGraph graph = mxGraphActions.getGraph(e);
if (graph != null && !graph.isSelectionEmpty())
{
graph.setCellStyles(key, value);
}
}
}
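/**
* Minimal styling sketch (illustrative; not taken from the upstream editor
* code): KeyValueAction writes a single style key into the current selection,
* here an example fill color. It is a no-op when nothing is selected.
*/
static void applyFillColorExample(mxGraphComponent graphComponent)
{
new KeyValueAction(mxConstants.STYLE_FILLCOLOR, "#FF9900").actionPerformed(
new ActionEvent(graphComponent, ActionEvent.ACTION_PERFORMED, ""));
}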
/**
* Prompts for a value and writes it into the given style key of the
* selected cells; entering "none" removes the key.
*/
@SuppressWarnings("serial")
public static class PromptValueAction extends AbstractAction
{
/**
*
*/
protected String key, message;
/**
*
* @param key
*/
public PromptValueAction(String key, String message)
{
this.key = key;
this.message = message;
}
/**
*
*/
public void actionPerformed(ActionEvent e)
{
if (e.getSource() instanceof Component)
{
mxGraph graph = mxGraphActions.getGraph(e);
if (graph != null && !graph.isSelectionEmpty())
{
String value = (String) JOptionPane.showInputDialog(
(Component) e.getSource(),
mxResources.get("value"), message,
JOptionPane.PLAIN_MESSAGE, null, null, "");
if (value != null)
{
if (value.equals(mxConstants.NONE))
{
value = null;
}
graph.setCellStyles(key, value);
}
}
}
}
}
/**
* Aligns the selected cells along the given edge.
*/
@SuppressWarnings("serial")
public static class AlignCellsAction extends AbstractAction
{
/**
*
*/
protected String align;
/**
*
* @param key
*/
public AlignCellsAction(String align)
{
this.align = align;
}
/**
*
*/
public void actionPerformed(ActionEvent e)
{
mxGraph graph = mxGraphActions.getGraph(e);
if (graph != null && !graph.isSelectionEmpty())
{
graph.alignCells(align);
}
}
}
/**
* Resizes the selected cells to fit their labels via updateCellSize.
*/
@SuppressWarnings("serial")
public static class AutosizeAction extends AbstractAction
{
/**
*
*/
public void actionPerformed(ActionEvent e)
{
mxGraph graph = mxGraphActions.getGraph(e);
if (graph != null && !graph.isSelectionEmpty())
{
Object[] cells = graph.getSelectionCells();
mxIGraphModel model = graph.getModel();
model.beginUpdate();
try
{
for (int i = 0; i < cells.length; i++)
{
graph.updateCellSize(cells[i]);
}
}
finally
{
model.endUpdate();
}
}
}
}
/**
* Shows a color chooser and writes the picked color into the given style
* key of the selected cells.
*/
@SuppressWarnings("serial")
public static class ColorAction extends AbstractAction
{
/**
*
*/
protected String name, key;
/**
*
* @param key
*/
public ColorAction(String name, String key)
{
this.name = name;
this.key = key;
}
/**
*
*/
public void actionPerformed(ActionEvent e)
{
if (e.getSource() instanceof mxGraphComponent)
{
mxGraphComponent graphComponent = (mxGraphComponent) e
.getSource();
mxGraph graph = graphComponent.getGraph();
if (!graph.isSelectionEmpty())
{
Color newColor = JColorChooser.showDialog(graphComponent,
name, null);
if (newColor != null)
{
graph.setCellStyles(key, mxUtils.hexString(newColor));
}
}
}
}
}
/**
* Prompts for an image URL and sets it as the background image of the graph
* component.
*/
@SuppressWarnings("serial")
public static class BackgroundImageAction extends AbstractAction
{
/**
*
*/
public void actionPerformed(ActionEvent e)
{
if (e.getSource() instanceof mxGraphComponent)
{
mxGraphComponent graphComponent = (mxGraphComponent) e
.getSource();
String value = (String) JOptionPane.showInputDialog(
graphComponent, mxResources.get("backgroundImage"),
"URL", JOptionPane.PLAIN_MESSAGE, null, null,
"http://www.callatecs.com/images/background2.JPG");
if (value != null)
{
if (value.length() == 0)
{
graphComponent.setBackgroundImage(null);
}
else
{
Image background = mxUtils.loadImage(value);
// Incorrect URLs will result in no image.
// TODO provide feedback that the URL is not correct
if (background != null)
{
graphComponent.setBackgroundImage(new ImageIcon(
background));
}
}
// Forces a repaint of the outline
graphComponent.getGraph().repaint();
}
}
}
}
/**
* Shows a color chooser and sets the background color of the graph
* component's viewport.
*/
@SuppressWarnings("serial")
public static class BackgroundAction extends AbstractAction
{
/**
*
*/
public void actionPerformed(ActionEvent e)
{
if (e.getSource() instanceof mxGraphComponent)
{
mxGraphComponent graphComponent = (mxGraphComponent) e
.getSource();
Color newColor = JColorChooser.showDialog(graphComponent,
mxResources.get("background"), null);
if (newColor != null)
{
graphComponent.getViewport().setOpaque(true);
graphComponent.getViewport().setBackground(newColor);
}
// Forces a repaint of the outline
graphComponent.getGraph().repaint();
}
}
}
/**
* Shows a color chooser and sets the page background color of the graph
* component.
*/
@SuppressWarnings("serial")
public static class PageBackgroundAction extends AbstractAction
{
/**
*
*/
public void actionPerformed(ActionEvent e)
{
if (e.getSource() instanceof mxGraphComponent)
{
mxGraphComponent graphComponent = (mxGraphComponent) e
.getSource();
Color newColor = JColorChooser.showDialog(graphComponent,
mxResources.get("pageBackground"), null);
if (newColor != null)
{
graphComponent.setPageBackgroundColor(newColor);
}
// Forces a repaint of the component
graphComponent.repaint();
}
}
}
/**
* Prompts for a raw style string, pre-filled with the style of the selected
* cell, and applies it to the selection.
*/
@SuppressWarnings("serial")
public static class StyleAction extends AbstractAction
{
/**
*
*/
public void actionPerformed(ActionEvent e)
{
if (e.getSource() instanceof mxGraphComponent)
{
mxGraphComponent graphComponent = (mxGraphComponent) e
.getSource();
mxGraph graph = graphComponent.getGraph();
String initial = graph.getModel().getStyle(
graph.getSelectionCell());
String value = (String) JOptionPane.showInputDialog(
graphComponent, mxResources.get("style"),
mxResources.get("style"), JOptionPane.PLAIN_MESSAGE,
null, null, initial);
if (value != null)
{
graph.setCellStyle(value);
}
}
}
}
}
|
|
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution
// Automatically generated by LOXI from template of_class.java
// Do not modify
package org.projectfloodlight.openflow.protocol.ver13;
import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.stat.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.oxs.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.List;
import com.google.common.collect.ImmutableList;
import java.util.Set;
import io.netty.buffer.ByteBuf;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;
class OFInstructionApplyActionsVer13 implements OFInstructionApplyActions {
private static final Logger logger = LoggerFactory.getLogger(OFInstructionApplyActionsVer13.class);
// version: 1.3
final static byte WIRE_VERSION = 4;
final static int MINIMUM_LENGTH = 8;
// maximum OF message length: 16 bit, unsigned
final static int MAXIMUM_LENGTH = 0xFFFF;
private final static List<OFAction> DEFAULT_ACTIONS = ImmutableList.<OFAction>of();
// OF message fields
private final List<OFAction> actions;
//
// Immutable default instance
final static OFInstructionApplyActionsVer13 DEFAULT = new OFInstructionApplyActionsVer13(
DEFAULT_ACTIONS
);
// package private constructor - used by readers, builders, and factory
OFInstructionApplyActionsVer13(List<OFAction> actions) {
if(actions == null) {
throw new NullPointerException("OFInstructionApplyActionsVer13: property actions cannot be null");
}
this.actions = actions;
}
// Accessors for OF message fields
@Override
public OFInstructionType getType() {
return OFInstructionType.APPLY_ACTIONS;
}
@Override
public List<OFAction> getActions() {
return actions;
}
@Override
public OFVersion getVersion() {
return OFVersion.OF_13;
}
public OFInstructionApplyActions.Builder createBuilder() {
return new BuilderWithParent(this);
}
static class BuilderWithParent implements OFInstructionApplyActions.Builder {
final OFInstructionApplyActionsVer13 parentMessage;
// OF message fields
private boolean actionsSet;
private List<OFAction> actions;
BuilderWithParent(OFInstructionApplyActionsVer13 parentMessage) {
this.parentMessage = parentMessage;
}
@Override
public OFInstructionType getType() {
return OFInstructionType.APPLY_ACTIONS;
}
@Override
public List<OFAction> getActions() {
return actions;
}
@Override
public OFInstructionApplyActions.Builder setActions(List<OFAction> actions) {
this.actions = actions;
this.actionsSet = true;
return this;
}
@Override
public OFVersion getVersion() {
return OFVersion.OF_13;
}
@Override
public OFInstructionApplyActions build() {
List<OFAction> actions = this.actionsSet ? this.actions : parentMessage.actions;
if(actions == null)
throw new NullPointerException("Property actions must not be null");
//
return new OFInstructionApplyActionsVer13(
actions
);
}
}
static class Builder implements OFInstructionApplyActions.Builder {
// OF message fields
private boolean actionsSet;
private List<OFAction> actions;
@Override
public OFInstructionType getType() {
return OFInstructionType.APPLY_ACTIONS;
}
@Override
public List<OFAction> getActions() {
return actions;
}
@Override
public OFInstructionApplyActions.Builder setActions(List<OFAction> actions) {
this.actions = actions;
this.actionsSet = true;
return this;
}
@Override
public OFVersion getVersion() {
return OFVersion.OF_13;
}
//
@Override
public OFInstructionApplyActions build() {
List<OFAction> actions = this.actionsSet ? this.actions : DEFAULT_ACTIONS;
if(actions == null)
throw new NullPointerException("Property actions must not be null");
return new OFInstructionApplyActionsVer13(
actions
);
}
}
final static Reader READER = new Reader();
static class Reader implements OFMessageReader<OFInstructionApplyActions> {
@Override
public OFInstructionApplyActions readFrom(ByteBuf bb) throws OFParseError {
int start = bb.readerIndex();
// fixed value property type == 4
short type = bb.readShort();
if(type != (short) 0x4)
throw new OFParseError("Wrong type: Expected=OFInstructionType.APPLY_ACTIONS(4), got="+type);
int length = U16.f(bb.readShort());
if(length < MINIMUM_LENGTH)
throw new OFParseError("Wrong length: Expected to be >= " + MINIMUM_LENGTH + ", was: " + length);
if(bb.readableBytes() + (bb.readerIndex() - start) < length) {
// Buffer does not have all data yet
bb.readerIndex(start);
return null;
}
if(logger.isTraceEnabled())
logger.trace("readFrom - length={}", length);
// pad: 4 bytes
bb.skipBytes(4);
List<OFAction> actions = ChannelUtils.readList(bb, length - (bb.readerIndex() - start), OFActionVer13.READER);
OFInstructionApplyActionsVer13 instructionApplyActionsVer13 = new OFInstructionApplyActionsVer13(
actions
);
if(logger.isTraceEnabled())
logger.trace("readFrom - read={}", instructionApplyActionsVer13);
return instructionApplyActionsVer13;
}
}
public void putTo(PrimitiveSink sink) {
FUNNEL.funnel(this, sink);
}
final static OFInstructionApplyActionsVer13Funnel FUNNEL = new OFInstructionApplyActionsVer13Funnel();
static class OFInstructionApplyActionsVer13Funnel implements Funnel<OFInstructionApplyActionsVer13> {
private static final long serialVersionUID = 1L;
@Override
public void funnel(OFInstructionApplyActionsVer13 message, PrimitiveSink sink) {
// fixed value property type = 4
sink.putShort((short) 0x4);
// FIXME: skip funnel of length
// skip pad (4 bytes)
FunnelUtils.putList(message.actions, sink);
}
}
public void writeTo(ByteBuf bb) {
WRITER.write(bb, this);
}
final static Writer WRITER = new Writer();
static class Writer implements OFMessageWriter<OFInstructionApplyActionsVer13> {
@Override
public void write(ByteBuf bb, OFInstructionApplyActionsVer13 message) {
int startIndex = bb.writerIndex();
// fixed value property type = 4
bb.writeShort((short) 0x4);
// length of this variable-length message; patched in at the end
int lengthIndex = bb.writerIndex();
bb.writeShort(U16.t(0));
// pad: 4 bytes
bb.writeZero(4);
ChannelUtils.writeList(bb, message.actions);
// update length field
int length = bb.writerIndex() - startIndex;
if (length > MAXIMUM_LENGTH) {
throw new IllegalArgumentException("OFInstructionApplyActionsVer13: message length (" + length + ") exceeds maximum (0xFFFF)");
}
bb.setShort(lengthIndex, length);
}
}
@Override
public String toString() {
StringBuilder b = new StringBuilder("OFInstructionApplyActionsVer13(");
b.append("actions=").append(actions);
b.append(")");
return b.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
OFInstructionApplyActionsVer13 other = (OFInstructionApplyActionsVer13) obj;
if (actions == null) {
if (other.actions != null)
return false;
} else if (!actions.equals(other.actions))
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((actions == null) ? 0 : actions.hashCode());
return result;
}
}
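/**
* Minimal round-trip sketch (illustrative; not part of the generated code).
* Application code would normally obtain the instruction through the OF 1.3
* factory (OFFactories.getFactory(OFVersion.OF_13).instructions()
* .applyActions(...)); the package-private builder is used here only so the
* example stays within this file. The empty action list mirrors DEFAULT_ACTIONS.
*/
class OFInstructionApplyActionsVer13UsageSketch {
static void roundTripExample(ByteBuf buffer) throws OFParseError {
OFInstructionApplyActions instruction = new OFInstructionApplyActionsVer13.Builder()
.setActions(ImmutableList.<OFAction>of())
.build();
// writeTo emits type=4, a provisional length, 4 pad bytes and the action list,
// then patches the real length; READER parses the same bytes back.
((OFInstructionApplyActionsVer13) instruction).writeTo(buffer);
OFInstructionApplyActions read = OFInstructionApplyActionsVer13.READER.readFrom(buffer);
assert instruction.equals(read);
}
}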
|
|
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.jps.model.serialization;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.JDOMUtil;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.io.FileFilters;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.io.FileUtilRt;
import com.intellij.util.ArrayUtil;
import com.intellij.util.concurrency.BoundedTaskExecutor;
import gnu.trove.THashSet;
import org.jdom.Element;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.jps.TimingLog;
import org.jetbrains.jps.model.JpsDummyElement;
import org.jetbrains.jps.model.JpsElement;
import org.jetbrains.jps.model.JpsElementFactory;
import org.jetbrains.jps.model.JpsProject;
import org.jetbrains.jps.model.java.JpsJavaModuleType;
import org.jetbrains.jps.model.library.sdk.JpsSdkType;
import org.jetbrains.jps.model.module.JpsModule;
import org.jetbrains.jps.model.serialization.artifact.JpsArtifactSerializer;
import org.jetbrains.jps.model.serialization.facet.JpsFacetSerializer;
import org.jetbrains.jps.model.serialization.impl.JpsModuleSerializationDataExtensionImpl;
import org.jetbrains.jps.model.serialization.impl.JpsProjectSerializationDataExtensionImpl;
import org.jetbrains.jps.model.serialization.library.JpsLibraryTableSerializer;
import org.jetbrains.jps.model.serialization.library.JpsSdkTableSerializer;
import org.jetbrains.jps.model.serialization.module.JpsModuleClasspathSerializer;
import org.jetbrains.jps.model.serialization.module.JpsModulePropertiesSerializer;
import org.jetbrains.jps.model.serialization.module.JpsModuleRootModelSerializer;
import org.jetbrains.jps.model.serialization.runConfigurations.JpsRunConfigurationSerializer;
import org.jetbrains.jps.service.SharedThreadPool;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.Future;
/**
* @author nik
*/
public class JpsProjectLoader extends JpsLoaderBase {
private static final Logger LOG = Logger.getInstance(JpsProjectLoader.class);
private static final BoundedTaskExecutor ourThreadPool = new BoundedTaskExecutor("JpsProjectLoader pool", SharedThreadPool.getInstance(), Runtime.getRuntime().availableProcessors());
public static final String CLASSPATH_ATTRIBUTE = "classpath";
public static final String CLASSPATH_DIR_ATTRIBUTE = "classpath-dir";
private final JpsProject myProject;
private final Map<String, String> myPathVariables;
private JpsProjectLoader(JpsProject project, Map<String, String> pathVariables, File baseDir) {
super(createProjectMacroExpander(pathVariables, baseDir));
myProject = project;
myPathVariables = pathVariables;
myProject.getContainer().setChild(JpsProjectSerializationDataExtensionImpl.ROLE, new JpsProjectSerializationDataExtensionImpl(baseDir));
}
static JpsMacroExpander createProjectMacroExpander(Map<String, String> pathVariables, File baseDir) {
final JpsMacroExpander expander = new JpsMacroExpander(pathVariables);
expander.addFileHierarchyReplacements(PathMacroUtil.PROJECT_DIR_MACRO_NAME, baseDir);
return expander;
}
public static void loadProject(final JpsProject project, Map<String, String> pathVariables, String projectPath) throws IOException {
File file = new File(FileUtil.toCanonicalPath(projectPath));
if (file.isFile() && projectPath.endsWith(".ipr")) {
new JpsProjectLoader(project, pathVariables, file.getParentFile()).loadFromIpr(file);
}
else {
File dotIdea = new File(file, PathMacroUtil.DIRECTORY_STORE_NAME);
File directory;
if (dotIdea.isDirectory()) {
directory = dotIdea;
}
else if (file.isDirectory() && file.getName().equals(PathMacroUtil.DIRECTORY_STORE_NAME)) {
directory = file;
}
else {
throw new IOException("Cannot find IntelliJ IDEA project files at " + projectPath);
}
new JpsProjectLoader(project, pathVariables, directory.getParentFile()).loadFromDirectory(directory);
}
}
public static String getDirectoryBaseProjectName(File dir) {
File nameFile = new File(dir, ".name");
if (nameFile.isFile()) {
try {
return FileUtilRt.loadFile(nameFile).trim();
}
catch (IOException ignored) {
}
}
return dir.getParentFile().getName();
}
private void loadFromDirectory(File dir) {
myProject.setName(getDirectoryBaseProjectName(dir));
JpsSdkType<?> projectSdkType = loadProjectRoot(loadRootElement(new File(dir, "misc.xml")));
for (JpsModelSerializerExtension extension : JpsModelSerializerExtension.getExtensions()) {
for (JpsProjectExtensionSerializer serializer : extension.getProjectExtensionSerializers()) {
loadComponents(dir, "misc.xml", serializer, myProject);
}
}
loadModules(loadRootElement(new File(dir, "modules.xml")), projectSdkType);
Runnable timingLog = TimingLog.startActivity("loading project libraries");
for (File libraryFile : listXmlFiles(new File(dir, "libraries"))) {
loadProjectLibraries(loadRootElement(libraryFile));
}
timingLog.run();
Runnable artifactsTimingLog = TimingLog.startActivity("loading artifacts");
for (File artifactFile : listXmlFiles(new File(dir, "artifacts"))) {
loadArtifacts(loadRootElement(artifactFile));
}
artifactsTimingLog.run();
if (hasRunConfigurationSerializers()) {
Runnable runConfTimingLog = TimingLog.startActivity("loading run configurations");
for (File configurationFile : listXmlFiles(new File(dir, "runConfigurations"))) {
JpsRunConfigurationSerializer.loadRunConfigurations(myProject, loadRootElement(configurationFile));
}
File workspaceFile = new File(dir, "workspace.xml");
if (workspaceFile.exists()) {
Element runManager = JDomSerializationUtil.findComponent(loadRootElement(workspaceFile), "RunManager");
JpsRunConfigurationSerializer.loadRunConfigurations(myProject, runManager);
}
runConfTimingLog.run();
}
}
private static boolean hasRunConfigurationSerializers() {
for (JpsModelSerializerExtension extension : JpsModelSerializerExtension.getExtensions()) {
if (!extension.getRunConfigurationPropertiesSerializers().isEmpty()) {
return true;
}
}
return false;
}
@NotNull
private static File[] listXmlFiles(final File dir) {
File[] files = dir.listFiles(FileFilters.filesWithExtension("xml"));
return files != null ? files : ArrayUtil.EMPTY_FILE_ARRAY;
}
private void loadFromIpr(File iprFile) {
final Element iprRoot = loadRootElement(iprFile);
String projectName = FileUtil.getNameWithoutExtension(iprFile);
myProject.setName(projectName);
File iwsFile = new File(iprFile.getParent(), projectName + ".iws");
Element iwsRoot = iwsFile.exists() ? loadRootElement(iwsFile) : null;
JpsSdkType<?> projectSdkType = loadProjectRoot(iprRoot);
for (JpsModelSerializerExtension extension : JpsModelSerializerExtension.getExtensions()) {
for (JpsProjectExtensionSerializer serializer : extension.getProjectExtensionSerializers()) {
Element rootTag = JpsProjectExtensionSerializer.WORKSPACE_FILE.equals(serializer.getConfigFileName()) ? iwsRoot : iprRoot;
Element component = JDomSerializationUtil.findComponent(rootTag, serializer.getComponentName());
if (component != null) {
serializer.loadExtension(myProject, component);
}
else {
serializer.loadExtensionWithDefaultSettings(myProject);
}
}
}
loadModules(iprRoot, projectSdkType);
loadProjectLibraries(JDomSerializationUtil.findComponent(iprRoot, "libraryTable"));
loadArtifacts(JDomSerializationUtil.findComponent(iprRoot, "ArtifactManager"));
if (hasRunConfigurationSerializers()) {
JpsRunConfigurationSerializer.loadRunConfigurations(myProject, JDomSerializationUtil.findComponent(iprRoot, "ProjectRunConfigurationManager"));
JpsRunConfigurationSerializer.loadRunConfigurations(myProject, JDomSerializationUtil.findComponent(iwsRoot, "RunManager"));
}
}
private void loadArtifacts(@Nullable Element artifactManagerComponent) {
JpsArtifactSerializer.loadArtifacts(myProject, artifactManagerComponent);
}
@Nullable
private JpsSdkType<?> loadProjectRoot(Element root) {
JpsSdkType<?> sdkType = null;
Element rootManagerElement = JDomSerializationUtil.findComponent(root, "ProjectRootManager");
if (rootManagerElement != null) {
String sdkName = rootManagerElement.getAttributeValue("project-jdk-name");
String sdkTypeId = rootManagerElement.getAttributeValue("project-jdk-type");
if (sdkName != null) {
sdkType = JpsSdkTableSerializer.getSdkType(sdkTypeId);
JpsSdkTableSerializer.setSdkReference(myProject.getSdkReferencesTable(), sdkName, sdkType);
}
}
return sdkType;
}
private void loadProjectLibraries(@Nullable Element libraryTableElement) {
JpsLibraryTableSerializer.loadLibraries(libraryTableElement, myProject.getLibraryCollection());
}
private void loadModules(Element root, final @Nullable JpsSdkType<?> projectSdkType) {
Runnable timingLog = TimingLog.startActivity("loading modules");
Element componentRoot = JDomSerializationUtil.findComponent(root, "ProjectModuleManager");
if (componentRoot == null) return;
final Set<File> foundFiles = new THashSet<>(FileUtil.FILE_HASHING_STRATEGY);
final List<File> moduleFiles = new ArrayList<>();
for (Element moduleElement : JDOMUtil.getChildren(componentRoot.getChild("modules"), "module")) {
final String path = moduleElement.getAttributeValue("filepath");
final File file = new File(path);
if (foundFiles.add(file) && file.exists()) {
moduleFiles.add(file);
}
else {
LOG.info("Module '" + FileUtil.getNameWithoutExtension(file) + "' is skipped: " + file.getAbsolutePath() + " doesn't exist");
}
}
List<JpsModule> modules = loadModules(moduleFiles, projectSdkType, myPathVariables);
for (JpsModule module : modules) {
myProject.addModule(module);
}
timingLog.run();
}
@NotNull
public static List<JpsModule> loadModules(@NotNull List<File> moduleFiles, @Nullable final JpsSdkType<?> projectSdkType,
@NotNull final Map<String, String> pathVariables) {
List<JpsModule> modules = new ArrayList<>();
List<Future<Pair<File, Element>>> futureModuleFilesContents = new ArrayList<>();
for (final File file : moduleFiles) {
futureModuleFilesContents.add(ourThreadPool.submit(() -> {
final JpsMacroExpander expander = createModuleMacroExpander(pathVariables, file);
final Element moduleRoot = loadRootElement(file, expander);
return Pair.create(file, moduleRoot);
}));
}
try {
final List<String> classpathDirs = new ArrayList<>();
for (Future<Pair<File, Element>> moduleFile : futureModuleFilesContents) {
final String classpathDir = moduleFile.get().getSecond().getAttributeValue(CLASSPATH_DIR_ATTRIBUTE);
if (classpathDir != null) {
classpathDirs.add(classpathDir);
}
}
List<Future<JpsModule>> futures = new ArrayList<>();
for (final Future<Pair<File, Element>> futureModuleFile : futureModuleFilesContents) {
final Pair<File, Element> moduleFile = futureModuleFile.get();
futures.add(ourThreadPool.submit(
() -> loadModule(moduleFile.getFirst(), moduleFile.getSecond(), classpathDirs, projectSdkType, pathVariables)));
}
for (Future<JpsModule> future : futures) {
JpsModule module = future.get();
if (module != null) {
modules.add(module);
}
}
return modules;
}
catch (Exception e) {
throw new RuntimeException(e);
}
}
@Nullable
private static JpsModule loadModule(@NotNull File file, @NotNull Element moduleRoot, List<String> paths,
@Nullable JpsSdkType<?> projectSdkType, Map<String, String> pathVariables) {
String name = FileUtil.getNameWithoutExtension(file);
final String typeId = moduleRoot.getAttributeValue("type");
final JpsModulePropertiesSerializer<?> serializer = getModulePropertiesSerializer(typeId);
final JpsModule module = createModule(name, moduleRoot, serializer);
module.getContainer().setChild(JpsModuleSerializationDataExtensionImpl.ROLE,
new JpsModuleSerializationDataExtensionImpl(file.getParentFile()));
for (JpsModelSerializerExtension extension : JpsModelSerializerExtension.getExtensions()) {
extension.loadModuleOptions(module, moduleRoot);
}
String baseModulePath = FileUtil.toSystemIndependentName(file.getParent());
String classpath = moduleRoot.getAttributeValue(CLASSPATH_ATTRIBUTE);
if (classpath == null) {
JpsModuleRootModelSerializer.loadRootModel(module, JDomSerializationUtil.findComponent(moduleRoot, "NewModuleRootManager"),
projectSdkType);
}
else {
for (JpsModelSerializerExtension extension : JpsModelSerializerExtension.getExtensions()) {
JpsModuleClasspathSerializer classpathSerializer = extension.getClasspathSerializer();
if (classpathSerializer != null && classpathSerializer.getClasspathId().equals(classpath)) {
String classpathDir = moduleRoot.getAttributeValue(CLASSPATH_DIR_ATTRIBUTE);
final JpsMacroExpander expander = createModuleMacroExpander(pathVariables, file);
classpathSerializer.loadClasspath(module, classpathDir, baseModulePath, expander, paths, projectSdkType);
}
}
}
JpsFacetSerializer.loadFacets(module, JDomSerializationUtil.findComponent(moduleRoot, "FacetManager"));
return module;
}
static JpsMacroExpander createModuleMacroExpander(final Map<String, String> pathVariables, File moduleFile) {
final JpsMacroExpander expander = new JpsMacroExpander(pathVariables);
String moduleDirPath = PathMacroUtil.getModuleDir(moduleFile.getAbsolutePath());
if (moduleDirPath != null) {
expander.addFileHierarchyReplacements(PathMacroUtil.MODULE_DIR_MACRO_NAME, new File(FileUtil.toSystemDependentName(moduleDirPath)));
}
return expander;
}
private static <P extends JpsElement> JpsModule createModule(String name, Element moduleRoot, JpsModulePropertiesSerializer<P> loader) {
String componentName = loader.getComponentName();
Element component = componentName != null ? JDomSerializationUtil.findComponent(moduleRoot, componentName) : null;
return JpsElementFactory.getInstance().createModule(name, loader.getType(), loader.loadProperties(component));
}
private static JpsModulePropertiesSerializer<?> getModulePropertiesSerializer(@Nullable String typeId) {
for (JpsModelSerializerExtension extension : JpsModelSerializerExtension.getExtensions()) {
for (JpsModulePropertiesSerializer<?> loader : extension.getModulePropertiesSerializers()) {
if (loader.getTypeId().equals(typeId)) {
return loader;
}
}
}
return new JpsModulePropertiesSerializer<JpsDummyElement>(JpsJavaModuleType.INSTANCE, "JAVA_MODULE", null) {
@Override
public JpsDummyElement loadProperties(@Nullable Element componentElement) {
return JpsElementFactory.getInstance().createDummyElement();
}
@Override
public void saveProperties(@NotNull JpsDummyElement properties, @NotNull Element componentElement) {
}
};
}
}
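/**
* Minimal usage sketch (illustrative; not part of the original class): loads an
* .ipr file or a directory-based (.idea) project into a freshly created model.
* The path-variable map plays the role of the IDE's "Path Variables" settings
* (e.g. MAVEN_REPOSITORY).
*/
class JpsProjectLoaderUsageSketch {
static JpsProject loadExample(String projectPath, Map<String, String> pathVariables) throws IOException {
JpsProject project = JpsElementFactory.getInstance().createModel().getProject();
JpsProjectLoader.loadProject(project, pathVariables, projectPath);
return project;
}
}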
|
|
/*
* Copyright (C) 2013 Square, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Forked from OkHttp 2.5.0
*/
package io.grpc.okhttp.internal.framed;
import com.google.errorprone.annotations.FormatMethod;
import io.grpc.okhttp.internal.Protocol;
import java.io.IOException;
import java.util.List;
import java.util.logging.Logger;
import okio.Buffer;
import okio.BufferedSink;
import okio.BufferedSource;
import okio.ByteString;
import okio.Source;
import okio.Timeout;
import static io.grpc.okhttp.internal.framed.Http2.FrameLogger.formatHeader;
import static java.lang.String.format;
import static java.util.logging.Level.FINE;
import static okio.ByteString.EMPTY;
/**
* Read and write HTTP/2 frames.
* <p>
* This implementation assumes we do not send an increased
* {@link io.grpc.okhttp.internal.framed.Settings#getMaxFrameSize frame size setting} to the peer. Hence, we
* expect all frames to have a max length of {@link #INITIAL_MAX_FRAME_SIZE}.
* <p>http://tools.ietf.org/html/draft-ietf-httpbis-http2-17
*/
public final class Http2 implements Variant {
private static final Logger logger = Logger.getLogger(FrameLogger.class.getName());
@Override public Protocol getProtocol() {
return Protocol.HTTP_2;
}
private static final ByteString CONNECTION_PREFACE
= ByteString.encodeUtf8("PRI * HTTP/2.0\r\n\r\nSM\r\n\r\n");
/** The initial max frame size, applied independently when writing to, or reading from, the peer. */
static final int INITIAL_MAX_FRAME_SIZE = 0x4000; // 16384
static final byte TYPE_DATA = 0x0;
static final byte TYPE_HEADERS = 0x1;
static final byte TYPE_PRIORITY = 0x2;
static final byte TYPE_RST_STREAM = 0x3;
static final byte TYPE_SETTINGS = 0x4;
static final byte TYPE_PUSH_PROMISE = 0x5;
static final byte TYPE_PING = 0x6;
static final byte TYPE_GOAWAY = 0x7;
static final byte TYPE_WINDOW_UPDATE = 0x8;
static final byte TYPE_CONTINUATION = 0x9;
static final byte FLAG_NONE = 0x0;
static final byte FLAG_ACK = 0x1; // Used for settings and ping.
static final byte FLAG_END_STREAM = 0x1; // Used for headers and data.
static final byte FLAG_END_HEADERS = 0x4; // Used for headers and continuation.
static final byte FLAG_END_PUSH_PROMISE = 0x4;
static final byte FLAG_PADDED = 0x8; // Used for headers and data.
static final byte FLAG_PRIORITY = 0x20; // Used for headers.
static final byte FLAG_COMPRESSED = 0x20; // Used for data.
/**
* Creates a frame reader with max header table size of 4096 and data frame
* compression disabled.
*/
@Override public FrameReader newReader(BufferedSource source, boolean client) {
return new Reader(source, 4096, client);
}
@Override public io.grpc.okhttp.internal.framed.FrameWriter newWriter(BufferedSink sink, boolean client) {
return new Writer(sink, client);
}
static final class Reader implements FrameReader {
private final BufferedSource source;
private final ContinuationSource continuation;
private final boolean client;
// Visible for testing.
final Hpack.Reader hpackReader;
Reader(BufferedSource source, int headerTableSize, boolean client) {
this.source = source;
this.client = client;
this.continuation = new ContinuationSource(this.source);
this.hpackReader = new Hpack.Reader(headerTableSize, continuation);
}
@Override public void readConnectionPreface() throws IOException {
if (client) return; // Nothing to read; servers don't send a connection preface!
ByteString connectionPreface = source.readByteString(CONNECTION_PREFACE.size());
if (logger.isLoggable(FINE)) logger.fine(format("<< CONNECTION %s", connectionPreface.hex()));
if (!CONNECTION_PREFACE.equals(connectionPreface)) {
throw ioException("Expected a connection header but was %s", connectionPreface.utf8());
}
}
@Override public boolean nextFrame(Handler handler) throws IOException {
try {
source.require(9); // Frame header size
} catch (IOException e) {
return false; // This might be a normal socket close.
}
/* 0 1 2 3
* 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
* +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
* | Length (24) |
* +---------------+---------------+---------------+
* | Type (8) | Flags (8) |
* +-+-+-----------+---------------+-------------------------------+
* |R| Stream Identifier (31) |
* +=+=============================================================+
* | Frame Payload (0...) ...
* +---------------------------------------------------------------+
*/
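/* Illustrative decode (not from the original sources): the 9-byte header
 * 00 00 0c 01 05 00 00 00 03 would parse below as length = 0x00000c = 12,
 * type = 0x01 (HEADERS), flags = 0x05 (END_STREAM | END_HEADERS), and
 * streamId = 3 (the reserved high bit is masked off). */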
int length = readMedium(source);
if (length < 0 || length > INITIAL_MAX_FRAME_SIZE) {
throw ioException("FRAME_SIZE_ERROR: %s", length);
}
byte type = (byte) (source.readByte() & 0xff);
byte flags = (byte) (source.readByte() & 0xff);
int streamId = (source.readInt() & 0x7fffffff); // Ignore reserved bit.
if (logger.isLoggable(FINE)) logger.fine(formatHeader(true, streamId, length, type, flags));
switch (type) {
case TYPE_DATA:
readData(handler, length, flags, streamId);
break;
case TYPE_HEADERS:
readHeaders(handler, length, flags, streamId);
break;
case TYPE_PRIORITY:
readPriority(handler, length, flags, streamId);
break;
case TYPE_RST_STREAM:
readRstStream(handler, length, flags, streamId);
break;
case TYPE_SETTINGS:
readSettings(handler, length, flags, streamId);
break;
case TYPE_PUSH_PROMISE:
readPushPromise(handler, length, flags, streamId);
break;
case TYPE_PING:
readPing(handler, length, flags, streamId);
break;
case TYPE_GOAWAY:
readGoAway(handler, length, flags, streamId);
break;
case TYPE_WINDOW_UPDATE:
readWindowUpdate(handler, length, flags, streamId);
break;
default:
// Implementations MUST discard frames that have unknown or unsupported types.
source.skip(length);
}
return true;
}
private void readHeaders(Handler handler, int length, byte flags, int streamId)
throws IOException {
if (streamId == 0) throw ioException("PROTOCOL_ERROR: TYPE_HEADERS streamId == 0");
boolean endStream = (flags & FLAG_END_STREAM) != 0;
short padding = (flags & FLAG_PADDED) != 0 ? (short) (source.readByte() & 0xff) : 0;
if ((flags & FLAG_PRIORITY) != 0) {
readPriority(handler, streamId);
length -= 5; // account for above read.
}
length = lengthWithoutPadding(length, flags, padding);
List<Header> headerBlock = readHeaderBlock(length, padding, flags, streamId);
handler.headers(false, endStream, streamId, -1, headerBlock, HeadersMode.HTTP_20_HEADERS);
}
private List<Header> readHeaderBlock(int length, short padding, byte flags, int streamId)
throws IOException {
continuation.length = continuation.left = length;
continuation.padding = padding;
continuation.flags = flags;
continuation.streamId = streamId;
// TODO: Concat multi-value headers with 0x0, except COOKIE, which uses 0x3B, 0x20.
// http://tools.ietf.org/html/draft-ietf-httpbis-http2-17#section-8.1.2.5
hpackReader.readHeaders();
return hpackReader.getAndResetHeaderList();
}
private void readData(Handler handler, int length, byte flags, int streamId)
throws IOException {
// TODO: checkState open or half-closed (local) or raise STREAM_CLOSED
boolean inFinished = (flags & FLAG_END_STREAM) != 0;
boolean gzipped = (flags & FLAG_COMPRESSED) != 0;
if (gzipped) {
throw ioException("PROTOCOL_ERROR: FLAG_COMPRESSED without SETTINGS_COMPRESS_DATA");
}
short padding = (flags & FLAG_PADDED) != 0 ? (short) (source.readByte() & 0xff) : 0;
length = lengthWithoutPadding(length, flags, padding);
handler.data(inFinished, streamId, source, length);
source.skip(padding);
}
private void readPriority(
Handler handler, int length, @SuppressWarnings("UnusedVariable") byte flags, int streamId)
throws IOException {
if (length != 5) throw ioException("TYPE_PRIORITY length: %d != 5", length);
if (streamId == 0) throw ioException("TYPE_PRIORITY streamId == 0");
readPriority(handler, streamId);
}
private void readPriority(Handler handler, int streamId) throws IOException {
int w1 = source.readInt();
boolean exclusive = (w1 & 0x80000000) != 0;
int streamDependency = (w1 & 0x7fffffff);
int weight = (source.readByte() & 0xff) + 1;
handler.priority(streamId, streamDependency, weight, exclusive);
}
private void readRstStream(
Handler handler, int length, @SuppressWarnings("UnusedVariable") byte flags, int streamId)
throws IOException {
if (length != 4) throw ioException("TYPE_RST_STREAM length: %d != 4", length);
if (streamId == 0) throw ioException("TYPE_RST_STREAM streamId == 0");
int errorCodeInt = source.readInt();
io.grpc.okhttp.internal.framed.ErrorCode errorCode = io.grpc.okhttp.internal.framed.ErrorCode.fromHttp2(errorCodeInt);
if (errorCode == null) {
throw ioException("TYPE_RST_STREAM unexpected error code: %d", errorCodeInt);
}
handler.rstStream(streamId, errorCode);
}
private void readSettings(Handler handler, int length, byte flags, int streamId)
throws IOException {
if (streamId != 0) throw ioException("TYPE_SETTINGS streamId != 0");
if ((flags & FLAG_ACK) != 0) {
if (length != 0) throw ioException("FRAME_SIZE_ERROR ack frame should be empty!");
handler.ackSettings();
return;
}
if (length % 6 != 0) throw ioException("TYPE_SETTINGS length %% 6 != 0: %s", length);
io.grpc.okhttp.internal.framed.Settings settings = new io.grpc.okhttp.internal.framed.Settings();
for (int i = 0; i < length; i += 6) {
short id = source.readShort();
int value = source.readInt();
switch (id) {
case 1: // SETTINGS_HEADER_TABLE_SIZE
break;
case 2: // SETTINGS_ENABLE_PUSH
if (value != 0 && value != 1) {
throw ioException("PROTOCOL_ERROR SETTINGS_ENABLE_PUSH != 0 or 1");
}
break;
case 3: // SETTINGS_MAX_CONCURRENT_STREAMS
id = 4; // Renumbered in draft 10.
break;
case 4: // SETTINGS_INITIAL_WINDOW_SIZE
id = 7; // Renumbered in draft 10.
if (value < 0) {
throw ioException("PROTOCOL_ERROR SETTINGS_INITIAL_WINDOW_SIZE > 2^31 - 1");
}
break;
case 5: // SETTINGS_MAX_FRAME_SIZE
if (value < INITIAL_MAX_FRAME_SIZE || value > 16777215) {
throw ioException("PROTOCOL_ERROR SETTINGS_MAX_FRAME_SIZE: %s", value);
}
break;
case 6: // SETTINGS_MAX_HEADER_LIST_SIZE
break; // Advisory only, so ignored.
default:
// Implementations MUST ignore any unknown or unsupported identifier.
continue;
}
settings.set(id, 0, value);
}
handler.settings(false, settings);
if (settings.getHeaderTableSize() >= 0) {
hpackReader.headerTableSizeSetting(settings.getHeaderTableSize());
}
}
private void readPushPromise(Handler handler, int length, byte flags, int streamId)
throws IOException {
if (streamId == 0) {
throw ioException("PROTOCOL_ERROR: TYPE_PUSH_PROMISE streamId == 0");
}
short padding = (flags & FLAG_PADDED) != 0 ? (short) (source.readByte() & 0xff) : 0;
int promisedStreamId = source.readInt() & 0x7fffffff;
length -= 4; // account for above read.
length = lengthWithoutPadding(length, flags, padding);
List<Header> headerBlock = readHeaderBlock(length, padding, flags, streamId);
handler.pushPromise(streamId, promisedStreamId, headerBlock);
}
private void readPing(Handler handler, int length, byte flags, int streamId)
throws IOException {
if (length != 8) throw ioException("TYPE_PING length != 8: %s", length);
if (streamId != 0) throw ioException("TYPE_PING streamId != 0");
int payload1 = source.readInt();
int payload2 = source.readInt();
boolean ack = (flags & FLAG_ACK) != 0;
handler.ping(ack, payload1, payload2);
}
private void readGoAway(
Handler handler, int length, @SuppressWarnings("UnusedVariable") byte flags, int streamId)
throws IOException {
if (length < 8) throw ioException("TYPE_GOAWAY length < 8: %s", length);
if (streamId != 0) throw ioException("TYPE_GOAWAY streamId != 0");
int lastStreamId = source.readInt();
int errorCodeInt = source.readInt();
int opaqueDataLength = length - 8;
io.grpc.okhttp.internal.framed.ErrorCode errorCode = io.grpc.okhttp.internal.framed.ErrorCode.fromHttp2(errorCodeInt);
if (errorCode == null) {
throw ioException("TYPE_GOAWAY unexpected error code: %d", errorCodeInt);
}
ByteString debugData = EMPTY;
if (opaqueDataLength > 0) { // Must read debug data in order to not corrupt the connection.
debugData = source.readByteString(opaqueDataLength);
}
handler.goAway(lastStreamId, errorCode, debugData);
}
private void readWindowUpdate(
Handler handler, int length, @SuppressWarnings("UnusedVariable") byte flags, int streamId)
throws IOException {
if (length != 4) throw ioException("TYPE_WINDOW_UPDATE length !=4: %s", length);
long increment = (source.readInt() & 0x7fffffffL);
if (increment == 0) throw ioException("windowSizeIncrement was 0");
handler.windowUpdate(streamId, increment);
}
@Override public void close() throws IOException {
source.close();
}
}
static final class Writer implements io.grpc.okhttp.internal.framed.FrameWriter {
private final BufferedSink sink;
private final boolean client;
private final Buffer hpackBuffer;
private final Hpack.Writer hpackWriter;
private int maxFrameSize;
private boolean closed;
Writer(BufferedSink sink, boolean client) {
this.sink = sink;
this.client = client;
this.hpackBuffer = new Buffer();
this.hpackWriter = new Hpack.Writer(hpackBuffer);
this.maxFrameSize = INITIAL_MAX_FRAME_SIZE;
}
@Override public synchronized void flush() throws IOException {
if (closed) throw new IOException("closed");
sink.flush();
}
@Override public synchronized void ackSettings(io.grpc.okhttp.internal.framed.Settings peerSettings) throws IOException {
if (closed) throw new IOException("closed");
this.maxFrameSize = peerSettings.getMaxFrameSize(maxFrameSize);
int length = 0;
byte type = TYPE_SETTINGS;
byte flags = FLAG_ACK;
int streamId = 0;
frameHeader(streamId, length, type, flags);
sink.flush();
}
@Override public synchronized void connectionPreface() throws IOException {
if (closed) throw new IOException("closed");
if (!client) return; // Nothing to write; servers don't send connection headers!
if (logger.isLoggable(FINE)) {
logger.fine(format(">> CONNECTION %s", CONNECTION_PREFACE.hex()));
}
sink.write(CONNECTION_PREFACE.toByteArray());
sink.flush();
}
@Override public synchronized void synStream(boolean outFinished, boolean inFinished,
int streamId, int associatedStreamId, List<Header> headerBlock)
throws IOException {
if (inFinished) throw new UnsupportedOperationException();
if (closed) throw new IOException("closed");
headers(outFinished, streamId, headerBlock);
}
@Override public synchronized void synReply(boolean outFinished, int streamId,
List<Header> headerBlock) throws IOException {
if (closed) throw new IOException("closed");
headers(outFinished, streamId, headerBlock);
}
@Override public synchronized void headers(int streamId, List<Header> headerBlock)
throws IOException {
if (closed) throw new IOException("closed");
headers(false, streamId, headerBlock);
}
@Override public synchronized void pushPromise(int streamId, int promisedStreamId,
List<Header> requestHeaders) throws IOException {
if (closed) throw new IOException("closed");
hpackWriter.writeHeaders(requestHeaders);
long byteCount = hpackBuffer.size();
int length = (int) Math.min(maxFrameSize - 4, byteCount);
byte type = TYPE_PUSH_PROMISE;
byte flags = byteCount == length ? FLAG_END_HEADERS : 0;
frameHeader(streamId, length + 4, type, flags);
sink.writeInt(promisedStreamId & 0x7fffffff);
sink.write(hpackBuffer, length);
if (byteCount > length) writeContinuationFrames(streamId, byteCount - length);
}
void headers(boolean outFinished, int streamId, List<Header> headerBlock) throws IOException {
if (closed) throw new IOException("closed");
hpackWriter.writeHeaders(headerBlock);
long byteCount = hpackBuffer.size();
int length = (int) Math.min(maxFrameSize, byteCount);
byte type = TYPE_HEADERS;
byte flags = byteCount == length ? FLAG_END_HEADERS : 0;
if (outFinished) flags |= FLAG_END_STREAM;
frameHeader(streamId, length, type, flags);
sink.write(hpackBuffer, length);
if (byteCount > length) writeContinuationFrames(streamId, byteCount - length);
}
private void writeContinuationFrames(int streamId, long byteCount) throws IOException {
while (byteCount > 0) {
int length = (int) Math.min(maxFrameSize, byteCount);
byteCount -= length;
frameHeader(streamId, length, TYPE_CONTINUATION, byteCount == 0 ? FLAG_END_HEADERS : 0);
sink.write(hpackBuffer, length);
}
}
@Override public synchronized void rstStream(int streamId, io.grpc.okhttp.internal.framed.ErrorCode errorCode)
throws IOException {
if (closed) throw new IOException("closed");
if (errorCode.httpCode == -1) throw new IllegalArgumentException();
int length = 4;
byte type = TYPE_RST_STREAM;
byte flags = FLAG_NONE;
frameHeader(streamId, length, type, flags);
sink.writeInt(errorCode.httpCode);
sink.flush();
}
@Override public int maxDataLength() {
return maxFrameSize;
}
@Override public synchronized void data(boolean outFinished, int streamId, Buffer source,
int byteCount) throws IOException {
if (closed) throw new IOException("closed");
byte flags = FLAG_NONE;
if (outFinished) flags |= FLAG_END_STREAM;
dataFrame(streamId, flags, source, byteCount);
}
void dataFrame(int streamId, byte flags, Buffer buffer, int byteCount) throws IOException {
byte type = TYPE_DATA;
frameHeader(streamId, byteCount, type, flags);
if (byteCount > 0) {
sink.write(buffer, byteCount);
}
}
@Override public synchronized void settings(io.grpc.okhttp.internal.framed.Settings settings) throws IOException {
if (closed) throw new IOException("closed");
int length = settings.size() * 6;
byte type = TYPE_SETTINGS;
byte flags = FLAG_NONE;
int streamId = 0;
frameHeader(streamId, length, type, flags);
for (int i = 0; i < io.grpc.okhttp.internal.framed.Settings.COUNT; i++) {
if (!settings.isSet(i)) continue;
int id = i;
if (id == 4) id = 3; // SETTINGS_MAX_CONCURRENT_STREAMS renumbered.
else if (id == 7) id = 4; // SETTINGS_INITIAL_WINDOW_SIZE renumbered.
sink.writeShort(id);
sink.writeInt(settings.get(i));
}
sink.flush();
}
@Override public synchronized void ping(boolean ack, int payload1, int payload2)
throws IOException {
if (closed) throw new IOException("closed");
int length = 8;
byte type = TYPE_PING;
byte flags = ack ? FLAG_ACK : FLAG_NONE;
int streamId = 0;
frameHeader(streamId, length, type, flags);
sink.writeInt(payload1);
sink.writeInt(payload2);
sink.flush();
}
@Override public synchronized void goAway(int lastGoodStreamId, io.grpc.okhttp.internal.framed.ErrorCode errorCode,
byte[] debugData) throws IOException {
if (closed) throw new IOException("closed");
if (errorCode.httpCode == -1) throw illegalArgument("errorCode.httpCode == -1");
int length = 8 + debugData.length;
byte type = TYPE_GOAWAY;
byte flags = FLAG_NONE;
int streamId = 0;
frameHeader(streamId, length, type, flags);
sink.writeInt(lastGoodStreamId);
sink.writeInt(errorCode.httpCode);
if (debugData.length > 0) {
sink.write(debugData);
}
sink.flush();
}
@Override public synchronized void windowUpdate(int streamId, long windowSizeIncrement)
throws IOException {
if (closed) throw new IOException("closed");
if (windowSizeIncrement == 0 || windowSizeIncrement > 0x7fffffffL) {
throw illegalArgument("windowSizeIncrement == 0 || windowSizeIncrement > 0x7fffffffL: %s",
windowSizeIncrement);
}
int length = 4;
byte type = TYPE_WINDOW_UPDATE;
byte flags = FLAG_NONE;
frameHeader(streamId, length, type, flags);
sink.writeInt((int) windowSizeIncrement);
sink.flush();
}
@Override public synchronized void close() throws IOException {
closed = true;
sink.close();
}
void frameHeader(int streamId, int length, byte type, byte flags) throws IOException {
if (logger.isLoggable(FINE)) logger.fine(formatHeader(false, streamId, length, type, flags));
if (length > maxFrameSize) {
throw illegalArgument("FRAME_SIZE_ERROR length > %d: %d", maxFrameSize, length);
}
if ((streamId & 0x80000000) != 0) throw illegalArgument("reserved bit set: %s", streamId);
writeMedium(sink, length);
sink.writeByte(type & 0xff);
sink.writeByte(flags & 0xff);
sink.writeInt(streamId & 0x7fffffff);
}
}
@FormatMethod
private static IllegalArgumentException illegalArgument(String message, Object... args) {
throw new IllegalArgumentException(format(message, args));
}
@FormatMethod
private static IOException ioException(String message, Object... args) throws IOException {
throw new IOException(format(message, args));
}
/**
* Decompression of the header block occurs above the framing layer. This
* class lazily reads continuation frames as they are needed by {@link
* Hpack.Reader#readHeaders()}.
*/
static final class ContinuationSource implements Source {
private final BufferedSource source;
int length;
byte flags;
int streamId;
int left;
short padding;
public ContinuationSource(BufferedSource source) {
this.source = source;
}
@Override public long read(Buffer sink, long byteCount) throws IOException {
while (left == 0) {
source.skip(padding);
padding = 0;
if ((flags & FLAG_END_HEADERS) != 0) return -1;
readContinuationHeader();
// TODO: test case for empty continuation header?
}
long read = source.read(sink, Math.min(byteCount, left));
if (read == -1) return -1;
left -= (int) read;
return read;
}
@Override public Timeout timeout() {
return source.timeout();
}
@Override public void close() throws IOException {
}
private void readContinuationHeader() throws IOException {
int previousStreamId = streamId;
length = left = readMedium(source);
byte type = (byte) (source.readByte() & 0xff);
flags = (byte) (source.readByte() & 0xff);
if (logger.isLoggable(FINE)) logger.fine(formatHeader(true, streamId, length, type, flags));
streamId = (source.readInt() & 0x7fffffff);
if (type != TYPE_CONTINUATION) throw ioException("%s != TYPE_CONTINUATION", type);
if (streamId != previousStreamId) throw ioException("TYPE_CONTINUATION streamId changed");
}
}
private static int lengthWithoutPadding(int length, byte flags, short padding)
throws IOException {
if ((flags & FLAG_PADDED) != 0) length--; // Account for reading the padding length.
if (padding > length) {
throw ioException("PROTOCOL_ERROR padding %s > remaining length %s", padding, length);
}
return (short) (length - padding);
}
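/* Worked example (illustrative): a padded frame with declared length 20 and a padding
 * octet of 5 has 20 - 1 = 19 bytes remaining after the padding-length read, so this
 * method returns 19 - 5 = 14 bytes of actual payload. */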
/**
* Logs a human-readable representation of HTTP/2 frame headers.
*
* <p>The format is:
*
* <pre>
* direction streamID length type flags
* </pre>
* Where direction is {@code <<} for inbound and {@code >>} for outbound.
*
* <p> For example, the following would indicate a HEAD request sent from
* the client.
* <pre>
* {@code
* << 0x0000000f 12 HEADERS END_HEADERS|END_STREAM
* }
* </pre>
*/
static final class FrameLogger {
static String formatHeader(boolean inbound, int streamId, int length, byte type, byte flags) {
String formattedType = type < TYPES.length ? TYPES[type] : format("0x%02x", type);
String formattedFlags = formatFlags(type, flags);
return format("%s 0x%08x %5d %-13s %s", inbound ? "<<" : ">>", streamId, length,
formattedType, formattedFlags);
}
/**
* Looks up valid string representing flags from the table. Invalid
* combinations are represented in binary.
*/
// Visible for testing.
static String formatFlags(byte type, byte flags) {
if (flags == 0) return "";
switch (type) { // Special case types that have 0 or 1 flag.
case TYPE_SETTINGS:
case TYPE_PING:
return flags == FLAG_ACK ? "ACK" : BINARY[flags];
case TYPE_PRIORITY:
case TYPE_RST_STREAM:
case TYPE_GOAWAY:
case TYPE_WINDOW_UPDATE:
return BINARY[flags];
}
String result = flags < FLAGS.length ? FLAGS[flags] : BINARY[flags];
// Special case types that have overlap flag values.
if (type == TYPE_PUSH_PROMISE && (flags & FLAG_END_PUSH_PROMISE) != 0) {
return result.replace("HEADERS", "PUSH_PROMISE"); // TODO: Avoid allocation.
} else if (type == TYPE_DATA && (flags & FLAG_COMPRESSED) != 0) {
return result.replace("PRIORITY", "COMPRESSED"); // TODO: Avoid allocation.
}
return result;
}
/** Lookup table for valid frame types. */
private static final String[] TYPES = new String[] {
"DATA",
"HEADERS",
"PRIORITY",
"RST_STREAM",
"SETTINGS",
"PUSH_PROMISE",
"PING",
"GOAWAY",
"WINDOW_UPDATE",
"CONTINUATION"
};
/**
* Lookup table for valid flags for DATA, HEADERS, CONTINUATION. Invalid
* combinations are represented in binary.
*/
private static final String[] FLAGS = new String[0x40]; // Highest bit flag is 0x20.
private static final String[] BINARY = new String[256];
static {
for (int i = 0; i < BINARY.length; i++) {
BINARY[i] = format("%8s", Integer.toBinaryString(i)).replace(' ', '0');
}
FLAGS[FLAG_NONE] = "";
FLAGS[FLAG_END_STREAM] = "END_STREAM";
int[] prefixFlags = new int[] {FLAG_END_STREAM};
FLAGS[FLAG_PADDED] = "PADDED";
for (int prefixFlag : prefixFlags) {
FLAGS[prefixFlag | FLAG_PADDED] = FLAGS[prefixFlag] + "|PADDED";
}
FLAGS[FLAG_END_HEADERS] = "END_HEADERS"; // Same as END_PUSH_PROMISE.
FLAGS[FLAG_PRIORITY] = "PRIORITY"; // Same as FLAG_COMPRESSED.
FLAGS[FLAG_END_HEADERS | FLAG_PRIORITY] = "END_HEADERS|PRIORITY"; // Only valid on HEADERS.
int[] frameFlags =
new int[] {FLAG_END_HEADERS, FLAG_PRIORITY, FLAG_END_HEADERS | FLAG_PRIORITY};
for (int frameFlag : frameFlags) {
for (int prefixFlag : prefixFlags) {
FLAGS[prefixFlag | frameFlag] = FLAGS[prefixFlag] + '|' + FLAGS[frameFlag];
FLAGS[prefixFlag | frameFlag | FLAG_PADDED] =
FLAGS[prefixFlag] + '|' + FLAGS[frameFlag] + "|PADDED";
}
}
for (int i = 0; i < FLAGS.length; i++) { // Fill in holes with binary representation.
if (FLAGS[i] == null) FLAGS[i] = BINARY[i];
}
}
}
private static int readMedium(BufferedSource source) throws IOException {
return (source.readByte() & 0xff) << 16
| (source.readByte() & 0xff) << 8
| (source.readByte() & 0xff);
}
private static void writeMedium(BufferedSink sink, int i) throws IOException {
sink.writeByte((i >>> 16) & 0xff);
sink.writeByte((i >>> 8) & 0xff);
sink.writeByte(i & 0xff);
}
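/* Example of the 24-bit "medium" encoding used for HTTP/2 frame lengths: 16384 (0x004000)
 * is written as the three bytes 0x00, 0x40, 0x00, and readMedium reassembles them as
 * (0x00 << 16) | (0x40 << 8) | 0x00 = 16384. */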
}
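/*
 * Hedged usage sketch (not part of the original file): drives the Writer half of this Variant
 * using only members visible above (Http2.newWriter, connectionPreface, settings, ping, flush)
 * plus okio.Buffer, which implements BufferedSink, so the frame bytes simply accumulate in memory.
 */
class Http2WriterSketch {
static Buffer writeClientPrefaceAndPing() throws IOException {
Buffer sink = new Buffer();
io.grpc.okhttp.internal.framed.FrameWriter writer = new Http2().newWriter(sink, true);
writer.connectionPreface(); // "PRI * HTTP/2.0..." preface, written because client == true
writer.settings(new io.grpc.okhttp.internal.framed.Settings()); // empty SETTINGS frame
writer.ping(false, 0, 1); // 8-byte PING payload (0, 1), not an ACK
writer.flush();
return sink; // contains preface + SETTINGS + PING, ready to inspect or transmit
}
}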
|
|
/*
* Copyright (C) 2010 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package android.mtp;
import android.content.ContentProviderClient;
import android.database.Cursor;
import android.net.Uri;
import android.os.RemoteException;
import android.provider.MediaStore.Audio;
import android.provider.MediaStore.Files;
import android.provider.MediaStore.Images;
import android.provider.MediaStore.MediaColumns;
import android.util.Log;
import java.util.ArrayList;
class MtpPropertyGroup {
private static final String TAG = "MtpPropertyGroup";
private class Property {
// MTP property code
int code;
// MTP data type
int type;
// column index for our query
int column;
Property(int code, int type, int column) {
this.code = code;
this.type = type;
this.column = column;
}
}
private final MtpDatabase mDatabase;
private final ContentProviderClient mProvider;
private final String mVolumeName;
private final Uri mUri;
// list of all properties in this group
private final Property[] mProperties;
// list of columns for database query
private String[] mColumns;
private static final String ID_WHERE = Files.FileColumns._ID + "=?";
private static final String FORMAT_WHERE = Files.FileColumns.FORMAT + "=?";
private static final String ID_FORMAT_WHERE = ID_WHERE + " AND " + FORMAT_WHERE;
private static final String PARENT_WHERE = Files.FileColumns.PARENT + "=?";
private static final String PARENT_FORMAT_WHERE = PARENT_WHERE + " AND " + FORMAT_WHERE;
// constructs a property group for a list of properties
public MtpPropertyGroup(MtpDatabase database, ContentProviderClient provider, String volumeName,
int[] properties) {
mDatabase = database;
mProvider = provider;
mVolumeName = volumeName;
mUri = Files.getMtpObjectsUri(volumeName);
int count = properties.length;
ArrayList<String> columns = new ArrayList<String>(count);
columns.add(Files.FileColumns._ID);
mProperties = new Property[count];
for (int i = 0; i < count; i++) {
mProperties[i] = createProperty(properties[i], columns);
}
count = columns.size();
mColumns = new String[count];
for (int i = 0; i < count; i++) {
mColumns[i] = columns.get(i);
}
}
private Property createProperty(int code, ArrayList<String> columns) {
String column = null;
int type;
switch (code) {
case MtpConstants.PROPERTY_STORAGE_ID:
column = Files.FileColumns.STORAGE_ID;
type = MtpConstants.TYPE_UINT32;
break;
case MtpConstants.PROPERTY_OBJECT_FORMAT:
column = Files.FileColumns.FORMAT;
type = MtpConstants.TYPE_UINT16;
break;
case MtpConstants.PROPERTY_PROTECTION_STATUS:
// protection status is always 0
type = MtpConstants.TYPE_UINT16;
break;
case MtpConstants.PROPERTY_OBJECT_SIZE:
column = Files.FileColumns.SIZE;
type = MtpConstants.TYPE_UINT64;
break;
case MtpConstants.PROPERTY_OBJECT_FILE_NAME:
column = Files.FileColumns.DATA;
type = MtpConstants.TYPE_STR;
break;
case MtpConstants.PROPERTY_NAME:
column = MediaColumns.TITLE;
type = MtpConstants.TYPE_STR;
break;
case MtpConstants.PROPERTY_DATE_MODIFIED:
column = Files.FileColumns.DATE_MODIFIED;
type = MtpConstants.TYPE_STR;
break;
case MtpConstants.PROPERTY_DATE_ADDED:
column = Files.FileColumns.DATE_ADDED;
type = MtpConstants.TYPE_STR;
break;
case MtpConstants.PROPERTY_ORIGINAL_RELEASE_DATE:
column = Audio.AudioColumns.YEAR;
type = MtpConstants.TYPE_STR;
break;
case MtpConstants.PROPERTY_PARENT_OBJECT:
column = Files.FileColumns.PARENT;
type = MtpConstants.TYPE_UINT32;
break;
case MtpConstants.PROPERTY_PERSISTENT_UID:
// PUID is concatenation of storageID and object handle
column = Files.FileColumns.STORAGE_ID;
type = MtpConstants.TYPE_UINT128;
break;
case MtpConstants.PROPERTY_DURATION:
column = Audio.AudioColumns.DURATION;
type = MtpConstants.TYPE_UINT32;
break;
case MtpConstants.PROPERTY_TRACK:
column = Audio.AudioColumns.TRACK;
type = MtpConstants.TYPE_UINT16;
break;
case MtpConstants.PROPERTY_DISPLAY_NAME:
column = MediaColumns.DISPLAY_NAME;
type = MtpConstants.TYPE_STR;
break;
case MtpConstants.PROPERTY_ARTIST:
type = MtpConstants.TYPE_STR;
break;
case MtpConstants.PROPERTY_ALBUM_NAME:
type = MtpConstants.TYPE_STR;
break;
case MtpConstants.PROPERTY_ALBUM_ARTIST:
column = Audio.AudioColumns.ALBUM_ARTIST;
type = MtpConstants.TYPE_STR;
break;
case MtpConstants.PROPERTY_GENRE:
// genre requires a special query
type = MtpConstants.TYPE_STR;
break;
case MtpConstants.PROPERTY_COMPOSER:
column = Audio.AudioColumns.COMPOSER;
type = MtpConstants.TYPE_STR;
break;
case MtpConstants.PROPERTY_DESCRIPTION:
column = Images.ImageColumns.DESCRIPTION;
type = MtpConstants.TYPE_STR;
break;
case MtpConstants.PROPERTY_AUDIO_WAVE_CODEC:
case MtpConstants.PROPERTY_AUDIO_BITRATE:
case MtpConstants.PROPERTY_SAMPLE_RATE:
// these are special cased
type = MtpConstants.TYPE_UINT32;
break;
case MtpConstants.PROPERTY_BITRATE_TYPE:
case MtpConstants.PROPERTY_NUMBER_OF_CHANNELS:
// these are special cased
type = MtpConstants.TYPE_UINT16;
break;
default:
type = MtpConstants.TYPE_UNDEFINED;
Log.e(TAG, "unsupported property " + code);
break;
}
if (column != null) {
columns.add(column);
return new Property(code, type, columns.size() - 1);
} else {
return new Property(code, type, -1);
}
}
private String queryString(int id, String column) {
Cursor c = null;
try {
// for now we are only reading properties from the "objects" table
c = mProvider.query(mUri,
new String [] { Files.FileColumns._ID, column },
ID_WHERE, new String[] { Integer.toString(id) }, null, null);
if (c != null && c.moveToNext()) {
return c.getString(1);
} else {
return "";
}
} catch (Exception e) {
return null;
} finally {
if (c != null) {
c.close();
}
}
}
private String queryAudio(int id, String column) {
Cursor c = null;
try {
c = mProvider.query(Audio.Media.getContentUri(mVolumeName),
new String [] { Files.FileColumns._ID, column },
ID_WHERE, new String[] { Integer.toString(id) }, null, null);
if (c != null && c.moveToNext()) {
return c.getString(1);
} else {
return "";
}
} catch (Exception e) {
return null;
} finally {
if (c != null) {
c.close();
}
}
}
private String queryGenre(int id) {
Cursor c = null;
try {
Uri uri = Audio.Genres.getContentUriForAudioId(mVolumeName, id);
c = mProvider.query(uri,
new String [] { Files.FileColumns._ID, Audio.GenresColumns.NAME },
null, null, null, null);
if (c != null && c.moveToNext()) {
return c.getString(1);
} else {
return "";
}
} catch (Exception e) {
Log.e(TAG, "queryGenre exception", e);
return null;
} finally {
if (c != null) {
c.close();
}
}
}
private Long queryLong(int id, String column) {
Cursor c = null;
try {
// for now we are only reading properties from the "objects" table
c = mProvider.query(mUri,
new String [] { Files.FileColumns._ID, column },
ID_WHERE, new String[] { Integer.toString(id) }, null, null);
if (c != null && c.moveToNext()) {
return Long.valueOf(c.getLong(1));
}
} catch (Exception e) {
} finally {
if (c != null) {
c.close();
}
}
return null;
}
private static String nameFromPath(String path) {
// extract name from full path
int start = 0;
int lastSlash = path.lastIndexOf('/');
if (lastSlash >= 0) {
start = lastSlash + 1;
}
int end = path.length();
if (end - start > 255) {
end = start + 255;
}
return path.substring(start, end);
}
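// Example (illustrative): nameFromPath("/storage/emulated/0/Music/track.mp3") returns
// "track.mp3"; names longer than 255 characters are truncated to 255.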
MtpPropertyList getPropertyList(int handle, int format, int depth) {
//Log.d(TAG, "getPropertyList handle: " + handle + " format: " + format + " depth: " + depth);
if (depth > 1) {
// we only support depth 0 and 1
// depth 0: single object, depth 1: immediate children
return new MtpPropertyList(0, MtpConstants.RESPONSE_SPECIFICATION_BY_DEPTH_UNSUPPORTED);
}
String where;
String[] whereArgs;
if (format == 0) {
if (handle == 0xFFFFFFFF) {
// select all objects
where = null;
whereArgs = null;
} else {
whereArgs = new String[] { Integer.toString(handle) };
if (depth == 1) {
where = PARENT_WHERE;
} else {
where = ID_WHERE;
}
}
} else {
if (handle == 0xFFFFFFFF) {
// select all objects with given format
where = FORMAT_WHERE;
whereArgs = new String[] { Integer.toString(format) };
} else {
whereArgs = new String[] { Integer.toString(handle), Integer.toString(format) };
if (depth == 1) {
where = PARENT_FORMAT_WHERE;
} else {
where = ID_FORMAT_WHERE;
}
}
}
Cursor c = null;
try {
// don't query if not necessary
if (depth > 0 || handle == 0xFFFFFFFF || mColumns.length > 1) {
c = mProvider.query(mUri, mColumns, where, whereArgs, null, null);
if (c == null) {
return new MtpPropertyList(0, MtpConstants.RESPONSE_INVALID_OBJECT_HANDLE);
}
}
int count = (c == null ? 1 : c.getCount());
MtpPropertyList result = new MtpPropertyList(count * mProperties.length,
MtpConstants.RESPONSE_OK);
// iterate over all objects in the query
for (int objectIndex = 0; objectIndex < count; objectIndex++) {
if (c != null) {
c.moveToNext();
handle = (int)c.getLong(0);
}
// iterate over all properties in the query for the given object
for (int propertyIndex = 0; propertyIndex < mProperties.length; propertyIndex++) {
Property property = mProperties[propertyIndex];
int propertyCode = property.code;
int column = property.column;
// handle some special cases
switch (propertyCode) {
case MtpConstants.PROPERTY_PROTECTION_STATUS:
// protection status is always 0
result.append(handle, propertyCode, MtpConstants.TYPE_UINT16, 0);
break;
case MtpConstants.PROPERTY_OBJECT_FILE_NAME:
// special case - need to extract file name from full path
String value = c.getString(column);
if (value != null) {
result.append(handle, propertyCode, nameFromPath(value));
} else {
result.setResult(MtpConstants.RESPONSE_INVALID_OBJECT_HANDLE);
}
break;
case MtpConstants.PROPERTY_NAME:
// first try title
String name = c.getString(column);
// then try name
if (name == null) {
name = queryString(handle, Audio.PlaylistsColumns.NAME);
}
// if title and name fail, extract name from full path
if (name == null) {
name = queryString(handle, Files.FileColumns.DATA);
if (name != null) {
name = nameFromPath(name);
}
}
if (name != null) {
result.append(handle, propertyCode, name);
} else {
result.setResult(MtpConstants.RESPONSE_INVALID_OBJECT_HANDLE);
}
break;
case MtpConstants.PROPERTY_DATE_MODIFIED:
case MtpConstants.PROPERTY_DATE_ADDED:
// convert from seconds to DateTime
result.append(handle, propertyCode, format_date_time(c.getInt(column)));
break;
case MtpConstants.PROPERTY_ORIGINAL_RELEASE_DATE:
// release date is stored internally as just the year
int year = c.getInt(column);
String dateTime = Integer.toString(year) + "0101T000000";
result.append(handle, propertyCode, dateTime);
break;
case MtpConstants.PROPERTY_PERSISTENT_UID:
// PUID is concatenation of storageID and object handle
long puid = c.getLong(column);
puid <<= 32;
puid += handle;
result.append(handle, propertyCode, MtpConstants.TYPE_UINT128, puid);
break;
case MtpConstants.PROPERTY_TRACK:
result.append(handle, propertyCode, MtpConstants.TYPE_UINT16,
c.getInt(column) % 1000);
break;
case MtpConstants.PROPERTY_ARTIST:
result.append(handle, propertyCode,
queryAudio(handle, Audio.AudioColumns.ARTIST));
break;
case MtpConstants.PROPERTY_ALBUM_NAME:
result.append(handle, propertyCode,
queryAudio(handle, Audio.AudioColumns.ALBUM));
break;
case MtpConstants.PROPERTY_GENRE:
String genre = queryGenre(handle);
if (genre != null) {
result.append(handle, propertyCode, genre);
} else {
result.setResult(MtpConstants.RESPONSE_INVALID_OBJECT_HANDLE);
}
break;
case MtpConstants.PROPERTY_AUDIO_WAVE_CODEC:
case MtpConstants.PROPERTY_AUDIO_BITRATE:
case MtpConstants.PROPERTY_SAMPLE_RATE:
// we don't have these in our database, so return 0
result.append(handle, propertyCode, MtpConstants.TYPE_UINT32, 0);
break;
case MtpConstants.PROPERTY_BITRATE_TYPE:
case MtpConstants.PROPERTY_NUMBER_OF_CHANNELS:
// we don't have these in our database, so return 0
result.append(handle, propertyCode, MtpConstants.TYPE_UINT16, 0);
break;
default:
if (property.type == MtpConstants.TYPE_STR) {
result.append(handle, propertyCode, c.getString(column));
} else if (property.type == MtpConstants.TYPE_UNDEFINED) {
result.append(handle, propertyCode, property.type, 0);
} else {
result.append(handle, propertyCode, property.type,
c.getLong(column));
}
break;
}
}
}
return result;
} catch (RemoteException e) {
return new MtpPropertyList(0, MtpConstants.RESPONSE_GENERAL_ERROR);
} finally {
if (c != null) {
c.close();
}
}
// impossible to get here, so no return statement
}
private native String format_date_time(long seconds);
}
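/*
 * Hedged usage sketch (not part of the original file): assembling a property group for a few MTP
 * object properties and resolving them for a single object handle. The MtpDatabase,
 * ContentProviderClient and volume name are assumptions here -- in the real service they are
 * supplied by the MTP stack that owns this class.
 */
class MtpPropertyGroupSketch {
static MtpPropertyList queryBasicProperties(MtpDatabase database, ContentProviderClient provider,
String volumeName, int objectHandle) {
int[] properties = new int[] {
MtpConstants.PROPERTY_OBJECT_FORMAT,
MtpConstants.PROPERTY_OBJECT_SIZE,
MtpConstants.PROPERTY_OBJECT_FILE_NAME,
};
MtpPropertyGroup group = new MtpPropertyGroup(database, provider, volumeName, properties);
// depth 0 == just this object; depth 1 would enumerate its immediate children
return group.getPropertyList(objectHandle, 0 /* any format */, 0);
}
}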
|
|
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.activiti.engine.impl;
import java.io.Serializable;
import java.util.Date;
import java.util.List;
import org.activiti.engine.ActivitiIllegalArgumentException;
import org.activiti.engine.impl.context.Context;
import org.activiti.engine.impl.interceptor.CommandContext;
import org.activiti.engine.impl.interceptor.CommandExecutor;
import org.activiti.engine.runtime.TimerJobQuery;
import org.flowable.engine.runtime.Job;
/**
* @author Joram Barrez
* @author Tom Baeyens
* @author Falko Menge
*/
public class TimerJobQueryImpl extends AbstractQuery<TimerJobQuery, Job> implements TimerJobQuery, Serializable {
private static final long serialVersionUID = 1L;
protected String id;
protected String processInstanceId;
protected String executionId;
protected String processDefinitionId;
protected boolean executable;
protected boolean onlyTimers;
protected boolean onlyMessages;
protected Date duedateHigherThan;
protected Date duedateLowerThan;
protected Date duedateHigherThanOrEqual;
protected Date duedateLowerThanOrEqual;
protected boolean withException;
protected String exceptionMessage;
protected String tenantId;
protected String tenantIdLike;
protected boolean withoutTenantId;
public TimerJobQueryImpl() {
}
public TimerJobQueryImpl(CommandContext commandContext) {
super(commandContext);
}
public TimerJobQueryImpl(CommandExecutor commandExecutor) {
super(commandExecutor);
}
public TimerJobQuery jobId(String jobId) {
if (jobId == null) {
throw new ActivitiIllegalArgumentException("Provided job id is null");
}
this.id = jobId;
return this;
}
public TimerJobQueryImpl processInstanceId(String processInstanceId) {
if (processInstanceId == null) {
throw new ActivitiIllegalArgumentException("Provided process instance id is null");
}
this.processInstanceId = processInstanceId;
return this;
}
public TimerJobQueryImpl processDefinitionId(String processDefinitionId) {
if (processDefinitionId == null) {
throw new ActivitiIllegalArgumentException("Provided process definition id is null");
}
this.processDefinitionId = processDefinitionId;
return this;
}
public TimerJobQueryImpl executionId(String executionId) {
if (executionId == null) {
throw new ActivitiIllegalArgumentException("Provided execution id is null");
}
this.executionId = executionId;
return this;
}
public TimerJobQuery executable() {
executable = true;
return this;
}
public TimerJobQuery timers() {
if (onlyMessages) {
throw new ActivitiIllegalArgumentException("Cannot combine onlyTimers() with onlyMessages() in the same query");
}
this.onlyTimers = true;
return this;
}
public TimerJobQuery messages() {
if (onlyTimers) {
throw new ActivitiIllegalArgumentException("Cannot combine onlyTimers() with onlyMessages() in the same query");
}
this.onlyMessages = true;
return this;
}
public TimerJobQuery duedateHigherThan(Date date) {
if (date == null) {
throw new ActivitiIllegalArgumentException("Provided date is null");
}
this.duedateHigherThan = date;
return this;
}
public TimerJobQuery duedateLowerThan(Date date) {
if (date == null) {
throw new ActivitiIllegalArgumentException("Provided date is null");
}
this.duedateLowerThan = date;
return this;
}
public TimerJobQuery duedateHigherThen(Date date) {
return duedateHigherThan(date);
}
public TimerJobQuery duedateHigherThenOrEquals(Date date) {
if (date == null) {
throw new ActivitiIllegalArgumentException("Provided date is null");
}
this.duedateHigherThanOrEqual = date;
return this;
}
public TimerJobQuery duedateLowerThen(Date date) {
return duedateLowerThan(date);
}
public TimerJobQuery duedateLowerThenOrEquals(Date date) {
if (date == null) {
throw new ActivitiIllegalArgumentException("Provided date is null");
}
this.duedateLowerThanOrEqual = date;
return this;
}
public TimerJobQuery withException() {
this.withException = true;
return this;
}
public TimerJobQuery exceptionMessage(String exceptionMessage) {
if (exceptionMessage == null) {
throw new ActivitiIllegalArgumentException("Provided exception message is null");
}
this.exceptionMessage = exceptionMessage;
return this;
}
public TimerJobQuery jobTenantId(String tenantId) {
if (tenantId == null) {
throw new ActivitiIllegalArgumentException("job is null");
}
this.tenantId = tenantId;
return this;
}
public TimerJobQuery jobTenantIdLike(String tenantIdLike) {
if (tenantIdLike == null) {
throw new ActivitiIllegalArgumentException("job is null");
}
this.tenantIdLike = tenantIdLike;
return this;
}
public TimerJobQuery jobWithoutTenantId() {
this.withoutTenantId = true;
return this;
}
//sorting //////////////////////////////////////////
public TimerJobQuery orderByJobDuedate() {
return orderBy(JobQueryProperty.DUEDATE);
}
public TimerJobQuery orderByExecutionId() {
return orderBy(JobQueryProperty.EXECUTION_ID);
}
public TimerJobQuery orderByJobId() {
return orderBy(JobQueryProperty.JOB_ID);
}
public TimerJobQuery orderByProcessInstanceId() {
return orderBy(JobQueryProperty.PROCESS_INSTANCE_ID);
}
public TimerJobQuery orderByJobRetries() {
return orderBy(JobQueryProperty.RETRIES);
}
public TimerJobQuery orderByTenantId() {
return orderBy(JobQueryProperty.TENANT_ID);
}
//results //////////////////////////////////////////
public long executeCount(CommandContext commandContext) {
checkQueryOk();
return commandContext
.getTimerJobEntityManager()
.findTimerJobCountByQueryCriteria(this);
}
public List<Job> executeList(CommandContext commandContext, Page page) {
checkQueryOk();
return commandContext
.getTimerJobEntityManager()
.findTimerJobsByQueryCriteria(this, page);
}
//getters //////////////////////////////////////////
public String getProcessInstanceId() {
return processInstanceId;
}
public String getExecutionId() {
return executionId;
}
public boolean getExecutable() {
return executable;
}
public Date getNow() {
return Context.getProcessEngineConfiguration().getClock().getCurrentTime();
}
public boolean isWithException() {
return withException;
}
public String getExceptionMessage() {
return exceptionMessage;
}
public String getTenantId() {
return tenantId;
}
public String getTenantIdLike() {
return tenantIdLike;
}
public boolean isWithoutTenantId() {
return withoutTenantId;
}
}
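/*
 * Hedged usage sketch (not part of the original file): composing a timer job query with the fluent
 * API defined above. Obtaining the query through a ManagementService is an assumption about the
 * surrounding engine API; only the chained filter/order calls are taken from this class.
 */
class TimerJobQueryUsageSketch {
static List<Job> findOverdueTimersForProcess(org.activiti.engine.ManagementService managementService,
String processInstanceId, Date now) {
return managementService.createTimerJobQuery()
.processInstanceId(processInstanceId)
.timers()
.duedateLowerThan(now)
.orderByJobDuedate().asc()
.listPage(0, 50); // first 50 overdue timers for this process instance
}
}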
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sysds.runtime.transform.encode;
import org.apache.commons.lang.ArrayUtils;
import org.apache.sysds.api.DMLScript;
import org.apache.sysds.common.Types.ValueType;
import org.apache.sysds.runtime.DMLRuntimeException;
import org.apache.sysds.runtime.matrix.data.FrameBlock;
import org.apache.sysds.runtime.transform.TfUtils.TfMethod;
import org.apache.sysds.runtime.transform.encode.ColumnEncoder.EncoderType;
import org.apache.sysds.runtime.transform.meta.TfMetaUtils;
import org.apache.sysds.runtime.util.UtilFunctions;
import org.apache.sysds.utils.Statistics;
import org.apache.wink.json4j.JSONArray;
import org.apache.wink.json4j.JSONObject;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map.Entry;
import static org.apache.sysds.runtime.util.CollectionUtils.except;
import static org.apache.sysds.runtime.util.CollectionUtils.unionDistinct;
public class EncoderFactory {
public static MultiColumnEncoder createEncoder(String spec, String[] colnames, int clen, FrameBlock meta) {
return createEncoder(spec, colnames, UtilFunctions.nCopies(clen, ValueType.STRING), meta);
}
public static MultiColumnEncoder createEncoder(String spec, String[] colnames, int clen, FrameBlock meta,
int minCol, int maxCol) {
return createEncoder(spec, colnames, UtilFunctions.nCopies(clen, ValueType.STRING), meta, minCol, maxCol);
}
public static MultiColumnEncoder createEncoder(String spec, String[] colnames, ValueType[] schema, int clen,
FrameBlock meta) {
ValueType[] lschema = (schema == null) ? UtilFunctions.nCopies(clen, ValueType.STRING) : schema;
return createEncoder(spec, colnames, lschema, meta);
}
public static MultiColumnEncoder createEncoder(String spec, String[] colnames, ValueType[] schema,
FrameBlock meta) {
return createEncoder(spec, colnames, schema, meta, -1, -1);
}
public static MultiColumnEncoder createEncoder(String spec, String[] colnames, ValueType[] schema, FrameBlock meta,
int minCol, int maxCol) {
MultiColumnEncoder encoder;
int clen = schema.length;
try {
// parse transform specification
JSONObject jSpec = new JSONObject(spec);
List<ColumnEncoderComposite> lencoders = new ArrayList<>();
HashMap<Integer, List<ColumnEncoder>> colEncoders = new HashMap<>();
boolean ids = jSpec.containsKey("ids") && jSpec.getBoolean("ids");
// prepare basic id lists (recode, feature hash, dummycode, pass-through)
List<Integer> rcIDs = Arrays.asList(ArrayUtils
.toObject(TfMetaUtils.parseJsonIDList(jSpec, colnames, TfMethod.RECODE.toString(), minCol, maxCol)));
List<Integer> haIDs = Arrays.asList(ArrayUtils
.toObject(TfMetaUtils.parseJsonIDList(jSpec, colnames, TfMethod.HASH.toString(), minCol, maxCol)));
List<Integer> dcIDs = Arrays.asList(ArrayUtils
.toObject(TfMetaUtils.parseJsonIDList(jSpec, colnames, TfMethod.DUMMYCODE.toString(), minCol, maxCol)));
List<Integer> binIDs = TfMetaUtils.parseBinningColIDs(jSpec, colnames, minCol, maxCol);
// note: any dummycode column requires recode as preparation, unless it follows binning
rcIDs = except(unionDistinct(rcIDs, except(dcIDs, binIDs)), haIDs);
List<Integer> ptIDs = except(except(UtilFunctions.getSeqList(1, clen, 1), unionDistinct(rcIDs, haIDs)),
binIDs);
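// Illustrative walk-through: with clen = 4, recode = {1}, dummycode = {2, 3}, bin = {3}, hash = {}:
// except(dcIDs, binIDs) = {2}; unionDistinct(rcIDs, {2}) = {1, 2}; so rcIDs = {1, 2}
// (dummycoded column 2 gets a recode pass, column 3 is binned instead), and
// ptIDs = except(except({1,2,3,4}, {1,2}), {3}) = {4}, i.e. column 4 passes through unchanged.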
List<Integer> oIDs = Arrays.asList(ArrayUtils
.toObject(TfMetaUtils.parseJsonIDList(jSpec, colnames, TfMethod.OMIT.toString(), minCol, maxCol)));
List<Integer> mvIDs = Arrays.asList(ArrayUtils.toObject(
TfMetaUtils.parseJsonObjectIDList(jSpec, colnames, TfMethod.IMPUTE.toString(), minCol, maxCol)));
// create individual encoders
if(!rcIDs.isEmpty()) {
for(Integer id : rcIDs) {
ColumnEncoderRecode ra = new ColumnEncoderRecode(id);
addEncoderToMap(ra, colEncoders);
}
}
if(!haIDs.isEmpty()) {
for(Integer id : haIDs) {
ColumnEncoderFeatureHash ha = new ColumnEncoderFeatureHash(id, TfMetaUtils.getK(jSpec));
addEncoderToMap(ha, colEncoders);
}
}
if(!ptIDs.isEmpty())
for(Integer id : ptIDs) {
ColumnEncoderPassThrough pt = new ColumnEncoderPassThrough(id);
addEncoderToMap(pt, colEncoders);
}
if(!binIDs.isEmpty())
for(Object o : (JSONArray) jSpec.get(TfMethod.BIN.toString())) {
JSONObject colspec = (JSONObject) o;
int numBins = colspec.containsKey("numbins") ? colspec.getInt("numbins") : 1;
int id = TfMetaUtils.parseJsonObjectID(colspec, colnames, minCol, maxCol, ids);
if(id <= 0)
continue;
ColumnEncoderBin bin = new ColumnEncoderBin(id, numBins);
addEncoderToMap(bin, colEncoders);
}
if(!dcIDs.isEmpty())
for(Integer id : dcIDs) {
ColumnEncoderDummycode dc = new ColumnEncoderDummycode(id);
addEncoderToMap(dc, colEncoders);
}
// create composite encoders (one per column) from all created column encoders
for(Entry<Integer, List<ColumnEncoder>> listEntry : colEncoders.entrySet()) {
if(DMLScript.STATISTICS)
Statistics.incTransformEncoderCount(listEntry.getValue().size());
lencoders.add(new ColumnEncoderComposite(listEntry.getValue()));
}
encoder = new MultiColumnEncoder(lencoders);
if(!oIDs.isEmpty()) {
encoder.addReplaceLegacyEncoder(new EncoderOmit(jSpec, colnames, schema.length, minCol, maxCol));
if(DMLScript.STATISTICS)
Statistics.incTransformEncoderCount(1);
}
if(!mvIDs.isEmpty()) {
EncoderMVImpute ma = new EncoderMVImpute(jSpec, colnames, schema.length, minCol, maxCol);
ma.initRecodeIDList(rcIDs);
encoder.addReplaceLegacyEncoder(ma);
if(DMLScript.STATISTICS)
Statistics.incTransformEncoderCount(1);
}
// initialize meta data w/ robustness for superset of cols
if(meta != null) {
String[] colnames2 = meta.getColumnNames();
if(!TfMetaUtils.isIDSpec(jSpec) && colnames != null && colnames2 != null &&
!ArrayUtils.isEquals(colnames, colnames2)) {
HashMap<String, Integer> colPos = getColumnPositions(colnames2);
// create temporary meta frame block w/ shallow column copy
FrameBlock meta2 = new FrameBlock(meta.getSchema(), colnames2);
meta2.setNumRows(meta.getNumRows());
for(int i = 0; i < colnames.length; i++) {
if(!colPos.containsKey(colnames[i])) {
throw new DMLRuntimeException("Column name not found in meta data: " + colnames[i]
+ " (meta: " + Arrays.toString(colnames2) + ")");
}
int pos = colPos.get(colnames[i]);
meta2.setColumn(i, meta.getColumn(pos));
meta2.setColumnMetadata(i, meta.getColumnMetadata(pos));
}
meta = meta2;
}
encoder.initMetaData(meta);
}
}
catch(Exception ex) {
throw new DMLRuntimeException(ex);
}
return encoder;
}
private static void addEncoderToMap(ColumnEncoder encoder, HashMap<Integer, List<ColumnEncoder>> map) {
if(!map.containsKey(encoder._colID)) {
map.put(encoder._colID, new ArrayList<>());
}
map.get(encoder._colID).add(encoder);
}
public static int getEncoderType(ColumnEncoder columnEncoder) {
if(columnEncoder instanceof ColumnEncoderBin)
return EncoderType.Bin.ordinal();
else if(columnEncoder instanceof ColumnEncoderDummycode)
return EncoderType.Dummycode.ordinal();
else if(columnEncoder instanceof ColumnEncoderFeatureHash)
return EncoderType.FeatureHash.ordinal();
else if(columnEncoder instanceof ColumnEncoderPassThrough)
return EncoderType.PassThrough.ordinal();
else if(columnEncoder instanceof ColumnEncoderRecode)
return EncoderType.Recode.ordinal();
throw new DMLRuntimeException("Unsupported encoder type: " + columnEncoder.getClass().getCanonicalName());
}
public static ColumnEncoder createInstance(int type) {
EncoderType etype = EncoderType.values()[type];
switch(etype) {
case Bin:
return new ColumnEncoderBin();
case Dummycode:
return new ColumnEncoderDummycode();
case FeatureHash:
return new ColumnEncoderFeatureHash();
case PassThrough:
return new ColumnEncoderPassThrough();
case Recode:
return new ColumnEncoderRecode();
default:
throw new DMLRuntimeException("Unsupported encoder type: " + etype);
}
}
private static HashMap<String, Integer> getColumnPositions(String[] colnames) {
HashMap<String, Integer> ret = new HashMap<>();
for(int i = 0; i < colnames.length; i++)
ret.put(colnames[i], i);
return ret;
}
}
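/*
 * Hedged usage sketch (not part of the original file): building a MultiColumnEncoder from a
 * transform specification. The spec string and column layout are made up for illustration; only
 * createEncoder(...) and the spec keywords handled above (ids, recode, dummycode, bin) come from
 * this class.
 */
class EncoderFactoryUsageSketch {
static MultiColumnEncoder buildExampleEncoder() {
String spec = "{\"ids\": true, \"recode\": [1], \"dummycode\": [2], "
+ "\"bin\": [{\"id\": 3, \"method\": \"equi-width\", \"numbins\": 4}]}";
String[] colnames = new String[] {"city", "category", "price", "comment"};
ValueType[] schema = new ValueType[] {
ValueType.STRING, ValueType.STRING, ValueType.FP64, ValueType.STRING};
// no prior transform metadata, apply to all columns (minCol/maxCol = -1)
return EncoderFactory.createEncoder(spec, colnames, schema, null, -1, -1);
}
}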
|
|
package com.brentvatne.exoplayer;
import android.view.View;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.ReactContext;
import com.facebook.react.bridge.WritableArray;
import com.facebook.react.bridge.WritableMap;
import com.facebook.react.uimanager.events.RCTEventEmitter;
import com.google.android.exoplayer2.metadata.Metadata;
import com.google.android.exoplayer2.metadata.id3.Id3Frame;
import com.google.android.exoplayer2.metadata.id3.TextInformationFrame;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import androidx.annotation.StringDef;
class VideoEventEmitter {
private final RCTEventEmitter eventEmitter;
private int viewId = View.NO_ID;
VideoEventEmitter(ReactContext reactContext) {
this.eventEmitter = reactContext.getJSModule(RCTEventEmitter.class);
}
private static final String EVENT_LOAD_START = "onVideoLoadStart";
private static final String EVENT_LOAD = "onVideoLoad";
private static final String EVENT_ERROR = "onVideoError";
private static final String EVENT_PROGRESS = "onVideoProgress";
private static final String EVENT_SEEK = "onVideoSeek";
private static final String EVENT_END = "onVideoEnd";
private static final String EVENT_STALLED = "onPlaybackStalled";
private static final String EVENT_RESUME = "onPlaybackResume";
private static final String EVENT_READY = "onReadyForDisplay";
private static final String EVENT_BUFFER = "onVideoBuffer";
private static final String EVENT_IDLE = "onVideoIdle";
private static final String EVENT_TIMED_METADATA = "onTimedMetadata";
private static final String EVENT_AUDIO_BECOMING_NOISY = "onAudioBecomingNoisy";
private static final String EVENT_AUDIO_FOCUS_CHANGE = "onAudioFocusChanged";
static final String[] Events = {
EVENT_LOAD_START,
EVENT_LOAD,
EVENT_ERROR,
EVENT_PROGRESS,
EVENT_SEEK,
EVENT_END,
EVENT_STALLED,
EVENT_RESUME,
EVENT_READY,
EVENT_BUFFER,
EVENT_IDLE,
EVENT_TIMED_METADATA,
EVENT_AUDIO_BECOMING_NOISY,
EVENT_AUDIO_FOCUS_CHANGE,
};
@Retention(RetentionPolicy.SOURCE)
@StringDef({
EVENT_LOAD_START,
EVENT_LOAD,
EVENT_ERROR,
EVENT_PROGRESS,
EVENT_SEEK,
EVENT_END,
EVENT_STALLED,
EVENT_RESUME,
EVENT_READY,
EVENT_BUFFER,
EVENT_IDLE,
EVENT_TIMED_METADATA,
EVENT_AUDIO_BECOMING_NOISY,
EVENT_AUDIO_FOCUS_CHANGE,
})
@interface VideoEvents {
}
private static final String EVENT_PROP_FAST_FORWARD = "canPlayFastForward";
private static final String EVENT_PROP_SLOW_FORWARD = "canPlaySlowForward";
private static final String EVENT_PROP_SLOW_REVERSE = "canPlaySlowReverse";
private static final String EVENT_PROP_REVERSE = "canPlayReverse";
private static final String EVENT_PROP_STEP_FORWARD = "canStepForward";
private static final String EVENT_PROP_STEP_BACKWARD = "canStepBackward";
private static final String EVENT_PROP_DURATION = "duration";
private static final String EVENT_PROP_PLAYABLE_DURATION = "playableDuration";
private static final String EVENT_PROP_CURRENT_TIME = "currentTime";
private static final String EVENT_PROP_SEEK_TIME = "seekTime";
private static final String EVENT_PROP_NATURAL_SIZE = "naturalSize";
private static final String EVENT_PROP_WIDTH = "width";
private static final String EVENT_PROP_HEIGHT = "height";
private static final String EVENT_PROP_ORIENTATION = "orientation";
private static final String EVENT_PROP_HAS_AUDIO_FOCUS = "hasAudioFocus";
private static final String EVENT_PROP_IS_BUFFERING = "isBuffering";
private static final String EVENT_PROP_ERROR = "error";
private static final String EVENT_PROP_ERROR_STRING = "errorString";
private static final String EVENT_PROP_ERROR_EXCEPTION = "errorException";
private static final String EVENT_PROP_TIMED_METADATA = "metadata";
void setViewId(int viewId) {
this.viewId = viewId;
}
void loadStart() {
receiveEvent(EVENT_LOAD_START, null);
}
void load(double duration, double currentPosition, int videoWidth, int videoHeight) {
WritableMap event = Arguments.createMap();
event.putDouble(EVENT_PROP_DURATION, duration / 1000D);
event.putDouble(EVENT_PROP_CURRENT_TIME, currentPosition / 1000D);
WritableMap naturalSize = Arguments.createMap();
naturalSize.putInt(EVENT_PROP_WIDTH, videoWidth);
naturalSize.putInt(EVENT_PROP_HEIGHT, videoHeight);
if (videoWidth > videoHeight) {
naturalSize.putString(EVENT_PROP_ORIENTATION, "landscape");
} else {
naturalSize.putString(EVENT_PROP_ORIENTATION, "portrait");
}
event.putMap(EVENT_PROP_NATURAL_SIZE, naturalSize);
// TODO: Actually check if you can.
event.putBoolean(EVENT_PROP_FAST_FORWARD, true);
event.putBoolean(EVENT_PROP_SLOW_FORWARD, true);
event.putBoolean(EVENT_PROP_SLOW_REVERSE, true);
event.putBoolean(EVENT_PROP_REVERSE, true);
event.putBoolean(EVENT_PROP_STEP_BACKWARD, true);
event.putBoolean(EVENT_PROP_STEP_FORWARD, true);
receiveEvent(EVENT_LOAD, event);
}
void progressChanged(double currentPosition, double bufferedDuration) {
WritableMap event = Arguments.createMap();
event.putDouble(EVENT_PROP_CURRENT_TIME, currentPosition / 1000D);
event.putDouble(EVENT_PROP_PLAYABLE_DURATION, bufferedDuration / 1000D);
receiveEvent(EVENT_PROGRESS, event);
}
void seek(long currentPosition, long seekTime) {
WritableMap event = Arguments.createMap();
event.putDouble(EVENT_PROP_CURRENT_TIME, currentPosition / 1000D);
event.putDouble(EVENT_PROP_SEEK_TIME, seekTime / 1000D);
receiveEvent(EVENT_SEEK, event);
}
void ready() {
receiveEvent(EVENT_READY, null);
}
void buffering(boolean isBuffering) {
WritableMap map = Arguments.createMap();
map.putBoolean(EVENT_PROP_IS_BUFFERING, isBuffering);
receiveEvent(EVENT_BUFFER, map);
}
void idle() {
receiveEvent(EVENT_IDLE, null);
}
void end() {
receiveEvent(EVENT_END, null);
}
void error(String errorString, Exception exception) {
WritableMap error = Arguments.createMap();
error.putString(EVENT_PROP_ERROR_STRING, errorString);
error.putString(EVENT_PROP_ERROR_EXCEPTION, exception.getMessage());
WritableMap event = Arguments.createMap();
event.putMap(EVENT_PROP_ERROR, error);
receiveEvent(EVENT_ERROR, event);
}
void timedMetadata(Metadata metadata) {
WritableArray metadataArray = Arguments.createArray();
for (int i = 0; i < metadata.length(); i++) {
Id3Frame frame = (Id3Frame) metadata.get(i);
String value = "";
if (frame instanceof TextInformationFrame) {
TextInformationFrame txxxFrame = (TextInformationFrame) frame;
value = txxxFrame.value;
}
String identifier = frame.id;
WritableMap map = Arguments.createMap();
map.putString("identifier", identifier);
map.putString("value", value);
metadataArray.pushMap(map);
}
WritableMap event = Arguments.createMap();
event.putArray(EVENT_PROP_TIMED_METADATA, metadataArray);
receiveEvent(EVENT_TIMED_METADATA, event);
}
void audioFocusChanged(boolean hasFocus) {
WritableMap map = Arguments.createMap();
map.putBoolean(EVENT_PROP_HAS_AUDIO_FOCUS, hasFocus);
receiveEvent(EVENT_AUDIO_FOCUS_CHANGE, map);
}
void audioBecomingNoisy() {
receiveEvent(EVENT_AUDIO_BECOMING_NOISY, null);
}
private void receiveEvent(@VideoEvents String type, WritableMap event) {
eventEmitter.receiveEvent(viewId, type, event);
}
}
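/*
 * Hedged usage sketch (not part of the original file): wiring the emitter to a view and firing a
 * few of the events defined above. The ReactContext and view id would normally come from the
 * React Native view manager that owns the player view.
 */
class VideoEventEmitterSketch {
static void emitStartupEvents(ReactContext reactContext, int reactViewId) {
VideoEventEmitter emitter = new VideoEventEmitter(reactContext);
emitter.setViewId(reactViewId);
emitter.loadStart(); // JS receives onVideoLoadStart
emitter.load(120000, 0, 1920, 1080); // duration/position in ms, converted to seconds
emitter.progressChanged(15000, 30000); // onVideoProgress with playableDuration
}
}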
|
|
/*
* Copyright (c) 2022 Tectonicus contributors. All rights reserved.
*
* This file is part of Tectonicus. It is subject to the license terms in the LICENSE file found in
* the top-level directory of this distribution. The full list of project contributors is contained
* in the AUTHORS file found in the same location.
*
*/
package tectonicus;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.util.Stack;
import tectonicus.util.Vector2f;
import tectonicus.util.Vector3l;
public class JsonWriter
{
private enum State
{
ObjectStart,
ObjectContinue,
ArrayStart,
ArrayContinue
}
private OutputStream out;
private PrintWriter writer;
private int indent;
private Stack<State> stateStack;
public JsonWriter(File file) throws FileNotFoundException, IOException
{
if (file.exists())
file.delete();
out = new FileOutputStream(file);
writer = new PrintWriter(out);
stateStack = new Stack<State>();
stateStack.push(State.ObjectStart);
writer.println();
}
public void close()
{
try
{
if (writer != null)
writer.close();
if (out != null)
out.close();
}
catch (IOException e) {}
}
public void startObject(String name)
{
if (stateStack.size() == 1)
{
// Start a new top level object
writer.println();
writer.println();
writer.println(indent()+"var "+name+" =");
}
else
{
// Start an embedded object
if (stateStack.peek() == State.ObjectContinue)
{
writer.println(",");
stateStack.pop();
stateStack.push(State.ObjectContinue);
}
writer.println(indent()+name+": ");
}
writer.println(indent()+"{");
indent++;
stateStack.push(State.ObjectStart);
}
public void endObject()
{
stateStack.pop();
indent--;
if (stateStack.peek() == State.ObjectStart)
{
stateStack.pop();
stateStack.push(State.ObjectContinue);
}
writer.println();
writer.print(indent()+"}");
}
public void writeRawVariable(String varName, String varValue)
{
if (stateStack.size() > 1 && stateStack.peek() == State.ObjectContinue)
writer.println(",");
else
writer.println();
if (stateStack.size() == 1)
{
// Start a new top level var
writer.print(indent()+"var "+varName+" = "+varValue);
}
else
{
writer.print(indent()+varName+": "+varValue);
}
stateStack.pop();
stateStack.push(State.ObjectContinue);
}
public void writeVariable(String varName, String varValue)
{
writeRawVariable(varName, "\""+jsEscape(varValue)+"\"");
}
public void writeVariable(String varName, final int varValue)
{
writeRawVariable(varName, ""+varValue);
}
public void writeVariable(String varName, final long varValue)
{
writeRawVariable(varName, ""+varValue);
}
public void writeWorldCoord(String varName, Vector3l varValue)
{
String val = "new WorldCoord("+varValue.x+", "+varValue.y+", "+varValue.z+")";
writeRawVariable(varName, val);
}
public void writeMapsPoint(String varName, Vector2f varValue)
{
String val = "new L.Point("+varValue.x+", "+varValue.y+")";
writeRawVariable(varName, val);
}
public void writeMapsPoint(String varName, final long x, final long y)
{
String val = "new L.Point("+x+", "+y+")";
writeRawVariable(varName, val);
}
public void startArray(String arrayName)
{
if (stateStack.size() == 1)
{
// Start a new top level array
writer.println();
writer.println();
writer.println(indent()+"var "+arrayName+" =");
}
else
{
// Start an embedded array
if (stateStack.peek() == State.ObjectContinue)
{
writer.println(indent()+",");
stateStack.pop();
stateStack.push(State.ObjectContinue);
}
writer.println(indent()+arrayName+": ");
}
writer.println(indent()+"[");
indent++;
stateStack.push(State.ArrayStart);
}
public void startArrayObject()
{
if (stateStack.peek() == State.ArrayContinue)
writer.println(",");
// else
// writer.println();
writer.println(indent()+"{");
stateStack.pop();
stateStack.push(State.ArrayContinue);
stateStack.push(State.ObjectStart);
indent++;
}
public void endArrayObject()
{
stateStack.pop();
indent--;
writer.println();
writer.print(indent()+"}");
}
public void endArray()
{
stateStack.pop();
indent--;
writer.println();
writer.print(indent()+"]");
}
public String indent()
{
String res = "";
for (int i=0; i<indent; i++)
res += "\t";
return res;
}
private static String jsEscape(String text)
{
text = text.replace("\\", "\\\\"); // Replace \ with \\
text = text.replace(" ", " "); // Replace spaces with
text = text.replace("\"", "\\\""); // Replace " with \"
return text;
}
}
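Despite the name, JsonWriter emits JavaScript variable assignments rather than strict JSON, so Tectonicus map pages can pull the output in with a plain script tag. A minimal usage sketch, using only methods defined above; the output file name is illustrative and error handling beyond the declared exception is omitted.

// Minimal sketch; relies on the java.io imports already present in this file.
void writeMarkers(File outputFile) throws IOException {
    JsonWriter json = new JsonWriter(outputFile);
    json.startArray("signMarkers");          // writes: var signMarkers = [
    json.startArrayObject();
    json.writeVariable("label", "Spawn");    //   label: "Spawn"
    json.writeMapsPoint("pos", 0, 64);       //   pos: new L.Point(0, 64)
    json.endArrayObject();
    json.endArray();                         // writes: ]
    json.close();
}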
|
|
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.cmmn.rest.service.api.runtime.task;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.util.Map;
import javax.annotation.PostConstruct;
import org.apache.commons.io.IOUtils;
import org.flowable.cmmn.api.CmmnRuntimeService;
import org.flowable.cmmn.rest.service.api.CmmnRestResponseFactory;
import org.flowable.cmmn.rest.service.api.engine.variable.RestVariable;
import org.flowable.cmmn.rest.service.api.engine.variable.RestVariable.RestVariableScope;
import org.flowable.common.engine.api.FlowableException;
import org.flowable.common.engine.api.FlowableIllegalArgumentException;
import org.flowable.common.engine.api.FlowableObjectNotFoundException;
import org.flowable.common.engine.api.scope.ScopeTypes;
import org.flowable.common.rest.exception.FlowableContentNotSupportedException;
import org.flowable.task.api.Task;
import org.flowable.variable.service.impl.persistence.entity.VariableInstanceEntity;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.env.Environment;
import org.springframework.web.multipart.MultipartFile;
import org.springframework.web.multipart.MultipartHttpServletRequest;
/**
* @author Frederik Heremans
*/
public class TaskVariableBaseResource extends TaskBaseResource {
@Autowired
protected Environment env;
@Autowired
protected CmmnRuntimeService runtimeService;
protected boolean isSerializableVariableAllowed;
@PostConstruct
protected void postConstruct() {
isSerializableVariableAllowed = env.getProperty("rest.variables.allow.serializable", Boolean.class, true);
}
public RestVariable getVariableFromRequest(String taskId, String variableName, String scope, boolean includeBinary) {
Task task = getTaskFromRequest(taskId);
boolean variableFound = false;
Object value = null;
RestVariableScope variableScope = RestVariable.getScopeFromString(scope);
if (variableScope == null) {
// First, check local variables (which have precedence when no scope is supplied)
if (taskService.hasVariableLocal(taskId, variableName)) {
value = taskService.getVariableLocal(taskId, variableName);
variableScope = RestVariableScope.LOCAL;
variableFound = true;
} else {
// Fall back to the case-instance variable when the task has no local variable with this name
if (ScopeTypes.CMMN.equals(task.getScopeType()) && task.getScopeId() != null && runtimeService.hasVariable(task.getScopeId(), variableName)) {
value = runtimeService.getVariable(task.getScopeId(), variableName);
variableScope = RestVariableScope.GLOBAL;
variableFound = true;
}
}
} else if (variableScope == RestVariableScope.GLOBAL) {
if (ScopeTypes.CMMN.equals(task.getScopeType()) && task.getScopeId() != null && runtimeService.hasVariable(task.getScopeId(), variableName)) {
value = runtimeService.getVariable(task.getScopeId(), variableName);
variableFound = true;
}
} else if (variableScope == RestVariableScope.LOCAL) {
if (taskService.hasVariableLocal(taskId, variableName)) {
value = taskService.getVariableLocal(taskId, variableName);
variableFound = true;
}
}
if (!variableFound) {
throw new FlowableObjectNotFoundException("Task '" + taskId + "' doesn't have a variable with name: '" + variableName + "'.", VariableInstanceEntity.class);
} else {
return restResponseFactory.createRestVariable(variableName, value, variableScope, taskId, CmmnRestResponseFactory.VARIABLE_TASK, includeBinary);
}
}
protected boolean hasVariableOnScope(Task task, String variableName, RestVariableScope scope) {
boolean variableFound = false;
if (scope == RestVariableScope.GLOBAL) {
if (ScopeTypes.CMMN.equals(task.getScopeType()) && task.getScopeId() != null && runtimeService.hasVariable(task.getScopeId(), variableName)) {
variableFound = true;
}
} else if (scope == RestVariableScope.LOCAL) {
if (taskService.hasVariableLocal(task.getId(), variableName)) {
variableFound = true;
}
}
return variableFound;
}
protected RestVariable setBinaryVariable(MultipartHttpServletRequest request, Task task, boolean isNew) {
// Validate input and set defaults
if (request.getFileMap().size() == 0) {
throw new FlowableIllegalArgumentException("No file content was found in request body.");
}
// Get first file in the map, ignore possible other files
MultipartFile file = request.getFile(request.getFileMap().keySet().iterator().next());
if (file == null) {
throw new FlowableIllegalArgumentException("No file content was found in request body.");
}
String variableScope = null;
String variableName = null;
String variableType = null;
Map<String, String[]> paramMap = request.getParameterMap();
for (String parameterName : paramMap.keySet()) {
if (paramMap.get(parameterName).length > 0) {
if ("scope".equalsIgnoreCase(parameterName)) {
variableScope = paramMap.get(parameterName)[0];
} else if ("name".equalsIgnoreCase(parameterName)) {
variableName = paramMap.get(parameterName)[0];
} else if ("type".equalsIgnoreCase(parameterName)) {
variableType = paramMap.get(parameterName)[0];
}
}
}
try {
if (variableName == null) {
throw new FlowableIllegalArgumentException("No variable name was found in request body.");
}
if (variableType != null) {
if (!CmmnRestResponseFactory.BYTE_ARRAY_VARIABLE_TYPE.equals(variableType) && !CmmnRestResponseFactory.SERIALIZABLE_VARIABLE_TYPE.equals(variableType)) {
throw new FlowableIllegalArgumentException("Only 'binary' and 'serializable' are supported as variable type.");
}
} else {
variableType = CmmnRestResponseFactory.BYTE_ARRAY_VARIABLE_TYPE;
}
RestVariableScope scope = RestVariableScope.LOCAL;
if (variableScope != null) {
scope = RestVariable.getScopeFromString(variableScope);
}
if (variableType.equals(CmmnRestResponseFactory.BYTE_ARRAY_VARIABLE_TYPE)) {
// Use raw bytes as variable value
byte[] variableBytes = IOUtils.toByteArray(file.getInputStream());
setVariable(task, variableName, variableBytes, scope, isNew);
} else if (isSerializableVariableAllowed) {
// Try deserializing the object; try-with-resources closes the stream even when deserialization fails
try (ObjectInputStream stream = new ObjectInputStream(file.getInputStream())) {
Object value = stream.readObject();
setVariable(task, variableName, value, scope, isNew);
}
} else {
throw new FlowableContentNotSupportedException("Serialized objects are not allowed");
}
return restResponseFactory.createBinaryRestVariable(variableName, scope, variableType, task.getId(), null);
} catch (IOException ioe) {
throw new FlowableIllegalArgumentException("Error getting binary variable", ioe);
} catch (ClassNotFoundException cnfe) {
throw new FlowableContentNotSupportedException("The provided body contains a serialized object for which the class was not found: " + cnfe.getMessage());
}
}
protected RestVariable setSimpleVariable(RestVariable restVariable, Task task, boolean isNew) {
if (restVariable.getName() == null) {
throw new FlowableIllegalArgumentException("Variable name is required");
}
// Figure out the scope; revert to local if it is omitted
RestVariableScope scope = restVariable.getVariableScope();
if (scope == null) {
scope = RestVariableScope.LOCAL;
}
Object actualVariableValue = restResponseFactory.getVariableValue(restVariable);
setVariable(task, restVariable.getName(), actualVariableValue, scope, isNew);
return restResponseFactory.createRestVariable(restVariable.getName(), actualVariableValue, scope, task.getId(), CmmnRestResponseFactory.VARIABLE_TASK, false);
}
protected void setVariable(Task task, String name, Object value, RestVariableScope scope, boolean isNew) {
// Create can only be done on new variables. Existing variables should
// be updated using PUT
boolean hasVariable = hasVariableOnScope(task, name, scope);
if (isNew && hasVariable) {
throw new FlowableException("Variable '" + name + "' is already present on task '" + task.getId() + "'.");
}
if (!isNew && !hasVariable) {
throw new FlowableObjectNotFoundException("Task '" + task.getId() + "' doesn't have a variable with name: '" + name + "'.", null);
}
if (scope == RestVariableScope.LOCAL) {
taskService.setVariableLocal(task.getId(), name, value);
} else {
if (ScopeTypes.CMMN.equals(task.getScopeType()) && task.getScopeId() != null) {
// Explicitly set on the case instance; setting a non-local variable on the
// task overrides a local variable with the same name if one exists
runtimeService.setVariable(task.getScopeId(), name, value);
} else {
// Standalone task, no global variables possible
throw new FlowableIllegalArgumentException("Cannot set global variable '" + name + "' on task '" + task.getId() + "', task is not part of process.");
}
}
}
}
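A short sketch of the scope-resolution contract implemented by getVariableFromRequest above. It calls only methods declared in this class; the concrete subclass instance, task id, and variable name are hypothetical.

// "resource" stands for any Spring-wired subclass of TaskVariableBaseResource; ids and names are made up.
RestVariable readApproved(TaskVariableBaseResource resource) {
    // With no scope supplied, a task-local variable takes precedence over a case-instance variable of the same name.
    RestVariable localOrGlobal = resource.getVariableFromRequest("task-1", "approved", null, false);
    // Requesting "global" explicitly skips the local copy and reads from the CMMN case instance;
    // if the variable exists in neither scope, a FlowableObjectNotFoundException is thrown.
    return resource.getVariableFromRequest("task-1", "approved", "global", false);
}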
|
|
/*
* =========================================================================
* Copyright (c) 2002-2014 Pivotal Software, Inc. All Rights Reserved.
* This product is protected by U.S. and international copyright
* and intellectual property laws. Pivotal products are covered by
* more patents listed at http://www.pivotal.io/patents.
* ========================================================================
*/
package com.gemstone.gemfire.distributed;
import static org.junit.Assert.*;
import static org.junit.Assume.*;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.Properties;
import com.gemstone.gemfire.distributed.LocatorLauncher.Builder;
import com.gemstone.gemfire.distributed.LocatorLauncher.Command;
import com.gemstone.gemfire.distributed.internal.DistributionConfig;
import com.gemstone.gemfire.internal.i18n.LocalizedStrings;
import com.gemstone.gemfire.internal.lang.SystemUtils;
import com.gemstone.gemfire.internal.util.IOUtils;
import com.gemstone.junit.UnitTest;
import joptsimple.OptionException;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
/**
 * The LocatorLauncherJUnitTest class is a suite of unit tests covering the contract and functionality of
* launching a GemFire Locator.
*
* @author John Blum
* @author Kirk Lund
* @see com.gemstone.gemfire.distributed.CommonLauncherTestSuite
* @see com.gemstone.gemfire.distributed.LocatorLauncher
* @see com.gemstone.gemfire.distributed.LocatorLauncher.Builder
* @see com.gemstone.gemfire.distributed.LocatorLauncher.Command
* @see org.junit.Assert
* @see org.junit.Test
* @since 7.0
*/
@Category(UnitTest.class)
public class LocatorLauncherJUnitTest extends CommonLauncherTestSuite {
private static final String GEMFIRE_PROPERTIES_FILE_NAME = "gemfire.properties";
private static final String TEMPORARY_FILE_NAME = "beforeLocatorLauncherJUnitTest_" + GEMFIRE_PROPERTIES_FILE_NAME;
@BeforeClass
public static void setUp() {
if (SystemUtils.isWindows()) {
return;
}
File file = new File(GEMFIRE_PROPERTIES_FILE_NAME);
if (file.exists()) {
File dest = new File(TEMPORARY_FILE_NAME);
assertTrue(file.renameTo(dest));
}
}
@AfterClass
public static void tearDown() {
if (SystemUtils.isWindows()) {
return;
}
File file = new File(TEMPORARY_FILE_NAME);
if (file.exists()) {
File dest = new File(GEMFIRE_PROPERTIES_FILE_NAME);
assertTrue(file.renameTo(dest));
}
}
@Test
public void testBuilderParseArguments() throws Exception {
String expectedWorkingDirectory = System.getProperty("user.dir");
Builder builder = new Builder();
builder.parseArguments("start", "memberOne", "--bind-address", InetAddress.getLocalHost().getHostAddress(),
"--dir", expectedWorkingDirectory, "--hostname-for-clients", "Tucows", "--pid", "1234", "--port", "11235",
"--redirect-output", "--force", "--debug");
assertEquals(Command.START, builder.getCommand());
assertEquals(InetAddress.getLocalHost(), builder.getBindAddress());
assertEquals(expectedWorkingDirectory, builder.getWorkingDirectory());
assertEquals("Tucows", builder.getHostnameForClients());
assertEquals(1234, builder.getPid().intValue());
assertEquals(11235, builder.getPort().intValue());
assertTrue(builder.getRedirectOutput());
assertTrue(builder.getForce());
assertTrue(builder.getDebug());
}
@Test
public void testBuilderParseArgumentsWithCommandInArguments() {
String expectedWorkingDirectory = System.getProperty("user.dir");
Builder builder = new Builder();
builder.parseArguments("start", "--dir=" + expectedWorkingDirectory, "--port", "12345", "memberOne");
assertEquals(Command.START, builder.getCommand());
assertFalse(Boolean.TRUE.equals(builder.getDebug()));
assertFalse(Boolean.TRUE.equals(builder.getForce()));
assertFalse(Boolean.TRUE.equals(builder.getHelp()));
assertNull(builder.getBindAddress());
assertNull(builder.getHostnameForClients());
assertEquals("12345", builder.getMemberName());
assertNull(builder.getPid());
assertEquals(expectedWorkingDirectory, builder.getWorkingDirectory());
assertEquals(12345, builder.getPort().intValue());
}
@Test(expected = IllegalArgumentException.class)
public void testBuilderParseArgumentsWithNonNumericPort() {
try {
new Builder().parseArguments("start", "locator1", "--port", "oneTwoThree");
}
catch (IllegalArgumentException expected) {
assertTrue(expected.getCause() instanceof OptionException);
assertTrue(expected.getMessage(), expected.getMessage().contains(
LocalizedStrings.Launcher_Builder_PARSE_COMMAND_LINE_ARGUMENT_ERROR_MESSAGE.toLocalizedString(
"Locator", expected.getCause().getMessage())));
throw expected;
}
}
@Test
public void testForceDefaultsToFalse() {
assertFalse(new Builder().getForce());
}
@Test
public void testForceSetToTrue() {
Builder builder = new Builder();
builder.parseArguments("start", "--force");
assertTrue(Boolean.TRUE.equals(builder.getForce()));
}
@Test
public void testSetAndGetCommand() {
final Builder builder = new Builder();
assertEquals(Builder.DEFAULT_COMMAND, builder.getCommand());
assertSame(builder, builder.setCommand(Command.START));
assertEquals(Command.START, builder.getCommand());
assertSame(builder, builder.setCommand(Command.STATUS));
assertEquals(Command.STATUS, builder.getCommand());
assertSame(builder, builder.setCommand(Command.STOP));
assertEquals(Command.STOP, builder.getCommand());
assertSame(builder, builder.setCommand(null));
assertEquals(Builder.DEFAULT_COMMAND, builder.getCommand());
}
@Test
public void testSetAndGetBindAddress() throws UnknownHostException {
final Builder builder = new Builder();
assertNull(builder.getBindAddress());
assertSame(builder, builder.setBindAddress(null));
assertNull(builder.getBindAddress());
assertSame(builder, builder.setBindAddress(""));
assertNull(builder.getBindAddress());
assertSame(builder, builder.setBindAddress(" "));
assertNull(builder.getBindAddress());
assertSame(builder, builder.setBindAddress(InetAddress.getLocalHost().getCanonicalHostName()));
assertEquals(InetAddress.getLocalHost(), builder.getBindAddress());
}
@Test(expected = IllegalArgumentException.class)
public void testSetBindAddressToUnknownHost() {
try {
new Builder().setBindAddress("badhostname.badcompany.bad");
}
catch (IllegalArgumentException expected) {
final String expectedMessage = LocalizedStrings.Launcher_Builder_UNKNOWN_HOST_ERROR_MESSAGE.toLocalizedString("Locator");
assertEquals(expectedMessage, expected.getMessage());
assertTrue(expected.getCause() instanceof UnknownHostException);
throw expected;
}
}
@Test
public void testSetAndGetHostnameForClients() {
final Builder builder = new Builder();
assertNull(builder.getHostnameForClients());
assertSame(builder, builder.setHostnameForClients("Pegasus"));
assertEquals("Pegasus", builder.getHostnameForClients());
assertSame(builder, builder.setHostnameForClients(null));
assertNull(builder.getHostnameForClients());
}
@Test(expected = IllegalArgumentException.class)
public void testSetHostnameForClientsWithBlankString() {
try {
new Builder().setHostnameForClients(" ");
}
catch (IllegalArgumentException expected) {
assertEquals(LocalizedStrings.LocatorLauncher_Builder_INVALID_HOSTNAME_FOR_CLIENTS_ERROR_MESSAGE
.toLocalizedString(), expected.getMessage());
throw expected;
}
}
@Test(expected = IllegalArgumentException.class)
public void testSetHostnameForClientsWithEmptyString() {
try {
new Builder().setHostnameForClients("");
}
catch (IllegalArgumentException expected) {
assertEquals(LocalizedStrings.LocatorLauncher_Builder_INVALID_HOSTNAME_FOR_CLIENTS_ERROR_MESSAGE
.toLocalizedString(), expected.getMessage());
throw expected;
}
}
@Test
public void testSetAndGetMemberName() {
Builder builder = new Builder();
assertNull(builder.getMemberName());
assertSame(builder, builder.setMemberName("locatorOne"));
assertEquals("locatorOne", builder.getMemberName());
assertSame(builder, builder.setMemberName(null));
assertNull(builder.getMemberName());
}
@Test(expected = IllegalArgumentException.class)
public void testSetMemberNameWithBlankString() {
try {
new Builder().setMemberName(" ");
}
catch (IllegalArgumentException expected) {
assertEquals(LocalizedStrings.Launcher_Builder_MEMBER_NAME_ERROR_MESSAGE.toLocalizedString("Locator"),
expected.getMessage());
throw expected;
}
}
@Test(expected = IllegalArgumentException.class)
public void testSetMemberNameWithEmptyString() {
try {
new Builder().setMemberName("");
}
catch (IllegalArgumentException expected) {
assertEquals(LocalizedStrings.Launcher_Builder_MEMBER_NAME_ERROR_MESSAGE.toLocalizedString("Locator"),
expected.getMessage());
throw expected;
}
}
@Test
public void testSetAndGetPid() {
Builder builder = new Builder();
assertNull(builder.getPid());
assertSame(builder, builder.setPid(0));
assertEquals(0, builder.getPid().intValue());
assertSame(builder, builder.setPid(1));
assertEquals(1, builder.getPid().intValue());
assertSame(builder, builder.setPid(1024));
assertEquals(1024, builder.getPid().intValue());
assertSame(builder, builder.setPid(12345));
assertEquals(12345, builder.getPid().intValue());
assertSame(builder, builder.setPid(null));
assertNull(builder.getPid());
}
@Test(expected = IllegalArgumentException.class)
public void testSetPidToInvalidValue() {
try {
new Builder().setPid(-1);
}
catch (IllegalArgumentException expected) {
assertEquals(LocalizedStrings.Launcher_Builder_PID_ERROR_MESSAGE.toLocalizedString(), expected.getMessage());
throw expected;
}
}
@Test
public void testSetAndGetPort() {
Builder builder = new Builder();
assertEquals(LocatorLauncher.DEFAULT_LOCATOR_PORT, builder.getPort());
assertSame(builder, builder.setPort(65535));
assertEquals(65535, builder.getPort().intValue());
assertSame(builder, builder.setPort(1024));
assertEquals(1024, builder.getPort().intValue());
assertSame(builder, builder.setPort(80));
assertEquals(80, builder.getPort().intValue());
assertSame(builder, builder.setPort(1));
assertEquals(1, builder.getPort().intValue());
assertSame(builder, builder.setPort(0));
assertEquals(0, builder.getPort().intValue());
assertSame(builder, builder.setPort(null));
assertEquals(LocatorLauncher.DEFAULT_LOCATOR_PORT, builder.getPort());
}
@Test(expected = IllegalArgumentException.class)
public void testSetPortToOverflow() {
try {
new Builder().setPort(65536);
}
catch (IllegalArgumentException expected) {
assertEquals(LocalizedStrings.Launcher_Builder_INVALID_PORT_ERROR_MESSAGE.toLocalizedString("Locator"),
expected.getMessage());
throw expected;
}
}
@Test(expected = IllegalArgumentException.class)
public void testSetPortToUnderflow() {
try {
new Builder().setPort(-1);
}
catch (IllegalArgumentException expected) {
assertEquals(LocalizedStrings.Launcher_Builder_INVALID_PORT_ERROR_MESSAGE.toLocalizedString("Locator"),
expected.getMessage());
throw expected;
}
}
@Test
public void testSetAndGetWorkingDirectory() {
Builder builder = new Builder();
assertEquals(AbstractLauncher.DEFAULT_WORKING_DIRECTORY, builder.getWorkingDirectory());
assertSame(builder, builder.setWorkingDirectory(null));
assertEquals(AbstractLauncher.DEFAULT_WORKING_DIRECTORY, builder.getWorkingDirectory());
assertSame(builder, builder.setWorkingDirectory(""));
assertEquals(AbstractLauncher.DEFAULT_WORKING_DIRECTORY, builder.getWorkingDirectory());
assertSame(builder, builder.setWorkingDirectory(" "));
assertEquals(AbstractLauncher.DEFAULT_WORKING_DIRECTORY, builder.getWorkingDirectory());
assertSame(builder, builder.setWorkingDirectory(System.getProperty("user.dir")));
assertEquals(System.getProperty("user.dir"), builder.getWorkingDirectory());
assertSame(builder, builder.setWorkingDirectory(System.getProperty("java.io.tmpdir")));
assertEquals(IOUtils.tryGetCanonicalPathElseGetAbsolutePath(new File(System.getProperty("java.io.tmpdir"))),
builder.getWorkingDirectory());
assertSame(builder, builder.setWorkingDirectory(null));
assertEquals(AbstractLauncher.DEFAULT_WORKING_DIRECTORY, builder.getWorkingDirectory());
}
@Test(expected = IllegalArgumentException.class)
public void testSetWorkingDirectoryToFile() throws IOException {
File tmpFile = File.createTempFile("tmp", "file");
assertNotNull(tmpFile);
assertTrue(tmpFile.isFile());
tmpFile.deleteOnExit();
try {
new Builder().setWorkingDirectory(tmpFile.getCanonicalPath());
}
catch (IllegalArgumentException expected) {
assertEquals(LocalizedStrings.Launcher_Builder_WORKING_DIRECTORY_NOT_FOUND_ERROR_MESSAGE
.toLocalizedString("Locator"), expected.getMessage());
assertTrue(expected.getCause() instanceof FileNotFoundException);
assertEquals(tmpFile.getCanonicalPath(), expected.getCause().getMessage());
throw expected;
}
}
@Test(expected = IllegalArgumentException.class)
public void testSetWorkingDirectoryToNonExistingDirectory() {
try {
new Builder().setWorkingDirectory("/path/to/non_existing/directory");
}
catch (IllegalArgumentException expected) {
assertEquals(LocalizedStrings.Launcher_Builder_WORKING_DIRECTORY_NOT_FOUND_ERROR_MESSAGE
.toLocalizedString("Locator"), expected.getMessage());
assertTrue(expected.getCause() instanceof FileNotFoundException);
assertEquals("/path/to/non_existing/directory", expected.getCause().getMessage());
throw expected;
}
}
@Test
public void testBuild() {
Builder builder = new Builder();
LocatorLauncher launcher = builder.setCommand(Command.START)
.setDebug(true)
.setHostnameForClients("beanstock.vmware.com")
.setMemberName("Beanstock")
.setPort(8192)
.setWorkingDirectory(AbstractLauncher.DEFAULT_WORKING_DIRECTORY)
.build();
assertNotNull(launcher);
assertEquals(builder.getCommand(), launcher.getCommand());
assertTrue(launcher.isDebugging());
assertEquals(builder.getHostnameForClients(), launcher.getHostnameForClients());
assertEquals(builder.getMemberName(), launcher.getMemberName());
assertEquals(builder.getPort(), launcher.getPort());
assertEquals(builder.getWorkingDirectory(), launcher.getWorkingDirectory());
assertFalse(launcher.isHelping());
assertFalse(launcher.isRunning());
}
@Test
public void testBuildWithMemberNameSetInApiPropertiesOnStart() {
LocatorLauncher launcher = new Builder()
.setCommand(LocatorLauncher.Command.START)
.setMemberName(null)
.set(DistributionConfig.NAME_NAME, "locatorABC")
.build();
assertNotNull(launcher);
assertEquals(LocatorLauncher.Command.START, launcher.getCommand());
assertNull(launcher.getMemberName());
assertEquals("locatorABC", launcher.getProperties().getProperty(DistributionConfig.NAME_NAME));
}
@Test
public void testBuildWithMemberNameSetInGemfirePropertiesOnStart() {
// TODO fix this test on Windows; File renameTo and delete in finally fail on Windows
assumeFalse(SystemUtils.isWindows());
Properties gemfireProperties = new Properties();
gemfireProperties.setProperty(DistributionConfig.NAME_NAME, "locator123");
File gemfirePropertiesFile = writeGemFirePropertiesToFile(gemfireProperties, "gemfire.properties",
String.format("Test gemfire.properties file for %1$s.%2$s.", getClass().getSimpleName(),
"testBuildWithMemberNameSetInGemfirePropertiesOnStart"));
assertNotNull(gemfirePropertiesFile);
assertTrue(gemfirePropertiesFile.isFile());
try {
LocatorLauncher launcher = new Builder().setCommand(Command.START).setMemberName(null).build();
assertNotNull(launcher);
assertEquals(Command.START, launcher.getCommand());
assertNull(launcher.getMemberName());
}
finally {
assertTrue(gemfirePropertiesFile.delete());
assertFalse(gemfirePropertiesFile.isFile());
}
}
@Test
public void testBuildWithMemberNameSetInSystemPropertiesOnStart() {
try {
System.setProperty(DistributionConfig.GEMFIRE_PREFIX + DistributionConfig.NAME_NAME, "locatorXYZ");
LocatorLauncher launcher = new Builder()
.setCommand(LocatorLauncher.Command.START)
.setMemberName(null)
.build();
assertNotNull(launcher);
assertEquals(LocatorLauncher.Command.START, launcher.getCommand());
assertNull(launcher.getMemberName());
}
finally {
System.clearProperty(DistributionConfig.GEMFIRE_PREFIX + DistributionConfig.NAME_NAME);
}
}
@Test(expected = IllegalStateException.class)
public void testBuildWithNoMemberNameOnStart() {
try {
new Builder().setCommand(Command.START).build();
}
catch (IllegalStateException expected) {
assertEquals(LocalizedStrings.Launcher_Builder_MEMBER_NAME_VALIDATION_ERROR_MESSAGE.toLocalizedString("Locator"),
expected.getMessage());
throw expected;
}
}
@Test(expected = IllegalStateException.class)
public void testBuildWithMismatchingCurrentAndWorkingDirectoryOnStart() {
try {
new Builder().setCommand(Command.START)
.setMemberName("memberOne")
.setWorkingDirectory(System.getProperty("java.io.tmpdir"))
.build();
}
catch (IllegalStateException expected) {
assertEquals(LocalizedStrings.Launcher_Builder_WORKING_DIRECTORY_OPTION_NOT_VALID_ERROR_MESSAGE
.toLocalizedString("Locator"), expected.getMessage());
throw expected;
}
}
}
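The tests above only exercise the Builder; for context, a hedged sketch of how the launcher they validate is typically started and stopped. The start() and stop() calls are assumed from the public LocatorLauncher API rather than shown in this file, and the member name and port are illustrative.

// Minimal sketch; not part of the test suite above.
void runLocator() {
    LocatorLauncher launcher = new LocatorLauncher.Builder()
        .setCommand(LocatorLauncher.Command.START)
        .setMemberName("locatorOne")
        .setPort(10334)
        .build();
    launcher.start();   // launches an in-process Locator in the current working directory (assumed API)
    // ... do work ...
    launcher.stop();
}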
|
|
/**
* Appia: Group communication and protocol composition framework library
* Copyright 2006 University of Lisbon
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Initial developer(s): Alexandre Pinto and Hugo Miranda.
* Contributor(s): See Appia web page for a list of contributors.
*/
/*
* Created on Mar 11, 2004
*
*/
package net.sf.appia.xml.templates;
import java.util.Hashtable;
import java.util.LinkedList;
import net.sf.appia.core.AppiaCursorException;
import net.sf.appia.core.AppiaInvalidQoSException;
import net.sf.appia.core.Channel;
import net.sf.appia.core.ChannelCursor;
import net.sf.appia.core.EventScheduler;
import net.sf.appia.core.Layer;
import net.sf.appia.core.QoS;
import net.sf.appia.core.Session;
import net.sf.appia.core.memoryManager.MemoryManager;
import net.sf.appia.management.jmx.JMXConfiguration;
import net.sf.appia.xml.AppiaXMLException;
import net.sf.appia.xml.interfaces.InitializableSession;
import net.sf.appia.xml.utils.ChannelProperties;
import net.sf.appia.xml.utils.SessionProperties;
import net.sf.appia.xml.utils.SharingState;
/**
* This class implements a channel template. It is used to generate one or more
* channels that have identical QoS.
*
* @author Jose Mocito
*
*/
public class ChannelTemplate {
// Template name
private String name;
// Session templates
private LinkedList sessionTemplates;
/**
* Builds a channel template.
*
* @param name the template's name.
*/
public ChannelTemplate(String name) {
this.name = name;
sessionTemplates = new LinkedList();
}
/**
* @return Returns the name.
*/
public String getName() {
return name;
}
/**
* @return Returns the sessionTemplates.
*/
public LinkedList getSessionTemplates() {
return sessionTemplates;
}
/**
* Adds a session to the channel template.
* <p>
 * The first session added corresponds to the bottom-most layer.
*
* @param name the name of the session.
* @param sharing the sharing property of the session.
* @param layer the layer associated with the session.
* @see SharingState
*/
public void addSession(String name, int sharing, Layer layer) {
sessionTemplates.add(new SessionTemplate(name,sharing,layer));
}
/**
* <p>Returns the number of layers defined in this template.</p>
*
* @return the number of layers defined in this template.
*/
public int numberOfLayers() {
return sessionTemplates.size();
}
/**
* Creates a channel.
* <p>
* Channel returned is not initialized!
*
* @param name the name of the channel.
* @param label the label of the channel or null if none is defined.
* @param params the parameters passed to the channel.
* @param globalSessions Hashtable containing the
* shared "global sessions".
* @param labelSessions Hashtable containing the
* shared "label sessions".
* @return the channel created.
* @throws AppiaException
*/
/*public Channel createChannel(
String name,
String label,
ChannelProperties params,
Hashtable globalSessions,
Hashtable labelSessions)
throws AppiaException {
return createChannel(name,label,params,globalSessions,labelSessions,null);
}*/
/**
* Creates a channel.
* <p>
* Channel returned is not initialized!
*
* @param name the name of the channel.
* @param label the label of the channel or null if none is defined.
* @param params the parameters passed to the channel.
* @param globalSessions Hashtable containing the
* shared "global sessions".
* @param labelSessions Hashtable containing the
* shared "label sessions".
* @param eventScheduler the EventScheduler associated with the channel.
* @return the channel created.
* @throws AppiaXMLException
*/
public Channel createChannel(
String name,
String label,
ChannelProperties params,
Hashtable globalSessions,
Hashtable labelSessions,
EventScheduler eventScheduler,
MemoryManager memoryManager,
JMXConfiguration jmxConfig)
throws AppiaXMLException {
// Complete name is equal to the given name plus the template name
//String completeName = name + " " + this.name;
final int numberOfSessions = sessionTemplates.size();
final Layer[] qosList = new Layer[numberOfSessions];
SessionTemplate currSession = null;
// Generates the QoS
for (int i = 0; i < numberOfSessions; i++) {
currSession = (SessionTemplate) sessionTemplates.get(i);
qosList[i] = currSession.layerInstance();
}
QoS qos = null;
try {
qos = new QoS(name+" QoS",qosList);
} catch (AppiaInvalidQoSException e) {
throw new AppiaXMLException("Unable to create QoS: "+name+" QoS",e);
}
// Creates the channel based on the QoS
Channel channel;
if (eventScheduler == null && memoryManager == null)
channel = qos.createUnboundChannel(name,jmxConfig);
else if (eventScheduler == null && memoryManager != null)
channel = qos.createUnboundChannel(name,memoryManager,jmxConfig);
else if (eventScheduler != null && memoryManager == null)
channel = qos.createUnboundChannel(name,eventScheduler,jmxConfig);
else
channel = qos.createUnboundChannel(name,eventScheduler,memoryManager,jmxConfig);
final ChannelCursor cc = channel.getCursor();
cc.bottom();
// Associates the sessions to their corresponding layers
for (int i = 0; i < numberOfSessions; i++) {
currSession = (SessionTemplate) sessionTemplates.get(i);
Session sessionInstance = null;
// if "global session" then use only global sessions table.
if (currSession.getSharingState() == SharingState.GLOBAL)
sessionInstance = currSession.sessionInstance(label,globalSessions);
// else (is label or private) use only label sessions table.
else
sessionInstance = currSession.sessionInstance(label,labelSessions);
// Verifies if the session accepts parameters and if
// they are present in the configuration passes them to
// the session.
if (sessionInstance instanceof InitializableSession &&
params != null &&
params.containsKey(currSession.getName())) {
final SessionProperties parameters =
params.getParams(currSession.getName());
((InitializableSession)sessionInstance).init(parameters);
}
try {
cc.setSession(sessionInstance);
} catch (AppiaCursorException e) {
throw new AppiaXMLException("Unable to set the session "+sessionInstance+
" on channel " + channel.getChannelID()+": "+e.getMessage(),e);
}
try {
cc.up();
} catch (AppiaCursorException e) {
throw new AppiaXMLException("Unable to move the cursor up, on channel " + channel.getChannelID()+".",e);
}
}
return channel;
}
/**
* <b>FOR TESTING PURPOSES ONLY!</b>
*/
public void printChannelTemplate() {
final Object [] staux = sessionTemplates.toArray();
final SessionTemplate[] st = new SessionTemplate[staux.length];
for (int i = 0; i < staux.length; i++)
st[i] = (SessionTemplate) staux[i];
System.out.println("Template Name: "+name);
for (int i = 0; i < sessionTemplates.size(); i++)
st[i].printSessionTemplate();
}
}
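A minimal sketch of populating a template and instantiating a channel from it, following the createChannel signature above. The Layer instances are passed in as parameters because the concrete protocol stack comes from the surrounding XML configuration, and starting the returned channel is left to the caller since createChannel returns it uninitialized.

// Minimal sketch; exception handling is limited to the declared AppiaXMLException.
Channel buildChannel(Layer bottomLayer, Layer topLayer) throws AppiaXMLException {
    ChannelTemplate template = new ChannelTemplate("reliable");
    template.addSession("transport", SharingState.GLOBAL, bottomLayer);    // first added session = bottom-most layer
    template.addSession("application", SharingState.GLOBAL, topLayer);
    return template.createChannel("reliable channel", null, null,
            new Hashtable(), new Hashtable(),    // shared "global" and "label" session tables
            null, null, null);                   // default EventScheduler, no MemoryManager, no JMX configuration
}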
|